Go to the documentation of this file.
13 #ifndef LLVM_ADT_IMMUTABLESET_H
14 #define LLVM_ADT_IMMUTABLESET_H
41 template <
typename ImutInfo >
78 key_type_ref CurrentKey = ImutInfo::KeyOfValue(
T->getValue());
81 else if (ImutInfo::isLess(K,CurrentKey))
121 ImutInfo::KeyOfValue(V)))
125 if (!ImutInfo::isDataEqual(ImutInfo::DataOfValue(
getValue()),
126 ImutInfo::DataOfValue(V)))
146 while (LItr != LEnd && RItr != REnd) {
147 if (&*LItr == &*RItr) {
160 return LItr == LEnd && RItr == REnd;
175 template <
typename Callback>
176 void foreach(Callback&
C) {
199 &&
"Height calculation wrong");
201 assert((HL > HR ? HL-HR : HR-HL) <= 2
202 &&
"Balancing invariant violated");
206 ImutInfo::KeyOfValue(
getValue()))) &&
207 "Value in left child is not less than current value");
210 ImutInfo::isLess(ImutInfo::KeyOfValue(
getValue()),
212 "Current value is not less than value of right child");
228 unsigned height : 28;
230 bool IsDigestCached : 1;
231 bool IsCanonicalized : 1;
246 : factory(
f), left(
l),
right(r), height(height), IsMutable(
true),
247 IsDigestCached(
false), IsCanonicalized(
false), value(v)
259 bool isMutable()
const {
return IsMutable; }
263 bool hasCachedDigest()
const {
return IsDigestCached; }
278 void markImmutable() {
279 assert(isMutable() &&
"Mutable flag already removed.");
284 void markedCachedDigest() {
285 assert(!hasCachedDigest() &&
"NoCachedDigest flag already removed.");
286 IsDigestCached =
true;
291 void setHeight(
unsigned h) {
292 assert(isMutable() &&
"Only a mutable tree can have its height changed.");
296 static uint32_t computeDigest(ImutAVLTree *L, ImutAVLTree *
R,
301 digest += L->computeDigest();
305 ImutInfo::Profile(
ID,V);
306 digest +=
ID.ComputeHash();
309 digest +=
R->computeDigest();
317 if (hasCachedDigest())
322 markedCachedDigest();
344 if (IsCanonicalized) {
351 factory->Cache[factory->maskCacheIndex(computeDigest())] = next;
357 factory->freeNodes.push_back(
this);
361 template <
typename ImutInfo>
371 template <
typename ImutInfo >
372 class ImutAVLFactory {
382 std::vector<TreeTy*> createdNodes;
383 std::vector<TreeTy*> freeNodes;
385 bool ownsAllocator()
const {
402 :
Allocator(reinterpret_cast<uintptr_t>(&Alloc) | 0x1) {}
405 if (ownsAllocator())
delete &getAllocator();
444 return (hl > hr ? hl : hr) + 1;
451 for ( ;
I!=
E ; ++
I, ++TI) {
452 if (TI == TE || !
I->isElementEqual(&*TI))
471 if (!freeNodes.empty()) {
472 T = freeNodes.back();
473 freeNodes.pop_back();
480 createdNodes.push_back(
T);
489 for (
unsigned i = 0,
n = createdNodes.size();
i <
n; ++
i) {
491 if (
N->isMutable() &&
N->refCount == 0)
494 createdNodes.clear();
504 assert(!
isEmpty(L) &&
"Left tree cannot be empty to have a height >= 2");
512 assert(!
isEmpty(LR) &&
"LR cannot be empty because it has a height >= 1");
521 assert(!
isEmpty(
R) &&
"Right tree cannot be empty to have a height >= 2");
529 assert(!
isEmpty(RL) &&
"RL cannot be empty because it has a height >= 1");
548 key_type_ref K = ImutInfo::KeyOfValue(V);
549 key_type_ref KCurrent = ImutInfo::KeyOfValue(
getValue(
T));
553 else if (ImutInfo::isLess(K,KCurrent))
569 key_type_ref KCurrent = ImutInfo::KeyOfValue(
getValue(
T));
573 }
else if (ImutInfo::isLess(K,KCurrent)) {
605 if (!
T || !
T->isMutable())
617 if (TNew->IsCanonicalized)
622 unsigned digest = TNew->computeDigest();
635 if (TNew->refCount == 0)
645 TNew->IsCanonicalized =
true;
654 template <
typename ImutInfo>
655 class ImutAVLTreeGenericIterator
656 :
public std::iterator<std::bidirectional_iterator_tag,
657 ImutAVLTree<ImutInfo>> {
658 SmallVector<uintptr_t,20> stack;
668 if (Root)
stack.push_back(
reinterpret_cast<uintptr_t
>(Root));
710 return !(*
this ==
x);
720 stack.push_back(
reinterpret_cast<uintptr_t
>(L));
726 stack.push_back(
reinterpret_cast<uintptr_t
>(R));
765 template <
typename ImutInfo>
766 class ImutAVLTreeInOrderIterator
767 :
public std::iterator<std::bidirectional_iterator_tag,
768 ImutAVLTree<ImutInfo>> {
769 using InternalIteratorTy = ImutAVLTreeGenericIterator<ImutInfo>;
771 InternalIteratorTy InternalItr;
784 return InternalItr ==
x.InternalItr;
788 return !(*
this ==
x);
796 while (!InternalItr.atEnd() &&
797 InternalItr.getVisitState() != InternalIteratorTy::VisitedLeft);
804 while (!InternalItr.atBeginning() &&
805 InternalItr.getVisitState() != InternalIteratorTy::VisitedLeft);
811 InternalItr.skipToParent();
813 while (!InternalItr.atEnd() &&
814 InternalItr.getVisitState() != InternalIteratorTy::VisitedLeft)
821 template <
typename T>
824 ImutAVLValueIterator<T>, typename T::TreeTy::iterator,
825 typename std::iterator_traits<
826 typename T::TreeTy::iterator>::iterator_category,
827 const typename T::value_type> {
832 typename ImutAVLValueIterator::reference
operator*()
const {
833 return this->
I->getValue();
844 template <
typename T>
855 template <
typename T>
865 #define PROFILE_INTEGER_INFO(X)\
866 template<> struct ImutProfileInfo<X> : ImutProfileInteger<X> {};
879 #undef PROFILE_INTEGER_INFO
894 template <
typename T>
914 template <
typename T>
927 return std::equal_to<key_type>()(LHS,RHS);
931 return std::less<key_type>()(LHS,RHS);
940 template <
typename T>
963 template <
typename ValT,
typename ValInfo = ImutContainerInfo<ValT>>
982 const bool Canonicalize;
986 : Canonicalize(canonicalize) {}
989 :
F(Alloc), Canonicalize(canonicalize) {}
1007 TreeTy *NewT =
F.add(Old.Root.get(), V);
1008 return ImmutableSet(Canonicalize ?
F.getCanonicalTree(NewT) : NewT);
1019 TreeTy *NewT =
F.remove(Old.Root.get(), V);
1020 return ImmutableSet(Canonicalize ?
F.getCanonicalTree(NewT) : NewT);
1034 return Root ? Root->
contains(V) :
false;
1038 return Root && RHS.Root ? Root->
isEqual(*RHS.Root.get()) : Root == RHS.Root;
1042 return Root && RHS.Root ? Root->
isNotEqual(*RHS.Root.get())
1047 if (Root) { Root->
retain(); }
1060 template <
typename Callback>
1063 template <
typename Callback>
1064 void foreach() {
if (Root) { Callback
C; Root->
foreach(
C); } }
1082 ID.AddPointer(
S.Root.get());
1095 template <
typename ValT,
typename ValInfo = ImutContainerInfo<ValT>>
1128 return Root ? Root->
contains(V) :
false;
1133 canonicalize ? Factory->getCanonicalTree(Root.
get()) : Root.
get());
1139 return Root && RHS.Root ? Root->
isEqual(*RHS.Root.get()) : Root == RHS.Root;
1143 return Root && RHS.Root ? Root->
isNotEqual(*RHS.Root.get())
1170 ID.AddPointer(
S.Root.get());
1184 #endif // LLVM_ADT_IMMUTABLESET_H
ImutAVLTree * getLeft() const
Return a pointer to the left subtree.
bool isEmpty() const
isEmpty - Return true if the set contains no elements.
static void Profile(FoldingSetNodeID &ID, const ImmutableSetRef &S)
LLVM_NODISCARD ImmutableSet add(ImmutableSet Old, value_type_ref V)
add - Creates a new immutable set that contains all of the values of the original set with the addition of the specified value.
the custom lowered code happens to be right
unsigned validateTree() const
validateTree - A utility method that checks that the balancing and ordering invariants of the tree are satisfied.
This class represents lattice values for constants.
TreeTy * getRootWithoutRetain() const
bool isSingleton() const
isSingleton - Return true if the set contains exactly one element.
static void Profile(FoldingSetNodeID &ID, value_type_ref X)
TreeTy * getLeft(TreeTy *T) const
static key_type_ref KeyOfValue(value_type_ref D)
ImutAVLTree * getMaxElement()
getMaxElement - Find the subtree associated with the highest ranged key value.
ImutAVLTreeInOrderIterator(const TreeTy *Root)
static bool isEqual(key_type_ref LHS, key_type_ref RHS)
ImutAVLValueIterator::reference operator*() const
TreeTy::Factory * getTreeFactory() const
bool isEmpty(TreeTy *T) const
typename ImutProfileInfo< T >::value_type value_type
bool isNotEqual(const ImutAVLTree &RHS) const
isNotEqual - Compares two trees for structural inequality.
unsigned getHeight(TreeTy *T) const
bool contains(value_type_ref V) const
Returns true if the set contains the specified value.
ImutAVLTree * find(key_type_ref K)
find - Finds the subtree associated with the specified key value.
bool operator==(const ImutAVLTreeInOrderIterator &x) const
Generic iterator that wraps a T::TreeTy::iterator and exposes iterator::getValue() on dereference.
TreeTy * combineTrees(TreeTy *L, TreeTy *R)
TreeTy * getEmptyTree() const
bool operator!=(const ImmutableSetRef &RHS) const
uintptr_t getVisitState() const
static bool isEqual(key_type_ref LHS, key_type_ref RHS)
ImutAVLTreeGenericIterator()=default
CRTP base class for adapting an iterator to a different type.
ImmutableSet getEmptySet()
getEmptySet - Returns an immutable set that contains no elements.
unsigned size() const
size - Returns the number of nodes in the tree, which includes both leaves and non-leaf nodes.
void markImmutable(TreeTy *T)
markImmutable - Clears the mutable bits of a root and all of its descendants.
static bool isDataEqual(data_type_ref, data_type_ref)
Common register allocation spilling lr str ldr sxth r3 ldr mla r4 can lr mov lr str ldr sxth r3 mla r4 and then merge mul and lr str ldr sxth r3 mla r4 It also increase the likelihood the store may become dead bb27 Successors according to LLVM ID Predecessors according to mbb< bb27, 0x8b0a7c0 > Note ADDri is not a two address instruction its result reg1037 is an operand of the PHI node in bb76 and its operand reg1039 is the result of the PHI node We should treat it as a two address code and make sure the ADDri is scheduled after any node that reads reg1039 Use info(i.e. register scavenger) to assign it a free register to allow reuse the collector could move the objects and invalidate the derived pointer This is bad enough in the first but safe points can crop up unpredictably **array_addr i32 n y store obj * new
iterator end() const
end - Returns an iterator for the tree that denotes the end of an inorder traversal.
value_type value_type_ref
bool operator!=(const ImutAVLTreeInOrderIterator &x) const
unsigned incrementHeight(TreeTy *L, TreeTy *R) const
static data_type_ref DataOfValue(value_type_ref)
typename ImutProfileInfo< T >::value_type_ref value_type_ref
Itanium Name Demangler i e convert the string _Z1fv into f()". You can also use the CRTP base ManglingParser to perform some simple analysis on the mangled name
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
(vector float) vec_cmpeq(*A, *B) C
ImutAVLTreeGenericIterator & operator++()
ImmutableSetRef remove(value_type_ref V)
ImutAVLFactory(BumpPtrAllocator &Alloc)
bool contains(key_type_ref K)
contains - Returns true if this tree contains a subtree (node) that has a data element that matches the specified key.
unsigned getHeight() const
getHeight - Returns the height of the tree.
This requires reassociating to forms of expressions that are already something that reassoc doesn t think about yet These two functions should generate the same code on big endian int * l
Generic profile template.
typename ValInfo::value_type value_type
TreeTy * getCanonicalTree(TreeTy *TNew)
ImmutableSet(TreeTy *R)
Constructs a set from a pointer to a tree root.
static bool isLess(key_type_ref LHS, key_type_ref RHS)
TreeTy * removeMinBinding(TreeTy *T, TreeTy *&Noderemoved)
iterator begin() const
begin - Returns an iterator that iterates over the nodes of the tree in an inorder traversal.
static void retain(ImutAVLTree< ImutInfo > *Tree)
Profile traits for integers.
TreeTy & operator*() const
static key_type_ref KeyOfValue(value_type_ref D)
ImutAVLValueIterator(typename T::TreeTy *Tree)
const bool & value_type_ref
static GCMetadataPrinterRegistry::Add< ErlangGCPrinter > X("erlang", "erlang-compatible garbage collector")
bool isElementEqual(const ImutAVLTree *RHS) const
static void Profile(FoldingSetNodeID &ID, const ImmutableSet &S)
const value_type & getValue() const
getValue - Returns the data value associated with the tree node.
LLVM_NODISCARD ImmutableSet remove(ImmutableSet Old, value_type_ref V)
remove - Creates a new immutable set that contains all of the values of the original set with the exception of the specified value.
typename ValInfo::value_type_ref value_type_ref
bool contains(value_type_ref V) const
Returns true if the set contains the specified value.
static bool isDataEqual(data_type_ref, data_type_ref)
Factory(bool canonicalize=true)
ImutAVLTree * getRight() const
Return a pointer to the right subtree.
ImmutableSet< ValT > asImmutableSet(bool canonicalize=true) const
void operator=(const Factory &RHS)=delete
static GCRegistry::Add< StatepointGC > D("statepoint-example", "an example strategy for statepoint")
Allocate memory in an ever growing pool, as if by bump-pointer.
TreeTy * getRight(TreeTy *T) const
TreeTy * remove_internal(key_type_ref K, TreeTy *T)
remove_internal - Creates a new tree that includes all the data from the original tree except the specified data.
static ImmutableSetRef getEmptySet(FactoryTy *F)
static void Profile(FoldingSetNodeID &ID, value_type_ref X)
TreeTy * add(TreeTy *T, value_type_ref V)
bool operator==(const ImmutableSet &RHS) const
ImutAVLTreeInOrderIterator< ImutInfo > iterator
bool isElementEqual(value_type_ref V) const
Factory(BumpPtrAllocator &Alloc, bool canonicalize=true)
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
bool isEqual(const ImutAVLTree &RHS) const
isEqual - Compares two trees for structural equality and returns true if they are equal.
static bool isLess(key_type_ref LHS, key_type_ref RHS)
static unsigned maskCacheIndex(unsigned I)
TreeTy * balanceTree(TreeTy *L, value_type_ref V, TreeTy *R)
balanceTree - Used by add_internal and remove_internal to balance a newly created tree.
bool operator==(const ImutAVLTreeGenericIterator &x) const
bool operator==(const ImmutableSetRef &RHS) const
typename ValInfo::value_type_ref value_type_ref
BumpPtrAllocator & getAllocator()
TreeTy & operator*() const
static void Profile(const T &X, FoldingSetNodeID &ID)
TreeTy * operator->() const
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
Class you can specialize to provide custom retain/release functionality for a type.
FoldingSetNodeID - This class is used to gather all the unique data bits of a node.
add sub stmia L5 ldr r0 bl L_printf $stub Instead of a and a wouldn t it be better to do three moves *Return an aggregate type is even return S
static void release(ImutAVLTree< ImutInfo > *Tree)
#define PROFILE_INTEGER_INFO(X)
Generic profile trait for pointer types.
typename ImutInfo::value_type value_type
typename ImutInfo::key_type_ref key_type_ref
unsigned getHeight() const
ImutAVLTreeInOrderIterator & operator--()
bool operator!=(const ImmutableSet &RHS) const
bool isSingleton() const
isSingleton - Return true if the set contains exactly one element.
value_type_ref key_type_ref
ImutAVLValueIterator< ImmutableSet > iterator
void validateTree() const
ImutAVLTreeGenericIterator(const TreeTy *Root)
typename ImutInfo::value_type_ref value_type_ref
ImutAVLTreeInOrderIterator & operator++()
ImutAVLTreeInOrderIterator()
#define LLVM_NODISCARD
LLVM_NODISCARD - Warn if a type or return value is discarded.
unsigned getHeight() const
static bool isEqual(const Function &Caller, const Function &Callee)
void foreach(Callback &C)
foreach - A member template that accepts and invokes operator() on a functor object (specified by Callback) for every node/subtree in the tree.
bool isEmpty() const
isEmpty - Return true if the set contains no elements.
void validateTree() const
TreeTy * createNode(TreeTy *newLeft, TreeTy *oldTree, TreeTy *newRight)
value_type_ref key_type_ref
ImmutableSetRef(TreeTy *R, FactoryTy *F)
Constructs a set from a pointer to a tree root.
static bool compareTreeWithSection(TreeTy *T, typename TreeTy::iterator &TI, typename TreeTy::iterator &TE)
static void Profile(FoldingSetNodeID &ID, value_type_ref X)
void Profile(FoldingSetNodeID &ID) const
static data_type_ref DataOfValue(value_type_ref)
TreeTy * getRootWithoutRetain() const
S is passed via registers r2 But gcc stores them to the stack
TreeTy * operator->() const
static void Profile(FoldingSetNodeID &ID, value_type_ref X)
the multiplication has a latency of four as opposed to two cycles for the movl lea variant It appears gcc place string data with linkonce linkage in section coalesced instead of section coalesced Take a look at darwin h
ImutAVLValueIterator< ImmutableSetRef > iterator
TreeTy * createNode(TreeTy *L, value_type_ref V, TreeTy *R)
The same transformation can work with an even modulo with the addition of a and shrink the compare RHS by the same amount Unless the target supports that transformation probably isn t worthwhile The transformation can also easily be made to work with non zero equality for n
TreeTy * add_internal(value_type_ref V, TreeTy *T)
add_internal - Creates a new tree that includes the specified data and the data from the original tree.
typename ValInfo::value_type value_type
bool operator!=(const ImutAVLTreeGenericIterator &x) const
A smart pointer to a reference-counted object that inherits from RefCountedBase or ThreadSafeRefCountedBase.
print Instructions which execute on loop entry
TreeTy * remove(TreeTy *T, key_type_ref V)
typename TreeTy::Factory FactoryTy
ImutAVLTreeGenericIterator & operator--()
value_type_ref getValue(TreeTy *T) const
ImmutableSetRef add(value_type_ref V)
ImutAVLValueIterator()=default
void Profile(FoldingSetNodeID &ID) const
ImutAVLFactory< ImutInfo > Factory
ImutContainerInfo - Generic definition of comparison operations for elements of immutable containers that defaults to using ImutProfileInfo.