#ifndef LLVM_ADT_DENSEMAP_H
#define LLVM_ADT_DENSEMAP_H
template <typename KeyT, typename ValueT>
struct DenseMapPair : public std::pair<KeyT, ValueT> {
  KeyT &getFirst() { return std::pair<KeyT, ValueT>::first; }
  const KeyT &getFirst() const { return std::pair<KeyT, ValueT>::first; }
template <typename KeyT, typename ValueT,
          typename KeyInfoT = DenseMapInfo<KeyT>,
          typename Bucket = detail::DenseMapPair<KeyT, ValueT>,
          bool IsConst = false>
class DenseMapIterator;
template <typename DerivedT, typename KeyT, typename ValueT, typename KeyInfoT,
          typename BucketT>
class DenseMapBase : public DebugEpochBase {
  iterator end() {
    return iterator(getBucketsEnd(), getBucketsEnd(), *this, true);
  }
  const_iterator end() const {
    return const_iterator(getBucketsEnd(), getBucketsEnd(), *this, true);
  }

  LLVM_NODISCARD bool empty() const { return getNumEntries() == 0; }
  unsigned size() const { return getNumEntries(); }
  // reserve(NumEntries): grow only if the needed bucket count exceeds the
  // current one.
  if (NumBuckets > getNumBuckets())

  // clear(): destroy every value and reset every key to the empty key. A large
  // but sparsely used table (less than 1/4 full with more than 64 buckets) is
  // shrunk via shrink_and_clear() instead.
  if (getNumEntries() == 0 && getNumTombstones() == 0)
    return;

  if (getNumEntries() * 4 < getNumBuckets() && getNumBuckets() > 64) {

  unsigned NumEntries = getNumEntries();
  for (BucketT *P = getBuckets(), *E = getBucketsEnd(); P != E; ++P) {
      P->getSecond().~ValueT();
    P->getFirst() = EmptyKey;
  assert(NumEntries == 0 && "Node count imbalance!");
  // count(Val):
  const BucketT *TheBucket;
  return LookupBucketFor(Val, TheBucket) ? 1 : 0;

  // find(Val) and find(Val) const:
  if (LookupBucketFor(Val, TheBucket))
    return iterator(TheBucket, getBucketsEnd(), *this, true);

  const BucketT *TheBucket;
  if (LookupBucketFor(Val, TheBucket))
  // find_as(Val) and find_as(Val) const: like find(), but with an alternate
  // (and possibly less expensive) lookup key type.
  template<class LookupKeyT>
  if (LookupBucketFor(Val, TheBucket))
    return iterator(TheBucket, getBucketsEnd(), *this, true);

  template<class LookupKeyT>
  const BucketT *TheBucket;
  if (LookupBucketFor(Val, TheBucket))
  // lookup(Val): return the value for Val, or a default-constructed ValueT if
  // the key is absent.
  const BucketT *TheBucket;
  if (LookupBucketFor(Val, TheBucket))
    return TheBucket->getSecond();
  std::pair<iterator, bool> insert(const std::pair<KeyT, ValueT> &KV) {

  std::pair<iterator, bool> insert(std::pair<KeyT, ValueT> &&KV) {
    return try_emplace(std::move(KV.first), std::move(KV.second));
  // try_emplace(KeyT &&Key, Ts &&...Args):
  template <typename... Ts>
  if (LookupBucketFor(Key, TheBucket))
    return std::make_pair(iterator(TheBucket, getBucketsEnd(), *this, true),

  TheBucket =
      InsertIntoBucket(TheBucket, std::move(Key), std::forward<Ts>(Args)...);
  return std::make_pair(iterator(TheBucket, getBucketsEnd(), *this, true),

  // try_emplace(const KeyT &Key, Ts &&...Args):
  template <typename... Ts>
  if (LookupBucketFor(Key, TheBucket))
    return std::make_pair(iterator(TheBucket, getBucketsEnd(), *this, true),

  TheBucket = InsertIntoBucket(TheBucket, Key, std::forward<Ts>(Args)...);
  return std::make_pair(iterator(TheBucket, getBucketsEnd(), *this, true),
  template <typename LookupKeyT>
  std::pair<iterator, bool> insert_as(std::pair<KeyT, ValueT> &&KV,
                                      const LookupKeyT &Val) {
    if (LookupBucketFor(Val, TheBucket))
      return std::make_pair(iterator(TheBucket, getBucketsEnd(), *this, true),

    TheBucket = InsertIntoBucketWithLookup(TheBucket, std::move(KV.first),
                                           std::move(KV.second), Val);
    return std::make_pair(iterator(TheBucket, getBucketsEnd(), *this, true),
  // insert(InputIt I, InputIt E): range insertion of pairs.
  template<typename InputIt>

  // erase(const KeyT &Val):
  if (!LookupBucketFor(Val, TheBucket))
  TheBucket->getSecond().~ValueT();
  decrementNumEntries();
  incrementNumTombstones();

  // erase(iterator I):
  BucketT *TheBucket = &*I;
  TheBucket->getSecond().~ValueT();
  decrementNumEntries();
  incrementNumTombstones();
  // FindAndConstruct(Key) / operator[](Key), plus the rvalue-key overloads:
  if (LookupBucketFor(Key, TheBucket))
  return *InsertIntoBucket(TheBucket, Key);

  if (LookupBucketFor(Key, TheBucket))
  return *InsertIntoBucket(TheBucket, std::move(Key));
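  // Illustrative usage sketch (not part of this header; assumes the DenseMap
  // class defined later in this file and LLVM's built-in DenseMapInfo<int>):
  // insert() and try_emplace() return {iterator, bool} and never overwrite an
  // existing entry, operator[] default-constructs a missing value, lookup()
  // returns a copy or a default-constructed value, and erase() tombstones the
  // bucket.
  //
  //   llvm::DenseMap<int, int> M;
  //   auto R = M.try_emplace(1, 10);   // R.second == true, value 10 inserted
  //   auto S = M.try_emplace(1, 20);   // S.second == false, value stays 10
  //   M[2] = 30;                       // default-construct, then assign
  //   int V = M.lookup(3);             // V == 0, key 3 is absent
  //   if (M.find(2) != M.end())
  //     M.erase(2);                    // marks the bucket with the tombstone key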
  // isPointerIntoBucketsArray(Ptr):
  return Ptr >= getBuckets() && Ptr < getBucketsEnd();
  // destroyAll(): run the destructors of every key and value in the table.
  if (getNumBuckets() == 0)

  for (BucketT *P = getBuckets(), *E = getBucketsEnd(); P != E; ++P) {
      P->getSecond().~ValueT();
    P->getFirst().~KeyT();
  // initEmpty(): set every key in a freshly allocated bucket array to the
  // empty key.
  assert((getNumBuckets() & (getNumBuckets()-1)) == 0 &&
         "# initial buckets must be a power of two!");
  for (BucketT *B = getBuckets(), *E = getBucketsEnd(); B != E; ++B)
    ::new (&B->getFirst()) KeyT(EmptyKey);
  // moveFromOldBuckets(OldBucketsBegin, OldBucketsEnd): re-insert every live
  // entry from the old bucket array into the freshly grown one, then destroy
  // the old keys and values.
  for (BucketT *B = OldBucketsBegin, *E = OldBucketsEnd; B != E; ++B) {
      bool FoundVal = LookupBucketFor(B->getFirst(), DestBucket);
      assert(!FoundVal && "Key already in new map?");
      DestBucket->getFirst() = std::move(B->getFirst());
      ::new (&DestBucket->getSecond()) ValueT(std::move(B->getSecond()));
      incrementNumEntries();

      B->getSecond().~ValueT();
    B->getFirst().~KeyT();
  // copyFrom(const DenseMapBase &other): both maps must already have the same
  // number of buckets.
  template <typename OtherBaseT>
    assert(getNumBuckets() == other.getNumBuckets());

    setNumEntries(other.getNumEntries());
    setNumTombstones(other.getNumTombstones());

      memcpy(getBuckets(), other.getBuckets(),
             getNumBuckets() * sizeof(BucketT));

      for (size_t i = 0; i < getNumBuckets(); ++i) {
        ::new (&getBuckets()[i].getFirst())
            KeyT(other.getBuckets()[i].getFirst());
        ::new (&getBuckets()[i].getSecond())
            ValueT(other.getBuckets()[i].getSecond());
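  // The memcpy() call above is the fast path of copyFrom(): when the key and
  // value types are POD-like, whole buckets can be copied bitwise. Otherwise
  // the placement-new loop copy-constructs each key and value individually.
  // The condition that selects between the two paths is elided from this
  // fragment.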
  // getHashValue(), getEmptyKey(), getTombstoneKey(): thin wrappers over
  // KeyInfoT.
  return KeyInfoT::getHashValue(Val);
  template<typename LookupKeyT>
  return KeyInfoT::getHashValue(Val);
  return KeyInfoT::getEmptyKey();
  return KeyInfoT::getTombstoneKey();
  // Bookkeeping helpers, mostly forwarded to the derived class via CRTP.
  unsigned getNumEntries() const {
    return static_cast<const DerivedT *>(this)->getNumEntries();
  }
  void setNumEntries(unsigned Num) {
    static_cast<DerivedT *>(this)->setNumEntries(Num);
  }
  void incrementNumEntries() {
    setNumEntries(getNumEntries() + 1);
  }
  void decrementNumEntries() {
    setNumEntries(getNumEntries() - 1);
  }
  unsigned getNumTombstones() const {
    return static_cast<const DerivedT *>(this)->getNumTombstones();
  }
  void setNumTombstones(unsigned Num) {
    static_cast<DerivedT *>(this)->setNumTombstones(Num);
  }
  void incrementNumTombstones() {
    setNumTombstones(getNumTombstones() + 1);
  }
  void decrementNumTombstones() {
    setNumTombstones(getNumTombstones() - 1);
  }
  const BucketT *getBuckets() const {
    return static_cast<const DerivedT *>(this)->getBuckets();
  }
  BucketT *getBuckets() {
    return static_cast<DerivedT *>(this)->getBuckets();
  }
  unsigned getNumBuckets() const {
    return static_cast<const DerivedT *>(this)->getNumBuckets();
  }
  BucketT *getBucketsEnd() {
    return getBuckets() + getNumBuckets();
  }
  const BucketT *getBucketsEnd() const {
    return getBuckets() + getNumBuckets();
  }

  void grow(unsigned AtLeast) {
    static_cast<DerivedT *>(this)->grow(AtLeast);
  }
  void shrink_and_clear() {
    static_cast<DerivedT *>(this)->shrink_and_clear();
  }
  template <typename KeyArg, typename... ValueArgs>
  BucketT *InsertIntoBucket(BucketT *TheBucket, KeyArg &&Key,
                            ValueArgs &&... Values) {
    TheBucket = InsertIntoBucketImpl(Key, Key, TheBucket);

    TheBucket->getFirst() = std::forward<KeyArg>(Key);
    ::new (&TheBucket->getSecond()) ValueT(std::forward<ValueArgs>(Values)...);
  template <typename LookupKeyT>
  BucketT *InsertIntoBucketWithLookup(BucketT *TheBucket, KeyT &&Key,
                                      ValueT &&Value, LookupKeyT &Lookup) {
    TheBucket = InsertIntoBucketImpl(Key, Lookup, TheBucket);

    TheBucket->getFirst() = std::move(Key);
    ::new (&TheBucket->getSecond()) ValueT(std::move(Value));
  template <typename LookupKeyT>
  BucketT *InsertIntoBucketImpl(const KeyT &Key, const LookupKeyT &Lookup,
                                BucketT *TheBucket) {
    unsigned NewNumEntries = getNumEntries() + 1;
    unsigned NumBuckets = getNumBuckets();
      this->grow(NumBuckets * 2);
      LookupBucketFor(Lookup, TheBucket);
      NumBuckets = getNumBuckets();
    } else if (LLVM_UNLIKELY(NumBuckets-(NewNumEntries+getNumTombstones()) <=
      this->grow(NumBuckets);
      LookupBucketFor(Lookup, TheBucket);

    incrementNumEntries();

    decrementNumTombstones();
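    // The two grow() calls above serve different purposes: the first doubles
    // the bucket count when an insertion would push the table over its load
    // factor (the exact threshold check is elided from this fragment), while
    // the second rehashes at the same size when nearly every bucket is either
    // filled or tombstoned, which is what reclaims accumulated tombstones.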
  template<typename LookupKeyT>
  bool LookupBucketFor(const LookupKeyT &Val,
                       const BucketT *&FoundBucket) const {
    const BucketT *BucketsPtr = getBuckets();
    const unsigned NumBuckets = getNumBuckets();

    if (NumBuckets == 0) {
      FoundBucket = nullptr;

    const BucketT *FoundTombstone = nullptr;
           "Empty/Tombstone value shouldn't be inserted into map!");

    unsigned ProbeAmt = 1;
      const BucketT *ThisBucket = BucketsPtr + BucketNo;
        FoundBucket = ThisBucket;
        FoundBucket = FoundTombstone ? FoundTombstone : ThisBucket;
          FoundTombstone = ThisBucket;

      BucketNo += ProbeAmt++;
      BucketNo &= (NumBuckets-1);
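    // Probing sketch: ProbeAmt makes this quadratic probing by triangular
    // numbers. Starting from the initial hash bucket H, successive probes land
    // on H+1, H+3, H+6, H+10, ... (each step adds one more than the last), all
    // reduced modulo the table size by the power-of-two mask above. For a
    // power-of-two table this sequence visits every bucket, so the search
    // terminates as long as at least one bucket is empty.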
  template <typename LookupKeyT>
  bool LookupBucketFor(const LookupKeyT &Val, BucketT *&FoundBucket) {
    const BucketT *ConstFoundBucket;
                      ->LookupBucketFor(Val, ConstFoundBucket);
    FoundBucket = const_cast<BucketT *>(ConstFoundBucket);
    return getNumBuckets() * sizeof(BucketT);
template <typename KeyT, typename ValueT,
          typename KeyInfoT = DenseMapInfo<KeyT>,
          typename BucketT = detail::DenseMapPair<KeyT, ValueT>>
class DenseMap : public DenseMapBase<DenseMap<KeyT, ValueT, KeyInfoT, BucketT>,
                                     KeyT, ValueT, KeyInfoT, BucketT> {
  unsigned NumTombstones;

  explicit DenseMap(unsigned InitialReserve = 0) { init(InitialReserve); }

  template<typename InputIt>
  DenseMap(const InputIt &I, const InputIt &E) {
    init(std::distance(I, E));
  // Fragments of ~DenseMap(), swap(), move assignment, and copyFrom():
  operator delete(Buckets);
  std::swap(NumTombstones, RHS.NumTombstones);
  operator delete(Buckets);

  operator delete(Buckets);
  if (allocateBuckets(other.NumBuckets)) {
  void init(unsigned InitNumEntries) {
    auto InitBuckets = getMinBucketToReserveForEntries(InitNumEntries);
    if (allocateBuckets(InitBuckets)) {
  // grow(unsigned AtLeast):
  unsigned OldNumBuckets = NumBuckets;
  BucketT *OldBuckets = Buckets;

  allocateBuckets(std::max<unsigned>(
      64, static_cast<unsigned>(NextPowerOf2(AtLeast-1))));

  operator delete(OldBuckets);
  // shrink_and_clear():
  unsigned OldNumEntries = NumEntries;

  unsigned NewNumBuckets = 0;
  NewNumBuckets = std::max(64, 1 << (Log2_32_Ceil(OldNumEntries) + 1));
  if (NewNumBuckets == NumBuckets) {

  operator delete(Buckets);
  unsigned getNumEntries() const { return NumEntries; }
  void setNumEntries(unsigned Num) { NumEntries = Num; }
  unsigned getNumTombstones() const { return NumTombstones; }
  void setNumTombstones(unsigned Num) { NumTombstones = Num; }
  BucketT *getBuckets() const { return Buckets; }
  unsigned getNumBuckets() const { return NumBuckets; }

  bool allocateBuckets(unsigned Num) {
    NumBuckets = Num;
    if (NumBuckets == 0) {
    Buckets = static_cast<BucketT*>(operator new(sizeof(BucketT) * NumBuckets));
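  // Illustrative usage sketch (not part of this header): DenseMap keeps a
  // single heap-allocated array of buckets, and grow() above always rounds
  // the request up to a power of two with a minimum of 64. reserve() (see
  // DenseMapBase earlier in this file) lets callers pre-size the table so a
  // known number of insertions never triggers a rehash:
  //
  //   llvm::DenseMap<unsigned, unsigned> M;
  //   M.reserve(1000);                  // one allocation up front
  //   for (unsigned i = 0; i != 1000; ++i)
  //     M.try_emplace(i, 2 * i);        // no rehash during this loop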
template <typename KeyT, typename ValueT, unsigned InlineBuckets = 4,
          typename KeyInfoT = DenseMapInfo<KeyT>,
          typename BucketT = detail::DenseMapPair<KeyT, ValueT>>
class SmallDenseMap
    : public DenseMapBase<
          SmallDenseMap<KeyT, ValueT, InlineBuckets, KeyInfoT, BucketT>, KeyT,
          ValueT, KeyInfoT, BucketT> {
  static_assert(isPowerOf2_64(InlineBuckets),
                "InlineBuckets must be a power of 2.");
  unsigned NumEntries : 31;
  unsigned NumTombstones;

  // SmallDenseMap(unsigned NumInitBuckets = 0) constructor body:
  init(NumInitBuckets);

  // Range constructor, SmallDenseMap(const InputIt &I, const InputIt &E):
  template<typename InputIt>
  // swap(SmallDenseMap &RHS): three cases, depending on whether each side is
  // using its inline buckets or a heap-allocated LargeRep.
  unsigned TmpNumEntries = RHS.NumEntries;
  RHS.NumEntries = NumEntries;
  NumEntries = TmpNumEntries;
  std::swap(NumTombstones, RHS.NumTombstones);

  // Both small: swap the inline buckets element by element.
  if (Small && RHS.Small) {
    for (unsigned i = 0, e = InlineBuckets; i != e; ++i) {
      BucketT *LHSB = &getInlineBuckets()[i],
              *RHSB = &RHS.getInlineBuckets()[i];
      if (hasLHSValue && hasRHSValue) {
      std::swap(LHSB->getFirst(), RHSB->getFirst());
        ::new (&RHSB->getSecond()) ValueT(std::move(LHSB->getSecond()));
        LHSB->getSecond().~ValueT();
      } else if (hasRHSValue) {
        ::new (&LHSB->getSecond()) ValueT(std::move(RHSB->getSecond()));
        RHSB->getSecond().~ValueT();

  // Both large: swap the LargeRep fields directly.
  if (!Small && !RHS.Small) {
    std::swap(getLargeRep()->Buckets, RHS.getLargeRep()->Buckets);
    std::swap(getLargeRep()->NumBuckets, RHS.getLargeRep()->NumBuckets);

  // Mixed: move the large side's rep aside, move the small side's inline
  // entries across, then install the saved rep on the formerly small side.
  LargeRep TmpRep = std::move(*LargeSide.getLargeRep());
  LargeSide.getLargeRep()->~LargeRep();
  LargeSide.Small = true;

  for (unsigned i = 0, e = InlineBuckets; i != e; ++i) {
    BucketT *NewB = &LargeSide.getInlineBuckets()[i],
            *OldB = &SmallSide.getInlineBuckets()[i];
    ::new (&NewB->getFirst()) KeyT(std::move(OldB->getFirst()));
    OldB->getFirst().~KeyT();
    ::new (&NewB->getSecond()) ValueT(std::move(OldB->getSecond()));
    OldB->getSecond().~ValueT();

  SmallSide.Small = false;
  new (SmallSide.getLargeRep()) LargeRep(std::move(TmpRep));
  // copyFrom(const SmallDenseMap &other):
  if (other.getNumBuckets() > InlineBuckets) {
    new (getLargeRep()) LargeRep(allocateBuckets(other.getNumBuckets()));

  void init(unsigned InitBuckets) {
    if (InitBuckets > InlineBuckets) {
      new (getLargeRep()) LargeRep(allocateBuckets(InitBuckets));
  // grow(unsigned AtLeast): if the new size still fits the inline buckets,
  // nothing moves; otherwise the inline entries are saved to a temporary
  // buffer and re-inserted into a freshly allocated LargeRep. A map that is
  // already heap-backed just allocates a bigger bucket array.
  if (AtLeast >= InlineBuckets)
    AtLeast = std::max<unsigned>(64, NextPowerOf2(AtLeast-1));

    if (AtLeast < InlineBuckets)

    BucketT *TmpBegin = reinterpret_cast<BucketT *>(TmpStorage.buffer);
    BucketT *TmpEnd = TmpBegin;

    for (BucketT *P = getBuckets(), *E = P + InlineBuckets; P != E; ++P) {
        assert(size_t(TmpEnd - TmpBegin) < InlineBuckets &&
               "Too many inline buckets!");
        ::new (&TmpEnd->getFirst()) KeyT(std::move(P->getFirst()));
        ::new (&TmpEnd->getSecond()) ValueT(std::move(P->getSecond()));
        P->getSecond().~ValueT();
      P->getFirst().~KeyT();

    new (getLargeRep()) LargeRep(allocateBuckets(AtLeast));

  LargeRep OldRep = std::move(*getLargeRep());
  getLargeRep()->~LargeRep();
  if (AtLeast <= InlineBuckets) {

  new (getLargeRep()) LargeRep(allocateBuckets(AtLeast));

  operator delete(OldRep.Buckets);
  // shrink_and_clear():
  unsigned OldSize = this->size();

  unsigned NewNumBuckets = 0;
  if (NewNumBuckets > InlineBuckets && NewNumBuckets < 64u)

  if ((Small && NewNumBuckets <= InlineBuckets) ||
      (!Small && NewNumBuckets == getLargeRep()->NumBuckets)) {
  unsigned getNumEntries() const { return NumEntries; }
  void setNumEntries(unsigned Num) {
    // NumEntries is a 31-bit bitfield (see the member declaration above).
    assert(Num < (1U << 31) && "Cannot support more than 1<<31 entries");
    NumEntries = Num;
  }
  unsigned getNumTombstones() const { return NumTombstones; }
  void setNumTombstones(unsigned Num) { NumTombstones = Num; }
  const BucketT *getInlineBuckets() const {
    return reinterpret_cast<const BucketT *>(storage.buffer);

  BucketT *getInlineBuckets() {
    return const_cast<BucketT *>(
        const_cast<const SmallDenseMap *>(this)->getInlineBuckets());

  const LargeRep *getLargeRep() const {
    return reinterpret_cast<const LargeRep *>(storage.buffer);

  LargeRep *getLargeRep() {
    return const_cast<LargeRep *>(
        const_cast<const SmallDenseMap *>(this)->getLargeRep());

  const BucketT *getBuckets() const {
    return Small ? getInlineBuckets() : getLargeRep()->Buckets;

  BucketT *getBuckets() {
    return const_cast<BucketT *>(
        const_cast<const SmallDenseMap *>(this)->getBuckets());

  unsigned getNumBuckets() const {
    return Small ? InlineBuckets : getLargeRep()->NumBuckets;

  void deallocateBuckets() {
    operator delete(getLargeRep()->Buckets);
    getLargeRep()->~LargeRep();
  LargeRep allocateBuckets(unsigned Num) {
    assert(Num > InlineBuckets && "Must allocate more buckets than are inline");
    LargeRep Rep = {
      static_cast<BucketT*>(operator new(sizeof(BucketT) * Num)), Num
    };
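  // Illustrative usage sketch (not part of this header): SmallDenseMap stores
  // up to InlineBuckets buckets directly in the object (Small == true) and
  // only calls allocateBuckets() for a heap-backed LargeRep once it has to
  // grow past the inline storage.
  //
  //   llvm::SmallDenseMap<int, int, 8> M;  // 8 inline buckets, no heap allocation
  //   for (int i = 0; i != 4; ++i)
  //     M.try_emplace(i, i);               // still within the inline buckets
  //   for (int i = 4; i != 100; ++i)
  //     M.try_emplace(i, i);               // grow() moves to a heap-allocated array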
template <typename KeyT, typename ValueT, typename KeyInfoT, typename Bucket,
          bool IsConst>
class DenseMapIterator : DebugEpochBase::HandleBase {
  typedef DenseMapIterator<KeyT, ValueT, KeyInfoT, Bucket, true> ConstIterator;

  typedef typename std::conditional<IsConst, const Bucket, Bucket>::type
      value_type;
  DenseMapIterator(pointer Pos, pointer E, const DebugEpochBase &Epoch,
                   bool NoAdvance = false)
    assert(isHandleInSync() && "invalid construction!");
    if (!NoAdvance) AdvancePastEmptyBuckets();
  template <bool IsConstSrc,
            typename = typename std::enable_if<!IsConstSrc && IsConst>::type>
  // operator*() / operator->():
  assert(isHandleInSync() && "invalid iterator access!");
  assert(isHandleInSync() && "invalid iterator access!");

  // operator==() / operator!=():
  assert((!Ptr || isHandleInSync()) && "handle not in sync!");
         "comparing incomparable iterators!");
  return Ptr == RHS.Ptr;

  assert((!Ptr || isHandleInSync()) && "handle not in sync!");
         "comparing incomparable iterators!");
  return Ptr != RHS.Ptr;

  // operator++() / operator++(int):
  assert(isHandleInSync() && "invalid iterator access!");
  AdvancePastEmptyBuckets();

  assert(isHandleInSync() && "invalid iterator access!");
  void AdvancePastEmptyBuckets() {
    const KeyT Empty = KeyInfoT::getEmptyKey();
    const KeyT Tombstone = KeyInfoT::getTombstoneKey();
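  // Illustrative sketch (not part of this header): iterators walk the raw
  // bucket array, and AdvancePastEmptyBuckets() skips any bucket whose key is
  // the empty or tombstone sentinel, so a range-for loop only sees live
  // entries, in an unspecified order:
  //
  //   llvm::DenseMap<int, int> M;
  //   M.try_emplace(1, 2);
  //   M.try_emplace(3, 4);
  //   int Sum = 0;
  //   for (auto &KV : M)
  //     Sum += KV.second;              // Sum == 6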
template<typename KeyT, typename ValueT, typename KeyInfoT>
static inline size_t
capacity_in_bytes(const DenseMap<KeyT, ValueT, KeyInfoT> &X) {
  return X.getMemorySize();
}
#endif // LLVM_ADT_DENSEMAP_H