14#ifndef LLVM_ADT_DENSEMAP_H
15#define LLVM_ADT_DENSEMAP_H
29#include <initializer_list>
41template <
typename KeyT,
typename ValueT>
46 const KeyT &
getFirst()
const {
return std::pair<KeyT, ValueT>::first; }
54 typename KeyInfoT = DenseMapInfo<KeyT>,
57class DenseMapIterator;
59template <
typename DerivedT,
typename KeyT,
typename ValueT,
typename KeyInfoT,
80 if (shouldReverseIterate<KeyT>())
81 return makeIterator(getBucketsEnd() - 1, getBuckets(), *
this);
82 return makeIterator(getBuckets(), getBucketsEnd(), *
this);
85 return makeIterator(getBucketsEnd(), getBucketsEnd(), *
this,
true);
90 if (shouldReverseIterate<KeyT>())
91 return makeConstIterator(getBucketsEnd() - 1, getBuckets(), *
this);
92 return makeConstIterator(getBuckets(), getBucketsEnd(), *
this);
95 return makeConstIterator(getBucketsEnd(), getBucketsEnd(), *
this,
true);
98 [[nodiscard]]
bool empty()
const {
return getNumEntries() == 0; }
99 unsigned size()
const {
return getNumEntries(); }
106 if (NumBuckets > getNumBuckets())
112 if (getNumEntries() == 0 && getNumTombstones() == 0)
return;
116 if (getNumEntries() * 4 < getNumBuckets() && getNumBuckets() > 64) {
122 if (std::is_trivially_destructible<ValueT>::value) {
124 for (BucketT *
P = getBuckets(), *
E = getBucketsEnd();
P !=
E; ++
P)
125 P->getFirst() = EmptyKey;
128 unsigned NumEntries = getNumEntries();
129 for (BucketT *
P = getBuckets(), *
E = getBucketsEnd();
P !=
E; ++
P) {
130 if (!KeyInfoT::isEqual(
P->getFirst(), EmptyKey)) {
131 if (!KeyInfoT::isEqual(
P->getFirst(), TombstoneKey)) {
132 P->getSecond().~ValueT();
135 P->getFirst() = EmptyKey;
138 assert(NumEntries == 0 &&
"Node count imbalance!");
147 return doFind(Val) !=
nullptr;
156 if (BucketT *Bucket = doFind(Val))
158 Bucket, shouldReverseIterate<KeyT>() ? getBuckets() : getBucketsEnd(),
163 if (
const BucketT *Bucket = doFind(Val))
164 return makeConstIterator(
165 Bucket, shouldReverseIterate<KeyT>() ? getBuckets() : getBucketsEnd(),
175 template<
class LookupKeyT>
177 if (BucketT *Bucket = doFind(Val))
179 Bucket, shouldReverseIterate<KeyT>() ? getBuckets() : getBucketsEnd(),
183 template<
class LookupKeyT>
185 if (
const BucketT *Bucket = doFind(Val))
186 return makeConstIterator(
187 Bucket, shouldReverseIterate<KeyT>() ? getBuckets() : getBucketsEnd(),
195 if (
const BucketT *Bucket = doFind(Val))
196 return Bucket->getSecond();
202 const ValueT &
at(const_arg_type_t<KeyT> Val)
const {
203 auto Iter = this->
find(std::move(Val));
204 assert(Iter != this->
end() &&
"DenseMap::at failed due to a missing key");
211 std::pair<iterator, bool>
insert(
const std::pair<KeyT, ValueT> &KV) {
218 std::pair<iterator, bool>
insert(std::pair<KeyT, ValueT> &&KV) {
219 return try_emplace(std::move(KV.first), std::move(KV.second));
225 template <
typename... Ts>
228 if (LookupBucketFor(Key, TheBucket))
229 return std::make_pair(makeIterator(TheBucket,
230 shouldReverseIterate<KeyT>()
238 InsertIntoBucket(TheBucket, std::move(Key), std::forward<Ts>(Args)...);
239 return std::make_pair(makeIterator(TheBucket,
240 shouldReverseIterate<KeyT>()
250 template <
typename... Ts>
253 if (LookupBucketFor(Key, TheBucket))
254 return std::make_pair(makeIterator(TheBucket,
255 shouldReverseIterate<KeyT>()
262 TheBucket = InsertIntoBucket(TheBucket, Key, std::forward<Ts>(Args)...);
263 return std::make_pair(makeIterator(TheBucket,
264 shouldReverseIterate<KeyT>()
276 template <
typename LookupKeyT>
277 std::pair<iterator, bool>
insert_as(std::pair<KeyT, ValueT> &&KV,
278 const LookupKeyT &Val) {
280 if (LookupBucketFor(Val, TheBucket))
281 return std::make_pair(makeIterator(TheBucket,
282 shouldReverseIterate<KeyT>()
289 TheBucket = InsertIntoBucketWithLookup(TheBucket, std::move(KV.first),
290 std::move(KV.second), Val);
291 return std::make_pair(makeIterator(TheBucket,
292 shouldReverseIterate<KeyT>()
300 template<
typename InputIt>
306 template <
typename V>
310 Ret.first->second = std::forward<V>(Val);
314 template <
typename V>
316 auto Ret =
try_emplace(std::move(Key), std::forward<V>(Val));
318 Ret.first->second = std::forward<V>(Val);
337 BucketT *TheBucket = doFind(Val);
341 TheBucket->getSecond().~ValueT();
343 decrementNumEntries();
344 incrementNumTombstones();
348 BucketT *TheBucket = &*
I;
349 TheBucket->getSecond().~ValueT();
351 decrementNumEntries();
352 incrementNumTombstones();
357 if (LookupBucketFor(Key, TheBucket))
360 return *InsertIntoBucket(TheBucket, Key);
369 if (LookupBucketFor(Key, TheBucket))
372 return *InsertIntoBucket(TheBucket, std::move(Key));
383 return Ptr >= getBuckets() &&
Ptr < getBucketsEnd();
395 if (getNumBuckets() == 0)
399 for (BucketT *
P = getBuckets(), *
E = getBucketsEnd();
P !=
E; ++
P) {
400 if (!KeyInfoT::isEqual(
P->getFirst(), EmptyKey) &&
401 !KeyInfoT::isEqual(
P->getFirst(), TombstoneKey))
402 P->getSecond().~ValueT();
403 P->getFirst().~KeyT();
411 assert((getNumBuckets() & (getNumBuckets()-1)) == 0 &&
412 "# initial buckets must be a power of two!");
414 for (BucketT *
B = getBuckets(), *
E = getBucketsEnd();
B !=
E; ++
B)
415 ::new (&
B->getFirst())
KeyT(EmptyKey);
435 for (BucketT *
B = OldBucketsBegin, *
E = OldBucketsEnd;
B !=
E; ++
B) {
436 if (!KeyInfoT::isEqual(
B->getFirst(), EmptyKey) &&
437 !KeyInfoT::isEqual(
B->getFirst(), TombstoneKey)) {
440 bool FoundVal = LookupBucketFor(
B->getFirst(), DestBucket);
442 assert(!FoundVal &&
"Key already in new map?");
443 DestBucket->getFirst() = std::move(
B->getFirst());
444 ::new (&DestBucket->getSecond())
ValueT(std::move(
B->getSecond()));
445 incrementNumEntries();
448 B->getSecond().~ValueT();
450 B->getFirst().~KeyT();
454 template <
typename OtherBaseT>
458 assert(getNumBuckets() == other.getNumBuckets());
460 setNumEntries(other.getNumEntries());
461 setNumTombstones(other.getNumTombstones());
463 if (std::is_trivially_copyable<KeyT>::value &&
464 std::is_trivially_copyable<ValueT>::value)
465 memcpy(
reinterpret_cast<void *
>(getBuckets()), other.getBuckets(),
466 getNumBuckets() *
sizeof(BucketT));
468 for (
size_t i = 0; i < getNumBuckets(); ++i) {
469 ::new (&getBuckets()[i].getFirst())
470 KeyT(other.getBuckets()[i].getFirst());
471 if (!KeyInfoT::isEqual(getBuckets()[i].getFirst(),
getEmptyKey()) &&
473 ::new (&getBuckets()[i].getSecond())
474 ValueT(other.getBuckets()[i].getSecond());
479 return KeyInfoT::getHashValue(Val);
482 template<
typename LookupKeyT>
484 return KeyInfoT::getHashValue(Val);
488 static_assert(std::is_base_of<DenseMapBase, DerivedT>::value,
489 "Must pass the derived type to this template!");
490 return KeyInfoT::getEmptyKey();
494 return KeyInfoT::getTombstoneKey();
500 bool NoAdvance=
false) {
501 if (shouldReverseIterate<KeyT>()) {
502 BucketT *
B =
P == getBucketsEnd() ? getBuckets() :
P + 1;
509 const DebugEpochBase &Epoch,
510 const bool NoAdvance=
false)
const {
511 if (shouldReverseIterate<KeyT>()) {
512 const BucketT *
B =
P == getBucketsEnd() ? getBuckets() :
P + 1;
518 unsigned getNumEntries()
const {
519 return static_cast<const DerivedT *
>(
this)->getNumEntries();
522 void setNumEntries(
unsigned Num) {
523 static_cast<DerivedT *
>(
this)->setNumEntries(Num);
526 void incrementNumEntries() {
527 setNumEntries(getNumEntries() + 1);
530 void decrementNumEntries() {
531 setNumEntries(getNumEntries() - 1);
534 unsigned getNumTombstones()
const {
535 return static_cast<const DerivedT *
>(
this)->getNumTombstones();
538 void setNumTombstones(
unsigned Num) {
539 static_cast<DerivedT *
>(
this)->setNumTombstones(Num);
542 void incrementNumTombstones() {
543 setNumTombstones(getNumTombstones() + 1);
546 void decrementNumTombstones() {
547 setNumTombstones(getNumTombstones() - 1);
550 const BucketT *getBuckets()
const {
551 return static_cast<const DerivedT *
>(
this)->getBuckets();
554 BucketT *getBuckets() {
555 return static_cast<DerivedT *
>(
this)->getBuckets();
558 unsigned getNumBuckets()
const {
559 return static_cast<const DerivedT *
>(
this)->getNumBuckets();
562 BucketT *getBucketsEnd() {
563 return getBuckets() + getNumBuckets();
566 const BucketT *getBucketsEnd()
const {
567 return getBuckets() + getNumBuckets();
570 void grow(
unsigned AtLeast) {
571 static_cast<DerivedT *
>(
this)->grow(AtLeast);
574 void shrink_and_clear() {
575 static_cast<DerivedT *
>(
this)->shrink_and_clear();
578 template <
typename KeyArg,
typename... ValueArgs>
579 BucketT *InsertIntoBucket(BucketT *TheBucket, KeyArg &&Key,
580 ValueArgs &&... Values) {
581 TheBucket = InsertIntoBucketImpl(Key, Key, TheBucket);
583 TheBucket->getFirst() = std::forward<KeyArg>(Key);
584 ::new (&TheBucket->getSecond())
ValueT(
std::forward<ValueArgs>(Values)...);
588 template <typename LookupKeyT>
589 BucketT *InsertIntoBucketWithLookup(BucketT *TheBucket,
KeyT &&Key,
591 TheBucket = InsertIntoBucketImpl(Key,
Lookup, TheBucket);
593 TheBucket->getFirst() = std::move(Key);
598 template <typename LookupKeyT>
600 BucketT *TheBucket) {
612 unsigned NewNumEntries = getNumEntries() + 1;
613 unsigned NumBuckets = getNumBuckets();
615 this->grow(NumBuckets * 2);
616 LookupBucketFor(
Lookup, TheBucket);
617 NumBuckets = getNumBuckets();
618 }
else if (
LLVM_UNLIKELY(NumBuckets-(NewNumEntries+getNumTombstones()) <=
620 this->grow(NumBuckets);
621 LookupBucketFor(
Lookup, TheBucket);
627 incrementNumEntries();
631 if (!KeyInfoT::isEqual(TheBucket->getFirst(), EmptyKey))
632 decrementNumTombstones();
637 template <
typename LookupKeyT> BucketT *doFind(
const LookupKeyT &Val) {
638 BucketT *BucketsPtr = getBuckets();
639 const unsigned NumBuckets = getNumBuckets();
644 unsigned BucketNo =
getHashValue(Val) & (NumBuckets - 1);
645 unsigned ProbeAmt = 1;
647 BucketT *Bucket = BucketsPtr + BucketNo;
648 if (
LLVM_LIKELY(KeyInfoT::isEqual(Val, Bucket->getFirst())))
650 if (
LLVM_LIKELY(KeyInfoT::isEqual(Bucket->getFirst(), EmptyKey)))
655 BucketNo += ProbeAmt++;
656 BucketNo &= NumBuckets - 1;
660 template <
typename LookupKeyT>
661 const BucketT *doFind(
const LookupKeyT &Val)
const {
669 template<
typename LookupKeyT>
670 bool LookupBucketFor(
const LookupKeyT &Val,
671 const BucketT *&FoundBucket)
const {
672 const BucketT *BucketsPtr = getBuckets();
673 const unsigned NumBuckets = getNumBuckets();
675 if (NumBuckets == 0) {
676 FoundBucket =
nullptr;
681 const BucketT *FoundTombstone =
nullptr;
684 assert(!KeyInfoT::isEqual(Val, EmptyKey) &&
685 !KeyInfoT::isEqual(Val, TombstoneKey) &&
686 "Empty/Tombstone value shouldn't be inserted into map!");
689 unsigned ProbeAmt = 1;
691 const BucketT *ThisBucket = BucketsPtr + BucketNo;
693 if (
LLVM_LIKELY(KeyInfoT::isEqual(Val, ThisBucket->getFirst()))) {
694 FoundBucket = ThisBucket;
700 if (
LLVM_LIKELY(KeyInfoT::isEqual(ThisBucket->getFirst(), EmptyKey))) {
703 FoundBucket = FoundTombstone ? FoundTombstone : ThisBucket;
709 if (KeyInfoT::isEqual(ThisBucket->getFirst(), TombstoneKey) &&
711 FoundTombstone = ThisBucket;
715 BucketNo += ProbeAmt++;
716 BucketNo &= (NumBuckets-1);
720 template <
typename LookupKeyT>
721 bool LookupBucketFor(
const LookupKeyT &Val, BucketT *&FoundBucket) {
722 const BucketT *ConstFoundBucket;
724 ->LookupBucketFor(Val, ConstFoundBucket);
725 FoundBucket =
const_cast<BucketT *
>(ConstFoundBucket);
735 return getNumBuckets() *
sizeof(BucketT);
745template <
typename DerivedT,
typename KeyT,
typename ValueT,
typename KeyInfoT,
750 if (
LHS.size() !=
RHS.size())
753 for (
auto &KV :
LHS) {
754 auto I =
RHS.find(KV.first);
755 if (
I ==
RHS.end() ||
I->second != KV.second)
765template <
typename DerivedT,
typename KeyT,
typename ValueT,
typename KeyInfoT,
774 typename KeyInfoT = DenseMapInfo<KeyT>,
777 KeyT, ValueT, KeyInfoT, BucketT> {
786 unsigned NumTombstones;
792 explicit DenseMap(
unsigned InitialReserve = 0) { init(InitialReserve); }
804 template<
typename InputIt>
806 init(std::distance(
I,
E));
810 DenseMap(std::initializer_list<typename BaseT::value_type> Vals) {
812 this->insert(Vals.begin(), Vals.end());
821 this->incrementEpoch();
822 RHS.incrementEpoch();
846 if (allocateBuckets(other.NumBuckets)) {
847 this->BaseT::copyFrom(other);
854 void init(
unsigned InitNumEntries) {
855 auto InitBuckets = BaseT::getMinBucketToReserveForEntries(InitNumEntries);
856 if (allocateBuckets(InitBuckets)) {
857 this->BaseT::initEmpty();
865 unsigned OldNumBuckets = NumBuckets;
866 BucketT *OldBuckets = Buckets;
868 allocateBuckets(std::max<unsigned>(64,
static_cast<unsigned>(
NextPowerOf2(AtLeast-1))));
871 this->BaseT::initEmpty();
875 this->moveFromOldBuckets(OldBuckets, OldBuckets+OldNumBuckets);
883 unsigned OldNumBuckets = NumBuckets;
884 unsigned OldNumEntries = NumEntries;
888 unsigned NewNumBuckets = 0;
890 NewNumBuckets = std::max(64, 1 << (
Log2_32_Ceil(OldNumEntries) + 1));
891 if (NewNumBuckets == NumBuckets) {
892 this->BaseT::initEmpty();
902 unsigned getNumEntries()
const {
906 void setNumEntries(
unsigned Num) {
910 unsigned getNumTombstones()
const {
911 return NumTombstones;
914 void setNumTombstones(
unsigned Num) {
918 BucketT *getBuckets()
const {
922 unsigned getNumBuckets()
const {
926 bool allocateBuckets(
unsigned Num) {
928 if (NumBuckets == 0) {
933 Buckets =
static_cast<BucketT *
>(
939template <
typename KeyT,
typename ValueT,
unsigned InlineBuckets = 4,
940 typename KeyInfoT = DenseMapInfo<KeyT>,
944 SmallDenseMap<KeyT, ValueT, InlineBuckets, KeyInfoT, BucketT>, KeyT,
945 ValueT, KeyInfoT, BucketT> {
953 "InlineBuckets must be a power of 2.");
956 unsigned NumEntries : 31;
957 unsigned NumTombstones;
970 if (NumInitBuckets > InlineBuckets)
972 init(NumInitBuckets);
985 template<
typename InputIt>
1000 unsigned TmpNumEntries =
RHS.NumEntries;
1001 RHS.NumEntries = NumEntries;
1002 NumEntries = TmpNumEntries;
1005 const KeyT EmptyKey = this->getEmptyKey();
1006 const KeyT TombstoneKey = this->getTombstoneKey();
1007 if (Small &&
RHS.Small) {
1012 for (
unsigned i = 0, e = InlineBuckets; i != e; ++i) {
1013 BucketT *LHSB = &getInlineBuckets()[i],
1014 *RHSB = &
RHS.getInlineBuckets()[i];
1015 bool hasLHSValue = (!KeyInfoT::isEqual(LHSB->getFirst(), EmptyKey) &&
1016 !KeyInfoT::isEqual(LHSB->getFirst(), TombstoneKey));
1017 bool hasRHSValue = (!KeyInfoT::isEqual(RHSB->getFirst(), EmptyKey) &&
1018 !KeyInfoT::isEqual(RHSB->getFirst(), TombstoneKey));
1019 if (hasLHSValue && hasRHSValue) {
1025 std::swap(LHSB->getFirst(), RHSB->getFirst());
1027 ::new (&RHSB->getSecond())
ValueT(std::move(LHSB->getSecond()));
1028 LHSB->getSecond().~ValueT();
1029 }
else if (hasRHSValue) {
1030 ::new (&LHSB->getSecond())
ValueT(std::move(RHSB->getSecond()));
1031 RHSB->getSecond().~ValueT();
1036 if (!Small && !
RHS.Small) {
1037 std::swap(getLargeRep()->Buckets,
RHS.getLargeRep()->Buckets);
1038 std::swap(getLargeRep()->NumBuckets,
RHS.getLargeRep()->NumBuckets);
1046 LargeRep TmpRep = std::move(*LargeSide.getLargeRep());
1047 LargeSide.getLargeRep()->~LargeRep();
1048 LargeSide.Small =
true;
1053 for (
unsigned i = 0, e = InlineBuckets; i != e; ++i) {
1054 BucketT *NewB = &LargeSide.getInlineBuckets()[i],
1055 *OldB = &SmallSide.getInlineBuckets()[i];
1056 ::new (&NewB->getFirst())
KeyT(std::move(OldB->getFirst()));
1057 OldB->getFirst().~KeyT();
1058 if (!KeyInfoT::isEqual(NewB->getFirst(), EmptyKey) &&
1059 !KeyInfoT::isEqual(NewB->getFirst(), TombstoneKey)) {
1060 ::new (&NewB->getSecond())
ValueT(std::move(OldB->getSecond()));
1061 OldB->getSecond().~ValueT();
1067 SmallSide.Small =
false;
1068 new (SmallSide.getLargeRep()) LargeRep(std::move(TmpRep));
1079 deallocateBuckets();
1087 deallocateBuckets();
1089 if (other.getNumBuckets() > InlineBuckets) {
1091 new (getLargeRep()) LargeRep(allocateBuckets(other.getNumBuckets()));
1093 this->BaseT::copyFrom(other);
1098 if (InitBuckets > InlineBuckets) {
1100 new (getLargeRep()) LargeRep(allocateBuckets(InitBuckets));
1102 this->BaseT::initEmpty();
1106 if (AtLeast > InlineBuckets)
1107 AtLeast = std::max<unsigned>(64,
NextPowerOf2(AtLeast-1));
1112 BucketT *TmpBegin =
reinterpret_cast<BucketT *
>(&TmpStorage);
1113 BucketT *TmpEnd = TmpBegin;
1117 const KeyT EmptyKey = this->getEmptyKey();
1118 const KeyT TombstoneKey = this->getTombstoneKey();
1119 for (BucketT *
P = getBuckets(), *
E =
P + InlineBuckets;
P !=
E; ++
P) {
1120 if (!KeyInfoT::isEqual(
P->getFirst(), EmptyKey) &&
1121 !KeyInfoT::isEqual(
P->getFirst(), TombstoneKey)) {
1122 assert(
size_t(TmpEnd - TmpBegin) < InlineBuckets &&
1123 "Too many inline buckets!");
1124 ::new (&TmpEnd->getFirst())
KeyT(std::move(
P->getFirst()));
1125 ::new (&TmpEnd->getSecond())
ValueT(std::move(
P->getSecond()));
1127 P->getSecond().~ValueT();
1129 P->getFirst().~KeyT();
1135 if (AtLeast > InlineBuckets) {
1137 new (getLargeRep()) LargeRep(allocateBuckets(AtLeast));
1139 this->moveFromOldBuckets(TmpBegin, TmpEnd);
1143 LargeRep OldRep = std::move(*getLargeRep());
1144 getLargeRep()->~LargeRep();
1145 if (AtLeast <= InlineBuckets) {
1148 new (getLargeRep()) LargeRep(allocateBuckets(AtLeast));
1151 this->moveFromOldBuckets(OldRep.Buckets, OldRep.Buckets+OldRep.NumBuckets);
1159 unsigned OldSize = this->
size();
1163 unsigned NewNumBuckets = 0;
1166 if (NewNumBuckets > InlineBuckets && NewNumBuckets < 64u)
1169 if ((Small && NewNumBuckets <= InlineBuckets) ||
1170 (!Small && NewNumBuckets == getLargeRep()->NumBuckets)) {
1171 this->BaseT::initEmpty();
1175 deallocateBuckets();
1176 init(NewNumBuckets);
1180 unsigned getNumEntries()
const {
1184 void setNumEntries(
unsigned Num) {
1186 assert(Num < (1U << 31) &&
"Cannot support more than 1<<31 entries");
1190 unsigned getNumTombstones()
const {
1191 return NumTombstones;
1194 void setNumTombstones(
unsigned Num) {
1195 NumTombstones = Num;
1198 const BucketT *getInlineBuckets()
const {
1203 return reinterpret_cast<const BucketT *
>(&storage);
1206 BucketT *getInlineBuckets() {
1207 return const_cast<BucketT *
>(
1208 const_cast<const SmallDenseMap *
>(
this)->getInlineBuckets());
1211 const LargeRep *getLargeRep()
const {
1214 return reinterpret_cast<const LargeRep *
>(&storage);
1217 LargeRep *getLargeRep() {
1218 return const_cast<LargeRep *
>(
1219 const_cast<const SmallDenseMap *
>(
this)->getLargeRep());
1222 const BucketT *getBuckets()
const {
1223 return Small ? getInlineBuckets() : getLargeRep()->Buckets;
1226 BucketT *getBuckets() {
1227 return const_cast<BucketT *
>(
1228 const_cast<const SmallDenseMap *
>(
this)->getBuckets());
1231 unsigned getNumBuckets()
const {
1232 return Small ? InlineBuckets : getLargeRep()->NumBuckets;
1235 void deallocateBuckets() {
1240 sizeof(BucketT) * getLargeRep()->NumBuckets,
1242 getLargeRep()->~LargeRep();
1245 LargeRep allocateBuckets(
unsigned Num) {
1246 assert(Num > InlineBuckets &&
"Must allocate more buckets than are inline");
1248 sizeof(BucketT) * Num,
alignof(BucketT))),
1254template <
typename KeyT,
typename ValueT,
typename KeyInfoT,
typename Bucket,
1262 using value_type = std::conditional_t<IsConst, const Bucket, Bucket>;
1275 bool NoAdvance =
false)
1277 assert(isHandleInSync() &&
"invalid construction!");
1279 if (NoAdvance)
return;
1280 if (shouldReverseIterate<KeyT>()) {
1281 RetreatPastEmptyBuckets();
1284 AdvancePastEmptyBuckets();
1290 template <
bool IsConstSrc,
1291 typename = std::enable_if_t<!IsConstSrc && IsConst>>
1297 assert(isHandleInSync() &&
"invalid iterator access!");
1299 if (shouldReverseIterate<KeyT>())
1304 assert(isHandleInSync() &&
"invalid iterator access!");
1306 if (shouldReverseIterate<KeyT>())
1313 assert((!
LHS.Ptr ||
LHS.isHandleInSync()) &&
"handle not in sync!");
1314 assert((!
RHS.Ptr ||
RHS.isHandleInSync()) &&
"handle not in sync!");
1315 assert(
LHS.getEpochAddress() ==
RHS.getEpochAddress() &&
1316 "comparing incomparable iterators!");
1317 return LHS.Ptr ==
RHS.Ptr;
1326 assert(isHandleInSync() &&
"invalid iterator access!");
1328 if (shouldReverseIterate<KeyT>()) {
1330 RetreatPastEmptyBuckets();
1334 AdvancePastEmptyBuckets();
1338 assert(isHandleInSync() &&
"invalid iterator access!");
1343 void AdvancePastEmptyBuckets() {
1345 const KeyT Empty = KeyInfoT::getEmptyKey();
1346 const KeyT Tombstone = KeyInfoT::getTombstoneKey();
1348 while (
Ptr !=
End && (KeyInfoT::isEqual(
Ptr->getFirst(), Empty) ||
1349 KeyInfoT::isEqual(
Ptr->getFirst(), Tombstone)))
1353 void RetreatPastEmptyBuckets() {
1355 const KeyT Empty = KeyInfoT::getEmptyKey();
1356 const KeyT Tombstone = KeyInfoT::getTombstoneKey();
1358 while (
Ptr !=
End && (KeyInfoT::isEqual(
Ptr[-1].getFirst(), Empty) ||
1359 KeyInfoT::isEqual(
Ptr[-1].getFirst(), Tombstone)))
1364template <
typename KeyT,
typename ValueT,
typename KeyInfoT>
1366 return X.getMemorySize();
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
#define LLVM_UNLIKELY(EXPR)
#define LLVM_LIKELY(EXPR)
This file defines DenseMapInfo traits for DenseMap.
This file defines the DebugEpochBase and DebugEpochBase::HandleBase classes.
static GCMetadataPrinterRegistry::Add< ErlangGCPrinter > X("erlang", "erlang-compatible garbage collector")
This file defines counterparts of C library allocation functions defined in the namespace 'std'.
const MachineOperand & RHS
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
static int Lookup(ArrayRef< TableEntry > Table, unsigned Opcode)
ValueT & getOrInsertDefault(const KeyT &Key)
Returns the value associated to the key in the map if it exists.
ValueT lookup(const_arg_type_t< KeyT > Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exists.
iterator find(const_arg_type_t< KeyT > Val)
static unsigned getHashValue(const KeyT &Val)
static const KeyT getEmptyKey()
value_type & FindAndConstruct(KeyT &&Key)
std::pair< iterator, bool > insert(std::pair< KeyT, ValueT > &&KV)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&... Args)
bool erase(const KeyT &Val)
DenseMapIterator< KeyT, ValueT, KeyInfoT, BucketT > iterator
std::pair< iterator, bool > insert_as(std::pair< KeyT, ValueT > &&KV, const LookupKeyT &Val)
Alternate version of insert() which allows a different, and possibly less expensive, key type.
const_iterator find_as(const LookupKeyT &Val) const
const_iterator end() const
std::pair< iterator, bool > try_emplace(const KeyT &Key, Ts &&... Args)
void moveFromOldBuckets(BucketT *OldBucketsBegin, BucketT *OldBucketsEnd)
iterator find_as(const LookupKeyT &Val)
Alternate version of find() which allows a different, and possibly less expensive, key type.
const_iterator find(const_arg_type_t< KeyT > Val) const
void insert(InputIt I, InputIt E)
insert - Range insertion of pairs.
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
static const KeyT getTombstoneKey()
const ValueT & at(const_arg_type_t< KeyT > Val) const
at - Return the entry for the specified key, or abort if no such entry exists.
bool isPointerIntoBucketsArray(const void *Ptr) const
isPointerIntoBucketsArray - Return true if the specified pointer points somewhere into the DenseMap's array of buckets (i.e. either to a key or value in the DenseMap).
void copyFrom(const DenseMapBase< OtherBaseT, KeyT, ValueT, KeyInfoT, BucketT > &other)
bool contains(const_arg_type_t< KeyT > Val) const
Return true if the specified key is in the map, false otherwise.
value_type & FindAndConstruct(const KeyT &Key)
const_iterator begin() const
const void * getPointerIntoBucketsArray() const
getPointerIntoBucketsArray() - Return an opaque pointer into the buckets array.
std::pair< iterator, bool > insert_or_assign(KeyT &&Key, V &&Val)
unsigned getMinBucketToReserveForEntries(unsigned NumEntries)
Returns the number of buckets to allocate to ensure that the DenseMap can accommodate NumEntries without needing to grow().
static unsigned getHashValue(const LookupKeyT &Val)
ValueT & operator[](const KeyT &Key)
DenseMapIterator< KeyT, ValueT, KeyInfoT, BucketT, true > const_iterator
ValueT & getOrInsertDefault(KeyT &&Key)
Returns the value associated to the key in the map if it exists.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
std::pair< iterator, bool > insert_or_assign(const KeyT &Key, V &&Val)
void reserve(size_type NumEntries)
Grow the densemap so that it can contain at least NumEntries items before resizing again.
ValueT & operator[](KeyT &&Key)
size_t getMemorySize() const
Return the approximate size (in bytes) of the actual map.
std::conditional_t< IsConst, const Bucket, Bucket > value_type
friend bool operator!=(const DenseMapIterator &LHS, const DenseMapIterator &RHS)
DenseMapIterator & operator++()
pointer operator->() const
reference operator*() const
DenseMapIterator(pointer Pos, pointer E, const DebugEpochBase &Epoch, bool NoAdvance=false)
DenseMapIterator()=default
DenseMapIterator operator++(int)
DenseMapIterator(const DenseMapIterator< KeyT, ValueT, KeyInfoT, Bucket, IsConstSrc > &I)
friend bool operator==(const DenseMapIterator &LHS, const DenseMapIterator &RHS)
std::forward_iterator_tag iterator_category
DenseMap(std::initializer_list< typename BaseT::value_type > Vals)
void copyFrom(const DenseMap &other)
DenseMap & operator=(DenseMap &&other)
DenseMap(unsigned InitialReserve=0)
Create a DenseMap with an optional InitialReserve that guarantees that this number of elements can be inserted in the map without grow().
void grow(unsigned AtLeast)
void init(unsigned InitNumEntries)
DenseMap(const DenseMap &other)
DenseMap(const InputIt &I, const InputIt &E)
DenseMap(DenseMap &&other)
DenseMap & operator=(const DenseMap &other)
void grow(unsigned AtLeast)
SmallDenseMap(const InputIt &I, const InputIt &E)
void swap(SmallDenseMap &RHS)
void init(unsigned InitBuckets)
SmallDenseMap & operator=(SmallDenseMap &&other)
SmallDenseMap & operator=(const SmallDenseMap &other)
SmallDenseMap(unsigned NumInitBuckets=0)
SmallDenseMap(std::initializer_list< typename BaseT::value_type > Vals)
SmallDenseMap(SmallDenseMap &&other)
SmallDenseMap(const SmallDenseMap &other)
void copyFrom(const SmallDenseMap &other)
constexpr char IsConst[]
Key for Kernel::Arg::Metadata::mIsConst.
This is an optimization pass for GlobalISel generic memory operations.
unsigned Log2_32_Ceil(uint32_t Value)
Return the ceil log base 2 of the specified value, 32 if the value is zero.
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
BitVector::size_type capacity_in_bytes(const BitVector &X)
bool operator!=(uint64_t V1, const APInt &V2)
constexpr bool isPowerOf2_64(uint64_t Value)
Return true if the argument is a power of two > 0 (64 bit edition.)
T bit_ceil(T Value)
Returns the smallest integral power of two no smaller than Value if Value is nonzero.
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
LLVM_ATTRIBUTE_RETURNS_NONNULL LLVM_ATTRIBUTE_RETURNS_NOALIAS void * allocate_buffer(size_t Size, size_t Alignment)
Allocate a buffer of memory with the given size and alignment.
void deallocate_buffer(void *Ptr, size_t Size, size_t Alignment)
Deallocate a buffer of memory with the given size and alignment.
OutputIt move(R &&Range, OutputIt Out)
Provide wrappers to std::move which take ranges instead of having to pass begin/end explicitly.
constexpr uint64_t NextPowerOf2(uint64_t A)
Returns the next power of two (in 64-bits) that is strictly greater than A.
Implement std::hash so that hash_code can be used in STL containers.
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A suitably aligned and sized character array member which can hold elements of any type.
const ValueT & getSecond() const
const KeyT & getFirst() const