#ifndef LLVM_SUPPORT_ALLOCATOR_H
#define LLVM_SUPPORT_ALLOCATOR_H
61 template <
typename AllocatorT = MallocAllocator,
size_t SlabSize = 4096,
62 size_t SizeThreshold = SlabSize,
size_t GrowthDelay = 128>
64 :
public AllocatorBase<BumpPtrAllocatorImpl<AllocatorT, SlabSize,
65 SizeThreshold, GrowthDelay>>,
70 static_assert(SizeThreshold <= SlabSize,
71 "The SizeThreshold must be at most the SlabSize to ensure "
72 "that objects larger than a slab go into their own memory "
74 static_assert(GrowthDelay > 0,
75 "GrowthDelay must be at least 1 which already increases the"
76 "slab size after each allocated slab.");
88 End(Old.End), Slabs(
std::
move(Old.Slabs)),
89 CustomSizedSlabs(
std::
move(Old.CustomSizedSlabs)),
90 BytesAllocated(Old.BytesAllocated), RedZoneSize(Old.RedZoneSize) {
91 Old.CurPtr = Old.End =
nullptr;
92 Old.BytesAllocated = 0;
94 Old.CustomSizedSlabs.clear();
98 DeallocateSlabs(Slabs.begin(), Slabs.end());
99 DeallocateCustomSizedSlabs();
103 DeallocateSlabs(Slabs.begin(), Slabs.end());
104 DeallocateCustomSizedSlabs();
108 BytesAllocated =
RHS.BytesAllocated;
109 RedZoneSize =
RHS.RedZoneSize;
114 RHS.CurPtr =
RHS.End =
nullptr;
115 RHS.BytesAllocated = 0;
117 RHS.CustomSizedSlabs.clear();
125 DeallocateCustomSizedSlabs();
126 CustomSizedSlabs.
clear();
133 CurPtr = (
char *)Slabs.front();
134 End = CurPtr + SlabSize;
137 DeallocateSlabs(std::next(Slabs.begin()), Slabs.end());
138 Slabs.
erase(std::next(Slabs.begin()), Slabs.end());
150 BytesAllocated += Size;
153 assert(Adjustment + Size >= Size &&
"Adjustment + Size must not overflow");
155 size_t SizeToAllocate = Size;
156 #if LLVM_ADDRESS_SANITIZER_BUILD
158 SizeToAllocate += RedZoneSize;
162 if (Adjustment + SizeToAllocate <=
size_t(End - CurPtr)
164 && CurPtr !=
nullptr) {
165 char *AlignedPtr = CurPtr + Adjustment;
166 CurPtr = AlignedPtr + SizeToAllocate;
177 size_t PaddedSize = SizeToAllocate + Alignment.
value() - 1;
178 if (PaddedSize > SizeThreshold) {
184 CustomSizedSlabs.push_back(std::make_pair(NewSlab, PaddedSize));
186 uintptr_t AlignedAddr =
alignAddr(NewSlab, Alignment);
187 assert(AlignedAddr + Size <= (uintptr_t)NewSlab + PaddedSize);
188 char *AlignedPtr = (
char*)AlignedAddr;
196 uintptr_t AlignedAddr =
alignAddr(CurPtr, Alignment);
197 assert(AlignedAddr + SizeToAllocate <= (uintptr_t)End &&
198 "Unable to allocate memory!");
199 char *AlignedPtr = (
char*)AlignedAddr;
200 CurPtr = AlignedPtr + SizeToAllocate;
208 assert(Alignment > 0 &&
"0-byte alignment is not allowed. Use 1 instead.");
225 size_t GetNumSlabs()
const {
return Slabs.size() + CustomSizedSlabs.size(); }
233 const char *
P =
static_cast<const char *
>(
Ptr);
234 int64_t InSlabIdx = 0;
235 for (
size_t Idx = 0,
E = Slabs.size(); Idx <
E; Idx++) {
236 const char *
S =
static_cast<const char *
>(Slabs[Idx]);
237 if (
P >=
S &&
P <
S + computeSlabSize(Idx))
238 return InSlabIdx +
static_cast<int64_t
>(
P -
S);
239 InSlabIdx +=
static_cast<int64_t
>(computeSlabSize(Idx));
243 int64_t InCustomSizedSlabIdx = -1;
244 for (
size_t Idx = 0,
E = CustomSizedSlabs.size(); Idx <
E; Idx++) {
245 const char *
S =
static_cast<const char *
>(CustomSizedSlabs[Idx].first);
246 size_t Size = CustomSizedSlabs[Idx].second;
247 if (
P >=
S &&
P <
S + Size)
248 return InCustomSizedSlabIdx -
static_cast<int64_t
>(
P -
S);
249 InCustomSizedSlabIdx -=
static_cast<int64_t
>(Size);
260 assert(Out &&
"Wrong allocator used");
274 template <
typename T>
277 assert(Out %
alignof(
T) == 0 &&
"Wrong alignment information");
278 return Out /
alignof(
T);
282 size_t TotalMemory = 0;
283 for (
auto I = Slabs.begin(),
E = Slabs.end();
I !=
E; ++
I)
284 TotalMemory += computeSlabSize(std::distance(Slabs.begin(),
I));
285 for (
const auto &PtrAndSize : CustomSizedSlabs)
286 TotalMemory += PtrAndSize.second;
293 RedZoneSize = NewSize;
305 char *CurPtr =
nullptr;
319 size_t BytesAllocated = 0;
323 size_t RedZoneSize = 1;
325 static size_t computeSlabSize(
unsigned SlabIdx) {
331 ((
size_t)1 << std::min<size_t>(30, SlabIdx / GrowthDelay));
336 void StartNewSlab() {
337 size_t AllocatedSlabSize = computeSlabSize(Slabs.size());
340 alignof(std::max_align_t));
345 Slabs.push_back(NewSlab);
346 CurPtr = (
char *)(NewSlab);
347 End = ((
char *)NewSlab) + AllocatedSlabSize;
353 for (;
I !=
E; ++
I) {
354 size_t AllocatedSlabSize =
355 computeSlabSize(std::distance(Slabs.begin(),
I));
357 alignof(std::max_align_t));
362 void DeallocateCustomSizedSlabs() {
363 for (
auto &PtrAndSize : CustomSizedSlabs) {
364 void *
Ptr = PtrAndSize.first;
365 size_t Size = PtrAndSize.second;
404 auto DestroyElements = [](
char *Begin,
char *End) {
406 for (
char *
Ptr = Begin;
Ptr +
sizeof(
T) <= End;
Ptr +=
sizeof(
T))
407 reinterpret_cast<T *
>(
Ptr)->~
T();
412 size_t AllocatedSlabSize = BumpPtrAllocator::computeSlabSize(
414 char *Begin = (
char *)
alignAddr(*
I, Align::Of<T>());
416 : (
char *)*
I + AllocatedSlabSize;
418 DestroyElements(Begin, End);
421 for (
auto &PtrAndSize :
Allocator.CustomSizedSlabs) {
422 void *
Ptr = PtrAndSize.first;
423 size_t Size = PtrAndSize.second;
437 template <
typename AllocatorT,
size_t SlabSize,
size_t SizeThreshold,
444 alignof(std::max_align_t)));
447 template <
typename AllocatorT,
size_t SlabSize,
size_t SizeThreshold,
449 void operator delete(
void *,
451 SizeThreshold, GrowthDelay> &) {
#endif // LLVM_SUPPORT_ALLOCATOR_H