21 #ifndef LLVM_SUPPORT_ALLOCATOR_H
22 #define LLVM_SUPPORT_ALLOCATOR_H
26 #include "llvm/Support/DataTypes.h"
46 void *
Allocate(
size_t Size,
size_t Alignment) {
48 static_assert(
static_cast<void *(
AllocatorBase::*)(
size_t,
size_t)
>(
50 static_cast<void *(DerivedT::*)(
size_t,
size_t)
>(
52 "Class derives from AllocatorBase without implementing the "
53 "core Allocate(size_t, size_t) overload!");
55 return static_cast<DerivedT *
>(
this)->
Allocate(Size, Alignment);
62 static_assert(
static_cast<void (
AllocatorBase::*)(
const void *,
size_t)
>(
64 static_cast<void (DerivedT::*)(
const void *,
size_t)
>(
65 &DerivedT::Deallocate),
66 "Class derives from AllocatorBase without implementing the "
67 "core Deallocate(void *) overload!");
69 return static_cast<DerivedT *
>(
this)->
Deallocate(Ptr, Size);
82 typename std::enable_if<
83 !std::is_same<typename std::remove_cv<T>::type,
void>::value,
void>::type
85 Deallocate(static_cast<const void *>(Ptr), Num *
sizeof(
T));
/// \brief Free memory obtained from malloc; the Size argument is unused
/// because free() does not need it. const_cast is required since free()
/// takes a non-const void*.
/// NOTE(review): extraction kept only the body line; signature restored.
void Deallocate(const void *Ptr, size_t /*Size*/) {
  free(const_cast<void *>(Ptr));
}
133 template <
typename AllocatorT = MallocAllocator,
size_t SlabSize = 4096,
134 size_t SizeThreshold = SlabSize>
137 BumpPtrAllocatorImpl<AllocatorT, SlabSize, SizeThreshold>> {
139 static_assert(SizeThreshold <= SlabSize,
140 "The SizeThreshold must be at most the SlabSize to ensure "
141 "that objects larger than a slab go into their own memory "
145 : CurPtr(nullptr), End(nullptr), BytesAllocated(0), Allocator() {}
146 template <
typename T>
148 : CurPtr(nullptr), End(nullptr), BytesAllocated(0),
149 Allocator(std::forward<
T &&>(Allocator)) {}
154 : CurPtr(Old.CurPtr), End(Old.End), Slabs(std::move(Old.Slabs)),
155 CustomSizedSlabs(std::move(Old.CustomSizedSlabs)),
156 BytesAllocated(Old.BytesAllocated),
157 Allocator(std::move(Old.Allocator)) {
158 Old.CurPtr = Old.End =
nullptr;
159 Old.BytesAllocated = 0;
161 Old.CustomSizedSlabs.clear();
165 DeallocateSlabs(Slabs.
begin(), Slabs.
end());
166 DeallocateCustomSizedSlabs();
170 DeallocateSlabs(Slabs.
begin(), Slabs.
end());
171 DeallocateCustomSizedSlabs();
175 BytesAllocated = RHS.BytesAllocated;
176 Slabs = std::move(RHS.Slabs);
177 CustomSizedSlabs = std::move(RHS.CustomSizedSlabs);
178 Allocator = std::move(RHS.Allocator);
180 RHS.CurPtr = RHS.End =
nullptr;
181 RHS.BytesAllocated = 0;
183 RHS.CustomSizedSlabs.clear();
190 DeallocateCustomSizedSlabs();
191 CustomSizedSlabs.
clear();
198 CurPtr = (
char *)Slabs.
front();
199 End = CurPtr + SlabSize;
202 DeallocateSlabs(std::next(Slabs.
begin()), Slabs.
end());
209 assert(Alignment > 0 &&
"0-byte alignnment is not allowed. Use 1 instead.");
212 BytesAllocated += Size;
215 assert(Adjustment + Size >= Size &&
"Adjustment + Size must not overflow");
218 if (Adjustment + Size <=
size_t(End - CurPtr)) {
219 char *AlignedPtr = CurPtr + Adjustment;
220 CurPtr = AlignedPtr + Size;
229 size_t PaddedSize = Size + Alignment - 1;
230 if (PaddedSize > SizeThreshold) {
231 void *NewSlab = Allocator.Allocate(PaddedSize, 0);
232 CustomSizedSlabs.
push_back(std::make_pair(NewSlab, PaddedSize));
234 uintptr_t AlignedAddr =
alignAddr(NewSlab, Alignment);
235 assert(AlignedAddr + Size <= (uintptr_t)NewSlab + PaddedSize);
236 char *AlignedPtr = (
char*)AlignedAddr;
243 uintptr_t AlignedAddr =
alignAddr(CurPtr, Alignment);
244 assert(AlignedAddr + Size <= (uintptr_t)End &&
245 "Unable to allocate memory!");
246 char *AlignedPtr = (
char*)AlignedAddr;
247 CurPtr = AlignedPtr + Size;
263 size_t TotalMemory = 0;
264 for (
auto I = Slabs.
begin(), E = Slabs.
end();
I != E; ++
I)
265 TotalMemory += computeSlabSize(std::distance(Slabs.
begin(),
I));
266 for (
auto &PtrAndSize : CustomSizedSlabs)
267 TotalMemory += PtrAndSize.second;
294 size_t BytesAllocated;
297 AllocatorT Allocator;
299 static size_t computeSlabSize(
unsigned SlabIdx) {
304 return SlabSize * ((size_t)1 << std::min<size_t>(30, SlabIdx / 128));
309 void StartNewSlab() {
310 size_t AllocatedSlabSize = computeSlabSize(Slabs.
size());
312 void *NewSlab = Allocator.Allocate(AllocatedSlabSize, 0);
314 CurPtr = (
char *)(NewSlab);
315 End = ((
char *)NewSlab) + AllocatedSlabSize;
321 for (; I != E; ++
I) {
322 size_t AllocatedSlabSize =
323 computeSlabSize(std::distance(Slabs.
begin(),
I));
324 Allocator.Deallocate(*I, AllocatedSlabSize);
329 void DeallocateCustomSizedSlabs() {
330 for (
auto &PtrAndSize : CustomSizedSlabs) {
331 void *Ptr = PtrAndSize.first;
332 size_t Size = PtrAndSize.second;
333 Allocator.Deallocate(Ptr, Size);
355 : Allocator(std::move(Old.Allocator)) {}
359 Allocator = std::move(RHS.Allocator);
367 auto DestroyElements = [](
char *Begin,
char *End) {
368 assert(Begin == (
char*)
alignAddr(Begin, alignOf<T>()));
369 for (
char *Ptr = Begin; Ptr +
sizeof(
T) <= End; Ptr +=
sizeof(
T))
370 reinterpret_cast<T *
>(Ptr)->~
T();
373 for (
auto I = Allocator.Slabs.
begin(), E = Allocator.Slabs.
end(); I != E;
375 size_t AllocatedSlabSize = BumpPtrAllocator::computeSlabSize(
376 std::distance(Allocator.Slabs.
begin(),
I));
377 char *Begin = (
char*)
alignAddr(*I, alignOf<T>());
378 char *End = *I == Allocator.Slabs.
back() ? Allocator.CurPtr
379 : (
char *)*I + AllocatedSlabSize;
381 DestroyElements(Begin, End);
384 for (
auto &PtrAndSize : Allocator.CustomSizedSlabs) {
385 void *Ptr = PtrAndSize.first;
386 size_t Size = PtrAndSize.second;
387 DestroyElements((
char*)
alignAddr(Ptr, alignOf<T>()), (
char *)Ptr + Size);
399 template <
typename AllocatorT,
size_t SlabSize,
size_t SizeThreshold>
400 void *
operator new(
size_t Size,
402 SizeThreshold> &Allocator) {
412 return Allocator.Allocate(
416 template <
typename AllocatorT,
size_t SlabSize,
size_t SizeThreshold>
417 void operator delete(
421 #endif // LLVM_SUPPORT_ALLOCATOR_H
SuperClass::iterator iterator
void push_back(const T &Elt)
#define __msan_allocated_memory(p, size)
AlignOf - A templated class that contains an enum value representing the alignment of the template argument.
void Deallocate(const void *Ptr, size_t Size)
Deallocate Ptr to Size bytes of memory allocated by this allocator.
size_t getTotalMemory() const
void printBumpPtrAllocatorStats(unsigned NumSlabs, size_t BytesAllocated, size_t TotalMemory)
void * Allocate(size_t Size, size_t Alignment)
Allocate Size bytes of Alignment aligned memory.
void Reset()
Deallocate all but the current slab and reset the current pointer to the beginning of it, freeing all memory allocated so far.
BumpPtrAllocatorImpl & operator=(BumpPtrAllocatorImpl &&RHS)
Number of individual test Apply this number of consecutive mutations to each input exit after the first new interesting input is found the minimized corpus is saved into the first input directory Number of jobs to run If min(jobs, NumberOfCpuCores()/2)\" is used.") FUZZER_FLAG_INT(reload
size_t GetNumSlabs() const
bool LLVM_ATTRIBUTE_UNUSED_RESULT empty() const
void DestroyAll()
Call the destructor of each allocated object and deallocate all but the current slab and reset the current pointer to the beginning of it.
BumpPtrAllocatorImpl BumpPtrAllocator
The standard BumpPtrAllocator which just uses the default template parameters.
Allocate memory in an ever growing pool, as if by bump-pointer.
size_t alignmentAdjustment(const void *Ptr, size_t Alignment)
Returns the necessary adjustment for aligning Ptr to Alignment bytes, rounding up.
LLVM_ATTRIBUTE_RETURNS_NONNULL LLVM_ATTRIBUTE_RETURNS_NOALIAS void * Allocate(size_t Size, size_t Alignment)
Allocate space at the specified alignment.
void Deallocate(const void *Ptr, size_t)
T * Allocate(size_t num=1)
Allocate space for an array of objects without constructing them.
#define LLVM_ATTRIBUTE_RETURNS_NONNULL
~SpecificBumpPtrAllocator()
uint64_t NextPowerOf2(uint64_t A)
NextPowerOf2 - Returns the next power of two (in 64-bits) that is strictly greater than A...
iterator erase(iterator I)
void Deallocate(const void *, size_t)
SpecificBumpPtrAllocator & operator=(SpecificBumpPtrAllocator &&RHS)
SpecificBumpPtrAllocator(SpecificBumpPtrAllocator &&Old)
SpecificBumpPtrAllocator()
A BumpPtrAllocator that allows only elements of a specific type to be allocated.
#define LLVM_ATTRIBUTE_RETURNS_NOALIAS
LLVM_ATTRIBUTE_RETURNS_NOALIAS Used to mark a function as returning a pointer that does not alias any other valid pointer.
uintptr_t alignAddr(const void *Addr, size_t Alignment)
Aligns Addr to Alignment bytes, rounding up.
LLVM_ATTRIBUTE_RETURNS_NONNULL void * Allocate(size_t Size, size_t)
T * Allocate(size_t Num=1)
Allocate space for a sequence of objects without constructing them.
std::enable_if< !std::is_same< typename std::remove_cv< T >::type, void >::value, void >::type Deallocate(T *Ptr, size_t Num=1)
Deallocate space for a sequence of objects without constructing them.
BumpPtrAllocatorImpl(T &&Allocator)
BumpPtrAllocatorImpl(BumpPtrAllocatorImpl &&Old)
CRTP base class providing obvious overloads for the core Allocate() methods of LLVM-style allocators...