//===--- Allocator.cpp - Simple memory allocation abstraction ------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the BumpPtrAllocator interface.
//
//===----------------------------------------------------------------------===//

#include "llvm/Support/Allocator.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/DataTypes.h"
#include "llvm/Support/Memory.h"
#include "llvm/Support/Recycler.h"
#include "llvm/Support/raw_ostream.h"
#include <cstring>

namespace llvm {

BumpPtrAllocator::BumpPtrAllocator(size_t size, size_t threshold,
                                   SlabAllocator &allocator)
    : SlabSize(size), SizeThreshold(std::min(size, threshold)),
      Allocator(allocator), CurSlab(0), BytesAllocated(0), NumSlabs(0) {}

BumpPtrAllocator::BumpPtrAllocator(size_t size, size_t threshold)
    : SlabSize(size), SizeThreshold(std::min(size, threshold)),
      Allocator(DefaultSlabAllocator), CurSlab(0), BytesAllocated(0),
      NumSlabs(0) {}

BumpPtrAllocator::~BumpPtrAllocator() {
  DeallocateSlabs(CurSlab);
}

/// AlignPtr - Align Ptr to Alignment bytes, rounding up. Alignment should
/// be a power of two. This method rounds up, so AlignPtr(7, 4) == 8 and
/// AlignPtr(8, 4) == 8.
char *BumpPtrAllocator::AlignPtr(char *Ptr, size_t Alignment) {
  assert(Alignment && (Alignment & (Alignment - 1)) == 0 &&
         "Alignment is not a power of two!");

  // Do the alignment.
  return (char*)(((uintptr_t)Ptr + Alignment - 1) &
                 ~(uintptr_t)(Alignment - 1));
}

/// StartNewSlab - Allocate a new slab and move the bump pointers over into
/// the new slab. Modifies CurPtr and End.
void BumpPtrAllocator::StartNewSlab() {
  ++NumSlabs;
  // Scale the actual allocated slab size based on the number of slabs
  // allocated. Every 128 slabs allocated, we double the allocated slab size
  // to reduce allocation frequency, but saturate at multiplying the slab
  // size by 2^30.
  // FIXME: Currently, this count includes special slabs for objects above the
  // size threshold. That will be fixed in a subsequent commit to make the
  // growth even more predictable.
  size_t AllocatedSlabSize =
      SlabSize * ((size_t)1 << std::min<size_t>(30, NumSlabs / 128));

  MemSlab *NewSlab = Allocator.Allocate(AllocatedSlabSize);
  NewSlab->NextPtr = CurSlab;
  CurSlab = NewSlab;
  CurPtr = (char*)(CurSlab + 1);
  End = ((char*)CurSlab) + CurSlab->Size;
}

/// DeallocateSlabs - Deallocate all memory slabs after and including this
/// one.
void BumpPtrAllocator::DeallocateSlabs(MemSlab *Slab) {
  while (Slab) {
    MemSlab *NextSlab = Slab->NextPtr;
#ifndef NDEBUG
    // Poison the memory so stale pointers crash sooner. Note we must
    // preserve the Size and NextPtr fields at the beginning.
    sys::Memory::setRangeWritable(Slab + 1, Slab->Size - sizeof(MemSlab));
    memset(Slab + 1, 0xCD, Slab->Size - sizeof(MemSlab));
#endif
    Allocator.Deallocate(Slab);
    Slab = NextSlab;
    --NumSlabs;
  }
}

/// Reset - Deallocate all but the current slab and reset the current pointer
/// to the beginning of it, freeing all memory allocated so far.
void BumpPtrAllocator::Reset() {
  if (!CurSlab)
    return;
  DeallocateSlabs(CurSlab->NextPtr);
  CurSlab->NextPtr = 0;
  CurPtr = (char*)(CurSlab + 1);
  End = ((char*)CurSlab) + CurSlab->Size;
  BytesAllocated = 0;
}

/// Allocate - Allocate space at the specified alignment.
///
void *BumpPtrAllocator::Allocate(size_t Size, size_t Alignment) {
  if (!CurSlab) // Start a new slab if we haven't allocated one already.
    StartNewSlab();

  // Keep track of how many bytes we've allocated.
  BytesAllocated += Size;

  // 0-byte alignment means 1-byte alignment.
  if (Alignment == 0) Alignment = 1;

  // Allocate the aligned space, going forwards from CurPtr.
  char *Ptr = AlignPtr(CurPtr, Alignment);

  // Check if we can hold it.
  if (Ptr + Size <= End) {
    CurPtr = Ptr + Size;
    // Update the allocation point of this memory block in MemorySanitizer.
    // Without this, MemorySanitizer messages for values originated from here
    // will point to the allocation of the entire slab.
    __msan_allocated_memory(Ptr, Size);
    return Ptr;
  }

  // If Size is really big, allocate a separate slab for it.
  size_t PaddedSize = Size + sizeof(MemSlab) + Alignment - 1;
  if (PaddedSize > SizeThreshold) {
    ++NumSlabs;
    MemSlab *NewSlab = Allocator.Allocate(PaddedSize);

    // Put the new slab after the current slab, since we are not allocating
    // into it.
    NewSlab->NextPtr = CurSlab->NextPtr;
    CurSlab->NextPtr = NewSlab;

    Ptr = AlignPtr((char*)(NewSlab + 1), Alignment);
    assert((uintptr_t)Ptr + Size <= (uintptr_t)NewSlab + NewSlab->Size);
    __msan_allocated_memory(Ptr, Size);
    return Ptr;
  }

  // Otherwise, start a new slab and try again.
  StartNewSlab();
  Ptr = AlignPtr(CurPtr, Alignment);
  CurPtr = Ptr + Size;
  assert(CurPtr <= End && "Unable to allocate memory!");
  __msan_allocated_memory(Ptr, Size);
  return Ptr;
}

size_t BumpPtrAllocator::getTotalMemory() const {
  size_t TotalMemory = 0;
  for (MemSlab *Slab = CurSlab; Slab != 0; Slab = Slab->NextPtr) {
    TotalMemory += Slab->Size;
  }
  return TotalMemory;
}

void BumpPtrAllocator::PrintStats() const {
  unsigned NumSlabs = 0;
  size_t TotalMemory = 0;
  for (MemSlab *Slab = CurSlab; Slab != 0; Slab = Slab->NextPtr) {
    TotalMemory += Slab->Size;
    ++NumSlabs;
  }

  errs() << "\nNumber of memory regions: " << NumSlabs << '\n'
         << "Bytes used: " << BytesAllocated << '\n'
         << "Bytes allocated: " << TotalMemory << '\n'
         << "Bytes wasted: " << (TotalMemory - BytesAllocated)
         << " (includes alignment, etc)\n";
}

SlabAllocator::~SlabAllocator() { }

MallocSlabAllocator::~MallocSlabAllocator() { }

MemSlab *MallocSlabAllocator::Allocate(size_t Size) {
  MemSlab *Slab = (MemSlab*)Allocator.Allocate(Size, 0);
  Slab->Size = Size;
  Slab->NextPtr = 0;
  return Slab;
}

void MallocSlabAllocator::Deallocate(MemSlab *Slab) {
  Allocator.Deallocate(Slab);
}

void PrintRecyclerStats(size_t Size,
                        size_t Align,
                        size_t FreeListSize) {
  errs() << "Recycler element size: " << Size << '\n'
         << "Recycler element alignment: " << Align << '\n'
         << "Number of elements free for recycling: " << FreeListSize << '\n';
}

} // end namespace llvm
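
// Usage sketch (illustrative only, not part of this file's implementation): a
// client keeps a BumpPtrAllocator around, carves objects out of it, and
// releases everything at once via Reset() or the destructor. This sketch
// assumes the default SlabSize/SizeThreshold constructor arguments and the
// Allocate<T>() convenience template declared in llvm/Support/Allocator.h.
//
//   BumpPtrAllocator Alloc;               // default slab size and threshold
//   int *I = Alloc.Allocate<int>();       // small allocation from the slab
//   void *Buf = Alloc.Allocate(8192, 16); // above SizeThreshold: own slab
//   Alloc.PrintStats();                   // dump slab/byte counters to errs()
//   Alloc.Reset();                        // keep the current slab, free the rest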