xref: /llvm-project/libc/fuzzing/__support/freelist_heap_fuzz.cpp (revision 385961d7b23d5500d20954ad8137b27ecced2692)
1 //===-- freelist_heap_fuzz.cpp --------------------------------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 ///
9 /// Fuzzing test for llvm-libc freelist-based heap implementation.
10 ///
11 //===----------------------------------------------------------------------===//
12 
13 #include "src/__support/CPP/bit.h"
14 #include "src/__support/CPP/optional.h"
15 #include "src/__support/freelist_heap.h"
16 #include "src/string/memory_utils/inline_memcpy.h"
17 #include "src/string/memory_utils/inline_memmove.h"
18 #include "src/string/memory_utils/inline_memset.h"
19 
20 using LIBC_NAMESPACE::FreeListHeap;
21 using LIBC_NAMESPACE::inline_memset;
22 using LIBC_NAMESPACE::cpp::nullopt;
23 using LIBC_NAMESPACE::cpp::optional;
24 
25 // Record of an outstanding allocation.
struct Alloc {
  void *ptr;        // Pointer returned by the heap for this allocation.
  size_t size;      // Requested size in bytes (region filled with `canary`).
  size_t alignment; // Alignment the pointer is expected to satisfy.
  uint8_t canary; // Byte written to the allocation
};
32 
33 // A simple vector that tracks allocations using the heap.
34 class AllocVec {
35 public:
36   AllocVec(FreeListHeap &heap) : heap(&heap), size_(0), capacity(0) {
37     allocs = nullptr;
38   }
39 
40   bool empty() const { return !size_; }
41 
42   size_t size() const { return size_; }
43 
44   bool push_back(Alloc alloc) {
45     if (size_ == capacity) {
46       size_t new_cap = capacity ? capacity * 2 : 1;
47       Alloc *new_allocs = reinterpret_cast<Alloc *>(
48           heap->realloc(allocs, new_cap * sizeof(Alloc)));
49       if (!new_allocs)
50         return false;
51       allocs = new_allocs;
52       capacity = new_cap;
53     }
54     allocs[size_++] = alloc;
55     return true;
56   }
57 
58   Alloc &operator[](size_t idx) { return allocs[idx]; }
59 
60   void erase_idx(size_t idx) {
61     LIBC_NAMESPACE::inline_memmove(&allocs[idx], &allocs[idx + 1],
62                                    sizeof(Alloc) * (size_ - idx - 1));
63     --size_;
64   }
65 
66 private:
67   FreeListHeap *heap;
68   Alloc *allocs;
69   size_t size_;
70   size_t capacity;
71 };
72 
73 // Choose a T value by casting libfuzzer data or exit.
74 template <typename T>
75 optional<T> choose(const uint8_t *&data, size_t &remainder) {
76   if (sizeof(T) > remainder)
77     return nullopt;
78   T out;
79   LIBC_NAMESPACE::inline_memcpy(&out, data, sizeof(T));
80   data += sizeof(T);
81   remainder -= sizeof(T);
82   return out;
83 }
84 
85 // The type of allocation to perform
enum class AllocType : uint8_t {
  MALLOC,        // heap.allocate
  ALIGNED_ALLOC, // heap.aligned_allocate with a fuzzed power-of-two alignment
  REALLOC,       // heap.realloc of an existing tracked allocation
  CALLOC,        // heap.calloc (must return zeroed memory)
  NUM_ALLOC_TYPES, // Sentinel: count of the real alloc kinds above.
};
93 
94 template <>
95 optional<AllocType> choose<AllocType>(const uint8_t *&data, size_t &remainder) {
96   auto raw = choose<uint8_t>(data, remainder);
97   if (!raw)
98     return nullopt;
99   return static_cast<AllocType>(
100       *raw % static_cast<uint8_t>(AllocType::NUM_ALLOC_TYPES));
101 }
102 
// Total size of the fuzzed heap arena, in bytes.
constexpr size_t heap_size = 64 * 1024;
104 
105 optional<size_t> choose_size(const uint8_t *&data, size_t &remainder) {
106   auto raw = choose<size_t>(data, remainder);
107   if (!raw)
108     return nullopt;
109   return *raw % heap_size;
110 }
111 
112 optional<size_t> choose_alloc_idx(const AllocVec &allocs, const uint8_t *&data,
113                                   size_t &remainder) {
114   if (allocs.empty())
115     return nullopt;
116   auto raw = choose<size_t>(data, remainder);
117   if (!raw)
118     return nullopt;
119   return *raw % allocs.size();
120 }
121 
// Evaluate EXPR (which yields an optional); on nullopt, end this fuzz
// iteration by returning 0, otherwise bind the contained value as
// `TYPE NAME`. Note: declares `maybe_NAME` in the enclosing scope.
#define ASSIGN_OR_RETURN(TYPE, NAME, EXPR)                                     \
  auto maybe_##NAME = EXPR;                                                    \
  if (!maybe_##NAME)                                                           \
    return 0;                                                                  \
  TYPE NAME = *maybe_##NAME
127 
128 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t remainder) {
129   LIBC_NAMESPACE::FreeListHeapBuffer<heap_size> heap;
130   AllocVec allocs(heap);
131 
132   uint8_t canary = 0;
133   while (true) {
134     ASSIGN_OR_RETURN(auto, should_alloc, choose<bool>(data, remainder));
135     if (should_alloc) {
136       ASSIGN_OR_RETURN(auto, alloc_type, choose<AllocType>(data, remainder));
137       ASSIGN_OR_RETURN(size_t, alloc_size, choose_size(data, remainder));
138 
139       // Perform allocation.
140       void *ptr = nullptr;
141       size_t alignment = alignof(max_align_t);
142       switch (alloc_type) {
143       case AllocType::MALLOC:
144         ptr = heap.allocate(alloc_size);
145         break;
146       case AllocType::ALIGNED_ALLOC: {
147         ASSIGN_OR_RETURN(size_t, alignment, choose_size(data, remainder));
148         alignment = LIBC_NAMESPACE::cpp::bit_ceil(alignment);
149         ptr = heap.aligned_allocate(alignment, alloc_size);
150         break;
151       }
152       case AllocType::REALLOC: {
153         if (!alloc_size)
154           return 0;
155         ASSIGN_OR_RETURN(size_t, idx,
156                          choose_alloc_idx(allocs, data, remainder));
157         Alloc &alloc = allocs[idx];
158         ptr = heap.realloc(alloc.ptr, alloc_size);
159         if (ptr) {
160           // Extend the canary region if necessary.
161           if (alloc_size > alloc.size)
162             inline_memset(static_cast<char *>(ptr) + alloc.size, alloc.canary,
163                           alloc_size - alloc.size);
164           alloc.ptr = ptr;
165           alloc.size = alloc_size;
166           alloc.alignment = alignof(max_align_t);
167         }
168         break;
169       }
170       case AllocType::CALLOC: {
171         ASSIGN_OR_RETURN(size_t, count, choose_size(data, remainder));
172         size_t total;
173         if (__builtin_mul_overflow(count, alloc_size, &total))
174           return 0;
175         ptr = heap.calloc(count, alloc_size);
176         if (ptr)
177           for (size_t i = 0; i < total; ++i)
178             if (static_cast<char *>(ptr)[i] != 0)
179               __builtin_trap();
180         break;
181       }
182       case AllocType::NUM_ALLOC_TYPES:
183         __builtin_unreachable();
184       }
185 
186       if (ptr) {
187         // aligned_allocate should automatically apply a minimum alignment.
188         if (alignment < alignof(max_align_t))
189           alignment = alignof(max_align_t);
190         // Check alignment.
191         if (reinterpret_cast<uintptr_t>(ptr) % alignment)
192           __builtin_trap();
193 
194         // Reallocation is treated specially above, since we would otherwise
195         // lose the original size.
196         if (alloc_type != AllocType::REALLOC) {
197           // Fill the object with a canary byte.
198           inline_memset(ptr, canary, alloc_size);
199 
200           // Track the allocation.
201           if (!allocs.push_back({ptr, alloc_size, alignment, canary}))
202             return 0;
203           ++canary;
204         }
205       }
206     } else {
207       // Select a random allocation.
208       ASSIGN_OR_RETURN(size_t, idx, choose_alloc_idx(allocs, data, remainder));
209       Alloc &alloc = allocs[idx];
210 
211       // Check alignment.
212       if (reinterpret_cast<uintptr_t>(alloc.ptr) % alloc.alignment)
213         __builtin_trap();
214 
215       // Check the canary.
216       uint8_t *ptr = reinterpret_cast<uint8_t *>(alloc.ptr);
217       for (size_t i = 0; i < alloc.size; ++i)
218         if (ptr[i] != alloc.canary)
219           __builtin_trap();
220 
221       // Free the allocation and untrack it.
222       heap.free(alloc.ptr);
223       allocs.erase_idx(idx);
224     }
225   }
226   return 0;
227 }
228