//===-- freelist_heap_fuzz.cpp --------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// Fuzzing test for llvm-libc freelist-based heap implementation.
///
//===----------------------------------------------------------------------===//

#include "src/__support/CPP/bit.h"
#include "src/__support/CPP/optional.h"
#include "src/__support/freelist_heap.h"
#include "src/string/memory_utils/inline_memcpy.h"
#include "src/string/memory_utils/inline_memmove.h"
#include "src/string/memory_utils/inline_memset.h"

#include <stddef.h>
#include <stdint.h>

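// The freelist heap headers reference the _end and __llvm_libc_heap_limit
// symbols to bound the default heap, so define them here around a small dummy
// .bss region to keep this standalone fuzz target linking.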
asm(R"(
.globl _end, __llvm_libc_heap_limit

.bss
_end:
.fill 1024
__llvm_libc_heap_limit:
)");

using LIBC_NAMESPACE::FreeListHeap;
using LIBC_NAMESPACE::inline_memset;
using LIBC_NAMESPACE::cpp::nullopt;
using LIBC_NAMESPACE::cpp::optional;

// Record of an outstanding allocation.
struct Alloc {
  void *ptr;
  size_t size;
  size_t alignment;
  uint8_t canary; // Byte written to the allocation
};

// A simple vector that tracks allocations using the heap.
class AllocVec {
public:
  AllocVec(FreeListHeap &heap)
      : heap(&heap), allocs(nullptr), size_(0), capacity(0) {}

  bool empty() const { return !size_; }

  size_t size() const { return size_; }

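  // Append a record, growing the backing storage as needed. The storage is
  // allocated from the heap under test, so bookkeeping itself exercises
  // realloc.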
  bool push_back(Alloc alloc) {
    if (size_ == capacity) {
      size_t new_cap = capacity ? capacity * 2 : 1;
      Alloc *new_allocs = reinterpret_cast<Alloc *>(
          heap->realloc(allocs, new_cap * sizeof(Alloc)));
      if (!new_allocs)
        return false;
      allocs = new_allocs;
      capacity = new_cap;
    }
    allocs[size_++] = alloc;
    return true;
  }

  Alloc &operator[](size_t idx) { return allocs[idx]; }

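  // Remove the record at idx by shifting the following records down one slot.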
  void erase_idx(size_t idx) {
    LIBC_NAMESPACE::inline_memmove(&allocs[idx], &allocs[idx + 1],
                                   sizeof(Alloc) * (size_ - idx - 1));
    --size_;
  }

private:
  FreeListHeap *heap;
  Alloc *allocs;
  size_t size_;
  size_t capacity;
};

// Read a T value from the raw libfuzzer bytes, or return nullopt if the input
// is exhausted.
template <typename T>
optional<T> choose(const uint8_t *&data, size_t &remainder) {
  if (sizeof(T) > remainder)
    return nullopt;
  T out;
  LIBC_NAMESPACE::inline_memcpy(&out, data, sizeof(T));
  data += sizeof(T);
  remainder -= sizeof(T);
  return out;
}

// The type of allocation to perform.
enum class AllocType : uint8_t {
  MALLOC,
  ALIGNED_ALLOC,
  REALLOC,
  CALLOC,
  NUM_ALLOC_TYPES,
};

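// Map a raw byte onto one of the valid AllocType values.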
template <>
optional<AllocType> choose<AllocType>(const uint8_t *&data, size_t &remainder) {
  auto raw = choose<uint8_t>(data, remainder);
  if (!raw)
    return nullopt;
  return static_cast<AllocType>(
      *raw % static_cast<uint8_t>(AllocType::NUM_ALLOC_TYPES));
}

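// Size of the heap under test; also the upper bound for fuzzed sizes and
// alignments.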
constexpr size_t heap_size = 64 * 1024;

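// Choose an allocation size (or alignment) below the total heap size.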
optional<size_t> choose_size(const uint8_t *&data, size_t &remainder) {
  auto raw = choose<size_t>(data, remainder);
  if (!raw)
    return nullopt;
  return *raw % heap_size;
}

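// Choose the index of an outstanding allocation, or nullopt if there are none.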
optional<size_t> choose_alloc_idx(const AllocVec &allocs, const uint8_t *&data,
                                  size_t &remainder) {
  if (allocs.empty())
    return nullopt;
  auto raw = choose<size_t>(data, remainder);
  if (!raw)
    return nullopt;
  return *raw % allocs.size();
}

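// Unpack an optional into NAME, or end the fuzzing iteration if the input data
// has run out.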
#define ASSIGN_OR_RETURN(TYPE, NAME, EXPR)                                     \
  auto maybe_##NAME = EXPR;                                                    \
  if (!maybe_##NAME)                                                           \
    return 0;                                                                  \
  TYPE NAME = *maybe_##NAME

extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t remainder) {
  LIBC_NAMESPACE::FreeListHeapBuffer<heap_size> heap;
  AllocVec allocs(heap);

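  // Fill each tracked allocation with its own canary byte; the bytes are
  // checked on free to catch heap corruption.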
  uint8_t canary = 0;
  while (true) {
    ASSIGN_OR_RETURN(auto, should_alloc, choose<bool>(data, remainder));
    if (should_alloc) {
      ASSIGN_OR_RETURN(auto, alloc_type, choose<AllocType>(data, remainder));
      ASSIGN_OR_RETURN(size_t, alloc_size, choose_size(data, remainder));

      // Perform allocation.
      void *ptr = nullptr;
      size_t alignment = alignof(max_align_t);
      switch (alloc_type) {
      case AllocType::MALLOC:
        ptr = heap.allocate(alloc_size);
        break;
      case AllocType::ALIGNED_ALLOC: {
        // Assign to the outer `alignment` so the alignment check and the
        // tracked Alloc record below see the requested alignment.
        ASSIGN_OR_RETURN(size_t, requested_alignment,
                         choose_size(data, remainder));
        alignment = LIBC_NAMESPACE::cpp::bit_ceil(requested_alignment);
        ptr = heap.aligned_allocate(alignment, alloc_size);
        break;
      }
      case AllocType::REALLOC: {
        if (!alloc_size)
          return 0;
        ASSIGN_OR_RETURN(size_t, idx,
                         choose_alloc_idx(allocs, data, remainder));
        Alloc &alloc = allocs[idx];
        ptr = heap.realloc(alloc.ptr, alloc_size);
        if (ptr) {
          // Extend the canary region if necessary.
          if (alloc_size > alloc.size)
            inline_memset(static_cast<char *>(ptr) + alloc.size, alloc.canary,
                          alloc_size - alloc.size);
          alloc.ptr = ptr;
          alloc.size = alloc_size;
          alloc.alignment = alignof(max_align_t);
        }
        break;
      }
      case AllocType::CALLOC: {
        ASSIGN_OR_RETURN(size_t, count, choose_size(data, remainder));
        size_t total;
        if (__builtin_mul_overflow(count, alloc_size, &total))
          return 0;
        ptr = heap.calloc(count, alloc_size);
        if (ptr)
          for (size_t i = 0; i < total; ++i)
            if (static_cast<char *>(ptr)[i] != 0)
              __builtin_trap();
        break;
      }
      case AllocType::NUM_ALLOC_TYPES:
        __builtin_unreachable();
      }

      if (ptr) {
        // aligned_allocate should automatically apply a minimum alignment.
        if (alignment < alignof(max_align_t))
          alignment = alignof(max_align_t);
        // Check alignment.
        if (reinterpret_cast<uintptr_t>(ptr) % alignment)
          __builtin_trap();

        // Reallocation is treated specially above, since we would otherwise
        // lose the original size.
        if (alloc_type != AllocType::REALLOC) {
          // Fill the object with a canary byte.
          inline_memset(ptr, canary, alloc_size);

          // Track the allocation.
          if (!allocs.push_back({ptr, alloc_size, alignment, canary}))
            return 0;
          ++canary;
        }
      }
    } else {
      // Select a random allocation.
      ASSIGN_OR_RETURN(size_t, idx, choose_alloc_idx(allocs, data, remainder));
      Alloc &alloc = allocs[idx];

      // Check alignment.
      if (reinterpret_cast<uintptr_t>(alloc.ptr) % alloc.alignment)
        __builtin_trap();

      // Check the canary.
      uint8_t *ptr = reinterpret_cast<uint8_t *>(alloc.ptr);
      for (size_t i = 0; i < alloc.size; ++i)
        if (ptr[i] != alloc.canary)
          __builtin_trap();

      // Free the allocation and untrack it.
      heap.free(alloc.ptr);
      allocs.erase_idx(idx);
    }
  }
  return 0;
}