//===-- asan_fake_stack.cpp -----------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// FakeStack is used to detect use-after-return bugs.
//===----------------------------------------------------------------------===//

#include "asan_allocator.h"
#include "asan_poisoning.h"
#include "asan_thread.h"

namespace __asan {

static const u64 kMagic1 = kAsanStackAfterReturnMagic;
static const u64 kMagic2 = (kMagic1 << 8) | kMagic1;
static const u64 kMagic4 = (kMagic2 << 16) | kMagic2;
static const u64 kMagic8 = (kMagic4 << 32) | kMagic4;
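// kMagicN is kMagic1 replicated into the low N bytes of a u64; kMagic8 fills
// all 8 bytes, letting SetShadow below poison 8 shadow bytes per 64-bit store.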

static const u64 kAllocaRedzoneSize = 32UL;
static const u64 kAllocaRedzoneMask = 31UL;

// For small size classes inline PoisonShadow for better performance.
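// E.g. with ASAN_SHADOW_SCALE == 3, a class_id 2 frame is 256 bytes, i.e. 32
// shadow bytes, which the inline loop below covers with 1 << 2 == 4 u64
// stores of `magic`.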
ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
  u64 *shadow = reinterpret_cast<u64 *>(MemToShadow(ptr));
  if (ASAN_SHADOW_SCALE == 3 && class_id <= 6) {
    // This code expects ASAN_SHADOW_SCALE=3.
    for (uptr i = 0; i < (((uptr)1) << class_id); i++) {
      shadow[i] = magic;
      // Make sure this does not become memset.
      SanitizerBreakOptimization(nullptr);
    }
  } else {
    // The size class is too big, it's cheaper to poison only size bytes.
    PoisonShadow(ptr, size, static_cast<u8>(magic));
  }
}

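// The fake stack is a single mmap-ed region: per-class flag arrays followed
// by one 2^stack_size_log-byte run of frames per size class (see RequiredSize
// in asan_fake_stack.h), so classes with larger frames hold fewer of them.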
FakeStack *FakeStack::Create(uptr stack_size_log) {
  static uptr kMinStackSizeLog = 16;
  static uptr kMaxStackSizeLog = FIRST_32_SECOND_64(24, 28);
  if (stack_size_log < kMinStackSizeLog)
    stack_size_log = kMinStackSizeLog;
  if (stack_size_log > kMaxStackSizeLog)
    stack_size_log = kMaxStackSizeLog;
  uptr size = RequiredSize(stack_size_log);
  FakeStack *res = reinterpret_cast<FakeStack *>(
      flags()->uar_noreserve ? MmapNoReserveOrDie(size, "FakeStack")
                             : MmapOrDie(size, "FakeStack"));
  res->stack_size_log_ = stack_size_log;
  u8 *p = reinterpret_cast<u8 *>(res);
  VReport(1,
          "T%d: FakeStack created: %p -- %p stack_size_log: %zd; "
          "mmapped %zdK, noreserve=%d\n",
          GetCurrentTidOrInvalid(), (void *)p,
          (void *)(p + FakeStack::RequiredSize(stack_size_log)), stack_size_log,
          size >> 10, flags()->uar_noreserve);
  return res;
}

void FakeStack::Destroy(int tid) {
  PoisonAll(0);
  if (Verbosity() >= 2) {
    InternalScopedString str;
    for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++)
      str.AppendF("%zd: %zd/%zd; ", class_id, hint_position_[class_id],
                  NumberOfFrames(stack_size_log(), class_id));
    Report("T%d: FakeStack destroyed: %s\n", tid, str.data());
  }
  uptr size = RequiredSize(stack_size_log_);
  FlushUnneededASanShadowMemory(reinterpret_cast<uptr>(this), size);
  UnmapOrDie(this, size);
}

void FakeStack::PoisonAll(u8 magic) {
  PoisonShadow(reinterpret_cast<uptr>(this), RequiredSize(stack_size_log()),
               magic);
}

#if !defined(_MSC_VER) || defined(__clang__)
ALWAYS_INLINE USED
#endif
FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
                               uptr real_stack) {
  CHECK_LT(class_id, kNumberOfSizeClasses);
  if (needs_gc_)
    GC(real_stack);
  uptr &hint_position = hint_position_[class_id];
  const int num_iter = NumberOfFrames(stack_size_log, class_id);
  u8 *flags = GetFlags(stack_size_log, class_id);
  for (int i = 0; i < num_iter; i++) {
    uptr pos = ModuloNumberOfFrames(stack_size_log, class_id, hint_position++);
    // This part is tricky. On one hand, checking and setting flags[pos]
    // should be atomic to ensure async-signal safety. But on the other hand,
    // if the signal arrives between checking and setting flags[pos], the
    // signal handler's fake stack will start from a different hint_position
    // and so will not touch this particular byte. So, it is safe to do this
    // with regular non-atomic load and store (at least I was not able to make
    // this code crash).
    if (flags[pos]) continue;
    flags[pos] = 1;
    FakeFrame *res = reinterpret_cast<FakeFrame *>(
        GetFrame(stack_size_log, class_id, pos));
    res->real_stack = real_stack;
    *SavedFlagPtr(reinterpret_cast<uptr>(res), class_id) = &flags[pos];
    return res;
  }
  return nullptr;  // We are out of fake stack.
}

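// Example: with stack_size_log == 16 each class occupies 64 KiB of frames, so
// for ptr == beg + 0x28000: class_id = 0x28000 >> 16 = 2 (256-byte frames),
// base = beg + 0x20000, pos = 0x8000 >> 8 = 128, and the frame starts at
// base + 128 * 256 == base + 0x8000.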
uptr FakeStack::AddrIsInFakeStack(uptr ptr, uptr *frame_beg, uptr *frame_end) {
  uptr stack_size_log = this->stack_size_log();
  uptr beg = reinterpret_cast<uptr>(GetFrame(stack_size_log, 0, 0));
  uptr end = reinterpret_cast<uptr>(this) + RequiredSize(stack_size_log);
  if (ptr < beg || ptr >= end) return 0;
  uptr class_id = (ptr - beg) >> stack_size_log;
  uptr base = beg + (class_id << stack_size_log);
  CHECK_LE(base, ptr);
  CHECK_LT(ptr, base + (((uptr)1) << stack_size_log));
  uptr pos = (ptr - base) >> (kMinStackFrameSizeLog + class_id);
  uptr res = base + pos * BytesInSizeClass(class_id);
  *frame_end = res + BytesInSizeClass(class_id);
  *frame_beg = res + sizeof(FakeFrame);
  return res;
}

void FakeStack::HandleNoReturn() {
  needs_gc_ = true;
}

// Hack: The statement below is not true if we take into account sigaltstack or
// makecontext. It should be possible to make the GC discard wrong stack frames
// if we use these tools. For now, let's support the simplest case and allow
// the GC to discard only frames from the default stack, assuming there is no
// buffer on the stack which is used for makecontext or sigaltstack.
//
// When a throw, longjmp or some such happens, we don't call OnFree() and as a
// result may leak one or more fake frames; the good news is that we are
// notified about all such events by HandleNoReturn().
// If we recently had such a no-return event, we need to collect garbage
// frames. We do it based on their 'real_stack' values -- everything that is
// lower than the current real_stack is garbage.
NOINLINE void FakeStack::GC(uptr real_stack) {
  AsanThread *curr_thread = GetCurrentThread();
  if (!curr_thread)
    return;  // Try again when we have a thread.
  auto top = curr_thread->stack_top();
  auto bottom = curr_thread->stack_bottom();
  if (real_stack < bottom || real_stack > top)
    return;  // Not the default stack.

  for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
    u8 *flags = GetFlags(stack_size_log(), class_id);
    for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
         i++) {
      if (flags[i] == 0) continue;  // not allocated.
      FakeFrame *ff = reinterpret_cast<FakeFrame *>(
          GetFrame(stack_size_log(), class_id, i));
      // GC only on the default stack.
      if (bottom < ff->real_stack && ff->real_stack < real_stack) {
        flags[i] = 0;
        // Poison the frame, so that any access will be reported as UAR.
        SetShadow(reinterpret_cast<uptr>(ff), BytesInSizeClass(class_id),
                  class_id, kMagic8);
      }
    }
  }
  needs_gc_ = false;
}

void FakeStack::ForEachFakeFrame(RangeIteratorCallback callback, void *arg) {
  for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
    u8 *flags = GetFlags(stack_size_log(), class_id);
    for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
         i++) {
      if (flags[i] == 0) continue;  // not allocated.
      FakeFrame *ff = reinterpret_cast<FakeFrame *>(
          GetFrame(stack_size_log(), class_id, i));
      uptr begin = reinterpret_cast<uptr>(ff);
      callback(begin, begin + FakeStack::BytesInSizeClass(class_id), arg);
    }
  }
}

#if (SANITIZER_LINUX && !SANITIZER_ANDROID) || SANITIZER_FUCHSIA
static THREADLOCAL FakeStack *fake_stack_tls;

FakeStack *GetTLSFakeStack() {
  return fake_stack_tls;
}
void SetTLSFakeStack(FakeStack *fs) {
  fake_stack_tls = fs;
}
#else
FakeStack *GetTLSFakeStack() { return 0; }
void SetTLSFakeStack(FakeStack *fs) { }
#endif  // (SANITIZER_LINUX && !SANITIZER_ANDROID) || SANITIZER_FUCHSIA

static FakeStack *GetFakeStack() {
  AsanThread *t = GetCurrentThread();
  if (!t) return nullptr;
  return t->get_or_create_fake_stack();
}

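// GetFakeStackFast bails out unless stack-use-after-return detection is
// enabled at runtime; GetFakeStackFastAlways skips that check and backs the
// "always" entry points below (-fsanitize-address-use-after-return=always).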
static FakeStack *GetFakeStackFast() {
  if (FakeStack *fs = GetTLSFakeStack())
    return fs;
  if (!__asan_option_detect_stack_use_after_return)
    return nullptr;
  return GetFakeStack();
}

static FakeStack *GetFakeStackFastAlways() {
  if (FakeStack *fs = GetTLSFakeStack())
    return fs;
  return GetFakeStack();
}

static ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size) {
  FakeStack *fs = GetFakeStackFast();
  if (!fs)
    return 0;
  FakeFrame *ff =
      fs->Allocate(fs->stack_size_log(), class_id, GET_CURRENT_FRAME());
  if (!ff)
    return 0;  // Out of fake stack.
  uptr ptr = reinterpret_cast<uptr>(ff);
  SetShadow(ptr, size, class_id, 0);
  return ptr;
}

static ALWAYS_INLINE uptr OnMallocAlways(uptr class_id, uptr size) {
  FakeStack *fs = GetFakeStackFastAlways();
  if (!fs)
    return 0;
  FakeFrame *ff =
      fs->Allocate(fs->stack_size_log(), class_id, GET_CURRENT_FRAME());
  if (!ff)
    return 0;  // Out of fake stack.
  uptr ptr = reinterpret_cast<uptr>(ff);
  SetShadow(ptr, size, class_id, 0);
  return ptr;
}

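// Deallocate clears the flag byte whose address was stashed in the frame by
// Allocate (see SavedFlagPtr), making the slot reusable; the shadow is then
// repainted with kMagic8 so any later access is reported as
// stack-use-after-return.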
static ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size) {
  FakeStack::Deallocate(ptr, class_id);
  SetShadow(ptr, size, class_id, kMagic8);
}

}  // namespace __asan

// ---------------------- Interface ---------------- {{{1
using namespace __asan;
#define DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(class_id)                      \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr                               \
      __asan_stack_malloc_##class_id(uptr size) {                             \
    return OnMalloc(class_id, size);                                          \
  }                                                                           \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr                               \
      __asan_stack_malloc_always_##class_id(uptr size) {                      \
    return OnMallocAlways(class_id, size);                                    \
  }                                                                           \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE void __asan_stack_free_##class_id( \
      uptr ptr, uptr size) {                                                  \
    OnFree(ptr, class_id, size);                                              \
  }

DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(0)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(1)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(2)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(3)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(4)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(5)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(6)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(7)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(8)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(9)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(10)
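// class_id N serves frames of 64 << N bytes (BytesInSizeClass), i.e. 64 bytes
// for class 0 up to 64 KiB for class 10; the instrumentation picks the
// smallest class that fits the function's frame.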

extern "C" {
// TODO: remove this method and fix tests that use it by setting
// -asan-use-after-return=never, after modal UAR flag lands
// (https://github.com/google/sanitizers/issues/1394)
SANITIZER_INTERFACE_ATTRIBUTE
void *__asan_get_current_fake_stack() { return GetFakeStackFast(); }

SANITIZER_INTERFACE_ATTRIBUTE
void *__asan_addr_is_in_fake_stack(void *fake_stack, void *addr, void **beg,
                                   void **end) {
  FakeStack *fs = reinterpret_cast<FakeStack *>(fake_stack);
  if (!fs) return nullptr;
  uptr frame_beg, frame_end;
  FakeFrame *frame = reinterpret_cast<FakeFrame *>(fs->AddrIsInFakeStack(
      reinterpret_cast<uptr>(addr), &frame_beg, &frame_end));
  if (!frame) return nullptr;
  if (frame->magic != kCurrentStackFrameMagic)
    return nullptr;
  if (beg) *beg = reinterpret_cast<void *>(frame_beg);
  if (end) *end = reinterpret_cast<void *>(frame_end);
  return reinterpret_cast<void *>(frame->real_stack);
}

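// Example: for addr == 0x1000, size == 100 and 8-byte shadow granularity:
// the left redzone is [0xfe0, 0x1000); PartialRzAddr == 0x1064, so the
// granule at 0x1060 keeps 4 addressable bytes, [0x1068, 0x1080) becomes the
// partial right redzone, and [0x1080, 0x10a0) the full right redzone.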
SANITIZER_INTERFACE_ATTRIBUTE
void __asan_alloca_poison(uptr addr, uptr size) {
  uptr LeftRedzoneAddr = addr - kAllocaRedzoneSize;
  uptr PartialRzAddr = addr + size;
  uptr RightRzAddr = (PartialRzAddr + kAllocaRedzoneMask) & ~kAllocaRedzoneMask;
  uptr PartialRzAligned = PartialRzAddr & ~(ASAN_SHADOW_GRANULARITY - 1);
  FastPoisonShadow(LeftRedzoneAddr, kAllocaRedzoneSize, kAsanAllocaLeftMagic);
  FastPoisonShadowPartialRightRedzone(
      PartialRzAligned, PartialRzAddr % ASAN_SHADOW_GRANULARITY,
      RightRzAddr - PartialRzAligned, kAsanAllocaRightMagic);
  FastPoisonShadow(RightRzAddr, kAllocaRedzoneSize, kAsanAllocaRightMagic);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __asan_allocas_unpoison(uptr top, uptr bottom) {
  if ((!top) || (top > bottom)) return;
  REAL(memset)
  (reinterpret_cast<void *>(MemToShadow(top)), 0,
   (bottom - top) / ASAN_SHADOW_GRANULARITY);
}
}  // extern "C"