//===-- hwasan.cpp --------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of HWAddressSanitizer.
//
// HWAddressSanitizer runtime.
//===----------------------------------------------------------------------===//

#include "hwasan.h"

#include "hwasan_checks.h"
#include "hwasan_dynamic_shadow.h"
#include "hwasan_globals.h"
#include "hwasan_mapping.h"
#include "hwasan_poisoning.h"
#include "hwasan_report.h"
#include "hwasan_thread.h"
#include "hwasan_thread_list.h"
#include "sanitizer_common/sanitizer_atomic.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_flag_parser.h"
#include "sanitizer_common/sanitizer_flags.h"
#include "sanitizer_common/sanitizer_interface_internal.h"
#include "sanitizer_common/sanitizer_libc.h"
#include "sanitizer_common/sanitizer_procmaps.h"
#include "sanitizer_common/sanitizer_stackdepot.h"
#include "sanitizer_common/sanitizer_stacktrace.h"
#include "sanitizer_common/sanitizer_symbolizer.h"
#include "ubsan/ubsan_flags.h"
#include "ubsan/ubsan_init.h"

// ACHTUNG! No system header includes in this file.

using namespace __sanitizer;

namespace __hwasan {

static Flags hwasan_flags;

Flags *flags() {
  return &hwasan_flags;
}

int hwasan_inited = 0;
int hwasan_instrumentation_inited = 0;
bool hwasan_init_is_running;

int hwasan_report_count = 0;

uptr kLowShadowStart;
uptr kLowShadowEnd;
uptr kHighShadowStart;
uptr kHighShadowEnd;

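// Both SetDefaults() and RegisterHwasanFlags() below expand the X-macro list
// in hwasan_flags.inc: each HWASAN_FLAG(Type, Name, DefaultValue, Description)
// entry becomes a default assignment and a parser registration, so the flag
// set is defined in exactly one place.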
void Flags::SetDefaults() {
#define HWASAN_FLAG(Type, Name, DefaultValue, Description) Name = DefaultValue;
#include "hwasan_flags.inc"
#undef HWASAN_FLAG
}

static void RegisterHwasanFlags(FlagParser *parser, Flags *f) {
#define HWASAN_FLAG(Type, Name, DefaultValue, Description) \
  RegisterFlag(parser, #Name, Description, &f->Name);
#include "hwasan_flags.inc"
#undef HWASAN_FLAG
}

static void InitializeFlags() {
  SetCommonFlagsDefaults();
  {
    CommonFlags cf;
    cf.CopyFrom(*common_flags());
    cf.external_symbolizer_path = GetEnv("HWASAN_SYMBOLIZER_PATH");
    cf.malloc_context_size = 20;
    cf.handle_ioctl = true;
    // FIXME: test and enable.
    cf.check_printf = false;
    cf.intercept_tls_get_addr = true;
    cf.exitcode = 99;
    // 8 shadow pages ~512kB, small enough to cover common stack sizes.
    cf.clear_shadow_mmap_threshold = 4096 * (SANITIZER_ANDROID ? 2 : 8);
    // Sigtrap is used in error reporting.
    cf.handle_sigtrap = kHandleSignalExclusive;
    // For now only tested on Linux and Fuchsia. Other platforms can be turned
    // on as they become ready.
    constexpr bool can_detect_leaks =
        (SANITIZER_LINUX && !SANITIZER_ANDROID) || SANITIZER_FUCHSIA;
    cf.detect_leaks = cf.detect_leaks && can_detect_leaks;

#if SANITIZER_ANDROID
    // Let the platform handle other signals. It is better at reporting them
    // than we are.
    cf.handle_segv = kHandleSignalNo;
    cf.handle_sigbus = kHandleSignalNo;
    cf.handle_abort = kHandleSignalNo;
    cf.handle_sigill = kHandleSignalNo;
    cf.handle_sigfpe = kHandleSignalNo;
#endif
    OverrideCommonFlags(cf);
  }

  Flags *f = flags();
  f->SetDefaults();

  FlagParser parser;
  RegisterHwasanFlags(&parser, f);
  RegisterCommonFlags(&parser);

#if CAN_SANITIZE_LEAKS
  __lsan::Flags *lf = __lsan::flags();
  lf->SetDefaults();

  FlagParser lsan_parser;
  __lsan::RegisterLsanFlags(&lsan_parser, lf);
  RegisterCommonFlags(&lsan_parser);
#endif

#if HWASAN_CONTAINS_UBSAN
  __ubsan::Flags *uf = __ubsan::flags();
  uf->SetDefaults();

  FlagParser ubsan_parser;
  __ubsan::RegisterUbsanFlags(&ubsan_parser, uf);
  RegisterCommonFlags(&ubsan_parser);
#endif

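  // Flag sources are applied in order of increasing precedence: compiled-in
  // defaults, then the __hwasan_default_options() callback (and the
  // LSan/UBSan equivalents), then the HWASAN_OPTIONS / LSAN_OPTIONS /
  // UBSAN_OPTIONS environment variables parsed below, so later sources
  // override earlier ones.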
  // Override from user-specified string.
  if (__hwasan_default_options)
    parser.ParseString(__hwasan_default_options());
#if CAN_SANITIZE_LEAKS
  lsan_parser.ParseString(__lsan_default_options());
#endif
#if HWASAN_CONTAINS_UBSAN
  const char *ubsan_default_options = __ubsan_default_options();
  ubsan_parser.ParseString(ubsan_default_options);
#endif

  parser.ParseStringFromEnv("HWASAN_OPTIONS");
#if CAN_SANITIZE_LEAKS
  lsan_parser.ParseStringFromEnv("LSAN_OPTIONS");
#endif
#if HWASAN_CONTAINS_UBSAN
  ubsan_parser.ParseStringFromEnv("UBSAN_OPTIONS");
#endif

  InitializeCommonFlags();

  if (Verbosity()) ReportUnrecognizedFlags();

  if (common_flags()->help) parser.PrintFlagDescriptions();
  // Flag validation:
  if (!CAN_SANITIZE_LEAKS && common_flags()->detect_leaks) {
    Report("%s: detect_leaks is not supported on this platform.\n",
           SanitizerToolName);
    Die();
  }
}

static void CheckUnwind() {
  GET_FATAL_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME());
  stack.Print();
}

static void HwasanFormatMemoryUsage(InternalScopedString &s) {
  HwasanThreadList &thread_list = hwasanThreadList();
  auto thread_stats = thread_list.GetThreadStats();
  auto sds = StackDepotGetStats();
  AllocatorStatCounters asc;
  GetAllocatorStats(asc);
  s.AppendF(
      "HWASAN pid: %d rss: %zd threads: %zd stacks: %zd"
      " thr_aux: %zd stack_depot: %zd uniq_stacks: %zd"
      " heap: %zd",
      internal_getpid(), GetRSS(), thread_stats.n_live_threads,
      thread_stats.total_stack_size,
      thread_stats.n_live_threads * thread_list.MemoryUsedPerThread(),
      sds.allocated, sds.n_uniq_ids, asc[AllocatorStatMapped]);
}

#if SANITIZER_ANDROID
static constexpr uptr kMemoryUsageBufferSize = 4096;

static char *memory_usage_buffer = nullptr;

static void InitMemoryUsage() {
  memory_usage_buffer =
      (char *)MmapOrDie(kMemoryUsageBufferSize, "memory usage string");
  CHECK(memory_usage_buffer);
  memory_usage_buffer[0] = '\0';
  DecorateMapping((uptr)memory_usage_buffer, kMemoryUsageBufferSize,
                  memory_usage_buffer);
}

void UpdateMemoryUsage() {
  if (!flags()->export_memory_stats)
    return;
  if (!memory_usage_buffer)
    InitMemoryUsage();
  InternalScopedString s;
  HwasanFormatMemoryUsage(s);
  internal_strncpy(memory_usage_buffer, s.data(), kMemoryUsageBufferSize - 1);
  memory_usage_buffer[kMemoryUsageBufferSize - 1] = '\0';
}
#else
void UpdateMemoryUsage() {}
#endif

void HwasanAtExit() {
  if (common_flags()->print_module_map)
    DumpProcessMap();
  if (flags()->print_stats && (flags()->atexit || hwasan_report_count > 0))
    ReportStats();
  if (hwasan_report_count > 0) {
    // ReportAtExitStatistics();
    if (common_flags()->exitcode)
      internal__exit(common_flags()->exitcode);
  }
}

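// Unwinds and reports a tag-mismatch fault. The BufferedStackTrace is placed
// in mmap'ed memory rather than on the stack, presumably because the object
// is large and this runs on the error-reporting path where stack space may be
// scarce.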
void HandleTagMismatch(AccessInfo ai, uptr pc, uptr frame, void *uc,
                       uptr *registers_frame) {
  InternalMmapVector<BufferedStackTrace> stack_buffer(1);
  BufferedStackTrace *stack = stack_buffer.data();
  stack->Reset();
  stack->Unwind(pc, frame, uc, common_flags()->fast_unwind_on_fatal);

  // The second stack frame contains the failing __hwasan_check function, as
  // we have a stack frame for the registers saved in __hwasan_tag_mismatch
  // that we wish to ignore. This (currently) only occurs on AArch64, as x64
  // implementations use SIGTRAP to implement the failure, and thus do not go
  // through the stack saver.
  if (registers_frame && stack->trace && stack->size > 0) {
    stack->trace++;
    stack->size--;
  }

  bool fatal = flags()->halt_on_error || !ai.recover;
  ReportTagMismatch(stack, ai.addr, ai.size, ai.is_store, fatal,
                    registers_frame);
}

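// Decodes the access_info word passed from the __hwasan_tag_mismatch entry
// points: bit 4 (0x10) distinguishes stores from loads, bit 5 (0x20) marks a
// recoverable check, and the low nibble holds log2 of the access size, with
// the special value 0xf meaning the size is passed separately in outsize.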
void HwasanTagMismatch(uptr addr, uptr pc, uptr frame, uptr access_info,
                       uptr *registers_frame, size_t outsize) {
  __hwasan::AccessInfo ai;
  ai.is_store = access_info & 0x10;
  ai.is_load = !ai.is_store;
  ai.recover = access_info & 0x20;
  ai.addr = addr;
  if ((access_info & 0xf) == 0xf)
    ai.size = outsize;
  else
    ai.size = 1 << (access_info & 0xf);

  HandleTagMismatch(ai, pc, frame, nullptr, registers_frame);
}

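// The TLS slot returned by GetCurrentThreadLongPtr() doubles as the thread's
// stack-allocations ring buffer header: a zero value means HWASan has not
// initialized this thread (or has already torn it down), and otherwise the
// Thread object is recovered from the ring buffer's current position via the
// thread list.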
Thread *GetCurrentThread() {
  uptr *ThreadLongPtr = GetCurrentThreadLongPtr();
  if (UNLIKELY(*ThreadLongPtr == 0))
    return nullptr;
  auto *R = (StackAllocationsRingBuffer *)ThreadLongPtr;
  return hwasanThreadList().GetThreadByBufferAddress((uptr)R->Next());
}

} // namespace __hwasan

using namespace __hwasan;

void __sanitizer::BufferedStackTrace::UnwindImpl(
    uptr pc, uptr bp, void *context, bool request_fast, u32 max_depth) {
  Thread *t = GetCurrentThread();
  if (!t) {
    // The thread is still being created, or has already been destroyed.
    size = 0;
    return;
  }
  Unwind(max_depth, pc, bp, context, t->stack_top(), t->stack_bottom(),
         request_fast);
}

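// Tags the memory occupied by one instrumented global: whole 16-byte granules
// get the global's tag, and a trailing partial granule is marked as a short
// granule by storing the number of used bytes (global.size() % 16) in its
// shadow.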
static bool InitializeSingleGlobal(const hwasan_global &global) {
  uptr full_granule_size = RoundDownTo(global.size(), 16);
  TagMemoryAligned(global.addr(), full_granule_size, global.tag());
  if (global.size() % 16)
    TagMemoryAligned(global.addr() + full_granule_size, 16,
                     global.size() % 16);
  return false;
}

static void InitLoadedGlobals() {
  // Fuchsia's libc provides a hook (__sanitizer_module_loaded) that runs on
  // the startup path which calls into __hwasan_library_loaded on all
  // initially loaded modules, so explicitly registering the globals here
  // isn't needed.
  if constexpr (!SANITIZER_FUCHSIA) {
    dl_iterate_phdr(
        [](dl_phdr_info *info, size_t /* size */, void * /* data */) -> int {
          for (const hwasan_global &global : HwasanGlobalsFor(
                   info->dlpi_addr, info->dlpi_phdr, info->dlpi_phnum))
            InitializeSingleGlobal(global);
          return 0;
        },
        nullptr);
  }
}

// Prepare to run instrumented code on the main thread.
static void InitInstrumentation() {
  if (hwasan_instrumentation_inited) return;

  InitializeOsSupport();

  if (!InitShadow()) {
    Printf("FATAL: HWAddressSanitizer cannot mmap the shadow memory.\n");
    DumpProcessMap();
    Die();
  }

  InitThreads();

  hwasan_instrumentation_inited = 1;
}

// Interface.

uptr __hwasan_shadow_memory_dynamic_address; // Global interface symbol.

// This function was used by the old frame descriptor mechanism. We keep it
// around to avoid breaking ABI.
void __hwasan_init_frames(uptr beg, uptr end) {}

void __hwasan_init_static() {
  InitShadowGOT();
  InitInstrumentation();

  // In the non-static code path we call dl_iterate_phdr here. But at this
  // point libc might not have been initialized enough for dl_iterate_phdr to
  // work. Fortunately, since this is a statically linked executable we can
  // use the linker-defined symbol __ehdr_start to find the only relevant set
  // of phdrs.
  extern ElfW(Ehdr) __ehdr_start;
  for (const hwasan_global &global : HwasanGlobalsFor(
           /* base */ 0,
           reinterpret_cast<const ElfW(Phdr) *>(
               reinterpret_cast<const char *>(&__ehdr_start) +
               __ehdr_start.e_phoff),
           __ehdr_start.e_phnum))
    InitializeSingleGlobal(global);
}

__attribute__((constructor(0))) void __hwasan_init() {
  CHECK(!hwasan_init_is_running);
  if (hwasan_inited) return;
  hwasan_init_is_running = 1;
  SanitizerToolName = "HWAddressSanitizer";

  InitTlsSize();

  CacheBinaryName();
  InitializeFlags();

  // Install tool-specific callbacks in sanitizer_common.
  SetCheckUnwindCallback(CheckUnwind);

  __sanitizer_set_report_path(common_flags()->log_path);

  AndroidTestTlsSlot();

  DisableCoreDumperIfNecessary();

  InitInstrumentation();
  InitLoadedGlobals();

  // Needs to be called here because flags()->random_tags might not have been
  // initialized when InitInstrumentation() was called.
  GetCurrentThread()->EnsureRandomStateInited();

  SetPrintfAndReportCallback(AppendToErrorMessageBuffer);
  // This may call libc -> needs initialized shadow.
  AndroidLogInit();

  InitializeInterceptors();
  InstallDeadlySignalHandlers(HwasanOnDeadlySignal);
  InstallAtExitHandler(); // Needs __cxa_atexit interceptor.

  InitializeCoverage(common_flags()->coverage, common_flags()->coverage_dir);

  HwasanTSDInit();
  HwasanTSDThreadInit();

  HwasanAllocatorInit();
  HwasanInstallAtForkHandler();

  if (CAN_SANITIZE_LEAKS) {
    __lsan::InitCommonLsan();
    InstallAtExitCheckLeaks();
  }

#if HWASAN_CONTAINS_UBSAN
  __ubsan::InitAsPlugin();
#endif

  if (CAN_SANITIZE_LEAKS && common_flags()->detect_leaks) {
    __lsan::ScopedInterceptorDisabler disabler;
    Symbolizer::LateInitialize();
  }

  VPrintf(1, "HWAddressSanitizer init done\n");

  hwasan_init_is_running = 0;
  hwasan_inited = 1;
}

void __hwasan_library_loaded(ElfW(Addr) base, const ElfW(Phdr) * phdr,
                             ElfW(Half) phnum) {
  for (const hwasan_global &global : HwasanGlobalsFor(base, phdr, phnum))
    InitializeSingleGlobal(global);
}

void __hwasan_library_unloaded(ElfW(Addr) base, const ElfW(Phdr) * phdr,
                               ElfW(Half) phnum) {
  for (; phnum != 0; ++phdr, --phnum)
    if (phdr->p_type == PT_LOAD)
      TagMemory(base + phdr->p_vaddr, phdr->p_memsz, 0);
}

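// Prints the shadow (one tag byte per 16-byte granule) covering [p, p + sz).
// Shadow values below kShadowAlignment denote short granules; for those the
// real tag, stored in the last byte of the granule itself, is printed in
// parentheses.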
void __hwasan_print_shadow(const void *p, uptr sz) {
  uptr ptr_raw = UntagAddr(reinterpret_cast<uptr>(p));
  uptr shadow_first = MemToShadow(ptr_raw);
  uptr shadow_last = MemToShadow(ptr_raw + sz - 1);
  Printf("HWASan shadow map for %zx .. %zx (pointer tag %x)\n", ptr_raw,
         ptr_raw + sz, GetTagFromPointer((uptr)p));
  for (uptr s = shadow_first; s <= shadow_last; ++s) {
    tag_t mem_tag = *reinterpret_cast<tag_t *>(s);
    uptr granule_addr = ShadowToMem(s);
    if (mem_tag && mem_tag < kShadowAlignment)
      Printf(" %zx: %02x(%02x)\n", granule_addr, mem_tag,
             *reinterpret_cast<tag_t *>(granule_addr + kShadowAlignment - 1));
    else
      Printf(" %zx: %02x\n", granule_addr, mem_tag);
  }
}

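// Returns -1 if the entire range [p, p + sz) is addressable with the
// pointer's tag, and otherwise the offset of the first inaccessible byte.
// Both full granules and a short-granule tail are taken into account.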
sptr __hwasan_test_shadow(const void *p, uptr sz) {
  if (sz == 0)
    return -1;
  uptr ptr = reinterpret_cast<uptr>(p);
  tag_t ptr_tag = GetTagFromPointer(ptr);
  uptr ptr_raw = UntagAddr(ptr);
  uptr shadow_first = MemToShadow(ptr_raw);
  uptr shadow_last = MemToShadow(ptr_raw + sz);
  for (uptr s = shadow_first; s < shadow_last; ++s) {
    if (UNLIKELY(*(tag_t *)s != ptr_tag)) {
      uptr short_size =
          ShortTagSize(*(tag_t *)s, AddTagToPointer(ShadowToMem(s), ptr_tag));
      sptr offset = ShadowToMem(s) - ptr_raw + short_size;
      return offset < 0 ? 0 : offset;
    }
  }

  uptr end = ptr + sz;
  uptr tail_sz = end & (kShadowAlignment - 1);
  if (!tail_sz)
    return -1;

  uptr short_size =
      ShortTagSize(*(tag_t *)shadow_last, end & ~(kShadowAlignment - 1));
  if (LIKELY(tail_sz <= short_size))
    return -1;

  sptr offset = sz - tail_sz + short_size;
  return offset < 0 ? 0 : offset;
}

u16 __sanitizer_unaligned_load16(const uu16 *p) {
  return *p;
}
u32 __sanitizer_unaligned_load32(const uu32 *p) {
  return *p;
}
u64 __sanitizer_unaligned_load64(const uu64 *p) {
  return *p;
}
void __sanitizer_unaligned_store16(uu16 *p, u16 x) {
  *p = x;
}
void __sanitizer_unaligned_store32(uu32 *p, u32 x) {
  *p = x;
}
void __sanitizer_unaligned_store64(uu64 *p, u64 x) {
  *p = x;
}

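// Load/store checking entry points called from instrumented code. The last
// template argument of CheckAddress is log2 of the access size (so
// __hwasan_load8 checks a 2^3 = 8 byte access); CheckAddressSized handles
// arbitrary sizes, and the _noabort variants recover instead of aborting.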
void __hwasan_loadN(uptr p, uptr sz) {
  CheckAddressSized<ErrorAction::Abort, AccessType::Load>(p, sz);
}
void __hwasan_load1(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Load, 0>(p);
}
void __hwasan_load2(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Load, 1>(p);
}
void __hwasan_load4(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Load, 2>(p);
}
void __hwasan_load8(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Load, 3>(p);
}
void __hwasan_load16(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Load, 4>(p);
}

void __hwasan_loadN_noabort(uptr p, uptr sz) {
  CheckAddressSized<ErrorAction::Recover, AccessType::Load>(p, sz);
}
void __hwasan_load1_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Load, 0>(p);
}
void __hwasan_load2_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Load, 1>(p);
}
void __hwasan_load4_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Load, 2>(p);
}
void __hwasan_load8_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Load, 3>(p);
}
void __hwasan_load16_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Load, 4>(p);
}

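// The _match_all variants skip the check entirely when the pointer carries
// the designated match-all tag.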
void __hwasan_loadN_match_all(uptr p, uptr sz, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddressSized<ErrorAction::Abort, AccessType::Load>(p, sz);
}
void __hwasan_load1_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Load, 0>(p);
}
void __hwasan_load2_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Load, 1>(p);
}
void __hwasan_load4_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Load, 2>(p);
}
void __hwasan_load8_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Load, 3>(p);
}
void __hwasan_load16_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Load, 4>(p);
}

void __hwasan_loadN_match_all_noabort(uptr p, uptr sz, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddressSized<ErrorAction::Recover, AccessType::Load>(p, sz);
}
void __hwasan_load1_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Load, 0>(p);
}
void __hwasan_load2_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Load, 1>(p);
}
void __hwasan_load4_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Load, 2>(p);
}
void __hwasan_load8_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Load, 3>(p);
}
void __hwasan_load16_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Load, 4>(p);
}

void __hwasan_storeN(uptr p, uptr sz) {
  CheckAddressSized<ErrorAction::Abort, AccessType::Store>(p, sz);
}
void __hwasan_store1(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Store, 0>(p);
}
void __hwasan_store2(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Store, 1>(p);
}
void __hwasan_store4(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Store, 2>(p);
}
void __hwasan_store8(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Store, 3>(p);
}
void __hwasan_store16(uptr p) {
  CheckAddress<ErrorAction::Abort, AccessType::Store, 4>(p);
}

void __hwasan_storeN_noabort(uptr p, uptr sz) {
  CheckAddressSized<ErrorAction::Recover, AccessType::Store>(p, sz);
}
void __hwasan_store1_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Store, 0>(p);
}
void __hwasan_store2_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Store, 1>(p);
}
void __hwasan_store4_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Store, 2>(p);
}
void __hwasan_store8_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Store, 3>(p);
}
void __hwasan_store16_noabort(uptr p) {
  CheckAddress<ErrorAction::Recover, AccessType::Store, 4>(p);
}

void __hwasan_storeN_match_all(uptr p, uptr sz, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddressSized<ErrorAction::Abort, AccessType::Store>(p, sz);
}
void __hwasan_store1_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Store, 0>(p);
}
void __hwasan_store2_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Store, 1>(p);
}
void __hwasan_store4_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Store, 2>(p);
}
void __hwasan_store8_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Store, 3>(p);
}
void __hwasan_store16_match_all(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Abort, AccessType::Store, 4>(p);
}

void __hwasan_storeN_match_all_noabort(uptr p, uptr sz, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddressSized<ErrorAction::Recover, AccessType::Store>(p, sz);
}
void __hwasan_store1_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Store, 0>(p);
}
void __hwasan_store2_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Store, 1>(p);
}
void __hwasan_store4_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Store, 2>(p);
}
void __hwasan_store8_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Store, 3>(p);
}
void __hwasan_store16_match_all_noabort(uptr p, u8 match_all_tag) {
  if (GetTagFromPointer(p) != match_all_tag)
    CheckAddress<ErrorAction::Recover, AccessType::Store, 4>(p);
}

void __hwasan_tag_memory(uptr p, u8 tag, uptr sz) {
  TagMemoryAligned(UntagAddr(p), sz, tag);
}

uptr __hwasan_tag_pointer(uptr p, u8 tag) {
  return AddTagToPointer(p, tag);
}

u8 __hwasan_get_tag_from_pointer(uptr p) { return GetTagFromPointer(p); }

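// Called on longjmp and similar non-local exits. Clears (tags with 0) the
// stack region between the current SP and the jump destination so that stale
// stack tags left behind by the abandoned frames cannot trigger false
// positives later. Implausible destinations (below SP, or more than 64MB
// away) are ignored with a warning.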
void __hwasan_handle_longjmp(const void *sp_dst) {
  uptr dst = (uptr)sp_dst;
  // HWASan does not support tagged SP.
  CHECK_EQ(GetTagFromPointer(dst), 0);

  uptr sp = (uptr)__builtin_frame_address(0);
  static const uptr kMaxExpectedCleanupSize = 64 << 20; // 64M
  if (dst < sp || dst - sp > kMaxExpectedCleanupSize) {
    Report(
        "WARNING: HWASan is ignoring requested __hwasan_handle_longjmp: "
        "stack top: %p; target %p; distance: %p (%zd)\n"
        "False positive error reports may follow\n",
        (void *)sp, (void *)dst, dst - sp, dst - sp);
    return;
  }
  TagMemory(sp, dst - sp, 0);
}

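// Called after vfork() returns in the parent. The child shares the parent's
// address space and may have tagged stack memory below the parent's saved SP,
// so that region (stack bottom up to sp) is re-tagged to 0. An SP outside the
// thread's recorded stack bounds is ignored with a warning.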
void __hwasan_handle_vfork(const void *sp_dst) {
  uptr sp = (uptr)sp_dst;
  Thread *t = GetCurrentThread();
  CHECK(t);
  uptr top = t->stack_top();
  uptr bottom = t->stack_bottom();
  if (top == 0 || bottom == 0 || sp < bottom || sp >= top) {
    Report(
        "WARNING: HWASan is ignoring requested __hwasan_handle_vfork: "
        "stack top: %zx; current %zx; bottom: %zx \n"
        "False positive error reports may follow\n",
        top, sp, bottom);
    return;
  }
  TagMemory(bottom, sp - bottom, 0);
}

extern "C" void *__hwasan_extra_spill_area() {
  Thread *t = GetCurrentThread();
  return &t->vfork_spill();
}

void __hwasan_print_memory_usage() {
  InternalScopedString s;
  HwasanFormatMemoryUsage(s);
  Printf("%s\n", s.data());
}

static const u8 kFallbackTag = 0xBB & kTagMask;

u8 __hwasan_generate_tag() {
  Thread *t = GetCurrentThread();
  if (!t) return kFallbackTag;
  return t->GenerateRandomTag();
}

void __hwasan_add_frame_record(u64 frame_record_info) {
  Thread *t = GetCurrentThread();
  if (t)
    t->stack_allocations()->push(frame_record_info);
}

#if !SANITIZER_SUPPORTS_WEAK_HOOKS
extern "C" {
SANITIZER_INTERFACE_ATTRIBUTE SANITIZER_WEAK_ATTRIBUTE
const char* __hwasan_default_options() { return ""; }
} // extern "C"
#endif

extern "C" {
SANITIZER_INTERFACE_ATTRIBUTE
void __sanitizer_print_stack_trace() {
  GET_FATAL_STACK_TRACE_PC_BP(StackTrace::GetCurrentPc(), GET_CURRENT_FRAME());
  stack.Print();
}

// Entry point for interoperability between __hwasan_tag_mismatch (ASM) and the
// rest of the mismatch handling code (C++).
void __hwasan_tag_mismatch4(uptr addr, uptr access_info, uptr *registers_frame,
                            size_t outsize) {
  __hwasan::HwasanTagMismatch(addr, (uptr)__builtin_return_address(0),
                              (uptr)__builtin_frame_address(0), access_info,
                              registers_frame, outsize);
}

} // extern "C"

