//===-- asan_report.cpp ---------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// This file contains error reporting code.
//===----------------------------------------------------------------------===//
#include "asan_report.h"

#include "asan_descriptions.h"
#include "asan_errors.h"
#include "asan_flags.h"
#include "asan_internal.h"
#include "asan_mapping.h"
#include "asan_scariness_score.h"
#include "asan_stack.h"
#include "asan_thread.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_flags.h"
#include "sanitizer_common/sanitizer_interface_internal.h"
#include "sanitizer_common/sanitizer_report_decorator.h"
#include "sanitizer_common/sanitizer_stackdepot.h"
#include "sanitizer_common/sanitizer_symbolizer.h"

namespace __asan {
// -------------------- User-specified callbacks ----------------- {{{1
static void (*error_report_callback)(const char*);
static char *error_message_buffer = nullptr;
static uptr error_message_buffer_pos = 0;
static Mutex error_message_buf_mutex;
static const unsigned kAsanBuggyPcPoolSize = 25;
static __sanitizer::atomic_uintptr_t AsanBuggyPcPool[kAsanBuggyPcPoolSize];

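// Appends |buffer| to a lazily mmap-ed error message buffer. The accumulated
// text is later copied out by ScopedInErrorReport and handed to
// LogFullErrorReport() and the user-registered error report callback.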
void AppendToErrorMessageBuffer(const char *buffer) {
  Lock l(&error_message_buf_mutex);
  if (!error_message_buffer) {
    error_message_buffer =
        (char*)MmapOrDieQuietly(kErrorMessageBufferSize, __func__);
    error_message_buffer_pos = 0;
  }
  uptr length = internal_strlen(buffer);
  RAW_CHECK(kErrorMessageBufferSize >= error_message_buffer_pos);
  uptr remaining = kErrorMessageBufferSize - error_message_buffer_pos;
  internal_strncpy(error_message_buffer + error_message_buffer_pos,
                   buffer, remaining);
  error_message_buffer[kErrorMessageBufferSize - 1] = '\0';
  // FIXME: reallocate the buffer instead of truncating the message.
  error_message_buffer_pos += Min(remaining, length);
}

// ---------------------- Helper functions ----------------------- {{{1

void PrintMemoryByte(InternalScopedString *str, const char *before, u8 byte,
                     bool in_shadow, const char *after) {
  Decorator d;
  str->append("%s%s%x%x%s%s", before,
              in_shadow ? d.ShadowByte(byte) : d.MemoryByte(), byte >> 4,
              byte & 15, d.Default(), after);
}

static void PrintZoneForPointer(uptr ptr, uptr zone_ptr,
                                const char *zone_name) {
  if (zone_ptr) {
    if (zone_name) {
      Printf("malloc_zone_from_ptr(%p) = %p, which is %s\n", (void *)ptr,
             (void *)zone_ptr, zone_name);
    } else {
      Printf("malloc_zone_from_ptr(%p) = %p, which doesn't have a name\n",
             (void *)ptr, (void *)zone_ptr);
    }
  } else {
    Printf("malloc_zone_from_ptr(%p) = 0\n", (void *)ptr);
  }
}

// ---------------------- Address Descriptions ------------------- {{{1

bool ParseFrameDescription(const char *frame_descr,
                           InternalMmapVector<StackVarDescr> *vars) {
  CHECK(frame_descr);
  const char *p;
  // This string is created by the compiler and has the following form:
  // "n alloc_1 alloc_2 ... alloc_n"
  // where alloc_i looks like "offset size len ObjectName"
  // or "offset size len ObjectName:line".
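  // Illustrative example (not taken from any particular compiler output):
  // "1 32 10 6 buffer" would describe a single 10-byte object named "buffer"
  // at frame offset 32.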
  uptr n_objects = (uptr)internal_simple_strtoll(frame_descr, &p, 10);
  if (n_objects == 0)
    return false;

  for (uptr i = 0; i < n_objects; i++) {
    uptr beg = (uptr)internal_simple_strtoll(p, &p, 10);
    uptr size = (uptr)internal_simple_strtoll(p, &p, 10);
    uptr len = (uptr)internal_simple_strtoll(p, &p, 10);
    if (beg == 0 || size == 0 || *p != ' ') {
      return false;
    }
    p++;
    char *colon_pos = internal_strchr(p, ':');
    uptr line = 0;
    uptr name_len = len;
    if (colon_pos != nullptr && colon_pos < p + len) {
      name_len = colon_pos - p;
      line = (uptr)internal_simple_strtoll(colon_pos + 1, nullptr, 10);
    }
    StackVarDescr var = {beg, size, p, name_len, line};
    vars->push_back(var);
    p += len;
  }

  return true;
}

// -------------------- Different kinds of reports ----------------- {{{1

// Use ScopedInErrorReport to run common actions just before and
// immediately after printing error report.
class ScopedInErrorReport {
 public:
  explicit ScopedInErrorReport(bool fatal = false)
      : halt_on_error_(fatal || flags()->halt_on_error) {
    // Make sure the registry and sanitizer report mutexes are locked while
    // we're printing an error report.
    // We can lock them only here to avoid self-deadlock in case of
    // recursive reports.
    asanThreadRegistry().Lock();
    Printf(
        "=================================================================\n");
  }

  ~ScopedInErrorReport() {
    if (halt_on_error_ && !__sanitizer_acquire_crash_state()) {
      asanThreadRegistry().Unlock();
      return;
    }
    ASAN_ON_ERROR();
    if (current_error_.IsValid()) current_error_.Print();

    // Make sure the current thread is announced.
    DescribeThread(GetCurrentThread());
    // We may want to grab this lock again when printing stats.
    asanThreadRegistry().Unlock();
    // Print memory stats.
    if (flags()->print_stats)
      __asan_print_accumulated_stats();

    if (common_flags()->print_cmdline)
      PrintCmdline();

    if (common_flags()->print_module_map == 2)
      DumpProcessMap();

    // Copy the message buffer so that we could start logging without holding a
    // lock that gets acquired during printing.
    InternalMmapVector<char> buffer_copy(kErrorMessageBufferSize);
    {
      Lock l(&error_message_buf_mutex);
      internal_memcpy(buffer_copy.data(),
                      error_message_buffer, kErrorMessageBufferSize);
      // Clear error_message_buffer so that if we find other errors
      // we don't re-log this error.
      error_message_buffer_pos = 0;
    }

    LogFullErrorReport(buffer_copy.data());

    if (error_report_callback) {
      error_report_callback(buffer_copy.data());
    }

    if (halt_on_error_ && common_flags()->abort_on_error) {
      // On Android the message is truncated to 512 characters.
      // FIXME: implement "compact" error format, possibly without, or with
      // highly compressed stack traces?
      // FIXME: or just use the summary line as abort message?
      SetAbortMessage(buffer_copy.data());
    }

    // In halt_on_error = false mode, reset the current error object (before
    // unlocking).
    if (!halt_on_error_)
      internal_memset(&current_error_, 0, sizeof(current_error_));

    if (halt_on_error_) {
      Report("ABORTING\n");
      Die();
    }
  }

  void ReportError(const ErrorDescription &description) {
    // Can only report one error per ScopedInErrorReport.
    CHECK_EQ(current_error_.kind, kErrorKindInvalid);
    internal_memcpy(&current_error_, &description, sizeof(current_error_));
  }

  static ErrorDescription &CurrentError() {
    return current_error_;
  }

 private:
  ScopedErrorReportLock error_report_lock_;
  // Error currently being reported. This enables the destructor to interact
  // with the debugger and point it to an error description.
  static ErrorDescription current_error_;
  bool halt_on_error_;
};

ErrorDescription ScopedInErrorReport::current_error_(LINKER_INITIALIZED);

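// The reporting functions below all follow the same pattern:
//   ScopedInErrorReport in_report(/*fatal*/ ...);
//   Error<Kind> error(GetCurrentTidOrInvalid(), ...);
//   in_report.ReportError(error);
// The error is printed (and, in fatal mode, the process aborted) when
// in_report goes out of scope.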
void ReportDeadlySignal(const SignalContext &sig) {
  ScopedInErrorReport in_report(/*fatal*/ true);
  ErrorDeadlySignal error(GetCurrentTidOrInvalid(), sig);
  in_report.ReportError(error);
}

void ReportDoubleFree(uptr addr, BufferedStackTrace *free_stack) {
  ScopedInErrorReport in_report;
  ErrorDoubleFree error(GetCurrentTidOrInvalid(), free_stack, addr);
  in_report.ReportError(error);
}

void ReportNewDeleteTypeMismatch(uptr addr, uptr delete_size,
                                 uptr delete_alignment,
                                 BufferedStackTrace *free_stack) {
  ScopedInErrorReport in_report;
  ErrorNewDeleteTypeMismatch error(GetCurrentTidOrInvalid(), free_stack, addr,
                                   delete_size, delete_alignment);
  in_report.ReportError(error);
}

void ReportFreeNotMalloced(uptr addr, BufferedStackTrace *free_stack) {
  ScopedInErrorReport in_report;
  ErrorFreeNotMalloced error(GetCurrentTidOrInvalid(), free_stack, addr);
  in_report.ReportError(error);
}

void ReportAllocTypeMismatch(uptr addr, BufferedStackTrace *free_stack,
                             AllocType alloc_type,
                             AllocType dealloc_type) {
  ScopedInErrorReport in_report;
  ErrorAllocTypeMismatch error(GetCurrentTidOrInvalid(), free_stack, addr,
                               alloc_type, dealloc_type);
  in_report.ReportError(error);
}

void ReportMallocUsableSizeNotOwned(uptr addr, BufferedStackTrace *stack) {
  ScopedInErrorReport in_report;
  ErrorMallocUsableSizeNotOwned error(GetCurrentTidOrInvalid(), stack, addr);
  in_report.ReportError(error);
}

void ReportSanitizerGetAllocatedSizeNotOwned(uptr addr,
                                             BufferedStackTrace *stack) {
  ScopedInErrorReport in_report;
  ErrorSanitizerGetAllocatedSizeNotOwned error(GetCurrentTidOrInvalid(), stack,
                                               addr);
  in_report.ReportError(error);
}

void ReportCallocOverflow(uptr count, uptr size, BufferedStackTrace *stack) {
  ScopedInErrorReport in_report(/*fatal*/ true);
  ErrorCallocOverflow error(GetCurrentTidOrInvalid(), stack, count, size);
  in_report.ReportError(error);
}

void ReportReallocArrayOverflow(uptr count, uptr size,
                                BufferedStackTrace *stack) {
  ScopedInErrorReport in_report(/*fatal*/ true);
  ErrorReallocArrayOverflow error(GetCurrentTidOrInvalid(), stack, count, size);
  in_report.ReportError(error);
}

void ReportPvallocOverflow(uptr size, BufferedStackTrace *stack) {
  ScopedInErrorReport in_report(/*fatal*/ true);
  ErrorPvallocOverflow error(GetCurrentTidOrInvalid(), stack, size);
  in_report.ReportError(error);
}

void ReportInvalidAllocationAlignment(uptr alignment,
                                      BufferedStackTrace *stack) {
  ScopedInErrorReport in_report(/*fatal*/ true);
  ErrorInvalidAllocationAlignment error(GetCurrentTidOrInvalid(), stack,
                                        alignment);
  in_report.ReportError(error);
}

void ReportInvalidAlignedAllocAlignment(uptr size, uptr alignment,
                                        BufferedStackTrace *stack) {
  ScopedInErrorReport in_report(/*fatal*/ true);
  ErrorInvalidAlignedAllocAlignment error(GetCurrentTidOrInvalid(), stack,
                                          size, alignment);
  in_report.ReportError(error);
}

void ReportInvalidPosixMemalignAlignment(uptr alignment,
                                         BufferedStackTrace *stack) {
  ScopedInErrorReport in_report(/*fatal*/ true);
  ErrorInvalidPosixMemalignAlignment error(GetCurrentTidOrInvalid(), stack,
                                           alignment);
  in_report.ReportError(error);
}

void ReportAllocationSizeTooBig(uptr user_size, uptr total_size, uptr max_size,
                                BufferedStackTrace *stack) {
  ScopedInErrorReport in_report(/*fatal*/ true);
  ErrorAllocationSizeTooBig error(GetCurrentTidOrInvalid(), stack, user_size,
                                  total_size, max_size);
  in_report.ReportError(error);
}

void ReportRssLimitExceeded(BufferedStackTrace *stack) {
  ScopedInErrorReport in_report(/*fatal*/ true);
  ErrorRssLimitExceeded error(GetCurrentTidOrInvalid(), stack);
  in_report.ReportError(error);
}

void ReportOutOfMemory(uptr requested_size, BufferedStackTrace *stack) {
  ScopedInErrorReport in_report(/*fatal*/ true);
  ErrorOutOfMemory error(GetCurrentTidOrInvalid(), stack, requested_size);
  in_report.ReportError(error);
}

void ReportStringFunctionMemoryRangesOverlap(const char *function,
                                             const char *offset1, uptr length1,
                                             const char *offset2, uptr length2,
                                             BufferedStackTrace *stack) {
  ScopedInErrorReport in_report;
  ErrorStringFunctionMemoryRangesOverlap error(
      GetCurrentTidOrInvalid(), stack, (uptr)offset1, length1, (uptr)offset2,
      length2, function);
  in_report.ReportError(error);
}

void ReportStringFunctionSizeOverflow(uptr offset, uptr size,
                                      BufferedStackTrace *stack) {
  ScopedInErrorReport in_report;
  ErrorStringFunctionSizeOverflow error(GetCurrentTidOrInvalid(), stack, offset,
                                        size);
  in_report.ReportError(error);
}

void ReportBadParamsToAnnotateContiguousContainer(uptr beg, uptr end,
                                                  uptr old_mid, uptr new_mid,
                                                  BufferedStackTrace *stack) {
  ScopedInErrorReport in_report;
  ErrorBadParamsToAnnotateContiguousContainer error(
      GetCurrentTidOrInvalid(), stack, beg, end, old_mid, new_mid);
  in_report.ReportError(error);
}

void ReportODRViolation(const __asan_global *g1, u32 stack_id1,
                        const __asan_global *g2, u32 stack_id2) {
  ScopedInErrorReport in_report;
  ErrorODRViolation error(GetCurrentTidOrInvalid(), g1, stack_id1, g2,
                          stack_id2);
  in_report.ReportError(error);
}

// ----------------------- CheckForInvalidPointerPair ----------- {{{1
static NOINLINE void ReportInvalidPointerPair(uptr pc, uptr bp, uptr sp,
                                              uptr a1, uptr a2) {
  ScopedInErrorReport in_report;
  ErrorInvalidPointerPair error(GetCurrentTidOrInvalid(), pc, bp, sp, a1, a2);
  in_report.ReportError(error);
}

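// Returns true iff |a1| and |a2| appear to point into different objects:
// nearby pointers are checked directly against shadow memory; otherwise each
// address is classified as stack, heap or global, and both must refer to the
// same variable or chunk.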
static bool IsInvalidPointerPair(uptr a1, uptr a2) {
  if (a1 == a2)
    return false;

  // 256B in shadow memory can be iterated quite fast
  static const uptr kMaxOffset = 2048;

  uptr left = a1 < a2 ? a1 : a2;
  uptr right = a1 < a2 ? a2 : a1;
  uptr offset = right - left;
  if (offset <= kMaxOffset)
    return __asan_region_is_poisoned(left, offset);

  AsanThread *t = GetCurrentThread();

  // check whether left is a stack memory pointer
  if (uptr shadow_offset1 = t->GetStackVariableShadowStart(left)) {
    uptr shadow_offset2 = t->GetStackVariableShadowStart(right);
    return shadow_offset2 == 0 || shadow_offset1 != shadow_offset2;
  }

  // check whether left is a heap memory address
  HeapAddressDescription hdesc1, hdesc2;
  if (GetHeapAddressInformation(left, 0, &hdesc1) &&
      hdesc1.chunk_access.access_type == kAccessTypeInside)
    return !GetHeapAddressInformation(right, 0, &hdesc2) ||
           hdesc2.chunk_access.access_type != kAccessTypeInside ||
           hdesc1.chunk_access.chunk_begin != hdesc2.chunk_access.chunk_begin;

  // check whether left is an address of a global variable
  GlobalAddressDescription gdesc1, gdesc2;
  if (GetGlobalAddressInformation(left, 0, &gdesc1))
    return !GetGlobalAddressInformation(right - 1, 0, &gdesc2) ||
           !gdesc1.PointsInsideTheSameVariable(gdesc2);

  if (t->GetStackVariableShadowStart(right) ||
      GetHeapAddressInformation(right, 0, &hdesc2) ||
      GetGlobalAddressInformation(right - 1, 0, &gdesc2))
    return true;

  // At this point we know nothing about both a1 and a2 addresses.
  return false;
}

static inline void CheckForInvalidPointerPair(void *p1, void *p2) {
  switch (flags()->detect_invalid_pointer_pairs) {
    case 0:
      return;
    case 1:
      if (p1 == nullptr || p2 == nullptr)
        return;
      break;
  }

  uptr a1 = reinterpret_cast<uptr>(p1);
  uptr a2 = reinterpret_cast<uptr>(p2);

  if (IsInvalidPointerPair(a1, a2)) {
    GET_CALLER_PC_BP_SP;
    ReportInvalidPointerPair(pc, bp, sp, a1, a2);
  }
}

// ----------------------- Mac-specific reports ----------------- {{{1

void ReportMacMzReallocUnknown(uptr addr, uptr zone_ptr, const char *zone_name,
                               BufferedStackTrace *stack) {
  ScopedInErrorReport in_report;
  Printf(
      "mz_realloc(%p) -- attempting to realloc unallocated memory.\n"
      "This is an unrecoverable problem, exiting now.\n",
      (void *)addr);
  PrintZoneForPointer(addr, zone_ptr, zone_name);
  stack->Print();
  DescribeAddressIfHeap(addr);
}

// -------------- SuppressErrorReport -------------- {{{1
// Avoid error reports duplicating for ASan recover mode.
static bool SuppressErrorReport(uptr pc) {
  if (!common_flags()->suppress_equal_pcs) return false;
  for (unsigned i = 0; i < kAsanBuggyPcPoolSize; i++) {
    uptr cmp = atomic_load_relaxed(&AsanBuggyPcPool[i]);
    if (cmp == 0 && atomic_compare_exchange_strong(&AsanBuggyPcPool[i], &cmp,
                                                   pc, memory_order_relaxed))
      return false;
    if (cmp == pc) return true;
  }
  Die();
}

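// Generic memory-access error entry point; the __asan_report_error()
// interface function below funnels into this.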
void ReportGenericError(uptr pc, uptr bp, uptr sp, uptr addr, bool is_write,
                        uptr access_size, u32 exp, bool fatal) {
  if (__asan_test_only_reported_buggy_pointer) {
    *__asan_test_only_reported_buggy_pointer = addr;
    return;
  }
  if (!fatal && SuppressErrorReport(pc)) return;
  ENABLE_FRAME_POINTER;

  // Optimization experiments.
  // The experiments can be used to evaluate potential optimizations that remove
  // instrumentation (assess false negatives). Instead of completely removing
  // some instrumentation, compiler can emit special calls into runtime
  // (e.g. __asan_report_exp_load1 instead of __asan_report_load1) and pass
  // mask of experiments (exp).
  // The reaction to a non-zero value of exp is to be defined.
  (void)exp;

  ScopedInErrorReport in_report(fatal);
  ErrorGeneric error(GetCurrentTidOrInvalid(), pc, bp, sp, addr, is_write,
                     access_size);
  in_report.ReportError(error);
}

} // namespace __asan

// --------------------------- Interface --------------------- {{{1
using namespace __asan;

void __asan_report_error(uptr pc, uptr bp, uptr sp, uptr addr, int is_write,
                         uptr access_size, u32 exp) {
  ENABLE_FRAME_POINTER;
  bool fatal = flags()->halt_on_error;
  ReportGenericError(pc, bp, sp, addr, is_write, access_size, exp, fatal);
}

void NOINLINE __asan_set_error_report_callback(void (*callback)(const char*)) {
  Lock l(&error_message_buf_mutex);
  error_report_callback = callback;
}

void __asan_describe_address(uptr addr) {
  // Thread registry must be locked while we're describing an address.
  asanThreadRegistry().Lock();
  PrintAddressDescription(addr, 1, "");
  asanThreadRegistry().Unlock();
}

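// The queries below expose the error recorded in
// ScopedInErrorReport::current_error_, primarily so that a debugger attached
// at report time can inspect the current report.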
int __asan_report_present() {
  return ScopedInErrorReport::CurrentError().kind != kErrorKindInvalid;
}

uptr __asan_get_report_pc() {
  if (ScopedInErrorReport::CurrentError().kind == kErrorKindGeneric)
    return ScopedInErrorReport::CurrentError().Generic.pc;
  return 0;
}

uptr __asan_get_report_bp() {
  if (ScopedInErrorReport::CurrentError().kind == kErrorKindGeneric)
    return ScopedInErrorReport::CurrentError().Generic.bp;
  return 0;
}

uptr __asan_get_report_sp() {
  if (ScopedInErrorReport::CurrentError().kind == kErrorKindGeneric)
    return ScopedInErrorReport::CurrentError().Generic.sp;
  return 0;
}

uptr __asan_get_report_address() {
  ErrorDescription &err = ScopedInErrorReport::CurrentError();
  if (err.kind == kErrorKindGeneric)
    return err.Generic.addr_description.Address();
  else if (err.kind == kErrorKindDoubleFree)
    return err.DoubleFree.addr_description.addr;
  return 0;
}

int __asan_get_report_access_type() {
  if (ScopedInErrorReport::CurrentError().kind == kErrorKindGeneric)
    return ScopedInErrorReport::CurrentError().Generic.is_write;
  return -1;
}

uptr __asan_get_report_access_size() {
  if (ScopedInErrorReport::CurrentError().kind == kErrorKindGeneric)
    return ScopedInErrorReport::CurrentError().Generic.access_size;
  return 0;
}

const char *__asan_get_report_description() {
  if (ScopedInErrorReport::CurrentError().kind == kErrorKindGeneric)
    return ScopedInErrorReport::CurrentError().Generic.bug_descr;
  return ScopedInErrorReport::CurrentError().Base.scariness.GetDescription();
}

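// __sanitizer_ptr_sub / __sanitizer_ptr_cmp are called from code instrumented
// with clang's -fsanitize=pointer-subtract / -fsanitize=pointer-compare;
// reporting is gated by the detect_invalid_pointer_pairs flag checked in
// CheckForInvalidPointerPair().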
extern "C" {
SANITIZER_INTERFACE_ATTRIBUTE
void __sanitizer_ptr_sub(void *a, void *b) {
  CheckForInvalidPointerPair(a, b);
}
SANITIZER_INTERFACE_ATTRIBUTE
void __sanitizer_ptr_cmp(void *a, void *b) {
  CheckForInvalidPointerPair(a, b);
}
} // extern "C"

// Provide default implementation of __asan_on_error that does nothing
// and may be overridden by the user.
SANITIZER_INTERFACE_WEAK_DEF(void, __asan_on_error, void) {}

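// Example of a user-provided hook (defined in application code, not here):
//   extern "C" void __asan_on_error() { /* flush logs, record state, ... */ }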