//===-- asan_poisoning.cc -------------------------------------------------===//
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// Shadow memory poisoning by ASan RTL and by user application.
//===----------------------------------------------------------------------===//

#include "asan_poisoning.h"
#include "sanitizer_common/sanitizer_libc.h"
#include "sanitizer_common/sanitizer_flags.h"

namespace __asan {
void PoisonShadow(uptr addr, uptr size, u8 value) {
  if (!flags()->poison_heap) return;
  CHECK(AddrIsAlignedByGranularity(addr));
  CHECK(AddrIsInMem(addr));
  CHECK(AddrIsAlignedByGranularity(addr + size));
  CHECK(AddrIsInMem(addr + size - SHADOW_GRANULARITY));
  FastPoisonShadow(addr, size, value);
}

void PoisonShadowPartialRightRedzone(uptr addr,
                                     uptr size,
                                     uptr redzone_size,
                                     u8 value) {
  if (!flags()->poison_heap) return;
  CHECK(AddrIsAlignedByGranularity(addr));
  CHECK(AddrIsInMem(addr));
  FastPoisonShadowPartialRightRedzone(addr, size, redzone_size, value);
}
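
// Describes one endpoint of a memory region in terms of shadow memory: the
// shadow byte ("chunk") that covers the address, the address's offset within
// its SHADOW_GRANULARITY-sized granule, and the shadow value at that chunk.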
struct ShadowSegmentEndpoint {
  u8 *chunk;
  s8 offset;  // in [0, SHADOW_GRANULARITY)
  s8 value;  // = *chunk;

  explicit ShadowSegmentEndpoint(uptr address) {
    chunk = (u8*)MemToShadow(address);
    offset = address & (SHADOW_GRANULARITY - 1);
    value = *chunk;
  }
};

}  // namespace __asan

// ---------------------- Interface ---------------- {{{1
using namespace __asan;  // NOLINT

// The current implementation of __asan_(un)poison_memory_region doesn't check
// that the user program (un)poisons only memory it owns. It poisons memory
// conservatively and unpoisons progressively to make sure the ASan shadow
// mapping invariant is preserved (see the detailed mapping description here:
// http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm).
//
// * If the user asks to poison the region [left, right), the program poisons
//   at least [left, AlignDown(right)).
// * If the user asks to unpoison the region [left, right), the program
//   unpoisons at most [AlignDown(left), right).
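//
// For example (illustrative), user code that owns a buffer can make part of
// it temporarily inaccessible and later re-enable access:
//
//   char buf[128];
//   __asan_poison_memory_region(buf + 32, 64);    // accesses now reported
//   ...
//   __asan_unpoison_memory_region(buf + 32, 64);  // accesses are legal again
//
// User code normally reaches these entry points through the
// ASAN_POISON_MEMORY_REGION / ASAN_UNPOISON_MEMORY_REGION macros declared in
// <sanitizer/asan_interface.h>.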
void __asan_poison_memory_region(void const volatile *addr, uptr size) {
  if (!flags()->allow_user_poisoning || size == 0) return;
  uptr beg_addr = (uptr)addr;
  uptr end_addr = beg_addr + size;
  if (common_flags()->verbosity >= 1) {
    Printf("Trying to poison memory region [%p, %p)\n",
           (void*)beg_addr, (void*)end_addr);
  }
  ShadowSegmentEndpoint beg(beg_addr);
  ShadowSegmentEndpoint end(end_addr);
  if (beg.chunk == end.chunk) {
    CHECK(beg.offset < end.offset);
    s8 value = beg.value;
    CHECK(value == end.value);
    // We can only poison memory if the byte in end.offset is unaddressable.
    // No need to re-poison memory if it is poisoned already.
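    // For example (illustrative, assuming SHADOW_GRANULARITY == 8): if the
    // shadow value is 5 (only the first 5 bytes of the granule are
    // addressable), beg.offset == 2 and end.offset == 6, then the byte at
    // end.offset is already unaddressable, and the shadow byte becomes
    // Min(5, 2) == 2, leaving only the first two bytes addressable.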
    if (value > 0 && value <= end.offset) {
      if (beg.offset > 0) {
        *beg.chunk = Min(value, beg.offset);
      } else {
        *beg.chunk = kAsanUserPoisonedMemoryMagic;
      }
    }
    return;
  }
  CHECK(beg.chunk < end.chunk);
  if (beg.offset > 0) {
    // Mark bytes from beg.offset as unaddressable.
    if (beg.value == 0) {
      *beg.chunk = beg.offset;
    } else {
      *beg.chunk = Min(beg.value, beg.offset);
    }
    beg.chunk++;
  }
  REAL(memset)(beg.chunk, kAsanUserPoisonedMemoryMagic, end.chunk - beg.chunk);
  // Poison if the byte in end.offset is unaddressable.
  if (end.value > 0 && end.value <= end.offset) {
    *end.chunk = kAsanUserPoisonedMemoryMagic;
  }
}

void __asan_unpoison_memory_region(void const volatile *addr, uptr size) {
  if (!flags()->allow_user_poisoning || size == 0) return;
  uptr beg_addr = (uptr)addr;
  uptr end_addr = beg_addr + size;
  if (common_flags()->verbosity >= 1) {
    Printf("Trying to unpoison memory region [%p, %p)\n",
           (void*)beg_addr, (void*)end_addr);
  }
  ShadowSegmentEndpoint beg(beg_addr);
  ShadowSegmentEndpoint end(end_addr);
  if (beg.chunk == end.chunk) {
    CHECK(beg.offset < end.offset);
    s8 value = beg.value;
    CHECK(value == end.value);
    // We unpoison memory bytes up to end.offset if it is not
    // unpoisoned already.
    if (value != 0) {
      *beg.chunk = Max(value, end.offset);
    }
    return;
  }
  CHECK(beg.chunk < end.chunk);
  if (beg.offset > 0) {
    *beg.chunk = 0;
    beg.chunk++;
  }
  REAL(memset)(beg.chunk, 0, end.chunk - beg.chunk);
  if (end.offset > 0 && end.value != 0) {
    *end.chunk = Max(end.value, end.offset);
  }
}

bool __asan_address_is_poisoned(void const volatile *addr) {
  return __asan::AddressIsPoisoned((uptr)addr);
}

uptr __asan_region_is_poisoned(uptr beg, uptr size) {
  if (!size) return 0;
  uptr end = beg + size;
  if (!AddrIsInMem(beg)) return beg;
  if (!AddrIsInMem(end)) return end;
  uptr aligned_b = RoundUpTo(beg, SHADOW_GRANULARITY);
  uptr aligned_e = RoundDownTo(end, SHADOW_GRANULARITY);
  uptr shadow_beg = MemToShadow(aligned_b);
  uptr shadow_end = MemToShadow(aligned_e);
  // First check the first and the last application bytes,
  // then check the SHADOW_GRANULARITY-aligned region by calling
  // mem_is_zero on the corresponding shadow.
  if (!__asan::AddressIsPoisoned(beg) &&
      !__asan::AddressIsPoisoned(end - 1) &&
      (shadow_end <= shadow_beg ||
       __sanitizer::mem_is_zero((const char *)shadow_beg,
                                shadow_end - shadow_beg)))
    return 0;
  // The fast check failed, so we have a poisoned byte somewhere.
  // Find it exactly.
  for (; beg < end; beg++)
    if (__asan::AddressIsPoisoned(beg))
      return beg;
  UNREACHABLE("mem_is_zero returned false, but poisoned byte was not found");
  return 0;
}

#define CHECK_SMALL_REGION(p, size, isWrite)                    \
  do {                                                          \
    uptr __p = reinterpret_cast<uptr>(p);                       \
    uptr __size = size;                                         \
    if (UNLIKELY(__asan::AddressIsPoisoned(__p) ||              \
        __asan::AddressIsPoisoned(__p + __size - 1))) {         \
      GET_CURRENT_PC_BP_SP;                                     \
      uptr __bad = __asan_region_is_poisoned(__p, __size);      \
      __asan_report_error(pc, bp, sp, __bad, isWrite, __size);  \
    }                                                           \
  } while (false)
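
// The unaligned load/store entry points below validate the whole accessed
// range with CHECK_SMALL_REGION (reporting the first poisoned address if the
// check fails) before performing the actual memory access.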

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
u16 __sanitizer_unaligned_load16(const uu16 *p) {
  CHECK_SMALL_REGION(p, sizeof(*p), false);
  return *p;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
u32 __sanitizer_unaligned_load32(const uu32 *p) {
  CHECK_SMALL_REGION(p, sizeof(*p), false);
  return *p;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
u64 __sanitizer_unaligned_load64(const uu64 *p) {
  CHECK_SMALL_REGION(p, sizeof(*p), false);
  return *p;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
void __sanitizer_unaligned_store16(uu16 *p, u16 x) {
  CHECK_SMALL_REGION(p, sizeof(*p), true);
  *p = x;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
void __sanitizer_unaligned_store32(uu32 *p, u32 x) {
  CHECK_SMALL_REGION(p, sizeof(*p), true);
  *p = x;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
void __sanitizer_unaligned_store64(uu64 *p, u64 x) {
  CHECK_SMALL_REGION(p, sizeof(*p), true);
  *p = x;
}

// This is a simplified version of __asan_(un)poison_memory_region, which
// assumes that the left border of the region to be poisoned is properly
// aligned.
static void PoisonAlignedStackMemory(uptr addr, uptr size, bool do_poison) {
  if (size == 0) return;
  uptr aligned_size = size & ~(SHADOW_GRANULARITY - 1);
  PoisonShadow(addr, aligned_size,
               do_poison ? kAsanStackUseAfterScopeMagic : 0);
  if (size == aligned_size)
    return;
  s8 end_offset = (s8)(size - aligned_size);
  s8* shadow_end = (s8*)MemToShadow(addr + aligned_size);
  s8 end_value = *shadow_end;
  if (do_poison) {
    // If possible, mark all the bytes mapping to the last shadow byte as
    // unaddressable.
    if (end_value > 0 && end_value <= end_offset)
      *shadow_end = (s8)kAsanStackUseAfterScopeMagic;
  } else {
    // If necessary, mark the first few bytes mapping to the last shadow byte
    // as addressable.
    if (end_value != 0)
      *shadow_end = Max(end_value, end_offset);
  }
}

void __asan_poison_stack_memory(uptr addr, uptr size) {
  if (common_flags()->verbosity > 0)
    Report("poisoning: %p %zx\n", (void*)addr, size);
  PoisonAlignedStackMemory(addr, size, true);
}

void __asan_unpoison_stack_memory(uptr addr, uptr size) {
  if (common_flags()->verbosity > 0)
    Report("unpoisoning: %p %zx\n", (void*)addr, size);
  PoisonAlignedStackMemory(addr, size, false);
}
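
// Note: __asan_poison_stack_memory and __asan_unpoison_stack_memory above are
// meant to be emitted by compiler instrumentation for stack use-after-scope
// checking rather than called directly by user code.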

void __sanitizer_annotate_contiguous_container(const void *beg_p,
                                               const void *end_p,
                                               const void *old_mid_p,
                                               const void *new_mid_p) {
  if (common_flags()->verbosity >= 2)
    Printf("contiguous_container: %p %p %p %p\n", beg_p, end_p, old_mid_p,
           new_mid_p);
  uptr beg = reinterpret_cast<uptr>(beg_p);
  uptr end = reinterpret_cast<uptr>(end_p);
  uptr old_mid = reinterpret_cast<uptr>(old_mid_p);
  uptr new_mid = reinterpret_cast<uptr>(new_mid_p);
  uptr granularity = SHADOW_GRANULARITY;
  CHECK(beg <= old_mid && beg <= new_mid && old_mid <= end && new_mid <= end &&
        IsAligned(beg, granularity));
  CHECK_LE(end - beg,
           FIRST_32_SECOND_64(1UL << 30, 1UL << 34));  // Sanity check.
  uptr a = RoundDownTo(Min(old_mid, new_mid), granularity);
  uptr c = RoundUpTo(Max(old_mid, new_mid), granularity);
  uptr d1 = RoundDownTo(old_mid, granularity);
  uptr d2 = RoundUpTo(old_mid, granularity);
  // Currently we should be in this state:
  // [a, d1) is good, [d2, c) is bad, [d1, d2) is partially good.
  // Make a quick sanity check that we are indeed in this state.
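  // Schematically (when old_mid is not granule-aligned):
  //
  //   a              d1      d2              c
  //   | addressable  | mixed | poisoned      |
  //
  // where [d1, d2) is the single granule containing old_mid: bytes before
  // old_mid are addressable, bytes at and after it are not.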
  if (d1 != d2)
    CHECK_EQ(*(u8*)MemToShadow(d1), old_mid - d1);
  if (a + granularity <= d1)
    CHECK_EQ(*(u8*)MemToShadow(a), 0);
  if (d2 + granularity <= c && c <= end)
    CHECK_EQ(*(u8*)MemToShadow(c - granularity),
             kAsanContiguousContainerOOBMagic);
  uptr b1 = RoundDownTo(new_mid, granularity);
  uptr b2 = RoundUpTo(new_mid, granularity);
  // New state:
  // [a, b1) is good, [b2, c) is bad, [b1, b2) is partially good.
  PoisonShadow(a, b1 - a, 0);
  PoisonShadow(b2, c - b2, kAsanContiguousContainerOOBMagic);
  if (b1 != b2) {
    CHECK_EQ(b2 - b1, granularity);
    *(u8*)MemToShadow(b1) = static_cast<u8>(new_mid - b1);
  }
}
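
// Example (illustrative): a vector-like container that owns the storage
// [storage, storage + capacity) and grows its size from old_size to new_size
// would announce the change as:
//
//   __sanitizer_annotate_contiguous_container(
//       storage, storage + capacity,
//       storage + old_size, storage + new_size);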