/* UndefinedBehaviorSanitizer, undefined behavior detector.
   Copyright (C) 2014-2024 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "cp-tree.h"
#include "ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"

/* Test if we should instrument vptr access.  */

static bool
cp_ubsan_instrument_vptr_p (tree type)
{
  if (!flag_rtti || (flag_sanitize_trap & SANITIZE_VPTR))
    return false;

  if (!sanitize_flags_p (SANITIZE_VPTR))
    return false;

  if (current_function_decl == NULL_TREE)
    return false;

  if (type)
    {
      type = TYPE_MAIN_VARIANT (type);
      if (!CLASS_TYPE_P (type) || !CLASSTYPE_VTABLES (type))
        return false;
    }

  return true;
}
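
/* Illustrative sketch of when the predicate above fires, using hypothetical
   user code (not taken from any testcase): with -frtti and -fsanitize=vptr
   (and without -fsanitize-trap=vptr), a polymorphic class such as

     struct A { virtual void f (); int m; };

   satisfies CLASS_TYPE_P and CLASSTYPE_VTABLES, so accesses through an A*
   get instrumented, whereas a class with no virtual members, e.g.

     struct P { int m; };

   has no vtable and the predicate returns false for it.  */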

/* Helper function for
   cp_ubsan_maybe_instrument_{member_{call,access},downcast}.
   Instrument vptr access.  */

static tree
cp_ubsan_instrument_vptr (location_t loc, tree op, tree type, bool is_addr,
                          enum ubsan_null_ckind ckind)
{
  type = TYPE_MAIN_VARIANT (type);
  const char *mangled = mangle_type_string (type);
  hashval_t str_hash1 = htab_hash_string (mangled);
  hashval_t str_hash2 = iterative_hash (mangled, strlen (mangled), 0);
  tree str_hash = wide_int_to_tree (uint64_type_node,
                                    wi::uhwi (((uint64_t) str_hash1 << 32)
                                              | str_hash2, 64));
  if (!is_addr)
    op = build_fold_addr_expr_loc (loc, op);
  op = save_expr (op);
  tree vptr = fold_build3_loc (loc, COMPONENT_REF,
                               TREE_TYPE (TYPE_VFIELD (type)),
                               build_fold_indirect_ref_loc (loc, op),
                               TYPE_VFIELD (type), NULL_TREE);
  vptr = fold_convert_loc (loc, pointer_sized_int_node, vptr);
  vptr = fold_convert_loc (loc, uint64_type_node, vptr);
  if (ckind == UBSAN_DOWNCAST_POINTER)
    {
      tree cond = build2_loc (loc, NE_EXPR, boolean_type_node, op,
                              build_zero_cst (TREE_TYPE (op)));
      /* This is a compiler generated comparison, don't emit
         e.g. -Wnonnull-compare warning for it.  */
      suppress_warning (cond, OPT_Wnonnull_compare);
      vptr = build3_loc (loc, COND_EXPR, uint64_type_node, cond,
                         vptr, build_int_cst (uint64_type_node, 0));
    }
  tree ti_decl = get_tinfo_decl (type);
  mark_used (ti_decl);
  tree ptype = build_pointer_type (type);
  tree call
    = build_call_expr_internal_loc (loc, IFN_UBSAN_VPTR,
                                    void_type_node, 5, op, vptr, str_hash,
                                    build_address (ti_decl),
                                    build_int_cst (ptype, ckind));
  TREE_SIDE_EFFECTS (call) = 1;
  return fold_build2 (COMPOUND_EXPR, TREE_TYPE (op), call, op);
}
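
/* Rough sketch of the expression built above, for a hypothetical polymorphic
   class T and operand OP of type T* (shape only, not an exact GENERIC dump):

     .UBSAN_VPTR (OP, (uint64) OP->vptr,
                  <64-bit hash of T's mangled name>,
                  &<typeinfo for T>, (T *) ckind), OP

   i.e. an internal-function call evaluated for its side effects, wrapped in
   a COMPOUND_EXPR whose value is the original pointer OP, so the caller can
   use the checked pointer in place of the unchecked one.  */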

/* Helper function for
   cp_ubsan_maybe_instrument_{member_{call,access},downcast}.
   Instrument vptr access if it should be instrumented, otherwise return
   NULL_TREE.  */

static tree
cp_ubsan_maybe_instrument_vptr (location_t loc, tree op, tree type,
                                bool is_addr, enum ubsan_null_ckind ckind)
{
  if (!cp_ubsan_instrument_vptr_p (type))
    return NULL_TREE;
  return cp_ubsan_instrument_vptr (loc, op, type, is_addr, ckind);
}

/* Instrument a member call (but not constructor call) if needed.  */

void
cp_ubsan_maybe_instrument_member_call (tree stmt)
{
  if (call_expr_nargs (stmt) == 0)
    return;
  tree *opp = NULL;
  tree op = NULL_TREE;
  tree fn = CALL_EXPR_FN (stmt);
  if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
    {
      /* Virtual function call: Sanitize the use of the object pointer in the
         OBJ_TYPE_REF, since the vtable reference will SEGV otherwise (95221).
         OBJ_TYPE_REF_EXPR is ptr->vptr[N] and OBJ_TYPE_REF_OBJECT is ptr.  But
         we can't be sure of finding OBJ_TYPE_REF_OBJECT in OBJ_TYPE_REF_EXPR
         if the latter has been optimized, so we use a COMPOUND_EXPR below.  */
      opp = &OBJ_TYPE_REF_EXPR (fn);
      op = OBJ_TYPE_REF_OBJECT (fn);
    }
  else
    {
      /* Non-virtual call: Sanitize the 'this' argument.  */
      opp = &CALL_EXPR_ARG (stmt, 0);
      if (*opp == error_mark_node
          || !INDIRECT_TYPE_P (TREE_TYPE (*opp)))
        return;
      while (TREE_CODE (*opp) == COMPOUND_EXPR)
        opp = &TREE_OPERAND (*opp, 1);
      op = *opp;
    }
  op = cp_ubsan_maybe_instrument_vptr (EXPR_LOCATION (stmt), op,
                                       TREE_TYPE (TREE_TYPE (op)),
                                       true, UBSAN_MEMBER_CALL);
  if (!op)
    /* No change.  */;
  else if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
    *opp = cp_build_compound_expr (op, *opp, tf_none);
  else
    *opp = op;
}
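
/* Illustrative (hypothetical) user code for the instrumentation above:

     struct A { virtual int f (); };
     int g (A *p) { return p->f (); }

   For the virtual call, the object pointer used by the OBJ_TYPE_REF is
   checked first via the COMPOUND_EXPR, so a pointer whose dynamic type does
   not match is reported before the vtable is dereferenced; for a non-virtual
   member call the same check is applied to the 'this' argument.  */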

/* Data passed to cp_ubsan_check_member_access_r.  */

struct cp_ubsan_check_member_access_data
{
  hash_set<tree> *pset;
  bool is_addr;
};

static tree cp_ubsan_check_member_access_r (tree *, int *, void *);

/* Instrument a member access.  */

static bool
cp_ubsan_maybe_instrument_member_access
  (tree stmt, cp_ubsan_check_member_access_data *ucmd)
{
  if (DECL_ARTIFICIAL (TREE_OPERAND (stmt, 1)))
    return false;

  tree base = TREE_OPERAND (stmt, 0);
  if (!cp_ubsan_instrument_vptr_p (TREE_TYPE (base)))
    return false;

  cp_walk_tree (&base, cp_ubsan_check_member_access_r, ucmd, ucmd->pset);

  base = cp_ubsan_instrument_vptr (EXPR_LOCATION (stmt), base,
                                   TREE_TYPE (base), false,
                                   UBSAN_MEMBER_ACCESS);
  TREE_OPERAND (stmt, 0)
    = build_fold_indirect_ref_loc (EXPR_LOCATION (stmt), base);
  return true;
}
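
/* Illustrative (hypothetical) user code: for

     struct A { virtual void f (); int m; };
     int h (A *p) { return p->m; }

   the COMPONENT_REF p->m has its base rewritten above, roughly from *p to
   *(<vptr check>, p), so the dynamic type of *p is verified before the
   field m is loaded.  */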

/* Attempt to instrument member accesses inside of the function.
   cp_ubsan_maybe_instrument_member_access should be called on COMPONENT_REFs
   in the GENERIC IL, but only when the field is actually accessed, not
   merely when its address is taken.  Therefore we track in the is_addr field
   whether in the current context we are processing address-taken
   handled components or not.  E.g. for &x->y[w->z] we want to call
   cp_ubsan_maybe_instrument_member_access on the *w.z COMPONENT_REF, but
   not on the *x.y one.  */

static tree
cp_ubsan_check_member_access_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p, t;
  cp_ubsan_check_member_access_data *ucmd
    = (cp_ubsan_check_member_access_data *) data;
  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      t = TREE_OPERAND (stmt, 0);
      while ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
             && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
        t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
      if (handled_component_p (t))
        {
          *walk_subtrees = 0;
          ucmd->is_addr = true;
          cp_walk_tree (&t, cp_ubsan_check_member_access_r,
                        data, ucmd->pset);
          ucmd->is_addr = false;
        }
      break;
    case MEM_REF:
    case INDIRECT_REF:
      t = TREE_OPERAND (stmt, 0);
      if (TREE_CODE (t) == ADDR_EXPR)
        {
          *walk_subtrees = 0;
          t = TREE_OPERAND (t, 0);
          cp_walk_tree (&t, cp_ubsan_check_member_access_r, data, ucmd->pset);
        }
      break;
    case COMPONENT_REF:
      if (!ucmd->is_addr && cp_ubsan_maybe_instrument_member_access (stmt, ucmd))
        {
          *walk_subtrees = 0;
          break;
        }
      /* FALLTHRU */
    default:
      if (ucmd->is_addr && handled_component_p (stmt))
        {
          int i, len = TREE_OPERAND_LENGTH (stmt);
          *walk_subtrees = 0;
          if (!handled_component_p (TREE_OPERAND (stmt, 0)))
            ucmd->is_addr = false;
          for (i = 0; i < len; i++)
            {
              cp_walk_tree (&TREE_OPERAND (stmt, i),
                            cp_ubsan_check_member_access_r, data, ucmd->pset);
              ucmd->is_addr = false;
            }
          ucmd->is_addr = true;
        }
      break;
    }
  return NULL_TREE;
}
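
/* Restating the &x->y[w->z] example from the comment above as hypothetical
   user code:

     struct S { virtual ~S (); int y[4]; };
     struct W { virtual ~W (); int z; };
     int *f (S *x, W *w) { return &x->y[w->z]; }

   The ADDR_EXPR case walks x->y[w->z] with is_addr set, so the x->y access,
   whose address is merely taken, is not instrumented, while the w->z value
   used as the array index still gets the vptr check.  */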

/* Instrument all member accesses inside GENERIC *T_P.  */

void
cp_ubsan_instrument_member_accesses (tree *t_p)
{
  if (cp_ubsan_instrument_vptr_p (NULL_TREE))
    {
      hash_set<tree> pset;
      cp_ubsan_check_member_access_data ucmd;
      ucmd.pset = &pset;
      ucmd.is_addr = false;
      cp_walk_tree (t_p, cp_ubsan_check_member_access_r, &ucmd, &pset);
    }
}

/* Instrument downcast.  */

tree
cp_ubsan_maybe_instrument_downcast (location_t loc, tree type,
                                    tree intype, tree op)
{
  if (!INDIRECT_TYPE_P (type)
      || !INDIRECT_TYPE_P (intype)
      || !INDIRECT_TYPE_P (TREE_TYPE (op))
      || !CLASS_TYPE_P (TREE_TYPE (TREE_TYPE (op)))
      || !is_properly_derived_from (TREE_TYPE (type), TREE_TYPE (intype)))
    return NULL_TREE;

  return cp_ubsan_maybe_instrument_vptr (loc, op, TREE_TYPE (type), true,
                                         TYPE_PTR_P (type)
                                         ? UBSAN_DOWNCAST_POINTER
                                         : UBSAN_DOWNCAST_REFERENCE);
}
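
/* Illustrative (hypothetical) user code for the downcast check:

     struct A { virtual void f (); };
     struct B : A { int m; };
     B *bad (A *p) { return static_cast<B *> (p); }

   Here B is properly derived from A, so the operand is checked with
   UBSAN_DOWNCAST_POINTER (a reference downcast would use
   UBSAN_DOWNCAST_REFERENCE), and a pointer whose dynamic type is plain A is
   reported at the cast rather than at a later member access.  */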

/* Instrument cast to virtual base.  */

tree
cp_ubsan_maybe_instrument_cast_to_vbase (location_t loc, tree type, tree op)
{
  return cp_ubsan_maybe_instrument_vptr (loc, op, type, true,
                                         UBSAN_CAST_TO_VBASE);
}

/* Called from initialize_vtbl_ptrs via dfs_walk.  BINFO is the base
   which we want to initialize the vtable pointer for, DATA is
   TREE_LIST whose TREE_VALUE is the this ptr expression.  */

static tree
cp_ubsan_dfs_initialize_vtbl_ptrs (tree binfo, void *data)
{
  if (!TYPE_CONTAINS_VPTR_P (BINFO_TYPE (binfo)))
    return dfs_skip_bases;

  if (!BINFO_PRIMARY_P (binfo))
    {
      tree base_ptr = TREE_VALUE ((tree) data);

      base_ptr = build_base_path (PLUS_EXPR, base_ptr, binfo, /*nonnull=*/1,
                                  tf_warning_or_error);

      /* Compute the location of the vptr.  */
      tree vtbl_ptr
        = build_vfield_ref (cp_build_fold_indirect_ref (base_ptr),
                            TREE_TYPE (binfo));
      gcc_assert (vtbl_ptr != error_mark_node);

      /* Assign NULL to the vptr.  */
      tree vtbl = build_zero_cst (TREE_TYPE (vtbl_ptr));
      tree stmt = cp_build_modify_expr (input_location, vtbl_ptr, NOP_EXPR,
                                        vtbl, tf_warning_or_error);
      if (vptr_via_virtual_p (binfo))
        /* If this vptr comes from a virtual base of the complete object, only
           clear it if we're in charge of virtual bases.  */
        stmt = build_if_in_charge (stmt);
      finish_expr_stmt (stmt);
    }

  return NULL_TREE;
}

/* Initialize all the vtable pointers in the object pointed to by
   ADDR to NULL, so that we catch invalid calls to methods before
   mem-initializers are completed.  */

void
cp_ubsan_maybe_initialize_vtbl_ptrs (tree addr)
{
  if (!cp_ubsan_instrument_vptr_p (NULL_TREE))
    return;

  tree type = TREE_TYPE (TREE_TYPE (addr));
  tree list = build_tree_list (type, addr);
  /* We cannot rely on the vtable being set up.  We have to indirect via the
     vtt parameter.  */
  int save_in_base_initializer = in_base_initializer;
  in_base_initializer = 1;

  /* Walk through the hierarchy, initializing the vptr in each base
     class to NULL.  */
  dfs_walk_once (TYPE_BINFO (type), cp_ubsan_dfs_initialize_vtbl_ptrs,
                 NULL, list);

  in_base_initializer = save_in_base_initializer;
}
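
/* Illustrative (hypothetical) user code showing why the vptrs are cleared:

     struct A { A (int); virtual int f (); };
     struct B : A { B () : A (f ()) {} };

   The call to f () in B's mem-initializer list runs before the A subobject
   has been constructed.  Because the vptrs were zeroed above, the vptr check
   in the instrumented call reports the invalid use instead of reading
   through an uninitialized vtable pointer.  */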