1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "double-int.h"
35 #include "fold-const.h"
36 #include "stor-layout.h"
39 #include "tree-iterator.h"
41 #include "langhooks.h"
42 #include "tree-pass.h"
43 #include "diagnostic.h"
47 #include "stringpool.h"
48 #include "tree-ssa-alias.h"
49 #include "tree-ssanames.h"
50 #include "tree-ssa-operands.h"
51 #include "tree-ssa-address.h"
54 #include "dominance.h"
56 #include "basic-block.h"
57 #include "tree-ssa-loop-niter.h"
58 #include "gimple-expr.h"
60 #include "tree-phinodes.h"
61 #include "gimple-ssa.h"
62 #include "ssa-iterators.h"
63 #include "gimple-pretty-print.h"
64 #include "gimple-iterator.h"
66 #include "gimplify-me.h"
67 #include "print-tree.h"
70 #include "hard-reg-set.h"
74 #include "statistics.h"
76 #include "fixed-value.h"
77 #include "insn-config.h"
85 #include "tree-ssa-propagate.h"
86 #include "gimple-fold.h"
87 #include "tree-chkp.h"
88 #include "gimple-walk.h"
89 #include "rtl.h" /* For MEM_P, assign_temp. */
92 #include "lto-streamer.h"
97 /* Pointer Bounds Checker instruments code with memory checks to find
98 out-of-bounds memory accesses. Checks are performed by computing
99 bounds for each pointer and then comparing address of accessed
100 memory before pointer dereferencing.
108 There are few things to instrument:
110 a) Memory accesses - add checker calls to check address of accessed memory
111 against bounds of dereferenced pointer. Obviously safe memory
112 accesses like static variable access does not have to be instrumented
119 with 4 bytes access is transformed into:
121 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
123 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
126 where __bound_tmp.1_3 are bounds computed for pointer p_1,
127 __builtin___chkp_bndcl is a lower bound check and
128 __builtin___chkp_bndcu is an upper bound check.
132 When pointer is stored in memory we need to store its bounds. To
133 achieve compatibility of instrumented code with regular codes
134 we have to keep data layout and store bounds in special bound tables
135 via special checker call. Implementation of bounds table may vary for
136 different platforms. It has to associate pointer value and its
137 location (it is required because we may have two equal pointers
138 with different bounds stored in different places) with bounds.
139 Another checker builtin allows to get bounds for specified pointer
140 loaded from specified location.
150 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
152 where __bound_tmp.1_2 are bounds of &buf2.
154 c) Static initialization.
156 The special case of pointer store is static pointer initialization.
157 Bounds initialization is performed in a few steps:
158 - register all static initializations in front-end using
159 chkp_register_var_initializer
160 - when file compilation finishes we create functions with special
161 attribute 'chkp ctor' and put explicit initialization code
162 (assignments) for all statically initialized pointers.
163 - when checker constructor is compiled checker pass adds required
164 bounds initialization for all statically initialized pointers
165 - since we do not actually need excess pointers initialization
166 in checker constructor we remove such assignments from them
170 For each call in the code we add additional arguments to pass
171 bounds for pointer arguments. We determine type of call arguments
172 using arguments list from function declaration; if function
173 declaration is not available we use function type; otherwise
174 (e.g. for unnamed arguments) we use type of passed value. Function
175 declaration/type is replaced with the instrumented one.
179 val_1 = foo (&buf1, &buf2, &buf1, 0);
183 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
184 &buf1, __bound_tmp.1_2, 0);
188 If function returns a pointer value we have to return bounds also.
189 A new operand was added for return statement to hold returned bounds.
197 return &_buf1, __bound_tmp.1_1;
199 3. Bounds computation.
201 Compiler is fully responsible for computing bounds to be used for each
202 memory access. The first step for bounds computation is to find the
203 origin of pointer dereferenced for memory access. Based on pointer
204 origin we define a way to compute its bounds. There are just few
207 a) Pointer is returned by call.
209 In this case we use corresponding checker builtin method to obtain returned
214 buf_1 = malloc (size_2);
219 buf_1 = malloc (size_2);
220 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
221 foo (buf_1, __bound_tmp.1_3);
223 b) Pointer is an address of an object.
225 In this case compiler tries to compute objects size and create corresponding
226 bounds. If object has incomplete type then special checker builtin is used to
227 obtain its size at runtime.
233 <unnamed type> __bound_tmp.3;
237 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
240 return &buf, __bound_tmp.3_2;
245 Address of an object 'extern int buf[]' with incomplete type is
250 <unnamed type> __bound_tmp.4;
251 long unsigned int __size_tmp.3;
254 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
255 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
258 return &buf, __bound_tmp.4_3;
261 c) Pointer is the result of object narrowing.
263 It happens when we use pointer to an object to compute pointer to a part
264 of an object. E.g. we take pointer to a field of a structure. In this
265 case we perform bounds intersection using bounds of original object and
266 bounds of object's part (which are computed basing on its type).
268 There may be some debatable questions about when narrowing should occur
269 and when it should not. To avoid false bound violations in correct
270 programs we do not perform narrowing when address of an array element is
271 obtained (it has address of the whole array) and when address of the first
272 structure field is obtained (because it is guaranteed to be equal to
273 address of the whole structure and it is legal to cast it back to structure).
275 Default narrowing behavior may be changed using compiler flags.
279 In this example address of the second structure field is returned.
281 foo (struct A * p, __bounds_type __bounds_of_p)
283 <unnamed type> __bound_tmp.3;
288 _5 = &p_1(D)->second_field;
289 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
290 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
292 _2 = &p_1(D)->second_field;
293 return _2, __bound_tmp.3_8;
298 In this example address of the first field of array element is returned.
300 foo (struct A * p, __bounds_type __bounds_of_p, int i)
302 long unsigned int _3;
303 long unsigned int _4;
308 _3 = (long unsigned int) i_1(D);
311 _7 = &_6->first_field;
312 return _7, __bounds_of_p_2(D);
316 d) Pointer is the result of pointer arithmetic or type cast.
318 In this case bounds of the base pointer are used. In case of binary
319 operation producing a pointer we are analyzing data flow further
320 looking for operand's bounds. One operand is considered as a base
321 if it has some valid bounds. If we fall into a case when none of
322 operands (or both of them) has valid bounds, a default bounds value
325 Trying to find out bounds for binary operations we may fall into
326 cyclic dependencies for pointers. To avoid infinite recursion all
327 walked phi nodes instantly obtain corresponding bounds but created
328 bounds are marked as incomplete. It helps us to stop DF walk during
331 When we reach pointer source, some args of incomplete bounds phi obtain
332 valid bounds and those values are propagated further through phi nodes.
333 If no valid bounds were found for phi node then we mark its result as
334 invalid bounds. Process stops when all incomplete bounds become either
335 valid or invalid and we are able to choose a pointer base.
337 e) Pointer is loaded from the memory.
339 In this case we just need to load bounds from the bounds table.
345 <unnamed type> __bound_tmp.3;
351 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
352 return _2, __bound_tmp.3_4;
357 typedef void (*assign_handler
)(tree
, tree
, void *);
359 static tree
chkp_get_zero_bounds ();
360 static tree
chkp_find_bounds (tree ptr
, gimple_stmt_iterator
*iter
);
361 static tree
chkp_find_bounds_loaded (tree ptr
, tree ptr_src
,
362 gimple_stmt_iterator
*iter
);
363 static void chkp_parse_array_and_component_ref (tree node
, tree
*ptr
,
364 tree
*elt
, bool *safe
,
367 gimple_stmt_iterator
*iter
,
368 bool innermost_bounds
);
370 #define chkp_bndldx_fndecl \
371 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
372 #define chkp_bndstx_fndecl \
373 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
374 #define chkp_checkl_fndecl \
375 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
376 #define chkp_checku_fndecl \
377 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
378 #define chkp_bndmk_fndecl \
379 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
380 #define chkp_ret_bnd_fndecl \
381 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
382 #define chkp_intersect_fndecl \
383 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
384 #define chkp_narrow_bounds_fndecl \
385 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
386 #define chkp_sizeof_fndecl \
387 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
388 #define chkp_extract_lower_fndecl \
389 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
390 #define chkp_extract_upper_fndecl \
391 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
393 static GTY (()) tree chkp_uintptr_type
;
395 static GTY (()) tree chkp_zero_bounds_var
;
396 static GTY (()) tree chkp_none_bounds_var
;
398 static GTY (()) basic_block entry_block
;
399 static GTY (()) tree zero_bounds
;
400 static GTY (()) tree none_bounds
;
401 static GTY (()) tree incomplete_bounds
;
402 static GTY (()) tree tmp_var
;
403 static GTY (()) tree size_tmp_var
;
404 static GTY (()) bitmap chkp_abnormal_copies
;
406 struct hash_set
<tree
> *chkp_invalid_bounds
;
407 struct hash_set
<tree
> *chkp_completed_bounds_set
;
408 struct hash_map
<tree
, tree
> *chkp_reg_bounds
;
409 struct hash_map
<tree
, tree
> *chkp_bound_vars
;
410 struct hash_map
<tree
, tree
> *chkp_reg_addr_bounds
;
411 struct hash_map
<tree
, tree
> *chkp_incomplete_bounds_map
;
412 struct hash_map
<tree
, tree
> *chkp_bounds_map
;
413 struct hash_map
<tree
, tree
> *chkp_static_var_bounds
;
415 static bool in_chkp_pass
;
417 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
418 #define CHKP_SIZE_TMP_NAME "__size_tmp"
419 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
420 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
421 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
422 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
423 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
425 /* Static checker constructors may become very large and their
426 compilation with optimization may take too much time.
427 Therefore we put a limit to number of statements in one
428 constructor. Tests with 100 000 statically initialized
429 pointers showed following compilation times on Sandy Bridge
431 limit 100 => ~18 sec.
432 limit 300 => ~22 sec.
433 limit 1000 => ~30 sec.
434 limit 3000 => ~49 sec.
435 limit 5000 => ~55 sec.
436 limit 10000 => ~76 sec.
437 limit 100000 => ~532 sec. */
438 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
440 struct chkp_ctor_stmt_list
446 /* Return 1 if function FNDECL is instrumented by Pointer
449 chkp_function_instrumented_p (tree fndecl
)
452 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl
));
455 /* Mark function FNDECL as instrumented. */
457 chkp_function_mark_instrumented (tree fndecl
)
459 if (chkp_function_instrumented_p (fndecl
))
462 DECL_ATTRIBUTES (fndecl
)
463 = tree_cons (get_identifier ("chkp instrumented"), NULL
,
464 DECL_ATTRIBUTES (fndecl
));
467 /* Return true when STMT is builtin call to instrumentation function
468 corresponding to CODE. */
471 chkp_gimple_call_builtin_p (gimple call
,
472 enum built_in_function code
)
475 if (is_gimple_call (call
)
476 && (fndecl
= targetm
.builtin_chkp_function (code
))
477 && gimple_call_fndecl (call
) == fndecl
)
482 /* Emit code to store zero bounds for PTR located at MEM. */
484 chkp_expand_bounds_reset_for_mem (tree mem
, tree ptr
)
486 tree zero_bnd
, bnd
, addr
, bndstx
;
488 if (flag_chkp_use_static_const_bounds
)
489 zero_bnd
= chkp_get_zero_bounds_var ();
491 zero_bnd
= chkp_build_make_bounds_call (integer_zero_node
,
493 bnd
= make_tree (pointer_bounds_type_node
,
494 assign_temp (pointer_bounds_type_node
, 0, 1));
495 addr
= build1 (ADDR_EXPR
,
496 build_pointer_type (TREE_TYPE (mem
)), mem
);
497 bndstx
= chkp_build_bndstx_call (addr
, ptr
, bnd
);
499 expand_assignment (bnd
, zero_bnd
, false);
500 expand_normal (bndstx
);
503 /* Mark statement S to not be instrumented. */
505 chkp_mark_stmt (gimple s
)
507 gimple_set_plf (s
, GF_PLF_1
, true);
510 /* Mark statement S to be instrumented. */
512 chkp_unmark_stmt (gimple s
)
514 gimple_set_plf (s
, GF_PLF_1
, false);
517 /* Return 1 if statement S should not be instrumented. */
519 chkp_marked_stmt_p (gimple s
)
521 return gimple_plf (s
, GF_PLF_1
);
524 /* Get var to be used for bound temps. */
526 chkp_get_tmp_var (void)
529 tmp_var
= create_tmp_reg (pointer_bounds_type_node
, CHKP_BOUND_TMP_NAME
);
534 /* Get SSA_NAME to be used as temp. */
536 chkp_get_tmp_reg (gimple stmt
)
539 return make_ssa_name (chkp_get_tmp_var (), stmt
);
541 return make_temp_ssa_name (pointer_bounds_type_node
, stmt
,
542 CHKP_BOUND_TMP_NAME
);
545 /* Get var to be used for size temps. */
547 chkp_get_size_tmp_var (void)
550 size_tmp_var
= create_tmp_reg (chkp_uintptr_type
, CHKP_SIZE_TMP_NAME
);
555 /* Register bounds BND for address of OBJ. */
557 chkp_register_addr_bounds (tree obj
, tree bnd
)
559 if (bnd
== incomplete_bounds
)
562 chkp_reg_addr_bounds
->put (obj
, bnd
);
564 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
566 fprintf (dump_file
, "Regsitered bound ");
567 print_generic_expr (dump_file
, bnd
, 0);
568 fprintf (dump_file
, " for address of ");
569 print_generic_expr (dump_file
, obj
, 0);
570 fprintf (dump_file
, "\n");
574 /* Return bounds registered for address of OBJ. */
576 chkp_get_registered_addr_bounds (tree obj
)
578 tree
*slot
= chkp_reg_addr_bounds
->get (obj
);
579 return slot
? *slot
: NULL_TREE
;
582 /* Mark BOUNDS as completed. */
584 chkp_mark_completed_bounds (tree bounds
)
586 chkp_completed_bounds_set
->add (bounds
);
588 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
590 fprintf (dump_file
, "Marked bounds ");
591 print_generic_expr (dump_file
, bounds
, 0);
592 fprintf (dump_file
, " as completed\n");
596 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
598 chkp_completed_bounds (tree bounds
)
600 return chkp_completed_bounds_set
->contains (bounds
);
603 /* Clear comleted bound marks. */
605 chkp_erase_completed_bounds (void)
607 delete chkp_completed_bounds_set
;
608 chkp_completed_bounds_set
= new hash_set
<tree
>;
611 /* Mark BOUNDS associated with PTR as incomplete. */
613 chkp_register_incomplete_bounds (tree bounds
, tree ptr
)
615 chkp_incomplete_bounds_map
->put (bounds
, ptr
);
617 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
619 fprintf (dump_file
, "Regsitered incomplete bounds ");
620 print_generic_expr (dump_file
, bounds
, 0);
621 fprintf (dump_file
, " for ");
622 print_generic_expr (dump_file
, ptr
, 0);
623 fprintf (dump_file
, "\n");
627 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
629 chkp_incomplete_bounds (tree bounds
)
631 if (bounds
== incomplete_bounds
)
634 if (chkp_completed_bounds (bounds
))
637 return chkp_incomplete_bounds_map
->get (bounds
) != NULL
;
640 /* Clear incomleted bound marks. */
642 chkp_erase_incomplete_bounds (void)
644 delete chkp_incomplete_bounds_map
;
645 chkp_incomplete_bounds_map
= new hash_map
<tree
, tree
>;
648 /* Build and return bndmk call which creates bounds for structure
649 pointed by PTR. Structure should have complete type. */
651 chkp_make_bounds_for_struct_addr (tree ptr
)
653 tree type
= TREE_TYPE (ptr
);
656 gcc_assert (POINTER_TYPE_P (type
));
658 size
= TYPE_SIZE (TREE_TYPE (type
));
662 return build_call_nary (pointer_bounds_type_node
,
663 build_fold_addr_expr (chkp_bndmk_fndecl
),
667 /* Traversal function for chkp_may_finish_incomplete_bounds.
668 Set RES to 0 if at least one argument of phi statement
669 defining bounds (passed in KEY arg) is unknown.
670 Traversal stops when first unknown phi argument is found. */
672 chkp_may_complete_phi_bounds (tree
const &bounds
, tree
*slot ATTRIBUTE_UNUSED
,
678 gcc_assert (TREE_CODE (bounds
) == SSA_NAME
);
680 phi
= SSA_NAME_DEF_STMT (bounds
);
682 gcc_assert (phi
&& gimple_code (phi
) == GIMPLE_PHI
);
684 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
686 tree phi_arg
= gimple_phi_arg_def (phi
, i
);
690 /* Do not need to traverse further. */
698 /* Return 1 if all phi nodes created for bounds have their
699 arguments computed. */
701 chkp_may_finish_incomplete_bounds (void)
705 chkp_incomplete_bounds_map
706 ->traverse
<bool *, chkp_may_complete_phi_bounds
> (&res
);
711 /* Helper function for chkp_finish_incomplete_bounds.
712 Recompute args for bounds phi node. */
714 chkp_recompute_phi_bounds (tree
const &bounds
, tree
*slot
,
715 void *res ATTRIBUTE_UNUSED
)
722 gcc_assert (TREE_CODE (bounds
) == SSA_NAME
);
723 gcc_assert (TREE_CODE (ptr
) == SSA_NAME
);
725 bounds_phi
= as_a
<gphi
*> (SSA_NAME_DEF_STMT (bounds
));
726 ptr_phi
= as_a
<gphi
*> (SSA_NAME_DEF_STMT (ptr
));
728 for (i
= 0; i
< gimple_phi_num_args (bounds_phi
); i
++)
730 tree ptr_arg
= gimple_phi_arg_def (ptr_phi
, i
);
731 tree bound_arg
= chkp_find_bounds (ptr_arg
, NULL
);
733 add_phi_arg (bounds_phi
, bound_arg
,
734 gimple_phi_arg_edge (ptr_phi
, i
),
741 /* Mark BOUNDS as invalid. */
743 chkp_mark_invalid_bounds (tree bounds
)
745 chkp_invalid_bounds
->add (bounds
);
747 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
749 fprintf (dump_file
, "Marked bounds ");
750 print_generic_expr (dump_file
, bounds
, 0);
751 fprintf (dump_file
, " as invalid\n");
755 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
757 chkp_valid_bounds (tree bounds
)
759 if (bounds
== zero_bounds
|| bounds
== none_bounds
)
762 return !chkp_invalid_bounds
->contains (bounds
);
765 /* Helper function for chkp_finish_incomplete_bounds.
766 Check all arguments of phi nodes trying to find
767 valid completed bounds. If there is at least one
768 such arg then bounds produced by phi node are marked
769 as valid completed bounds and all phi args are
772 chkp_find_valid_phi_bounds (tree
const &bounds
, tree
*slot
, bool *res
)
777 gcc_assert (TREE_CODE (bounds
) == SSA_NAME
);
779 if (chkp_completed_bounds (bounds
))
782 phi
= SSA_NAME_DEF_STMT (bounds
);
784 gcc_assert (phi
&& gimple_code (phi
) == GIMPLE_PHI
);
786 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
788 tree phi_arg
= gimple_phi_arg_def (phi
, i
);
790 gcc_assert (phi_arg
);
792 if (chkp_valid_bounds (phi_arg
) && !chkp_incomplete_bounds (phi_arg
))
795 chkp_mark_completed_bounds (bounds
);
796 chkp_recompute_phi_bounds (bounds
, slot
, NULL
);
804 /* Helper function for chkp_finish_incomplete_bounds.
805 Marks all incomplete bounds as invalid. */
807 chkp_mark_invalid_bounds_walker (tree
const &bounds
,
808 tree
*slot ATTRIBUTE_UNUSED
,
809 void *res ATTRIBUTE_UNUSED
)
811 if (!chkp_completed_bounds (bounds
))
813 chkp_mark_invalid_bounds (bounds
);
814 chkp_mark_completed_bounds (bounds
);
819 /* When all bound phi nodes have all their args computed
820 we have enough info to find valid bounds. We iterate
821 through all incomplete bounds searching for valid
822 bounds. Found valid bounds are marked as completed
823 and all remaining incomplete bounds are recomputed.
824 Process continues until no new valid bounds may be
825 found. All remaining incomplete bounds are marked as
826 invalid (i.e. have no valid source of bounds). */
828 chkp_finish_incomplete_bounds (void)
836 chkp_incomplete_bounds_map
->
837 traverse
<bool *, chkp_find_valid_phi_bounds
> (&found_valid
);
840 chkp_incomplete_bounds_map
->
841 traverse
<void *, chkp_recompute_phi_bounds
> (NULL
);
844 chkp_incomplete_bounds_map
->
845 traverse
<void *, chkp_mark_invalid_bounds_walker
> (NULL
);
846 chkp_incomplete_bounds_map
->
847 traverse
<void *, chkp_recompute_phi_bounds
> (NULL
);
849 chkp_erase_completed_bounds ();
850 chkp_erase_incomplete_bounds ();
853 /* Return 1 if type TYPE is a pointer type or a
854 structure having a pointer type as one of its fields.
855 Otherwise return 0. */
857 chkp_type_has_pointer (const_tree type
)
861 if (BOUNDED_TYPE_P (type
))
863 else if (RECORD_OR_UNION_TYPE_P (type
))
867 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
868 if (TREE_CODE (field
) == FIELD_DECL
)
869 res
= res
|| chkp_type_has_pointer (TREE_TYPE (field
));
871 else if (TREE_CODE (type
) == ARRAY_TYPE
)
872 res
= chkp_type_has_pointer (TREE_TYPE (type
));
878 chkp_type_bounds_count (const_tree type
)
884 else if (BOUNDED_TYPE_P (type
))
886 else if (RECORD_OR_UNION_TYPE_P (type
))
890 bitmap_obstack_initialize (NULL
);
891 have_bound
= BITMAP_ALLOC (NULL
);
892 chkp_find_bound_slots (type
, have_bound
);
893 res
= bitmap_count_bits (have_bound
);
894 BITMAP_FREE (have_bound
);
895 bitmap_obstack_release (NULL
);
901 /* Get bounds associated with NODE via
902 chkp_set_bounds call. */
904 chkp_get_bounds (tree node
)
908 if (!chkp_bounds_map
)
911 slot
= chkp_bounds_map
->get (node
);
912 return slot
? *slot
: NULL_TREE
;
915 /* Associate bounds VAL with NODE. */
917 chkp_set_bounds (tree node
, tree val
)
919 if (!chkp_bounds_map
)
920 chkp_bounds_map
= new hash_map
<tree
, tree
>;
922 chkp_bounds_map
->put (node
, val
);
925 /* Check if statically initialized variable VAR require
926 static bounds initialization. If VAR is added into
927 bounds initialization list then 1 is returned. Otherwise
930 chkp_register_var_initializer (tree var
)
932 if (!flag_check_pointer_bounds
933 || DECL_INITIAL (var
) == error_mark_node
)
936 gcc_assert (TREE_CODE (var
) == VAR_DECL
);
937 gcc_assert (DECL_INITIAL (var
));
939 if (TREE_STATIC (var
)
940 && chkp_type_has_pointer (TREE_TYPE (var
)))
942 varpool_node::get_create (var
)->need_bounds_init
= 1;
949 /* Helper function for chkp_finish_file.
951 Add new modification statement (RHS is assigned to LHS)
952 into list of static initializer statements (passed in ARG).
953 If statements list becomes too big, emit checker constructor
954 and start the new one. */
956 chkp_add_modification_to_stmt_list (tree lhs
,
960 struct chkp_ctor_stmt_list
*stmts
= (struct chkp_ctor_stmt_list
*)arg
;
963 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (rhs
)))
964 rhs
= build1 (CONVERT_EXPR
, TREE_TYPE (lhs
), rhs
);
966 modify
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, rhs
);
967 append_to_statement_list (modify
, &stmts
->stmts
);
972 /* Build and return ADDR_EXPR for specified object OBJ. */
974 chkp_build_addr_expr (tree obj
)
976 return TREE_CODE (obj
) == TARGET_MEM_REF
977 ? tree_mem_ref_addr (ptr_type_node
, obj
)
978 : build_fold_addr_expr (obj
);
981 /* Helper function for chkp_finish_file.
982 Initialize bound variable BND_VAR with bounds of variable
983 VAR to statements list STMTS. If statements list becomes
984 too big, emit checker constructor and start the new one. */
986 chkp_output_static_bounds (tree bnd_var
, tree var
,
987 struct chkp_ctor_stmt_list
*stmts
)
991 if (TREE_CODE (var
) == STRING_CST
)
993 lb
= build1 (CONVERT_EXPR
, size_type_node
, chkp_build_addr_expr (var
));
994 size
= build_int_cst (size_type_node
, TREE_STRING_LENGTH (var
) - 1);
996 else if (DECL_SIZE (var
)
997 && !chkp_variable_size_type (TREE_TYPE (var
)))
999 /* Compute bounds using statically known size. */
1000 lb
= build1 (CONVERT_EXPR
, size_type_node
, chkp_build_addr_expr (var
));
1001 size
= size_binop (MINUS_EXPR
, DECL_SIZE_UNIT (var
), size_one_node
);
1005 /* Compute bounds using dynamic size. */
1008 lb
= build1 (CONVERT_EXPR
, size_type_node
, chkp_build_addr_expr (var
));
1009 call
= build1 (ADDR_EXPR
,
1010 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl
)),
1011 chkp_sizeof_fndecl
);
1012 size
= build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl
)),
1015 if (flag_chkp_zero_dynamic_size_as_infinite
)
1017 tree max_size
, cond
;
1019 max_size
= build2 (MINUS_EXPR
, size_type_node
, size_zero_node
, lb
);
1020 cond
= build2 (NE_EXPR
, boolean_type_node
, size
, size_zero_node
);
1021 size
= build3 (COND_EXPR
, size_type_node
, cond
, size
, max_size
);
1024 size
= size_binop (MINUS_EXPR
, size
, size_one_node
);
1027 ub
= size_binop (PLUS_EXPR
, lb
, size
);
1028 stmts
->avail
-= targetm
.chkp_initialize_bounds (bnd_var
, lb
, ub
,
1030 if (stmts
->avail
<= 0)
1032 cgraph_build_static_cdtor ('B', stmts
->stmts
,
1033 MAX_RESERVED_INIT_PRIORITY
+ 2);
1034 stmts
->avail
= MAX_STMTS_IN_STATIC_CHKP_CTOR
;
1035 stmts
->stmts
= NULL
;
1039 /* Return entry block to be used for checker initialization code.
1040 Create new block if required. */
1042 chkp_get_entry_block (void)
1045 entry_block
= split_block (ENTRY_BLOCK_PTR_FOR_FN (cfun
), NULL
)->dest
;
1050 /* Return a bounds var to be used for pointer var PTR_VAR. */
1052 chkp_get_bounds_var (tree ptr_var
)
1057 slot
= chkp_bound_vars
->get (ptr_var
);
1062 bnd_var
= create_tmp_reg (pointer_bounds_type_node
,
1063 CHKP_BOUND_TMP_NAME
);
1064 chkp_bound_vars
->put (ptr_var
, bnd_var
);
1072 /* Register bounds BND for object PTR in global bounds table.
1073 A copy of bounds may be created for abnormal ssa names.
1074 Returns bounds to use for PTR. */
1076 chkp_maybe_copy_and_register_bounds (tree ptr
, tree bnd
)
1080 if (!chkp_reg_bounds
)
1083 /* Do nothing if bounds are incomplete_bounds
1084 because it means bounds will be recomputed. */
1085 if (bnd
== incomplete_bounds
)
1088 abnormal_ptr
= (TREE_CODE (ptr
) == SSA_NAME
1089 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr
)
1090 && gimple_code (SSA_NAME_DEF_STMT (ptr
)) != GIMPLE_PHI
);
1092 /* A single bounds value may be reused multiple times for
1093 different pointer values. It may cause coalescing issues
1094 for abnormal SSA names. To avoid it we create a bounds
1095 copy in case it is computed for abnormal SSA name.
1097 We also cannot reuse such created copies for other pointers */
1099 || bitmap_bit_p (chkp_abnormal_copies
, SSA_NAME_VERSION (bnd
)))
1101 tree bnd_var
= NULL_TREE
;
1105 if (SSA_NAME_VAR (ptr
))
1106 bnd_var
= chkp_get_bounds_var (SSA_NAME_VAR (ptr
));
1109 bnd_var
= chkp_get_tmp_var ();
1111 /* For abnormal copies we may just find original
1112 bounds and use them. */
1113 if (!abnormal_ptr
&& !SSA_NAME_IS_DEFAULT_DEF (bnd
))
1115 gimple bnd_def
= SSA_NAME_DEF_STMT (bnd
);
1116 gcc_checking_assert (gimple_code (bnd_def
) == GIMPLE_ASSIGN
);
1117 bnd
= gimple_assign_rhs1 (bnd_def
);
1119 /* For undefined values we usually use none bounds
1120 value but in case of abnormal edge it may cause
1121 coalescing failures. Use default definition of
1122 bounds variable instead to avoid it. */
1123 else if (SSA_NAME_IS_DEFAULT_DEF (ptr
)
1124 && TREE_CODE (SSA_NAME_VAR (ptr
)) != PARM_DECL
)
1126 bnd
= get_or_create_ssa_default_def (cfun
, bnd_var
);
1128 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1130 fprintf (dump_file
, "Using default def bounds ");
1131 print_generic_expr (dump_file
, bnd
, 0);
1132 fprintf (dump_file
, " for abnormal default def SSA name ");
1133 print_generic_expr (dump_file
, ptr
, 0);
1134 fprintf (dump_file
, "\n");
1140 gimple def
= SSA_NAME_DEF_STMT (ptr
);
1142 gimple_stmt_iterator gsi
;
1145 copy
= make_ssa_name (bnd_var
, gimple_build_nop ());
1147 copy
= make_temp_ssa_name (pointer_bounds_type_node
,
1148 gimple_build_nop (),
1149 CHKP_BOUND_TMP_NAME
);
1150 assign
= gimple_build_assign (copy
, bnd
);
1152 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1154 fprintf (dump_file
, "Creating a copy of bounds ");
1155 print_generic_expr (dump_file
, bnd
, 0);
1156 fprintf (dump_file
, " for abnormal SSA name ");
1157 print_generic_expr (dump_file
, ptr
, 0);
1158 fprintf (dump_file
, "\n");
1161 if (gimple_code (def
) == GIMPLE_NOP
)
1163 gsi
= gsi_last_bb (chkp_get_entry_block ());
1164 if (!gsi_end_p (gsi
) && is_ctrl_stmt (gsi_stmt (gsi
)))
1165 gsi_insert_before (&gsi
, assign
, GSI_CONTINUE_LINKING
);
1167 gsi_insert_after (&gsi
, assign
, GSI_CONTINUE_LINKING
);
1171 gimple bnd_def
= SSA_NAME_DEF_STMT (bnd
);
1172 /* Sometimes (e.g. when we load a pointer from a
1173 memory) bounds are produced later than a pointer.
1174 We need to insert bounds copy appropriately. */
1175 if (gimple_code (bnd_def
) != GIMPLE_NOP
1176 && stmt_dominates_stmt_p (def
, bnd_def
))
1177 gsi
= gsi_for_stmt (bnd_def
);
1179 gsi
= gsi_for_stmt (def
);
1180 gsi_insert_after (&gsi
, assign
, GSI_CONTINUE_LINKING
);
1187 bitmap_set_bit (chkp_abnormal_copies
, SSA_NAME_VERSION (bnd
));
1190 chkp_reg_bounds
->put (ptr
, bnd
);
1192 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1194 fprintf (dump_file
, "Regsitered bound ");
1195 print_generic_expr (dump_file
, bnd
, 0);
1196 fprintf (dump_file
, " for pointer ");
1197 print_generic_expr (dump_file
, ptr
, 0);
1198 fprintf (dump_file
, "\n");
1204 /* Get bounds registered for object PTR in global bounds table. */
1206 chkp_get_registered_bounds (tree ptr
)
1210 if (!chkp_reg_bounds
)
1213 slot
= chkp_reg_bounds
->get (ptr
);
1214 return slot
? *slot
: NULL_TREE
;
1217 /* Add bound retvals to return statement pointed by GSI. */
1220 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator
*gsi
)
1222 greturn
*ret
= as_a
<greturn
*> (gsi_stmt (*gsi
));
1223 tree retval
= gimple_return_retval (ret
);
1224 tree ret_decl
= DECL_RESULT (cfun
->decl
);
1230 if (BOUNDED_P (ret_decl
))
1232 bounds
= chkp_find_bounds (retval
, gsi
);
1233 bounds
= chkp_maybe_copy_and_register_bounds (ret_decl
, bounds
);
1234 gimple_return_set_retbnd (ret
, bounds
);
1240 /* Force OP to be suitable for using as an argument for call.
1241 New statements (if any) go to SEQ. */
1243 chkp_force_gimple_call_op (tree op
, gimple_seq
*seq
)
1246 gimple_stmt_iterator si
;
1248 op
= force_gimple_operand (unshare_expr (op
), &stmts
, true, NULL_TREE
);
1250 for (si
= gsi_start (stmts
); !gsi_end_p (si
); gsi_next (&si
))
1251 chkp_mark_stmt (gsi_stmt (si
));
1253 gimple_seq_add_seq (seq
, stmts
);
1258 /* Generate lower bound check for memory access by ADDR.
1259 Check is inserted before the position pointed by ITER.
1260 DIRFLAG indicates whether memory access is load or store. */
1262 chkp_check_lower (tree addr
, tree bounds
,
1263 gimple_stmt_iterator iter
,
1264 location_t location
,
1271 if (bounds
== chkp_get_zero_bounds ())
1274 if (dirflag
== integer_zero_node
1275 && !flag_chkp_check_read
)
1278 if (dirflag
== integer_one_node
1279 && !flag_chkp_check_write
)
1284 node
= chkp_force_gimple_call_op (addr
, &seq
);
1286 check
= gimple_build_call (chkp_checkl_fndecl
, 2, node
, bounds
);
1287 chkp_mark_stmt (check
);
1288 gimple_call_set_with_bounds (check
, true);
1289 gimple_set_location (check
, location
);
1290 gimple_seq_add_stmt (&seq
, check
);
1292 gsi_insert_seq_before (&iter
, seq
, GSI_SAME_STMT
);
1294 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1296 gimple before
= gsi_stmt (iter
);
1297 fprintf (dump_file
, "Generated lower bound check for statement ");
1298 print_gimple_stmt (dump_file
, before
, 0, TDF_VOPS
|TDF_MEMSYMS
);
1299 fprintf (dump_file
, " ");
1300 print_gimple_stmt (dump_file
, check
, 0, TDF_VOPS
|TDF_MEMSYMS
);
1304 /* Generate upper bound check for memory access by ADDR.
1305 Check is inserted before the position pointed by ITER.
1306 DIRFLAG indicates whether memory access is load or store. */
1308 chkp_check_upper (tree addr
, tree bounds
,
1309 gimple_stmt_iterator iter
,
1310 location_t location
,
1317 if (bounds
== chkp_get_zero_bounds ())
1320 if (dirflag
== integer_zero_node
1321 && !flag_chkp_check_read
)
1324 if (dirflag
== integer_one_node
1325 && !flag_chkp_check_write
)
1330 node
= chkp_force_gimple_call_op (addr
, &seq
);
1332 check
= gimple_build_call (chkp_checku_fndecl
, 2, node
, bounds
);
1333 chkp_mark_stmt (check
);
1334 gimple_call_set_with_bounds (check
, true);
1335 gimple_set_location (check
, location
);
1336 gimple_seq_add_stmt (&seq
, check
);
1338 gsi_insert_seq_before (&iter
, seq
, GSI_SAME_STMT
);
1340 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1342 gimple before
= gsi_stmt (iter
);
1343 fprintf (dump_file
, "Generated upper bound check for statement ");
1344 print_gimple_stmt (dump_file
, before
, 0, TDF_VOPS
|TDF_MEMSYMS
);
1345 fprintf (dump_file
, " ");
1346 print_gimple_stmt (dump_file
, check
, 0, TDF_VOPS
|TDF_MEMSYMS
);
1350 /* Generate lower and upper bound checks for memory access
1351 to memory slot [FIRST, LAST] againsr BOUNDS. Checks
1352 are inserted before the position pointed by ITER.
1353 DIRFLAG indicates whether memory access is load or store. */
1355 chkp_check_mem_access (tree first
, tree last
, tree bounds
,
1356 gimple_stmt_iterator iter
,
1357 location_t location
,
1360 chkp_check_lower (first
, bounds
, iter
, location
, dirflag
);
1361 chkp_check_upper (last
, bounds
, iter
, location
, dirflag
);
1364 /* Replace call to _bnd_chk_* pointed by GSI with
1365 bndcu and bndcl calls. DIRFLAG determines whether
1366 check is for read or write. */
1369 chkp_replace_address_check_builtin (gimple_stmt_iterator
*gsi
,
1372 gimple_stmt_iterator call_iter
= *gsi
;
1373 gimple call
= gsi_stmt (*gsi
);
1374 tree fndecl
= gimple_call_fndecl (call
);
1375 tree addr
= gimple_call_arg (call
, 0);
1376 tree bounds
= chkp_find_bounds (addr
, gsi
);
1378 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1379 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS
)
1380 chkp_check_lower (addr
, bounds
, *gsi
, gimple_location (call
), dirflag
);
1382 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
)
1383 chkp_check_upper (addr
, bounds
, *gsi
, gimple_location (call
), dirflag
);
1385 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS
)
1387 tree size
= gimple_call_arg (call
, 1);
1388 addr
= fold_build_pointer_plus (addr
, size
);
1389 addr
= fold_build_pointer_plus_hwi (addr
, -1);
1390 chkp_check_upper (addr
, bounds
, *gsi
, gimple_location (call
), dirflag
);
1393 gsi_remove (&call_iter
, true);
1396 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1397 corresponding bounds extract call. */
1400 chkp_replace_extract_builtin (gimple_stmt_iterator
*gsi
)
1402 gimple call
= gsi_stmt (*gsi
);
1403 tree fndecl
= gimple_call_fndecl (call
);
1404 tree addr
= gimple_call_arg (call
, 0);
1405 tree bounds
= chkp_find_bounds (addr
, gsi
);
1408 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_GET_PTR_LBOUND
)
1409 fndecl
= chkp_extract_lower_fndecl
;
1410 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_GET_PTR_UBOUND
)
1411 fndecl
= chkp_extract_upper_fndecl
;
1415 extract
= gimple_build_call (fndecl
, 1, bounds
);
1416 gimple_call_set_lhs (extract
, gimple_call_lhs (call
));
1417 chkp_mark_stmt (extract
);
1419 gsi_replace (gsi
, extract
, false);
1422 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1424 chkp_build_component_ref (tree obj
, tree field
)
1428 /* If object is TMR then we do not use component_ref but
1429 add offset instead. We need it to be able to get addr
1430 of the reasult later. */
1431 if (TREE_CODE (obj
) == TARGET_MEM_REF
)
1433 tree offs
= TMR_OFFSET (obj
);
1434 offs
= fold_binary_to_constant (PLUS_EXPR
, TREE_TYPE (offs
),
1435 offs
, DECL_FIELD_OFFSET (field
));
1439 res
= copy_node (obj
);
1440 TREE_TYPE (res
) = TREE_TYPE (field
);
1441 TMR_OFFSET (res
) = offs
;
1444 res
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL_TREE
);
1449 /* Return ARRAY_REF for array ARR and index IDX with
1450 specified element type ETYPE and element size ESIZE. */
1452 chkp_build_array_ref (tree arr
, tree etype
, tree esize
,
1453 unsigned HOST_WIDE_INT idx
)
1455 tree index
= build_int_cst (size_type_node
, idx
);
1458 /* If object is TMR then we do not use array_ref but
1459 add offset instead. We need it to be able to get addr
1460 of the reasult later. */
1461 if (TREE_CODE (arr
) == TARGET_MEM_REF
)
1463 tree offs
= TMR_OFFSET (arr
);
1465 esize
= fold_binary_to_constant (MULT_EXPR
, TREE_TYPE (esize
),
1469 offs
= fold_binary_to_constant (PLUS_EXPR
, TREE_TYPE (offs
),
1473 res
= copy_node (arr
);
1474 TREE_TYPE (res
) = etype
;
1475 TMR_OFFSET (res
) = offs
;
1478 res
= build4 (ARRAY_REF
, etype
, arr
, index
, NULL_TREE
, NULL_TREE
);
1483 /* Helper function for chkp_add_bounds_to_call_stmt.
1484 Fill ALL_BOUNDS output array with created bounds.
1486 OFFS is used for recursive calls and holds basic
1487 offset of TYPE in outer structure in bits.
1489 ITER points a position where bounds are searched.
1491 ALL_BOUNDS[i] is filled with elem bounds if there
1492 is a field in TYPE which has pointer type and offset
1493 equal to i * POINTER_SIZE in bits. */
1495 chkp_find_bounds_for_elem (tree elem
, tree
*all_bounds
,
1497 gimple_stmt_iterator
*iter
)
1499 tree type
= TREE_TYPE (elem
);
1501 if (BOUNDED_TYPE_P (type
))
1503 if (!all_bounds
[offs
/ POINTER_SIZE
])
1505 tree temp
= make_temp_ssa_name (type
, gimple_build_nop (), "");
1506 gimple assign
= gimple_build_assign (temp
, elem
);
1507 gimple_stmt_iterator gsi
;
1509 gsi_insert_before (iter
, assign
, GSI_SAME_STMT
);
1510 gsi
= gsi_for_stmt (assign
);
1512 all_bounds
[offs
/ POINTER_SIZE
] = chkp_find_bounds (temp
, &gsi
);
1515 else if (RECORD_OR_UNION_TYPE_P (type
))
1519 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
1520 if (TREE_CODE (field
) == FIELD_DECL
)
1522 tree base
= unshare_expr (elem
);
1523 tree field_ref
= chkp_build_component_ref (base
, field
);
1524 HOST_WIDE_INT field_offs
1525 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field
));
1526 if (DECL_FIELD_OFFSET (field
))
1527 field_offs
+= TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field
)) * 8;
1529 chkp_find_bounds_for_elem (field_ref
, all_bounds
,
1530 offs
+ field_offs
, iter
);
1533 else if (TREE_CODE (type
) == ARRAY_TYPE
)
1535 tree maxval
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
1536 tree etype
= TREE_TYPE (type
);
1537 HOST_WIDE_INT esize
= TREE_INT_CST_LOW (TYPE_SIZE (etype
));
1538 unsigned HOST_WIDE_INT cur
;
1540 if (!maxval
|| integer_minus_onep (maxval
))
1543 for (cur
= 0; cur
<= TREE_INT_CST_LOW (maxval
); cur
++)
1545 tree base
= unshare_expr (elem
);
1546 tree arr_elem
= chkp_build_array_ref (base
, etype
,
1549 chkp_find_bounds_for_elem (arr_elem
, all_bounds
, offs
+ cur
* esize
,
1555 /* Fill HAVE_BOUND output bitmap with information about
1556 bounds requred for object of type TYPE.
1558 OFFS is used for recursive calls and holds basic
1559 offset of TYPE in outer structure in bits.
1561 HAVE_BOUND[i] is set to 1 if there is a field
1562 in TYPE which has pointer type and offset
1563 equal to i * POINTER_SIZE - OFFS in bits. */
1565 chkp_find_bound_slots_1 (const_tree type
, bitmap have_bound
,
1568 if (BOUNDED_TYPE_P (type
))
1569 bitmap_set_bit (have_bound
, offs
/ POINTER_SIZE
);
1570 else if (RECORD_OR_UNION_TYPE_P (type
))
1574 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
1575 if (TREE_CODE (field
) == FIELD_DECL
)
1577 HOST_WIDE_INT field_offs
1578 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field
));
1579 if (DECL_FIELD_OFFSET (field
))
1580 field_offs
+= TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field
)) * 8;
1581 chkp_find_bound_slots_1 (TREE_TYPE (field
), have_bound
,
1585 else if (TREE_CODE (type
) == ARRAY_TYPE
)
1587 tree maxval
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
1588 tree etype
= TREE_TYPE (type
);
1589 HOST_WIDE_INT esize
= TREE_INT_CST_LOW (TYPE_SIZE (etype
));
1590 unsigned HOST_WIDE_INT cur
;
1593 || TREE_CODE (maxval
) != INTEGER_CST
1594 || integer_minus_onep (maxval
))
1597 for (cur
= 0; cur
<= TREE_INT_CST_LOW (maxval
); cur
++)
1598 chkp_find_bound_slots_1 (etype
, have_bound
, offs
+ cur
* esize
);
1602 /* Fill bitmap RES with information about bounds for
1603 type TYPE. See chkp_find_bound_slots_1 for more
1606 chkp_find_bound_slots (const_tree type
, bitmap res
)
1609 chkp_find_bound_slots_1 (type
, res
, 0);
1612 /* Return 1 if call to FNDECL should be instrumented
1616 chkp_instrument_normal_builtin (tree fndecl
)
1618 switch (DECL_FUNCTION_CODE (fndecl
))
1620 case BUILT_IN_STRLEN
:
1621 case BUILT_IN_STRCPY
:
1622 case BUILT_IN_STRNCPY
:
1623 case BUILT_IN_STPCPY
:
1624 case BUILT_IN_STPNCPY
:
1625 case BUILT_IN_STRCAT
:
1626 case BUILT_IN_STRNCAT
:
1627 case BUILT_IN_MEMCPY
:
1628 case BUILT_IN_MEMPCPY
:
1629 case BUILT_IN_MEMSET
:
1630 case BUILT_IN_MEMMOVE
:
1631 case BUILT_IN_BZERO
:
1632 case BUILT_IN_STRCMP
:
1633 case BUILT_IN_STRNCMP
:
1635 case BUILT_IN_MEMCMP
:
1636 case BUILT_IN_MEMCPY_CHK
:
1637 case BUILT_IN_MEMPCPY_CHK
:
1638 case BUILT_IN_MEMMOVE_CHK
:
1639 case BUILT_IN_MEMSET_CHK
:
1640 case BUILT_IN_STRCPY_CHK
:
1641 case BUILT_IN_STRNCPY_CHK
:
1642 case BUILT_IN_STPCPY_CHK
:
1643 case BUILT_IN_STPNCPY_CHK
:
1644 case BUILT_IN_STRCAT_CHK
:
1645 case BUILT_IN_STRNCAT_CHK
:
1646 case BUILT_IN_MALLOC
:
1647 case BUILT_IN_CALLOC
:
1648 case BUILT_IN_REALLOC
:
1656 /* Add bound arguments to call statement pointed by GSI.
1657 Also performs a replacement of user checker builtins calls
1658 with internal ones. */
1661 chkp_add_bounds_to_call_stmt (gimple_stmt_iterator
*gsi
)
1663 gcall
*call
= as_a
<gcall
*> (gsi_stmt (*gsi
));
1664 unsigned arg_no
= 0;
1665 tree fndecl
= gimple_call_fndecl (call
);
1667 tree first_formal_arg
;
1669 bool use_fntype
= false;
1674 /* Do nothing for internal functions. */
1675 if (gimple_call_internal_p (call
))
1678 fntype
= TREE_TYPE (TREE_TYPE (gimple_call_fn (call
)));
1680 /* Do nothing if back-end builtin is called. */
1681 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
1684 /* Do nothing for some middle-end builtins. */
1685 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1686 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_OBJECT_SIZE
)
1689 /* Do nothing for calls to not instrumentable functions. */
1690 if (fndecl
&& !chkp_instrumentable_p (fndecl
))
1693 /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
1694 and CHKP_COPY_PTR_BOUNDS. */
1695 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1696 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
1697 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
1698 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
1699 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_SET_PTR_BOUNDS
))
1702 /* Check user builtins are replaced with checks. */
1703 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1704 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1705 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
1706 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS
))
1708 chkp_replace_address_check_builtin (gsi
, integer_minus_one_node
);
1712 /* Check user builtins are replaced with bound extract. */
1713 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1714 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_GET_PTR_LBOUND
1715 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_GET_PTR_UBOUND
))
1717 chkp_replace_extract_builtin (gsi
);
1721 /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
1722 target narrow bounds call. */
1723 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1724 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
)
1726 tree arg
= gimple_call_arg (call
, 1);
1727 tree bounds
= chkp_find_bounds (arg
, gsi
);
1729 gimple_call_set_fndecl (call
, chkp_narrow_bounds_fndecl
);
1730 gimple_call_set_arg (call
, 1, bounds
);
1736 /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
1738 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1739 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_STORE_PTR_BOUNDS
)
1741 tree addr
= gimple_call_arg (call
, 0);
1742 tree ptr
= gimple_call_arg (call
, 1);
1743 tree bounds
= chkp_find_bounds (ptr
, gsi
);
1744 gimple_stmt_iterator iter
= gsi_for_stmt (call
);
1746 chkp_build_bndstx (addr
, ptr
, bounds
, gsi
);
1747 gsi_remove (&iter
, true);
1752 if (!flag_chkp_instrument_calls
)
1755 /* We instrument only some subset of builtins. We also instrument
1756 builtin calls to be inlined. */
1758 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1759 && !chkp_instrument_normal_builtin (fndecl
))
1761 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)))
1764 struct cgraph_node
*clone
= chkp_maybe_create_clone (fndecl
);
1766 || !gimple_has_body_p (clone
->decl
))
1770 /* If function decl is available then use it for
1771 formal arguments list. Otherwise use function type. */
1772 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
1773 first_formal_arg
= DECL_ARGUMENTS (fndecl
);
1776 first_formal_arg
= TYPE_ARG_TYPES (fntype
);
1780 /* Fill vector of new call args. */
1781 vec
<tree
> new_args
= vNULL
;
1782 new_args
.create (gimple_call_num_args (call
));
1783 arg
= first_formal_arg
;
1784 for (arg_no
= 0; arg_no
< gimple_call_num_args (call
); arg_no
++)
1786 tree call_arg
= gimple_call_arg (call
, arg_no
);
1789 /* Get arg type using formal argument description
1790 or actual argument type. */
1793 if (TREE_VALUE (arg
) != void_type_node
)
1795 type
= TREE_VALUE (arg
);
1796 arg
= TREE_CHAIN (arg
);
1799 type
= TREE_TYPE (call_arg
);
1802 type
= TREE_TYPE (arg
);
1803 arg
= TREE_CHAIN (arg
);
1806 type
= TREE_TYPE (call_arg
);
1808 new_args
.safe_push (call_arg
);
1810 if (BOUNDED_TYPE_P (type
)
1811 || pass_by_reference (NULL
, TYPE_MODE (type
), type
, true))
1812 new_args
.safe_push (chkp_find_bounds (call_arg
, gsi
));
1813 else if (chkp_type_has_pointer (type
))
1815 HOST_WIDE_INT max_bounds
1816 = TREE_INT_CST_LOW (TYPE_SIZE (type
)) / POINTER_SIZE
;
1817 tree
*all_bounds
= (tree
*)xmalloc (sizeof (tree
) * max_bounds
);
1818 HOST_WIDE_INT bnd_no
;
1820 memset (all_bounds
, 0, sizeof (tree
) * max_bounds
);
1822 chkp_find_bounds_for_elem (call_arg
, all_bounds
, 0, gsi
);
1824 for (bnd_no
= 0; bnd_no
< max_bounds
; bnd_no
++)
1825 if (all_bounds
[bnd_no
])
1826 new_args
.safe_push (all_bounds
[bnd_no
]);
1832 if (new_args
.length () == gimple_call_num_args (call
))
1836 new_call
= gimple_build_call_vec (gimple_op (call
, 1), new_args
);
1837 gimple_call_set_lhs (new_call
, gimple_call_lhs (call
));
1838 gimple_call_copy_flags (new_call
, call
);
1840 new_args
.release ();
1842 /* For direct calls fndecl is replaced with instrumented version. */
1845 tree new_decl
= chkp_maybe_create_clone (fndecl
)->decl
;
1846 gimple_call_set_fndecl (new_call
, new_decl
);
1847 gimple_call_set_fntype (new_call
, TREE_TYPE (new_decl
));
1849 /* For indirect call we should fix function pointer type if
1850 pass some bounds. */
1851 else if (new_call
!= call
)
1853 tree type
= gimple_call_fntype (call
);
1854 type
= chkp_copy_function_type_adding_bounds (type
);
1855 gimple_call_set_fntype (new_call
, type
);
1858 /* replace old call statement with the new one. */
1859 if (call
!= new_call
)
1861 FOR_EACH_SSA_TREE_OPERAND (op
, call
, iter
, SSA_OP_ALL_DEFS
)
1863 SSA_NAME_DEF_STMT (op
) = new_call
;
1865 gsi_replace (gsi
, new_call
, true);
1868 update_stmt (new_call
);
1870 gimple_call_set_with_bounds (new_call
, true);
1873 /* Return constant static bounds var with specified LB and UB
1874 if such var exists in varpool. Return NULL otherwise. */
1876 chkp_find_const_bounds_var (HOST_WIDE_INT lb
,
1879 tree val
= targetm
.chkp_make_bounds_constant (lb
, ub
);
1880 struct varpool_node
*node
;
1882 /* We expect bounds constant is represented as a complex value
1883 of two pointer sized integers. */
1884 gcc_assert (TREE_CODE (val
) == COMPLEX_CST
);
1886 FOR_EACH_VARIABLE (node
)
1887 if (POINTER_BOUNDS_P (node
->decl
)
1888 && TREE_READONLY (node
->decl
)
1889 && DECL_INITIAL (node
->decl
)
1890 && TREE_CODE (DECL_INITIAL (node
->decl
)) == COMPLEX_CST
1891 && tree_int_cst_equal (TREE_REALPART (DECL_INITIAL (node
->decl
)),
1892 TREE_REALPART (val
))
1893 && tree_int_cst_equal (TREE_IMAGPART (DECL_INITIAL (node
->decl
)),
1894 TREE_IMAGPART (val
)))
1900 /* Return constant static bounds var with specified bounds LB and UB.
1901 If such var does not exists then new var is created with specified NAME. */
1903 chkp_make_static_const_bounds (HOST_WIDE_INT lb
,
1909 /* With LTO we may have constant bounds already in varpool.
1911 var
= chkp_find_const_bounds_var (lb
, ub
);
1916 var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
,
1917 get_identifier (name
), pointer_bounds_type_node
);
1919 TREE_PUBLIC (var
) = 1;
1920 TREE_USED (var
) = 1;
1921 TREE_READONLY (var
) = 1;
1922 TREE_STATIC (var
) = 1;
1923 TREE_ADDRESSABLE (var
) = 0;
1924 DECL_ARTIFICIAL (var
) = 1;
1925 DECL_READ_P (var
) = 1;
1926 /* We may use this symbol during ctors generation in chkp_finish_file
1927 when all symbols are emitted. Force output to avoid undefined
1928 symbols in ctors. */
1931 DECL_INITIAL (var
) = targetm
.chkp_make_bounds_constant (lb
, ub
);
1932 DECL_COMDAT (var
) = 1;
1933 varpool_node::get_create (var
)->set_comdat_group (DECL_ASSEMBLER_NAME (var
));
1934 varpool_node::get_create (var
)->force_output
= 1;
1937 DECL_EXTERNAL (var
) = 1;
1938 varpool_node::finalize_decl (var
);
1943 /* Generate code to make bounds with specified lower bound LB and SIZE.
1944 if AFTER is 1 then code is inserted after position pointed by ITER
1945 otherwise code is inserted before position pointed by ITER.
1946 If ITER is NULL then code is added to entry block. */
1948 chkp_make_bounds (tree lb
, tree size
, gimple_stmt_iterator
*iter
, bool after
)
1951 gimple_stmt_iterator gsi
;
1958 gsi
= gsi_start_bb (chkp_get_entry_block ());
1962 lb
= chkp_force_gimple_call_op (lb
, &seq
);
1963 size
= chkp_force_gimple_call_op (size
, &seq
);
1965 stmt
= gimple_build_call (chkp_bndmk_fndecl
, 2, lb
, size
);
1966 chkp_mark_stmt (stmt
);
1968 bounds
= chkp_get_tmp_reg (stmt
);
1969 gimple_call_set_lhs (stmt
, bounds
);
1971 gimple_seq_add_stmt (&seq
, stmt
);
1974 gsi_insert_seq_after (&gsi
, seq
, GSI_SAME_STMT
);
1976 gsi_insert_seq_before (&gsi
, seq
, GSI_SAME_STMT
);
1978 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1980 fprintf (dump_file
, "Made bounds: ");
1981 print_gimple_stmt (dump_file
, stmt
, 0, TDF_VOPS
|TDF_MEMSYMS
);
1984 fprintf (dump_file
, " inserted before statement: ");
1985 print_gimple_stmt (dump_file
, gsi_stmt (*iter
), 0, TDF_VOPS
|TDF_MEMSYMS
);
1988 fprintf (dump_file
, " at function entry\n");
1991 /* update_stmt (stmt); */
1996 /* Return var holding zero bounds. */
1998 chkp_get_zero_bounds_var (void)
2000 if (!chkp_zero_bounds_var
)
2002 tree id
= get_identifier (CHKP_ZERO_BOUNDS_VAR_NAME
);
2003 symtab_node
*node
= symtab_node::get_for_asmname (id
);
2005 chkp_zero_bounds_var
= node
->decl
;
2008 if (!chkp_zero_bounds_var
)
2009 chkp_zero_bounds_var
2010 = chkp_make_static_const_bounds (0, -1,
2011 CHKP_ZERO_BOUNDS_VAR_NAME
);
2012 return chkp_zero_bounds_var
;
2015 /* Return var holding none bounds. */
2017 chkp_get_none_bounds_var (void)
2019 if (!chkp_none_bounds_var
)
2021 tree id
= get_identifier (CHKP_NONE_BOUNDS_VAR_NAME
);
2022 symtab_node
*node
= symtab_node::get_for_asmname (id
);
2024 chkp_none_bounds_var
= node
->decl
;
2027 if (!chkp_none_bounds_var
)
2028 chkp_none_bounds_var
2029 = chkp_make_static_const_bounds (-1, 0,
2030 CHKP_NONE_BOUNDS_VAR_NAME
);
2031 return chkp_none_bounds_var
;
2034 /* Return SSA_NAME used to represent zero bounds. */
2036 chkp_get_zero_bounds (void)
2041 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2042 fprintf (dump_file
, "Creating zero bounds...");
2044 if ((flag_chkp_use_static_bounds
&& flag_chkp_use_static_const_bounds
)
2045 || flag_chkp_use_static_const_bounds
> 0)
2047 gimple_stmt_iterator gsi
= gsi_start_bb (chkp_get_entry_block ());
2050 zero_bounds
= chkp_get_tmp_reg (gimple_build_nop ());
2051 stmt
= gimple_build_assign (zero_bounds
, chkp_get_zero_bounds_var ());
2052 gsi_insert_before (&gsi
, stmt
, GSI_SAME_STMT
);
2055 zero_bounds
= chkp_make_bounds (integer_zero_node
,
2063 /* Return SSA_NAME used to represent none bounds. */
2065 chkp_get_none_bounds (void)
2070 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2071 fprintf (dump_file
, "Creating none bounds...");
2074 if ((flag_chkp_use_static_bounds
&& flag_chkp_use_static_const_bounds
)
2075 || flag_chkp_use_static_const_bounds
> 0)
2077 gimple_stmt_iterator gsi
= gsi_start_bb (chkp_get_entry_block ());
2080 none_bounds
= chkp_get_tmp_reg (gimple_build_nop ());
2081 stmt
= gimple_build_assign (none_bounds
, chkp_get_none_bounds_var ());
2082 gsi_insert_before (&gsi
, stmt
, GSI_SAME_STMT
);
2085 none_bounds
= chkp_make_bounds (integer_minus_one_node
,
2086 build_int_cst (size_type_node
, 2),
2093 /* Return bounds to be used as a result of operation which
2094 should not create poiunter (e.g. MULT_EXPR). */
2096 chkp_get_invalid_op_bounds (void)
2098 return chkp_get_zero_bounds ();
2101 /* Return bounds to be used for loads of non-pointer values. */
2103 chkp_get_nonpointer_load_bounds (void)
2105 return chkp_get_zero_bounds ();
2108 /* Return 1 if may use bndret call to get bounds for pointer
2109 returned by CALL. */
2111 chkp_call_returns_bounds_p (gcall
*call
)
2113 if (gimple_call_internal_p (call
))
2116 tree fndecl
= gimple_call_fndecl (call
);
2118 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
2122 && lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (fndecl
)))
2125 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
2127 if (chkp_instrument_normal_builtin (fndecl
))
2130 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)))
2133 struct cgraph_node
*clone
= chkp_maybe_create_clone (fndecl
);
2134 return (clone
&& gimple_has_body_p (clone
->decl
));
2140 /* Build bounds returned by CALL. */
2142 chkp_build_returned_bound (gcall
*call
)
2144 gimple_stmt_iterator gsi
;
2147 tree fndecl
= gimple_call_fndecl (call
);
2149 /* To avoid fixing alloca expands in targets we handle
2152 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2153 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_ALLOCA
2154 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_ALLOCA_WITH_ALIGN
))
2156 tree size
= gimple_call_arg (call
, 0);
2157 tree lb
= gimple_call_lhs (call
);
2158 gimple_stmt_iterator iter
= gsi_for_stmt (call
);
2159 bounds
= chkp_make_bounds (lb
, size
, &iter
, true);
2161 /* We know bounds returned by set_bounds builtin call. */
2163 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2164 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_SET_PTR_BOUNDS
)
2166 tree lb
= gimple_call_arg (call
, 0);
2167 tree size
= gimple_call_arg (call
, 1);
2168 gimple_stmt_iterator iter
= gsi_for_stmt (call
);
2169 bounds
= chkp_make_bounds (lb
, size
, &iter
, true);
2171 /* Detect bounds initialization calls. */
2173 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2174 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
)
2175 bounds
= chkp_get_zero_bounds ();
2176 /* Detect bounds nullification calls. */
2178 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2179 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
)
2180 bounds
= chkp_get_none_bounds ();
2181 /* Detect bounds copy calls. */
2183 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2184 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
)
2186 gimple_stmt_iterator iter
= gsi_for_stmt (call
);
2187 bounds
= chkp_find_bounds (gimple_call_arg (call
, 1), &iter
);
2189 /* Do not use retbnd when returned bounds are equal to some
2190 of passed bounds. */
2191 else if (gimple_call_return_flags (call
) & ERF_RETURNS_ARG
)
2193 gimple_stmt_iterator iter
= gsi_for_stmt (call
);
2194 unsigned int retarg
= 0, argno
;
2195 if (gimple_call_return_flags (call
) & ERF_RETURNS_ARG
)
2196 retarg
= gimple_call_return_flags (call
) & ERF_RETURN_ARG_MASK
;
2197 if (gimple_call_with_bounds_p (call
))
2199 for (argno
= 0; argno
< gimple_call_num_args (call
); argno
++)
2200 if (!POINTER_BOUNDS_P (gimple_call_arg (call
, argno
)))
2211 bounds
= chkp_find_bounds (gimple_call_arg (call
, argno
), &iter
);
2213 else if (chkp_call_returns_bounds_p (call
))
2215 gcc_assert (TREE_CODE (gimple_call_lhs (call
)) == SSA_NAME
);
2217 /* In general case build checker builtin call to
2218 obtain returned bounds. */
2219 stmt
= gimple_build_call (chkp_ret_bnd_fndecl
, 1,
2220 gimple_call_lhs (call
));
2221 chkp_mark_stmt (stmt
);
2223 gsi
= gsi_for_stmt (call
);
2224 gsi_insert_after (&gsi
, stmt
, GSI_SAME_STMT
);
2226 bounds
= chkp_get_tmp_reg (stmt
);
2227 gimple_call_set_lhs (stmt
, bounds
);
2232 bounds
= chkp_get_zero_bounds ();
2234 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2236 fprintf (dump_file
, "Built returned bounds (");
2237 print_generic_expr (dump_file
, bounds
, 0);
2238 fprintf (dump_file
, ") for call: ");
2239 print_gimple_stmt (dump_file
, call
, 0, TDF_VOPS
|TDF_MEMSYMS
);
2242 bounds
= chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call
), bounds
);
2247 /* Return bounds used as returned by call
2248 which produced SSA name VAL. */
2250 chkp_retbnd_call_by_val (tree val
)
2252 if (TREE_CODE (val
) != SSA_NAME
)
2255 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val
)) == GIMPLE_CALL
);
2257 imm_use_iterator use_iter
;
2258 use_operand_p use_p
;
2259 FOR_EACH_IMM_USE_FAST (use_p
, use_iter
, val
)
2260 if (gimple_code (USE_STMT (use_p
)) == GIMPLE_CALL
2261 && gimple_call_fndecl (USE_STMT (use_p
)) == chkp_ret_bnd_fndecl
)
2262 return as_a
<gcall
*> (USE_STMT (use_p
));
2267 /* Check the next parameter for the given PARM is bounds
2268 and return it's default SSA_NAME (create if required). */
2270 chkp_get_next_bounds_parm (tree parm
)
2272 tree bounds
= TREE_CHAIN (parm
);
2273 gcc_assert (POINTER_BOUNDS_P (bounds
));
2274 bounds
= ssa_default_def (cfun
, bounds
);
2277 bounds
= make_ssa_name (TREE_CHAIN (parm
), gimple_build_nop ());
2278 set_ssa_default_def (cfun
, TREE_CHAIN (parm
), bounds
);
2283 /* Return bounds to be used for input argument PARM. */
2285 chkp_get_bound_for_parm (tree parm
)
2287 tree decl
= SSA_NAME_VAR (parm
);
2290 gcc_assert (TREE_CODE (decl
) == PARM_DECL
);
2292 bounds
= chkp_get_registered_bounds (parm
);
2295 bounds
= chkp_get_registered_bounds (decl
);
2299 tree orig_decl
= cgraph_node::get (cfun
->decl
)->orig_decl
;
2301 /* For static chain param we return zero bounds
2302 because currently we do not check dereferences
2304 if (cfun
->static_chain_decl
== decl
)
2305 bounds
= chkp_get_zero_bounds ();
2306 /* If non instrumented runtime is used then it may be useful
2307 to use zero bounds for input arguments of main
2309 else if (flag_chkp_zero_input_bounds_for_main
2310 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl
)),
2312 bounds
= chkp_get_zero_bounds ();
2313 else if (BOUNDED_P (parm
))
2315 bounds
= chkp_get_next_bounds_parm (decl
);
2316 bounds
= chkp_maybe_copy_and_register_bounds (decl
, bounds
);
2318 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2320 fprintf (dump_file
, "Built arg bounds (");
2321 print_generic_expr (dump_file
, bounds
, 0);
2322 fprintf (dump_file
, ") for arg: ");
2323 print_node (dump_file
, "", decl
, 0);
2327 bounds
= chkp_get_zero_bounds ();
2330 if (!chkp_get_registered_bounds (parm
))
2331 bounds
= chkp_maybe_copy_and_register_bounds (parm
, bounds
);
2333 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2335 fprintf (dump_file
, "Using bounds ");
2336 print_generic_expr (dump_file
, bounds
, 0);
2337 fprintf (dump_file
, " for parm ");
2338 print_generic_expr (dump_file
, parm
, 0);
2339 fprintf (dump_file
, " of type ");
2340 print_generic_expr (dump_file
, TREE_TYPE (parm
), 0);
2341 fprintf (dump_file
, ".\n");
2347 /* Build and return CALL_EXPR for bndstx builtin with specified
2350 chkp_build_bndldx_call (tree addr
, tree ptr
)
2352 tree fn
= build1 (ADDR_EXPR
,
2353 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl
)),
2354 chkp_bndldx_fndecl
);
2355 tree call
= build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl
)),
2357 CALL_WITH_BOUNDS_P (call
) = true;
2361 /* Insert code to load bounds for PTR located by ADDR.
2362 Code is inserted after position pointed by GSI.
2363 Loaded bounds are returned. */
2365 chkp_build_bndldx (tree addr
, tree ptr
, gimple_stmt_iterator
*gsi
)
2373 addr
= chkp_force_gimple_call_op (addr
, &seq
);
2374 ptr
= chkp_force_gimple_call_op (ptr
, &seq
);
2376 stmt
= gimple_build_call (chkp_bndldx_fndecl
, 2, addr
, ptr
);
2377 chkp_mark_stmt (stmt
);
2378 bounds
= chkp_get_tmp_reg (stmt
);
2379 gimple_call_set_lhs (stmt
, bounds
);
2381 gimple_seq_add_stmt (&seq
, stmt
);
2383 gsi_insert_seq_after (gsi
, seq
, GSI_CONTINUE_LINKING
);
2385 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2387 fprintf (dump_file
, "Generated bndldx for pointer ");
2388 print_generic_expr (dump_file
, ptr
, 0);
2389 fprintf (dump_file
, ": ");
2390 print_gimple_stmt (dump_file
, stmt
, 0, TDF_VOPS
|TDF_MEMSYMS
);
2396 /* Build and return CALL_EXPR for bndstx builtin with specified
2399 chkp_build_bndstx_call (tree addr
, tree ptr
, tree bounds
)
2401 tree fn
= build1 (ADDR_EXPR
,
2402 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl
)),
2403 chkp_bndstx_fndecl
);
2404 tree call
= build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl
)),
2405 fn
, 3, ptr
, bounds
, addr
);
2406 CALL_WITH_BOUNDS_P (call
) = true;
2410 /* Insert code to store BOUNDS for PTR stored by ADDR.
2411 New statements are inserted after position pointed
2414 chkp_build_bndstx (tree addr
, tree ptr
, tree bounds
,
2415 gimple_stmt_iterator
*gsi
)
2422 addr
= chkp_force_gimple_call_op (addr
, &seq
);
2423 ptr
= chkp_force_gimple_call_op (ptr
, &seq
);
2425 stmt
= gimple_build_call (chkp_bndstx_fndecl
, 3, ptr
, bounds
, addr
);
2426 chkp_mark_stmt (stmt
);
2427 gimple_call_set_with_bounds (stmt
, true);
2429 gimple_seq_add_stmt (&seq
, stmt
);
2431 gsi_insert_seq_after (gsi
, seq
, GSI_CONTINUE_LINKING
);
2433 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2435 fprintf (dump_file
, "Generated bndstx for pointer store ");
2436 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
), 0, TDF_VOPS
|TDF_MEMSYMS
);
2437 print_gimple_stmt (dump_file
, stmt
, 2, TDF_VOPS
|TDF_MEMSYMS
);
2441 /* Compute bounds for pointer NODE which was assigned in
2442 assignment statement ASSIGN. Return computed bounds. */
2444 chkp_compute_bounds_for_assignment (tree node
, gimple assign
)
2446 enum tree_code rhs_code
= gimple_assign_rhs_code (assign
);
2447 tree rhs1
= gimple_assign_rhs1 (assign
);
2448 tree bounds
= NULL_TREE
;
2449 gimple_stmt_iterator iter
= gsi_for_stmt (assign
);
2451 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2453 fprintf (dump_file
, "Computing bounds for assignment: ");
2454 print_gimple_stmt (dump_file
, assign
, 0, TDF_VOPS
|TDF_MEMSYMS
);
2460 case TARGET_MEM_REF
:
2463 /* We need to load bounds from the bounds table. */
2464 bounds
= chkp_find_bounds_loaded (node
, rhs1
, &iter
);
2470 case POINTER_PLUS_EXPR
:
2474 /* Bounds are just propagated from RHS. */
2475 bounds
= chkp_find_bounds (rhs1
, &iter
);
2478 case VIEW_CONVERT_EXPR
:
2479 /* Bounds are just propagated from RHS. */
2480 bounds
= chkp_find_bounds (TREE_OPERAND (rhs1
, 0), &iter
);
2484 if (BOUNDED_P (rhs1
))
2486 /* We need to load bounds from the bounds table. */
2487 bounds
= chkp_build_bndldx (chkp_build_addr_expr (rhs1
),
2489 TREE_ADDRESSABLE (rhs1
) = 1;
2492 bounds
= chkp_get_nonpointer_load_bounds ();
2501 tree rhs2
= gimple_assign_rhs2 (assign
);
2502 tree bnd1
= chkp_find_bounds (rhs1
, &iter
);
2503 tree bnd2
= chkp_find_bounds (rhs2
, &iter
);
2505 /* First we try to check types of operands. If it
2506 does not help then look at bound values.
2508 If some bounds are incomplete and other are
2509 not proven to be valid (i.e. also incomplete
2510 or invalid because value is not pointer) then
2511 resulting value is incomplete and will be
2512 recomputed later in chkp_finish_incomplete_bounds. */
2513 if (BOUNDED_P (rhs1
)
2514 && !BOUNDED_P (rhs2
))
2516 else if (BOUNDED_P (rhs2
)
2517 && !BOUNDED_P (rhs1
)
2518 && rhs_code
!= MINUS_EXPR
)
2520 else if (chkp_incomplete_bounds (bnd1
))
2521 if (chkp_valid_bounds (bnd2
) && rhs_code
!= MINUS_EXPR
2522 && !chkp_incomplete_bounds (bnd2
))
2525 bounds
= incomplete_bounds
;
2526 else if (chkp_incomplete_bounds (bnd2
))
2527 if (chkp_valid_bounds (bnd1
)
2528 && !chkp_incomplete_bounds (bnd1
))
2531 bounds
= incomplete_bounds
;
2532 else if (!chkp_valid_bounds (bnd1
))
2533 if (chkp_valid_bounds (bnd2
) && rhs_code
!= MINUS_EXPR
)
2535 else if (bnd2
== chkp_get_zero_bounds ())
2539 else if (!chkp_valid_bounds (bnd2
))
2542 /* Seems both operands may have valid bounds
2543 (e.g. pointer minus pointer). In such case
2544 use default invalid op bounds. */
2545 bounds
= chkp_get_invalid_op_bounds ();
2563 case TRUNC_DIV_EXPR
:
2564 case FLOOR_DIV_EXPR
:
2566 case ROUND_DIV_EXPR
:
2567 case TRUNC_MOD_EXPR
:
2568 case FLOOR_MOD_EXPR
:
2570 case ROUND_MOD_EXPR
:
2571 case EXACT_DIV_EXPR
:
2572 case FIX_TRUNC_EXPR
:
2576 /* No valid bounds may be produced by these exprs. */
2577 bounds
= chkp_get_invalid_op_bounds ();
2582 tree val1
= gimple_assign_rhs2 (assign
);
2583 tree val2
= gimple_assign_rhs3 (assign
);
2584 tree bnd1
= chkp_find_bounds (val1
, &iter
);
2585 tree bnd2
= chkp_find_bounds (val2
, &iter
);
2588 if (chkp_incomplete_bounds (bnd1
) || chkp_incomplete_bounds (bnd2
))
2589 bounds
= incomplete_bounds
;
2590 else if (bnd1
== bnd2
)
2594 rhs1
= unshare_expr (rhs1
);
2596 bounds
= chkp_get_tmp_reg (assign
);
2597 stmt
= gimple_build_assign (bounds
, COND_EXPR
, rhs1
, bnd1
, bnd2
);
2598 gsi_insert_after (&iter
, stmt
, GSI_SAME_STMT
);
2600 if (!chkp_valid_bounds (bnd1
) && !chkp_valid_bounds (bnd2
))
2601 chkp_mark_invalid_bounds (bounds
);
2609 tree rhs2
= gimple_assign_rhs2 (assign
);
2610 tree bnd1
= chkp_find_bounds (rhs1
, &iter
);
2611 tree bnd2
= chkp_find_bounds (rhs2
, &iter
);
2613 if (chkp_incomplete_bounds (bnd1
) || chkp_incomplete_bounds (bnd2
))
2614 bounds
= incomplete_bounds
;
2615 else if (bnd1
== bnd2
)
2620 tree cond
= build2 (rhs_code
== MAX_EXPR
? GT_EXPR
: LT_EXPR
,
2621 boolean_type_node
, rhs1
, rhs2
);
2622 bounds
= chkp_get_tmp_reg (assign
);
2623 stmt
= gimple_build_assign (bounds
, COND_EXPR
, cond
, bnd1
, bnd2
);
2625 gsi_insert_after (&iter
, stmt
, GSI_SAME_STMT
);
2627 if (!chkp_valid_bounds (bnd1
) && !chkp_valid_bounds (bnd2
))
2628 chkp_mark_invalid_bounds (bounds
);
2634 bounds
= chkp_get_zero_bounds ();
2635 warning (0, "pointer bounds were lost due to unexpected expression %s",
2636 get_tree_code_name (rhs_code
));
2639 gcc_assert (bounds
);
2642 bounds
= chkp_maybe_copy_and_register_bounds (node
, bounds
);
2647 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2649 There are just few statement codes allowed: NOP (for default ssa names),
2650 ASSIGN, CALL, PHI, ASM.
2652 Return computed bounds. */
2654 chkp_get_bounds_by_definition (tree node
, gimple def_stmt
,
2655 gphi_iterator
*iter
)
2658 enum gimple_code code
= gimple_code (def_stmt
);
2661 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2663 fprintf (dump_file
, "Searching for bounds for node: ");
2664 print_generic_expr (dump_file
, node
, 0);
2666 fprintf (dump_file
, " using its definition: ");
2667 print_gimple_stmt (dump_file
, def_stmt
, 0, TDF_VOPS
|TDF_MEMSYMS
);
2673 var
= SSA_NAME_VAR (node
);
2674 switch (TREE_CODE (var
))
2677 bounds
= chkp_get_bound_for_parm (node
);
2681 /* For uninitialized pointers use none bounds. */
2682 bounds
= chkp_get_none_bounds ();
2683 bounds
= chkp_maybe_copy_and_register_bounds (node
, bounds
);
2690 gcc_assert (TREE_CODE (TREE_TYPE (node
)) == REFERENCE_TYPE
);
2692 base_type
= TREE_TYPE (TREE_TYPE (node
));
2694 gcc_assert (TYPE_SIZE (base_type
)
2695 && TREE_CODE (TYPE_SIZE (base_type
)) == INTEGER_CST
2696 && tree_to_uhwi (TYPE_SIZE (base_type
)) != 0);
2698 bounds
= chkp_make_bounds (node
, TYPE_SIZE_UNIT (base_type
),
2700 bounds
= chkp_maybe_copy_and_register_bounds (node
, bounds
);
2705 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2707 fprintf (dump_file
, "Unexpected var with no definition\n");
2708 print_generic_expr (dump_file
, var
, 0);
2710 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2711 get_tree_code_name (TREE_CODE (var
)));
2716 bounds
= chkp_compute_bounds_for_assignment (node
, def_stmt
);
2720 bounds
= chkp_build_returned_bound (as_a
<gcall
*> (def_stmt
));
2724 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node
))
2725 if (SSA_NAME_VAR (node
))
2726 var
= chkp_get_bounds_var (SSA_NAME_VAR (node
));
2728 var
= make_temp_ssa_name (pointer_bounds_type_node
,
2729 gimple_build_nop (),
2730 CHKP_BOUND_TMP_NAME
);
2732 var
= chkp_get_tmp_var ();
2733 stmt
= create_phi_node (var
, gimple_bb (def_stmt
));
2734 bounds
= gimple_phi_result (stmt
);
2735 *iter
= gsi_for_phi (stmt
);
2737 bounds
= chkp_maybe_copy_and_register_bounds (node
, bounds
);
2739 /* Created bounds do not have all phi args computed and
2740 therefore we do not know if there is a valid source
2741 of bounds for that node. Therefore we mark bounds
2742 as incomplete and then recompute them when all phi
2743 args are computed. */
2744 chkp_register_incomplete_bounds (bounds
, node
);
2748 bounds
= chkp_get_zero_bounds ();
2749 bounds
= chkp_maybe_copy_and_register_bounds (node
, bounds
);
2753 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2754 gimple_code_name
[code
]);
2760 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2762 chkp_build_make_bounds_call (tree lower_bound
, tree size
)
2764 tree call
= build1 (ADDR_EXPR
,
2765 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl
)),
2767 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl
)),
2768 call
, 2, lower_bound
, size
);
/* Create static bounds var for specified OBJ which is
   either VAR_DECL or string constant.  The returned decl is
   cached so repeated requests for the same OBJ (or for decls
   sharing an assembler name) reuse one bounds var.  */
static tree
chkp_make_static_bounds (tree obj)
{
  static int string_id = 1;
  static int var_id = 1;
  tree *slot;
  const char *var_name;
  char *bnd_var_name;
  tree bnd_var;

  /* First check if we already have required var.  */
  if (chkp_static_var_bounds)
    {
      /* For vars we use assembler name as a key in
	 chkp_static_var_bounds map.  It allows to
	 avoid duplicating bound vars for decls
	 sharing assembler name.  */
      if (TREE_CODE (obj) == VAR_DECL)
	{
	  tree name = DECL_ASSEMBLER_NAME (obj);
	  slot = chkp_static_var_bounds->get (name);
	  if (slot)
	    return *slot;
	}
      else
	{
	  slot = chkp_static_var_bounds->get (obj);
	  if (slot)
	    return *slot;
	}
    }

  /* Build decl for bounds var.  */
  if (TREE_CODE (obj) == VAR_DECL)
    {
      if (DECL_IGNORED_P (obj))
	{
	  /* Nameless (debug-ignored) decl: synthesize a numbered name.  */
	  bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
	  sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
	}
      else
	{
	  var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));

	  /* For hidden symbols we want to skip first '*' char.  */
	  if (*var_name == '*')
	    var_name++;

	  bnd_var_name = (char *) xmalloc (strlen (var_name)
					   + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
	  strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
	  strcat (bnd_var_name, var_name);
	}

      bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			    get_identifier (bnd_var_name),
			    pointer_bounds_type_node);

      /* Address of the obj will be used as lower bound.  */
      TREE_ADDRESSABLE (obj) = 1;
    }
  else
    {
      bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
      sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);

      bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			    get_identifier (bnd_var_name),
			    pointer_bounds_type_node);
    }

  /* get_identifier copied the string into the identifier table;
     release our scratch buffer to avoid leaking it on every call.  */
  free (bnd_var_name);

  TREE_PUBLIC (bnd_var) = 0;
  TREE_USED (bnd_var) = 1;
  TREE_READONLY (bnd_var) = 0;
  TREE_STATIC (bnd_var) = 1;
  TREE_ADDRESSABLE (bnd_var) = 0;
  DECL_ARTIFICIAL (bnd_var) = 1;
  DECL_COMMON (bnd_var) = 1;
  DECL_COMDAT (bnd_var) = 1;
  DECL_READ_P (bnd_var) = 1;
  DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
  /* Force output similar to constant bounds.
     See chkp_make_static_const_bounds.  */
  varpool_node::get_create (bnd_var)->force_output = 1;
  /* Mark symbol as requiring bounds initialization.  */
  varpool_node::get_create (bnd_var)->need_bounds_init = 1;
  varpool_node::finalize_decl (bnd_var);

  /* Add created var to the map to use it for other references
     to obj.  */
  if (!chkp_static_var_bounds)
    chkp_static_var_bounds = new hash_map<tree, tree>;

  if (TREE_CODE (obj) == VAR_DECL)
    {
      tree name = DECL_ASSEMBLER_NAME (obj);
      chkp_static_var_bounds->put (name, bnd_var);
    }
  else
    chkp_static_var_bounds->put (obj, bnd_var);

  return bnd_var;
}
2877 /* When var has incomplete type we cannot get size to
2878 compute its bounds. In such cases we use checker
2879 builtin call which determines object size at runtime. */
2881 chkp_generate_extern_var_bounds (tree var
)
2883 tree bounds
, size_reloc
, lb
, size
, max_size
, cond
;
2884 gimple_stmt_iterator gsi
;
2885 gimple_seq seq
= NULL
;
2888 /* If instrumentation is not enabled for vars having
2889 incomplete type then just return zero bounds to avoid
2890 checks for this var. */
2891 if (!flag_chkp_incomplete_type
)
2892 return chkp_get_zero_bounds ();
2894 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2896 fprintf (dump_file
, "Generating bounds for extern symbol '");
2897 print_generic_expr (dump_file
, var
, 0);
2898 fprintf (dump_file
, "'\n");
2901 stmt
= gimple_build_call (chkp_sizeof_fndecl
, 1, var
);
2903 size_reloc
= create_tmp_reg (chkp_uintptr_type
, CHKP_SIZE_TMP_NAME
);
2904 gimple_call_set_lhs (stmt
, size_reloc
);
2906 gimple_seq_add_stmt (&seq
, stmt
);
2908 lb
= chkp_build_addr_expr (var
);
2909 size
= make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());
2911 if (flag_chkp_zero_dynamic_size_as_infinite
)
2913 /* We should check that size relocation was resolved.
2914 If it was not then use maximum possible size for the var. */
2915 max_size
= build2 (MINUS_EXPR
, chkp_uintptr_type
, integer_zero_node
,
2916 fold_convert (chkp_uintptr_type
, lb
));
2917 max_size
= chkp_force_gimple_call_op (max_size
, &seq
);
2919 cond
= build2 (NE_EXPR
, boolean_type_node
,
2920 size_reloc
, integer_zero_node
);
2921 stmt
= gimple_build_assign (size
, COND_EXPR
, cond
, size_reloc
, max_size
);
2922 gimple_seq_add_stmt (&seq
, stmt
);
2926 stmt
= gimple_build_assign (size
, size_reloc
);
2927 gimple_seq_add_stmt (&seq
, stmt
);
2930 gsi
= gsi_start_bb (chkp_get_entry_block ());
2931 gsi_insert_seq_after (&gsi
, seq
, GSI_CONTINUE_LINKING
);
2933 bounds
= chkp_make_bounds (lb
, size
, &gsi
, true);
2938 /* Return 1 if TYPE has fields with zero size or fields
2939 marked with chkp_variable_size attribute. */
2941 chkp_variable_size_type (tree type
)
2946 if (RECORD_OR_UNION_TYPE_P (type
))
2947 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
2949 if (TREE_CODE (field
) == FIELD_DECL
)
2951 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field
))
2952 || chkp_variable_size_type (TREE_TYPE (field
));
2955 res
= !TYPE_SIZE (type
)
2956 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
2957 || tree_to_uhwi (TYPE_SIZE (type
)) == 0;
2962 /* Compute and return bounds for address of DECL which is
2963 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
2965 chkp_get_bounds_for_decl_addr (tree decl
)
2969 gcc_assert (TREE_CODE (decl
) == VAR_DECL
2970 || TREE_CODE (decl
) == PARM_DECL
2971 || TREE_CODE (decl
) == RESULT_DECL
);
2973 bounds
= chkp_get_registered_addr_bounds (decl
);
2978 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2980 fprintf (dump_file
, "Building bounds for address of decl ");
2981 print_generic_expr (dump_file
, decl
, 0);
2982 fprintf (dump_file
, "\n");
2985 /* Use zero bounds if size is unknown and checks for
2986 unknown sizes are restricted. */
2987 if ((!DECL_SIZE (decl
)
2988 || (chkp_variable_size_type (TREE_TYPE (decl
))
2989 && (TREE_STATIC (decl
)
2990 || DECL_EXTERNAL (decl
)
2991 || TREE_PUBLIC (decl
))))
2992 && !flag_chkp_incomplete_type
)
2993 return chkp_get_zero_bounds ();
2995 if (flag_chkp_use_static_bounds
2996 && TREE_CODE (decl
) == VAR_DECL
2997 && (TREE_STATIC (decl
)
2998 || DECL_EXTERNAL (decl
)
2999 || TREE_PUBLIC (decl
))
3000 && !DECL_THREAD_LOCAL_P (decl
))
3002 tree bnd_var
= chkp_make_static_bounds (decl
);
3003 gimple_stmt_iterator gsi
= gsi_start_bb (chkp_get_entry_block ());
3006 bounds
= chkp_get_tmp_reg (gimple_build_nop ());
3007 stmt
= gimple_build_assign (bounds
, bnd_var
);
3008 gsi_insert_before (&gsi
, stmt
, GSI_SAME_STMT
);
3010 else if (!DECL_SIZE (decl
)
3011 || (chkp_variable_size_type (TREE_TYPE (decl
))
3012 && (TREE_STATIC (decl
)
3013 || DECL_EXTERNAL (decl
)
3014 || TREE_PUBLIC (decl
))))
3016 gcc_assert (TREE_CODE (decl
) == VAR_DECL
);
3017 bounds
= chkp_generate_extern_var_bounds (decl
);
3021 tree lb
= chkp_build_addr_expr (decl
);
3022 bounds
= chkp_make_bounds (lb
, DECL_SIZE_UNIT (decl
), NULL
, false);
3028 /* Compute and return bounds for constant string. */
3030 chkp_get_bounds_for_string_cst (tree cst
)
3036 gcc_assert (TREE_CODE (cst
) == STRING_CST
);
3038 bounds
= chkp_get_registered_bounds (cst
);
3043 if ((flag_chkp_use_static_bounds
&& flag_chkp_use_static_const_bounds
)
3044 || flag_chkp_use_static_const_bounds
> 0)
3046 tree bnd_var
= chkp_make_static_bounds (cst
);
3047 gimple_stmt_iterator gsi
= gsi_start_bb (chkp_get_entry_block ());
3050 bounds
= chkp_get_tmp_reg (gimple_build_nop ());
3051 stmt
= gimple_build_assign (bounds
, bnd_var
);
3052 gsi_insert_before (&gsi
, stmt
, GSI_SAME_STMT
);
3056 lb
= chkp_build_addr_expr (cst
);
3057 size
= build_int_cst (chkp_uintptr_type
, TREE_STRING_LENGTH (cst
));
3058 bounds
= chkp_make_bounds (lb
, size
, NULL
, false);
3061 bounds
= chkp_maybe_copy_and_register_bounds (cst
, bounds
);
3066 /* Generate code to instersect bounds BOUNDS1 and BOUNDS2 and
3067 return the result. if ITER is not NULL then Code is inserted
3068 before position pointed by ITER. Otherwise code is added to
3071 chkp_intersect_bounds (tree bounds1
, tree bounds2
, gimple_stmt_iterator
*iter
)
3073 if (!bounds1
|| bounds1
== chkp_get_zero_bounds ())
3074 return bounds2
? bounds2
: bounds1
;
3075 else if (!bounds2
|| bounds2
== chkp_get_zero_bounds ())
3085 stmt
= gimple_build_call (chkp_intersect_fndecl
, 2, bounds1
, bounds2
);
3086 chkp_mark_stmt (stmt
);
3088 bounds
= chkp_get_tmp_reg (stmt
);
3089 gimple_call_set_lhs (stmt
, bounds
);
3091 gimple_seq_add_stmt (&seq
, stmt
);
3093 /* We are probably doing narrowing for constant expression.
3094 In such case iter may be undefined. */
3097 gimple_stmt_iterator gsi
= gsi_last_bb (chkp_get_entry_block ());
3099 gsi_insert_seq_after (iter
, seq
, GSI_SAME_STMT
);
3102 gsi_insert_seq_before (iter
, seq
, GSI_SAME_STMT
);
3104 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3106 fprintf (dump_file
, "Bounds intersection: ");
3107 print_gimple_stmt (dump_file
, stmt
, 0, TDF_VOPS
|TDF_MEMSYMS
);
3108 fprintf (dump_file
, " inserted before statement: ");
3109 print_gimple_stmt (dump_file
, gsi_stmt (*iter
), 0,
3110 TDF_VOPS
|TDF_MEMSYMS
);
3117 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3120 chkp_may_narrow_to_field (tree field
)
3122 return DECL_SIZE (field
) && TREE_CODE (DECL_SIZE (field
)) == INTEGER_CST
3123 && tree_to_uhwi (DECL_SIZE (field
)) != 0
3124 && (!DECL_FIELD_OFFSET (field
)
3125 || TREE_CODE (DECL_FIELD_OFFSET (field
)) == INTEGER_CST
)
3126 && (!DECL_FIELD_BIT_OFFSET (field
)
3127 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field
)) == INTEGER_CST
)
3128 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field
))
3129 && !chkp_variable_size_type (TREE_TYPE (field
));
3132 /* Return 1 if bounds for FIELD should be narrowed to
3133 field's own size. */
3135 chkp_narrow_bounds_for_field (tree field
)
3138 HOST_WIDE_INT bit_offs
;
3140 if (!chkp_may_narrow_to_field (field
))
3143 /* Accesse to compiler generated fields should not cause
3144 bounds narrowing. */
3145 if (DECL_ARTIFICIAL (field
))
3148 offs
= tree_to_uhwi (DECL_FIELD_OFFSET (field
));
3149 bit_offs
= tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
));
3151 return (flag_chkp_narrow_bounds
3152 && (flag_chkp_first_field_has_own_bounds
3157 /* Perform narrowing for BOUNDS using bounds computed for field
3158 access COMPONENT. ITER meaning is the same as for
3159 chkp_intersect_bounds. */
3161 chkp_narrow_bounds_to_field (tree bounds
, tree component
,
3162 gimple_stmt_iterator
*iter
)
3164 tree field
= TREE_OPERAND (component
, 1);
3165 tree size
= DECL_SIZE_UNIT (field
);
3166 tree field_ptr
= chkp_build_addr_expr (component
);
3169 field_bounds
= chkp_make_bounds (field_ptr
, size
, iter
, false);
3171 return chkp_intersect_bounds (field_bounds
, bounds
, iter
);
3174 /* Parse field or array access NODE.
3176 PTR ouput parameter holds a pointer to the outermost
3179 BITFIELD output parameter is set to 1 if bitfield is
3180 accessed and to 0 otherwise. If it is 1 then ELT holds
3181 outer component for accessed bit field.
3183 SAFE outer parameter is set to 1 if access is safe and
3184 checks are not required.
3186 BOUNDS outer parameter holds bounds to be used to check
3187 access (may be NULL).
3189 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3190 innermost accessed component. */
3192 chkp_parse_array_and_component_ref (tree node
, tree
*ptr
,
3193 tree
*elt
, bool *safe
,
3196 gimple_stmt_iterator
*iter
,
3197 bool innermost_bounds
)
3199 tree comp_to_narrow
= NULL_TREE
;
3200 tree last_comp
= NULL_TREE
;
3201 bool array_ref_found
= false;
3207 /* Compute tree height for expression. */
3210 while (TREE_CODE (var
) == COMPONENT_REF
3211 || TREE_CODE (var
) == ARRAY_REF
3212 || TREE_CODE (var
) == VIEW_CONVERT_EXPR
)
3214 var
= TREE_OPERAND (var
, 0);
3218 gcc_assert (len
> 1);
3220 /* It is more convenient for us to scan left-to-right,
3221 so walk tree again and put all node to nodes vector
3222 in reversed order. */
3223 nodes
= XALLOCAVEC (tree
, len
);
3224 nodes
[len
- 1] = node
;
3225 for (i
= len
- 2; i
>= 0; i
--)
3226 nodes
[i
] = TREE_OPERAND (nodes
[i
+ 1], 0);
3231 *bitfield
= (TREE_CODE (node
) == COMPONENT_REF
3232 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node
, 1)));
3233 /* To get bitfield address we will need outer elemnt. */
3235 *elt
= nodes
[len
- 2];
3239 /* If we have indirection in expression then compute
3240 outermost structure bounds. Computed bounds may be
3242 if (TREE_CODE (nodes
[0]) == MEM_REF
|| INDIRECT_REF_P (nodes
[0]))
3245 *ptr
= TREE_OPERAND (nodes
[0], 0);
3247 *bounds
= chkp_find_bounds (*ptr
, iter
);
3251 gcc_assert (TREE_CODE (var
) == VAR_DECL
3252 || TREE_CODE (var
) == PARM_DECL
3253 || TREE_CODE (var
) == RESULT_DECL
3254 || TREE_CODE (var
) == STRING_CST
3255 || TREE_CODE (var
) == SSA_NAME
);
3257 *ptr
= chkp_build_addr_expr (var
);
3260 /* In this loop we are trying to find a field access
3261 requiring narrowing. There are two simple rules
3263 1. Leftmost array_ref is chosen if any.
3264 2. Rightmost suitable component_ref is chosen if innermost
3265 bounds are required and no array_ref exists. */
3266 for (i
= 1; i
< len
; i
++)
3270 if (TREE_CODE (var
) == ARRAY_REF
)
3273 array_ref_found
= true;
3274 if (flag_chkp_narrow_bounds
3275 && !flag_chkp_narrow_to_innermost_arrray
3277 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp
, 1))))
3279 comp_to_narrow
= last_comp
;
3283 else if (TREE_CODE (var
) == COMPONENT_REF
)
3285 tree field
= TREE_OPERAND (var
, 1);
3287 if (innermost_bounds
3289 && chkp_narrow_bounds_for_field (field
))
3290 comp_to_narrow
= var
;
3293 if (flag_chkp_narrow_bounds
3294 && flag_chkp_narrow_to_innermost_arrray
3295 && TREE_CODE (TREE_TYPE (field
)) == ARRAY_TYPE
)
3298 *bounds
= chkp_narrow_bounds_to_field (*bounds
, var
, iter
);
3299 comp_to_narrow
= NULL
;
3302 else if (TREE_CODE (var
) == VIEW_CONVERT_EXPR
)
3303 /* Nothing to do for it. */
3309 if (comp_to_narrow
&& DECL_SIZE (TREE_OPERAND (comp_to_narrow
, 1)) && bounds
)
3310 *bounds
= chkp_narrow_bounds_to_field (*bounds
, comp_to_narrow
, iter
);
3312 if (innermost_bounds
&& bounds
&& !*bounds
)
3313 *bounds
= chkp_find_bounds (*ptr
, iter
);
3316 /* Compute and return bounds for address of OBJ. */
3318 chkp_make_addressed_object_bounds (tree obj
, gimple_stmt_iterator
*iter
)
3320 tree bounds
= chkp_get_registered_addr_bounds (obj
);
3325 switch (TREE_CODE (obj
))
3330 bounds
= chkp_get_bounds_for_decl_addr (obj
);
3334 bounds
= chkp_get_bounds_for_string_cst (obj
);
3345 chkp_parse_array_and_component_ref (obj
, &ptr
, &elt
, &safe
,
3346 &bitfield
, &bounds
, iter
, true);
3348 gcc_assert (bounds
);
3354 bounds
= chkp_get_zero_bounds ();
3358 bounds
= chkp_find_bounds (TREE_OPERAND (obj
, 0), iter
);
3363 bounds
= chkp_make_addressed_object_bounds (TREE_OPERAND (obj
, 0), iter
);
3367 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3369 fprintf (dump_file
, "chkp_make_addressed_object_bounds: "
3370 "unexpected object of type %s\n",
3371 get_tree_code_name (TREE_CODE (obj
)));
3372 print_node (dump_file
, "", obj
, 0);
3374 internal_error ("chkp_make_addressed_object_bounds: "
3375 "Unexpected tree code %s",
3376 get_tree_code_name (TREE_CODE (obj
)));
3379 chkp_register_addr_bounds (obj
, bounds
);
3384 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3385 to compute bounds if required. Computed bounds should be available at
3386 position pointed by ITER.
3388 If PTR_SRC is NULL_TREE then pointer definition is identified.
3390 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3391 PTR. If PTR is a any memory reference then ITER points to a statement
3392 after which bndldx will be inserterd. In both cases ITER will be updated
3393 to point to the inserted bndldx statement. */
3396 chkp_find_bounds_1 (tree ptr
, tree ptr_src
, gimple_stmt_iterator
*iter
)
3398 tree addr
= NULL_TREE
;
3399 tree bounds
= NULL_TREE
;
3404 bounds
= chkp_get_registered_bounds (ptr_src
);
3409 switch (TREE_CODE (ptr_src
))
3413 if (BOUNDED_P (ptr_src
))
3414 if (TREE_CODE (ptr
) == VAR_DECL
&& DECL_REGISTER (ptr
))
3415 bounds
= chkp_get_zero_bounds ();
3418 addr
= chkp_build_addr_expr (ptr_src
);
3419 bounds
= chkp_build_bndldx (addr
, ptr
, iter
);
3422 bounds
= chkp_get_nonpointer_load_bounds ();
3427 addr
= get_base_address (ptr_src
);
3429 || TREE_CODE (addr
) == MEM_REF
3430 || TREE_CODE (addr
) == TARGET_MEM_REF
)
3432 if (BOUNDED_P (ptr_src
))
3433 if (TREE_CODE (ptr
) == VAR_DECL
&& DECL_REGISTER (ptr
))
3434 bounds
= chkp_get_zero_bounds ();
3437 addr
= chkp_build_addr_expr (ptr_src
);
3438 bounds
= chkp_build_bndldx (addr
, ptr
, iter
);
3441 bounds
= chkp_get_nonpointer_load_bounds ();
3445 gcc_assert (TREE_CODE (addr
) == SSA_NAME
);
3446 bounds
= chkp_find_bounds (addr
, iter
);
3452 bounds
= chkp_get_bound_for_parm (ptr_src
);
3455 case TARGET_MEM_REF
:
3456 addr
= chkp_build_addr_expr (ptr_src
);
3457 bounds
= chkp_build_bndldx (addr
, ptr
, iter
);
3461 bounds
= chkp_get_registered_bounds (ptr_src
);
3464 gimple def_stmt
= SSA_NAME_DEF_STMT (ptr_src
);
3465 gphi_iterator phi_iter
;
3467 bounds
= chkp_get_bounds_by_definition (ptr_src
, def_stmt
, &phi_iter
);
3469 gcc_assert (bounds
);
3471 if (gphi
*def_phi
= dyn_cast
<gphi
*> (def_stmt
))
3475 for (i
= 0; i
< gimple_phi_num_args (def_phi
); i
++)
3477 tree arg
= gimple_phi_arg_def (def_phi
, i
);
3481 arg_bnd
= chkp_find_bounds (arg
, NULL
);
3483 /* chkp_get_bounds_by_definition created new phi
3484 statement and phi_iter points to it.
3486 Previous call to chkp_find_bounds could create
3487 new basic block and therefore change phi statement
3488 phi_iter points to. */
3489 phi_bnd
= phi_iter
.phi ();
3491 add_phi_arg (phi_bnd
, arg_bnd
,
3492 gimple_phi_arg_edge (def_phi
, i
),
3496 /* If all bound phi nodes have their arg computed
3497 then we may finish its computation. See
3498 chkp_finish_incomplete_bounds for more details. */
3499 if (chkp_may_finish_incomplete_bounds ())
3500 chkp_finish_incomplete_bounds ();
3503 gcc_assert (bounds
== chkp_get_registered_bounds (ptr_src
)
3504 || chkp_incomplete_bounds (bounds
));
3509 bounds
= chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src
, 0), iter
);
3513 if (integer_zerop (ptr_src
))
3514 bounds
= chkp_get_none_bounds ();
3516 bounds
= chkp_get_invalid_op_bounds ();
3520 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3522 fprintf (dump_file
, "chkp_find_bounds: unexpected ptr of type %s\n",
3523 get_tree_code_name (TREE_CODE (ptr_src
)));
3524 print_node (dump_file
, "", ptr_src
, 0);
3526 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3527 get_tree_code_name (TREE_CODE (ptr_src
)));
3532 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3534 fprintf (stderr
, "chkp_find_bounds: cannot find bounds for pointer\n");
3535 print_node (dump_file
, "", ptr_src
, 0);
3537 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3543 /* Normal case for bounds search without forced narrowing. */
3545 chkp_find_bounds (tree ptr
, gimple_stmt_iterator
*iter
)
3547 return chkp_find_bounds_1 (ptr
, NULL_TREE
, iter
);
3550 /* Search bounds for pointer PTR loaded from PTR_SRC
3551 by statement *ITER points to. */
3553 chkp_find_bounds_loaded (tree ptr
, tree ptr_src
, gimple_stmt_iterator
*iter
)
3555 return chkp_find_bounds_1 (ptr
, ptr_src
, iter
);
3558 /* Helper function which checks type of RHS and finds all pointers in
3559 it. For each found pointer we build it's accesses in LHS and RHS
3560 objects and then call HANDLER for them. Function is used to copy
3561 or initilize bounds for copied object. */
3563 chkp_walk_pointer_assignments (tree lhs
, tree rhs
, void *arg
,
3564 assign_handler handler
)
3566 tree type
= TREE_TYPE (lhs
);
3568 /* We have nothing to do with clobbers. */
3569 if (TREE_CLOBBER_P (rhs
))
3572 if (BOUNDED_TYPE_P (type
))
3573 handler (lhs
, rhs
, arg
);
3574 else if (RECORD_OR_UNION_TYPE_P (type
))
3578 if (TREE_CODE (rhs
) == CONSTRUCTOR
)
3580 unsigned HOST_WIDE_INT cnt
;
3583 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs
), cnt
, field
, val
)
3585 if (chkp_type_has_pointer (TREE_TYPE (field
)))
3587 tree lhs_field
= chkp_build_component_ref (lhs
, field
);
3588 chkp_walk_pointer_assignments (lhs_field
, val
, arg
, handler
);
3593 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
3594 if (TREE_CODE (field
) == FIELD_DECL
3595 && chkp_type_has_pointer (TREE_TYPE (field
)))
3597 tree rhs_field
= chkp_build_component_ref (rhs
, field
);
3598 tree lhs_field
= chkp_build_component_ref (lhs
, field
);
3599 chkp_walk_pointer_assignments (lhs_field
, rhs_field
, arg
, handler
);
3602 else if (TREE_CODE (type
) == ARRAY_TYPE
)
3604 unsigned HOST_WIDE_INT cur
= 0;
3605 tree maxval
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
3606 tree etype
= TREE_TYPE (type
);
3607 tree esize
= TYPE_SIZE (etype
);
3609 if (TREE_CODE (rhs
) == CONSTRUCTOR
)
3611 unsigned HOST_WIDE_INT cnt
;
3612 tree purp
, val
, lhs_elem
;
3614 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs
), cnt
, purp
, val
)
3616 if (purp
&& TREE_CODE (purp
) == RANGE_EXPR
)
3618 tree lo_index
= TREE_OPERAND (purp
, 0);
3619 tree hi_index
= TREE_OPERAND (purp
, 1);
3621 for (cur
= (unsigned)tree_to_uhwi (lo_index
);
3622 cur
<= (unsigned)tree_to_uhwi (hi_index
);
3625 lhs_elem
= chkp_build_array_ref (lhs
, etype
, esize
, cur
);
3626 chkp_walk_pointer_assignments (lhs_elem
, val
, arg
, handler
);
3633 gcc_assert (TREE_CODE (purp
) == INTEGER_CST
);
3634 cur
= tree_to_uhwi (purp
);
3637 lhs_elem
= chkp_build_array_ref (lhs
, etype
, esize
, cur
++);
3639 chkp_walk_pointer_assignments (lhs_elem
, val
, arg
, handler
);
3643 /* Copy array only when size is known. */
3644 else if (maxval
&& !integer_minus_onep (maxval
))
3645 for (cur
= 0; cur
<= TREE_INT_CST_LOW (maxval
); cur
++)
3647 tree lhs_elem
= chkp_build_array_ref (lhs
, etype
, esize
, cur
);
3648 tree rhs_elem
= chkp_build_array_ref (rhs
, etype
, esize
, cur
);
3649 chkp_walk_pointer_assignments (lhs_elem
, rhs_elem
, arg
, handler
);
3653 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3654 get_tree_code_name (TREE_CODE (type
)));
3657 /* Add code to copy bounds for assignment of RHS to LHS.
3658 ARG is an iterator pointing ne code position. */
3660 chkp_copy_bounds_for_elem (tree lhs
, tree rhs
, void *arg
)
3662 gimple_stmt_iterator
*iter
= (gimple_stmt_iterator
*)arg
;
3663 tree bounds
= chkp_find_bounds (rhs
, iter
);
3664 tree addr
= chkp_build_addr_expr(lhs
);
3666 chkp_build_bndstx (addr
, rhs
, bounds
, iter
);
3669 /* Emit static bound initilizers and size vars. */
3671 chkp_finish_file (void)
3673 struct varpool_node
*node
;
3674 struct chkp_ctor_stmt_list stmts
;
3679 /* Iterate through varpool and generate bounds initialization
3680 constructors for all statically initialized pointers. */
3681 stmts
.avail
= MAX_STMTS_IN_STATIC_CHKP_CTOR
;
3683 FOR_EACH_VARIABLE (node
)
3684 /* Check that var is actually emitted and we need and may initialize
3686 if (node
->need_bounds_init
3687 && !POINTER_BOUNDS_P (node
->decl
)
3688 && DECL_RTL (node
->decl
)
3689 && MEM_P (DECL_RTL (node
->decl
))
3690 && TREE_ASM_WRITTEN (node
->decl
))
3692 chkp_walk_pointer_assignments (node
->decl
,
3693 DECL_INITIAL (node
->decl
),
3695 chkp_add_modification_to_stmt_list
);
3697 if (stmts
.avail
<= 0)
3699 cgraph_build_static_cdtor ('P', stmts
.stmts
,
3700 MAX_RESERVED_INIT_PRIORITY
+ 3);
3701 stmts
.avail
= MAX_STMTS_IN_STATIC_CHKP_CTOR
;
3707 cgraph_build_static_cdtor ('P', stmts
.stmts
,
3708 MAX_RESERVED_INIT_PRIORITY
+ 3);
3710 /* Iterate through varpool and generate bounds initialization
3711 constructors for all static bounds vars. */
3712 stmts
.avail
= MAX_STMTS_IN_STATIC_CHKP_CTOR
;
3714 FOR_EACH_VARIABLE (node
)
3715 if (node
->need_bounds_init
3716 && POINTER_BOUNDS_P (node
->decl
)
3717 && TREE_ASM_WRITTEN (node
->decl
))
3719 tree bnd
= node
->decl
;
3722 gcc_assert (DECL_INITIAL (bnd
)
3723 && TREE_CODE (DECL_INITIAL (bnd
)) == ADDR_EXPR
);
3725 var
= TREE_OPERAND (DECL_INITIAL (bnd
), 0);
3726 chkp_output_static_bounds (bnd
, var
, &stmts
);
3730 cgraph_build_static_cdtor ('B', stmts
.stmts
,
3731 MAX_RESERVED_INIT_PRIORITY
+ 2);
3733 delete chkp_static_var_bounds
;
3734 delete chkp_bounds_map
;
3737 /* An instrumentation function which is called for each statement
3738 having memory access we want to instrument. It inserts check
3739 code and bounds copy code.
3741 ITER points to statement to instrument.
3743 NODE holds memory access in statement to check.
3745 LOC holds the location information for statement.
3747 DIRFLAGS determines whether access is read or write.
3749 ACCESS_OFFS should be added to address used in NODE
3752 ACCESS_SIZE holds size of checked access.
3754 SAFE indicates if NODE access is safe and should not be
3757 chkp_process_stmt (gimple_stmt_iterator
*iter
, tree node
,
3758 location_t loc
, tree dirflag
,
3759 tree access_offs
, tree access_size
,
3762 tree node_type
= TREE_TYPE (node
);
3763 tree size
= access_size
? access_size
: TYPE_SIZE_UNIT (node_type
);
3764 tree addr_first
= NULL_TREE
; /* address of the first accessed byte */
3765 tree addr_last
= NULL_TREE
; /* address of the last accessed byte */
3766 tree ptr
= NULL_TREE
; /* a pointer used for dereference */
3767 tree bounds
= NULL_TREE
;
3769 /* We do not need instrumentation for clobbers. */
3770 if (dirflag
== integer_one_node
3771 && gimple_code (gsi_stmt (*iter
)) == GIMPLE_ASSIGN
3772 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter
))))
3775 switch (TREE_CODE (node
))
3785 /* We are not going to generate any checks, so do not
3786 generate bounds as well. */
3787 addr_first
= chkp_build_addr_expr (node
);
3791 chkp_parse_array_and_component_ref (node
, &ptr
, &elt
, &safe
,
3792 &bitfield
, &bounds
, iter
, false);
3794 /* Break if there is no dereference and operation is safe. */
3798 tree field
= TREE_OPERAND (node
, 1);
3800 if (TREE_CODE (DECL_SIZE_UNIT (field
)) == INTEGER_CST
)
3801 size
= DECL_SIZE_UNIT (field
);
3804 elt
= chkp_build_addr_expr (elt
);
3805 addr_first
= fold_convert_loc (loc
, ptr_type_node
, elt
? elt
: ptr
);
3806 addr_first
= fold_build_pointer_plus_loc (loc
,
3808 byte_position (field
));
3811 addr_first
= chkp_build_addr_expr (node
);
3816 ptr
= TREE_OPERAND (node
, 0);
3821 ptr
= TREE_OPERAND (node
, 0);
3822 addr_first
= chkp_build_addr_expr (node
);
3825 case TARGET_MEM_REF
:
3826 ptr
= TMR_BASE (node
);
3827 addr_first
= chkp_build_addr_expr (node
);
3830 case ARRAY_RANGE_REF
:
3831 printf("ARRAY_RANGE_REF\n");
3832 debug_gimple_stmt(gsi_stmt(*iter
));
3839 tree offs
, rem
, bpu
;
3841 gcc_assert (!access_offs
);
3842 gcc_assert (!access_size
);
3844 bpu
= fold_convert (size_type_node
, bitsize_int (BITS_PER_UNIT
));
3845 offs
= fold_convert (size_type_node
, TREE_OPERAND (node
, 2));
3846 rem
= size_binop_loc (loc
, TRUNC_MOD_EXPR
, offs
, bpu
);
3847 offs
= size_binop_loc (loc
, TRUNC_DIV_EXPR
, offs
, bpu
);
3849 size
= fold_convert (size_type_node
, TREE_OPERAND (node
, 1));
3850 size
= size_binop_loc (loc
, PLUS_EXPR
, size
, rem
);
3851 size
= size_binop_loc (loc
, CEIL_DIV_EXPR
, size
, bpu
);
3852 size
= fold_convert (size_type_node
, size
);
3854 chkp_process_stmt (iter
, TREE_OPERAND (node
, 0), loc
,
3855 dirflag
, offs
, size
, safe
);
3863 if (dirflag
!= integer_one_node
3864 || DECL_REGISTER (node
))
3868 addr_first
= chkp_build_addr_expr (node
);
3875 /* If addr_last was not computed then use (addr_first + size - 1)
3876 expression to compute it. */
3879 addr_last
= fold_build_pointer_plus_loc (loc
, addr_first
, size
);
3880 addr_last
= fold_build_pointer_plus_hwi_loc (loc
, addr_last
, -1);
3883 /* Shift both first_addr and last_addr by access_offs if specified. */
3886 addr_first
= fold_build_pointer_plus_loc (loc
, addr_first
, access_offs
);
3887 addr_last
= fold_build_pointer_plus_loc (loc
, addr_last
, access_offs
);
3890 /* Generate bndcl/bndcu checks if memory access is not safe. */
3893 gimple_stmt_iterator stmt_iter
= *iter
;
3896 bounds
= chkp_find_bounds (ptr
, iter
);
3898 chkp_check_mem_access (addr_first
, addr_last
, bounds
,
3899 stmt_iter
, loc
, dirflag
);
3902 /* We need to store bounds in case pointer is stored. */
3903 if (dirflag
== integer_one_node
3904 && chkp_type_has_pointer (node_type
)
3905 && flag_chkp_store_bounds
)
3907 gimple stmt
= gsi_stmt (*iter
);
3908 tree rhs1
= gimple_assign_rhs1 (stmt
);
3909 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3911 if (get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
)
3912 chkp_walk_pointer_assignments (node
, rhs1
, iter
,
3913 chkp_copy_bounds_for_elem
);
3916 bounds
= chkp_compute_bounds_for_assignment (NULL_TREE
, stmt
);
3917 chkp_build_bndstx (addr_first
, rhs1
, bounds
, iter
);
3922 /* Add code to copy bounds for all pointers copied
3923 in ASSIGN created during inline of EDGE. */
3925 chkp_copy_bounds_for_assign (gimple assign
, struct cgraph_edge
*edge
)
3927 tree lhs
= gimple_assign_lhs (assign
);
3928 tree rhs
= gimple_assign_rhs1 (assign
);
3929 gimple_stmt_iterator iter
= gsi_for_stmt (assign
);
3931 if (!flag_chkp_store_bounds
)
3934 chkp_walk_pointer_assignments (lhs
, rhs
, &iter
, chkp_copy_bounds_for_elem
);
3936 /* We should create edges for all created calls to bndldx and bndstx. */
3937 while (gsi_stmt (iter
) != assign
)
3939 gimple stmt
= gsi_stmt (iter
);
3940 if (gimple_code (stmt
) == GIMPLE_CALL
)
3942 tree fndecl
= gimple_call_fndecl (stmt
);
3943 struct cgraph_node
*callee
= cgraph_node::get_create (fndecl
);
3944 struct cgraph_edge
*new_edge
;
3946 gcc_assert (fndecl
== chkp_bndstx_fndecl
3947 || fndecl
== chkp_bndldx_fndecl
3948 || fndecl
== chkp_ret_bnd_fndecl
);
3950 new_edge
= edge
->caller
->create_edge (callee
,
3951 as_a
<gcall
*> (stmt
),
3954 new_edge
->frequency
= compute_call_stmt_bb_frequency
3955 (edge
->caller
->decl
, gimple_bb (stmt
));
3961 /* Some code transformation made during instrumentation pass
3962 may put code into inconsistent state. Here we find and fix
3968 gimple_stmt_iterator i
;
3970 /* We could insert some code right after stmt which ends bb.
3971 We wanted to put this code on fallthru edge but did not
3972 add new edges from the beginning because it may cause new
3973 phi node creation which may be incorrect due to incomplete
3975 FOR_ALL_BB_FN (bb
, cfun
)
3976 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
3978 gimple stmt
= gsi_stmt (i
);
3979 gimple_stmt_iterator next
= i
;
3983 if (stmt_ends_bb_p (stmt
)
3984 && !gsi_end_p (next
))
3986 edge fall
= find_fallthru_edge (bb
->succs
);
3987 basic_block dest
= NULL
;
3992 /* We cannot split abnormal edge. Therefore we
3993 store its params, make it regular and then
3994 rebuild abnormal edge after split. */
3995 if (fall
->flags
& EDGE_ABNORMAL
)
3997 flags
= fall
->flags
& ~EDGE_FALLTHRU
;
4000 fall
->flags
&= ~EDGE_COMPLEX
;
4003 while (!gsi_end_p (next
))
4005 gimple next_stmt
= gsi_stmt (next
);
4006 gsi_remove (&next
, false);
4007 gsi_insert_on_edge (fall
, next_stmt
);
4010 gsi_commit_edge_inserts ();
4012 /* Re-create abnormal edge. */
4014 make_edge (bb
, dest
, flags
);
4019 /* Walker callback for chkp_replace_function_pointers. Replaces
4020 function pointer in the specified operand with pointer to the
4021 instrumented function version. */
4023 chkp_replace_function_pointer (tree
*op
, int *walk_subtrees
,
4024 void *data ATTRIBUTE_UNUSED
)
4026 if (TREE_CODE (*op
) == FUNCTION_DECL
4027 && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op
))
4028 && (DECL_BUILT_IN_CLASS (*op
) == NOT_BUILT_IN
4029 /* For builtins we replace pointers only for selected
4030 function and functions having definitions. */
4031 || (DECL_BUILT_IN_CLASS (*op
) == BUILT_IN_NORMAL
4032 && (chkp_instrument_normal_builtin (*op
)
4033 || gimple_has_body_p (*op
)))))
4035 struct cgraph_node
*node
= cgraph_node::get_create (*op
);
4036 struct cgraph_node
*clone
= NULL
;
4038 if (!node
->instrumentation_clone
)
4039 clone
= chkp_maybe_create_clone (*op
);
4049 /* This function searches for function pointers in statement
4050 pointed by GSI and replaces them with pointers to instrumented
4051 function versions. */
4053 chkp_replace_function_pointers (gimple_stmt_iterator
*gsi
)
4055 gimple stmt
= gsi_stmt (*gsi
);
4056 /* For calls we want to walk call args only. */
4057 if (gimple_code (stmt
) == GIMPLE_CALL
)
4060 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
4061 walk_tree (gimple_call_arg_ptr (stmt
, i
),
4062 chkp_replace_function_pointer
, NULL
, NULL
);
4065 walk_gimple_stmt (gsi
, NULL
, chkp_replace_function_pointer
, NULL
);
4068 /* This function instruments all statements working with memory,
4071 It also removes excess statements from static initializers. */
4073 chkp_instrument_function (void)
4075 basic_block bb
, next
;
4076 gimple_stmt_iterator i
;
4077 enum gimple_rhs_class grhs_class
;
4078 bool safe
= lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun
->decl
));
4080 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
;
4084 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
4086 gimple s
= gsi_stmt (i
);
4088 /* Skip statement marked to not be instrumented. */
4089 if (chkp_marked_stmt_p (s
))
4095 chkp_replace_function_pointers (&i
);
4097 switch (gimple_code (s
))
4100 chkp_process_stmt (&i
, gimple_assign_lhs (s
),
4101 gimple_location (s
), integer_one_node
,
4102 NULL_TREE
, NULL_TREE
, safe
);
4103 chkp_process_stmt (&i
, gimple_assign_rhs1 (s
),
4104 gimple_location (s
), integer_zero_node
,
4105 NULL_TREE
, NULL_TREE
, safe
);
4106 grhs_class
= get_gimple_rhs_class (gimple_assign_rhs_code (s
));
4107 if (grhs_class
== GIMPLE_BINARY_RHS
)
4108 chkp_process_stmt (&i
, gimple_assign_rhs2 (s
),
4109 gimple_location (s
), integer_zero_node
,
4110 NULL_TREE
, NULL_TREE
, safe
);
4115 greturn
*r
= as_a
<greturn
*> (s
);
4116 if (gimple_return_retval (r
) != NULL_TREE
)
4118 chkp_process_stmt (&i
, gimple_return_retval (r
),
4119 gimple_location (r
),
4121 NULL_TREE
, NULL_TREE
, safe
);
4123 /* Additionally we need to add bounds
4124 to return statement. */
4125 chkp_add_bounds_to_ret_stmt (&i
);
4131 chkp_add_bounds_to_call_stmt (&i
);
4140 /* We do not need any actual pointer stores in checker
4141 static initializer. */
4142 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun
->decl
))
4143 && gimple_code (s
) == GIMPLE_ASSIGN
4144 && gimple_store_p (s
))
4146 gimple_stmt_iterator del_iter
= gsi_for_stmt (s
);
4147 gsi_remove (&del_iter
, true);
4148 unlink_stmt_vdef (s
);
4156 /* Some input params may have bounds and be address taken. In this case
4157 we should store incoming bounds into bounds table. */
4159 if (flag_chkp_store_bounds
)
4160 for (arg
= DECL_ARGUMENTS (cfun
->decl
); arg
; arg
= DECL_CHAIN (arg
))
4161 if (TREE_ADDRESSABLE (arg
))
4163 if (BOUNDED_P (arg
))
4165 tree bounds
= chkp_get_next_bounds_parm (arg
);
4166 tree def_ptr
= ssa_default_def (cfun
, arg
);
4167 gimple_stmt_iterator iter
4168 = gsi_start_bb (chkp_get_entry_block ());
4169 chkp_build_bndstx (chkp_build_addr_expr (arg
),
4170 def_ptr
? def_ptr
: arg
,
4173 /* Skip bounds arg. */
4174 arg
= TREE_CHAIN (arg
);
4176 else if (chkp_type_has_pointer (TREE_TYPE (arg
)))
4178 tree orig_arg
= arg
;
4179 bitmap slots
= BITMAP_ALLOC (NULL
);
4180 gimple_stmt_iterator iter
4181 = gsi_start_bb (chkp_get_entry_block ());
4185 chkp_find_bound_slots (TREE_TYPE (arg
), slots
);
4187 EXECUTE_IF_SET_IN_BITMAP (slots
, 0, bnd_no
, bi
)
4189 tree bounds
= chkp_get_next_bounds_parm (arg
);
4190 HOST_WIDE_INT offs
= bnd_no
* POINTER_SIZE
/ BITS_PER_UNIT
;
4191 tree addr
= chkp_build_addr_expr (orig_arg
);
4192 tree ptr
= build2 (MEM_REF
, ptr_type_node
, addr
,
4193 build_int_cst (ptr_type_node
, offs
));
4194 chkp_build_bndstx (chkp_build_addr_expr (ptr
), ptr
,
4197 arg
= DECL_CHAIN (arg
);
4199 BITMAP_FREE (slots
);
4204 /* Find init/null/copy_ptr_bounds calls and replace them
4205 with assignments. It should allow better code
4209 chkp_remove_useless_builtins ()
4212 gimple_stmt_iterator gsi
;
4214 FOR_EACH_BB_FN (bb
, cfun
)
4216 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4218 gimple stmt
= gsi_stmt (gsi
);
4220 enum built_in_function fcode
;
4222 /* Find builtins returning first arg and replace
4223 them with assignments. */
4224 if (gimple_code (stmt
) == GIMPLE_CALL
4225 && (fndecl
= gimple_call_fndecl (stmt
))
4226 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
4227 && (fcode
= DECL_FUNCTION_CODE (fndecl
))
4228 && (fcode
== BUILT_IN_CHKP_INIT_PTR_BOUNDS
4229 || fcode
== BUILT_IN_CHKP_NULL_PTR_BOUNDS
4230 || fcode
== BUILT_IN_CHKP_COPY_PTR_BOUNDS
4231 || fcode
== BUILT_IN_CHKP_SET_PTR_BOUNDS
))
4233 tree res
= gimple_call_arg (stmt
, 0);
4234 update_call_from_tree (&gsi
, res
);
4235 stmt
= gsi_stmt (gsi
);
4242 /* Initialize pass. */
4247 gimple_stmt_iterator i
;
4249 in_chkp_pass
= true;
4251 for (bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
; bb
; bb
= bb
->next_bb
)
4252 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
4253 chkp_unmark_stmt (gsi_stmt (i
));
4255 chkp_invalid_bounds
= new hash_set
<tree
>;
4256 chkp_completed_bounds_set
= new hash_set
<tree
>;
4257 delete chkp_reg_bounds
;
4258 chkp_reg_bounds
= new hash_map
<tree
, tree
>;
4259 delete chkp_bound_vars
;
4260 chkp_bound_vars
= new hash_map
<tree
, tree
>;
4261 chkp_reg_addr_bounds
= new hash_map
<tree
, tree
>;
4262 chkp_incomplete_bounds_map
= new hash_map
<tree
, tree
>;
4263 delete chkp_bounds_map
;
4264 chkp_bounds_map
= new hash_map
<tree
, tree
>;
4265 chkp_abnormal_copies
= BITMAP_GGC_ALLOC ();
4268 zero_bounds
= NULL_TREE
;
4269 none_bounds
= NULL_TREE
;
4270 incomplete_bounds
= integer_zero_node
;
4271 tmp_var
= NULL_TREE
;
4272 size_tmp_var
= NULL_TREE
;
4274 chkp_uintptr_type
= lang_hooks
.types
.type_for_mode (ptr_mode
, true);
4276 /* We create these constant bounds once for each object file.
4277 These symbols go to comdat section and result in single copy
4278 of each one in the final binary. */
4279 chkp_get_zero_bounds_var ();
4280 chkp_get_none_bounds_var ();
4282 calculate_dominance_info (CDI_DOMINATORS
);
4283 calculate_dominance_info (CDI_POST_DOMINATORS
);
4285 bitmap_obstack_initialize (NULL
);
4288 /* Finalize instrumentation pass. */
4292 in_chkp_pass
= false;
4294 delete chkp_invalid_bounds
;
4295 delete chkp_completed_bounds_set
;
4296 delete chkp_reg_addr_bounds
;
4297 delete chkp_incomplete_bounds_map
;
4299 free_dominance_info (CDI_DOMINATORS
);
4300 free_dominance_info (CDI_POST_DOMINATORS
);
4302 bitmap_obstack_release (NULL
);
4305 /* Main instrumentation pass function. */
4311 chkp_instrument_function ();
4313 chkp_remove_useless_builtins ();
4315 chkp_function_mark_instrumented (cfun
->decl
);
4324 /* Instrumentation pass gate. */
4328 return cgraph_node::get (cfun
->decl
)->instrumentation_clone
4329 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun
->decl
));
4334 const pass_data pass_data_chkp
=
4336 GIMPLE_PASS
, /* type */
4338 OPTGROUP_NONE
, /* optinfo_flags */
4339 TV_NONE
, /* tv_id */
4340 PROP_ssa
| PROP_cfg
, /* properties_required */
4341 0, /* properties_provided */
4342 0, /* properties_destroyed */
4343 0, /* todo_flags_start */
4345 | TODO_update_ssa
/* todo_flags_finish */
4348 class pass_chkp
: public gimple_opt_pass
4351 pass_chkp (gcc::context
*ctxt
)
4352 : gimple_opt_pass (pass_data_chkp
, ctxt
)
4355 /* opt_pass methods: */
4356 virtual opt_pass
* clone ()
4358 return new pass_chkp (m_ctxt
);
4361 virtual bool gate (function
*)
4363 return chkp_gate ();
4366 virtual unsigned int execute (function
*)
4368 return chkp_execute ();
4371 }; // class pass_chkp
4376 make_pass_chkp (gcc::context
*ctxt
)
4378 return new pass_chkp (ctxt
);
4381 #include "gt-tree-chkp.h"