/* Pointer Bounds Checker instrumentation pass.
   Copyright (C) 2014-2017 Free Software Foundation, Inc.
   Contributed by Ilya Enkovich (ilya.enkovich@intel.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "tree-ssa-address.h"
#include "tree-ssa-loop-niter.h"
#include "gimple-pretty-print.h"
#include "gimple-iterator.h"
#include "gimplify.h"
#include "gimplify-me.h"
#include "print-tree.h"
#include "calls.h"
#include "expr.h"
#include "tree-cfg.h"
#include "tree-ssa-propagate.h"
#include "tree-chkp.h"
#include "gimple-walk.h"
#include "ipa-chkp.h"
#include "params.h"
/* Pointer Bounds Checker instruments code with memory checks to find
   out-of-bounds memory accesses.  Checks are performed by computing
   bounds for each pointer and then comparing the address of the
   accessed memory against those bounds before the pointer is
   dereferenced.

   There are a few things to instrument:

   a) Memory accesses - add checker calls to check the address of the
      accessed memory against bounds of the dereferenced pointer.
      Obviously safe memory accesses, such as static variable accesses,
      do not have to be instrumented.  For example, a 4-byte read

	   val_2 = *p_1;

      is transformed into:

	   __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
	   D.1_4 = p_1 + 3;
	   __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
	   val_2 = *p_1;

      where __bound_tmp.1_3 are bounds computed for pointer p_1,
      __builtin___chkp_bndcl is a lower bound check and
      __builtin___chkp_bndcu is an upper bound check.
   b) Pointer stores.

      When a pointer is stored in memory we need to store its bounds.  To
      achieve compatibility of instrumented code with regular code
      we have to keep data layout and store bounds in special bound tables
      via special checker calls.  Implementation of the bounds table may
      vary for different platforms.  It has to associate a pointer value
      and its location (this is required because we may have two equal
      pointers with different bounds stored in different places) with
      bounds.  Another checker builtin allows us to obtain bounds for a
      specified pointer loaded from a specified location.  For example,
      a store of the pointer &buf2 is followed by a bounds store:

	   __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);

      where __bound_tmp.1_2 are bounds of &buf2 and D.1_2 is the address
      of the memory slot the pointer is stored to.
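
      As a user-level illustration (a minimal sketch; the function and
      variable names are hypothetical), the pointer store in

	   void save (int **slot, int *p)
	   {
	     *slot = p;
	   }

      gets a bndstx call after it, associating the location SLOT and
      the stored value P with the bounds of P, so that a later load
      from SLOT can recover those bounds via bndldx.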
   c) Static initialization.

      The special case of a pointer store is static pointer
      initialization.  Bounds initialization is performed in a few steps:
	- register all static initializations in the front-end using
	  chkp_register_var_initializer
	- when file compilation finishes we create functions with the
	  special attribute 'chkp ctor' and put explicit initialization
	  code (assignments) for all statically initialized pointers.
	- when a checker constructor is compiled the checker pass adds
	  required bounds initialization for all statically initialized
	  pointers
	- since we do not actually need the excess pointer initialization
	  in the checker constructor we remove such assignments from it
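
      As an illustration (a sketch; the names are hypothetical), for a
      statically initialized pointer

	   int buf[10];
	   int *p = buf;

      the generated 'chkp ctor' constructor initially contains the
      assignment 'p = buf' only so that this pass can attach a bounds
      store for the location of p; the assignment itself is then removed
      as excess, because regular static initialization already performs
      it.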
   2. Function calls.

      For each call in the code we add additional arguments to pass
      bounds for pointer arguments.  We determine the type of each call
      argument using the arguments list from the function declaration;
      if the declaration is not available we use the function type;
      failing that (e.g. for unnamed arguments) we use the type of the
      passed value.  The function declaration/type is replaced with the
      instrumented one.  For example,

	   val_1 = foo (&buf1, &buf2, &buf1, 0);

      is transformed into:

	   val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
			     &buf1, __bound_tmp.1_2, 0);

      If a function returns a pointer value we have to return bounds
      also.  A new operand was added to the return statement to hold
      returned bounds.  For example:

	   return &_buf1, __bound_tmp.1_1;
   3. Bounds computation.

      The compiler is fully responsible for computing bounds to be used
      for each memory access.  The first step for bounds computation is
      to find the origin of the pointer dereferenced for a memory access.
      Based on the pointer origin we define a way to compute its bounds.
      There are just a few possible cases:

      a) Pointer is returned by call.

	 In this case we use the corresponding checker builtin to obtain
	 the returned bounds.  For example:

	      buf_1 = malloc (size_2);
	      foo (buf_1);

	 is transformed into:

	      buf_1 = malloc (size_2);
	      __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
	      foo (buf_1, __bound_tmp.1_3);
      b) Pointer is an address of an object.

	 In this case the compiler tries to compute the object's size and
	 create corresponding bounds.  If the object has incomplete type
	 then a special checker builtin is used to obtain its size at
	 runtime.  For an array 'int buf[100]' we get:

	      <unnamed type> __bound_tmp.3;
	      ...
	      __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
	      ...
	      return &buf, __bound_tmp.3_2;

	 Address of an object 'extern int buf[]' with incomplete type is
	 returned with dynamically computed bounds:

	      <unnamed type> __bound_tmp.4;
	      long unsigned int __size_tmp.3;
	      ...
	      __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
	      __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
	      ...
	      return &buf, __bound_tmp.4_3;
      c) Pointer is the result of object narrowing.

	 It happens when we use a pointer to an object to compute a
	 pointer to a part of an object.  E.g. we take a pointer to a
	 field of a structure.  In this case we perform bounds
	 intersection using the bounds of the original object and the
	 bounds of the object's part (which are computed based on its
	 type).

	 There may be some debatable questions about when narrowing
	 should occur and when it should not.  To avoid false bound
	 violations in correct programs we do not perform narrowing when
	 the address of an array element is obtained (the resulting
	 pointer keeps the bounds of the whole array) and when the
	 address of the first structure field is obtained (because it is
	 guaranteed to be equal to the address of the whole structure and
	 it is legal to cast it back to structure).

	 Default narrowing behavior may be changed using compiler flags.
	 In this example the address of the second structure field is
	 returned, so its bounds are intersected with the bounds of P:

	      foo (struct A * p, __bounds_type __bounds_of_p)
	      {
		<unnamed type> __bound_tmp.3;
		...
		_5 = &p_1(D)->second_field;
		__bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
		__bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
							      __bounds_of_p_3(D));
		_2 = &p_1(D)->second_field;
		return _2, __bound_tmp.3_8;
	      }

	 In this example the address of the first field of an array
	 element is returned; no narrowing is performed and the bounds of
	 P are reused:

	      foo (struct A * p, __bounds_type __bounds_of_p, int i)
	      {
		long unsigned int _3;
		long unsigned int _4;
		...
		_3 = (long unsigned int) i_1(D);
		...
		_7 = &_6->first_field;
		return _7, __bounds_of_p_2(D);
	      }
      d) Pointer is the result of pointer arithmetic or type cast.

	 In this case the bounds of the base pointer are used.  In case
	 of a binary operation producing a pointer we analyze the data
	 flow further, looking for the operands' bounds.  One operand is
	 considered as a base if it has some valid bounds.  If we fall
	 into a case when none of the operands (or both of them) has
	 valid bounds, a default bounds value is used.

	 Trying to find out bounds for binary operations we may fall into
	 cyclic dependencies for pointers.  To avoid infinite recursion
	 all walked phi nodes instantly obtain corresponding bounds but
	 the created bounds are marked as incomplete.  That lets us stop
	 the DF walk during the bounds search.

	 When we reach a pointer source, some args of incomplete bounds
	 phis obtain valid bounds and those values are propagated further
	 through phi nodes.  If no valid bounds were found for a phi node
	 then we mark its result as invalid bounds.  The process stops
	 when all incomplete bounds become either valid or invalid and we
	 are able to choose a pointer base.
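
	 For illustration (a sketch; names are hypothetical), in

	      int *q = p;
	      while (cond)
		q = q + 1;

	 the phi node merging both definitions of q first receives
	 incomplete bounds.  The walk then reaches the pointer source p
	 through the 'q = p' argument, its bounds become a valid phi
	 argument, and propagation completes the phi result, so q ends up
	 reusing the bounds of p.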
      e) Pointer is loaded from memory.

	 In this case we just need to load bounds from the bounds table:

	      <unnamed type> __bound_tmp.3;
	      ...
	      _2 = buf;
	      __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
	      return _2, __bound_tmp.3_4;  */
typedef void (*assign_handler) (tree, tree, void *);

static tree chkp_get_zero_bounds ();
static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
				     gimple_stmt_iterator *iter);
static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
						tree *elt, bool *safe,
						bool *bitfield,
						tree *bounds,
						gimple_stmt_iterator *iter,
						bool innermost_bounds);
static void chkp_parse_bit_field_ref (tree node, location_t loc,
				      tree *offset, tree *size);
/* These macros expand to the checker builtin fndecls provided by the
   target via the builtin_chkp_function hook.  */
#define chkp_bndldx_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
#define chkp_bndstx_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
#define chkp_checkl_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
#define chkp_checku_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
#define chkp_bndmk_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
#define chkp_ret_bnd_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
#define chkp_intersect_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
#define chkp_narrow_bounds_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
#define chkp_sizeof_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
#define chkp_extract_lower_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
#define chkp_extract_upper_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
/* Pointer-sized unsigned integer type used for size computations.  */
static GTY (()) tree chkp_uintptr_type;

/* Static const vars holding zero and none bounds.  */
static GTY (()) tree chkp_zero_bounds_var;
static GTY (()) tree chkp_none_bounds_var;

/* Per-function entry block used for checker initialization code.  */
static GTY (()) basic_block entry_block;
/* SSA names representing zero, none and incomplete bounds.  */
static GTY (()) tree zero_bounds;
static GTY (()) tree none_bounds;
static GTY (()) tree incomplete_bounds;
/* Shared temporaries used for bounds and size values.  */
static GTY (()) tree tmp_var;
static GTY (()) tree size_tmp_var;
/* Bitmap of bounds SSA names created as copies for abnormal edges.  */
static GTY (()) bitmap chkp_abnormal_copies;

/* Bounds marked as invalid or completed during the incomplete bounds
   walk.  */
struct hash_set<tree> *chkp_invalid_bounds;
struct hash_set<tree> *chkp_completed_bounds_set;
/* Maps from pointers, pointer vars and addresses to their bounds.  */
struct hash_map<tree, tree> *chkp_reg_bounds;
struct hash_map<tree, tree> *chkp_bound_vars;
struct hash_map<tree, tree> *chkp_reg_addr_bounds;
struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
struct hash_map<tree, tree> *chkp_bounds_map;
struct hash_map<tree, tree> *chkp_static_var_bounds;

/* True when we are inside the Pointer Bounds Checker pass.  */
static bool in_chkp_pass;
#define CHKP_BOUND_TMP_NAME "__bound_tmp"
#define CHKP_SIZE_TMP_NAME "__size_tmp"
#define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
#define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
#define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
#define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
#define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
/* Static checker constructors may become very large and their
   compilation with optimization may take too much time.
   Therefore we put a limit on the number of statements in one
   constructor.  Tests with 100 000 statically initialized
   pointers showed the following compilation times on Sandy Bridge:

     limit 100 => ~18 sec.
     limit 300 => ~22 sec.
     limit 1000 => ~30 sec.
     limit 3000 => ~49 sec.
     limit 5000 => ~55 sec.
     limit 10000 => ~76 sec.
     limit 100000 => ~532 sec.  */
#define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
/* Holds statements for the checker constructor under construction and
   the number of statements it may still accept (see the limit above).  */
struct chkp_ctor_stmt_list
{
  tree stmts;
  int avail;
};
/* Return 1 if function FNDECL is instrumented by Pointer
   Bounds Checker.  */
bool
chkp_function_instrumented_p (tree fndecl)
{
  return fndecl
    && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
}

/* Mark function FNDECL as instrumented.  */
void
chkp_function_mark_instrumented (tree fndecl)
{
  if (chkp_function_instrumented_p (fndecl))
    return;

  DECL_ATTRIBUTES (fndecl)
    = tree_cons (get_identifier ("chkp instrumented"), NULL,
		 DECL_ATTRIBUTES (fndecl));
}

/* Return true when STMT is a builtin call to an instrumentation function
   corresponding to CODE.  */

bool
chkp_gimple_call_builtin_p (gimple *call,
			    enum built_in_function code)
{
  tree fndecl;
  if (gimple_call_builtin_p (call, BUILT_IN_MD)
      && (fndecl = targetm.builtin_chkp_function (code))
      && (DECL_FUNCTION_CODE (gimple_call_fndecl (call))
	  == DECL_FUNCTION_CODE (fndecl)))
    return true;
  return false;
}
/* Emit code to build zero bounds and return RTL holding
   the result.  */
rtx
chkp_expand_zero_bounds ()
{
  tree zero_bnd;

  if (flag_chkp_use_static_const_bounds)
    zero_bnd = chkp_get_zero_bounds_var ();
  else
    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
					    integer_zero_node);
  return expand_normal (zero_bnd);
}
/* Emit code to store zero bounds for PTR located at MEM.  */
void
chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
{
  tree zero_bnd, bnd, addr, bndstx;

  if (flag_chkp_use_static_const_bounds)
    zero_bnd = chkp_get_zero_bounds_var ();
  else
    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
					    integer_zero_node);
  bnd = make_tree (pointer_bounds_type_node,
		   assign_temp (pointer_bounds_type_node, 0, 1));
  addr = build1 (ADDR_EXPR,
		 build_pointer_type (TREE_TYPE (mem)), mem);
  bndstx = chkp_build_bndstx_call (addr, ptr, bnd);

  expand_assignment (bnd, zero_bnd, false);
  expand_normal (bndstx);
}
/* Build retbnd call for returned value RETVAL.

   If BNDVAL is not NULL then the result is stored
   in it.  Otherwise a temporary is created to
   hold the returned value.

   GSI points to a position for a retbnd call
   and is set to the created stmt.

   A cgraph edge for the new call is not created here and should be
   added by the caller if required.

   Obtained bounds are returned.  */
tree
chkp_insert_retbnd_call (tree bndval, tree retval,
			 gimple_stmt_iterator *gsi)
{
  gimple *call;

  if (!bndval)
    bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");

  call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
  gimple_call_set_lhs (call, bndval);
  gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);

  return bndval;
}
/* Build a GIMPLE_CALL identical to CALL but skipping bounds
   arguments.  */

gcall *
chkp_copy_call_skip_bounds (gcall *call)
{
  bitmap bounds;
  unsigned i;

  bitmap_obstack_initialize (NULL);
  bounds = BITMAP_ALLOC (NULL);

  for (i = 0; i < gimple_call_num_args (call); i++)
    if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
      bitmap_set_bit (bounds, i);

  if (!bitmap_empty_p (bounds))
    call = gimple_call_copy_skip_args (call, bounds);
  gimple_call_set_with_bounds (call, false);

  BITMAP_FREE (bounds);
  bitmap_obstack_release (NULL);

  return call;
}
/* Redirect edge E to the correct node according to call_stmt.
   Return 1 if bounds removal from call_stmt should be done
   instead of redirection.  */

bool
chkp_redirect_edge (cgraph_edge *e)
{
  bool instrumented = false;
  tree decl = e->callee->decl;

  if (e->callee->instrumentation_clone
      || chkp_function_instrumented_p (decl))
    instrumented = true;

  if (instrumented
      && !gimple_call_with_bounds_p (e->call_stmt))
    e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
  else if (!instrumented
	   && gimple_call_with_bounds_p (e->call_stmt)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
    {
      if (e->callee->instrumented_version)
	e->redirect_callee (e->callee->instrumented_version);
      else
	{
	  tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
	  /* Avoid bounds removal if all args will be removed.  */
	  if (!args || TREE_VALUE (args) != void_type_node)
	    return true;
	  else
	    gimple_call_set_with_bounds (e->call_stmt, false);
	}
    }

  return false;
}
/* Mark statement S to not be instrumented.  */
static void
chkp_mark_stmt (gimple *s)
{
  gimple_set_plf (s, GF_PLF_1, true);
}

/* Mark statement S to be instrumented.  */
static void
chkp_unmark_stmt (gimple *s)
{
  gimple_set_plf (s, GF_PLF_1, false);
}

/* Return 1 if statement S should not be instrumented.  */
static bool
chkp_marked_stmt_p (gimple *s)
{
  return gimple_plf (s, GF_PLF_1);
}

/* Get var to be used for bound temps.  */
static tree
chkp_get_tmp_var (void)
{
  if (!tmp_var)
    tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);

  return tmp_var;
}

/* Get SSA_NAME to be used as temp.  */
static tree
chkp_get_tmp_reg (gimple *stmt)
{
  if (in_chkp_pass)
    return make_ssa_name (chkp_get_tmp_var (), stmt);

  return make_temp_ssa_name (pointer_bounds_type_node, stmt,
			     CHKP_BOUND_TMP_NAME);
}

/* Get var to be used for size temps.  */
tree
chkp_get_size_tmp_var (void)
{
  if (!size_tmp_var)
    size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);

  return size_tmp_var;
}
/* Register bounds BND for address of OBJ.  */
static void
chkp_register_addr_bounds (tree obj, tree bnd)
{
  if (bnd == incomplete_bounds)
    return;

  chkp_reg_addr_bounds->put (obj, bnd);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Registered bound ");
      print_generic_expr (dump_file, bnd, 0);
      fprintf (dump_file, " for address of ");
      print_generic_expr (dump_file, obj, 0);
      fprintf (dump_file, "\n");
    }
}

/* Return bounds registered for address of OBJ.  */
static tree
chkp_get_registered_addr_bounds (tree obj)
{
  tree *slot = chkp_reg_addr_bounds->get (obj);
  return slot ? *slot : NULL_TREE;
}

/* Mark BOUNDS as completed.  */
static void
chkp_mark_completed_bounds (tree bounds)
{
  chkp_completed_bounds_set->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " as completed\n");
    }
}

/* Return 1 if BOUNDS were marked as completed and 0 otherwise.  */
static bool
chkp_completed_bounds (tree bounds)
{
  return chkp_completed_bounds_set->contains (bounds);
}

/* Clear completed bound marks.  */
static void
chkp_erase_completed_bounds (void)
{
  delete chkp_completed_bounds_set;
  chkp_completed_bounds_set = new hash_set<tree>;
}
/* Mark BOUNDS associated with PTR as incomplete.  */
static void
chkp_register_incomplete_bounds (tree bounds, tree ptr)
{
  chkp_incomplete_bounds_map->put (bounds, ptr);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Registered incomplete bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for ");
      print_generic_expr (dump_file, ptr, 0);
      fprintf (dump_file, "\n");
    }
}

/* Return 1 if BOUNDS are incomplete and 0 otherwise.  */
static bool
chkp_incomplete_bounds (tree bounds)
{
  if (bounds == incomplete_bounds)
    return true;

  if (chkp_completed_bounds (bounds))
    return false;

  return chkp_incomplete_bounds_map->get (bounds) != NULL;
}

/* Clear incomplete bound marks.  */
static void
chkp_erase_incomplete_bounds (void)
{
  delete chkp_incomplete_bounds_map;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
}

/* Build and return bndmk call which creates bounds for structure
   pointed to by PTR.  Structure should have complete type.  */
tree
chkp_make_bounds_for_struct_addr (tree ptr)
{
  tree type = TREE_TYPE (ptr);
  tree size;

  gcc_assert (POINTER_TYPE_P (type));

  size = TYPE_SIZE (TREE_TYPE (type));

  gcc_assert (size);

  return build_call_nary (pointer_bounds_type_node,
			  build_fold_addr_expr (chkp_bndmk_fndecl),
			  2, ptr, size);
}
/* Traversal function for chkp_may_finish_incomplete_bounds.
   Set RES to 0 if at least one argument of phi statement
   defining bounds (passed in KEY arg) is unknown.
   Traversal stops when first unknown phi argument is found.  */
bool
chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
			      bool *res)
{
  gimple *phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);
      if (!phi_arg)
	{
	  *res = false;
	  /* Do not need to traverse further.  */
	  return false;
	}
    }

  return true;
}

/* Return 1 if all phi nodes created for bounds have their
   arguments computed.  */
static bool
chkp_may_finish_incomplete_bounds (void)
{
  bool res = true;

  chkp_incomplete_bounds_map
    ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);

  return res;
}
/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for bounds phi node.  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
			   void *res ATTRIBUTE_UNUSED)
{
  tree ptr = *slot;
  gphi *bounds_phi;
  gphi *ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
  ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));

  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      add_phi_arg (bounds_phi, bound_arg,
		   gimple_phi_arg_edge (ptr_phi, i),
		   UNKNOWN_LOCATION);
    }

  return true;
}

/* Mark BOUNDS as invalid.  */
static void
chkp_mark_invalid_bounds (tree bounds)
{
  chkp_invalid_bounds->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " as invalid\n");
    }
}

/* Return 1 if BOUNDS are valid (i.e. were not marked as invalid)
   and 0 otherwise.  */
static bool
chkp_valid_bounds (tree bounds)
{
  if (bounds == zero_bounds || bounds == none_bounds)
    return false;

  return !chkp_invalid_bounds->contains (bounds);
}
/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of phi nodes trying to find
   valid completed bounds.  If there is at least one
   such arg then bounds produced by the phi node are marked
   as valid completed bounds and all phi args are
   recomputed.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple *phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
	{
	  *res = true;
	  chkp_mark_completed_bounds (bounds);
	  chkp_recompute_phi_bounds (bounds, slot, NULL);
	  return true;
	}
    }

  return true;
}

/* Helper function for chkp_finish_incomplete_bounds.
   Marks all incomplete bounds as invalid.  */
bool
chkp_mark_invalid_bounds_walker (tree const &bounds,
				 tree *slot ATTRIBUTE_UNUSED,
				 void *res ATTRIBUTE_UNUSED)
{
  if (!chkp_completed_bounds (bounds))
    {
      chkp_mark_invalid_bounds (bounds);
      chkp_mark_completed_bounds (bounds);
    }

  return true;
}
/* When all bound phi nodes have all their args computed
   we have enough info to find valid bounds.  We iterate
   through all incomplete bounds searching for valid
   bounds.  Found valid bounds are marked as completed
   and all remaining incomplete bounds are recomputed.
   The process continues until no new valid bounds may be
   found.  All remaining incomplete bounds are then marked as
   invalid (i.e. have no valid source of bounds).  */
static void
chkp_finish_incomplete_bounds (void)
{
  bool found_valid = true;

  while (found_valid)
    {
      found_valid = false;

      chkp_incomplete_bounds_map->
	traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);

      if (found_valid)
	chkp_incomplete_bounds_map->
	  traverse<void *, chkp_recompute_phi_bounds> (NULL);
    }

  chkp_incomplete_bounds_map->
    traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
  chkp_incomplete_bounds_map->
    traverse<void *, chkp_recompute_phi_bounds> (NULL);

  chkp_erase_completed_bounds ();
  chkp_erase_incomplete_bounds ();
}
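
/* For example, for two bounds phis B1 and B2 feeding each other in a
   loop, where one argument of B1 is defined by a bndmk call: the first
   traversal finds the valid, complete argument of B1 and marks B1
   completed; the recompute traversal then supplies B1 as an argument
   of B2, so the next iteration completes B2 as well and no bounds
   remain to be invalidated.  */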
/* Return 1 if type TYPE is a pointer type or a
   structure having a pointer type as one of its fields.
   Otherwise return 0.  */
bool
chkp_type_has_pointer (const_tree type)
{
  bool res = false;

  if (BOUNDED_TYPE_P (type))
    res = true;
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  res = res || chkp_type_has_pointer (TREE_TYPE (field));
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    res = chkp_type_has_pointer (TREE_TYPE (type));

  return res;
}

/* Return the number of bound slots required for type TYPE.  */
unsigned
chkp_type_bounds_count (const_tree type)
{
  unsigned res = 0;

  if (!type)
    res = 0;
  else if (BOUNDED_TYPE_P (type))
    res = 1;
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      bitmap have_bound;

      bitmap_obstack_initialize (NULL);
      have_bound = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (type, have_bound);
      res = bitmap_count_bits (have_bound);
      BITMAP_FREE (have_bound);
      bitmap_obstack_release (NULL);
    }

  return res;
}
/* Get bounds associated with NODE via
   chkp_set_bounds call.  */
tree
chkp_get_bounds (tree node)
{
  tree *slot;

  if (!chkp_bounds_map)
    return NULL_TREE;

  slot = chkp_bounds_map->get (node);
  return slot ? *slot : NULL_TREE;
}

/* Associate bounds VAL with NODE.  */
void
chkp_set_bounds (tree node, tree val)
{
  if (!chkp_bounds_map)
    chkp_bounds_map = new hash_map<tree, tree>;

  chkp_bounds_map->put (node, val);
}

/* Check if statically initialized variable VAR requires
   static bounds initialization.  If VAR is added into the
   bounds initialization list then 1 is returned.  Otherwise
   return 0.  */
bool
chkp_register_var_initializer (tree var)
{
  if (!flag_check_pointer_bounds
      || DECL_INITIAL (var) == error_mark_node)
    return false;

  gcc_assert (VAR_P (var));
  gcc_assert (DECL_INITIAL (var));

  if (TREE_STATIC (var)
      && chkp_type_has_pointer (TREE_TYPE (var)))
    {
      varpool_node::get_create (var)->need_bounds_init = 1;
      return true;
    }

  return false;
}
/* Helper function for chkp_finish_file.

   Add new modification statement (RHS is assigned to LHS)
   into the list of static initializer statements (passed in ARG).
   If the statements list becomes too big, emit a checker constructor
   and start a new one.  */
static void
chkp_add_modification_to_stmt_list (tree lhs,
				    tree rhs,
				    void *arg)
{
  struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *) arg;
  tree modify;

  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
    rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);

  modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
  append_to_statement_list (modify, &stmts->stmts);

  stmts->avail--;
}

/* Build and return ADDR_EXPR for specified object OBJ.  */
static tree
chkp_build_addr_expr (tree obj)
{
  return TREE_CODE (obj) == TARGET_MEM_REF
	 ? tree_mem_ref_addr (ptr_type_node, obj)
	 : build_fold_addr_expr (obj);
}
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If the statements list becomes
   too big, emit a checker constructor and start a new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
/* Return entry block to be used for checker initialization code.
   Create new block if required.  */
static basic_block
chkp_get_entry_block (void)
{
  if (!entry_block)
    entry_block
      = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;

  return entry_block;
}

/* Return a bounds var to be used for pointer var PTR_VAR.  */
static tree
chkp_get_bounds_var (tree ptr_var)
{
  tree bnd_var;
  tree *slot;

  slot = chkp_bound_vars->get (ptr_var);
  if (slot)
    bnd_var = *slot;
  else
    {
      bnd_var = create_tmp_reg (pointer_bounds_type_node,
				CHKP_BOUND_TMP_NAME);
      chkp_bound_vars->put (ptr_var, bnd_var);
    }

  return bnd_var;
}

/* If BND is an abnormal bounds copy, return a copied value.
   Otherwise return BND.  */
static tree
chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
{
  if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
    {
      gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
      gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
      bnd = gimple_assign_rhs1 (bnd_def);
    }

  return bnd;
}
/* Register bounds BND for object PTR in the global bounds table.
   A copy of bounds may be created for abnormal ssa names.
   Returns bounds to use for PTR.  */
static tree
chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
{
  bool abnormal_ptr;

  if (!chkp_reg_bounds)
    return bnd;

  /* Do nothing if bounds are incomplete_bounds
     because it means bounds will be recomputed.  */
  if (bnd == incomplete_bounds)
    return bnd;

  abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
		  && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);

  /* A single bounds value may be reused multiple times for
     different pointer values.  It may cause coalescing issues
     for abnormal SSA names.  To avoid it we create a bounds
     copy in case it is computed for abnormal SSA name.

     We also cannot reuse such created copies for other pointers.  */
  if (abnormal_ptr
      || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
    {
      tree bnd_var = NULL_TREE;

      if (abnormal_ptr)
	{
	  if (SSA_NAME_VAR (ptr))
	    bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
	}
      else
	bnd_var = chkp_get_tmp_var ();

      /* For abnormal copies we may just find original
	 bounds and use them.  */
      if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
	bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
      /* For undefined values we usually use none bounds
	 value but in case of abnormal edge it may cause
	 coalescing failures.  Use default definition of
	 bounds variable instead to avoid it.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	       && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
	{
	  bnd = get_or_create_ssa_default_def (cfun, bnd_var);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Using default def bounds ");
	      print_generic_expr (dump_file, bnd, 0);
	      fprintf (dump_file, " for abnormal default def SSA name ");
	      print_generic_expr (dump_file, ptr, 0);
	      fprintf (dump_file, "\n");
	    }
	}
      else
	{
	  tree copy;
	  gimple *def = SSA_NAME_DEF_STMT (ptr);
	  gimple *assign;
	  gimple_stmt_iterator gsi;

	  if (bnd_var)
	    copy = make_ssa_name (bnd_var);
	  else
	    copy = make_temp_ssa_name (pointer_bounds_type_node,
				       NULL,
				       CHKP_BOUND_TMP_NAME);
	  bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
	  assign = gimple_build_assign (copy, bnd);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Creating a copy of bounds ");
	      print_generic_expr (dump_file, bnd, 0);
	      fprintf (dump_file, " for abnormal SSA name ");
	      print_generic_expr (dump_file, ptr, 0);
	      fprintf (dump_file, "\n");
	    }

	  if (gimple_code (def) == GIMPLE_NOP)
	    {
	      gsi = gsi_last_bb (chkp_get_entry_block ());
	      if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
		gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
	      else
		gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
	    }
	  else
	    {
	      gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
	      /* Sometimes (e.g. when we load a pointer from
		 memory) bounds are produced later than the pointer.
		 We need to insert the bounds copy appropriately.  */
	      if (gimple_code (bnd_def) != GIMPLE_NOP
		  && stmt_dominates_stmt_p (def, bnd_def))
		gsi = gsi_for_stmt (bnd_def);
	      else
		gsi = gsi_for_stmt (def);
	      gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
	    }

	  bnd = copy;
	}

      if (abnormal_ptr)
	bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
    }

  chkp_reg_bounds->put (ptr, bnd);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Registered bound ");
      print_generic_expr (dump_file, bnd, 0);
      fprintf (dump_file, " for pointer ");
      print_generic_expr (dump_file, ptr, 0);
      fprintf (dump_file, "\n");
    }

  return bnd;
}
/* Get bounds registered for object PTR in the global bounds table.  */
static tree
chkp_get_registered_bounds (tree ptr)
{
  tree *slot;

  if (!chkp_reg_bounds)
    return NULL_TREE;

  slot = chkp_reg_bounds->get (ptr);
  return slot ? *slot : NULL_TREE;
}

/* Add bound retvals to return statement pointed to by GSI.  */

static void
chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
{
  greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
  tree retval = gimple_return_retval (ret);
  tree ret_decl = DECL_RESULT (cfun->decl);
  tree bounds;

  if (!retval)
    return;

  if (BOUNDED_P (ret_decl))
    {
      bounds = chkp_find_bounds (retval, gsi);
      bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
      gimple_return_set_retbnd (ret, bounds);
    }

  update_stmt (ret);
}
/* Force OP to be suitable for using as an argument for call.
   New statements (if any) go to SEQ.  */
static tree
chkp_force_gimple_call_op (tree op, gimple_seq *seq)
{
  gimple_seq stmts;
  gimple_stmt_iterator si;

  op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);

  for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
    chkp_mark_stmt (gsi_stmt (si));

  gimple_seq_add_seq (seq, stmts);

  return op;
}
/* Generate lower bound check for memory access by ADDR.
   Check is inserted before the position pointed to by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
static void
chkp_check_lower (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}

/* Generate upper bound check for memory access by ADDR.
   Check is inserted before the position pointed to by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
static void
chkp_check_upper (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed to by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
/* Replace call to _bnd_chk_* pointed to by GSI with
   bndcu and bndcl calls.  DIRFLAG determines whether
   check is for read or write.  */

static void
chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
				    tree dirflag)
{
  gimple_stmt_iterator call_iter = *gsi;
  gimple *call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
    chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    {
      tree size = gimple_call_arg (call, 1);
      addr = fold_build_pointer_plus (addr, size);
      addr = fold_build_pointer_plus_hwi (addr, -1);
      chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
    }

  gsi_remove (&call_iter, true);
}
/* Replace call to _bnd_get_ptr_* pointed to by GSI with
   the corresponding bounds extract call.  */

static void
chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
{
  gimple *call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);
  gimple *extract;

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
    fndecl = chkp_extract_lower_fndecl;
  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
    fndecl = chkp_extract_upper_fndecl;
  else
    gcc_unreachable ();

  extract = gimple_build_call (fndecl, 1, bounds);
  gimple_call_set_lhs (extract, gimple_call_lhs (call));
  chkp_mark_stmt (extract);

  gsi_replace (gsi, extract, false);
}
/* Return COMPONENT_REF accessing FIELD in OBJ.  */
static tree
chkp_build_component_ref (tree obj, tree field)
{
  tree res;

  /* If object is TMR then we do not use component_ref but
     add offset instead.  We need it to be able to get the addr
     of the result later.  */
  if (TREE_CODE (obj) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (obj);
      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, DECL_FIELD_OFFSET (field));

      gcc_assert (offs);

      res = copy_node (obj);
      TREE_TYPE (res) = TREE_TYPE (field);
      TMR_OFFSET (res) = offs;
    }
  else
    res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);

  return res;
}

/* Return ARRAY_REF for array ARR and index IDX with
   specified element type ETYPE and element size ESIZE.  */
static tree
chkp_build_array_ref (tree arr, tree etype, tree esize,
		      unsigned HOST_WIDE_INT idx)
{
  tree index = build_int_cst (size_type_node, idx);
  tree res;

  /* If object is TMR then we do not use array_ref but
     add offset instead.  We need it to be able to get the addr
     of the result later.  */
  if (TREE_CODE (arr) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (arr);

      esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
				       esize, index);
      gcc_assert (esize);

      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, esize);
      gcc_assert (offs);

      res = copy_node (arr);
      TREE_TYPE (res) = etype;
      TMR_OFFSET (res) = offs;
    }
  else
    res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);

  return res;
}
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds the basic
   offset of TYPE in the outer structure in bits.

   ITER points to a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  tree temp = make_temp_ssa_name (type, NULL, "");
	  gimple *assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;

	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      if (!maxval || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds the basic
   offset of TYPE in the outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
static void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    HOST_WIDE_INT field_offs = 0;
	    if (DECL_FIELD_BIT_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      /* The object type is an array of complete type, i.e., other
	 than a flexible array.  */
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}

/* Fill bitmap RES with information about bounds for
   type TYPE.  See chkp_find_bound_slots_1 for more
   details.  */
void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}
/* Return 1 if call to FNDECL should be instrumented
   and 0 otherwise.  */

static bool
chkp_instrument_normal_builtin (tree fndecl)
{
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STPCPY:
    case BUILT_IN_STPNCPY:
    case BUILT_IN_STRCAT:
    case BUILT_IN_STRNCAT:
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_BZERO:
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_MALLOC:
    case BUILT_IN_CALLOC:
    case BUILT_IN_REALLOC:
      return true;

    default:
      return false;
    }
}
/* Add bound arguments to call statement pointed to by GSI.
   Also performs a replacement of user checker builtin calls
   with internal ones.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     a bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl
      && DECL_ARGUMENTS (fndecl)
      && gimple_call_fntype (call) == TREE_TYPE (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *) xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      /* In case of a type cast we should modify used function
	 type instead of using type of new fndecl.  */
      if (gimple_call_fntype (call) != TREE_TYPE (fndecl))
	{
	  tree type = gimple_call_fntype (call);
	  type = chkp_copy_function_type_adding_bounds (type);
	  gimple_call_set_fntype (new_call, type);
	}
      else
	gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     we pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* Replace old call statement with the new one.  */
  if (call != new_call)
    {
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exist then a new var is created with the
   specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires %qs "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
/* Generate code to make bounds with specified lower bound LB and SIZE.
   If AFTER is 1 then code is inserted after the position pointed to by
   ITER, otherwise code is inserted before that position.
   If ITER is NULL then code is added to the entry block.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
/* Return var holding zero bounds.  */
tree
chkp_get_zero_bounds_var (void)
{
  if (!chkp_zero_bounds_var)
    chkp_zero_bounds_var
      = chkp_make_static_const_bounds (0, -1,
				       CHKP_ZERO_BOUNDS_VAR_NAME);
  return chkp_zero_bounds_var;
}

/* Return var holding none bounds.  */
tree
chkp_get_none_bounds_var (void)
{
  if (!chkp_none_bounds_var)
    chkp_none_bounds_var
      = chkp_make_static_const_bounds (-1, 0,
				       CHKP_NONE_BOUNDS_VAR_NAME);
  return chkp_none_bounds_var;
}
/* Return SSA_NAME used to represent zero bounds.  */
static tree
chkp_get_zero_bounds (void)
{
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      zero_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}

/* Return SSA_NAME used to represent none bounds.  */
static tree
chkp_get_none_bounds (void)
{
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      none_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
/* Return bounds to be used as a result of an operation which
   should not create a pointer (e.g. MULT_EXPR).  */
static tree
chkp_get_invalid_op_bounds (void)
{
  return chkp_get_zero_bounds ();
}

/* Return bounds to be used for loads of non-pointer values.  */
static tree
chkp_get_nonpointer_load_bounds (void)
{
  return chkp_get_zero_bounds ();
}
/* Return 1 if we may use a bndret call to get bounds for the pointer
   returned by CALL.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  if (gimple_call_internal_p (call))
    {
      if (gimple_call_internal_fn (call) == IFN_VA_ARG)
	return true;
      return false;
    }

  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  return true;
}
/* Build bounds returned by CALL.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple *stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call))
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);
    }
  else
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
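/* For example, for a call with a pointer LHS

     p_1 = foo ();

   the general case above appends (an illustrative sketch; SSA names are
   made up):

     __bound_tmp.0_2 = __builtin___chkp_bndret (p_1);

   and registers __bound_tmp.0_2 as the bounds of p_1.  */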
/* Return bounds used as returned by call
   which produced SSA name VAL.  */
gcall *
chkp_retbnd_call_by_val (tree val)
{
  if (TREE_CODE (val) != SSA_NAME)
    return NULL;

  gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);

  imm_use_iterator use_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
    if (chkp_gimple_call_builtin_p (USE_STMT (use_p), BUILT_IN_CHKP_BNDRET))
      return as_a <gcall *> (USE_STMT (use_p));

  return NULL;
}
/* Check the next parameter for the given PARM is bounds
   and return its default SSA_NAME (create if required).  */
static tree
chkp_get_next_bounds_parm (tree parm)
{
  tree bounds = TREE_CHAIN (parm);
  gcc_assert (POINTER_BOUNDS_P (bounds));
  bounds = ssa_default_def (cfun, bounds);
  if (!bounds)
    {
      bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
      set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
    }
  return bounds;
}
/* Return bounds to be used for input argument PARM.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
			  "main") == 0)
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds, 0);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	bounds = chkp_get_zero_bounds ();
    }

  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm), 0);
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
/* Build and return CALL_EXPR for bndldx builtin with specified
   arguments.  */
tree
chkp_build_bndldx_call (tree addr, tree ptr)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
		    chkp_bndldx_fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
			       fn, 2, addr, ptr);
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
/* Insert code to load bounds for PTR located by ADDR.
   Code is inserted after position pointed by GSI.
   Loaded bounds are returned.  */
static tree
chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
{
  gimple_seq seq = NULL;
  gimple *stmt;
  tree bounds;

  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
  chkp_mark_stmt (stmt);
  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndldx for pointer ");
      print_generic_expr (dump_file, ptr, 0);
      fprintf (dump_file, ": ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return bounds;
}
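/* For example, after a pointer load

     p_1 = buf1;

   this function appends (an illustrative sketch; names are made up):

     __bound_tmp.0_2 = __builtin___chkp_bndldx (&buf1, p_1);

   which looks up in the bounds table the bounds previously associated
   with the pointer value stored at &buf1.  */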
/* Build and return CALL_EXPR for bndstx builtin with specified
   arguments.  */
tree
chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
		    chkp_bndstx_fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
			       fn, 3, ptr, bounds, addr);
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
/* Insert code to store BOUNDS for PTR stored by ADDR.
   New statements are inserted after position pointed
   by GSI.  */
void
chkp_build_bndstx (tree addr, tree ptr, tree bounds,
		   gimple_stmt_iterator *gsi)
{
  gimple_seq seq = NULL;
  gimple *stmt;

  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
  chkp_mark_stmt (stmt);
  gimple_call_set_with_bounds (stmt, true);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndstx for pointer store ");
      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
      print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
    }
}
/* This function is called when call statement
   is inlined and therefore we can't use bndret
   for its LHS anymore.  Function fixes bndret
   call using new RHS value if possible.  */
void
chkp_fixup_inlined_call (tree lhs, tree rhs)
{
  tree addr, bounds;
  gcall *retbnd, *bndldx;

  if (!BOUNDED_P (lhs))
    return;

  /* Search for retbnd call.  */
  retbnd = chkp_retbnd_call_by_val (lhs);
  if (!retbnd)
    return;

  /* Currently only handle cases when call is replaced
     with a memory access.  In this case bndret call
     may be replaced with bndldx call.  Otherwise we
     have to search for bounds which may cause wrong
     result due to various optimizations applied.  */
  switch (TREE_CODE (rhs))
    {
    case VAR_DECL:
      if (DECL_REGISTER (rhs))
	return;
      break;

    case MEM_REF:
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      addr = get_base_address (rhs);
      if (!DECL_P (addr)
	  && TREE_CODE (addr) != MEM_REF)
	return;
      if (DECL_P (addr) && DECL_REGISTER (addr))
	return;
      break;

    default:
      return;
    }

  /* Create a new statements sequence with bndldx call.  */
  gimple_stmt_iterator gsi = gsi_for_stmt (retbnd);
  addr = build_fold_addr_expr (rhs);
  chkp_build_bndldx (addr, lhs, &gsi);
  bndldx = as_a <gcall *> (gsi_stmt (gsi));

  /* Remove bndret call.  */
  bounds = gimple_call_lhs (retbnd);
  gsi = gsi_for_stmt (retbnd);
  gsi_remove (&gsi, true);

  /* Link new bndldx call.  */
  gimple_call_set_lhs (bndldx, bounds);
  update_stmt (bndldx);
}
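/* For example, if inlining replaced the call in

     p_1 = foo ();
     __bound_tmp.0_2 = __builtin___chkp_bndret (p_1);

   with a memory load 'p_1 = buf.ptr', the bndret call is rewritten here
   into a bounds table load (an illustrative sketch):

     __bound_tmp.0_2 = __builtin___chkp_bndldx (&buf.ptr, p_1);  */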
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple *assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);
  tree base = NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      base = rhs1;
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();

	base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple *stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    gimple *stmt;
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  /* We may reuse bounds of other pointer we copy/modify.  But it is not
     allowed for abnormal ssa names.  If we produced a pointer using
     abnormal ssa name, we better make a bounds copy to avoid coalescing
     issues.  */
  if (base
      && TREE_CODE (base) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
    {
      gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
      gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
      bounds = gimple_assign_lhs (stmt);
    }

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
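/* For example, for a conditional pointer assignment

     p_1 = c_2 ? q_3 : r_4;

   where q_3 and r_4 carry different bounds, the COND_EXPR case above
   emits a bounds selection mirroring the pointer selection (an
   illustrative sketch; names are made up):

     __bound_tmp.0_5 = c_2 ? __bound_tmp.0_3 : __bound_tmp.0_4;  */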
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.

   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.

   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node, 0);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var, 0);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    NULL,
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
/* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE.  */
static tree
chkp_build_make_bounds_call (tree lower_bound, tree size)
{
  tree call = build1 (ADDR_EXPR,
		      build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
		      chkp_bndmk_fndecl);
  return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
			  call, 2, lower_bound, size);
}
/* Create static bounds var of specified OBJ which is
   either VAR_DECL or string constant.  */
static tree
chkp_make_static_bounds (tree obj)
{
  static int string_id = 1;
  static int var_id = 1;
  tree *slot;
  const char *var_name;
  char *bnd_var_name;
  tree bnd_var;

  /* First check if we already have required var.  */
  if (chkp_static_var_bounds)
    {
      /* For vars we use assembler name as a key in
	 chkp_static_var_bounds map.  It allows to
	 avoid duplicating bound vars for decls
	 sharing assembler name.  */
      if (VAR_P (obj))
	{
	  tree name = DECL_ASSEMBLER_NAME (obj);
	  slot = chkp_static_var_bounds->get (name);
	  if (slot)
	    return *slot;
	}
      else
	{
	  slot = chkp_static_var_bounds->get (obj);
	  if (slot)
	    return *slot;
	}
    }

  /* Build decl for bounds var.  */
  if (VAR_P (obj))
    {
      if (DECL_IGNORED_P (obj))
	{
	  bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
	  sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
	}
      else
	{
	  var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));

	  /* For hidden symbols we want to skip first '*' char.  */
	  if (*var_name == '*')
	    var_name++;

	  bnd_var_name = (char *) xmalloc (strlen (var_name)
					   + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
	  strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
	  strcat (bnd_var_name, var_name);
	}

      bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			    get_identifier (bnd_var_name),
			    pointer_bounds_type_node);

      /* Address of the obj will be used as lower bound.  */
      TREE_ADDRESSABLE (obj) = 1;
    }
  else
    {
      bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
      sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);

      bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			    get_identifier (bnd_var_name),
			    pointer_bounds_type_node);
    }

  free (bnd_var_name);

  TREE_PUBLIC (bnd_var) = 0;
  TREE_USED (bnd_var) = 1;
  TREE_READONLY (bnd_var) = 0;
  TREE_STATIC (bnd_var) = 1;
  TREE_ADDRESSABLE (bnd_var) = 0;
  DECL_ARTIFICIAL (bnd_var) = 1;
  DECL_COMMON (bnd_var) = 1;
  DECL_COMDAT (bnd_var) = 1;
  DECL_READ_P (bnd_var) = 1;
  DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
  /* Force output similar to constant bounds.
     See chkp_make_static_const_bounds.  */
  varpool_node::get_create (bnd_var)->force_output = 1;
  /* Mark symbol as requiring bounds initialization.  */
  varpool_node::get_create (bnd_var)->need_bounds_init = 1;
  varpool_node::finalize_decl (bnd_var);

  /* Add created var to the map to use it for other references
     to obj.  */
  if (!chkp_static_var_bounds)
    chkp_static_var_bounds = new hash_map<tree, tree>;

  if (VAR_P (obj))
    {
      tree name = DECL_ASSEMBLER_NAME (obj);
      chkp_static_var_bounds->put (name, bnd_var);
    }
  else
    chkp_static_var_bounds->put (obj, bnd_var);

  return bnd_var;
}
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple *stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "'\n");
    }

  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
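/* For example, for 'extern int arr[];' whose size is not known at
   compile time, the entry block may get (an illustrative sketch; the
   size call reflects chkp_sizeof_fndecl and all names are made up):

     __size_tmp.1_1 = __builtin___chkp_sizeof (arr);
     __bound_tmp.0_2 = __builtin___chkp_bndmk (&arr, __size_tmp.1_1);

   so that the object size is provided by a relocation resolved at
   link or load time.  */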
/* Return 1 if TYPE has fields with zero size or fields
   marked with chkp_variable_size attribute.  */
bool
chkp_variable_size_type (tree type)
{
  bool res = false;
  tree field;

  if (RECORD_OR_UNION_TYPE_P (type))
    for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
      {
	if (TREE_CODE (field) == FIELD_DECL)
	  res = res
	    || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
	    || chkp_variable_size_type (TREE_TYPE (field));
      }
  else
    res = !TYPE_SIZE (type)
      || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || tree_to_uhwi (TYPE_SIZE (type)) == 0;

  return res;
}
/* Compute and return bounds for address of DECL which is
   one of VAR_DECL, PARM_DECL, RESULT_DECL.  */
static tree
chkp_get_bounds_for_decl_addr (tree decl)
{
  tree bounds;

  gcc_assert (VAR_P (decl)
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  bounds = chkp_get_registered_addr_bounds (decl);

  if (bounds)
    return bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Building bounds for address of decl ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, "\n");
    }

  /* Use zero bounds if size is unknown and checks for
     unknown sizes are restricted.  */
  if ((!DECL_SIZE (decl)
       || (chkp_variable_size_type (TREE_TYPE (decl))
	   && (TREE_STATIC (decl)
	       || DECL_EXTERNAL (decl)
	       || TREE_PUBLIC (decl))))
      && !flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (flag_chkp_use_static_bounds
      && VAR_P (decl)
      && (TREE_STATIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_PUBLIC (decl))
      && !DECL_THREAD_LOCAL_P (decl))
    {
      tree bnd_var = chkp_make_static_bounds (decl);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else if (!DECL_SIZE (decl)
	   || (chkp_variable_size_type (TREE_TYPE (decl))
	       && (TREE_STATIC (decl)
		   || DECL_EXTERNAL (decl)
		   || TREE_PUBLIC (decl))))
    {
      gcc_assert (VAR_P (decl));
      bounds = chkp_generate_extern_var_bounds (decl);
    }
  else
    {
      tree lb = chkp_build_addr_expr (decl);
      bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
    }

  return bounds;
}
/* Compute and return bounds for constant string.  */
static tree
chkp_get_bounds_for_string_cst (tree cst)
{
  tree bounds;
  tree lb;
  tree size;

  gcc_assert (TREE_CODE (cst) == STRING_CST);

  bounds = chkp_get_registered_bounds (cst);

  if (bounds)
    return bounds;

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      tree bnd_var = chkp_make_static_bounds (cst);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    {
      lb = chkp_build_addr_expr (cst);
      size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
      bounds = chkp_make_bounds (lb, size, NULL, false);
    }

  bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);

  return bounds;
}
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   entry block.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq = NULL;
      gimple *stmt;
      tree bounds;

      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
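/* For example, when both the bounds of a whole object and bounds
   narrowed to one of its parts must hold, the code above emits a call
   through chkp_intersect_fndecl (an illustrative sketch; the builtin
   spelling and temporaries follow the pattern of the other checker
   builtins):

     __bound_tmp.0_3 = __builtin___chkp_intersect (__bound_tmp.0_1,
						   __bound_tmp.0_2);

   whose result is valid only within both input ranges.  */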
/* Return 1 if we are allowed to narrow bounds for addressed FIELD
   and 0 otherwise.  REF is reference to the field.  */
static bool
chkp_may_narrow_to_field (tree ref, tree field)
{
  return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
    && tree_to_uhwi (DECL_SIZE (field)) != 0
    && !(flag_chkp_flexible_struct_trailing_arrays
	 && array_at_struct_end_p (ref, true))
    && (!DECL_FIELD_OFFSET (field)
	|| TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
    && (!DECL_FIELD_BIT_OFFSET (field)
	|| TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
    && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
    && !chkp_variable_size_type (TREE_TYPE (field));
}
/* Return 1 if bounds for FIELD should be narrowed to
   field's own size.  REF is reference to the field.  */
static bool
chkp_narrow_bounds_for_field (tree ref, tree field)
{
  HOST_WIDE_INT offs;
  HOST_WIDE_INT bit_offs;

  if (!chkp_may_narrow_to_field (ref, field))
    return false;

  /* Access to compiler generated fields should not cause
     bounds narrowing.  */
  if (DECL_ARTIFICIAL (field))
    return false;

  offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
  bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));

  return (flag_chkp_narrow_bounds
	  && (flag_chkp_first_field_has_own_bounds
	      || offs
	      || bit_offs));
}
/* Perform narrowing for BOUNDS of an INNER reference.  Shift boundary
   by OFFSET bytes and limit to SIZE bytes.  Newly created statements are
   inserted before position pointed by ITER.  */
static tree
chkp_narrow_size_and_offset (tree bounds, tree inner, tree offset,
			     tree size, gimple_stmt_iterator *iter)
{
  tree addr = chkp_build_addr_expr (unshare_expr (inner));
  tree t = TREE_TYPE (addr);

  gimple *stmt = gimple_build_assign (NULL_TREE, addr);
  addr = make_temp_ssa_name (t, stmt, CHKP_BOUND_TMP_NAME);
  gimple_assign_set_lhs (stmt, addr);
  gsi_insert_seq_before (iter, stmt, GSI_SAME_STMT);

  stmt = gimple_build_assign (NULL_TREE, POINTER_PLUS_EXPR, addr, offset);
  tree shifted = make_temp_ssa_name (t, stmt, CHKP_BOUND_TMP_NAME);
  gimple_assign_set_lhs (stmt, shifted);
  gsi_insert_seq_before (iter, stmt, GSI_SAME_STMT);

  tree bounds2 = chkp_make_bounds (shifted, size, iter, false);

  return chkp_intersect_bounds (bounds, bounds2, iter);
}
/* Perform narrowing for BOUNDS using bounds computed for field
   access COMPONENT.  ITER meaning is the same as for
   chkp_intersect_bounds.  */
static tree
chkp_narrow_bounds_to_field (tree bounds, tree component,
			     gimple_stmt_iterator *iter)
{
  tree field = TREE_OPERAND (component, 1);
  tree size = DECL_SIZE_UNIT (field);
  tree field_ptr = chkp_build_addr_expr (component);
  tree field_bounds;

  field_bounds = chkp_make_bounds (field_ptr, size, iter, false);

  return chkp_intersect_bounds (field_bounds, bounds, iter);
}
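/* For example, for an access to 's.buf' where 'buf' is an array member
   of 's', the functions above intersect the bounds of 's' with freshly
   made bounds [&s.buf, &s.buf + sizeof (s.buf) - 1], so an overflow of
   'buf' is caught even when it stays inside 's' (illustrative).  */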
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE output parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS output parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR
	 || TREE_CODE (var) == BIT_FIELD_REF)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all nodes to nodes vector
     in reversed order.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = ((TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)))
	       || TREE_CODE (node) == BIT_FIELD_REF);
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      gcc_assert (VAR_P (var)
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
     bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (var,
					       TREE_OPERAND (last_comp, 1))))
	    {
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (var, field))
	    comp_to_narrow = var;
	  last_comp = var;

	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == BIT_FIELD_REF)
	{
	  if (flag_chkp_narrow_bounds && bounds)
	    {
	      tree offset, size;
	      chkp_parse_bit_field_ref (var, UNKNOWN_LOCATION, &offset, &size);
	      *bounds
		= chkp_narrow_size_and_offset (*bounds, TREE_OPERAND (var, 0),
					       offset, size, iter);
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
/* Parse BIT_FIELD_REF NODE at location LOC.  Return OFFSET
   and SIZE in bytes.  */
static void
chkp_parse_bit_field_ref (tree node, location_t loc, tree *offset,
			  tree *size)
{
  tree bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
  tree offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
  tree rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
  offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);

  tree s = fold_convert (size_type_node, TREE_OPERAND (node, 1));
  s = size_binop_loc (loc, PLUS_EXPR, s, rem);
  s = size_binop_loc (loc, CEIL_DIV_EXPR, s, bpu);
  s = fold_convert (size_type_node, s);

  *offset = offs;
  *size = s;
}
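/* For example, a BIT_FIELD_REF at bit offset 17 of bit size 10 yields
   rem = 17 % 8 = 1, byte offset 17 / 8 = 2, and byte size
   ceil ((10 + 1) / 8) = 2: the two bytes covering all accessed bits.  */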
/* Compute and return bounds for address of OBJ.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
/* Compute bounds for pointer PTR loaded from PTR_SRC.  Generate statements
   to compute bounds if required.  Computed bounds should be available at
   position pointed by ITER.

   If PTR_SRC is NULL_TREE then pointer definition is identified.

   If PTR_SRC is not NULL_TREE then ITER points to the statement which loads
   PTR.  If PTR is any memory reference then ITER points to a statement
   after which bndldx will be inserted.  In both cases ITER will be updated
   to point to the inserted bndldx statement.  */
static tree
chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
{
  tree addr = NULL_TREE;
  tree bounds = NULL_TREE;

  if (!ptr_src)
    ptr_src = ptr;

  bounds = chkp_get_registered_bounds (ptr_src);

  if (bounds)
    return bounds;

  switch (TREE_CODE (ptr_src))
    {
    case MEM_REF:
    case VAR_DECL:
      if (BOUNDED_P (ptr_src))
	if (VAR_P (ptr) && DECL_REGISTER (ptr))
	  bounds = chkp_get_zero_bounds ();
	else
	  {
	    addr = chkp_build_addr_expr (ptr_src);
	    bounds = chkp_build_bndldx (addr, ptr, iter);
	  }
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      addr = get_base_address (ptr_src);
      if (DECL_P (addr)
	  || TREE_CODE (addr) == MEM_REF
	  || TREE_CODE (addr) == TARGET_MEM_REF)
	{
	  if (BOUNDED_P (ptr_src))
	    if (VAR_P (ptr) && DECL_REGISTER (ptr))
	      bounds = chkp_get_zero_bounds ();
	    else
	      {
		addr = chkp_build_addr_expr (ptr_src);
		bounds = chkp_build_bndldx (addr, ptr, iter);
	      }
	  else
	    bounds = chkp_get_nonpointer_load_bounds ();
	}
      else
	{
	  gcc_assert (TREE_CODE (addr) == SSA_NAME);
	  bounds = chkp_find_bounds (addr, iter);
	}
      break;

    case PARM_DECL:
      /* Handled above but failed.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case TARGET_MEM_REF:
      addr = chkp_build_addr_expr (ptr_src);
      bounds = chkp_build_bndldx (addr, ptr, iter);
      break;

    case SSA_NAME:
      bounds = chkp_get_registered_bounds (ptr_src);
      if (!bounds)
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src);
	  gphi_iterator phi_iter;

	  bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);

	  gcc_assert (bounds);

	  if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
	    {
	      unsigned i;

	      for (i = 0; i < gimple_phi_num_args (def_phi); i++)
		{
		  tree arg = gimple_phi_arg_def (def_phi, i);
		  tree arg_bnd;
		  gphi *phi_bnd;

		  arg_bnd = chkp_find_bounds (arg, NULL);

		  /* chkp_get_bounds_by_definition created new phi
		     statement and phi_iter points to it.

		     Previous call to chkp_find_bounds could create
		     new basic block and therefore change phi statement
		     phi_iter points to.  */
		  phi_bnd = phi_iter.phi ();

		  add_phi_arg (phi_bnd, arg_bnd,
			       gimple_phi_arg_edge (def_phi, i),
			       UNKNOWN_LOCATION);
		}

	      /* If all bound phi nodes have their arg computed
		 then we may finish its computation.  See
		 chkp_finish_incomplete_bounds for more details.  */
	      if (chkp_may_finish_incomplete_bounds ())
		chkp_finish_incomplete_bounds ();
	    }

	  gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
		      || chkp_incomplete_bounds (bounds));
	}
      break;

    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
      break;

    case INTEGER_CST:
      if (integer_zerop (ptr_src))
	bounds = chkp_get_none_bounds ();
      else
	bounds = chkp_get_invalid_op_bounds ();
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
		   get_tree_code_name (TREE_CODE (ptr_src)));
	  print_node (dump_file, "", ptr_src, 0);
	}
      internal_error ("chkp_find_bounds: Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (ptr_src)));
    }

  if (!bounds)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_find_bounds: cannot find bounds for pointer\n");
	  print_node (dump_file, "", ptr_src, 0);
	}
      internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
    }

  return bounds;
}
/* Normal case for bounds search without forced narrowing.  */
static tree
chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
}

/* Search bounds for pointer PTR loaded from PTR_SRC
   by statement *ITER points to.  */
static tree
chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, ptr_src, iter);
}
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (field && chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error ("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		    get_tree_code_name (TREE_CODE (type)));
}
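/* For example, a structure copy

     struct S { int *a; int *b; } dst, src;
     dst = src;

   is decomposed by the walk above into handler calls for the pairs
   (dst.a, src.a) and (dst.b, src.b), each of which may emit a
   bndldx/bndstx pair for its field (illustrative).  */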
/* Add code to copy bounds for assignment of RHS to LHS.
   ARG is an iterator pointing to the new code position.  */
static void
chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
{
  gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
  tree bounds = chkp_find_bounds (rhs, iter);
  tree addr = chkp_build_addr_expr (lhs);

  chkp_build_bndstx (addr, rhs, bounds, iter);
}
/* Emit static bound initializers and size vars.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
/* An instrumentation function which is called for each statement
   having memory access we want to instrument.  It inserts check
   code and bounds copy code.

   ITER points to statement to instrument.

   NODE holds memory access in statement to check.

   LOC holds the location information for statement.

   DIRFLAG determines whether access is read or write.

   ACCESS_OFFS should be added to address used in NODE
   access.

   ACCESS_SIZE holds size of checked access.

   SAFE indicates if NODE access is safe and should not be
   checked.  */
static void
chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
		   location_t loc, tree dirflag,
		   tree access_offs, tree access_size,
		   bool safe)
{
  tree node_type = TREE_TYPE (node);
  tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
  tree addr_first = NULL_TREE; /* address of the first accessed byte */
  tree addr_last = NULL_TREE; /* address of the last accessed byte */
  tree ptr = NULL_TREE; /* a pointer used for dereference */
  tree bounds = NULL_TREE;

  /* We do not need instrumentation for clobbers.  */
  if (dirflag == integer_one_node
      && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
      && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
    return;

  switch (TREE_CODE (node))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
	bool bitfield;
	tree elt;

	if (safe)
	  {
	    /* We are not going to generate any checks, so do not
	       generate bounds as well.  */
	    addr_first = chkp_build_addr_expr (node);
	    break;
	  }

	chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, false);

	/* Break if there is no dereference and operation is safe.  */

	if (bitfield)
	  {
	    tree field = TREE_OPERAND (node, 1);

	    if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
	      size = DECL_SIZE_UNIT (field);

	    if (elt)
	      elt = chkp_build_addr_expr (elt);
	    addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
	    addr_first = fold_build_pointer_plus_loc (loc,
						      addr_first,
						      byte_position (field));
	  }
	else
	  addr_first = chkp_build_addr_expr (node);
      }
      break;

    case INDIRECT_REF:
      ptr = TREE_OPERAND (node, 0);
      addr_first = ptr;
      break;

    case MEM_REF:
      ptr = TREE_OPERAND (node, 0);
      addr_first = chkp_build_addr_expr (node);
      break;

    case TARGET_MEM_REF:
      ptr = TMR_BASE (node);
      addr_first = chkp_build_addr_expr (node);
      break;

    case ARRAY_RANGE_REF:
      printf ("ARRAY_RANGE_REF\n");
      debug_gimple_stmt (gsi_stmt (*iter));
      debug_tree (node);
      gcc_unreachable ();
      break;

    case BIT_FIELD_REF:
      {
	tree offset, size;

	gcc_assert (!access_offs);
	gcc_assert (!access_size);

	chkp_parse_bit_field_ref (node, loc, &offset, &size);

	chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
			   dirflag, offset, size, safe);
	return;
      }
      break;

    case VAR_DECL:
    case RESULT_DECL:
    case PARM_DECL:
      if (dirflag != integer_one_node
	  || DECL_REGISTER (node))
	return;

      safe = true;
      addr_first = chkp_build_addr_expr (node);
      break;

    default:
      return;
    }

  /* If addr_last was not computed then use (addr_first + size - 1)
     expression to compute it.  */
  if (!addr_last)
    {
      addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
      addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
    }

  /* Shift both first_addr and last_addr by access_offs if specified.  */
  if (access_offs)
    {
      addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
      addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
    }

  /* Generate bndcl/bndcu checks if memory access is not safe.  */
  if (!safe)
    {
      gimple_stmt_iterator stmt_iter = *iter;

      if (!bounds)
	bounds = chkp_find_bounds (ptr, iter);

      chkp_check_mem_access (addr_first, addr_last, bounds,
			     stmt_iter, loc, dirflag);
    }

  /* We need to store bounds in case pointer is stored.  */
  if (dirflag == integer_one_node
      && chkp_type_has_pointer (node_type)
      && flag_chkp_store_bounds)
    {
      gimple *stmt = gsi_stmt (*iter);
      tree rhs1 = gimple_assign_rhs1 (stmt);
      enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

      if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
	chkp_walk_pointer_assignments (node, rhs1, iter,
				       chkp_copy_bounds_for_elem);
      else
	{
	  bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
	  chkp_build_bndstx (addr_first, rhs1, bounds, iter);
	}
    }
}
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.  */
void
chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple *stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  gcc_assert (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDSTX)
		      || chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDLDX)
		      || chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET));

	  new_edge = edge->caller->create_edge (callee,
						as_a <gcall *> (stmt),
						edge->count,
						edge->frequency);
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      gsi_prev (&iter);
    }
}
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws.  */
static void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    while (!gsi_end_p (next))
	      {
		gimple *next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
/* Walker callback for chkp_replace_function_pointers.  Replaces
   function pointer in the specified operand with pointer to the
   instrumented function version.  */
static tree
chkp_replace_function_pointer (tree *op, int *walk_subtrees,
			       void *data ATTRIBUTE_UNUSED)
{
  if (TREE_CODE (*op) == FUNCTION_DECL
      && chkp_instrumentable_p (*op)
      && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
	  /* For builtins we replace pointers only for selected
	     function and functions having definitions.  */
	  || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
	      && (chkp_instrument_normal_builtin (*op)
		  || gimple_has_body_p (*op)))))
    {
      struct cgraph_node *node = cgraph_node::get_create (*op);
      struct cgraph_node *clone = NULL;

      if (!node->instrumentation_clone)
	clone = chkp_maybe_create_clone (*op);

      if (clone)
	*op = clone->decl;
      *walk_subtrees = 0;
    }

  return NULL;
}
/* This function searches for function pointers in statement
   pointed by GSI and replaces them with pointers to instrumented
   function versions.  */
static void
chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* For calls we want to walk call args only.  */
  if (gimple_code (stmt) == GIMPLE_CALL)
    {
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); i++)
	walk_tree (gimple_call_arg_ptr (stmt, i),
		   chkp_replace_function_pointer, NULL, NULL);
    }
  else
    walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
}
/* This function instruments all statements working with memory,
   calls and rets.

   It also removes excess statements from static initializers.  */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple *s = gsi_stmt (i);

	  /* Skip statement marked to not be instrumented.  */
	  if (chkp_marked_stmt_p (s))
	    {
	      gsi_next (&i);
	      continue;
	    }

	  chkp_replace_function_pointers (&i);

	  switch (gimple_code (s))
	    {
	    case GIMPLE_ASSIGN:
	      chkp_process_stmt (&i, gimple_assign_lhs (s),
				 gimple_location (s), integer_one_node,
				 NULL_TREE, NULL_TREE, safe);
	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
				 gimple_location (s), integer_zero_node,
				 NULL_TREE, NULL_TREE, safe);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
				   gimple_location (s), integer_zero_node,
				   NULL_TREE, NULL_TREE, safe);
	      break;

	    case GIMPLE_RETURN:
	      {
		greturn *r = as_a <greturn *> (s);
		if (gimple_return_retval (r) != NULL_TREE)
		  {
		    chkp_process_stmt (&i, gimple_return_retval (r),
				       gimple_location (r),
				       integer_zero_node,
				       NULL_TREE, NULL_TREE, safe);

		    /* Additionally we need to add bounds
		       to return statement.  */
		    chkp_add_bounds_to_ret_stmt (&i);
		  }
	      }
	      break;

	    case GIMPLE_CALL:
	      chkp_add_bounds_to_call_stmt (&i);
	      break;

	    default:
	      ;
	    }

	  gsi_next (&i);

	  /* We do not need any actual pointer stores in checker
	     static initializer.  */
	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
	      && gimple_code (s) == GIMPLE_ASSIGN
	      && gimple_store_p (s))
	    {
	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
	      gsi_remove (&del_iter, true);
	      unlink_stmt_vdef (s);
	      release_defs (s);
	    }
	}
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
	{
	  if (BOUNDED_P (arg))
	    {
	      tree bounds = chkp_get_next_bounds_parm (arg);
	      tree def_ptr = ssa_default_def (cfun, arg);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      chkp_build_bndstx (chkp_build_addr_expr (arg),
				 def_ptr ? def_ptr : arg,
				 bounds, &iter);

	      /* Skip bounds arg.  */
	      arg = TREE_CHAIN (arg);
	    }
	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
	    {
	      tree orig_arg = arg;
	      bitmap slots = BITMAP_ALLOC (NULL);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      bitmap_iterator bi;
	      unsigned bnd_no;

	      chkp_find_bound_slots (TREE_TYPE (arg), slots);

	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
		{
		  tree bounds = chkp_get_next_bounds_parm (arg);
		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
		  tree addr = chkp_build_addr_expr (orig_arg);
		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
				     build_int_cst (ptr_type_node, offs));
		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
				     bounds, &iter);

		  arg = DECL_CHAIN (arg);
		}
	      BITMAP_FREE (slots);
	    }
	}
}
/* Find init/null/copy_ptr_bounds calls and replace them
   with assignments.  It should allow better code
   optimization.  */
static void
chkp_remove_useless_builtins ()
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree fndecl;
	  enum built_in_function fcode;

	  /* Find builtins returning first arg and replace
	     them with assignments.  */
	  if (gimple_code (stmt) == GIMPLE_CALL
	      && (fndecl = gimple_call_fndecl (stmt))
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && (fcode = DECL_FUNCTION_CODE (fndecl))
	      && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
	    {
	      tree res = gimple_call_arg (stmt, 0);
	      update_call_from_tree (&gsi, res);
	      stmt = gsi_stmt (gsi);
	      update_stmt (stmt);
	    }
	}
    }
}
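/* For example, a call such as

     p_1 = __builtin___chkp_init_ptr_bounds (q_2);

   is replaced here with the plain copy 'p_1 = q_2;' (its bounds effect
   was already captured during instrumentation), which is friendlier to
   later optimizations.  The builtin spelling follows the pattern of the
   other checker builtins; SSA names are made up.  */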
/* Initialize pass.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
/* Finalize instrumentation pass.  */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);

  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
}
/* Main instrumentation pass function.  */
static unsigned int
chkp_execute (void)
{
  chkp_init ();

  chkp_instrument_function ();

  chkp_remove_useless_builtins ();

  chkp_function_mark_instrumented (cfun->decl);

  chkp_fix_cfg ();

  chkp_fini ();

  return 0;
}
/* Instrumentation pass gate.  */
static bool
chkp_gate (void)
{
  cgraph_node *node = cgraph_node::get (cfun->decl);
  return ((node != NULL
	   && node->instrumentation_clone)
	  || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
}
namespace {

const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }

}; // class pass_chkp

} // anon namespace
gimple_opt_pass *
make_pass_chkp (gcc::context *ctxt)
{
  return new pass_chkp (ctxt);
}

#include "gt-tree-chkp.h"