1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
36 #include "tree-iterator.h"
38 #include "langhooks.h"
39 #include "tree-pass.h"
40 #include "diagnostic.h"
42 #include "tree-ssa-address.h"
44 #include "tree-ssa-loop-niter.h"
45 #include "gimple-pretty-print.h"
46 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "print-tree.h"
51 #include "insn-config.h"
59 #include "tree-ssa-propagate.h"
60 #include "gimple-fold.h"
61 #include "tree-chkp.h"
62 #include "gimple-walk.h"
68 /* Pointer Bounds Checker instruments code with memory checks to find
69 out-of-bounds memory accesses. Checks are performed by computing
70 bounds for each pointer and then comparing address of accessed
71 memory before pointer dereferencing.
79 There are few things to instrument:
81 a) Memory accesses - add checker calls to check address of accessed memory
82 against bounds of dereferenced pointer. Obviously safe memory
83 accesses like static variable access does not have to be instrumented
90 with 4 bytes access is transformed into:
92 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
94 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
97 where __bound_tmp.1_3 are bounds computed for pointer p_1,
98 __builtin___chkp_bndcl is a lower bound check and
99 __builtin___chkp_bndcu is an upper bound check.
103 When pointer is stored in memory we need to store its bounds. To
104 achieve compatibility of instrumented code with regular codes
105 we have to keep data layout and store bounds in special bound tables
106 via special checker call. Implementation of bounds table may vary for
107 different platforms. It has to associate pointer value and its
108 location (it is required because we may have two equal pointers
109 with different bounds stored in different places) with bounds.
110 Another checker builtin allows to get bounds for specified pointer
111 loaded from specified location.
121 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
123 where __bound_tmp.1_2 are bounds of &buf2.
125 c) Static initialization.
127 The special case of pointer store is static pointer initialization.
128 Bounds initialization is performed in a few steps:
129 - register all static initializations in front-end using
130 chkp_register_var_initializer
131 - when file compilation finishes we create functions with special
132 attribute 'chkp ctor' and put explicit initialization code
133 (assignments) for all statically initialized pointers.
134 - when checker constructor is compiled checker pass adds required
135 bounds initialization for all statically initialized pointers
136 - since we do not actually need excess pointers initialization
137 in checker constructor we remove such assignments from them
141 For each call in the code we add additional arguments to pass
142 bounds for pointer arguments. We determine type of call arguments
143 using arguments list from function declaration; if function
144 declaration is not available we use function type; otherwise
145 (e.g. for unnamed arguments) we use type of passed value. Function
146 declaration/type is replaced with the instrumented one.
150 val_1 = foo (&buf1, &buf2, &buf1, 0);
154 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
155 &buf1, __bound_tmp.1_2, 0);
159 If function returns a pointer value we have to return bounds also.
160 A new operand was added for return statement to hold returned bounds.
168 return &_buf1, __bound_tmp.1_1;
170 3. Bounds computation.
172 Compiler is fully responsible for computing bounds to be used for each
173 memory access. The first step for bounds computation is to find the
174 origin of pointer dereferenced for memory access. Basing on pointer
175 origin we define a way to compute its bounds. There are just few
178 a) Pointer is returned by call.
180 In this case we use corresponding checker builtin method to obtain returned
185 buf_1 = malloc (size_2);
190 buf_1 = malloc (size_2);
191 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
192 foo (buf_1, __bound_tmp.1_3);
194 b) Pointer is an address of an object.
196 In this case compiler tries to compute objects size and create corresponding
197 bounds. If object has incomplete type then special checker builtin is used to
198 obtain its size at runtime.
204 <unnamed type> __bound_tmp.3;
208 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
211 return &buf, __bound_tmp.3_2;
216 Address of an object 'extern int buf[]' with incomplete type is
221 <unnamed type> __bound_tmp.4;
222 long unsigned int __size_tmp.3;
225 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
226 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
229 return &buf, __bound_tmp.4_3;
232 c) Pointer is the result of object narrowing.
234 It happens when we use pointer to an object to compute pointer to a part
235 of an object. E.g. we take pointer to a field of a structure. In this
236 case we perform bounds intersection using bounds of original object and
237 bounds of object's part (which are computed basing on its type).
239 There may be some debatable questions about when narrowing should occur
240 and when it should not. To avoid false bound violations in correct
241 programs we do not perform narrowing when address of an array element is
242 obtained (it has address of the whole array) and when address of the first
243 structure field is obtained (because it is guaranteed to be equal to
244 address of the whole structure and it is legal to cast it back to structure).
246 Default narrowing behavior may be changed using compiler flags.
250 In this example address of the second structure field is returned.
252 foo (struct A * p, __bounds_type __bounds_of_p)
254 <unnamed type> __bound_tmp.3;
259 _5 = &p_1(D)->second_field;
260 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
261 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
263 _2 = &p_1(D)->second_field;
264 return _2, __bound_tmp.3_8;
269 In this example address of the first field of array element is returned.
271 foo (struct A * p, __bounds_type __bounds_of_p, int i)
273 long unsigned int _3;
274 long unsigned int _4;
279 _3 = (long unsigned int) i_1(D);
282 _7 = &_6->first_field;
283 return _7, __bounds_of_p_2(D);
287 d) Pointer is the result of pointer arithmetic or type cast.
289 In this case bounds of the base pointer are used. In case of binary
290 operation producing a pointer we are analyzing data flow further
291 looking for operand's bounds. One operand is considered as a base
292 if it has some valid bounds. If we fall into a case when none of
293 operands (or both of them) has valid bounds, a default bounds value
296 Trying to find out bounds for binary operations we may fall into
297 cyclic dependencies for pointers. To avoid infinite recursion all
298 walked phi nodes instantly obtain corresponding bounds but created
299 bounds are marked as incomplete. It helps us to stop DF walk during
302 When we reach pointer source, some args of incomplete bounds phi obtain
303 valid bounds and those values are propagated further through phi nodes.
304 If no valid bounds were found for phi node then we mark its result as
305 invalid bounds. Process stops when all incomplete bounds become either
306 valid or invalid and we are able to choose a pointer base.
308 e) Pointer is loaded from the memory.
310 In this case we just need to load bounds from the bounds table.
316 <unnamed type> __bound_tmp.3;
322 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
323 return _2, __bound_tmp.3_4;
/* Callback type used when walking assignments: receives the LHS, the RHS
   and a client data pointer (see chkp_add_modification_to_stmt_list).  */
328 typedef void (*assign_handler
)(tree
, tree
, void *);
/* Forward declarations for helpers defined later in this file.  */
330 static tree
chkp_get_zero_bounds ();
331 static tree
chkp_find_bounds (tree ptr
, gimple_stmt_iterator
*iter
);
332 static tree
chkp_find_bounds_loaded (tree ptr
, tree ptr_src
,
333 gimple_stmt_iterator
*iter
);
334 static void chkp_parse_array_and_component_ref (tree node
, tree
*ptr
,
335 tree
*elt
, bool *safe
,
338 gimple_stmt_iterator
*iter
,
339 bool innermost_bounds
);
/* Shorthands for the Pointer Bounds Checker builtin function decls.
   Each expands to a query of the target hook, so the decl is always
   the one provided by the current target.  */
341 #define chkp_bndldx_fndecl \
342 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
343 #define chkp_bndstx_fndecl \
344 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
345 #define chkp_checkl_fndecl \
346 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
347 #define chkp_checku_fndecl \
348 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
349 #define chkp_bndmk_fndecl \
350 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
351 #define chkp_ret_bnd_fndecl \
352 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
353 #define chkp_intersect_fndecl \
354 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
355 #define chkp_narrow_bounds_fndecl \
356 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
357 #define chkp_sizeof_fndecl \
358 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
359 #define chkp_extract_lower_fndecl \
360 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
361 #define chkp_extract_upper_fndecl \
362 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
/* Pointer-sized unsigned integer type; used as the type of size
   temporaries (see chkp_get_size_tmp_var).  */
364 static GTY (()) tree chkp_uintptr_type
;
/* Static variables holding zero/none bounds; returned by
   chkp_get_zero_bounds_var when static const bounds are enabled.  */
366 static GTY (()) tree chkp_zero_bounds_var
;
367 static GTY (()) tree chkp_none_bounds_var
;
/* Block created by chkp_get_entry_block to hold checker
   initialization code.  */
369 static GTY (()) basic_block entry_block
;
/* Sentinel bounds values: zero_bounds and none_bounds are always
   treated as valid by chkp_valid_bounds; incomplete_bounds marks
   bounds still being computed (see chkp_incomplete_bounds).  */
370 static GTY (()) tree zero_bounds
;
371 static GTY (()) tree none_bounds
;
372 static GTY (()) tree incomplete_bounds
;
/* Lazily created temporaries reused for bounds (__bound_tmp) and
   sizes (__size_tmp); see chkp_get_tmp_var / chkp_get_size_tmp_var.  */
373 static GTY (()) tree tmp_var
;
374 static GTY (()) tree size_tmp_var
;
/* SSA versions of bounds copies created for abnormal SSA names
   (see chkp_maybe_copy_and_register_bounds).  */
375 static GTY (()) bitmap chkp_abnormal_copies
;
/* Bounds explicitly marked invalid via chkp_mark_invalid_bounds.  */
377 struct hash_set
<tree
> *chkp_invalid_bounds
;
/* Bounds marked completed via chkp_mark_completed_bounds.  */
378 struct hash_set
<tree
> *chkp_completed_bounds_set
;
/* Map from pointer SSA name to its registered bounds
   (see chkp_maybe_copy_and_register_bounds).  */
379 struct hash_map
<tree
, tree
> *chkp_reg_bounds
;
/* Map from pointer variable to its dedicated bounds variable
   (see chkp_get_bounds_var).  */
380 struct hash_map
<tree
, tree
> *chkp_bound_vars
;
/* Map from object to bounds registered for its address
   (see chkp_register_addr_bounds).  */
381 struct hash_map
<tree
, tree
> *chkp_reg_addr_bounds
;
/* Map from incomplete bounds to the pointer they were created for
   (see chkp_register_incomplete_bounds).  */
382 struct hash_map
<tree
, tree
> *chkp_incomplete_bounds_map
;
/* Map from node to bounds associated via chkp_set_bounds.  */
383 struct hash_map
<tree
, tree
> *chkp_bounds_map
;
/* Map used for bounds of static variables — presumably keyed by the
   variable decl; its uses are not visible in this chunk, confirm.  */
384 struct hash_map
<tree
, tree
> *chkp_static_var_bounds
;
/* NOTE(review): flag apparently set while the instrumentation pass
   runs; the writer is not visible in this chunk — confirm.  */
386 static bool in_chkp_pass
;
/* Names and prefixes used for checker-generated temporaries and
   symbols, so they are recognizable in dumps and object files.  */
388 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
389 #define CHKP_SIZE_TMP_NAME "__size_tmp"
390 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
391 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
392 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
393 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
394 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
396 /* Static checker constructors may become very large and their
397 compilation with optimization may take too much time.
398 Therefore we put a limit to number of statements in one
399 constructor. Tests with 100 000 statically initialized
400 pointers showed following compilation times on Sandy Bridge
402 limit 100 => ~18 sec.
403 limit 300 => ~22 sec.
404 limit 1000 => ~30 sec.
405 limit 3000 => ~49 sec.
406 limit 5000 => ~55 sec.
407 limit 10000 => ~76 sec.
408 limit 100000 => ~532 sec. */
409 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
411 struct chkp_ctor_stmt_list
417 /* Return 1 if function FNDECL is instrumented by Pointer
420 chkp_function_instrumented_p (tree fndecl
)
423 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl
));
426 /* Mark function FNDECL as instrumented. */
428 chkp_function_mark_instrumented (tree fndecl
)
430 if (chkp_function_instrumented_p (fndecl
))
433 DECL_ATTRIBUTES (fndecl
)
434 = tree_cons (get_identifier ("chkp instrumented"), NULL
,
435 DECL_ATTRIBUTES (fndecl
));
438 /* Return true when STMT is builtin call to instrumentation function
439 corresponding to CODE. */
442 chkp_gimple_call_builtin_p (gimple call
,
443 enum built_in_function code
)
446 if (is_gimple_call (call
)
447 && (fndecl
= targetm
.builtin_chkp_function (code
))
448 && gimple_call_fndecl (call
) == fndecl
)
453 /* Emit code to build zero bounds and return RTL holding
456 chkp_expand_zero_bounds ()
460 if (flag_chkp_use_static_const_bounds
)
461 zero_bnd
= chkp_get_zero_bounds_var ();
463 zero_bnd
= chkp_build_make_bounds_call (integer_zero_node
,
465 return expand_normal (zero_bnd
);
468 /* Emit code to store zero bounds for PTR located at MEM. */
470 chkp_expand_bounds_reset_for_mem (tree mem
, tree ptr
)
472 tree zero_bnd
, bnd
, addr
, bndstx
;
474 if (flag_chkp_use_static_const_bounds
)
475 zero_bnd
= chkp_get_zero_bounds_var ();
477 zero_bnd
= chkp_build_make_bounds_call (integer_zero_node
,
479 bnd
= make_tree (pointer_bounds_type_node
,
480 assign_temp (pointer_bounds_type_node
, 0, 1));
481 addr
= build1 (ADDR_EXPR
,
482 build_pointer_type (TREE_TYPE (mem
)), mem
);
483 bndstx
= chkp_build_bndstx_call (addr
, ptr
, bnd
);
485 expand_assignment (bnd
, zero_bnd
, false);
486 expand_normal (bndstx
);
489 /* Build retbnd call for returned value RETVAL.
491 If BNDVAL is not NULL then result is stored
492 in it. Otherwise a temporary is created to
495 GSI points to a position for a retbnd call
496 and is set to created stmt.
498 Cgraph edge is created for a new call if
501 Obtained bounds are returned. */
503 chkp_insert_retbnd_call (tree bndval
, tree retval
,
504 gimple_stmt_iterator
*gsi
)
509 bndval
= create_tmp_reg (pointer_bounds_type_node
, "retbnd");
511 call
= gimple_build_call (chkp_ret_bnd_fndecl
, 1, retval
);
512 gimple_call_set_lhs (call
, bndval
);
513 gsi_insert_after (gsi
, call
, GSI_CONTINUE_LINKING
);
518 /* Build a GIMPLE_CALL identical to CALL but skipping bounds
522 chkp_copy_call_skip_bounds (gcall
*call
)
527 bitmap_obstack_initialize (NULL
);
528 bounds
= BITMAP_ALLOC (NULL
);
530 for (i
= 0; i
< gimple_call_num_args (call
); i
++)
531 if (POINTER_BOUNDS_P (gimple_call_arg (call
, i
)))
532 bitmap_set_bit (bounds
, i
);
534 if (!bitmap_empty_p (bounds
))
535 call
= gimple_call_copy_skip_args (call
, bounds
);
536 gimple_call_set_with_bounds (call
, false);
538 BITMAP_FREE (bounds
);
539 bitmap_obstack_release (NULL
);
544 /* Redirect edge E to the correct node according to call_stmt.
545 Return 1 if bounds removal from call_stmt should be done
546 instead of redirection. */
549 chkp_redirect_edge (cgraph_edge
*e
)
551 bool instrumented
= false;
552 tree decl
= e
->callee
->decl
;
554 if (e
->callee
->instrumentation_clone
555 || chkp_function_instrumented_p (decl
))
559 && !gimple_call_with_bounds_p (e
->call_stmt
))
560 e
->redirect_callee (cgraph_node::get_create (e
->callee
->orig_decl
));
561 else if (!instrumented
562 && gimple_call_with_bounds_p (e
->call_stmt
)
563 && !chkp_gimple_call_builtin_p (e
->call_stmt
, BUILT_IN_CHKP_BNDCL
)
564 && !chkp_gimple_call_builtin_p (e
->call_stmt
, BUILT_IN_CHKP_BNDCU
)
565 && !chkp_gimple_call_builtin_p (e
->call_stmt
, BUILT_IN_CHKP_BNDSTX
))
567 if (e
->callee
->instrumented_version
)
568 e
->redirect_callee (e
->callee
->instrumented_version
);
571 tree args
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
572 /* Avoid bounds removal if all args will be removed. */
573 if (!args
|| TREE_VALUE (args
) != void_type_node
)
576 gimple_call_set_with_bounds (e
->call_stmt
, false);
583 /* Mark statement S to not be instrumented. */
585 chkp_mark_stmt (gimple s
)
587 gimple_set_plf (s
, GF_PLF_1
, true);
590 /* Mark statement S to be instrumented. */
592 chkp_unmark_stmt (gimple s
)
594 gimple_set_plf (s
, GF_PLF_1
, false);
597 /* Return 1 if statement S should not be instrumented. */
599 chkp_marked_stmt_p (gimple s
)
601 return gimple_plf (s
, GF_PLF_1
);
604 /* Get var to be used for bound temps. */
606 chkp_get_tmp_var (void)
609 tmp_var
= create_tmp_reg (pointer_bounds_type_node
, CHKP_BOUND_TMP_NAME
);
614 /* Get SSA_NAME to be used as temp. */
616 chkp_get_tmp_reg (gimple stmt
)
619 return make_ssa_name (chkp_get_tmp_var (), stmt
);
621 return make_temp_ssa_name (pointer_bounds_type_node
, stmt
,
622 CHKP_BOUND_TMP_NAME
);
625 /* Get var to be used for size temps. */
627 chkp_get_size_tmp_var (void)
630 size_tmp_var
= create_tmp_reg (chkp_uintptr_type
, CHKP_SIZE_TMP_NAME
);
635 /* Register bounds BND for address of OBJ. */
637 chkp_register_addr_bounds (tree obj
, tree bnd
)
639 if (bnd
== incomplete_bounds
)
642 chkp_reg_addr_bounds
->put (obj
, bnd
);
644 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
646 fprintf (dump_file
, "Regsitered bound ");
647 print_generic_expr (dump_file
, bnd
, 0);
648 fprintf (dump_file
, " for address of ");
649 print_generic_expr (dump_file
, obj
, 0);
650 fprintf (dump_file
, "\n");
654 /* Return bounds registered for address of OBJ. */
656 chkp_get_registered_addr_bounds (tree obj
)
658 tree
*slot
= chkp_reg_addr_bounds
->get (obj
);
659 return slot
? *slot
: NULL_TREE
;
662 /* Mark BOUNDS as completed. */
664 chkp_mark_completed_bounds (tree bounds
)
666 chkp_completed_bounds_set
->add (bounds
);
668 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
670 fprintf (dump_file
, "Marked bounds ");
671 print_generic_expr (dump_file
, bounds
, 0);
672 fprintf (dump_file
, " as completed\n");
676 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
678 chkp_completed_bounds (tree bounds
)
680 return chkp_completed_bounds_set
->contains (bounds
);
683 /* Clear completed bound marks. */
685 chkp_erase_completed_bounds (void)
687 delete chkp_completed_bounds_set
;
688 chkp_completed_bounds_set
= new hash_set
<tree
>;
691 /* Mark BOUNDS associated with PTR as incomplete. */
693 chkp_register_incomplete_bounds (tree bounds
, tree ptr
)
695 chkp_incomplete_bounds_map
->put (bounds
, ptr
);
697 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
699 fprintf (dump_file
, "Regsitered incomplete bounds ");
700 print_generic_expr (dump_file
, bounds
, 0);
701 fprintf (dump_file
, " for ");
702 print_generic_expr (dump_file
, ptr
, 0);
703 fprintf (dump_file
, "\n");
707 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
709 chkp_incomplete_bounds (tree bounds
)
711 if (bounds
== incomplete_bounds
)
714 if (chkp_completed_bounds (bounds
))
717 return chkp_incomplete_bounds_map
->get (bounds
) != NULL
;
720 /* Clear incomplete bound marks. */
722 chkp_erase_incomplete_bounds (void)
724 delete chkp_incomplete_bounds_map
;
725 chkp_incomplete_bounds_map
= new hash_map
<tree
, tree
>;
728 /* Build and return bndmk call which creates bounds for structure
729 pointed by PTR. Structure should have complete type. */
731 chkp_make_bounds_for_struct_addr (tree ptr
)
733 tree type
= TREE_TYPE (ptr
);
736 gcc_assert (POINTER_TYPE_P (type
));
738 size
= TYPE_SIZE (TREE_TYPE (type
));
742 return build_call_nary (pointer_bounds_type_node
,
743 build_fold_addr_expr (chkp_bndmk_fndecl
),
747 /* Traversal function for chkp_may_finish_incomplete_bounds.
748 Set RES to 0 if at least one argument of phi statement
749 defining bounds (passed in KEY arg) is unknown.
750 Traversal stops when first unknown phi argument is found. */
752 chkp_may_complete_phi_bounds (tree
const &bounds
, tree
*slot ATTRIBUTE_UNUSED
,
758 gcc_assert (TREE_CODE (bounds
) == SSA_NAME
);
760 phi
= SSA_NAME_DEF_STMT (bounds
);
762 gcc_assert (phi
&& gimple_code (phi
) == GIMPLE_PHI
);
764 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
766 tree phi_arg
= gimple_phi_arg_def (phi
, i
);
770 /* Do not need to traverse further. */
778 /* Return 1 if all phi nodes created for bounds have their
779 arguments computed. */
781 chkp_may_finish_incomplete_bounds (void)
785 chkp_incomplete_bounds_map
786 ->traverse
<bool *, chkp_may_complete_phi_bounds
> (&res
);
791 /* Helper function for chkp_finish_incomplete_bounds.
792 Recompute args for bounds phi node. */
794 chkp_recompute_phi_bounds (tree
const &bounds
, tree
*slot
,
795 void *res ATTRIBUTE_UNUSED
)
802 gcc_assert (TREE_CODE (bounds
) == SSA_NAME
);
803 gcc_assert (TREE_CODE (ptr
) == SSA_NAME
);
805 bounds_phi
= as_a
<gphi
*> (SSA_NAME_DEF_STMT (bounds
));
806 ptr_phi
= as_a
<gphi
*> (SSA_NAME_DEF_STMT (ptr
));
808 for (i
= 0; i
< gimple_phi_num_args (bounds_phi
); i
++)
810 tree ptr_arg
= gimple_phi_arg_def (ptr_phi
, i
);
811 tree bound_arg
= chkp_find_bounds (ptr_arg
, NULL
);
813 add_phi_arg (bounds_phi
, bound_arg
,
814 gimple_phi_arg_edge (ptr_phi
, i
),
821 /* Mark BOUNDS as invalid. */
823 chkp_mark_invalid_bounds (tree bounds
)
825 chkp_invalid_bounds
->add (bounds
);
827 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
829 fprintf (dump_file
, "Marked bounds ");
830 print_generic_expr (dump_file
, bounds
, 0);
831 fprintf (dump_file
, " as invalid\n");
835 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
837 chkp_valid_bounds (tree bounds
)
839 if (bounds
== zero_bounds
|| bounds
== none_bounds
)
842 return !chkp_invalid_bounds
->contains (bounds
);
845 /* Helper function for chkp_finish_incomplete_bounds.
846 Check all arguments of phi nodes trying to find
847 valid completed bounds. If there is at least one
848 such arg then bounds produced by phi node are marked
849 as valid completed bounds and all phi args are
852 chkp_find_valid_phi_bounds (tree
const &bounds
, tree
*slot
, bool *res
)
857 gcc_assert (TREE_CODE (bounds
) == SSA_NAME
);
859 if (chkp_completed_bounds (bounds
))
862 phi
= SSA_NAME_DEF_STMT (bounds
);
864 gcc_assert (phi
&& gimple_code (phi
) == GIMPLE_PHI
);
866 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
868 tree phi_arg
= gimple_phi_arg_def (phi
, i
);
870 gcc_assert (phi_arg
);
872 if (chkp_valid_bounds (phi_arg
) && !chkp_incomplete_bounds (phi_arg
))
875 chkp_mark_completed_bounds (bounds
);
876 chkp_recompute_phi_bounds (bounds
, slot
, NULL
);
884 /* Helper function for chkp_finish_incomplete_bounds.
885 Marks all incomplete bounds as invalid. */
887 chkp_mark_invalid_bounds_walker (tree
const &bounds
,
888 tree
*slot ATTRIBUTE_UNUSED
,
889 void *res ATTRIBUTE_UNUSED
)
891 if (!chkp_completed_bounds (bounds
))
893 chkp_mark_invalid_bounds (bounds
);
894 chkp_mark_completed_bounds (bounds
);
899 /* When all bound phi nodes have all their args computed
900 we have enough info to find valid bounds. We iterate
901 through all incomplete bounds searching for valid
902 bounds. Found valid bounds are marked as completed
903 and all remaining incomplete bounds are recomputed.
904 Process continues until no new valid bounds may be
905 found. All remaining incomplete bounds are marked as
906 invalid (i.e. have no valid source of bounds). */
908 chkp_finish_incomplete_bounds (void)
916 chkp_incomplete_bounds_map
->
917 traverse
<bool *, chkp_find_valid_phi_bounds
> (&found_valid
);
920 chkp_incomplete_bounds_map
->
921 traverse
<void *, chkp_recompute_phi_bounds
> (NULL
);
924 chkp_incomplete_bounds_map
->
925 traverse
<void *, chkp_mark_invalid_bounds_walker
> (NULL
);
926 chkp_incomplete_bounds_map
->
927 traverse
<void *, chkp_recompute_phi_bounds
> (NULL
);
929 chkp_erase_completed_bounds ();
930 chkp_erase_incomplete_bounds ();
933 /* Return 1 if type TYPE is a pointer type or a
934 structure having a pointer type as one of its fields.
935 Otherwise return 0. */
937 chkp_type_has_pointer (const_tree type
)
941 if (BOUNDED_TYPE_P (type
))
943 else if (RECORD_OR_UNION_TYPE_P (type
))
947 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
948 if (TREE_CODE (field
) == FIELD_DECL
)
949 res
= res
|| chkp_type_has_pointer (TREE_TYPE (field
));
951 else if (TREE_CODE (type
) == ARRAY_TYPE
)
952 res
= chkp_type_has_pointer (TREE_TYPE (type
));
958 chkp_type_bounds_count (const_tree type
)
964 else if (BOUNDED_TYPE_P (type
))
966 else if (RECORD_OR_UNION_TYPE_P (type
))
970 bitmap_obstack_initialize (NULL
);
971 have_bound
= BITMAP_ALLOC (NULL
);
972 chkp_find_bound_slots (type
, have_bound
);
973 res
= bitmap_count_bits (have_bound
);
974 BITMAP_FREE (have_bound
);
975 bitmap_obstack_release (NULL
);
981 /* Get bounds associated with NODE via
982 chkp_set_bounds call. */
984 chkp_get_bounds (tree node
)
988 if (!chkp_bounds_map
)
991 slot
= chkp_bounds_map
->get (node
);
992 return slot
? *slot
: NULL_TREE
;
995 /* Associate bounds VAL with NODE. */
997 chkp_set_bounds (tree node
, tree val
)
999 if (!chkp_bounds_map
)
1000 chkp_bounds_map
= new hash_map
<tree
, tree
>;
1002 chkp_bounds_map
->put (node
, val
);
1005 /* Check if statically initialized variable VAR requires
1006 static bounds initialization. If VAR is added into
1007 bounds initialization list then 1 is returned. Otherwise
1010 chkp_register_var_initializer (tree var
)
1012 if (!flag_check_pointer_bounds
1013 || DECL_INITIAL (var
) == error_mark_node
)
1016 gcc_assert (TREE_CODE (var
) == VAR_DECL
);
1017 gcc_assert (DECL_INITIAL (var
));
1019 if (TREE_STATIC (var
)
1020 && chkp_type_has_pointer (TREE_TYPE (var
)))
1022 varpool_node::get_create (var
)->need_bounds_init
= 1;
1029 /* Helper function for chkp_finish_file.
1031 Add new modification statement (RHS is assigned to LHS)
1032 into list of static initializer statements (passed in ARG).
1033 If statements list becomes too big, emit checker constructor
1034 and start the new one. */
1036 chkp_add_modification_to_stmt_list (tree lhs
,
1040 struct chkp_ctor_stmt_list
*stmts
= (struct chkp_ctor_stmt_list
*)arg
;
1043 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (rhs
)))
1044 rhs
= build1 (CONVERT_EXPR
, TREE_TYPE (lhs
), rhs
);
1046 modify
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, rhs
);
1047 append_to_statement_list (modify
, &stmts
->stmts
);
1052 /* Build and return ADDR_EXPR for specified object OBJ. */
1054 chkp_build_addr_expr (tree obj
)
1056 return TREE_CODE (obj
) == TARGET_MEM_REF
1057 ? tree_mem_ref_addr (ptr_type_node
, obj
)
1058 : build_fold_addr_expr (obj
);
1061 /* Helper function for chkp_finish_file.
1062 Initialize bound variable BND_VAR with bounds of variable
1063 VAR to statements list STMTS. If statements list becomes
1064 too big, emit checker constructor and start the new one. */
1066 chkp_output_static_bounds (tree bnd_var
, tree var
,
1067 struct chkp_ctor_stmt_list
*stmts
)
1071 if (TREE_CODE (var
) == STRING_CST
)
1073 lb
= build1 (CONVERT_EXPR
, size_type_node
, chkp_build_addr_expr (var
));
1074 size
= build_int_cst (size_type_node
, TREE_STRING_LENGTH (var
) - 1);
1076 else if (DECL_SIZE (var
)
1077 && !chkp_variable_size_type (TREE_TYPE (var
)))
1079 /* Compute bounds using statically known size. */
1080 lb
= build1 (CONVERT_EXPR
, size_type_node
, chkp_build_addr_expr (var
));
1081 size
= size_binop (MINUS_EXPR
, DECL_SIZE_UNIT (var
), size_one_node
);
1085 /* Compute bounds using dynamic size. */
1088 lb
= build1 (CONVERT_EXPR
, size_type_node
, chkp_build_addr_expr (var
));
1089 call
= build1 (ADDR_EXPR
,
1090 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl
)),
1091 chkp_sizeof_fndecl
);
1092 size
= build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl
)),
1095 if (flag_chkp_zero_dynamic_size_as_infinite
)
1097 tree max_size
, cond
;
1099 max_size
= build2 (MINUS_EXPR
, size_type_node
, size_zero_node
, lb
);
1100 cond
= build2 (NE_EXPR
, boolean_type_node
, size
, size_zero_node
);
1101 size
= build3 (COND_EXPR
, size_type_node
, cond
, size
, max_size
);
1104 size
= size_binop (MINUS_EXPR
, size
, size_one_node
);
1107 ub
= size_binop (PLUS_EXPR
, lb
, size
);
1108 stmts
->avail
-= targetm
.chkp_initialize_bounds (bnd_var
, lb
, ub
,
1110 if (stmts
->avail
<= 0)
1112 cgraph_build_static_cdtor ('B', stmts
->stmts
,
1113 MAX_RESERVED_INIT_PRIORITY
+ 2);
1114 stmts
->avail
= MAX_STMTS_IN_STATIC_CHKP_CTOR
;
1115 stmts
->stmts
= NULL
;
1119 /* Return entry block to be used for checker initialization code.
1120 Create new block if required. */
1122 chkp_get_entry_block (void)
1126 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->dest
;
1131 /* Return a bounds var to be used for pointer var PTR_VAR. */
1133 chkp_get_bounds_var (tree ptr_var
)
1138 slot
= chkp_bound_vars
->get (ptr_var
);
1143 bnd_var
= create_tmp_reg (pointer_bounds_type_node
,
1144 CHKP_BOUND_TMP_NAME
);
1145 chkp_bound_vars
->put (ptr_var
, bnd_var
);
1151 /* If BND is an abnormal bounds copy, return a copied value.
1152 Otherwise return BND. */
1154 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd
)
1156 if (bitmap_bit_p (chkp_abnormal_copies
, SSA_NAME_VERSION (bnd
)))
1158 gimple bnd_def
= SSA_NAME_DEF_STMT (bnd
);
1159 gcc_checking_assert (gimple_code (bnd_def
) == GIMPLE_ASSIGN
);
1160 bnd
= gimple_assign_rhs1 (bnd_def
);
1166 /* Register bounds BND for object PTR in global bounds table.
1167 A copy of bounds may be created for abnormal ssa names.
1168 Returns bounds to use for PTR. */
1170 chkp_maybe_copy_and_register_bounds (tree ptr
, tree bnd
)
1174 if (!chkp_reg_bounds
)
1177 /* Do nothing if bounds are incomplete_bounds
1178 because it means bounds will be recomputed. */
1179 if (bnd
== incomplete_bounds
)
1182 abnormal_ptr
= (TREE_CODE (ptr
) == SSA_NAME
1183 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr
)
1184 && gimple_code (SSA_NAME_DEF_STMT (ptr
)) != GIMPLE_PHI
);
1186 /* A single bounds value may be reused multiple times for
1187 different pointer values. It may cause coalescing issues
1188 for abnormal SSA names. To avoid it we create a bounds
1189 copy in case it is computed for abnormal SSA name.
1191 We also cannot reuse such created copies for other pointers */
1193 || bitmap_bit_p (chkp_abnormal_copies
, SSA_NAME_VERSION (bnd
)))
1195 tree bnd_var
= NULL_TREE
;
1199 if (SSA_NAME_VAR (ptr
))
1200 bnd_var
= chkp_get_bounds_var (SSA_NAME_VAR (ptr
));
1203 bnd_var
= chkp_get_tmp_var ();
1205 /* For abnormal copies we may just find original
1206 bounds and use them. */
1207 if (!abnormal_ptr
&& !SSA_NAME_IS_DEFAULT_DEF (bnd
))
1208 bnd
= chkp_get_orginal_bounds_for_abnormal_copy (bnd
);
1209 /* For undefined values we usually use none bounds
1210 value but in case of abnormal edge it may cause
1211 coalescing failures. Use default definition of
1212 bounds variable instead to avoid it. */
1213 else if (SSA_NAME_IS_DEFAULT_DEF (ptr
)
1214 && TREE_CODE (SSA_NAME_VAR (ptr
)) != PARM_DECL
)
1216 bnd
= get_or_create_ssa_default_def (cfun
, bnd_var
);
1218 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1220 fprintf (dump_file
, "Using default def bounds ");
1221 print_generic_expr (dump_file
, bnd
, 0);
1222 fprintf (dump_file
, " for abnormal default def SSA name ");
1223 print_generic_expr (dump_file
, ptr
, 0);
1224 fprintf (dump_file
, "\n");
1230 gimple def
= SSA_NAME_DEF_STMT (ptr
);
1232 gimple_stmt_iterator gsi
;
1235 copy
= make_ssa_name (bnd_var
);
1237 copy
= make_temp_ssa_name (pointer_bounds_type_node
,
1239 CHKP_BOUND_TMP_NAME
);
1240 bnd
= chkp_get_orginal_bounds_for_abnormal_copy (bnd
);
1241 assign
= gimple_build_assign (copy
, bnd
);
1243 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1245 fprintf (dump_file
, "Creating a copy of bounds ");
1246 print_generic_expr (dump_file
, bnd
, 0);
1247 fprintf (dump_file
, " for abnormal SSA name ");
1248 print_generic_expr (dump_file
, ptr
, 0);
1249 fprintf (dump_file
, "\n");
1252 if (gimple_code (def
) == GIMPLE_NOP
)
1254 gsi
= gsi_last_bb (chkp_get_entry_block ());
1255 if (!gsi_end_p (gsi
) && is_ctrl_stmt (gsi_stmt (gsi
)))
1256 gsi_insert_before (&gsi
, assign
, GSI_CONTINUE_LINKING
);
1258 gsi_insert_after (&gsi
, assign
, GSI_CONTINUE_LINKING
);
1262 gimple bnd_def
= SSA_NAME_DEF_STMT (bnd
);
1263 /* Sometimes (e.g. when we load a pointer from a
1264 memory) bounds are produced later than a pointer.
1265 We need to insert bounds copy appropriately. */
1266 if (gimple_code (bnd_def
) != GIMPLE_NOP
1267 && stmt_dominates_stmt_p (def
, bnd_def
))
1268 gsi
= gsi_for_stmt (bnd_def
);
1270 gsi
= gsi_for_stmt (def
);
1271 gsi_insert_after (&gsi
, assign
, GSI_CONTINUE_LINKING
);
1278 bitmap_set_bit (chkp_abnormal_copies
, SSA_NAME_VERSION (bnd
));
1281 chkp_reg_bounds
->put (ptr
, bnd
);
1283 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1285 fprintf (dump_file
, "Regsitered bound ");
1286 print_generic_expr (dump_file
, bnd
, 0);
1287 fprintf (dump_file
, " for pointer ");
1288 print_generic_expr (dump_file
, ptr
, 0);
1289 fprintf (dump_file
, "\n");
1295 /* Get bounds registered for object PTR in global bounds table. */
1297 chkp_get_registered_bounds (tree ptr
)
1301 if (!chkp_reg_bounds
)
1304 slot
= chkp_reg_bounds
->get (ptr
);
1305 return slot
? *slot
: NULL_TREE
;
1308 /* Add bound retvals to return statement pointed by GSI. */
1311 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator
*gsi
)
1313 greturn
*ret
= as_a
<greturn
*> (gsi_stmt (*gsi
));
1314 tree retval
= gimple_return_retval (ret
);
1315 tree ret_decl
= DECL_RESULT (cfun
->decl
);
1321 if (BOUNDED_P (ret_decl
))
1323 bounds
= chkp_find_bounds (retval
, gsi
);
1324 bounds
= chkp_maybe_copy_and_register_bounds (ret_decl
, bounds
);
1325 gimple_return_set_retbnd (ret
, bounds
);
1331 /* Force OP to be suitable for using as an argument for call.
1332 New statements (if any) go to SEQ. */
1334 chkp_force_gimple_call_op (tree op
, gimple_seq
*seq
)
1337 gimple_stmt_iterator si
;
1339 op
= force_gimple_operand (unshare_expr (op
), &stmts
, true, NULL_TREE
);
1341 for (si
= gsi_start (stmts
); !gsi_end_p (si
); gsi_next (&si
))
1342 chkp_mark_stmt (gsi_stmt (si
));
1344 gimple_seq_add_seq (seq
, stmts
);
1349 /* Generate lower bound check for memory access by ADDR.
1350 Check is inserted before the position pointed by ITER.
1351 DIRFLAG indicates whether memory access is load or store. */
1353 chkp_check_lower (tree addr
, tree bounds
,
1354 gimple_stmt_iterator iter
,
1355 location_t location
,
1362 if (!chkp_function_instrumented_p (current_function_decl
)
1363 && bounds
== chkp_get_zero_bounds ())
1366 if (dirflag
== integer_zero_node
1367 && !flag_chkp_check_read
)
1370 if (dirflag
== integer_one_node
1371 && !flag_chkp_check_write
)
1376 node
= chkp_force_gimple_call_op (addr
, &seq
);
1378 check
= gimple_build_call (chkp_checkl_fndecl
, 2, node
, bounds
);
1379 chkp_mark_stmt (check
);
1380 gimple_call_set_with_bounds (check
, true);
1381 gimple_set_location (check
, location
);
1382 gimple_seq_add_stmt (&seq
, check
);
1384 gsi_insert_seq_before (&iter
, seq
, GSI_SAME_STMT
);
1386 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1388 gimple before
= gsi_stmt (iter
);
1389 fprintf (dump_file
, "Generated lower bound check for statement ");
1390 print_gimple_stmt (dump_file
, before
, 0, TDF_VOPS
|TDF_MEMSYMS
);
1391 fprintf (dump_file
, " ");
1392 print_gimple_stmt (dump_file
, check
, 0, TDF_VOPS
|TDF_MEMSYMS
);
1396 /* Generate upper bound check for memory access by ADDR.
1397 Check is inserted before the position pointed by ITER.
1398 DIRFLAG indicates whether memory access is load or store. */
1400 chkp_check_upper (tree addr
, tree bounds
,
1401 gimple_stmt_iterator iter
,
1402 location_t location
,
1409 if (!chkp_function_instrumented_p (current_function_decl
)
1410 && bounds
== chkp_get_zero_bounds ())
1413 if (dirflag
== integer_zero_node
1414 && !flag_chkp_check_read
)
1417 if (dirflag
== integer_one_node
1418 && !flag_chkp_check_write
)
1423 node
= chkp_force_gimple_call_op (addr
, &seq
);
1425 check
= gimple_build_call (chkp_checku_fndecl
, 2, node
, bounds
);
1426 chkp_mark_stmt (check
);
1427 gimple_call_set_with_bounds (check
, true);
1428 gimple_set_location (check
, location
);
1429 gimple_seq_add_stmt (&seq
, check
);
1431 gsi_insert_seq_before (&iter
, seq
, GSI_SAME_STMT
);
1433 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1435 gimple before
= gsi_stmt (iter
);
1436 fprintf (dump_file
, "Generated upper bound check for statement ");
1437 print_gimple_stmt (dump_file
, before
, 0, TDF_VOPS
|TDF_MEMSYMS
);
1438 fprintf (dump_file
, " ");
1439 print_gimple_stmt (dump_file
, check
, 0, TDF_VOPS
|TDF_MEMSYMS
);
1443 /* Generate lower and upper bound checks for memory access
1444 to memory slot [FIRST, LAST] against BOUNDS. Checks
1445 are inserted before the position pointed by ITER.
1446 DIRFLAG indicates whether memory access is load or store. */
1448 chkp_check_mem_access (tree first
, tree last
, tree bounds
,
1449 gimple_stmt_iterator iter
,
1450 location_t location
,
1453 chkp_check_lower (first
, bounds
, iter
, location
, dirflag
);
1454 chkp_check_upper (last
, bounds
, iter
, location
, dirflag
);
1457 /* Replace call to _bnd_chk_* pointed by GSI with
1458 bndcu and bndcl calls. DIRFLAG determines whether
1459 check is for read or write. */
1462 chkp_replace_address_check_builtin (gimple_stmt_iterator
*gsi
,
1465 gimple_stmt_iterator call_iter
= *gsi
;
1466 gimple call
= gsi_stmt (*gsi
);
1467 tree fndecl
= gimple_call_fndecl (call
);
1468 tree addr
= gimple_call_arg (call
, 0);
1469 tree bounds
= chkp_find_bounds (addr
, gsi
);
1471 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1472 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS
)
1473 chkp_check_lower (addr
, bounds
, *gsi
, gimple_location (call
), dirflag
);
1475 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
)
1476 chkp_check_upper (addr
, bounds
, *gsi
, gimple_location (call
), dirflag
);
1478 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS
)
1480 tree size
= gimple_call_arg (call
, 1);
1481 addr
= fold_build_pointer_plus (addr
, size
);
1482 addr
= fold_build_pointer_plus_hwi (addr
, -1);
1483 chkp_check_upper (addr
, bounds
, *gsi
, gimple_location (call
), dirflag
);
1486 gsi_remove (&call_iter
, true);
1489 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1490 corresponding bounds extract call. */
1493 chkp_replace_extract_builtin (gimple_stmt_iterator
*gsi
)
1495 gimple call
= gsi_stmt (*gsi
);
1496 tree fndecl
= gimple_call_fndecl (call
);
1497 tree addr
= gimple_call_arg (call
, 0);
1498 tree bounds
= chkp_find_bounds (addr
, gsi
);
1501 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_GET_PTR_LBOUND
)
1502 fndecl
= chkp_extract_lower_fndecl
;
1503 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_GET_PTR_UBOUND
)
1504 fndecl
= chkp_extract_upper_fndecl
;
1508 extract
= gimple_build_call (fndecl
, 1, bounds
);
1509 gimple_call_set_lhs (extract
, gimple_call_lhs (call
));
1510 chkp_mark_stmt (extract
);
1512 gsi_replace (gsi
, extract
, false);
1515 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1517 chkp_build_component_ref (tree obj
, tree field
)
1521 /* If object is TMR then we do not use component_ref but
1522 add offset instead. We need it to be able to get addr
1523 of the result later. */
1524 if (TREE_CODE (obj
) == TARGET_MEM_REF
)
1526 tree offs
= TMR_OFFSET (obj
);
1527 offs
= fold_binary_to_constant (PLUS_EXPR
, TREE_TYPE (offs
),
1528 offs
, DECL_FIELD_OFFSET (field
));
1532 res
= copy_node (obj
);
1533 TREE_TYPE (res
) = TREE_TYPE (field
);
1534 TMR_OFFSET (res
) = offs
;
1537 res
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL_TREE
);
1542 /* Return ARRAY_REF for array ARR and index IDX with
1543 specified element type ETYPE and element size ESIZE. */
1545 chkp_build_array_ref (tree arr
, tree etype
, tree esize
,
1546 unsigned HOST_WIDE_INT idx
)
1548 tree index
= build_int_cst (size_type_node
, idx
);
1551 /* If object is TMR then we do not use array_ref but
1552 add offset instead. We need it to be able to get addr
1553 of the result later. */
1554 if (TREE_CODE (arr
) == TARGET_MEM_REF
)
1556 tree offs
= TMR_OFFSET (arr
);
1558 esize
= fold_binary_to_constant (MULT_EXPR
, TREE_TYPE (esize
),
1562 offs
= fold_binary_to_constant (PLUS_EXPR
, TREE_TYPE (offs
),
1566 res
= copy_node (arr
);
1567 TREE_TYPE (res
) = etype
;
1568 TMR_OFFSET (res
) = offs
;
1571 res
= build4 (ARRAY_REF
, etype
, arr
, index
, NULL_TREE
, NULL_TREE
);
1576 /* Helper function for chkp_add_bounds_to_call_stmt.
1577 Fill ALL_BOUNDS output array with created bounds.
1579 OFFS is used for recursive calls and holds basic
1580 offset of TYPE in outer structure in bits.
1582 ITER points a position where bounds are searched.
1584 ALL_BOUNDS[i] is filled with elem bounds if there
1585 is a field in TYPE which has pointer type and offset
1586 equal to i * POINTER_SIZE in bits. */
1588 chkp_find_bounds_for_elem (tree elem
, tree
*all_bounds
,
1590 gimple_stmt_iterator
*iter
)
1592 tree type
= TREE_TYPE (elem
);
1594 if (BOUNDED_TYPE_P (type
))
1596 if (!all_bounds
[offs
/ POINTER_SIZE
])
1598 tree temp
= make_temp_ssa_name (type
, NULL
, "");
1599 gimple assign
= gimple_build_assign (temp
, elem
);
1600 gimple_stmt_iterator gsi
;
1602 gsi_insert_before (iter
, assign
, GSI_SAME_STMT
);
1603 gsi
= gsi_for_stmt (assign
);
1605 all_bounds
[offs
/ POINTER_SIZE
] = chkp_find_bounds (temp
, &gsi
);
1608 else if (RECORD_OR_UNION_TYPE_P (type
))
1612 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
1613 if (TREE_CODE (field
) == FIELD_DECL
)
1615 tree base
= unshare_expr (elem
);
1616 tree field_ref
= chkp_build_component_ref (base
, field
);
1617 HOST_WIDE_INT field_offs
1618 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field
));
1619 if (DECL_FIELD_OFFSET (field
))
1620 field_offs
+= TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field
)) * 8;
1622 chkp_find_bounds_for_elem (field_ref
, all_bounds
,
1623 offs
+ field_offs
, iter
);
1626 else if (TREE_CODE (type
) == ARRAY_TYPE
)
1628 tree maxval
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
1629 tree etype
= TREE_TYPE (type
);
1630 HOST_WIDE_INT esize
= TREE_INT_CST_LOW (TYPE_SIZE (etype
));
1631 unsigned HOST_WIDE_INT cur
;
1633 if (!maxval
|| integer_minus_onep (maxval
))
1636 for (cur
= 0; cur
<= TREE_INT_CST_LOW (maxval
); cur
++)
1638 tree base
= unshare_expr (elem
);
1639 tree arr_elem
= chkp_build_array_ref (base
, etype
,
1642 chkp_find_bounds_for_elem (arr_elem
, all_bounds
, offs
+ cur
* esize
,
1648 /* Fill HAVE_BOUND output bitmap with information about
1649 bounds required for object of type TYPE.
1651 OFFS is used for recursive calls and holds basic
1652 offset of TYPE in outer structure in bits.
1654 HAVE_BOUND[i] is set to 1 if there is a field
1655 in TYPE which has pointer type and offset
1656 equal to i * POINTER_SIZE - OFFS in bits. */
1658 chkp_find_bound_slots_1 (const_tree type
, bitmap have_bound
,
1661 if (BOUNDED_TYPE_P (type
))
1662 bitmap_set_bit (have_bound
, offs
/ POINTER_SIZE
);
1663 else if (RECORD_OR_UNION_TYPE_P (type
))
1667 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
1668 if (TREE_CODE (field
) == FIELD_DECL
)
1670 HOST_WIDE_INT field_offs
1671 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field
));
1672 if (DECL_FIELD_OFFSET (field
))
1673 field_offs
+= TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field
)) * 8;
1674 chkp_find_bound_slots_1 (TREE_TYPE (field
), have_bound
,
1678 else if (TREE_CODE (type
) == ARRAY_TYPE
)
1680 tree maxval
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
1681 tree etype
= TREE_TYPE (type
);
1682 HOST_WIDE_INT esize
= TREE_INT_CST_LOW (TYPE_SIZE (etype
));
1683 unsigned HOST_WIDE_INT cur
;
1686 || TREE_CODE (maxval
) != INTEGER_CST
1687 || integer_minus_onep (maxval
))
1690 for (cur
= 0; cur
<= TREE_INT_CST_LOW (maxval
); cur
++)
1691 chkp_find_bound_slots_1 (etype
, have_bound
, offs
+ cur
* esize
);
1695 /* Fill bitmap RES with information about bounds for
1696 type TYPE. See chkp_find_bound_slots_1 for more
1699 chkp_find_bound_slots (const_tree type
, bitmap res
)
1702 chkp_find_bound_slots_1 (type
, res
, 0);
1705 /* Return 1 if call to FNDECL should be instrumented
1709 chkp_instrument_normal_builtin (tree fndecl
)
1711 switch (DECL_FUNCTION_CODE (fndecl
))
1713 case BUILT_IN_STRLEN
:
1714 case BUILT_IN_STRCPY
:
1715 case BUILT_IN_STRNCPY
:
1716 case BUILT_IN_STPCPY
:
1717 case BUILT_IN_STPNCPY
:
1718 case BUILT_IN_STRCAT
:
1719 case BUILT_IN_STRNCAT
:
1720 case BUILT_IN_MEMCPY
:
1721 case BUILT_IN_MEMPCPY
:
1722 case BUILT_IN_MEMSET
:
1723 case BUILT_IN_MEMMOVE
:
1724 case BUILT_IN_BZERO
:
1725 case BUILT_IN_STRCMP
:
1726 case BUILT_IN_STRNCMP
:
1728 case BUILT_IN_MEMCMP
:
1729 case BUILT_IN_MEMCPY_CHK
:
1730 case BUILT_IN_MEMPCPY_CHK
:
1731 case BUILT_IN_MEMMOVE_CHK
:
1732 case BUILT_IN_MEMSET_CHK
:
1733 case BUILT_IN_STRCPY_CHK
:
1734 case BUILT_IN_STRNCPY_CHK
:
1735 case BUILT_IN_STPCPY_CHK
:
1736 case BUILT_IN_STPNCPY_CHK
:
1737 case BUILT_IN_STRCAT_CHK
:
1738 case BUILT_IN_STRNCAT_CHK
:
1739 case BUILT_IN_MALLOC
:
1740 case BUILT_IN_CALLOC
:
1741 case BUILT_IN_REALLOC
:
1749 /* Add bound arguments to call statement pointed by GSI.
1750 Also performs a replacement of user checker builtins calls
1751 with internal ones. */
1754 chkp_add_bounds_to_call_stmt (gimple_stmt_iterator
*gsi
)
1756 gcall
*call
= as_a
<gcall
*> (gsi_stmt (*gsi
));
1757 unsigned arg_no
= 0;
1758 tree fndecl
= gimple_call_fndecl (call
);
1760 tree first_formal_arg
;
1762 bool use_fntype
= false;
1767 /* Do nothing for internal functions. */
1768 if (gimple_call_internal_p (call
))
1771 fntype
= TREE_TYPE (TREE_TYPE (gimple_call_fn (call
)));
1773 /* Do nothing if back-end builtin is called. */
1774 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
1777 /* Do nothing for some middle-end builtins. */
1778 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1779 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_OBJECT_SIZE
)
1782 /* Do nothing for calls to not instrumentable functions. */
1783 if (fndecl
&& !chkp_instrumentable_p (fndecl
))
1786 /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
1787 and CHKP_COPY_PTR_BOUNDS. */
1788 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1789 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
1790 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
1791 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
1792 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_SET_PTR_BOUNDS
))
1795 /* Check user builtins are replaced with checks. */
1796 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1797 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1798 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
1799 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS
))
1801 chkp_replace_address_check_builtin (gsi
, integer_minus_one_node
);
1805 /* Check user builtins are replaced with bound extract. */
1806 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1807 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_GET_PTR_LBOUND
1808 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_GET_PTR_UBOUND
))
1810 chkp_replace_extract_builtin (gsi
);
1814 /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
1815 target narrow bounds call. */
1816 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1817 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
)
1819 tree arg
= gimple_call_arg (call
, 1);
1820 tree bounds
= chkp_find_bounds (arg
, gsi
);
1822 gimple_call_set_fndecl (call
, chkp_narrow_bounds_fndecl
);
1823 gimple_call_set_arg (call
, 1, bounds
);
1829 /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
1831 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1832 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_STORE_PTR_BOUNDS
)
1834 tree addr
= gimple_call_arg (call
, 0);
1835 tree ptr
= gimple_call_arg (call
, 1);
1836 tree bounds
= chkp_find_bounds (ptr
, gsi
);
1837 gimple_stmt_iterator iter
= gsi_for_stmt (call
);
1839 chkp_build_bndstx (addr
, ptr
, bounds
, gsi
);
1840 gsi_remove (&iter
, true);
1845 if (!flag_chkp_instrument_calls
)
1848 /* We instrument only some subset of builtins. We also instrument
1849 builtin calls to be inlined. */
1851 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1852 && !chkp_instrument_normal_builtin (fndecl
))
1854 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)))
1857 struct cgraph_node
*clone
= chkp_maybe_create_clone (fndecl
);
1859 || !gimple_has_body_p (clone
->decl
))
1863 /* If function decl is available then use it for
1864 formal arguments list. Otherwise use function type. */
1865 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
1866 first_formal_arg
= DECL_ARGUMENTS (fndecl
);
1869 first_formal_arg
= TYPE_ARG_TYPES (fntype
);
1873 /* Fill vector of new call args. */
1874 vec
<tree
> new_args
= vNULL
;
1875 new_args
.create (gimple_call_num_args (call
));
1876 arg
= first_formal_arg
;
1877 for (arg_no
= 0; arg_no
< gimple_call_num_args (call
); arg_no
++)
1879 tree call_arg
= gimple_call_arg (call
, arg_no
);
1882 /* Get arg type using formal argument description
1883 or actual argument type. */
1886 if (TREE_VALUE (arg
) != void_type_node
)
1888 type
= TREE_VALUE (arg
);
1889 arg
= TREE_CHAIN (arg
);
1892 type
= TREE_TYPE (call_arg
);
1895 type
= TREE_TYPE (arg
);
1896 arg
= TREE_CHAIN (arg
);
1899 type
= TREE_TYPE (call_arg
);
1901 new_args
.safe_push (call_arg
);
1903 if (BOUNDED_TYPE_P (type
)
1904 || pass_by_reference (NULL
, TYPE_MODE (type
), type
, true))
1905 new_args
.safe_push (chkp_find_bounds (call_arg
, gsi
));
1906 else if (chkp_type_has_pointer (type
))
1908 HOST_WIDE_INT max_bounds
1909 = TREE_INT_CST_LOW (TYPE_SIZE (type
)) / POINTER_SIZE
;
1910 tree
*all_bounds
= (tree
*)xmalloc (sizeof (tree
) * max_bounds
);
1911 HOST_WIDE_INT bnd_no
;
1913 memset (all_bounds
, 0, sizeof (tree
) * max_bounds
);
1915 chkp_find_bounds_for_elem (call_arg
, all_bounds
, 0, gsi
);
1917 for (bnd_no
= 0; bnd_no
< max_bounds
; bnd_no
++)
1918 if (all_bounds
[bnd_no
])
1919 new_args
.safe_push (all_bounds
[bnd_no
]);
1925 if (new_args
.length () == gimple_call_num_args (call
))
1929 new_call
= gimple_build_call_vec (gimple_op (call
, 1), new_args
);
1930 gimple_call_set_lhs (new_call
, gimple_call_lhs (call
));
1931 gimple_call_copy_flags (new_call
, call
);
1932 gimple_call_set_chain (new_call
, gimple_call_chain (call
));
1934 new_args
.release ();
1936 /* For direct calls fndecl is replaced with instrumented version. */
1939 tree new_decl
= chkp_maybe_create_clone (fndecl
)->decl
;
1940 gimple_call_set_fndecl (new_call
, new_decl
);
1941 gimple_call_set_fntype (new_call
, TREE_TYPE (new_decl
));
1943 /* For indirect call we should fix function pointer type if
1944 pass some bounds. */
1945 else if (new_call
!= call
)
1947 tree type
= gimple_call_fntype (call
);
1948 type
= chkp_copy_function_type_adding_bounds (type
);
1949 gimple_call_set_fntype (new_call
, type
);
1952 /* replace old call statement with the new one. */
1953 if (call
!= new_call
)
1955 FOR_EACH_SSA_TREE_OPERAND (op
, call
, iter
, SSA_OP_ALL_DEFS
)
1957 SSA_NAME_DEF_STMT (op
) = new_call
;
1959 gsi_replace (gsi
, new_call
, true);
1962 update_stmt (new_call
);
1964 gimple_call_set_with_bounds (new_call
, true);
1967 /* Return constant static bounds var with specified bounds LB and UB.
1968 If such var does not exists then new var is created with specified NAME. */
1970 chkp_make_static_const_bounds (HOST_WIDE_INT lb
,
1974 tree id
= get_identifier (name
);
1979 var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
, id
,
1980 pointer_bounds_type_node
);
1981 TREE_STATIC (var
) = 1;
1982 TREE_PUBLIC (var
) = 1;
1984 /* With LTO we may have constant bounds already in varpool.
1986 if ((snode
= symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var
))))
1988 /* We don't allow this symbol usage for non bounds. */
1989 if (snode
->type
!= SYMTAB_VARIABLE
1990 || !POINTER_BOUNDS_P (snode
->decl
))
1991 sorry ("-fcheck-pointer-bounds requires '%s' "
1992 "name for internal usage",
1993 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var
)));
1998 TREE_USED (var
) = 1;
1999 TREE_READONLY (var
) = 1;
2000 TREE_ADDRESSABLE (var
) = 0;
2001 DECL_ARTIFICIAL (var
) = 1;
2002 DECL_READ_P (var
) = 1;
2003 DECL_INITIAL (var
) = targetm
.chkp_make_bounds_constant (lb
, ub
);
2004 make_decl_one_only (var
, DECL_ASSEMBLER_NAME (var
));
2005 /* We may use this symbol during ctors generation in chkp_finish_file
2006 when all symbols are emitted. Force output to avoid undefined
2007 symbols in ctors. */
2008 node
= varpool_node::get_create (var
);
2009 node
->force_output
= 1;
2011 varpool_node::finalize_decl (var
);
2016 /* Generate code to make bounds with specified lower bound LB and SIZE.
2017 if AFTER is 1 then code is inserted after position pointed by ITER
2018 otherwise code is inserted before position pointed by ITER.
2019 If ITER is NULL then code is added to entry block. */
2021 chkp_make_bounds (tree lb
, tree size
, gimple_stmt_iterator
*iter
, bool after
)
2024 gimple_stmt_iterator gsi
;
2031 gsi
= gsi_start_bb (chkp_get_entry_block ());
2035 lb
= chkp_force_gimple_call_op (lb
, &seq
);
2036 size
= chkp_force_gimple_call_op (size
, &seq
);
2038 stmt
= gimple_build_call (chkp_bndmk_fndecl
, 2, lb
, size
);
2039 chkp_mark_stmt (stmt
);
2041 bounds
= chkp_get_tmp_reg (stmt
);
2042 gimple_call_set_lhs (stmt
, bounds
);
2044 gimple_seq_add_stmt (&seq
, stmt
);
2047 gsi_insert_seq_after (&gsi
, seq
, GSI_SAME_STMT
);
2049 gsi_insert_seq_before (&gsi
, seq
, GSI_SAME_STMT
);
2051 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2053 fprintf (dump_file
, "Made bounds: ");
2054 print_gimple_stmt (dump_file
, stmt
, 0, TDF_VOPS
|TDF_MEMSYMS
);
2057 fprintf (dump_file
, " inserted before statement: ");
2058 print_gimple_stmt (dump_file
, gsi_stmt (*iter
), 0, TDF_VOPS
|TDF_MEMSYMS
);
2061 fprintf (dump_file
, " at function entry\n");
2064 /* update_stmt (stmt); */
2069 /* Return var holding zero bounds. */
2071 chkp_get_zero_bounds_var (void)
2073 if (!chkp_zero_bounds_var
)
2074 chkp_zero_bounds_var
2075 = chkp_make_static_const_bounds (0, -1,
2076 CHKP_ZERO_BOUNDS_VAR_NAME
);
2077 return chkp_zero_bounds_var
;
2080 /* Return var holding none bounds. */
2082 chkp_get_none_bounds_var (void)
2084 if (!chkp_none_bounds_var
)
2085 chkp_none_bounds_var
2086 = chkp_make_static_const_bounds (-1, 0,
2087 CHKP_NONE_BOUNDS_VAR_NAME
);
2088 return chkp_none_bounds_var
;
2091 /* Return SSA_NAME used to represent zero bounds. */
2093 chkp_get_zero_bounds (void)
2098 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2099 fprintf (dump_file
, "Creating zero bounds...");
2101 if ((flag_chkp_use_static_bounds
&& flag_chkp_use_static_const_bounds
)
2102 || flag_chkp_use_static_const_bounds
> 0)
2104 gimple_stmt_iterator gsi
= gsi_start_bb (chkp_get_entry_block ());
2107 zero_bounds
= chkp_get_tmp_reg (NULL
);
2108 stmt
= gimple_build_assign (zero_bounds
, chkp_get_zero_bounds_var ());
2109 gsi_insert_before (&gsi
, stmt
, GSI_SAME_STMT
);
2112 zero_bounds
= chkp_make_bounds (integer_zero_node
,
2120 /* Return SSA_NAME used to represent none bounds. */
2122 chkp_get_none_bounds (void)
2127 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2128 fprintf (dump_file
, "Creating none bounds...");
2131 if ((flag_chkp_use_static_bounds
&& flag_chkp_use_static_const_bounds
)
2132 || flag_chkp_use_static_const_bounds
> 0)
2134 gimple_stmt_iterator gsi
= gsi_start_bb (chkp_get_entry_block ());
2137 none_bounds
= chkp_get_tmp_reg (NULL
);
2138 stmt
= gimple_build_assign (none_bounds
, chkp_get_none_bounds_var ());
2139 gsi_insert_before (&gsi
, stmt
, GSI_SAME_STMT
);
2142 none_bounds
= chkp_make_bounds (integer_minus_one_node
,
2143 build_int_cst (size_type_node
, 2),
2150 /* Return bounds to be used as a result of operation which
2151 should not create pointer (e.g. MULT_EXPR). */
2153 chkp_get_invalid_op_bounds (void)
2155 return chkp_get_zero_bounds ();
2158 /* Return bounds to be used for loads of non-pointer values. */
2160 chkp_get_nonpointer_load_bounds (void)
2162 return chkp_get_zero_bounds ();
2165 /* Return 1 if may use bndret call to get bounds for pointer
2166 returned by CALL. */
2168 chkp_call_returns_bounds_p (gcall
*call
)
2170 if (gimple_call_internal_p (call
))
2173 if (gimple_call_builtin_p (call
, BUILT_IN_CHKP_NARROW_PTR_BOUNDS
)
2174 || chkp_gimple_call_builtin_p (call
, BUILT_IN_CHKP_NARROW
))
2177 if (gimple_call_with_bounds_p (call
))
2180 tree fndecl
= gimple_call_fndecl (call
);
2182 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
2185 if (fndecl
&& !chkp_instrumentable_p (fndecl
))
2188 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
2190 if (chkp_instrument_normal_builtin (fndecl
))
2193 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)))
2196 struct cgraph_node
*clone
= chkp_maybe_create_clone (fndecl
);
2197 return (clone
&& gimple_has_body_p (clone
->decl
));
2203 /* Build bounds returned by CALL. */
2205 chkp_build_returned_bound (gcall
*call
)
2207 gimple_stmt_iterator gsi
;
2210 tree fndecl
= gimple_call_fndecl (call
);
2211 unsigned int retflags
;
2213 /* To avoid fixing alloca expands in targets we handle
2216 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2217 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_ALLOCA
2218 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_ALLOCA_WITH_ALIGN
))
2220 tree size
= gimple_call_arg (call
, 0);
2221 tree lb
= gimple_call_lhs (call
);
2222 gimple_stmt_iterator iter
= gsi_for_stmt (call
);
2223 bounds
= chkp_make_bounds (lb
, size
, &iter
, true);
2225 /* We know bounds returned by set_bounds builtin call. */
2227 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2228 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_SET_PTR_BOUNDS
)
2230 tree lb
= gimple_call_arg (call
, 0);
2231 tree size
= gimple_call_arg (call
, 1);
2232 gimple_stmt_iterator iter
= gsi_for_stmt (call
);
2233 bounds
= chkp_make_bounds (lb
, size
, &iter
, true);
2235 /* Detect bounds initialization calls. */
2237 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2238 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
)
2239 bounds
= chkp_get_zero_bounds ();
2240 /* Detect bounds nullification calls. */
2242 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2243 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
)
2244 bounds
= chkp_get_none_bounds ();
2245 /* Detect bounds copy calls. */
2247 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2248 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
)
2250 gimple_stmt_iterator iter
= gsi_for_stmt (call
);
2251 bounds
= chkp_find_bounds (gimple_call_arg (call
, 1), &iter
);
2253 /* Do not use retbnd when returned bounds are equal to some
2254 of passed bounds. */
2255 else if (((retflags
= gimple_call_return_flags (call
)) & ERF_RETURNS_ARG
)
2256 && (retflags
& ERF_RETURN_ARG_MASK
) < gimple_call_num_args (call
))
2258 gimple_stmt_iterator iter
= gsi_for_stmt (call
);
2259 unsigned int retarg
= retflags
& ERF_RETURN_ARG_MASK
, argno
;
2260 if (gimple_call_with_bounds_p (call
))
2262 for (argno
= 0; argno
< gimple_call_num_args (call
); argno
++)
2263 if (!POINTER_BOUNDS_P (gimple_call_arg (call
, argno
)))
2274 bounds
= chkp_find_bounds (gimple_call_arg (call
, argno
), &iter
);
2276 else if (chkp_call_returns_bounds_p (call
))
2278 gcc_assert (TREE_CODE (gimple_call_lhs (call
)) == SSA_NAME
);
2280 /* In general case build checker builtin call to
2281 obtain returned bounds. */
2282 stmt
= gimple_build_call (chkp_ret_bnd_fndecl
, 1,
2283 gimple_call_lhs (call
));
2284 chkp_mark_stmt (stmt
);
2286 gsi
= gsi_for_stmt (call
);
2287 gsi_insert_after (&gsi
, stmt
, GSI_SAME_STMT
);
2289 bounds
= chkp_get_tmp_reg (stmt
);
2290 gimple_call_set_lhs (stmt
, bounds
);
2295 bounds
= chkp_get_zero_bounds ();
2297 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2299 fprintf (dump_file
, "Built returned bounds (");
2300 print_generic_expr (dump_file
, bounds
, 0);
2301 fprintf (dump_file
, ") for call: ");
2302 print_gimple_stmt (dump_file
, call
, 0, TDF_VOPS
|TDF_MEMSYMS
);
2305 bounds
= chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call
), bounds
);
2310 /* Return bounds used as returned by call
2311 which produced SSA name VAL. */
2313 chkp_retbnd_call_by_val (tree val
)
2315 if (TREE_CODE (val
) != SSA_NAME
)
2318 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val
)) == GIMPLE_CALL
);
2320 imm_use_iterator use_iter
;
2321 use_operand_p use_p
;
2322 FOR_EACH_IMM_USE_FAST (use_p
, use_iter
, val
)
2323 if (gimple_code (USE_STMT (use_p
)) == GIMPLE_CALL
2324 && gimple_call_fndecl (USE_STMT (use_p
)) == chkp_ret_bnd_fndecl
)
2325 return as_a
<gcall
*> (USE_STMT (use_p
));
2330 /* Check the next parameter for the given PARM is bounds
2331 and return its default SSA_NAME (create if required). */
2333 chkp_get_next_bounds_parm (tree parm
)
2335 tree bounds
= TREE_CHAIN (parm
);
2336 gcc_assert (POINTER_BOUNDS_P (bounds
));
2337 bounds
= ssa_default_def (cfun
, bounds
);
2340 bounds
= make_ssa_name (TREE_CHAIN (parm
), gimple_build_nop ());
2341 set_ssa_default_def (cfun
, TREE_CHAIN (parm
), bounds
);
2346 /* Return bounds to be used for input argument PARM. */
2348 chkp_get_bound_for_parm (tree parm
)
2350 tree decl
= SSA_NAME_VAR (parm
);
2353 gcc_assert (TREE_CODE (decl
) == PARM_DECL
);
2355 bounds
= chkp_get_registered_bounds (parm
);
2358 bounds
= chkp_get_registered_bounds (decl
);
2362 tree orig_decl
= cgraph_node::get (cfun
->decl
)->orig_decl
;
2364 /* For static chain param we return zero bounds
2365 because currently we do not check dereferences
2367 if (cfun
->static_chain_decl
== decl
)
2368 bounds
= chkp_get_zero_bounds ();
2369 /* If non instrumented runtime is used then it may be useful
2370 to use zero bounds for input arguments of main
2372 else if (flag_chkp_zero_input_bounds_for_main
2373 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl
)),
2375 bounds
= chkp_get_zero_bounds ();
2376 else if (BOUNDED_P (parm
))
2378 bounds
= chkp_get_next_bounds_parm (decl
);
2379 bounds
= chkp_maybe_copy_and_register_bounds (decl
, bounds
);
2381 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2383 fprintf (dump_file
, "Built arg bounds (");
2384 print_generic_expr (dump_file
, bounds
, 0);
2385 fprintf (dump_file
, ") for arg: ");
2386 print_node (dump_file
, "", decl
, 0);
2390 bounds
= chkp_get_zero_bounds ();
2393 if (!chkp_get_registered_bounds (parm
))
2394 bounds
= chkp_maybe_copy_and_register_bounds (parm
, bounds
);
2396 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2398 fprintf (dump_file
, "Using bounds ");
2399 print_generic_expr (dump_file
, bounds
, 0);
2400 fprintf (dump_file
, " for parm ");
2401 print_generic_expr (dump_file
, parm
, 0);
2402 fprintf (dump_file
, " of type ");
2403 print_generic_expr (dump_file
, TREE_TYPE (parm
), 0);
2404 fprintf (dump_file
, ".\n");
2410 /* Build and return CALL_EXPR for bndstx builtin with specified
2413 chkp_build_bndldx_call (tree addr
, tree ptr
)
2415 tree fn
= build1 (ADDR_EXPR
,
2416 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl
)),
2417 chkp_bndldx_fndecl
);
2418 tree call
= build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl
)),
2420 CALL_WITH_BOUNDS_P (call
) = true;
2424 /* Insert code to load bounds for PTR located by ADDR.
2425 Code is inserted after position pointed by GSI.
2426 Loaded bounds are returned. */
2428 chkp_build_bndldx (tree addr
, tree ptr
, gimple_stmt_iterator
*gsi
)
2436 addr
= chkp_force_gimple_call_op (addr
, &seq
);
2437 ptr
= chkp_force_gimple_call_op (ptr
, &seq
);
2439 stmt
= gimple_build_call (chkp_bndldx_fndecl
, 2, addr
, ptr
);
2440 chkp_mark_stmt (stmt
);
2441 bounds
= chkp_get_tmp_reg (stmt
);
2442 gimple_call_set_lhs (stmt
, bounds
);
2444 gimple_seq_add_stmt (&seq
, stmt
);
2446 gsi_insert_seq_after (gsi
, seq
, GSI_CONTINUE_LINKING
);
2448 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2450 fprintf (dump_file
, "Generated bndldx for pointer ");
2451 print_generic_expr (dump_file
, ptr
, 0);
2452 fprintf (dump_file
, ": ");
2453 print_gimple_stmt (dump_file
, stmt
, 0, TDF_VOPS
|TDF_MEMSYMS
);
2459 /* Build and return CALL_EXPR for bndstx builtin with specified
2462 chkp_build_bndstx_call (tree addr
, tree ptr
, tree bounds
)
2464 tree fn
= build1 (ADDR_EXPR
,
2465 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl
)),
2466 chkp_bndstx_fndecl
);
2467 tree call
= build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl
)),
2468 fn
, 3, ptr
, bounds
, addr
);
2469 CALL_WITH_BOUNDS_P (call
) = true;
2473 /* Insert code to store BOUNDS for PTR stored by ADDR.
2474 New statements are inserted after position pointed
2477 chkp_build_bndstx (tree addr
, tree ptr
, tree bounds
,
2478 gimple_stmt_iterator
*gsi
)
2485 addr
= chkp_force_gimple_call_op (addr
, &seq
);
2486 ptr
= chkp_force_gimple_call_op (ptr
, &seq
);
2488 stmt
= gimple_build_call (chkp_bndstx_fndecl
, 3, ptr
, bounds
, addr
);
2489 chkp_mark_stmt (stmt
);
2490 gimple_call_set_with_bounds (stmt
, true);
2492 gimple_seq_add_stmt (&seq
, stmt
);
2494 gsi_insert_seq_after (gsi
, seq
, GSI_CONTINUE_LINKING
);
2496 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2498 fprintf (dump_file
, "Generated bndstx for pointer store ");
2499 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
), 0, TDF_VOPS
|TDF_MEMSYMS
);
2500 print_gimple_stmt (dump_file
, stmt
, 2, TDF_VOPS
|TDF_MEMSYMS
);
2504 /* Compute bounds for pointer NODE which was assigned in
2505 assignment statement ASSIGN. Return computed bounds. */
2507 chkp_compute_bounds_for_assignment (tree node
, gimple assign
)
2509 enum tree_code rhs_code
= gimple_assign_rhs_code (assign
);
2510 tree rhs1
= gimple_assign_rhs1 (assign
);
2511 tree bounds
= NULL_TREE
;
2512 gimple_stmt_iterator iter
= gsi_for_stmt (assign
);
2515 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2517 fprintf (dump_file
, "Computing bounds for assignment: ");
2518 print_gimple_stmt (dump_file
, assign
, 0, TDF_VOPS
|TDF_MEMSYMS
);
2524 case TARGET_MEM_REF
:
2527 /* We need to load bounds from the bounds table. */
2528 bounds
= chkp_find_bounds_loaded (node
, rhs1
, &iter
);
2534 case POINTER_PLUS_EXPR
:
2538 /* Bounds are just propagated from RHS. */
2539 bounds
= chkp_find_bounds (rhs1
, &iter
);
2543 case VIEW_CONVERT_EXPR
:
2544 /* Bounds are just propagated from RHS. */
2545 bounds
= chkp_find_bounds (TREE_OPERAND (rhs1
, 0), &iter
);
2549 if (BOUNDED_P (rhs1
))
2551 /* We need to load bounds from the bounds table. */
2552 bounds
= chkp_build_bndldx (chkp_build_addr_expr (rhs1
),
2554 TREE_ADDRESSABLE (rhs1
) = 1;
2557 bounds
= chkp_get_nonpointer_load_bounds ();
2566 tree rhs2
= gimple_assign_rhs2 (assign
);
2567 tree bnd1
= chkp_find_bounds (rhs1
, &iter
);
2568 tree bnd2
= chkp_find_bounds (rhs2
, &iter
);
2570 /* First we try to check types of operands. If it
2571 does not help then look at bound values.
2573 If some bounds are incomplete and other are
2574 not proven to be valid (i.e. also incomplete
2575 or invalid because value is not pointer) then
2576 resulting value is incomplete and will be
2577 recomputed later in chkp_finish_incomplete_bounds. */
2578 if (BOUNDED_P (rhs1
)
2579 && !BOUNDED_P (rhs2
))
2581 else if (BOUNDED_P (rhs2
)
2582 && !BOUNDED_P (rhs1
)
2583 && rhs_code
!= MINUS_EXPR
)
2585 else if (chkp_incomplete_bounds (bnd1
))
2586 if (chkp_valid_bounds (bnd2
) && rhs_code
!= MINUS_EXPR
2587 && !chkp_incomplete_bounds (bnd2
))
2590 bounds
= incomplete_bounds
;
2591 else if (chkp_incomplete_bounds (bnd2
))
2592 if (chkp_valid_bounds (bnd1
)
2593 && !chkp_incomplete_bounds (bnd1
))
2596 bounds
= incomplete_bounds
;
2597 else if (!chkp_valid_bounds (bnd1
))
2598 if (chkp_valid_bounds (bnd2
) && rhs_code
!= MINUS_EXPR
)
2600 else if (bnd2
== chkp_get_zero_bounds ())
2604 else if (!chkp_valid_bounds (bnd2
))
2607 /* Seems both operands may have valid bounds
2608 (e.g. pointer minus pointer). In such case
2609 use default invalid op bounds. */
2610 bounds
= chkp_get_invalid_op_bounds ();
2612 base
= (bounds
== bnd1
) ? rhs1
: (bounds
== bnd2
) ? rhs2
: NULL
;
2630 case TRUNC_DIV_EXPR
:
2631 case FLOOR_DIV_EXPR
:
2633 case ROUND_DIV_EXPR
:
2634 case TRUNC_MOD_EXPR
:
2635 case FLOOR_MOD_EXPR
:
2637 case ROUND_MOD_EXPR
:
2638 case EXACT_DIV_EXPR
:
2639 case FIX_TRUNC_EXPR
:
2643 /* No valid bounds may be produced by these exprs. */
2644 bounds
= chkp_get_invalid_op_bounds ();
2649 tree val1
= gimple_assign_rhs2 (assign
);
2650 tree val2
= gimple_assign_rhs3 (assign
);
2651 tree bnd1
= chkp_find_bounds (val1
, &iter
);
2652 tree bnd2
= chkp_find_bounds (val2
, &iter
);
2655 if (chkp_incomplete_bounds (bnd1
) || chkp_incomplete_bounds (bnd2
))
2656 bounds
= incomplete_bounds
;
2657 else if (bnd1
== bnd2
)
2661 rhs1
= unshare_expr (rhs1
);
2663 bounds
= chkp_get_tmp_reg (assign
);
2664 stmt
= gimple_build_assign (bounds
, COND_EXPR
, rhs1
, bnd1
, bnd2
);
2665 gsi_insert_after (&iter
, stmt
, GSI_SAME_STMT
);
2667 if (!chkp_valid_bounds (bnd1
) && !chkp_valid_bounds (bnd2
))
2668 chkp_mark_invalid_bounds (bounds
);
2676 tree rhs2
= gimple_assign_rhs2 (assign
);
2677 tree bnd1
= chkp_find_bounds (rhs1
, &iter
);
2678 tree bnd2
= chkp_find_bounds (rhs2
, &iter
);
2680 if (chkp_incomplete_bounds (bnd1
) || chkp_incomplete_bounds (bnd2
))
2681 bounds
= incomplete_bounds
;
2682 else if (bnd1
== bnd2
)
2687 tree cond
= build2 (rhs_code
== MAX_EXPR
? GT_EXPR
: LT_EXPR
,
2688 boolean_type_node
, rhs1
, rhs2
);
2689 bounds
= chkp_get_tmp_reg (assign
);
2690 stmt
= gimple_build_assign (bounds
, COND_EXPR
, cond
, bnd1
, bnd2
);
2692 gsi_insert_after (&iter
, stmt
, GSI_SAME_STMT
);
2694 if (!chkp_valid_bounds (bnd1
) && !chkp_valid_bounds (bnd2
))
2695 chkp_mark_invalid_bounds (bounds
);
2701 bounds
= chkp_get_zero_bounds ();
2702 warning (0, "pointer bounds were lost due to unexpected expression %s",
2703 get_tree_code_name (rhs_code
));
2706 gcc_assert (bounds
);
2708 /* We may reuse bounds of other pointer we copy/modify. But it is not
2709 allowed for abnormal ssa names. If we produced a pointer using
2710 abnormal ssa name, we better make a bounds copy to avoid coalescing
2713 && TREE_CODE (base
) == SSA_NAME
2714 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base
))
2716 gimple stmt
= gimple_build_assign (chkp_get_tmp_reg (NULL
), bounds
);
2717 gsi_insert_after (&iter
, stmt
, GSI_SAME_STMT
);
2718 bounds
= gimple_assign_lhs (stmt
);
2722 bounds
= chkp_maybe_copy_and_register_bounds (node
, bounds
);
2727 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2729 There are just few statement codes allowed: NOP (for default ssa names),
2730 ASSIGN, CALL, PHI, ASM.
2732 Return computed bounds. */
2734 chkp_get_bounds_by_definition (tree node
, gimple def_stmt
,
2735 gphi_iterator
*iter
)
2738 enum gimple_code code
= gimple_code (def_stmt
);
2741 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2743 fprintf (dump_file
, "Searching for bounds for node: ");
2744 print_generic_expr (dump_file
, node
, 0);
2746 fprintf (dump_file
, " using its definition: ");
2747 print_gimple_stmt (dump_file
, def_stmt
, 0, TDF_VOPS
|TDF_MEMSYMS
);
2753 var
= SSA_NAME_VAR (node
);
2754 switch (TREE_CODE (var
))
2757 bounds
= chkp_get_bound_for_parm (node
);
2761 /* For uninitialized pointers use none bounds. */
2762 bounds
= chkp_get_none_bounds ();
2763 bounds
= chkp_maybe_copy_and_register_bounds (node
, bounds
);
2770 gcc_assert (TREE_CODE (TREE_TYPE (node
)) == REFERENCE_TYPE
);
2772 base_type
= TREE_TYPE (TREE_TYPE (node
));
2774 gcc_assert (TYPE_SIZE (base_type
)
2775 && TREE_CODE (TYPE_SIZE (base_type
)) == INTEGER_CST
2776 && tree_to_uhwi (TYPE_SIZE (base_type
)) != 0);
2778 bounds
= chkp_make_bounds (node
, TYPE_SIZE_UNIT (base_type
),
2780 bounds
= chkp_maybe_copy_and_register_bounds (node
, bounds
);
2785 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2787 fprintf (dump_file
, "Unexpected var with no definition\n");
2788 print_generic_expr (dump_file
, var
, 0);
2790 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2791 get_tree_code_name (TREE_CODE (var
)));
2796 bounds
= chkp_compute_bounds_for_assignment (node
, def_stmt
);
2800 bounds
= chkp_build_returned_bound (as_a
<gcall
*> (def_stmt
));
2804 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node
))
2805 if (SSA_NAME_VAR (node
))
2806 var
= chkp_get_bounds_var (SSA_NAME_VAR (node
));
2808 var
= make_temp_ssa_name (pointer_bounds_type_node
,
2810 CHKP_BOUND_TMP_NAME
);
2812 var
= chkp_get_tmp_var ();
2813 stmt
= create_phi_node (var
, gimple_bb (def_stmt
));
2814 bounds
= gimple_phi_result (stmt
);
2815 *iter
= gsi_for_phi (stmt
);
2817 bounds
= chkp_maybe_copy_and_register_bounds (node
, bounds
);
2819 /* Created bounds do not have all phi args computed and
2820 therefore we do not know if there is a valid source
2821 of bounds for that node. Therefore we mark bounds
2822 as incomplete and then recompute them when all phi
2823 args are computed. */
2824 chkp_register_incomplete_bounds (bounds
, node
);
2828 bounds
= chkp_get_zero_bounds ();
2829 bounds
= chkp_maybe_copy_and_register_bounds (node
, bounds
);
2833 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2834 gimple_code_name
[code
]);
2840 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2842 chkp_build_make_bounds_call (tree lower_bound
, tree size
)
2844 tree call
= build1 (ADDR_EXPR
,
2845 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl
)),
2847 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl
)),
2848 call
, 2, lower_bound
, size
);
2851 /* Create static bounds var of specfified OBJ which is
2852 is either VAR_DECL or string constant. */
2854 chkp_make_static_bounds (tree obj
)
2856 static int string_id
= 1;
2857 static int var_id
= 1;
2859 const char *var_name
;
2863 /* First check if we already have required var. */
2864 if (chkp_static_var_bounds
)
2866 /* For vars we use assembler name as a key in
2867 chkp_static_var_bounds map. It allows to
2868 avoid duplicating bound vars for decls
2869 sharing assembler name. */
2870 if (TREE_CODE (obj
) == VAR_DECL
)
2872 tree name
= DECL_ASSEMBLER_NAME (obj
);
2873 slot
= chkp_static_var_bounds
->get (name
);
2879 slot
= chkp_static_var_bounds
->get (obj
);
2885 /* Build decl for bounds var. */
2886 if (TREE_CODE (obj
) == VAR_DECL
)
2888 if (DECL_IGNORED_P (obj
))
2890 bnd_var_name
= (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX
) + 10);
2891 sprintf (bnd_var_name
, "%s%d", CHKP_VAR_BOUNDS_PREFIX
, var_id
++);
2895 var_name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj
));
2897 /* For hidden symbols we want to skip first '*' char. */
2898 if (*var_name
== '*')
2901 bnd_var_name
= (char *) xmalloc (strlen (var_name
)
2902 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX
) + 1);
2903 strcpy (bnd_var_name
, CHKP_BOUNDS_OF_SYMBOL_PREFIX
);
2904 strcat (bnd_var_name
, var_name
);
2907 bnd_var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
,
2908 get_identifier (bnd_var_name
),
2909 pointer_bounds_type_node
);
2911 /* Address of the obj will be used as lower bound. */
2912 TREE_ADDRESSABLE (obj
) = 1;
2916 bnd_var_name
= (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX
) + 10);
2917 sprintf (bnd_var_name
, "%s%d", CHKP_STRING_BOUNDS_PREFIX
, string_id
++);
2919 bnd_var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
,
2920 get_identifier (bnd_var_name
),
2921 pointer_bounds_type_node
);
2924 TREE_PUBLIC (bnd_var
) = 0;
2925 TREE_USED (bnd_var
) = 1;
2926 TREE_READONLY (bnd_var
) = 0;
2927 TREE_STATIC (bnd_var
) = 1;
2928 TREE_ADDRESSABLE (bnd_var
) = 0;
2929 DECL_ARTIFICIAL (bnd_var
) = 1;
2930 DECL_COMMON (bnd_var
) = 1;
2931 DECL_COMDAT (bnd_var
) = 1;
2932 DECL_READ_P (bnd_var
) = 1;
2933 DECL_INITIAL (bnd_var
) = chkp_build_addr_expr (obj
);
2934 /* Force output similar to constant bounds.
2935 See chkp_make_static_const_bounds. */
2936 varpool_node::get_create (bnd_var
)->force_output
= 1;
2937 /* Mark symbol as requiring bounds initialization. */
2938 varpool_node::get_create (bnd_var
)->need_bounds_init
= 1;
2939 varpool_node::finalize_decl (bnd_var
);
2941 /* Add created var to the map to use it for other references
2943 if (!chkp_static_var_bounds
)
2944 chkp_static_var_bounds
= new hash_map
<tree
, tree
>;
2946 if (TREE_CODE (obj
) == VAR_DECL
)
2948 tree name
= DECL_ASSEMBLER_NAME (obj
);
2949 chkp_static_var_bounds
->put (name
, bnd_var
);
2952 chkp_static_var_bounds
->put (obj
, bnd_var
);
2957 /* When var has incomplete type we cannot get size to
2958 compute its bounds. In such cases we use checker
2959 builtin call which determines object size at runtime. */
2961 chkp_generate_extern_var_bounds (tree var
)
2963 tree bounds
, size_reloc
, lb
, size
, max_size
, cond
;
2964 gimple_stmt_iterator gsi
;
2965 gimple_seq seq
= NULL
;
2968 /* If instrumentation is not enabled for vars having
2969 incomplete type then just return zero bounds to avoid
2970 checks for this var. */
2971 if (!flag_chkp_incomplete_type
)
2972 return chkp_get_zero_bounds ();
2974 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2976 fprintf (dump_file
, "Generating bounds for extern symbol '");
2977 print_generic_expr (dump_file
, var
, 0);
2978 fprintf (dump_file
, "'\n");
2981 stmt
= gimple_build_call (chkp_sizeof_fndecl
, 1, var
);
2983 size_reloc
= create_tmp_reg (chkp_uintptr_type
, CHKP_SIZE_TMP_NAME
);
2984 gimple_call_set_lhs (stmt
, size_reloc
);
2986 gimple_seq_add_stmt (&seq
, stmt
);
2988 lb
= chkp_build_addr_expr (var
);
2989 size
= make_ssa_name (chkp_get_size_tmp_var ());
2991 if (flag_chkp_zero_dynamic_size_as_infinite
)
2993 /* We should check that size relocation was resolved.
2994 If it was not then use maximum possible size for the var. */
2995 max_size
= build2 (MINUS_EXPR
, chkp_uintptr_type
, integer_zero_node
,
2996 fold_convert (chkp_uintptr_type
, lb
));
2997 max_size
= chkp_force_gimple_call_op (max_size
, &seq
);
2999 cond
= build2 (NE_EXPR
, boolean_type_node
,
3000 size_reloc
, integer_zero_node
);
3001 stmt
= gimple_build_assign (size
, COND_EXPR
, cond
, size_reloc
, max_size
);
3002 gimple_seq_add_stmt (&seq
, stmt
);
3006 stmt
= gimple_build_assign (size
, size_reloc
);
3007 gimple_seq_add_stmt (&seq
, stmt
);
3010 gsi
= gsi_start_bb (chkp_get_entry_block ());
3011 gsi_insert_seq_after (&gsi
, seq
, GSI_CONTINUE_LINKING
);
3013 bounds
= chkp_make_bounds (lb
, size
, &gsi
, true);
3018 /* Return 1 if TYPE has fields with zero size or fields
3019 marked with chkp_variable_size attribute. */
3021 chkp_variable_size_type (tree type
)
3026 if (RECORD_OR_UNION_TYPE_P (type
))
3027 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
3029 if (TREE_CODE (field
) == FIELD_DECL
)
3031 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field
))
3032 || chkp_variable_size_type (TREE_TYPE (field
));
3035 res
= !TYPE_SIZE (type
)
3036 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
3037 || tree_to_uhwi (TYPE_SIZE (type
)) == 0;
3042 /* Compute and return bounds for address of DECL which is
3043 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
3045 chkp_get_bounds_for_decl_addr (tree decl
)
3049 gcc_assert (TREE_CODE (decl
) == VAR_DECL
3050 || TREE_CODE (decl
) == PARM_DECL
3051 || TREE_CODE (decl
) == RESULT_DECL
);
3053 bounds
= chkp_get_registered_addr_bounds (decl
);
3058 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3060 fprintf (dump_file
, "Building bounds for address of decl ");
3061 print_generic_expr (dump_file
, decl
, 0);
3062 fprintf (dump_file
, "\n");
3065 /* Use zero bounds if size is unknown and checks for
3066 unknown sizes are restricted. */
3067 if ((!DECL_SIZE (decl
)
3068 || (chkp_variable_size_type (TREE_TYPE (decl
))
3069 && (TREE_STATIC (decl
)
3070 || DECL_EXTERNAL (decl
)
3071 || TREE_PUBLIC (decl
))))
3072 && !flag_chkp_incomplete_type
)
3073 return chkp_get_zero_bounds ();
3075 if (flag_chkp_use_static_bounds
3076 && TREE_CODE (decl
) == VAR_DECL
3077 && (TREE_STATIC (decl
)
3078 || DECL_EXTERNAL (decl
)
3079 || TREE_PUBLIC (decl
))
3080 && !DECL_THREAD_LOCAL_P (decl
))
3082 tree bnd_var
= chkp_make_static_bounds (decl
);
3083 gimple_stmt_iterator gsi
= gsi_start_bb (chkp_get_entry_block ());
3086 bounds
= chkp_get_tmp_reg (NULL
);
3087 stmt
= gimple_build_assign (bounds
, bnd_var
);
3088 gsi_insert_before (&gsi
, stmt
, GSI_SAME_STMT
);
3090 else if (!DECL_SIZE (decl
)
3091 || (chkp_variable_size_type (TREE_TYPE (decl
))
3092 && (TREE_STATIC (decl
)
3093 || DECL_EXTERNAL (decl
)
3094 || TREE_PUBLIC (decl
))))
3096 gcc_assert (TREE_CODE (decl
) == VAR_DECL
);
3097 bounds
= chkp_generate_extern_var_bounds (decl
);
3101 tree lb
= chkp_build_addr_expr (decl
);
3102 bounds
= chkp_make_bounds (lb
, DECL_SIZE_UNIT (decl
), NULL
, false);
3108 /* Compute and return bounds for constant string. */
3110 chkp_get_bounds_for_string_cst (tree cst
)
3116 gcc_assert (TREE_CODE (cst
) == STRING_CST
);
3118 bounds
= chkp_get_registered_bounds (cst
);
3123 if ((flag_chkp_use_static_bounds
&& flag_chkp_use_static_const_bounds
)
3124 || flag_chkp_use_static_const_bounds
> 0)
3126 tree bnd_var
= chkp_make_static_bounds (cst
);
3127 gimple_stmt_iterator gsi
= gsi_start_bb (chkp_get_entry_block ());
3130 bounds
= chkp_get_tmp_reg (NULL
);
3131 stmt
= gimple_build_assign (bounds
, bnd_var
);
3132 gsi_insert_before (&gsi
, stmt
, GSI_SAME_STMT
);
3136 lb
= chkp_build_addr_expr (cst
);
3137 size
= build_int_cst (chkp_uintptr_type
, TREE_STRING_LENGTH (cst
));
3138 bounds
= chkp_make_bounds (lb
, size
, NULL
, false);
3141 bounds
= chkp_maybe_copy_and_register_bounds (cst
, bounds
);
3146 /* Generate code to instersect bounds BOUNDS1 and BOUNDS2 and
3147 return the result. if ITER is not NULL then Code is inserted
3148 before position pointed by ITER. Otherwise code is added to
3151 chkp_intersect_bounds (tree bounds1
, tree bounds2
, gimple_stmt_iterator
*iter
)
3153 if (!bounds1
|| bounds1
== chkp_get_zero_bounds ())
3154 return bounds2
? bounds2
: bounds1
;
3155 else if (!bounds2
|| bounds2
== chkp_get_zero_bounds ())
3165 stmt
= gimple_build_call (chkp_intersect_fndecl
, 2, bounds1
, bounds2
);
3166 chkp_mark_stmt (stmt
);
3168 bounds
= chkp_get_tmp_reg (stmt
);
3169 gimple_call_set_lhs (stmt
, bounds
);
3171 gimple_seq_add_stmt (&seq
, stmt
);
3173 /* We are probably doing narrowing for constant expression.
3174 In such case iter may be undefined. */
3177 gimple_stmt_iterator gsi
= gsi_last_bb (chkp_get_entry_block ());
3179 gsi_insert_seq_after (iter
, seq
, GSI_SAME_STMT
);
3182 gsi_insert_seq_before (iter
, seq
, GSI_SAME_STMT
);
3184 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3186 fprintf (dump_file
, "Bounds intersection: ");
3187 print_gimple_stmt (dump_file
, stmt
, 0, TDF_VOPS
|TDF_MEMSYMS
);
3188 fprintf (dump_file
, " inserted before statement: ");
3189 print_gimple_stmt (dump_file
, gsi_stmt (*iter
), 0,
3190 TDF_VOPS
|TDF_MEMSYMS
);
3197 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3200 chkp_may_narrow_to_field (tree field
)
3202 return DECL_SIZE (field
) && TREE_CODE (DECL_SIZE (field
)) == INTEGER_CST
3203 && tree_to_uhwi (DECL_SIZE (field
)) != 0
3204 && (!DECL_FIELD_OFFSET (field
)
3205 || TREE_CODE (DECL_FIELD_OFFSET (field
)) == INTEGER_CST
)
3206 && (!DECL_FIELD_BIT_OFFSET (field
)
3207 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field
)) == INTEGER_CST
)
3208 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field
))
3209 && !chkp_variable_size_type (TREE_TYPE (field
));
3212 /* Return 1 if bounds for FIELD should be narrowed to
3213 field's own size. */
3215 chkp_narrow_bounds_for_field (tree field
)
3218 HOST_WIDE_INT bit_offs
;
3220 if (!chkp_may_narrow_to_field (field
))
3223 /* Accesse to compiler generated fields should not cause
3224 bounds narrowing. */
3225 if (DECL_ARTIFICIAL (field
))
3228 offs
= tree_to_uhwi (DECL_FIELD_OFFSET (field
));
3229 bit_offs
= tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
));
3231 return (flag_chkp_narrow_bounds
3232 && (flag_chkp_first_field_has_own_bounds
3237 /* Perform narrowing for BOUNDS using bounds computed for field
3238 access COMPONENT. ITER meaning is the same as for
3239 chkp_intersect_bounds. */
3241 chkp_narrow_bounds_to_field (tree bounds
, tree component
,
3242 gimple_stmt_iterator
*iter
)
3244 tree field
= TREE_OPERAND (component
, 1);
3245 tree size
= DECL_SIZE_UNIT (field
);
3246 tree field_ptr
= chkp_build_addr_expr (component
);
3249 field_bounds
= chkp_make_bounds (field_ptr
, size
, iter
, false);
3251 return chkp_intersect_bounds (field_bounds
, bounds
, iter
);
3254 /* Parse field or array access NODE.
3256 PTR ouput parameter holds a pointer to the outermost
3259 BITFIELD output parameter is set to 1 if bitfield is
3260 accessed and to 0 otherwise. If it is 1 then ELT holds
3261 outer component for accessed bit field.
3263 SAFE outer parameter is set to 1 if access is safe and
3264 checks are not required.
3266 BOUNDS outer parameter holds bounds to be used to check
3267 access (may be NULL).
3269 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3270 innermost accessed component. */
3272 chkp_parse_array_and_component_ref (tree node
, tree
*ptr
,
3273 tree
*elt
, bool *safe
,
3276 gimple_stmt_iterator
*iter
,
3277 bool innermost_bounds
)
3279 tree comp_to_narrow
= NULL_TREE
;
3280 tree last_comp
= NULL_TREE
;
3281 bool array_ref_found
= false;
3287 /* Compute tree height for expression. */
3290 while (TREE_CODE (var
) == COMPONENT_REF
3291 || TREE_CODE (var
) == ARRAY_REF
3292 || TREE_CODE (var
) == VIEW_CONVERT_EXPR
)
3294 var
= TREE_OPERAND (var
, 0);
3298 gcc_assert (len
> 1);
3300 /* It is more convenient for us to scan left-to-right,
3301 so walk tree again and put all node to nodes vector
3302 in reversed order. */
3303 nodes
= XALLOCAVEC (tree
, len
);
3304 nodes
[len
- 1] = node
;
3305 for (i
= len
- 2; i
>= 0; i
--)
3306 nodes
[i
] = TREE_OPERAND (nodes
[i
+ 1], 0);
3311 *bitfield
= (TREE_CODE (node
) == COMPONENT_REF
3312 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node
, 1)));
3313 /* To get bitfield address we will need outer elemnt. */
3315 *elt
= nodes
[len
- 2];
3319 /* If we have indirection in expression then compute
3320 outermost structure bounds. Computed bounds may be
3322 if (TREE_CODE (nodes
[0]) == MEM_REF
|| INDIRECT_REF_P (nodes
[0]))
3325 *ptr
= TREE_OPERAND (nodes
[0], 0);
3327 *bounds
= chkp_find_bounds (*ptr
, iter
);
3331 gcc_assert (TREE_CODE (var
) == VAR_DECL
3332 || TREE_CODE (var
) == PARM_DECL
3333 || TREE_CODE (var
) == RESULT_DECL
3334 || TREE_CODE (var
) == STRING_CST
3335 || TREE_CODE (var
) == SSA_NAME
);
3337 *ptr
= chkp_build_addr_expr (var
);
3340 /* In this loop we are trying to find a field access
3341 requiring narrowing. There are two simple rules
3343 1. Leftmost array_ref is chosen if any.
3344 2. Rightmost suitable component_ref is chosen if innermost
3345 bounds are required and no array_ref exists. */
3346 for (i
= 1; i
< len
; i
++)
3350 if (TREE_CODE (var
) == ARRAY_REF
)
3353 array_ref_found
= true;
3354 if (flag_chkp_narrow_bounds
3355 && !flag_chkp_narrow_to_innermost_arrray
3357 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp
, 1))))
3359 comp_to_narrow
= last_comp
;
3363 else if (TREE_CODE (var
) == COMPONENT_REF
)
3365 tree field
= TREE_OPERAND (var
, 1);
3367 if (innermost_bounds
3369 && chkp_narrow_bounds_for_field (field
))
3370 comp_to_narrow
= var
;
3373 if (flag_chkp_narrow_bounds
3374 && flag_chkp_narrow_to_innermost_arrray
3375 && TREE_CODE (TREE_TYPE (field
)) == ARRAY_TYPE
)
3378 *bounds
= chkp_narrow_bounds_to_field (*bounds
, var
, iter
);
3379 comp_to_narrow
= NULL
;
3382 else if (TREE_CODE (var
) == VIEW_CONVERT_EXPR
)
3383 /* Nothing to do for it. */
3389 if (comp_to_narrow
&& DECL_SIZE (TREE_OPERAND (comp_to_narrow
, 1)) && bounds
)
3390 *bounds
= chkp_narrow_bounds_to_field (*bounds
, comp_to_narrow
, iter
);
3392 if (innermost_bounds
&& bounds
&& !*bounds
)
3393 *bounds
= chkp_find_bounds (*ptr
, iter
);
3396 /* Compute and return bounds for address of OBJ. */
3398 chkp_make_addressed_object_bounds (tree obj
, gimple_stmt_iterator
*iter
)
3400 tree bounds
= chkp_get_registered_addr_bounds (obj
);
3405 switch (TREE_CODE (obj
))
3410 bounds
= chkp_get_bounds_for_decl_addr (obj
);
3414 bounds
= chkp_get_bounds_for_string_cst (obj
);
3425 chkp_parse_array_and_component_ref (obj
, &ptr
, &elt
, &safe
,
3426 &bitfield
, &bounds
, iter
, true);
3428 gcc_assert (bounds
);
3434 bounds
= chkp_get_zero_bounds ();
3438 bounds
= chkp_find_bounds (TREE_OPERAND (obj
, 0), iter
);
3443 bounds
= chkp_make_addressed_object_bounds (TREE_OPERAND (obj
, 0), iter
);
3447 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3449 fprintf (dump_file
, "chkp_make_addressed_object_bounds: "
3450 "unexpected object of type %s\n",
3451 get_tree_code_name (TREE_CODE (obj
)));
3452 print_node (dump_file
, "", obj
, 0);
3454 internal_error ("chkp_make_addressed_object_bounds: "
3455 "Unexpected tree code %s",
3456 get_tree_code_name (TREE_CODE (obj
)));
3459 chkp_register_addr_bounds (obj
, bounds
);
3464 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3465 to compute bounds if required. Computed bounds should be available at
3466 position pointed by ITER.
3468 If PTR_SRC is NULL_TREE then pointer definition is identified.
3470 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3471 PTR. If PTR is a any memory reference then ITER points to a statement
3472 after which bndldx will be inserterd. In both cases ITER will be updated
3473 to point to the inserted bndldx statement. */
3476 chkp_find_bounds_1 (tree ptr
, tree ptr_src
, gimple_stmt_iterator
*iter
)
3478 tree addr
= NULL_TREE
;
3479 tree bounds
= NULL_TREE
;
3484 bounds
= chkp_get_registered_bounds (ptr_src
);
3489 switch (TREE_CODE (ptr_src
))
3493 if (BOUNDED_P (ptr_src
))
3494 if (TREE_CODE (ptr
) == VAR_DECL
&& DECL_REGISTER (ptr
))
3495 bounds
= chkp_get_zero_bounds ();
3498 addr
= chkp_build_addr_expr (ptr_src
);
3499 bounds
= chkp_build_bndldx (addr
, ptr
, iter
);
3502 bounds
= chkp_get_nonpointer_load_bounds ();
3507 addr
= get_base_address (ptr_src
);
3509 || TREE_CODE (addr
) == MEM_REF
3510 || TREE_CODE (addr
) == TARGET_MEM_REF
)
3512 if (BOUNDED_P (ptr_src
))
3513 if (TREE_CODE (ptr
) == VAR_DECL
&& DECL_REGISTER (ptr
))
3514 bounds
= chkp_get_zero_bounds ();
3517 addr
= chkp_build_addr_expr (ptr_src
);
3518 bounds
= chkp_build_bndldx (addr
, ptr
, iter
);
3521 bounds
= chkp_get_nonpointer_load_bounds ();
3525 gcc_assert (TREE_CODE (addr
) == SSA_NAME
);
3526 bounds
= chkp_find_bounds (addr
, iter
);
3532 bounds
= chkp_get_bound_for_parm (ptr_src
);
3535 case TARGET_MEM_REF
:
3536 addr
= chkp_build_addr_expr (ptr_src
);
3537 bounds
= chkp_build_bndldx (addr
, ptr
, iter
);
3541 bounds
= chkp_get_registered_bounds (ptr_src
);
3544 gimple def_stmt
= SSA_NAME_DEF_STMT (ptr_src
);
3545 gphi_iterator phi_iter
;
3547 bounds
= chkp_get_bounds_by_definition (ptr_src
, def_stmt
, &phi_iter
);
3549 gcc_assert (bounds
);
3551 if (gphi
*def_phi
= dyn_cast
<gphi
*> (def_stmt
))
3555 for (i
= 0; i
< gimple_phi_num_args (def_phi
); i
++)
3557 tree arg
= gimple_phi_arg_def (def_phi
, i
);
3561 arg_bnd
= chkp_find_bounds (arg
, NULL
);
3563 /* chkp_get_bounds_by_definition created new phi
3564 statement and phi_iter points to it.
3566 Previous call to chkp_find_bounds could create
3567 new basic block and therefore change phi statement
3568 phi_iter points to. */
3569 phi_bnd
= phi_iter
.phi ();
3571 add_phi_arg (phi_bnd
, arg_bnd
,
3572 gimple_phi_arg_edge (def_phi
, i
),
3576 /* If all bound phi nodes have their arg computed
3577 then we may finish its computation. See
3578 chkp_finish_incomplete_bounds for more details. */
3579 if (chkp_may_finish_incomplete_bounds ())
3580 chkp_finish_incomplete_bounds ();
3583 gcc_assert (bounds
== chkp_get_registered_bounds (ptr_src
)
3584 || chkp_incomplete_bounds (bounds
));
3589 bounds
= chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src
, 0), iter
);
3593 if (integer_zerop (ptr_src
))
3594 bounds
= chkp_get_none_bounds ();
3596 bounds
= chkp_get_invalid_op_bounds ();
3600 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3602 fprintf (dump_file
, "chkp_find_bounds: unexpected ptr of type %s\n",
3603 get_tree_code_name (TREE_CODE (ptr_src
)));
3604 print_node (dump_file
, "", ptr_src
, 0);
3606 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3607 get_tree_code_name (TREE_CODE (ptr_src
)));
3612 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3614 fprintf (stderr
, "chkp_find_bounds: cannot find bounds for pointer\n");
3615 print_node (dump_file
, "", ptr_src
, 0);
3617 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3623 /* Normal case for bounds search without forced narrowing. */
3625 chkp_find_bounds (tree ptr
, gimple_stmt_iterator
*iter
)
3627 return chkp_find_bounds_1 (ptr
, NULL_TREE
, iter
);
3630 /* Search bounds for pointer PTR loaded from PTR_SRC
3631 by statement *ITER points to. */
3633 chkp_find_bounds_loaded (tree ptr
, tree ptr_src
, gimple_stmt_iterator
*iter
)
3635 return chkp_find_bounds_1 (ptr
, ptr_src
, iter
);
3638 /* Helper function which checks type of RHS and finds all pointers in
3639 it. For each found pointer we build it's accesses in LHS and RHS
3640 objects and then call HANDLER for them. Function is used to copy
3641 or initilize bounds for copied object. */
3643 chkp_walk_pointer_assignments (tree lhs
, tree rhs
, void *arg
,
3644 assign_handler handler
)
3646 tree type
= TREE_TYPE (lhs
);
3648 /* We have nothing to do with clobbers. */
3649 if (TREE_CLOBBER_P (rhs
))
3652 if (BOUNDED_TYPE_P (type
))
3653 handler (lhs
, rhs
, arg
);
3654 else if (RECORD_OR_UNION_TYPE_P (type
))
3658 if (TREE_CODE (rhs
) == CONSTRUCTOR
)
3660 unsigned HOST_WIDE_INT cnt
;
3663 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs
), cnt
, field
, val
)
3665 if (chkp_type_has_pointer (TREE_TYPE (field
)))
3667 tree lhs_field
= chkp_build_component_ref (lhs
, field
);
3668 chkp_walk_pointer_assignments (lhs_field
, val
, arg
, handler
);
3673 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
3674 if (TREE_CODE (field
) == FIELD_DECL
3675 && chkp_type_has_pointer (TREE_TYPE (field
)))
3677 tree rhs_field
= chkp_build_component_ref (rhs
, field
);
3678 tree lhs_field
= chkp_build_component_ref (lhs
, field
);
3679 chkp_walk_pointer_assignments (lhs_field
, rhs_field
, arg
, handler
);
3682 else if (TREE_CODE (type
) == ARRAY_TYPE
)
3684 unsigned HOST_WIDE_INT cur
= 0;
3685 tree maxval
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
3686 tree etype
= TREE_TYPE (type
);
3687 tree esize
= TYPE_SIZE (etype
);
3689 if (TREE_CODE (rhs
) == CONSTRUCTOR
)
3691 unsigned HOST_WIDE_INT cnt
;
3692 tree purp
, val
, lhs_elem
;
3694 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs
), cnt
, purp
, val
)
3696 if (purp
&& TREE_CODE (purp
) == RANGE_EXPR
)
3698 tree lo_index
= TREE_OPERAND (purp
, 0);
3699 tree hi_index
= TREE_OPERAND (purp
, 1);
3701 for (cur
= (unsigned)tree_to_uhwi (lo_index
);
3702 cur
<= (unsigned)tree_to_uhwi (hi_index
);
3705 lhs_elem
= chkp_build_array_ref (lhs
, etype
, esize
, cur
);
3706 chkp_walk_pointer_assignments (lhs_elem
, val
, arg
, handler
);
3713 gcc_assert (TREE_CODE (purp
) == INTEGER_CST
);
3714 cur
= tree_to_uhwi (purp
);
3717 lhs_elem
= chkp_build_array_ref (lhs
, etype
, esize
, cur
++);
3719 chkp_walk_pointer_assignments (lhs_elem
, val
, arg
, handler
);
3723 /* Copy array only when size is known. */
3724 else if (maxval
&& !integer_minus_onep (maxval
))
3725 for (cur
= 0; cur
<= TREE_INT_CST_LOW (maxval
); cur
++)
3727 tree lhs_elem
= chkp_build_array_ref (lhs
, etype
, esize
, cur
);
3728 tree rhs_elem
= chkp_build_array_ref (rhs
, etype
, esize
, cur
);
3729 chkp_walk_pointer_assignments (lhs_elem
, rhs_elem
, arg
, handler
);
3733 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3734 get_tree_code_name (TREE_CODE (type
)));
3737 /* Add code to copy bounds for assignment of RHS to LHS.
3738 ARG is an iterator pointing ne code position. */
3740 chkp_copy_bounds_for_elem (tree lhs
, tree rhs
, void *arg
)
3742 gimple_stmt_iterator
*iter
= (gimple_stmt_iterator
*)arg
;
3743 tree bounds
= chkp_find_bounds (rhs
, iter
);
3744 tree addr
= chkp_build_addr_expr(lhs
);
3746 chkp_build_bndstx (addr
, rhs
, bounds
, iter
);
3749 /* Emit static bound initializers and size vars. */
/* NOTE(review): the text below is line-mangled by extraction and some
   original lines (function header tokens, braces, guard conditions,
   a `tree var;' declaration) are missing from this view.  The visible
   logic: walk the varpool twice, emitting 'P'-priority static ctors
   for bounds of statically initialized pointers, then a 'B'-priority
   ctor for static bounds vars, finally releasing the static maps.  */
3751 chkp_finish_file (void)
3753 struct varpool_node
*node
;
3754 struct chkp_ctor_stmt_list stmts
;
3759 /* Iterate through varpool and generate bounds initialization
3760 constructors for all statically initialized pointers. */
3761 stmts
.avail
= MAX_STMTS_IN_STATIC_CHKP_CTOR
;
/* Presumably `avail' is a budget of statements left for the current
   constructor chunk; when it drops to zero a ctor is flushed and the
   budget reset — TODO confirm against the missing lines.  */
3763 FOR_EACH_VARIABLE (node
)
3764 /* Check that var is actually emitted and we need and may initialize
3766 if (node
->need_bounds_init
3767 && !POINTER_BOUNDS_P (node
->decl
)
3768 && DECL_RTL (node
->decl
)
3769 && MEM_P (DECL_RTL (node
->decl
))
3770 && TREE_ASM_WRITTEN (node
->decl
))
3772 chkp_walk_pointer_assignments (node
->decl
,
3773 DECL_INITIAL (node
->decl
),
3775 chkp_add_modification_to_stmt_list
);
3777 if (stmts
.avail
<= 0)
3779 cgraph_build_static_cdtor ('P', stmts
.stmts
,
3780 MAX_RESERVED_INIT_PRIORITY
+ 3);
3781 stmts
.avail
= MAX_STMTS_IN_STATIC_CHKP_CTOR
;
3787 cgraph_build_static_cdtor ('P', stmts
.stmts
,
3788 MAX_RESERVED_INIT_PRIORITY
+ 3);
3790 /* Iterate through varpool and generate bounds initialization
3791 constructors for all static bounds vars. */
3792 stmts
.avail
= MAX_STMTS_IN_STATIC_CHKP_CTOR
;
/* Second pass: for each emitted static bounds var, its DECL_INITIAL
   must be an ADDR_EXPR of the var whose bounds it describes; output
   the pair into the 'B' constructor list.  */
3794 FOR_EACH_VARIABLE (node
)
3795 if (node
->need_bounds_init
3796 && POINTER_BOUNDS_P (node
->decl
)
3797 && TREE_ASM_WRITTEN (node
->decl
))
3799 tree bnd
= node
->decl
;
3802 gcc_assert (DECL_INITIAL (bnd
)
3803 && TREE_CODE (DECL_INITIAL (bnd
)) == ADDR_EXPR
);
3805 var
= TREE_OPERAND (DECL_INITIAL (bnd
), 0);
3806 chkp_output_static_bounds (bnd
, var
, &stmts
);
3810 cgraph_build_static_cdtor ('B', stmts
.stmts
,
3811 MAX_RESERVED_INIT_PRIORITY
+ 2);
/* Release maps built while emitting static bounds; they are not
   needed after file finalization.  */
3813 delete chkp_static_var_bounds
;
3814 delete chkp_bounds_map
;
/* NOTE(review): this function's text is line-mangled by extraction;
   the original doc comment below is left unterminated and several
   original lines (case labels, braces, early `return's) are missing
   from this view.  The visible logic computes the first/last accessed
   byte addresses for a memory access NODE, emits bndcl/bndcu checks
   when the access is not known safe, and stores bounds when a pointer
   value is written to memory.  */
3817 /* An instrumentation function which is called for each statement
3818 having memory access we want to instrument. It inserts check
3819 code and bounds copy code.
3821 ITER points to statement to instrument.
3823 NODE holds memory access in statement to check.
3825 LOC holds the location information for statement.
3827 DIRFLAGS determines whether access is read or write.
3829 ACCESS_OFFS should be added to address used in NODE
3832 ACCESS_SIZE holds size of checked access.
3834 SAFE indicates if NODE access is safe and should not be
3837 chkp_process_stmt (gimple_stmt_iterator
*iter
, tree node
,
3838 location_t loc
, tree dirflag
,
3839 tree access_offs
, tree access_size
,
3842 tree node_type
= TREE_TYPE (node
);
3843 tree size
= access_size
? access_size
: TYPE_SIZE_UNIT (node_type
);
3844 tree addr_first
= NULL_TREE
; /* address of the first accessed byte */
3845 tree addr_last
= NULL_TREE
; /* address of the last accessed byte */
3846 tree ptr
= NULL_TREE
; /* a pointer used for dereference */
3847 tree bounds
= NULL_TREE
;
3849 /* We do not need instrumentation for clobbers. */
3850 if (dirflag
== integer_one_node
3851 && gimple_code (gsi_stmt (*iter
)) == GIMPLE_ASSIGN
3852 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter
))))
3855 switch (TREE_CODE (node
))
3865 /* We are not going to generate any checks, so do not
3866 generate bounds as well. */
3867 addr_first
= chkp_build_addr_expr (node
);
3871 chkp_parse_array_and_component_ref (node
, &ptr
, &elt
, &safe
,
3872 &bitfield
, &bounds
, iter
, false);
3874 /* Break if there is no dereference and operation is safe. */
3878 tree field
= TREE_OPERAND (node
, 1);
3880 if (TREE_CODE (DECL_SIZE_UNIT (field
)) == INTEGER_CST
)
3881 size
= DECL_SIZE_UNIT (field
);
3884 elt
= chkp_build_addr_expr (elt
);
3885 addr_first
= fold_convert_loc (loc
, ptr_type_node
, elt
? elt
: ptr
);
3886 addr_first
= fold_build_pointer_plus_loc (loc
,
3888 byte_position (field
));
3891 addr_first
= chkp_build_addr_expr (node
);
3896 ptr
= TREE_OPERAND (node
, 0);
3901 ptr
= TREE_OPERAND (node
, 0);
3902 addr_first
= chkp_build_addr_expr (node
);
3905 case TARGET_MEM_REF
:
3906 ptr
= TMR_BASE (node
);
3907 addr_first
= chkp_build_addr_expr (node
);
/* NOTE(review): the printf/debug_gimple_stmt below look like leftover
   debug output (stdout write from the middle of a compiler pass);
   consider removing or converting to a proper diagnostic — TODO
   confirm against the missing surrounding lines.  */
3910 case ARRAY_RANGE_REF
:
3911 printf("ARRAY_RANGE_REF\n");
3912 debug_gimple_stmt(gsi_stmt(*iter
));
3919 tree offs
, rem
, bpu
;
3921 gcc_assert (!access_offs
);
3922 gcc_assert (!access_size
);
3924 bpu
= fold_convert (size_type_node
, bitsize_int (BITS_PER_UNIT
));
3925 offs
= fold_convert (size_type_node
, TREE_OPERAND (node
, 2));
3926 rem
= size_binop_loc (loc
, TRUNC_MOD_EXPR
, offs
, bpu
);
3927 offs
= size_binop_loc (loc
, TRUNC_DIV_EXPR
, offs
, bpu
);
3929 size
= fold_convert (size_type_node
, TREE_OPERAND (node
, 1));
3930 size
= size_binop_loc (loc
, PLUS_EXPR
, size
, rem
);
3931 size
= size_binop_loc (loc
, CEIL_DIV_EXPR
, size
, bpu
);
3932 size
= fold_convert (size_type_node
, size
);
/* Bit-field-like access: recurse on the underlying object with a
   byte-granular offset and size covering the accessed bits.  */
3934 chkp_process_stmt (iter
, TREE_OPERAND (node
, 0), loc
,
3935 dirflag
, offs
, size
, safe
);
3943 if (dirflag
!= integer_one_node
3944 || DECL_REGISTER (node
))
3948 addr_first
= chkp_build_addr_expr (node
);
3955 /* If addr_last was not computed then use (addr_first + size - 1)
3956 expression to compute it. */
3959 addr_last
= fold_build_pointer_plus_loc (loc
, addr_first
, size
);
3960 addr_last
= fold_build_pointer_plus_hwi_loc (loc
, addr_last
, -1);
3963 /* Shift both first_addr and last_addr by access_offs if specified. */
3966 addr_first
= fold_build_pointer_plus_loc (loc
, addr_first
, access_offs
);
3967 addr_last
= fold_build_pointer_plus_loc (loc
, addr_last
, access_offs
);
3970 /* Generate bndcl/bndcu checks if memory access is not safe. */
3973 gimple_stmt_iterator stmt_iter
= *iter
;
3976 bounds
= chkp_find_bounds (ptr
, iter
);
3978 chkp_check_mem_access (addr_first
, addr_last
, bounds
,
3979 stmt_iter
, loc
, dirflag
);
3982 /* We need to store bounds in case pointer is stored. */
3983 if (dirflag
== integer_one_node
3984 && chkp_type_has_pointer (node_type
)
3985 && flag_chkp_store_bounds
)
3987 gimple stmt
= gsi_stmt (*iter
);
3988 tree rhs1
= gimple_assign_rhs1 (stmt
);
3989 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3991 if (get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
)
3992 chkp_walk_pointer_assignments (node
, rhs1
, iter
,
3993 chkp_copy_bounds_for_elem
);
3996 bounds
= chkp_compute_bounds_for_assignment (NULL_TREE
, stmt
);
3997 chkp_build_bndstx (addr_first
, rhs1
, bounds
, iter
);
/* NOTE(review): text is line-mangled by extraction; the trailing
   arguments of create_edge (original lines 4032-4033) and closing
   braces are missing from this view.  */
4002 /* Add code to copy bounds for all pointers copied
4003 in ASSIGN created during inline of EDGE. */
4005 chkp_copy_bounds_for_assign (gimple assign
, struct cgraph_edge
*edge
)
4007 tree lhs
= gimple_assign_lhs (assign
);
4008 tree rhs
= gimple_assign_rhs1 (assign
);
4009 gimple_stmt_iterator iter
= gsi_for_stmt (assign
);
/* Nothing to do unless bounds stores are enabled.  */
4011 if (!flag_chkp_store_bounds
)
4014 chkp_walk_pointer_assignments (lhs
, rhs
, &iter
, chkp_copy_bounds_for_elem
);
4016 /* We should create edges for all created calls to bndldx and bndstx. */
/* The walk above inserted bndldx/bndstx calls before ASSIGN; iterate
   from the insertion point back up to ASSIGN and create call-graph
   edges for each new call.  */
4017 while (gsi_stmt (iter
) != assign
)
4019 gimple stmt
= gsi_stmt (iter
);
4020 if (gimple_code (stmt
) == GIMPLE_CALL
)
4022 tree fndecl
= gimple_call_fndecl (stmt
);
4023 struct cgraph_node
*callee
= cgraph_node::get_create (fndecl
);
4024 struct cgraph_edge
*new_edge
;
4026 gcc_assert (fndecl
== chkp_bndstx_fndecl
4027 || fndecl
== chkp_bndldx_fndecl
4028 || fndecl
== chkp_ret_bnd_fndecl
);
4030 new_edge
= edge
->caller
->create_edge (callee
,
4031 as_a
<gcall
*> (stmt
),
/* NOTE(review): the remaining create_edge arguments (presumably
   edge->count and a frequency) are among the missing lines.  */
4034 new_edge
->frequency
= compute_call_stmt_bb_frequency
4035 (edge
->caller
->decl
, gimple_bb (stmt
));
/* NOTE(review): the function header is among the missing lines —
   presumably this is the CFG fix-up helper (chkp_fix_cfg) run after
   instrumentation; the doc comment below is left unterminated by the
   extraction, so everything through "...after split. */" reads as one
   comment in this mangled text.  TODO confirm name and signature.  */
4041 /* Some code transformation made during instrumentation pass
4042 may put code into inconsistent state. Here we find and fix
4048 gimple_stmt_iterator i
;
4050 /* We could insert some code right after stmt which ends bb.
4051 We wanted to put this code on fallthru edge but did not
4052 add new edges from the beginning because it may cause new
4053 phi node creation which may be incorrect due to incomplete
4055 FOR_ALL_BB_FN (bb
, cfun
)
4056 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
4058 gimple stmt
= gsi_stmt (i
);
4059 gimple_stmt_iterator next
= i
;
4063 if (stmt_ends_bb_p (stmt
)
4064 && !gsi_end_p (next
))
4066 edge fall
= find_fallthru_edge (bb
->succs
);
4067 basic_block dest
= NULL
;
4072 /* We cannot split abnormal edge. Therefore we
4073 store its params, make it regular and then
4074 rebuild abnormal edge after split. */
4075 if (fall
->flags
& EDGE_ABNORMAL
)
4077 flags
= fall
->flags
& ~EDGE_FALLTHRU
;
4080 fall
->flags
&= ~EDGE_COMPLEX
;
/* Move every statement that follows the block-ending statement onto
   the fallthru edge; committing the edge inserts splits the edge.  */
4083 while (!gsi_end_p (next
))
4085 gimple next_stmt
= gsi_stmt (next
);
4086 gsi_remove (&next
, false);
4087 gsi_insert_on_edge (fall
, next_stmt
);
4090 gsi_commit_edge_inserts ();
4092 /* Re-create abnormal edge. */
4094 make_edge (bb
, dest
, flags
);
4099 /* Walker callback for chkp_replace_function_pointers. Replaces
4100 function pointer in the specified operand with pointer to the
4101 instrumented function version. */
4103 chkp_replace_function_pointer (tree
*op
, int *walk_subtrees
,
4104 void *data ATTRIBUTE_UNUSED
)
/* Only FUNCTION_DECL operands of instrumentable functions qualify;
   normal builtins qualify only when selected for instrumentation or
   when a body is available.  */
4106 if (TREE_CODE (*op
) == FUNCTION_DECL
4107 && chkp_instrumentable_p (*op
)
4108 && (DECL_BUILT_IN_CLASS (*op
) == NOT_BUILT_IN
4109 /* For builtins we replace pointers only for selected
4110 function and functions having definitions. */
4111 || (DECL_BUILT_IN_CLASS (*op
) == BUILT_IN_NORMAL
4112 && (chkp_instrument_normal_builtin (*op
)
4113 || gimple_has_body_p (*op
)))))
4115 struct cgraph_node
*node
= cgraph_node::get_create (*op
);
4116 struct cgraph_node
*clone
= NULL
;
4118 if (!node
->instrumentation_clone
)
4119 clone
= chkp_maybe_create_clone (*op
);
/* NOTE(review): the tail of this callback (original lines 4120-4127 —
   presumably the substitution of *op with the clone's decl and the
   walker's return value) is missing from this view.  */
4129 /* This function searches for function pointers in statement
4130 pointed by GSI and replaces them with pointers to instrumented
4131 function versions. */
4133 chkp_replace_function_pointers (gimple_stmt_iterator
*gsi
)
4135 gimple stmt
= gsi_stmt (*gsi
);
4136 /* For calls we want to walk call args only. */
4137 if (gimple_code (stmt
) == GIMPLE_CALL
)
4140 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
4141 walk_tree (gimple_call_arg_ptr (stmt
, i
),
4142 chkp_replace_function_pointer
, NULL
, NULL
);
4145 walk_gimple_stmt (gsi
, NULL
, chkp_replace_function_pointer
, NULL
);
/* NOTE(review): text is line-mangled by extraction; case labels of the
   switch (presumably GIMPLE_ASSIGN / GIMPLE_RETURN / GIMPLE_CALL),
   braces, `break's and the bb-advance logic are among the missing
   lines.  The visible logic: per-statement instrumentation of all
   basic blocks, then bndstx stores of incoming bounds for
   address-taken parameters.  */
4148 /* This function instruments all statements working with memory,
4151 It also removes excess statements from static initializers. */
4153 chkp_instrument_function (void)
4155 basic_block bb
, next
;
4156 gimple_stmt_iterator i
;
4157 enum gimple_rhs_class grhs_class
;
4158 bool safe
= lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun
->decl
));
4160 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
;
4164 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
4166 gimple s
= gsi_stmt (i
);
4168 /* Skip statement marked to not be instrumented. */
4169 if (chkp_marked_stmt_p (s
))
4175 chkp_replace_function_pointers (&i
);
/* Dispatch by statement kind: stores are processed with
   integer_one_node (write), loads with integer_zero_node (read) —
   the case labels themselves are among the missing lines.  */
4177 switch (gimple_code (s
))
4180 chkp_process_stmt (&i
, gimple_assign_lhs (s
),
4181 gimple_location (s
), integer_one_node
,
4182 NULL_TREE
, NULL_TREE
, safe
);
4183 chkp_process_stmt (&i
, gimple_assign_rhs1 (s
),
4184 gimple_location (s
), integer_zero_node
,
4185 NULL_TREE
, NULL_TREE
, safe
);
4186 grhs_class
= get_gimple_rhs_class (gimple_assign_rhs_code (s
));
4187 if (grhs_class
== GIMPLE_BINARY_RHS
)
4188 chkp_process_stmt (&i
, gimple_assign_rhs2 (s
),
4189 gimple_location (s
), integer_zero_node
,
4190 NULL_TREE
, NULL_TREE
, safe
);
4195 greturn
*r
= as_a
<greturn
*> (s
);
4196 if (gimple_return_retval (r
) != NULL_TREE
)
4198 chkp_process_stmt (&i
, gimple_return_retval (r
),
4199 gimple_location (r
),
4201 NULL_TREE
, NULL_TREE
, safe
);
4203 /* Additionally we need to add bounds
4204 to return statement. */
4205 chkp_add_bounds_to_ret_stmt (&i
);
4211 chkp_add_bounds_to_call_stmt (&i
);
4220 /* We do not need any actual pointer stores in checker
4221 static initializer. */
4222 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun
->decl
))
4223 && gimple_code (s
) == GIMPLE_ASSIGN
4224 && gimple_store_p (s
))
4226 gimple_stmt_iterator del_iter
= gsi_for_stmt (s
);
4227 gsi_remove (&del_iter
, true);
4228 unlink_stmt_vdef (s
);
4236 /* Some input params may have bounds and be address taken. In this case
4237 we should store incoming bounds into bounds table. */
/* For each address-taken parameter, emit bndstx in the entry block:
   either for the parameter itself (BOUNDED_P), or for every pointer
   slot of an aggregate parameter.  */
4239 if (flag_chkp_store_bounds
)
4240 for (arg
= DECL_ARGUMENTS (cfun
->decl
); arg
; arg
= DECL_CHAIN (arg
))
4241 if (TREE_ADDRESSABLE (arg
))
4243 if (BOUNDED_P (arg
))
4245 tree bounds
= chkp_get_next_bounds_parm (arg
);
4246 tree def_ptr
= ssa_default_def (cfun
, arg
);
4247 gimple_stmt_iterator iter
4248 = gsi_start_bb (chkp_get_entry_block ());
4249 chkp_build_bndstx (chkp_build_addr_expr (arg
),
4250 def_ptr
? def_ptr
: arg
,
4253 /* Skip bounds arg. */
4254 arg
= TREE_CHAIN (arg
);
4256 else if (chkp_type_has_pointer (TREE_TYPE (arg
)))
4258 tree orig_arg
= arg
;
4259 bitmap slots
= BITMAP_ALLOC (NULL
);
4260 gimple_stmt_iterator iter
4261 = gsi_start_bb (chkp_get_entry_block ());
4265 chkp_find_bound_slots (TREE_TYPE (arg
), slots
);
4267 EXECUTE_IF_SET_IN_BITMAP (slots
, 0, bnd_no
, bi
)
4269 tree bounds
= chkp_get_next_bounds_parm (arg
);
4270 HOST_WIDE_INT offs
= bnd_no
* POINTER_SIZE
/ BITS_PER_UNIT
;
4271 tree addr
= chkp_build_addr_expr (orig_arg
);
4272 tree ptr
= build2 (MEM_REF
, ptr_type_node
, addr
,
4273 build_int_cst (ptr_type_node
, offs
));
4274 chkp_build_bndstx (chkp_build_addr_expr (ptr
), ptr
,
4277 arg
= DECL_CHAIN (arg
);
4279 BITMAP_FREE (slots
);
/* NOTE(review): text is line-mangled by extraction; the `basic_block
   bb;' and `tree fndecl;' declarations, braces, and the tail of the
   replacement (presumably update_stmt) are among the missing lines.
   The first comment below is left unterminated, so everything through
   "...with assignments. */" reads as one comment in this text.  */
4284 /* Find init/null/copy_ptr_bounds calls and replace them
4285 with assignments. It should allow better code
4289 chkp_remove_useless_builtins ()
4292 gimple_stmt_iterator gsi
;
4294 FOR_EACH_BB_FN (bb
, cfun
)
4296 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4298 gimple stmt
= gsi_stmt (gsi
);
4300 enum built_in_function fcode
;
4302 /* Find builtins returning first arg and replace
4303 them with assignments. */
/* Note the deliberate embedded assignments in this condition: fndecl
   and fcode are set as side effects of evaluating the guards.  */
4304 if (gimple_code (stmt
) == GIMPLE_CALL
4305 && (fndecl
= gimple_call_fndecl (stmt
))
4306 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
4307 && (fcode
= DECL_FUNCTION_CODE (fndecl
))
4308 && (fcode
== BUILT_IN_CHKP_INIT_PTR_BOUNDS
4309 || fcode
== BUILT_IN_CHKP_NULL_PTR_BOUNDS
4310 || fcode
== BUILT_IN_CHKP_COPY_PTR_BOUNDS
4311 || fcode
== BUILT_IN_CHKP_SET_PTR_BOUNDS
))
4313 tree res
= gimple_call_arg (stmt
, 0);
4314 update_call_from_tree (&gsi
, res
);
4315 stmt
= gsi_stmt (gsi
);
4322 /* Initialize pass. */
/* NOTE(review): the function header is among the missing lines —
   presumably `static void chkp_init (void)' plus a `basic_block bb;'
   declaration; TODO confirm.  Resets per-function pass state: unmark
   all statements, (re)allocate the bounds maps/sets, reset cached
   bounds vars, and compute dominance info.  */
4327 gimple_stmt_iterator i
;
4329 in_chkp_pass
= true;
4331 for (bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
; bb
; bb
= bb
->next_bb
)
4332 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
4333 chkp_unmark_stmt (gsi_stmt (i
));
/* Fresh containers for this function; some (chkp_reg_bounds,
   chkp_bound_vars, chkp_bounds_map) are deleted first because they
   may survive from a previous function.  */
4335 chkp_invalid_bounds
= new hash_set
<tree
>;
4336 chkp_completed_bounds_set
= new hash_set
<tree
>;
4337 delete chkp_reg_bounds
;
4338 chkp_reg_bounds
= new hash_map
<tree
, tree
>;
4339 delete chkp_bound_vars
;
4340 chkp_bound_vars
= new hash_map
<tree
, tree
>;
4341 chkp_reg_addr_bounds
= new hash_map
<tree
, tree
>;
4342 chkp_incomplete_bounds_map
= new hash_map
<tree
, tree
>;
4343 delete chkp_bounds_map
;
4344 chkp_bounds_map
= new hash_map
<tree
, tree
>;
4345 chkp_abnormal_copies
= BITMAP_GGC_ALLOC ();
4348 zero_bounds
= NULL_TREE
;
4349 none_bounds
= NULL_TREE
;
4350 incomplete_bounds
= integer_zero_node
;
4351 tmp_var
= NULL_TREE
;
4352 size_tmp_var
= NULL_TREE
;
4354 chkp_uintptr_type
= lang_hooks
.types
.type_for_mode (ptr_mode
, true);
4356 /* We create these constant bounds once for each object file.
4357 These symbols go to comdat section and result in single copy
4358 of each one in the final binary. */
4359 chkp_get_zero_bounds_var ();
4360 chkp_get_none_bounds_var ();
4362 calculate_dominance_info (CDI_DOMINATORS
);
4363 calculate_dominance_info (CDI_POST_DOMINATORS
);
4365 bitmap_obstack_initialize (NULL
);
4368 /* Finalize instrumentation pass. */
/* NOTE(review): the function header (presumably `static void
   chkp_fini (void)') and at least one statement around original line
   4384 are among the missing lines.  Mirrors chkp_init: releases the
   per-function containers and dominance info allocated there.  */
4372 in_chkp_pass
= false;
4374 delete chkp_invalid_bounds
;
4375 delete chkp_completed_bounds_set
;
4376 delete chkp_reg_addr_bounds
;
4377 delete chkp_incomplete_bounds_map
;
4379 free_dominance_info (CDI_DOMINATORS
);
4380 free_dominance_info (CDI_POST_DOMINATORS
);
4382 bitmap_obstack_release (NULL
);
4385 zero_bounds
= NULL_TREE
;
4386 none_bounds
= NULL_TREE
;
4389 /* Main instrumentation pass function. */
/* NOTE(review): the function header (presumably `static unsigned int
   chkp_execute (void)') and the surrounding calls — init before and
   CFG fix-up / finalization / `return 0;' after — are among the
   missing lines; only the three central steps are visible.  */
4395 chkp_instrument_function ();
4397 chkp_remove_useless_builtins ();
4399 chkp_function_mark_instrumented (cfun
->decl
);
4408 /* Instrumentation pass gate. */
4412 cgraph_node
*node
= cgraph_node::get (cfun
->decl
);
4413 return ((node
!= NULL
4414 && node
->instrumentation_clone
)
4415 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun
->decl
)));
/* Pass descriptor for the Pointer Bounds Checker GIMPLE pass.
   NOTE(review): the pass-name field (original line 4421, presumably
   "chkp") and one additional field line (4423) are among the missing
   lines of this mangled text.  */
4420 const pass_data pass_data_chkp
=
4422 GIMPLE_PASS
, /* type */
4424 OPTGROUP_NONE
, /* optinfo_flags */
4425 TV_NONE
, /* tv_id */
4426 PROP_ssa
| PROP_cfg
, /* properties_required */
4427 0, /* properties_provided */
4428 0, /* properties_destroyed */
4429 0, /* todo_flags_start */
/* NOTE(review): the first finish-flag term (original line 4430) is
   missing; TODO confirm which TODO_* flag precedes the `|' below.  */
4431 | TODO_update_ssa
/* todo_flags_finish */
4434 class pass_chkp
: public gimple_opt_pass
4437 pass_chkp (gcc::context
*ctxt
)
4438 : gimple_opt_pass (pass_data_chkp
, ctxt
)
4441 /* opt_pass methods: */
4442 virtual opt_pass
* clone ()
4444 return new pass_chkp (m_ctxt
);
4447 virtual bool gate (function
*)
4449 return chkp_gate ();
4452 virtual unsigned int execute (function
*)
4454 return chkp_execute ();
4457 }; // class pass_chkp
4462 make_pass_chkp (gcc::context
*ctxt
)
4464 return new pass_chkp (ctxt
);
4467 #include "gt-tree-chkp.h"