runtime: set library name based on compiler name
[official-gcc.git] / gcc / tree-chkp.c
blobdf7d425fe66bdb0cf70458158fd061256c39fc4f
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree-core.h"
25 #include "stor-layout.h"
26 #include "varasm.h"
27 #include "tree.h"
28 #include "target.h"
29 #include "tree-iterator.h"
30 #include "tree-cfg.h"
31 #include "langhooks.h"
32 #include "tree-pass.h"
33 #include "diagnostic.h"
34 #include "ggc.h"
35 #include "is-a.h"
36 #include "cfgloop.h"
37 #include "stringpool.h"
38 #include "tree-ssa-alias.h"
39 #include "tree-ssanames.h"
40 #include "tree-ssa-operands.h"
41 #include "tree-ssa-address.h"
42 #include "tree-ssa.h"
43 #include "predict.h"
44 #include "dominance.h"
45 #include "cfg.h"
46 #include "basic-block.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "gimple-expr.h"
49 #include "gimple.h"
50 #include "tree-phinodes.h"
51 #include "gimple-ssa.h"
52 #include "ssa-iterators.h"
53 #include "gimple-pretty-print.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "gimplify-me.h"
57 #include "print-tree.h"
58 #include "expr.h"
59 #include "tree-ssa-propagate.h"
60 #include "gimple-fold.h"
61 #include "tree-chkp.h"
62 #include "gimple-walk.h"
63 #include "rtl.h" /* For MEM_P, assign_temp. */
64 #include "tree-dfa.h"
65 #include "ipa-ref.h"
66 #include "lto-streamer.h"
67 #include "cgraph.h"
68 #include "ipa-chkp.h"
69 #include "params.h"
70 #include "ipa-chkp.h"
71 #include "params.h"
73 /* Pointer Bounds Checker instruments code with memory checks to find
74 out-of-bounds memory accesses. Checks are performed by computing
75 bounds for each pointer and then comparing address of accessed
76 memory before pointer dereferencing.
78 1. Function clones.
80 See ipa-chkp.c.
82 2. Instrumentation.
84 There are few things to instrument:
86 a) Memory accesses - add checker calls to check address of accessed memory
87 against bounds of dereferenced pointer. Obviously safe memory
88 accesses like static variable access does not have to be instrumented
89 with checks.
91 Example:
93 val_2 = *p_1;
95 with 4 bytes access is transformed into:
97 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
98 D.1_4 = p_1 + 3;
99 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
100 val_2 = *p_1;
102 where __bound_tmp.1_3 are bounds computed for pointer p_1,
103 __builtin___chkp_bndcl is a lower bound check and
104 __builtin___chkp_bndcu is an upper bound check.
106 b) Pointer stores.
108 When pointer is stored in memory we need to store its bounds. To
109 achieve compatibility of instrumented code with regular codes
110 we have to keep data layout and store bounds in special bound tables
111 via special checker call. Implementation of bounds table may vary for
112 different platforms. It has to associate pointer value and its
113 location (it is required because we may have two equal pointers
114 with different bounds stored in different places) with bounds.
115 Another checker builtin allows to get bounds for specified pointer
116 loaded from specified location.
118 Example:
120 buf1[i_1] = &buf2;
122 is transformed into:
124 buf1[i_1] = &buf2;
125 D.1_2 = &buf1[i_1];
126 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
128 where __bound_tmp.1_2 are bounds of &buf2.
130 c) Static initialization.
132 The special case of pointer store is static pointer initialization.
133 Bounds initialization is performed in a few steps:
134 - register all static initializations in front-end using
135 chkp_register_var_initializer
136 - when file compilation finishes we create functions with special
137 attribute 'chkp ctor' and put explicit initialization code
138 (assignments) for all statically initialized pointers.
139 - when checker constructor is compiled checker pass adds required
140 bounds initialization for all statically initialized pointers
141 - since we do not actually need excess pointers initialization
142 in checker constructor we remove such assignments from them
144 d) Calls.
146 For each call in the code we add additional arguments to pass
147 bounds for pointer arguments. We determine type of call arguments
148 using arguments list from function declaration; if function
149 declaration is not available we use function type; otherwise
150 (e.g. for unnamed arguments) we use type of passed value. Function
151 declaration/type is replaced with the instrumented one.
153 Example:
155 val_1 = foo (&buf1, &buf2, &buf1, 0);
157 is translated into:
159 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
160 &buf1, __bound_tmp.1_2, 0);
162 e) Returns.
164 If function returns a pointer value we have to return bounds also.
165 A new operand was added for return statement to hold returned bounds.
167 Example:
169 return &_buf1;
171 is transformed into
173 return &_buf1, __bound_tmp.1_1;
175 3. Bounds computation.
177 Compiler is fully responsible for computing bounds to be used for each
178 memory access. The first step for bounds computation is to find the
179 origin of pointer dereferenced for memory access. Basing on pointer
180 origin we define a way to compute its bounds. There are just few
181 possible cases:
183 a) Pointer is returned by call.
185 In this case we use corresponding checker builtin method to obtain returned
186 bounds.
188 Example:
190 buf_1 = malloc (size_2);
191 foo (buf_1);
193 is translated into:
195 buf_1 = malloc (size_2);
196 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
197 foo (buf_1, __bound_tmp.1_3);
199 b) Pointer is an address of an object.
201 In this case compiler tries to compute objects size and create corresponding
202 bounds. If object has incomplete type then special checker builtin is used to
203 obtain its size at runtime.
205 Example:
207 foo ()
209 <unnamed type> __bound_tmp.3;
210 static int buf[100];
212 <bb 3>:
213 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
215 <bb 2>:
216 return &buf, __bound_tmp.3_2;
219 Example:
221 Address of an object 'extern int buf[]' with incomplete type is
222 returned.
224 foo ()
226 <unnamed type> __bound_tmp.4;
227 long unsigned int __size_tmp.3;
229 <bb 3>:
230 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
231 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
233 <bb 2>:
234 return &buf, __bound_tmp.4_3;
237 c) Pointer is the result of object narrowing.
239 It happens when we use pointer to an object to compute pointer to a part
240 of an object. E.g. we take pointer to a field of a structure. In this
241 case we perform bounds intersection using bounds of original object and
242 bounds of object's part (which are computed basing on its type).
244 There may be some debatable questions about when narrowing should occur
245 and when it should not. To avoid false bound violations in correct
246 programs we do not perform narrowing when address of an array element is
247 obtained (it has address of the whole array) and when address of the first
248 structure field is obtained (because it is guaranteed to be equal to
249 address of the whole structure and it is legal to cast it back to structure).
251 Default narrowing behavior may be changed using compiler flags.
253 Example:
255 In this example address of the second structure field is returned.
257 foo (struct A * p, __bounds_type __bounds_of_p)
259 <unnamed type> __bound_tmp.3;
260 int * _2;
261 int * _5;
263 <bb 2>:
264 _5 = &p_1(D)->second_field;
265 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
266 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
267 __bounds_of_p_3(D));
268 _2 = &p_1(D)->second_field;
269 return _2, __bound_tmp.3_8;
272 Example:
274 In this example address of the first field of array element is returned.
276 foo (struct A * p, __bounds_type __bounds_of_p, int i)
278 long unsigned int _3;
279 long unsigned int _4;
280 struct A * _6;
281 int * _7;
283 <bb 2>:
284 _3 = (long unsigned int) i_1(D);
285 _4 = _3 * 8;
286 _6 = p_5(D) + _4;
287 _7 = &_6->first_field;
288 return _7, __bounds_of_p_2(D);
292 d) Pointer is the result of pointer arithmetic or type cast.
294 In this case bounds of the base pointer are used. In case of binary
295 operation producing a pointer we are analyzing data flow further
296 looking for operand's bounds. One operand is considered as a base
297 if it has some valid bounds. If we fall into a case when none of
298 operands (or both of them) has valid bounds, a default bounds value
299 is used.
301 Trying to find out bounds for binary operations we may fall into
302 cyclic dependencies for pointers. To avoid infinite recursion all
303 walked phi nodes instantly obtain corresponding bounds but created
304 bounds are marked as incomplete. It helps us to stop DF walk during
305 bounds search.
307 When we reach pointer source, some args of incomplete bounds phi obtain
308 valid bounds and those values are propagated further through phi nodes.
309 If no valid bounds were found for phi node then we mark its result as
310 invalid bounds. Process stops when all incomplete bounds become either
311 valid or invalid and we are able to choose a pointer base.
313 e) Pointer is loaded from the memory.
315 In this case we just need to load bounds from the bounds table.
317 Example:
319 foo ()
321 <unnamed type> __bound_tmp.3;
322 static int * buf;
323 int * _2;
325 <bb 2>:
326 _2 = buf;
327 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
328 return _2, __bound_tmp.3_4;
333 typedef void (*assign_handler)(tree, tree, void *);
335 static tree chkp_get_zero_bounds ();
336 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
337 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
338 gimple_stmt_iterator *iter);
339 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
340 tree *elt, bool *safe,
341 bool *bitfield,
342 tree *bounds,
343 gimple_stmt_iterator *iter,
344 bool innermost_bounds);
346 #define chkp_bndldx_fndecl \
347 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
348 #define chkp_bndstx_fndecl \
349 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
350 #define chkp_checkl_fndecl \
351 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
352 #define chkp_checku_fndecl \
353 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
354 #define chkp_bndmk_fndecl \
355 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
356 #define chkp_ret_bnd_fndecl \
357 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
358 #define chkp_intersect_fndecl \
359 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
360 #define chkp_narrow_bounds_fndecl \
361 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
362 #define chkp_sizeof_fndecl \
363 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
364 #define chkp_extract_lower_fndecl \
365 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
366 #define chkp_extract_upper_fndecl \
367 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
369 static GTY (()) tree chkp_uintptr_type;
371 static GTY (()) tree chkp_zero_bounds_var;
372 static GTY (()) tree chkp_none_bounds_var;
374 static GTY (()) basic_block entry_block;
375 static GTY (()) tree zero_bounds;
376 static GTY (()) tree none_bounds;
377 static GTY (()) tree incomplete_bounds;
378 static GTY (()) tree tmp_var;
379 static GTY (()) tree size_tmp_var;
380 static GTY (()) bitmap chkp_abnormal_copies;
382 struct hash_set<tree> *chkp_invalid_bounds;
383 struct hash_set<tree> *chkp_completed_bounds_set;
384 struct hash_map<tree, tree> *chkp_reg_bounds;
385 struct hash_map<tree, tree> *chkp_bound_vars;
386 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
387 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
388 struct hash_map<tree, tree> *chkp_bounds_map;
389 struct hash_map<tree, tree> *chkp_static_var_bounds;
391 static bool in_chkp_pass;
393 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
394 #define CHKP_SIZE_TMP_NAME "__size_tmp"
395 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
396 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
397 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
398 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
399 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
401 /* Static checker constructors may become very large and their
402 compilation with optimization may take too much time.
403 Therefore we put a limit to number of statements in one
404 constructor. Tests with 100 000 statically initialized
405 pointers showed following compilation times on Sandy Bridge
406 server (used -O2):
407 limit 100 => ~18 sec.
408 limit 300 => ~22 sec.
409 limit 1000 => ~30 sec.
410 limit 3000 => ~49 sec.
411 limit 5000 => ~55 sec.
412 limit 10000 => ~76 sec.
413 limit 100000 => ~532 sec. */
414 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
/* A statement list being accumulated for one static-bounds
   initialization constructor, together with the number of
   statements still allowed before the constructor must be
   emitted (see MAX_STMTS_IN_STATIC_CHKP_CTOR above).  */
struct chkp_ctor_stmt_list
{
  tree stmts;	/* Statement list for the constructor body.  */
  int avail;	/* Remaining statement budget; emit when <= 0.  */
};
/* Return 1 if function FNDECL is instrumented by Pointer
   Bounds Checker.  Instrumented functions are recognized by
   the "chkp instrumented" attribute set by
   chkp_function_mark_instrumented.  */
bool
chkp_function_instrumented_p (tree fndecl)
{
  return fndecl
	 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
}
/* Mark function FNDECL as instrumented by attaching the
   "chkp instrumented" attribute.  Idempotent: does nothing
   if FNDECL is already marked.  */
void
chkp_function_mark_instrumented (tree fndecl)
{
  if (chkp_function_instrumented_p (fndecl))
    return;

  DECL_ATTRIBUTES (fndecl)
    = tree_cons (get_identifier ("chkp instrumented"), NULL,
		 DECL_ATTRIBUTES (fndecl));
}
/* Return true when STMT is builtin call to instrumentation function
   corresponding to CODE.  The comparison is by fndecl identity with
   the target-provided checker builtin, so a NULL target builtin
   makes this return false.  */
bool
chkp_gimple_call_builtin_p (gimple call,
			    enum built_in_function code)
{
  tree fndecl;
  if (is_gimple_call (call)
      && (fndecl = targetm.builtin_chkp_function (code))
      && gimple_call_fndecl (call) == fndecl)
    return true;
  return false;
}
/* Emit code to store zero bounds for PTR located at MEM.
   Expands (RTL expansion context) an assignment of zero bounds
   into a bounds temporary followed by a bndstx storing them for
   PTR at &MEM.  */
void
chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
{
  tree zero_bnd, bnd, addr, bndstx;

  /* Either reuse the static zero-bounds variable or build an
     explicit bndmk (0, 0) call depending on the flag.  */
  if (flag_chkp_use_static_const_bounds)
    zero_bnd = chkp_get_zero_bounds_var ();
  else
    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
					    integer_zero_node);
  bnd = make_tree (pointer_bounds_type_node,
		   assign_temp (pointer_bounds_type_node, 0, 1));
  addr = build1 (ADDR_EXPR,
		 build_pointer_type (TREE_TYPE (mem)), mem);
  bndstx = chkp_build_bndstx_call (addr, ptr, bnd);

  expand_assignment (bnd, zero_bnd, false);
  expand_normal (bndstx);
}
/* Mark statement S to not be instrumented.  Uses the GF_PLF_1
   pass-local flag as the marker.  */
static void
chkp_mark_stmt (gimple s)
{
  gimple_set_plf (s, GF_PLF_1, true);
}

/* Mark statement S to be instrumented (clear the marker).  */
static void
chkp_unmark_stmt (gimple s)
{
  gimple_set_plf (s, GF_PLF_1, false);
}

/* Return 1 if statement S should not be instrumented.  */
static bool
chkp_marked_stmt_p (gimple s)
{
  return gimple_plf (s, GF_PLF_1);
}
/* Get var to be used for bound temps.  Created lazily and
   cached in the file-scope TMP_VAR.  */
static tree
chkp_get_tmp_var (void)
{
  if (!tmp_var)
    tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);

  return tmp_var;
}

/* Get SSA_NAME to be used as temp for bounds defined by STMT.
   Inside the checker pass the shared bound temp var is reused;
   outside it an anonymous temp SSA name is created instead.  */
static tree
chkp_get_tmp_reg (gimple stmt)
{
  if (in_chkp_pass)
    return make_ssa_name (chkp_get_tmp_var (), stmt);

  return make_temp_ssa_name (pointer_bounds_type_node, stmt,
			     CHKP_BOUND_TMP_NAME);
}

/* Get var to be used for size temps.  Created lazily and
   cached in the file-scope SIZE_TMP_VAR.  */
static tree
chkp_get_size_tmp_var (void)
{
  if (!size_tmp_var)
    size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);

  return size_tmp_var;
}
531 /* Register bounds BND for address of OBJ. */
532 static void
533 chkp_register_addr_bounds (tree obj, tree bnd)
535 if (bnd == incomplete_bounds)
536 return;
538 chkp_reg_addr_bounds->put (obj, bnd);
540 if (dump_file && (dump_flags & TDF_DETAILS))
542 fprintf (dump_file, "Regsitered bound ");
543 print_generic_expr (dump_file, bnd, 0);
544 fprintf (dump_file, " for address of ");
545 print_generic_expr (dump_file, obj, 0);
546 fprintf (dump_file, "\n");
/* Return bounds registered for address of OBJ via
   chkp_register_addr_bounds, or NULL_TREE if none.  */
static tree
chkp_get_registered_addr_bounds (tree obj)
{
  tree *slot = chkp_reg_addr_bounds->get (obj);
  return slot ? *slot : NULL_TREE;
}
/* Mark BOUNDS as completed (valid bounds have been found for
   all sources feeding them).  */
static void
chkp_mark_completed_bounds (tree bounds)
{
  chkp_completed_bounds_set->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " as completed\n");
    }
}

/* Return 1 if BOUNDS were marked as completed and 0 otherwise.  */
static bool
chkp_completed_bounds (tree bounds)
{
  return chkp_completed_bounds_set->contains (bounds);
}

/* Clear completed bound marks by recreating the set.  */
static void
chkp_erase_completed_bounds (void)
{
  delete chkp_completed_bounds_set;
  chkp_completed_bounds_set = new hash_set<tree>;
}
587 /* Mark BOUNDS associated with PTR as incomplete. */
588 static void
589 chkp_register_incomplete_bounds (tree bounds, tree ptr)
591 chkp_incomplete_bounds_map->put (bounds, ptr);
593 if (dump_file && (dump_flags & TDF_DETAILS))
595 fprintf (dump_file, "Regsitered incomplete bounds ");
596 print_generic_expr (dump_file, bounds, 0);
597 fprintf (dump_file, " for ");
598 print_generic_expr (dump_file, ptr, 0);
599 fprintf (dump_file, "\n");
/* Return 1 if BOUNDS are incomplete and 0 otherwise.
   The sentinel incomplete_bounds value is always incomplete;
   a completed mark overrides membership in the incomplete map.  */
static bool
chkp_incomplete_bounds (tree bounds)
{
  if (bounds == incomplete_bounds)
    return true;

  if (chkp_completed_bounds (bounds))
    return false;

  return chkp_incomplete_bounds_map->get (bounds) != NULL;
}

/* Clear incomplete bound marks by recreating the map.  */
static void
chkp_erase_incomplete_bounds (void)
{
  delete chkp_incomplete_bounds_map;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
}
/* Build and return bndmk call which creates bounds for structure
   pointed by PTR.  Structure should have complete type (asserted
   via TYPE_SIZE being non-null).  */
tree
chkp_make_bounds_for_struct_addr (tree ptr)
{
  tree type = TREE_TYPE (ptr);
  tree size;

  gcc_assert (POINTER_TYPE_P (type));

  /* Size of the pointed-to structure in bits.  */
  size = TYPE_SIZE (TREE_TYPE (type));

  gcc_assert (size);

  return build_call_nary (pointer_bounds_type_node,
			  build_fold_addr_expr (chkp_bndmk_fndecl),
			  2, ptr, size);
}
/* Traversal function for chkp_may_finish_incomplete_bounds.
   Set RES to 0 if at least one argument of phi statement
   defining bounds (passed in KEY arg) is unknown.
   Traversal stops when first unknown phi argument is found
   (returning false aborts the hash-map traverse).  */
bool
chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
			      bool *res)
{
  gimple phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);
      if (!phi_arg)
	{
	  *res = false;
	  /* Do not need to traverse further.  */
	  return false;
	}
    }

  return true;
}
/* Return 1 if all phi nodes created for bounds have their
   arguments computed (no missing phi args anywhere in the
   incomplete-bounds map).  */
static bool
chkp_may_finish_incomplete_bounds (void)
{
  bool res = true;

  chkp_incomplete_bounds_map
    ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);

  return res;
}
/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for bounds phi node: for each argument of the
   pointer phi, find its bounds and append them as the matching
   argument of the bounds phi (same incoming edge).  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
			   void *res ATTRIBUTE_UNUSED)
{
  tree ptr = *slot;
  gimple bounds_phi;
  gimple ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  bounds_phi = SSA_NAME_DEF_STMT (bounds);
  ptr_phi = SSA_NAME_DEF_STMT (ptr);

  gcc_assert (bounds_phi && gimple_code (bounds_phi) == GIMPLE_PHI);
  gcc_assert (ptr_phi && gimple_code (ptr_phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      add_phi_arg (bounds_phi, bound_arg,
		   gimple_phi_arg_edge (ptr_phi, i),
		   UNKNOWN_LOCATION);
    }

  return true;
}
/* Mark BOUNDS as invalid (no valid source of bounds exists).  */
static void
chkp_mark_invalid_bounds (tree bounds)
{
  chkp_invalid_bounds->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " as invalid\n");
    }
}

/* Return 1 if BOUNDS were marked as invalid and 0 otherwise.
   The zero/none sentinel bounds are never considered valid
   pointer bases.  */
static bool
chkp_valid_bounds (tree bounds)
{
  if (bounds == zero_bounds || bounds == none_bounds)
    return false;

  return !chkp_invalid_bounds->contains (bounds);
}
/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of phi nodes trying to find
   valid completed bounds.  If there is at least one
   such arg then bounds produced by phi node are marked
   as valid completed bounds and all phi args are
   recomputed.  Sets *RES to true when progress is made.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  /* Already resolved on an earlier iteration — nothing to do.  */
  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      /* One valid, fully-computed arg is enough to accept these
	 bounds and recompute the remaining phi args from it.  */
      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
	{
	  *res = true;
	  chkp_mark_completed_bounds (bounds);
	  chkp_recompute_phi_bounds (bounds, slot, NULL);
	  return true;
	}
    }

  return true;
}
/* Helper function for chkp_finish_incomplete_bounds.
   Marks all incomplete bounds as invalid (and completed, so
   later traversals skip them).  */
bool
chkp_mark_invalid_bounds_walker (tree const &bounds,
				 tree *slot ATTRIBUTE_UNUSED,
				 void *res ATTRIBUTE_UNUSED)
{
  if (!chkp_completed_bounds (bounds))
    {
      chkp_mark_invalid_bounds (bounds);
      chkp_mark_completed_bounds (bounds);
    }
  return true;
}
798 /* When all bound phi nodes have all their args computed
799 we have enough info to find valid bounds. We iterate
800 through all incompleted bounds searching for valid
801 bounds. Found valid bounds are marked as completed
802 and all remaining incompleted bounds are recomputed.
803 Process continues until no new valid bounds may be
804 found. All remained incompleted bounds are marked as
805 invalid (i.e. have no valid source of bounds). */
806 static void
807 chkp_finish_incomplete_bounds (void)
809 bool found_valid;
811 while (found_valid)
813 found_valid = false;
815 chkp_incomplete_bounds_map->
816 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
818 if (found_valid)
819 chkp_incomplete_bounds_map->
820 traverse<void *, chkp_recompute_phi_bounds> (NULL);
823 chkp_incomplete_bounds_map->
824 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
825 chkp_incomplete_bounds_map->
826 traverse<void *, chkp_recompute_phi_bounds> (NULL);
828 chkp_erase_completed_bounds ();
829 chkp_erase_incomplete_bounds ();
/* Return 1 if type TYPE is a pointer type or a
   structure having a pointer type as one of its fields.
   Otherwise return 0.  Recurses through record/union fields
   and array element types.  */
bool
chkp_type_has_pointer (const_tree type)
{
  bool res = false;

  if (BOUNDED_TYPE_P (type))
    res = true;
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  res = res || chkp_type_has_pointer (TREE_TYPE (field));
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    res = chkp_type_has_pointer (TREE_TYPE (type));

  return res;
}
/* Return the number of bound slots required for TYPE: 0 for a
   null or unbounded type, 1 for a plain pointer, and for
   records/unions the number of distinct bound slots found by
   chkp_find_bound_slots.  */
unsigned
chkp_type_bounds_count (const_tree type)
{
  unsigned res = 0;

  if (!type)
    res = 0;
  else if (BOUNDED_TYPE_P (type))
    res = 1;
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      bitmap have_bound;

      /* Temporary bitmap on the default obstack; released below.  */
      bitmap_obstack_initialize (NULL);
      have_bound = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (type, have_bound);
      res = bitmap_count_bits (have_bound);
      BITMAP_FREE (have_bound);
      bitmap_obstack_release (NULL);
    }

  return res;
}
/* Get bounds associated with NODE via
   chkp_set_bounds call.  Returns NULL_TREE when the map was
   never created or holds no entry for NODE.  */
tree
chkp_get_bounds (tree node)
{
  tree *slot;

  if (!chkp_bounds_map)
    return NULL_TREE;

  slot = chkp_bounds_map->get (node);
  return slot ? *slot : NULL_TREE;
}

/* Associate bounds VAL with NODE, creating the map lazily.  */
void
chkp_set_bounds (tree node, tree val)
{
  if (!chkp_bounds_map)
    chkp_bounds_map = new hash_map<tree, tree>;

  chkp_bounds_map->put (node, val);
}
/* Check if statically initialized variable VAR require
   static bounds initialization.  If VAR is added into
   bounds initialization list then 1 is returned.  Otherwise
   return 0.  */
extern bool
chkp_register_var_initializer (tree var)
{
  if (!flag_check_pointer_bounds
      || DECL_INITIAL (var) == error_mark_node)
    return false;

  gcc_assert (TREE_CODE (var) == VAR_DECL);
  gcc_assert (DECL_INITIAL (var));

  /* Only static variables whose type contains pointers need
     their bounds initialized by the checker constructor.  */
  if (TREE_STATIC (var)
      && chkp_type_has_pointer (TREE_TYPE (var)))
    {
      varpool_node::get_create (var)->need_bounds_init = 1;
      return true;
    }

  return false;
}
/* Helper function for chkp_finish_file.

   Add new modification statement (RHS is assigned to LHS)
   into list of static initializer statements (passed in ARG).
   If statements list becomes too big, emit checker constructor
   and start the new one.  */
static void
chkp_add_modification_to_stmt_list (tree lhs,
				    tree rhs,
				    void *arg)
{
  struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
  tree modify;

  /* Insert an explicit conversion when LHS and RHS types differ.  */
  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
    rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);

  modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
  append_to_statement_list (modify, &stmts->stmts);

  /* One statement consumed from the constructor budget; the
     caller is responsible for emitting when it reaches zero.  */
  stmts->avail--;
}
/* Build and return ADDR_EXPR for specified object OBJ.
   TARGET_MEM_REF needs special handling since
   build_fold_addr_expr does not accept it.  */
static tree
chkp_build_addr_expr (tree obj)
{
  return TREE_CODE (obj) == TARGET_MEM_REF
	 ? tree_mem_ref_addr (ptr_type_node, obj)
	 : build_fold_addr_expr (obj);
}
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      /* TREE_STRING_LENGTH counts the trailing NUL; SIZE here is
	 the offset of the last addressable byte.  */
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size (runtime sizeof call).  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      /* Optionally treat a dynamic size of zero as "infinite"
	 bounds (upper bound wraps to the maximum address).  */
      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  /* Statement budget exhausted: emit this constructor and start
     accumulating a fresh one.  */
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
/* Return entry block to be used for checker initialization code.
   Create new block if required (splits the function entry block
   once and caches the result).  */
static basic_block
chkp_get_entry_block (void)
{
  if (!entry_block)
    entry_block = split_block (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL)->dest;

  return entry_block;
}
/* Return a bounds var to be used for pointer var PTR_VAR.
   A new variable is created and cached in CHKP_BOUND_VARS
   on first request.  */
static tree
chkp_get_bounds_var (tree ptr_var)
{
  tree bnd_var;
  tree *slot;

  slot = chkp_bound_vars->get (ptr_var);
  if (slot)
    bnd_var = *slot;
  else
    {
      bnd_var = create_tmp_reg (pointer_bounds_type_node,
				CHKP_BOUND_TMP_NAME);
      chkp_bound_vars->put (ptr_var, bnd_var);
    }

  return bnd_var;
}
/* Register bounds BND for object PTR in global bounds table.
   A copy of bounds may be created for abnormal ssa names.
   Returns bounds to use for PTR.  */
static tree
chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
{
  bool abnormal_ptr;

  if (!chkp_reg_bounds)
    return bnd;

  /* Do nothing if bounds are incomplete_bounds
     because it means bounds will be recomputed.  */
  if (bnd == incomplete_bounds)
    return bnd;

  abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
		  && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);

  /* A single bounds value may be reused multiple times for
     different pointer values.  It may cause coalescing issues
     for abnormal SSA names.  To avoid it we create a bounds
     copy in case it is computed for abnormal SSA name.

     We also cannot reuse such created copies for other pointers  */
  if (abnormal_ptr
      || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
    {
      tree bnd_var = NULL_TREE;

      if (abnormal_ptr)
	{
	  if (SSA_NAME_VAR (ptr))
	    bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
	}
      else
	bnd_var = chkp_get_tmp_var ();

      /* For abnormal copies we may just find original
	 bounds and use them.  */
      if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
	{
	  gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
	  gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
	  bnd = gimple_assign_rhs1 (bnd_def);
	}
      /* For undefined values we usually use none bounds
	 value but in case of abnormal edge it may cause
	 coalescing failures.  Use default definition of
	 bounds variable instead to avoid it.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	       && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
	{
	  bnd = get_or_create_ssa_default_def (cfun, bnd_var);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Using default def bounds ");
	      print_generic_expr (dump_file, bnd, 0);
	      fprintf (dump_file, " for abnormal default def SSA name ");
	      print_generic_expr (dump_file, ptr, 0);
	      fprintf (dump_file, "\n");
	    }
	}
      else
	{
	  tree copy;
	  gimple def = SSA_NAME_DEF_STMT (ptr);
	  gimple assign;
	  gimple_stmt_iterator gsi;

	  if (bnd_var)
	    copy = make_ssa_name (bnd_var, gimple_build_nop ());
	  else
	    copy = make_temp_ssa_name (pointer_bounds_type_node,
				       gimple_build_nop (),
				       CHKP_BOUND_TMP_NAME);
	  assign = gimple_build_assign (copy, bnd);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Creating a copy of bounds ");
	      print_generic_expr (dump_file, bnd, 0);
	      fprintf (dump_file, " for abnormal SSA name ");
	      print_generic_expr (dump_file, ptr, 0);
	      fprintf (dump_file, "\n");
	    }

	  /* PTR has no defining statement: place the copy in the
	     checker entry block instead.  */
	  if (gimple_code (def) == GIMPLE_NOP)
	    {
	      gsi = gsi_last_bb (chkp_get_entry_block ());
	      if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
		gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
	      else
		gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
	    }
	  else
	    {
	      gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
	      /* Sometimes (e.g. when we load a pointer from a
		 memory) bounds are produced later than a pointer.
		 We need to insert bounds copy appropriately.  */
	      if (gimple_code (bnd_def) != GIMPLE_NOP
		  && stmt_dominates_stmt_p (def, bnd_def))
		gsi = gsi_for_stmt (bnd_def);
	      else
		gsi = gsi_for_stmt (def);
	      gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
	    }

	  bnd = copy;
	}

      /* Remember the copy so it is never reused for another
	 pointer (see comment above).  */
      if (abnormal_ptr)
	bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
    }

  chkp_reg_bounds->put (ptr, bnd);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Regsitered bound ");
      print_generic_expr (dump_file, bnd, 0);
      fprintf (dump_file, " for pointer ");
      print_generic_expr (dump_file, ptr, 0);
      fprintf (dump_file, "\n");
    }

  return bnd;
}
1183 /* Get bounds registered for object PTR in global bounds table. */
1184 static tree
1185 chkp_get_registered_bounds (tree ptr)
1187 tree *slot;
1189 if (!chkp_reg_bounds)
1190 return NULL_TREE;
1192 slot = chkp_reg_bounds->get (ptr);
1193 return slot ? *slot : NULL_TREE;
1196 /* Add bound retvals to return statement pointed by GSI. */
1198 static void
1199 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1201 gimple ret = gsi_stmt (*gsi);
1202 tree retval = gimple_return_retval (ret);
1203 tree ret_decl = DECL_RESULT (cfun->decl);
1204 tree bounds;
1206 if (!retval)
1207 return;
1209 if (BOUNDED_P (ret_decl))
1211 bounds = chkp_find_bounds (retval, gsi);
1212 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1213 gimple_return_set_retbnd (ret, bounds);
1216 update_stmt (ret);
1219 /* Force OP to be suitable for using as an argument for call.
1220 New statements (if any) go to SEQ. */
1221 static tree
1222 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1224 gimple_seq stmts;
1225 gimple_stmt_iterator si;
1227 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1229 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1230 chkp_mark_stmt (gsi_stmt (si));
1232 gimple_seq_add_seq (seq, stmts);
1234 return op;
1237 /* Generate lower bound check for memory access by ADDR.
1238 Check is inserted before the position pointed by ITER.
1239 DIRFLAG indicates whether memory access is load or store. */
1240 static void
1241 chkp_check_lower (tree addr, tree bounds,
1242 gimple_stmt_iterator iter,
1243 location_t location,
1244 tree dirflag)
1246 gimple_seq seq;
1247 gimple check;
1248 tree node;
1250 if (bounds == chkp_get_zero_bounds ())
1251 return;
1253 if (dirflag == integer_zero_node
1254 && !flag_chkp_check_read)
1255 return;
1257 if (dirflag == integer_one_node
1258 && !flag_chkp_check_write)
1259 return;
1261 seq = NULL;
1263 node = chkp_force_gimple_call_op (addr, &seq);
1265 check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1266 chkp_mark_stmt (check);
1267 gimple_call_set_with_bounds (check, true);
1268 gimple_set_location (check, location);
1269 gimple_seq_add_stmt (&seq, check);
1271 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1273 if (dump_file && (dump_flags & TDF_DETAILS))
1275 gimple before = gsi_stmt (iter);
1276 fprintf (dump_file, "Generated lower bound check for statement ");
1277 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1278 fprintf (dump_file, " ");
1279 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1283 /* Generate upper bound check for memory access by ADDR.
1284 Check is inserted before the position pointed by ITER.
1285 DIRFLAG indicates whether memory access is load or store. */
1286 static void
1287 chkp_check_upper (tree addr, tree bounds,
1288 gimple_stmt_iterator iter,
1289 location_t location,
1290 tree dirflag)
1292 gimple_seq seq;
1293 gimple check;
1294 tree node;
1296 if (bounds == chkp_get_zero_bounds ())
1297 return;
1299 if (dirflag == integer_zero_node
1300 && !flag_chkp_check_read)
1301 return;
1303 if (dirflag == integer_one_node
1304 && !flag_chkp_check_write)
1305 return;
1307 seq = NULL;
1309 node = chkp_force_gimple_call_op (addr, &seq);
1311 check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1312 chkp_mark_stmt (check);
1313 gimple_call_set_with_bounds (check, true);
1314 gimple_set_location (check, location);
1315 gimple_seq_add_stmt (&seq, check);
1317 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1319 if (dump_file && (dump_flags & TDF_DETAILS))
1321 gimple before = gsi_stmt (iter);
1322 fprintf (dump_file, "Generated upper bound check for statement ");
1323 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1324 fprintf (dump_file, " ");
1325 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  /* The lower check covers the first byte of the slot, the upper
     check the last one; together they cover the whole access.  */
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
1343 /* Replace call to _bnd_chk_* pointed by GSI with
1344 bndcu and bndcl calls. DIRFLAG determines whether
1345 check is for read or write. */
1347 void
1348 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1349 tree dirflag)
1351 gimple_stmt_iterator call_iter = *gsi;
1352 gimple call = gsi_stmt (*gsi);
1353 tree fndecl = gimple_call_fndecl (call);
1354 tree addr = gimple_call_arg (call, 0);
1355 tree bounds = chkp_find_bounds (addr, gsi);
1357 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1358 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1359 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1361 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1362 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1364 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1366 tree size = gimple_call_arg (call, 1);
1367 addr = fold_build_pointer_plus (addr, size);
1368 addr = fold_build_pointer_plus_hwi (addr, -1);
1369 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1372 gsi_remove (&call_iter, true);
1375 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1376 corresponding bounds extract call. */
1378 void
1379 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1381 gimple call = gsi_stmt (*gsi);
1382 tree fndecl = gimple_call_fndecl (call);
1383 tree addr = gimple_call_arg (call, 0);
1384 tree bounds = chkp_find_bounds (addr, gsi);
1385 gimple extract;
1387 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1388 fndecl = chkp_extract_lower_fndecl;
1389 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1390 fndecl = chkp_extract_upper_fndecl;
1391 else
1392 gcc_unreachable ();
1394 extract = gimple_build_call (fndecl, 1, bounds);
1395 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1396 chkp_mark_stmt (extract);
1398 gsi_replace (gsi, extract, false);
1401 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1402 static tree
1403 chkp_build_component_ref (tree obj, tree field)
1405 tree res;
1407 /* If object is TMR then we do not use component_ref but
1408 add offset instead. We need it to be able to get addr
1409 of the reasult later. */
1410 if (TREE_CODE (obj) == TARGET_MEM_REF)
1412 tree offs = TMR_OFFSET (obj);
1413 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1414 offs, DECL_FIELD_OFFSET (field));
1416 gcc_assert (offs);
1418 res = copy_node (obj);
1419 TREE_TYPE (res) = TREE_TYPE (field);
1420 TMR_OFFSET (res) = offs;
1422 else
1423 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1425 return res;
1428 /* Return ARRAY_REF for array ARR and index IDX with
1429 specified element type ETYPE and element size ESIZE. */
1430 static tree
1431 chkp_build_array_ref (tree arr, tree etype, tree esize,
1432 unsigned HOST_WIDE_INT idx)
1434 tree index = build_int_cst (size_type_node, idx);
1435 tree res;
1437 /* If object is TMR then we do not use array_ref but
1438 add offset instead. We need it to be able to get addr
1439 of the reasult later. */
1440 if (TREE_CODE (arr) == TARGET_MEM_REF)
1442 tree offs = TMR_OFFSET (arr);
1444 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1445 esize, index);
1446 gcc_assert(esize);
1448 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1449 offs, esize);
1450 gcc_assert (offs);
1452 res = copy_node (arr);
1453 TREE_TYPE (res) = etype;
1454 TMR_OFFSET (res) = offs;
1456 else
1457 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1459 return res;
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   ITER points a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      /* ELEM is itself a pointer: its bounds go into the slot
	 determined by its bit offset.  Skip the slot if another
	 field (e.g. a union member) already filled it.  */
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  /* Load the pointer into a temporary SSA name first so
	     chkp_find_bounds can be asked for its bounds at the
	     point right after the load.  */
	  tree temp = make_temp_ssa_name (type, gimple_build_nop (), "");
	  gimple assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into each field, accumulating its bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    /* DECL_FIELD_OFFSET is in bytes; convert to bits.  */
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;

	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Nothing to do for arrays of unknown or zero length.  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
1534 /* Fill HAVE_BOUND output bitmap with information about
1535 bounds requred for object of type TYPE.
1537 OFFS is used for recursive calls and holds basic
1538 offset of TYPE in outer structure in bits.
1540 HAVE_BOUND[i] is set to 1 if there is a field
1541 in TYPE which has pointer type and offset
1542 equal to i * POINTER_SIZE - OFFS in bits. */
1543 void
1544 chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
1545 HOST_WIDE_INT offs)
1547 if (BOUNDED_TYPE_P (type))
1548 bitmap_set_bit (have_bound, offs / POINTER_SIZE);
1549 else if (RECORD_OR_UNION_TYPE_P (type))
1551 tree field;
1553 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1554 if (TREE_CODE (field) == FIELD_DECL)
1556 HOST_WIDE_INT field_offs
1557 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1558 if (DECL_FIELD_OFFSET (field))
1559 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1560 chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
1561 offs + field_offs);
1564 else if (TREE_CODE (type) == ARRAY_TYPE)
1566 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1567 tree etype = TREE_TYPE (type);
1568 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1569 unsigned HOST_WIDE_INT cur;
1571 if (!maxval || integer_minus_onep (maxval))
1572 return;
1574 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1575 chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
/* Fill bitmap RES with information about bounds for
   type TYPE.  See chkp_find_bound_slots_1 for more
   details.  */
void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  /* Start from a clean bitmap; the whole type is scanned from
     offset 0.  */
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */
static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gimple call = gsi_stmt (*gsi);
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gimple new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to legacy functions.  */
  if (fndecl
      && lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (fndecl)))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* Avoid instrumented builtin functions for now.  Due to IPA
     it also means we have to avoid instrumentation of indirect
     calls.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN)
    return;

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  NB: each "else" below binds
	 to the nearest "if"; for a fntype arg list we stop
	 walking it once void_type_node (end of fixed args for
	 a varargs function) is reached and fall back to the
	 actual argument type.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      /* Pointers and pass-by-reference args get a single bounds
	 argument right after the original one.  */
      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  /* Aggregates passed by value may contain several
	     pointers; append bounds for each of them.  */
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds were added we may keep the original statement.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
    }
  new_args.release ();

  /* If we call built-in function and pass no bounds then
     we do not need to change anything.  */
  if (new_call == call
      && fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && fndecl == builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
    return;

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA defs of the old call to the new statement
	 before the replacement.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
1806 /* Return constant static bounds var with specified LB and UB
1807 if such var exists in varpool. Return NULL otherwise. */
1808 static tree
1809 chkp_find_const_bounds_var (HOST_WIDE_INT lb,
1810 HOST_WIDE_INT ub)
1812 tree val = targetm.chkp_make_bounds_constant (lb, ub);
1813 struct varpool_node *node;
1815 /* We expect bounds constant is represented as a complex value
1816 of two pointer sized integers. */
1817 gcc_assert (TREE_CODE (val) == COMPLEX_CST);
1819 FOR_EACH_VARIABLE (node)
1820 if (POINTER_BOUNDS_P (node->decl)
1821 && TREE_READONLY (node->decl)
1822 && DECL_INITIAL (node->decl)
1823 && TREE_CODE (DECL_INITIAL (node->decl)) == COMPLEX_CST
1824 && tree_int_cst_equal (TREE_REALPART (DECL_INITIAL (node->decl)),
1825 TREE_REALPART (val))
1826 && tree_int_cst_equal (TREE_IMAGPART (DECL_INITIAL (node->decl)),
1827 TREE_IMAGPART (val)))
1828 return node->decl;
1830 return NULL;
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree var;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  var = chkp_find_const_bounds_var (lb, ub);

  if (var)
    return var;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		    get_identifier (name), pointer_bounds_type_node);

  /* Mark the new variable as a static read-only artificial
     constant visible across units.  */
  TREE_PUBLIC (var) = 1;
  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_STATIC (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  if (!in_lto_p)
    {
      DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
      DECL_COMDAT (var) = 1;
      varpool_node::get_create (var)->set_comdat_group (DECL_ASSEMBLER_NAME (var));
      varpool_node::get_create (var)->force_output = 1;
    }
  else
    /* In LTO mode the definition is expected to come from
       another unit.  */
    DECL_EXTERNAL (var) = 1;
  varpool_node::finalize_decl (var);

  return var;
}
/* Generate code to make bounds with specified lower bound LB and SIZE.
   if AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* Both operands must be valid gimple call arguments.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  /* The created bounds value is the lhs of the bndmk call.  */
  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER only matters when an insertion point was supplied;
     entry-block insertion is always "before".  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
1929 /* Return var holding zero bounds. */
1930 tree
1931 chkp_get_zero_bounds_var (void)
1933 if (!chkp_zero_bounds_var)
1935 tree id = get_identifier (CHKP_ZERO_BOUNDS_VAR_NAME);
1936 symtab_node *node = symtab_node::get_for_asmname (id);
1937 if (node)
1938 chkp_zero_bounds_var = node->decl;
1941 if (!chkp_zero_bounds_var)
1942 chkp_zero_bounds_var
1943 = chkp_make_static_const_bounds (0, -1,
1944 CHKP_ZERO_BOUNDS_VAR_NAME);
1945 return chkp_zero_bounds_var;
1948 /* Return var holding none bounds. */
1949 tree
1950 chkp_get_none_bounds_var (void)
1952 if (!chkp_none_bounds_var)
1954 tree id = get_identifier (CHKP_NONE_BOUNDS_VAR_NAME);
1955 symtab_node *node = symtab_node::get_for_asmname (id);
1956 if (node)
1957 chkp_none_bounds_var = node->decl;
1960 if (!chkp_none_bounds_var)
1961 chkp_none_bounds_var
1962 = chkp_make_static_const_bounds (-1, 0,
1963 CHKP_NONE_BOUNDS_VAR_NAME);
1964 return chkp_none_bounds_var;
1967 /* Return SSA_NAME used to represent zero bounds. */
1968 static tree
1969 chkp_get_zero_bounds (void)
1971 if (zero_bounds)
1972 return zero_bounds;
1974 if (dump_file && (dump_flags & TDF_DETAILS))
1975 fprintf (dump_file, "Creating zero bounds...");
1977 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
1978 || flag_chkp_use_static_const_bounds > 0)
1980 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
1981 gimple stmt;
1983 zero_bounds = chkp_get_tmp_reg (gimple_build_nop ());
1984 stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
1985 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
1987 else
1988 zero_bounds = chkp_make_bounds (integer_zero_node,
1989 integer_zero_node,
1990 NULL,
1991 false);
1993 return zero_bounds;
1996 /* Return SSA_NAME used to represent none bounds. */
1997 static tree
1998 chkp_get_none_bounds (void)
2000 if (none_bounds)
2001 return none_bounds;
2003 if (dump_file && (dump_flags & TDF_DETAILS))
2004 fprintf (dump_file, "Creating none bounds...");
2007 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2008 || flag_chkp_use_static_const_bounds > 0)
2010 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2011 gimple stmt;
2013 none_bounds = chkp_get_tmp_reg (gimple_build_nop ());
2014 stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
2015 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2017 else
2018 none_bounds = chkp_make_bounds (integer_minus_one_node,
2019 build_int_cst (size_type_node, 2),
2020 NULL,
2021 false);
2023 return none_bounds;
/* Return bounds to be used as a result of operation which
   should not create pointer (e.g. MULT_EXPR).  */
static tree
chkp_get_invalid_op_bounds (void)
{
  /* Zero bounds never cause a check failure, so such results
     are effectively unchecked.  */
  return chkp_get_zero_bounds ();
}
/* Return bounds to be used for loads of non-pointer values.  */
static tree
chkp_get_nonpointer_load_bounds (void)
{
  /* Zero bounds never cause a check failure, so non-pointer
     loads are effectively unchecked.  */
  return chkp_get_zero_bounds ();
}
/* Build bounds returned by CALL.  */
static tree
chkp_build_returned_bound (gimple call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple stmt;
  tree fndecl = gimple_call_fndecl (call);

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      /* Bounds of an alloca result are [result, result + size).  */
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if ((gimple_call_return_flags (call) & ERF_RETURNS_ARG)
	   || gimple_call_builtin_p (call, BUILT_IN_STRCHR))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = 0, argno;
      if (gimple_call_return_flags (call) & ERF_RETURNS_ARG)
	retarg = gimple_call_return_flags (call) & ERF_RETURN_ARG_MASK;
      if (gimple_call_with_bounds_p (call))
	{
	  /* For an instrumented call count only non-bounds args
	     when looking for the returned argument.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      /* The retbnd call is inserted right after the call whose
	 returned bounds it obtains.  */
      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2147 /* Return bounds used as returned by call
2148 which produced SSA name VAL. */
2149 gimple
2150 chkp_retbnd_call_by_val (tree val)
2152 if (TREE_CODE (val) != SSA_NAME)
2153 return NULL;
2155 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2157 imm_use_iterator use_iter;
2158 use_operand_p use_p;
2159 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2160 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2161 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2162 return USE_STMT (use_p);
2164 return NULL;
2167 /* Check the next parameter for the given PARM is bounds
2168 and return it's default SSA_NAME (create if required). */
2169 static tree
2170 chkp_get_next_bounds_parm (tree parm)
2172 tree bounds = TREE_CHAIN (parm);
2173 gcc_assert (POINTER_BOUNDS_P (bounds));
2174 bounds = ssa_default_def (cfun, bounds);
2175 if (!bounds)
2177 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2178 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2180 return bounds;
/* Return bounds to be used for input argument PARM.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  /* Check both the SSA name and the underlying PARM_DECL for
     previously registered bounds.  */
  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
			  "main") == 0)
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* Bounds of an instrumented pointer parameter come in
	     through the next (bounds) parameter.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds, 0);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	bounds = chkp_get_zero_bounds ();
    }

  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm), 0);
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
2247 /* Build and return CALL_EXPR for bndstx builtin with specified
2248 arguments. */
2249 tree
2250 chkp_build_bndldx_call (tree addr, tree ptr)
2252 tree fn = build1 (ADDR_EXPR,
2253 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2254 chkp_bndldx_fndecl);
2255 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2256 fn, 2, addr, ptr);
2257 CALL_WITH_BOUNDS_P (call) = true;
2258 return call;
2261 /* Insert code to load bounds for PTR located by ADDR.
2262 Code is inserted after position pointed by GSI.
2263 Loaded bounds are returned. */
2264 static tree
2265 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2267 gimple_seq seq;
2268 gimple stmt;
2269 tree bounds;
2271 seq = NULL;
2273 addr = chkp_force_gimple_call_op (addr, &seq);
2274 ptr = chkp_force_gimple_call_op (ptr, &seq);
2276 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2277 chkp_mark_stmt (stmt);
2278 bounds = chkp_get_tmp_reg (stmt);
2279 gimple_call_set_lhs (stmt, bounds);
2281 gimple_seq_add_stmt (&seq, stmt);
2283 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2285 if (dump_file && (dump_flags & TDF_DETAILS))
2287 fprintf (dump_file, "Generated bndldx for pointer ");
2288 print_generic_expr (dump_file, ptr, 0);
2289 fprintf (dump_file, ": ");
2290 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2293 return bounds;
2296 /* Build and return CALL_EXPR for bndstx builtin with specified
2297 arguments. */
2298 tree
2299 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2301 tree fn = build1 (ADDR_EXPR,
2302 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2303 chkp_bndstx_fndecl);
2304 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2305 fn, 3, ptr, bounds, addr);
2306 CALL_WITH_BOUNDS_P (call) = true;
2307 return call;
2310 /* Insert code to store BOUNDS for PTR stored by ADDR.
2311 New statements are inserted after position pointed
2312 by GSI. */
2313 void
2314 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2315 gimple_stmt_iterator *gsi)
2317 gimple_seq seq;
2318 gimple stmt;
2320 seq = NULL;
2322 addr = chkp_force_gimple_call_op (addr, &seq);
2323 ptr = chkp_force_gimple_call_op (ptr, &seq);
2325 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2326 chkp_mark_stmt (stmt);
2327 gimple_call_set_with_bounds (stmt, true);
2329 gimple_seq_add_stmt (&seq, stmt);
2331 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2333 if (dump_file && (dump_flags & TDF_DETAILS))
2335 fprintf (dump_file, "Generated bndstx for pointer store ");
2336 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2337 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2341 /* Compute bounds for pointer NODE which was assigned in
2342 assignment statement ASSIGN. Return computed bounds. */
2343 static tree
2344 chkp_compute_bounds_for_assignment (tree node, gimple assign)
2346 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
2347 tree rhs1 = gimple_assign_rhs1 (assign);
2348 tree bounds = NULL_TREE;
2349 gimple_stmt_iterator iter = gsi_for_stmt (assign);
2351 if (dump_file && (dump_flags & TDF_DETAILS))
2353 fprintf (dump_file, "Computing bounds for assignment: ");
2354 print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
2357 switch (rhs_code)
2359 case MEM_REF:
2360 case TARGET_MEM_REF:
2361 case COMPONENT_REF:
2362 case ARRAY_REF:
2363 /* We need to load bounds from the bounds table. */
2364 bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
2365 break;
2367 case VAR_DECL:
2368 case SSA_NAME:
2369 case ADDR_EXPR:
2370 case POINTER_PLUS_EXPR:
2371 case NOP_EXPR:
2372 case CONVERT_EXPR:
2373 case INTEGER_CST:
2374 /* Bounds are just propagated from RHS. */
2375 bounds = chkp_find_bounds (rhs1, &iter);
2376 break;
2378 case VIEW_CONVERT_EXPR:
2379 /* Bounds are just propagated from RHS. */
2380 bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
2381 break;
2383 case PARM_DECL:
2384 if (BOUNDED_P (rhs1))
2386 /* We need to load bounds from the bounds table. */
2387 bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
2388 node, &iter);
/* bndldx takes the parm's address, so the parm must live in memory.  */
2389 TREE_ADDRESSABLE (rhs1) = 1;
2391 else
2392 bounds = chkp_get_nonpointer_load_bounds ();
2393 break;
2395 case MINUS_EXPR:
2396 case PLUS_EXPR:
2397 case BIT_AND_EXPR:
2398 case BIT_IOR_EXPR:
2399 case BIT_XOR_EXPR:
2401 tree rhs2 = gimple_assign_rhs2 (assign);
2402 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2403 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2405 /* First we try to check types of operands. If it
2406 does not help then look at bound values.
2408 If some bounds are incomplete and other are
2409 not proven to be valid (i.e. also incomplete
2410 or invalid because value is not pointer) then
2411 resulting value is incomplete and will be
2412 recomputed later in chkp_finish_incomplete_bounds. */
2413 if (BOUNDED_P (rhs1)
2414 && !BOUNDED_P (rhs2))
2415 bounds = bnd1;
2416 else if (BOUNDED_P (rhs2)
2417 && !BOUNDED_P (rhs1)
2418 && rhs_code != MINUS_EXPR)
2419 bounds = bnd2;
2420 else if (chkp_incomplete_bounds (bnd1))
2421 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
2422 && !chkp_incomplete_bounds (bnd2))
2423 bounds = bnd2;
2424 else
2425 bounds = incomplete_bounds;
2426 else if (chkp_incomplete_bounds (bnd2))
2427 if (chkp_valid_bounds (bnd1)
2428 && !chkp_incomplete_bounds (bnd1))
2429 bounds = bnd1;
2430 else
2431 bounds = incomplete_bounds;
2432 else if (!chkp_valid_bounds (bnd1))
2433 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
2434 bounds = bnd2;
2435 else if (bnd2 == chkp_get_zero_bounds ())
2436 bounds = bnd2;
2437 else
2438 bounds = bnd1;
2439 else if (!chkp_valid_bounds (bnd2))
2440 bounds = bnd1;
2441 else
2442 /* Seems both operands may have valid bounds
2443 (e.g. pointer minus pointer). In such case
2444 use default invalid op bounds. */
2445 bounds = chkp_get_invalid_op_bounds ();
2447 break;
2449 case BIT_NOT_EXPR:
2450 case NEGATE_EXPR:
2451 case LSHIFT_EXPR:
2452 case RSHIFT_EXPR:
2453 case LROTATE_EXPR:
2454 case RROTATE_EXPR:
2455 case EQ_EXPR:
2456 case NE_EXPR:
2457 case LT_EXPR:
2458 case LE_EXPR:
2459 case GT_EXPR:
2460 case GE_EXPR:
2461 case MULT_EXPR:
2462 case RDIV_EXPR:
2463 case TRUNC_DIV_EXPR:
2464 case FLOOR_DIV_EXPR:
2465 case CEIL_DIV_EXPR:
2466 case ROUND_DIV_EXPR:
2467 case TRUNC_MOD_EXPR:
2468 case FLOOR_MOD_EXPR:
2469 case CEIL_MOD_EXPR:
2470 case ROUND_MOD_EXPR:
2471 case EXACT_DIV_EXPR:
2472 case FIX_TRUNC_EXPR:
2473 case FLOAT_EXPR:
2474 case REALPART_EXPR:
2475 case IMAGPART_EXPR:
2476 /* No valid bounds may be produced by these exprs. */
2477 bounds = chkp_get_invalid_op_bounds ();
2478 break;
2480 case COND_EXPR:
2482 tree val1 = gimple_assign_rhs2 (assign);
2483 tree val2 = gimple_assign_rhs3 (assign);
2484 tree bnd1 = chkp_find_bounds (val1, &iter);
2485 tree bnd2 = chkp_find_bounds (val2, &iter);
2486 gimple stmt;
2488 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2489 bounds = incomplete_bounds;
2490 else if (bnd1 == bnd2)
2491 bounds = bnd1;
2492 else
/* Branches carry different bounds: select bounds with a COND_EXPR
   mirroring the condition of the original assignment.  */
2494 rhs1 = unshare_expr (rhs1);
2496 bounds = chkp_get_tmp_reg (assign);
2497 stmt = gimple_build_assign_with_ops (COND_EXPR, bounds,
2498 rhs1, bnd1, bnd2);
2499 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2501 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2502 chkp_mark_invalid_bounds (bounds);
2505 break;
2507 case MAX_EXPR:
2508 case MIN_EXPR:
2510 tree rhs2 = gimple_assign_rhs2 (assign);
2511 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2512 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2514 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2515 bounds = incomplete_bounds;
2516 else if (bnd1 == bnd2)
2517 bounds = bnd1;
2518 else
/* Select the bounds of whichever operand wins the MIN/MAX.  */
2520 gimple stmt;
2521 tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
2522 boolean_type_node, rhs1, rhs2);
2523 bounds = chkp_get_tmp_reg (assign);
2524 stmt = gimple_build_assign_with_ops (COND_EXPR, bounds,
2525 cond, bnd1, bnd2);
2527 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2529 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2530 chkp_mark_invalid_bounds (bounds);
2533 break;
2535 default:
2536 bounds = chkp_get_zero_bounds ();
2537 warning (0, "pointer bounds were lost due to unexpected expression %s",
2538 get_tree_code_name (rhs_code));
/* Every case above must have produced some bounds.  */
2541 gcc_assert (bounds);
/* Register the result for NODE so later queries reuse it.  */
2543 if (node)
2544 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2546 return bounds;
2549 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2551 There are just few statement codes allowed: NOP (for default ssa names),
2552 ASSIGN, CALL, PHI, ASM.
2554 Return computed bounds. */
2555 static tree
2556 chkp_get_bounds_by_definition (tree node, gimple def_stmt,
2557 gimple_stmt_iterator *iter)
2559 tree var, bounds;
2560 enum gimple_code code = gimple_code (def_stmt);
2561 gimple stmt;
2563 if (dump_file && (dump_flags & TDF_DETAILS))
2565 fprintf (dump_file, "Searching for bounds for node: ");
2566 print_generic_expr (dump_file, node, 0);
2568 fprintf (dump_file, " using its definition: ");
2569 print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2572 switch (code)
/* NODE has no real defining statement: it is a default definition;
   bounds depend on the kind of decl NODE is based on.  */
2574 case GIMPLE_NOP:
2575 var = SSA_NAME_VAR (node);
2576 switch (TREE_CODE (var))
2578 case PARM_DECL:
2579 bounds = chkp_get_bound_for_parm (node);
2580 break;
2582 case VAR_DECL:
2583 /* For uninitialized pointers use none bounds. */
2584 bounds = chkp_get_none_bounds ();
2585 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2586 break;
2588 case RESULT_DECL:
2590 tree base_type;
2592 gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2594 base_type = TREE_TYPE (TREE_TYPE (node));
2596 gcc_assert (TYPE_SIZE (base_type)
2597 && TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2598 && tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2600 bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2601 NULL, false);
2602 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2604 break;
2606 default:
2607 if (dump_file && (dump_flags & TDF_DETAILS))
2609 fprintf (dump_file, "Unexpected var with no definition\n");
2610 print_generic_expr (dump_file, var, 0);
2612 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2613 get_tree_code_name (TREE_CODE (var)));
2615 break;
2617 case GIMPLE_ASSIGN:
2618 bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2619 break;
2621 case GIMPLE_CALL:
2622 bounds = chkp_build_returned_bound (def_stmt);
2623 break;
/* Create an (initially argument-less) bounds PHI; the caller fills
   in its arguments via add_phi_arg (see chkp_find_bounds_1).  */
2625 case GIMPLE_PHI:
2626 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
2627 if (SSA_NAME_VAR (node))
2628 var = chkp_get_bounds_var (SSA_NAME_VAR (node));
2629 else
2630 var = make_temp_ssa_name (pointer_bounds_type_node,
2631 gimple_build_nop (),
2632 CHKP_BOUND_TMP_NAME);
2633 else
2634 var = chkp_get_tmp_var ();
2635 stmt = create_phi_node (var, gimple_bb (def_stmt));
2636 bounds = gimple_phi_result (stmt);
/* Report the position of the new bounds PHI back through ITER.  */
2637 *iter = gsi_for_stmt (stmt);
2639 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2641 /* Created bounds do not have all phi args computed and
2642 therefore we do not know if there is a valid source
2643 of bounds for that node. Therefore we mark bounds
2644 as incomplete and then recompute them when all phi
2645 args are computed. */
2646 chkp_register_incomplete_bounds (bounds, node);
2647 break;
2649 case GIMPLE_ASM:
2650 bounds = chkp_get_zero_bounds ();
2651 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2652 break;
2654 default:
2655 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2656 gimple_code_name[code]);
2659 return bounds;
2662 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2663 tree
2664 chkp_build_make_bounds_call (tree lower_bound, tree size)
2666 tree call = build1 (ADDR_EXPR,
2667 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2668 chkp_bndmk_fndecl);
2669 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2670 call, 2, lower_bound, size);
2673 /* Create static bounds var of specfified OBJ which is
2674 is either VAR_DECL or string constant. */
2675 static tree
2676 chkp_make_static_bounds (tree obj)
2678 static int string_id = 1;
2679 static int var_id = 1;
2680 tree *slot;
2681 const char *var_name;
2682 char *bnd_var_name;
2683 tree bnd_var;
2685 /* First check if we already have required var. */
2686 if (chkp_static_var_bounds)
2688 slot = chkp_static_var_bounds->get (obj);
2689 if (slot)
2690 return *slot;
2693 /* Build decl for bounds var. */
2694 if (TREE_CODE (obj) == VAR_DECL)
2696 if (DECL_IGNORED_P (obj))
2698 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2699 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2701 else
2703 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2705 /* For hidden symbols we want to skip first '*' char. */
2706 if (*var_name == '*')
2707 var_name++;
2709 bnd_var_name = (char *) xmalloc (strlen (var_name)
2710 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2711 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2712 strcat (bnd_var_name, var_name);
2715 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2716 get_identifier (bnd_var_name),
2717 pointer_bounds_type_node);
2719 /* Address of the obj will be used as lower bound. */
2720 TREE_ADDRESSABLE (obj) = 1;
2722 else
2724 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2725 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2727 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2728 get_identifier (bnd_var_name),
2729 pointer_bounds_type_node);
2732 TREE_PUBLIC (bnd_var) = 0;
2733 TREE_USED (bnd_var) = 1;
2734 TREE_READONLY (bnd_var) = 0;
2735 TREE_STATIC (bnd_var) = 1;
2736 TREE_ADDRESSABLE (bnd_var) = 0;
2737 DECL_ARTIFICIAL (bnd_var) = 1;
2738 DECL_COMMON (bnd_var) = 1;
2739 DECL_COMDAT (bnd_var) = 1;
2740 DECL_READ_P (bnd_var) = 1;
2741 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2742 /* Force output similar to constant bounds.
2743 See chkp_make_static_const_bounds. */
2744 varpool_node::get_create (bnd_var)->force_output = 1;
2745 /* Mark symbol as requiring bounds initialization. */
2746 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2747 varpool_node::finalize_decl (bnd_var);
2749 /* Add created var to the map to use it for other references
2750 to obj. */
2751 if (!chkp_static_var_bounds)
2752 chkp_static_var_bounds = new hash_map<tree, tree>;
2754 chkp_static_var_bounds->put (obj, bnd_var);
2756 return bnd_var;
2759 /* When var has incomplete type we cannot get size to
2760 compute its bounds. In such cases we use checker
2761 builtin call which determines object size at runtime. */
2762 static tree
2763 chkp_generate_extern_var_bounds (tree var)
2765 tree bounds, size_reloc, lb, size, max_size, cond;
2766 gimple_stmt_iterator gsi;
2767 gimple_seq seq = NULL;
2768 gimple stmt;
2770 /* If instrumentation is not enabled for vars having
2771 incomplete type then just return zero bounds to avoid
2772 checks for this var. */
2773 if (!flag_chkp_incomplete_type)
2774 return chkp_get_zero_bounds ();
2776 if (dump_file && (dump_flags & TDF_DETAILS))
2778 fprintf (dump_file, "Generating bounds for extern symbol '");
2779 print_generic_expr (dump_file, var, 0);
2780 fprintf (dump_file, "'\n");
2783 stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
2785 size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
2786 gimple_call_set_lhs (stmt, size_reloc);
2788 gimple_seq_add_stmt (&seq, stmt);
2790 lb = chkp_build_addr_expr (var);
2791 size = make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());
2793 if (flag_chkp_zero_dynamic_size_as_infinite)
2795 /* We should check that size relocation was resolved.
2796 If it was not then use maximum possible size for the var. */
2797 max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
2798 fold_convert (chkp_uintptr_type, lb));
2799 max_size = chkp_force_gimple_call_op (max_size, &seq);
2801 cond = build2 (NE_EXPR, boolean_type_node, size_reloc, integer_zero_node);
2802 stmt = gimple_build_assign_with_ops (COND_EXPR, size,
2803 cond, size_reloc, max_size);
2804 gimple_seq_add_stmt (&seq, stmt);
2806 else
2808 stmt = gimple_build_assign (size, size_reloc);
2809 gimple_seq_add_stmt (&seq, stmt);
2812 gsi = gsi_start_bb (chkp_get_entry_block ());
2813 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2815 bounds = chkp_make_bounds (lb, size, &gsi, true);
2817 return bounds;
2820 /* Return 1 if TYPE has fields with zero size or fields
2821 marked with chkp_variable_size attribute. */
2822 bool
2823 chkp_variable_size_type (tree type)
2825 bool res = false;
2826 tree field;
2828 if (RECORD_OR_UNION_TYPE_P (type))
2829 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2831 if (TREE_CODE (field) == FIELD_DECL)
2832 res = res
2833 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
2834 || chkp_variable_size_type (TREE_TYPE (field));
2836 else
2837 res = !TYPE_SIZE (type)
2838 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
2839 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
2841 return res;
2844 /* Compute and return bounds for address of DECL which is
2845 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
2846 static tree
2847 chkp_get_bounds_for_decl_addr (tree decl)
2849 tree bounds;
2851 gcc_assert (TREE_CODE (decl) == VAR_DECL
2852 || TREE_CODE (decl) == PARM_DECL
2853 || TREE_CODE (decl) == RESULT_DECL);
2855 bounds = chkp_get_registered_addr_bounds (decl);
2857 if (bounds)
2858 return bounds;
2860 if (dump_file && (dump_flags & TDF_DETAILS))
2862 fprintf (dump_file, "Building bounds for address of decl ");
2863 print_generic_expr (dump_file, decl, 0);
2864 fprintf (dump_file, "\n");
2867 /* Use zero bounds if size is unknown and checks for
2868 unknown sizes are restricted. */
2869 if ((!DECL_SIZE (decl)
2870 || (chkp_variable_size_type (TREE_TYPE (decl))
2871 && (TREE_STATIC (decl)
2872 || DECL_EXTERNAL (decl)
2873 || TREE_PUBLIC (decl))))
2874 && !flag_chkp_incomplete_type)
2875 return chkp_get_zero_bounds ();
2877 if (flag_chkp_use_static_bounds
2878 && TREE_CODE (decl) == VAR_DECL
2879 && (TREE_STATIC (decl)
2880 || DECL_EXTERNAL (decl)
2881 || TREE_PUBLIC (decl))
2882 && !DECL_THREAD_LOCAL_P (decl))
2884 tree bnd_var = chkp_make_static_bounds (decl);
2885 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2886 gimple stmt;
2888 bounds = chkp_get_tmp_reg (gimple_build_nop ());
2889 stmt = gimple_build_assign (bounds, bnd_var);
2890 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2892 else if (!DECL_SIZE (decl)
2893 || (chkp_variable_size_type (TREE_TYPE (decl))
2894 && (TREE_STATIC (decl)
2895 || DECL_EXTERNAL (decl)
2896 || TREE_PUBLIC (decl))))
2898 gcc_assert (TREE_CODE (decl) == VAR_DECL);
2899 bounds = chkp_generate_extern_var_bounds (decl);
2901 else
2903 tree lb = chkp_build_addr_expr (decl);
2904 bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
2907 return bounds;
2910 /* Compute and return bounds for constant string. */
2911 static tree
2912 chkp_get_bounds_for_string_cst (tree cst)
2914 tree bounds;
2915 tree lb;
2916 tree size;
2918 gcc_assert (TREE_CODE (cst) == STRING_CST);
2920 bounds = chkp_get_registered_bounds (cst);
2922 if (bounds)
2923 return bounds;
2925 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2926 || flag_chkp_use_static_const_bounds > 0)
2928 tree bnd_var = chkp_make_static_bounds (cst);
2929 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2930 gimple stmt;
2932 bounds = chkp_get_tmp_reg (gimple_build_nop ());
2933 stmt = gimple_build_assign (bounds, bnd_var);
2934 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2936 else
2938 lb = chkp_build_addr_expr (cst);
2939 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
2940 bounds = chkp_make_bounds (lb, size, NULL, false);
2943 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
2945 return bounds;
2948 /* Generate code to instersect bounds BOUNDS1 and BOUNDS2 and
2949 return the result. if ITER is not NULL then Code is inserted
2950 before position pointed by ITER. Otherwise code is added to
2951 entry block. */
2952 static tree
2953 chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
2955 if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
2956 return bounds2 ? bounds2 : bounds1;
2957 else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
2958 return bounds1;
2959 else
2961 gimple_seq seq;
2962 gimple stmt;
2963 tree bounds;
2965 seq = NULL;
2967 stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
2968 chkp_mark_stmt (stmt);
2970 bounds = chkp_get_tmp_reg (stmt);
2971 gimple_call_set_lhs (stmt, bounds);
2973 gimple_seq_add_stmt (&seq, stmt);
2975 /* We are probably doing narrowing for constant expression.
2976 In such case iter may be undefined. */
2977 if (!iter)
2979 gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
2980 iter = &gsi;
2981 gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
2983 else
2984 gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
2986 if (dump_file && (dump_flags & TDF_DETAILS))
2988 fprintf (dump_file, "Bounds intersection: ");
2989 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2990 fprintf (dump_file, " inserted before statement: ");
2991 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
2992 TDF_VOPS|TDF_MEMSYMS);
2995 return bounds;
2999 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3000 and 0 othersize. */
3001 static bool
3002 chkp_may_narrow_to_field (tree field)
3004 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3005 && tree_to_uhwi (DECL_SIZE (field)) != 0
3006 && (!DECL_FIELD_OFFSET (field)
3007 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3008 && (!DECL_FIELD_BIT_OFFSET (field)
3009 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3010 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3011 && !chkp_variable_size_type (TREE_TYPE (field));
3014 /* Return 1 if bounds for FIELD should be narrowed to
3015 field's own size. */
3016 static bool
3017 chkp_narrow_bounds_for_field (tree field)
3019 HOST_WIDE_INT offs;
3020 HOST_WIDE_INT bit_offs;
3022 if (!chkp_may_narrow_to_field (field))
3023 return false;
3025 /* Accesse to compiler generated fields should not cause
3026 bounds narrowing. */
3027 if (DECL_ARTIFICIAL (field))
3028 return false;
3030 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3031 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3033 return (flag_chkp_narrow_bounds
3034 && (flag_chkp_first_field_has_own_bounds
3035 || offs
3036 || bit_offs));
3039 /* Perform narrowing for BOUNDS using bounds computed for field
3040 access COMPONENT. ITER meaning is the same as for
3041 chkp_intersect_bounds. */
3042 static tree
3043 chkp_narrow_bounds_to_field (tree bounds, tree component,
3044 gimple_stmt_iterator *iter)
3046 tree field = TREE_OPERAND (component, 1);
3047 tree size = DECL_SIZE_UNIT (field);
3048 tree field_ptr = chkp_build_addr_expr (component);
3049 tree field_bounds;
3051 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3053 return chkp_intersect_bounds (field_bounds, bounds, iter);
3056 /* Parse field or array access NODE.
3058 PTR output parameter holds a pointer to the outermost
3059 object.
3061 BITFIELD output parameter is set to 1 if bitfield is
3062 accessed and to 0 otherwise. If it is 1 then ELT holds
3063 outer component for accessed bit field.
3065 SAFE output parameter is set to 1 if access is safe and
3066 checks are not required.
3068 BOUNDS output parameter holds bounds to be used to check
3069 access (may be NULL).
3071 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3072 innermost accessed component. */
3073 static void
3074 chkp_parse_array_and_component_ref (tree node, tree *ptr,
3075 tree *elt, bool *safe,
3076 bool *bitfield,
3077 tree *bounds,
3078 gimple_stmt_iterator *iter,
3079 bool innermost_bounds)
3081 tree comp_to_narrow = NULL_TREE;
3082 tree last_comp = NULL_TREE;
3083 bool array_ref_found = false;
3084 tree *nodes;
3085 tree var;
3086 int len;
3087 int i;
3089 /* Compute tree height for expression. */
3090 var = node;
3091 len = 1;
3092 while (TREE_CODE (var) == COMPONENT_REF
3093 || TREE_CODE (var) == ARRAY_REF
3094 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
3096 var = TREE_OPERAND (var, 0);
3097 len++;
3100 gcc_assert (len > 1);
3102 /* It is more convenient for us to scan left-to-right,
3103 so walk tree again and put all node to nodes vector
3104 in reversed order. */
3105 nodes = XALLOCAVEC (tree, len);
3106 nodes[len - 1] = node;
3107 for (i = len - 2; i >= 0; i--)
3108 nodes[i] = TREE_OPERAND (nodes[i + 1], 0);
3110 if (bounds)
3111 *bounds = NULL;
3112 *safe = true;
3113 *bitfield = (TREE_CODE (node) == COMPONENT_REF
3114 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
3115 /* To get bitfield address we will need outer element. */
3116 if (*bitfield)
3117 *elt = nodes[len - 2];
3118 else
3119 *elt = NULL_TREE;
3121 /* If we have indirection in expression then compute
3122 outermost structure bounds. Computed bounds may be
3123 narrowed later. */
3124 if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
3126 *safe = false;
3127 *ptr = TREE_OPERAND (nodes[0], 0);
3128 if (bounds)
3129 *bounds = chkp_find_bounds (*ptr, iter);
3131 else
3133 gcc_assert (TREE_CODE (var) == VAR_DECL
3134 || TREE_CODE (var) == PARM_DECL
3135 || TREE_CODE (var) == RESULT_DECL
3136 || TREE_CODE (var) == STRING_CST
3137 || TREE_CODE (var) == SSA_NAME);
3139 *ptr = chkp_build_addr_expr (var);
3142 /* In this loop we are trying to find a field access
3143 requiring narrowing. There are two simple rules
3144 for search:
3145 1. Leftmost array_ref is chosen if any.
3146 2. Rightmost suitable component_ref is chosen if innermost
3147 bounds are required and no array_ref exists. */
3148 for (i = 1; i < len; i++)
3150 var = nodes[i];
3152 if (TREE_CODE (var) == ARRAY_REF)
3154 *safe = false;
3155 array_ref_found = true;
3156 if (flag_chkp_narrow_bounds
3157 && !flag_chkp_narrow_to_innermost_arrray
3158 && (!last_comp
3159 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
3161 comp_to_narrow = last_comp;
3162 break;
3165 else if (TREE_CODE (var) == COMPONENT_REF)
3167 tree field = TREE_OPERAND (var, 1);
3169 if (innermost_bounds
3170 && !array_ref_found
3171 && chkp_narrow_bounds_for_field (field))
3172 comp_to_narrow = var;
3173 last_comp = var;
3175 if (flag_chkp_narrow_bounds
3176 && flag_chkp_narrow_to_innermost_arrray
3177 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
3179 if (bounds)
3180 *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
3181 comp_to_narrow = NULL;
3184 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
3185 /* Nothing to do for it. */
3187 else
3188 gcc_unreachable ();
/* Apply the narrowing selected by the loop above, if any.  */
3191 if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
3192 *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);
/* If innermost bounds were requested but nothing narrowed, fall back
   to the bounds of the whole outer object.  */
3194 if (innermost_bounds && bounds && !*bounds)
3195 *bounds = chkp_find_bounds (*ptr, iter);
3198 /* Compute and return bounds for address of OBJ. */
3199 static tree
3200 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
3202 tree bounds = chkp_get_registered_addr_bounds (obj);
3204 if (bounds)
3205 return bounds;
3207 switch (TREE_CODE (obj))
3209 case VAR_DECL:
3210 case PARM_DECL:
3211 case RESULT_DECL:
3212 bounds = chkp_get_bounds_for_decl_addr (obj);
3213 break;
3215 case STRING_CST:
3216 bounds = chkp_get_bounds_for_string_cst (obj);
3217 break;
3219 case ARRAY_REF:
3220 case COMPONENT_REF:
3222 tree elt;
3223 tree ptr;
3224 bool safe;
3225 bool bitfield;
3227 chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
3228 &bitfield, &bounds, iter, true);
3230 gcc_assert (bounds);
3232 break;
3234 case FUNCTION_DECL:
3235 case LABEL_DECL:
3236 bounds = chkp_get_zero_bounds ();
3237 break;
3239 case MEM_REF:
3240 bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
3241 break;
3243 case REALPART_EXPR:
3244 case IMAGPART_EXPR:
3245 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
3246 break;
3248 default:
3249 if (dump_file && (dump_flags & TDF_DETAILS))
3251 fprintf (dump_file, "chkp_make_addressed_object_bounds: "
3252 "unexpected object of type %s\n",
3253 get_tree_code_name (TREE_CODE (obj)));
3254 print_node (dump_file, "", obj, 0);
3256 internal_error ("chkp_make_addressed_object_bounds: "
3257 "Unexpected tree code %s",
3258 get_tree_code_name (TREE_CODE (obj)));
3261 chkp_register_addr_bounds (obj, bounds);
3263 return bounds;
3266 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3267 to compute bounds if required. Computed bounds should be available at
3268 position pointed by ITER.
3270 If PTR_SRC is NULL_TREE then pointer definition is identified.
3272 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3273 PTR. If PTR is a any memory reference then ITER points to a statement
3274 after which bndldx will be inserterd. In both cases ITER will be updated
3275 to point to the inserted bndldx statement. */
3277 static tree
3278 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3280 tree addr = NULL_TREE;
3281 tree bounds = NULL_TREE;
3283 if (!ptr_src)
3284 ptr_src = ptr;
3286 bounds = chkp_get_registered_bounds (ptr_src);
3288 if (bounds)
3289 return bounds;
3291 switch (TREE_CODE (ptr_src))
3293 case MEM_REF:
3294 case VAR_DECL:
3295 if (BOUNDED_P (ptr_src))
3296 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3297 bounds = chkp_get_zero_bounds ();
3298 else
3300 addr = chkp_build_addr_expr (ptr_src);
3301 bounds = chkp_build_bndldx (addr, ptr, iter);
3303 else
3304 bounds = chkp_get_nonpointer_load_bounds ();
3305 break;
3307 case ARRAY_REF:
3308 case COMPONENT_REF:
3309 addr = get_base_address (ptr_src);
3310 if (DECL_P (addr)
3311 || TREE_CODE (addr) == MEM_REF
3312 || TREE_CODE (addr) == TARGET_MEM_REF)
3314 if (BOUNDED_P (ptr_src))
3315 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3316 bounds = chkp_get_zero_bounds ();
3317 else
3319 addr = chkp_build_addr_expr (ptr_src);
3320 bounds = chkp_build_bndldx (addr, ptr, iter);
3322 else
3323 bounds = chkp_get_nonpointer_load_bounds ();
3325 else
3327 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3328 bounds = chkp_find_bounds (addr, iter);
3330 break;
3332 case PARM_DECL:
3333 gcc_unreachable ();
3334 bounds = chkp_get_bound_for_parm (ptr_src);
3335 break;
3337 case TARGET_MEM_REF:
3338 addr = chkp_build_addr_expr (ptr_src);
3339 bounds = chkp_build_bndldx (addr, ptr, iter);
3340 break;
3342 case SSA_NAME:
3343 bounds = chkp_get_registered_bounds (ptr_src);
3344 if (!bounds)
3346 gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3347 gimple_stmt_iterator phi_iter;
3349 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3351 gcc_assert (bounds);
3353 if (gimple_code (def_stmt) == GIMPLE_PHI)
3355 unsigned i;
3357 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
3359 tree arg = gimple_phi_arg_def (def_stmt, i);
3360 tree arg_bnd;
3361 gimple phi_bnd;
3363 arg_bnd = chkp_find_bounds (arg, NULL);
3365 /* chkp_get_bounds_by_definition created new phi
3366 statement and phi_iter points to it.
3368 Previous call to chkp_find_bounds could create
3369 new basic block and therefore change phi statement
3370 phi_iter points to. */
3371 phi_bnd = gsi_stmt (phi_iter);
3373 add_phi_arg (phi_bnd, arg_bnd,
3374 gimple_phi_arg_edge (def_stmt, i),
3375 UNKNOWN_LOCATION);
3378 /* If all bound phi nodes have their arg computed
3379 then we may finish its computation. See
3380 chkp_finish_incomplete_bounds for more details. */
3381 if (chkp_may_finish_incomplete_bounds ())
3382 chkp_finish_incomplete_bounds ();
3385 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3386 || chkp_incomplete_bounds (bounds));
3388 break;
3390 case ADDR_EXPR:
3391 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3392 break;
3394 case INTEGER_CST:
3395 if (integer_zerop (ptr_src))
3396 bounds = chkp_get_none_bounds ();
3397 else
3398 bounds = chkp_get_invalid_op_bounds ();
3399 break;
3401 default:
3402 if (dump_file && (dump_flags & TDF_DETAILS))
3404 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3405 get_tree_code_name (TREE_CODE (ptr_src)));
3406 print_node (dump_file, "", ptr_src, 0);
3408 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3409 get_tree_code_name (TREE_CODE (ptr_src)));
3412 if (!bounds)
3414 if (dump_file && (dump_flags & TDF_DETAILS))
3416 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3417 print_node (dump_file, "", ptr_src, 0);
3419 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3422 return bounds;
3425 /* Normal case for bounds search without forced narrowing. */
3426 static tree
3427 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3429 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3432 /* Search bounds for pointer PTR loaded from PTR_SRC
3433 by statement *ITER points to. */
3434 static tree
3435 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3437 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3440 /* Helper function which checks type of RHS and finds all pointers in
3441 it. For each found pointer we build it's accesses in LHS and RHS
3442 objects and then call HANDLER for them. Function is used to copy
3443 or initilize bounds for copied object. */
3444 static void
3445 chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
3446 assign_handler handler)
3448 tree type = TREE_TYPE (lhs);
3450 /* We have nothing to do with clobbers. */
3451 if (TREE_CLOBBER_P (rhs))
3452 return;
3454 if (BOUNDED_TYPE_P (type))
3455 handler (lhs, rhs, arg);
3456 else if (RECORD_OR_UNION_TYPE_P (type))
3458 tree field;
3460 if (TREE_CODE (rhs) == CONSTRUCTOR)
3462 unsigned HOST_WIDE_INT cnt;
3463 tree val;
3465 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
3467 if (chkp_type_has_pointer (TREE_TYPE (field)))
3469 tree lhs_field = chkp_build_component_ref (lhs, field);
3470 chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
3474 else
3475 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3476 if (TREE_CODE (field) == FIELD_DECL
3477 && chkp_type_has_pointer (TREE_TYPE (field)))
3479 tree rhs_field = chkp_build_component_ref (rhs, field);
3480 tree lhs_field = chkp_build_component_ref (lhs, field);
3481 chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
3484 else if (TREE_CODE (type) == ARRAY_TYPE)
3486 unsigned HOST_WIDE_INT cur = 0;
3487 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3488 tree etype = TREE_TYPE (type);
3489 tree esize = TYPE_SIZE (etype);
3491 if (TREE_CODE (rhs) == CONSTRUCTOR)
3493 unsigned HOST_WIDE_INT cnt;
3494 tree purp, val, lhs_elem;
3496 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
3498 if (purp && TREE_CODE (purp) == RANGE_EXPR)
3500 tree lo_index = TREE_OPERAND (purp, 0);
3501 tree hi_index = TREE_OPERAND (purp, 1);
3503 for (cur = (unsigned)tree_to_uhwi (lo_index);
3504 cur <= (unsigned)tree_to_uhwi (hi_index);
3505 cur++)
3507 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3508 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3511 else
3513 if (purp)
3515 gcc_assert (TREE_CODE (purp) == INTEGER_CST);
3516 cur = tree_to_uhwi (purp);
3519 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
3521 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3525 /* Copy array only when size is known. */
3526 else if (maxval && !integer_minus_onep (maxval))
3527 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
3529 tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3530 tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
3531 chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
3534 else
3535 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3536 get_tree_code_name (TREE_CODE (type)));
3539 /* Add code to copy bounds for assignment of RHS to LHS.
3540 ARG is an iterator pointing ne code position. */
3541 static void
3542 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3544 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3545 tree bounds = chkp_find_bounds (rhs, iter);
3546 tree addr = chkp_build_addr_expr(lhs);
3548 chkp_build_bndstx (addr, rhs, bounds, iter);
3551 /* Emit static bound initilizers and size vars. */
3552 void
3553 chkp_finish_file (void)
3555 struct varpool_node *node;
3556 struct chkp_ctor_stmt_list stmts;
3558 if (seen_error ())
3559 return;
3561 /* Iterate through varpool and generate bounds initialization
3562 constructors for all statically initialized pointers. */
3563 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3564 stmts.stmts = NULL;
3565 FOR_EACH_VARIABLE (node)
3566 /* Check that var is actually emitted and we need and may initialize
3567 its bounds. */
3568 if (node->need_bounds_init
3569 && !POINTER_BOUNDS_P (node->decl)
3570 && DECL_RTL (node->decl)
3571 && MEM_P (DECL_RTL (node->decl))
3572 && TREE_ASM_WRITTEN (node->decl))
3574 chkp_walk_pointer_assignments (node->decl,
3575 DECL_INITIAL (node->decl),
3576 &stmts,
3577 chkp_add_modification_to_stmt_list);
3579 if (stmts.avail <= 0)
3581 cgraph_build_static_cdtor ('P', stmts.stmts,
3582 MAX_RESERVED_INIT_PRIORITY + 3);
3583 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3584 stmts.stmts = NULL;
3588 if (stmts.stmts)
3589 cgraph_build_static_cdtor ('P', stmts.stmts,
3590 MAX_RESERVED_INIT_PRIORITY + 3);
3592 /* Iterate through varpool and generate bounds initialization
3593 constructors for all static bounds vars. */
3594 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3595 stmts.stmts = NULL;
3596 FOR_EACH_VARIABLE (node)
3597 if (node->need_bounds_init
3598 && POINTER_BOUNDS_P (node->decl)
3599 && TREE_ASM_WRITTEN (node->decl))
3601 tree bnd = node->decl;
3602 tree var;
3604 gcc_assert (DECL_INITIAL (bnd)
3605 && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);
3607 var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
3608 chkp_output_static_bounds (bnd, var, &stmts);
3611 if (stmts.stmts)
3612 cgraph_build_static_cdtor ('B', stmts.stmts,
3613 MAX_RESERVED_INIT_PRIORITY + 2);
3615 delete chkp_static_var_bounds;
3616 delete chkp_bounds_map;
3619 /* An instrumentation function which is called for each statement
3620 having memory access we want to instrument. It inserts check
3621 code and bounds copy code.
3623 ITER points to statement to instrument.
3625 NODE holds memory access in statement to check.
3627 LOC holds the location information for statement.
3629 DIRFLAGS determines whether access is read or write.
3631 ACCESS_OFFS should be added to address used in NODE
3632 before check.
3634 ACCESS_SIZE holds size of checked access.
3636 SAFE indicates if NODE access is safe and should not be
3637 checked. */
3638 static void
3639 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3640 location_t loc, tree dirflag,
3641 tree access_offs, tree access_size,
3642 bool safe)
3644 tree node_type = TREE_TYPE (node);
3645 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3646 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3647 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3648 tree ptr = NULL_TREE; /* a pointer used for dereference */
3649 tree bounds = NULL_TREE;
3651 /* We do not need instrumentation for clobbers. */
3652 if (dirflag == integer_one_node
3653 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3654 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3655 return;
3657 switch (TREE_CODE (node))
3659 case ARRAY_REF:
3660 case COMPONENT_REF:
3662 bool bitfield;
3663 tree elt;
3665 if (safe)
3667 /* We are not going to generate any checks, so do not
3668 generate bounds as well. */
3669 addr_first = chkp_build_addr_expr (node);
3670 break;
3673 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3674 &bitfield, &bounds, iter, false);
3676 /* Break if there is no dereference and operation is safe. */
3678 if (bitfield)
3680 tree field = TREE_OPERAND (node, 1);
3682 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3683 size = DECL_SIZE_UNIT (field);
3685 if (elt)
3686 elt = chkp_build_addr_expr (elt);
3687 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3688 addr_first = fold_build_pointer_plus_loc (loc,
3689 addr_first,
3690 byte_position (field));
3692 else
3693 addr_first = chkp_build_addr_expr (node);
3695 break;
3697 case INDIRECT_REF:
3698 ptr = TREE_OPERAND (node, 0);
3699 addr_first = ptr;
3700 break;
3702 case MEM_REF:
3703 ptr = TREE_OPERAND (node, 0);
3704 addr_first = chkp_build_addr_expr (node);
3705 break;
3707 case TARGET_MEM_REF:
3708 ptr = TMR_BASE (node);
3709 addr_first = chkp_build_addr_expr (node);
3710 break;
3712 case ARRAY_RANGE_REF:
3713 printf("ARRAY_RANGE_REF\n");
3714 debug_gimple_stmt(gsi_stmt(*iter));
3715 debug_tree(node);
3716 gcc_unreachable ();
3717 break;
3719 case BIT_FIELD_REF:
3721 tree offs, rem, bpu;
3723 gcc_assert (!access_offs);
3724 gcc_assert (!access_size);
3726 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3727 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3728 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3729 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3731 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3732 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3733 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3734 size = fold_convert (size_type_node, size);
3736 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3737 dirflag, offs, size, safe);
3738 return;
3740 break;
3742 case VAR_DECL:
3743 case RESULT_DECL:
3744 case PARM_DECL:
3745 if (dirflag != integer_one_node
3746 || DECL_REGISTER (node))
3747 return;
3749 safe = true;
3750 addr_first = chkp_build_addr_expr (node);
3751 break;
3753 default:
3754 return;
3757 /* If addr_last was not computed then use (addr_first + size - 1)
3758 expression to compute it. */
3759 if (!addr_last)
3761 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3762 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3765 /* Shift both first_addr and last_addr by access_offs if specified. */
3766 if (access_offs)
3768 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3769 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3772 /* Generate bndcl/bndcu checks if memory access is not safe. */
3773 if (!safe)
3775 gimple_stmt_iterator stmt_iter = *iter;
3777 if (!bounds)
3778 bounds = chkp_find_bounds (ptr, iter);
3780 chkp_check_mem_access (addr_first, addr_last, bounds,
3781 stmt_iter, loc, dirflag);
3784 /* We need to store bounds in case pointer is stored. */
3785 if (dirflag == integer_one_node
3786 && chkp_type_has_pointer (node_type)
3787 && flag_chkp_store_bounds)
3789 gimple stmt = gsi_stmt (*iter);
3790 tree rhs1 = gimple_assign_rhs1 (stmt);
3791 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3793 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3794 chkp_walk_pointer_assignments (node, rhs1, iter,
3795 chkp_copy_bounds_for_elem);
3796 else
3798 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3799 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
3804 /* Add code to copy bounds for all pointers copied
3805 in ASSIGN created during inline of EDGE. */
3806 void
3807 chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
3809 tree lhs = gimple_assign_lhs (assign);
3810 tree rhs = gimple_assign_rhs1 (assign);
3811 gimple_stmt_iterator iter = gsi_for_stmt (assign);
3813 if (!flag_chkp_store_bounds)
3814 return;
3816 chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);
3818 /* We should create edges for all created calls to bndldx and bndstx. */
3819 while (gsi_stmt (iter) != assign)
3821 gimple stmt = gsi_stmt (iter);
3822 if (gimple_code (stmt) == GIMPLE_CALL)
3824 tree fndecl = gimple_call_fndecl (stmt);
3825 struct cgraph_node *callee = cgraph_node::get_create (fndecl);
3826 struct cgraph_edge *new_edge;
3828 gcc_assert (fndecl == chkp_bndstx_fndecl
3829 || fndecl == chkp_bndldx_fndecl
3830 || fndecl == chkp_ret_bnd_fndecl);
3832 new_edge = edge->caller->create_edge (callee, stmt, edge->count,
3833 edge->frequency);
3834 new_edge->frequency = compute_call_stmt_bb_frequency
3835 (edge->caller->decl, gimple_bb (stmt));
3837 gsi_prev (&iter);
3841 /* Some code transformation made during instrumentation pass
3842 may put code into inconsistent state. Here we find and fix
3843 such flaws. */
3844 void
3845 chkp_fix_cfg ()
3847 basic_block bb;
3848 gimple_stmt_iterator i;
3850 /* We could insert some code right after stmt which ends bb.
3851 We wanted to put this code on fallthru edge but did not
3852 add new edges from the beginning because it may cause new
3853 phi node creation which may be incorrect due to incomplete
3854 bound phi nodes. */
3855 FOR_ALL_BB_FN (bb, cfun)
3856 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
3858 gimple stmt = gsi_stmt (i);
3859 gimple_stmt_iterator next = i;
3861 gsi_next (&next);
3863 if (stmt_ends_bb_p (stmt)
3864 && !gsi_end_p (next))
3866 edge fall = find_fallthru_edge (bb->succs);
3867 basic_block dest = NULL;
3868 int flags = 0;
3870 gcc_assert (fall);
3872 /* We cannot split abnormal edge. Therefore we
3873 store its params, make it regular and then
3874 rebuild abnormal edge after split. */
3875 if (fall->flags & EDGE_ABNORMAL)
3877 flags = fall->flags & ~EDGE_FALLTHRU;
3878 dest = fall->dest;
3880 fall->flags &= ~EDGE_COMPLEX;
3883 while (!gsi_end_p (next))
3885 gimple next_stmt = gsi_stmt (next);
3886 gsi_remove (&next, false);
3887 gsi_insert_on_edge (fall, next_stmt);
3890 gsi_commit_edge_inserts ();
3892 /* Re-create abnormal edge. */
3893 if (dest)
3894 make_edge (bb, dest, flags);
3899 /* Walker callback for chkp_replace_function_pointers. Replaces
3900 function pointer in the specified operand with pointer to the
3901 instrumented function version. */
3902 static tree
3903 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
3904 void *data ATTRIBUTE_UNUSED)
3906 if (TREE_CODE (*op) == FUNCTION_DECL
3907 && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op))
3908 /* Do not replace builtins for now. */
3909 && DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN)
3911 struct cgraph_node *node = cgraph_node::get_create (*op);
3913 if (!node->instrumentation_clone)
3914 chkp_maybe_create_clone (*op);
3916 *op = node->instrumented_version->decl;
3917 *walk_subtrees = 0;
3920 return NULL;
3923 /* This function searches for function pointers in statement
3924 pointed by GSI and replaces them with pointers to instrumented
3925 function versions. */
3926 static void
3927 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
3929 gimple stmt = gsi_stmt (*gsi);
3930 /* For calls we want to walk call args only. */
3931 if (gimple_code (stmt) == GIMPLE_CALL)
3933 unsigned i;
3934 for (i = 0; i < gimple_call_num_args (stmt); i++)
3935 walk_tree (gimple_call_arg_ptr (stmt, i),
3936 chkp_replace_function_pointer, NULL, NULL);
3938 else
3939 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
3942 /* This function instruments all statements working with memory,
3943 calls and rets.
3945 It also removes excess statements from static initializers. */
3946 static void
3947 chkp_instrument_function (void)
3949 basic_block bb, next;
3950 gimple_stmt_iterator i;
3951 enum gimple_rhs_class grhs_class;
3952 bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
3954 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
3957 next = bb->next_bb;
3958 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3960 gimple s = gsi_stmt (i);
3962 /* Skip statement marked to not be instrumented. */
3963 if (chkp_marked_stmt_p (s))
3965 gsi_next (&i);
3966 continue;
3969 chkp_replace_function_pointers (&i);
3971 switch (gimple_code (s))
3973 case GIMPLE_ASSIGN:
3974 chkp_process_stmt (&i, gimple_assign_lhs (s),
3975 gimple_location (s), integer_one_node,
3976 NULL_TREE, NULL_TREE, safe);
3977 chkp_process_stmt (&i, gimple_assign_rhs1 (s),
3978 gimple_location (s), integer_zero_node,
3979 NULL_TREE, NULL_TREE, safe);
3980 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
3981 if (grhs_class == GIMPLE_BINARY_RHS)
3982 chkp_process_stmt (&i, gimple_assign_rhs2 (s),
3983 gimple_location (s), integer_zero_node,
3984 NULL_TREE, NULL_TREE, safe);
3985 break;
3987 case GIMPLE_RETURN:
3988 if (gimple_return_retval (s) != NULL_TREE)
3990 chkp_process_stmt (&i, gimple_return_retval (s),
3991 gimple_location (s),
3992 integer_zero_node,
3993 NULL_TREE, NULL_TREE, safe);
3995 /* Additionally we need to add bounds
3996 to return statement. */
3997 chkp_add_bounds_to_ret_stmt (&i);
3999 break;
4001 case GIMPLE_CALL:
4002 chkp_add_bounds_to_call_stmt (&i);
4003 break;
4005 default:
4009 gsi_next (&i);
4011 /* We do not need any actual pointer stores in checker
4012 static initializer. */
4013 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4014 && gimple_code (s) == GIMPLE_ASSIGN
4015 && gimple_store_p (s))
4017 gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4018 gsi_remove (&del_iter, true);
4019 unlink_stmt_vdef (s);
4020 release_defs(s);
4023 bb = next;
4025 while (bb);
4027 /* Some input params may have bounds and be address taken. In this case
4028 we should store incoming bounds into bounds table. */
4029 tree arg;
4030 if (flag_chkp_store_bounds)
4031 for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4032 if (TREE_ADDRESSABLE (arg))
4034 if (BOUNDED_P (arg))
4036 tree bounds = chkp_get_next_bounds_parm (arg);
4037 tree def_ptr = ssa_default_def (cfun, arg);
4038 gimple_stmt_iterator iter
4039 = gsi_start_bb (chkp_get_entry_block ());
4040 chkp_build_bndstx (chkp_build_addr_expr (arg),
4041 def_ptr ? def_ptr : arg,
4042 bounds, &iter);
4044 /* Skip bounds arg. */
4045 arg = TREE_CHAIN (arg);
4047 else if (chkp_type_has_pointer (TREE_TYPE (arg)))
4049 tree orig_arg = arg;
4050 bitmap slots = BITMAP_ALLOC (NULL);
4051 gimple_stmt_iterator iter
4052 = gsi_start_bb (chkp_get_entry_block ());
4053 bitmap_iterator bi;
4054 unsigned bnd_no;
4056 chkp_find_bound_slots (TREE_TYPE (arg), slots);
4058 EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4060 tree bounds = chkp_get_next_bounds_parm (arg);
4061 HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4062 tree addr = chkp_build_addr_expr (orig_arg);
4063 tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4064 build_int_cst (ptr_type_node, offs));
4065 chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4066 bounds, &iter);
4068 arg = DECL_CHAIN (arg);
4070 BITMAP_FREE (slots);
4075 /* Find init/null/copy_ptr_bounds calls and replace them
4076 with assignments. It should allow better code
4077 optimization. */
4079 static void
4080 chkp_remove_useless_builtins ()
4082 basic_block bb;
4083 gimple_stmt_iterator gsi;
4085 FOR_EACH_BB_FN (bb, cfun)
4087 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4089 gimple stmt = gsi_stmt (gsi);
4090 tree fndecl;
4091 enum built_in_function fcode;
4093 /* Find builtins returning first arg and replace
4094 them with assignments. */
4095 if (gimple_code (stmt) == GIMPLE_CALL
4096 && (fndecl = gimple_call_fndecl (stmt))
4097 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4098 && (fcode = DECL_FUNCTION_CODE (fndecl))
4099 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4100 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4101 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4102 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4104 tree res = gimple_call_arg (stmt, 0);
4105 update_call_from_tree (&gsi, res);
4106 stmt = gsi_stmt (gsi);
4107 update_stmt (stmt);
4113 /* Initialize pass. */
4114 static void
4115 chkp_init (void)
4117 basic_block bb;
4118 gimple_stmt_iterator i;
4120 in_chkp_pass = true;
4122 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4123 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4124 chkp_unmark_stmt (gsi_stmt (i));
4126 chkp_invalid_bounds = new hash_set<tree>;
4127 chkp_completed_bounds_set = new hash_set<tree>;
4128 delete chkp_reg_bounds;
4129 chkp_reg_bounds = new hash_map<tree, tree>;
4130 delete chkp_bound_vars;
4131 chkp_bound_vars = new hash_map<tree, tree>;
4132 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4133 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4134 delete chkp_bounds_map;
4135 chkp_bounds_map = new hash_map<tree, tree>;
4136 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
4138 entry_block = NULL;
4139 zero_bounds = NULL_TREE;
4140 none_bounds = NULL_TREE;
4141 incomplete_bounds = integer_zero_node;
4142 tmp_var = NULL_TREE;
4143 size_tmp_var = NULL_TREE;
4145 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4147 /* We create these constant bounds once for each object file.
4148 These symbols go to comdat section and result in single copy
4149 of each one in the final binary. */
4150 chkp_get_zero_bounds_var ();
4151 chkp_get_none_bounds_var ();
4153 calculate_dominance_info (CDI_DOMINATORS);
4154 calculate_dominance_info (CDI_POST_DOMINATORS);
4156 bitmap_obstack_initialize (NULL);
4159 /* Finalize instrumentation pass. */
4160 static void
4161 chkp_fini (void)
4163 in_chkp_pass = false;
4165 delete chkp_invalid_bounds;
4166 delete chkp_completed_bounds_set;
4167 delete chkp_reg_addr_bounds;
4168 delete chkp_incomplete_bounds_map;
4170 free_dominance_info (CDI_DOMINATORS);
4171 free_dominance_info (CDI_POST_DOMINATORS);
4173 bitmap_obstack_release (NULL);
4176 /* Main instrumentation pass function. */
4177 static unsigned int
4178 chkp_execute (void)
4180 chkp_init ();
4182 chkp_instrument_function ();
4184 chkp_remove_useless_builtins ();
4186 chkp_function_mark_instrumented (cfun->decl);
4188 chkp_fix_cfg ();
4190 chkp_fini ();
4192 return 0;
4195 /* Instrumentation pass gate. */
4196 static bool
4197 chkp_gate (void)
4199 return cgraph_node::get (cfun->decl)->instrumentation_clone
4200 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4203 namespace {
4205 const pass_data pass_data_chkp =
4207 GIMPLE_PASS, /* type */
4208 "chkp", /* name */
4209 OPTGROUP_NONE, /* optinfo_flags */
4210 TV_NONE, /* tv_id */
4211 PROP_ssa | PROP_cfg, /* properties_required */
4212 0, /* properties_provided */
4213 0, /* properties_destroyed */
4214 0, /* todo_flags_start */
4215 TODO_verify_il
4216 | TODO_update_ssa /* todo_flags_finish */
4219 class pass_chkp : public gimple_opt_pass
4221 public:
4222 pass_chkp (gcc::context *ctxt)
4223 : gimple_opt_pass (pass_data_chkp, ctxt)
4226 /* opt_pass methods: */
4227 virtual opt_pass * clone ()
4229 return new pass_chkp (m_ctxt);
4232 virtual bool gate (function *)
4234 return chkp_gate ();
4237 virtual unsigned int execute (function *)
4239 return chkp_execute ();
4242 }; // class pass_chkp
4244 } // anon namespace
4246 gimple_opt_pass *
4247 make_pass_chkp (gcc::context *ctxt)
4249 return new pass_chkp (ctxt);
4252 #include "gt-tree-chkp.h"