Fixups after merge
[official-gcc.git] / gcc / tree-chkp.c
blobaadb1cf541cf041baaa216ed04351902c1437997
/* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree-core.h"
25 #include "stor-layout.h"
26 #include "varasm.h"
27 #include "tree.h"
28 #include "target.h"
29 #include "tree-iterator.h"
30 #include "tree-cfg.h"
31 #include "langhooks.h"
32 #include "tree-pass.h"
33 #include "diagnostic.h"
34 #include "ggc.h"
35 #include "is-a.h"
36 #include "cfgloop.h"
37 #include "stringpool.h"
38 #include "tree-ssa-alias.h"
39 #include "tree-ssanames.h"
40 #include "tree-ssa-operands.h"
41 #include "tree-ssa-address.h"
42 #include "tree-ssa.h"
43 #include "predict.h"
44 #include "dominance.h"
45 #include "cfg.h"
46 #include "basic-block.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "gimple-expr.h"
49 #include "gimple.h"
50 #include "tree-phinodes.h"
51 #include "gimple-ssa.h"
52 #include "ssa-iterators.h"
53 #include "gimple-pretty-print.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "gimplify-me.h"
57 #include "print-tree.h"
58 #include "expr.h"
59 #include "tree-ssa-propagate.h"
60 #include "gimple-fold.h"
61 #include "tree-chkp.h"
62 #include "gimple-walk.h"
63 #include "rtl.h" /* For MEM_P, assign_temp. */
64 #include "tree-dfa.h"
65 #include "ipa-ref.h"
66 #include "lto-streamer.h"
67 #include "cgraph.h"
68 #include "ipa-chkp.h"
69 #include "params.h"
70 #include "ipa-chkp.h"
71 #include "params.h"
73 /* Pointer Bounds Checker instruments code with memory checks to find
74 out-of-bounds memory accesses. Checks are performed by computing
75 bounds for each pointer and then comparing address of accessed
76 memory before pointer dereferencing.
78 1. Function clones.
80 See ipa-chkp.c.
82 2. Instrumentation.
84 There are few things to instrument:
86 a) Memory accesses - add checker calls to check address of accessed memory
87 against bounds of dereferenced pointer. Obviously safe memory
88 accesses like static variable access does not have to be instrumented
89 with checks.
91 Example:
93 val_2 = *p_1;
95 with 4 bytes access is transformed into:
97 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
98 D.1_4 = p_1 + 3;
99 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
100 val_2 = *p_1;
102 where __bound_tmp.1_3 are bounds computed for pointer p_1,
103 __builtin___chkp_bndcl is a lower bound check and
104 __builtin___chkp_bndcu is an upper bound check.
106 b) Pointer stores.
108 When pointer is stored in memory we need to store its bounds. To
109 achieve compatibility of instrumented code with regular codes
110 we have to keep data layout and store bounds in special bound tables
111 via special checker call. Implementation of bounds table may vary for
112 different platforms. It has to associate pointer value and its
113 location (it is required because we may have two equal pointers
114 with different bounds stored in different places) with bounds.
115 Another checker builtin allows to get bounds for specified pointer
116 loaded from specified location.
118 Example:
120 buf1[i_1] = &buf2;
122 is transformed into:
124 buf1[i_1] = &buf2;
125 D.1_2 = &buf1[i_1];
126 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
128 where __bound_tmp.1_2 are bounds of &buf2.
130 c) Static initialization.
132 The special case of pointer store is static pointer initialization.
133 Bounds initialization is performed in a few steps:
134 - register all static initializations in front-end using
135 chkp_register_var_initializer
136 - when file compilation finishes we create functions with special
137 attribute 'chkp ctor' and put explicit initialization code
138 (assignments) for all statically initialized pointers.
139 - when checker constructor is compiled checker pass adds required
140 bounds initialization for all statically initialized pointers
141 - since we do not actually need excess pointers initialization
142 in checker constructor we remove such assignments from them
144 d) Calls.
146 For each call in the code we add additional arguments to pass
147 bounds for pointer arguments. We determine type of call arguments
148 using arguments list from function declaration; if function
149 declaration is not available we use function type; otherwise
150 (e.g. for unnamed arguments) we use type of passed value. Function
151 declaration/type is replaced with the instrumented one.
153 Example:
155 val_1 = foo (&buf1, &buf2, &buf1, 0);
157 is translated into:
159 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
160 &buf1, __bound_tmp.1_2, 0);
162 e) Returns.
164 If function returns a pointer value we have to return bounds also.
165 A new operand was added for return statement to hold returned bounds.
167 Example:
169 return &_buf1;
171 is transformed into
173 return &_buf1, __bound_tmp.1_1;
175 3. Bounds computation.
177 Compiler is fully responsible for computing bounds to be used for each
178 memory access. The first step for bounds computation is to find the
179 origin of pointer dereferenced for memory access. Basing on pointer
180 origin we define a way to compute its bounds. There are just few
181 possible cases:
183 a) Pointer is returned by call.
185 In this case we use corresponding checker builtin method to obtain returned
186 bounds.
188 Example:
190 buf_1 = malloc (size_2);
191 foo (buf_1);
193 is translated into:
195 buf_1 = malloc (size_2);
196 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
197 foo (buf_1, __bound_tmp.1_3);
199 b) Pointer is an address of an object.
201 In this case compiler tries to compute objects size and create corresponding
202 bounds. If object has incomplete type then special checker builtin is used to
203 obtain its size at runtime.
205 Example:
207 foo ()
209 <unnamed type> __bound_tmp.3;
210 static int buf[100];
212 <bb 3>:
213 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
215 <bb 2>:
216 return &buf, __bound_tmp.3_2;
219 Example:
221 Address of an object 'extern int buf[]' with incomplete type is
222 returned.
224 foo ()
226 <unnamed type> __bound_tmp.4;
227 long unsigned int __size_tmp.3;
229 <bb 3>:
230 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
231 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
233 <bb 2>:
234 return &buf, __bound_tmp.4_3;
237 c) Pointer is the result of object narrowing.
239 It happens when we use pointer to an object to compute pointer to a part
240 of an object. E.g. we take pointer to a field of a structure. In this
241 case we perform bounds intersection using bounds of original object and
242 bounds of object's part (which are computed basing on its type).
244 There may be some debatable questions about when narrowing should occur
245 and when it should not. To avoid false bound violations in correct
246 programs we do not perform narrowing when address of an array element is
247 obtained (it has address of the whole array) and when address of the first
248 structure field is obtained (because it is guaranteed to be equal to
249 address of the whole structure and it is legal to cast it back to structure).
251 Default narrowing behavior may be changed using compiler flags.
253 Example:
255 In this example address of the second structure field is returned.
257 foo (struct A * p, __bounds_type __bounds_of_p)
259 <unnamed type> __bound_tmp.3;
260 int * _2;
261 int * _5;
263 <bb 2>:
264 _5 = &p_1(D)->second_field;
265 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
266 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
267 __bounds_of_p_3(D));
268 _2 = &p_1(D)->second_field;
269 return _2, __bound_tmp.3_8;
272 Example:
274 In this example address of the first field of array element is returned.
276 foo (struct A * p, __bounds_type __bounds_of_p, int i)
278 long unsigned int _3;
279 long unsigned int _4;
280 struct A * _6;
281 int * _7;
283 <bb 2>:
284 _3 = (long unsigned int) i_1(D);
285 _4 = _3 * 8;
286 _6 = p_5(D) + _4;
287 _7 = &_6->first_field;
288 return _7, __bounds_of_p_2(D);
292 d) Pointer is the result of pointer arithmetic or type cast.
294 In this case bounds of the base pointer are used. In case of binary
295 operation producing a pointer we are analyzing data flow further
296 looking for operand's bounds. One operand is considered as a base
297 if it has some valid bounds. If we fall into a case when none of
298 operands (or both of them) has valid bounds, a default bounds value
299 is used.
301 Trying to find out bounds for binary operations we may fall into
302 cyclic dependencies for pointers. To avoid infinite recursion all
303 walked phi nodes instantly obtain corresponding bounds but created
304 bounds are marked as incomplete. It helps us to stop DF walk during
305 bounds search.
307 When we reach pointer source, some args of incomplete bounds phi obtain
308 valid bounds and those values are propagated further through phi nodes.
309 If no valid bounds were found for phi node then we mark its result as
310 invalid bounds. Process stops when all incomplete bounds become either
311 valid or invalid and we are able to choose a pointer base.
313 e) Pointer is loaded from the memory.
315 In this case we just need to load bounds from the bounds table.
317 Example:
319 foo ()
321 <unnamed type> __bound_tmp.3;
322 static int * buf;
323 int * _2;
325 <bb 2>:
326 _2 = buf;
327 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
328 return _2, __bound_tmp.3_4;
333 typedef void (*assign_handler)(tree, tree, void *);
335 static tree chkp_get_zero_bounds ();
336 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
337 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
338 gimple_stmt_iterator *iter);
339 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
340 tree *elt, bool *safe,
341 bool *bitfield,
342 tree *bounds,
343 gimple_stmt_iterator *iter,
344 bool innermost_bounds);
346 #define chkp_bndldx_fndecl \
347 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
348 #define chkp_bndstx_fndecl \
349 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
350 #define chkp_checkl_fndecl \
351 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
352 #define chkp_checku_fndecl \
353 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
354 #define chkp_bndmk_fndecl \
355 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
356 #define chkp_ret_bnd_fndecl \
357 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
358 #define chkp_intersect_fndecl \
359 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
360 #define chkp_narrow_bounds_fndecl \
361 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
362 #define chkp_sizeof_fndecl \
363 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
364 #define chkp_extract_lower_fndecl \
365 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
366 #define chkp_extract_upper_fndecl \
367 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
369 static GTY (()) tree chkp_uintptr_type;
371 static GTY (()) tree chkp_zero_bounds_var;
372 static GTY (()) tree chkp_none_bounds_var;
374 static GTY (()) basic_block entry_block;
375 static GTY (()) tree zero_bounds;
376 static GTY (()) tree none_bounds;
377 static GTY (()) tree incomplete_bounds;
378 static GTY (()) tree tmp_var;
379 static GTY (()) tree size_tmp_var;
380 static GTY (()) bitmap chkp_abnormal_copies;
382 struct hash_set<tree> *chkp_invalid_bounds;
383 struct hash_set<tree> *chkp_completed_bounds_set;
384 struct hash_map<tree, tree> *chkp_reg_bounds;
385 struct hash_map<tree, tree> *chkp_bound_vars;
386 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
387 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
388 struct hash_map<tree, tree> *chkp_bounds_map;
389 struct hash_map<tree, tree> *chkp_static_var_bounds;
391 static bool in_chkp_pass;
393 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
394 #define CHKP_SIZE_TMP_NAME "__size_tmp"
395 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
396 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
397 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
398 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
399 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
401 /* Static checker constructors may become very large and their
402 compilation with optimization may take too much time.
403 Therefore we put a limit to number of statements in one
404 constructor. Tests with 100 000 statically initialized
405 pointers showed following compilation times on Sandy Bridge
406 server (used -O2):
407 limit 100 => ~18 sec.
408 limit 300 => ~22 sec.
409 limit 1000 => ~30 sec.
410 limit 3000 => ~49 sec.
411 limit 5000 => ~55 sec.
412 limit 10000 => ~76 sec.
413 limit 100000 => ~532 sec. */
414 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
/* Accumulator for statements emitted into a static checker
   constructor.  AVAIL counts how many more statements may be
   added before the constructor is flushed (it starts at
   MAX_STMTS_IN_STATIC_CHKP_CTOR and is decremented as
   statements are appended).  */
struct chkp_ctor_stmt_list
{
  tree stmts;
  int avail;
};
422 /* Return 1 if function FNDECL is instrumented by Pointer
423 Bounds Checker. */
424 bool
425 chkp_function_instrumented_p (tree fndecl)
427 return fndecl
428 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
431 /* Mark function FNDECL as instrumented. */
432 void
433 chkp_function_mark_instrumented (tree fndecl)
435 if (chkp_function_instrumented_p (fndecl))
436 return;
438 DECL_ATTRIBUTES (fndecl)
439 = tree_cons (get_identifier ("chkp instrumented"), NULL,
440 DECL_ATTRIBUTES (fndecl));
443 /* Return true when STMT is builtin call to instrumentation function
444 corresponding to CODE. */
446 bool
447 chkp_gimple_call_builtin_p (gimple call,
448 enum built_in_function code)
450 tree fndecl;
451 if (is_gimple_call (call)
452 && (fndecl = targetm.builtin_chkp_function (code))
453 && gimple_call_fndecl (call) == fndecl)
454 return true;
455 return false;
/* Emit code to store zero bounds for PTR located at MEM.
   Expands a bounds-store (bndstx) builtin call assigning a
   zero-bounds value to the bounds slot associated with MEM.  */
void
chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
{
  tree zero_bnd, bnd, addr, bndstx;

  /* Either reuse the preallocated static zero-bounds variable or
     build an explicit bndmk (0, 0) call.  */
  if (flag_chkp_use_static_const_bounds)
    zero_bnd = chkp_get_zero_bounds_var ();
  else
    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
					    integer_zero_node);
  /* Temporary to hold the zero bounds during expansion.  */
  bnd = make_tree (pointer_bounds_type_node,
		   assign_temp (pointer_bounds_type_node, 0, 1));
  addr = build1 (ADDR_EXPR,
		 build_pointer_type (TREE_TYPE (mem)), mem);
  bndstx = chkp_build_bndstx_call (addr, ptr, bnd);

  expand_assignment (bnd, zero_bnd, false);
  expand_normal (bndstx);
}
/* Mark statement S to not be instrumented.
   Uses the GF_PLF_1 pass-local flag as the "skip" marker.  */
static void
chkp_mark_stmt (gimple s)
{
  gimple_set_plf (s, GF_PLF_1, true);
}
/* Mark statement S to be instrumented (clear the GF_PLF_1
   "skip" marker set by chkp_mark_stmt).  */
static void
chkp_unmark_stmt (gimple s)
{
  gimple_set_plf (s, GF_PLF_1, false);
}
/* Return 1 if statement S should not be instrumented
   (i.e. its GF_PLF_1 flag was set by chkp_mark_stmt).  */
static bool
chkp_marked_stmt_p (gimple s)
{
  return gimple_plf (s, GF_PLF_1);
}
500 /* Get var to be used for bound temps. */
501 static tree
502 chkp_get_tmp_var (void)
504 if (!tmp_var)
505 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
507 return tmp_var;
510 /* Get SSA_NAME to be used as temp. */
511 static tree
512 chkp_get_tmp_reg (gimple stmt)
514 if (in_chkp_pass)
515 return make_ssa_name (chkp_get_tmp_var (), stmt);
517 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
518 CHKP_BOUND_TMP_NAME);
521 /* Get var to be used for size temps. */
522 static tree
523 chkp_get_size_tmp_var (void)
525 if (!size_tmp_var)
526 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
528 return size_tmp_var;
531 /* Register bounds BND for address of OBJ. */
532 static void
533 chkp_register_addr_bounds (tree obj, tree bnd)
535 if (bnd == incomplete_bounds)
536 return;
538 chkp_reg_addr_bounds->put (obj, bnd);
540 if (dump_file && (dump_flags & TDF_DETAILS))
542 fprintf (dump_file, "Regsitered bound ");
543 print_generic_expr (dump_file, bnd, 0);
544 fprintf (dump_file, " for address of ");
545 print_generic_expr (dump_file, obj, 0);
546 fprintf (dump_file, "\n");
550 /* Return bounds registered for address of OBJ. */
551 static tree
552 chkp_get_registered_addr_bounds (tree obj)
554 tree *slot = chkp_reg_addr_bounds->get (obj);
555 return slot ? *slot : NULL_TREE;
/* Mark BOUNDS as completed by adding them to the completed
   bounds set.  */
static void
chkp_mark_completed_bounds (tree bounds)
{
  chkp_completed_bounds_set->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " as completed\n");
    }
}
/* Return 1 if BOUNDS were marked as completed and 0 otherwise.  */
static bool
chkp_completed_bounds (tree bounds)
{
  return chkp_completed_bounds_set->contains (bounds);
}
/* Clear completed bound marks.  The set is recreated rather
   than emptied in place.  */
static void
chkp_erase_completed_bounds (void)
{
  delete chkp_completed_bounds_set;
  chkp_completed_bounds_set = new hash_set<tree>;
}
587 /* Mark BOUNDS associated with PTR as incomplete. */
588 static void
589 chkp_register_incomplete_bounds (tree bounds, tree ptr)
591 chkp_incomplete_bounds_map->put (bounds, ptr);
593 if (dump_file && (dump_flags & TDF_DETAILS))
595 fprintf (dump_file, "Regsitered incomplete bounds ");
596 print_generic_expr (dump_file, bounds, 0);
597 fprintf (dump_file, " for ");
598 print_generic_expr (dump_file, ptr, 0);
599 fprintf (dump_file, "\n");
603 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
604 static bool
605 chkp_incomplete_bounds (tree bounds)
607 if (bounds == incomplete_bounds)
608 return true;
610 if (chkp_completed_bounds (bounds))
611 return false;
613 return chkp_incomplete_bounds_map->get (bounds) != NULL;
/* Clear incompleted bound marks.  The map is recreated rather
   than emptied in place.  */
static void
chkp_erase_incomplete_bounds (void)
{
  delete chkp_incomplete_bounds_map;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
}
/* Build and return bndmk call which creates bounds for structure
   pointed by PTR.  Structure should have complete type (its
   TYPE_SIZE must be non-null, asserted below).  */
tree
chkp_make_bounds_for_struct_addr (tree ptr)
{
  tree type = TREE_TYPE (ptr);
  tree size;

  gcc_assert (POINTER_TYPE_P (type));

  /* Size of the pointed-to structure, in bits.  */
  size = TYPE_SIZE (TREE_TYPE (type));

  gcc_assert (size);

  return build_call_nary (pointer_bounds_type_node,
			  build_fold_addr_expr (chkp_bndmk_fndecl),
			  2, ptr, size);
}
/* Traversal function for chkp_may_finish_incomplete_bounds.
   Set RES to 0 if at least one argument of phi statement
   defining bounds (passed in KEY arg) is unknown.
   Traversal stops when first unknown phi argument is found.  */
bool
chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
			      bool *res)
{
  gimple phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);
      if (!phi_arg)
	{
	  *res = false;
	  /* Do not need to traverse further.  */
	  return false;
	}
    }

  /* All args known; continue traversal.  */
  return true;
}
674 /* Return 1 if all phi nodes created for bounds have their
675 arguments computed. */
676 static bool
677 chkp_may_finish_incomplete_bounds (void)
679 bool res = true;
681 chkp_incomplete_bounds_map
682 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
684 return res;
/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for bounds phi node.  SLOT holds the pointer
   SSA name whose phi mirrors the bounds phi; for each of its
   args the corresponding bounds are looked up and appended to
   the bounds phi over the same edge.  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
			   void *res ATTRIBUTE_UNUSED)
{
  tree ptr = *slot;
  gphi *bounds_phi;
  gphi *ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
  ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));

  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      add_phi_arg (bounds_phi, bound_arg,
		   gimple_phi_arg_edge (ptr_phi, i),
		   UNKNOWN_LOCATION);
    }

  return true;
}
/* Mark BOUNDS as invalid by adding them to the invalid
   bounds set.  */
static void
chkp_mark_invalid_bounds (tree bounds)
{
  chkp_invalid_bounds->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " as invalid\n");
    }
}
731 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
732 static bool
733 chkp_valid_bounds (tree bounds)
735 if (bounds == zero_bounds || bounds == none_bounds)
736 return false;
738 return !chkp_invalid_bounds->contains (bounds);
/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of phi nodes trying to find
   valid completed bounds.  If there is at least one
   such arg then bounds produced by phi node are marked
   as valid completed bounds and all phi args are
   recomputed.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  /* Nothing to do if these bounds were already resolved.  */
  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
	{
	  /* Found a valid completed source; mark the phi result
	     completed and recompute all of its arguments.  */
	  *res = true;
	  chkp_mark_completed_bounds (bounds);
	  chkp_recompute_phi_bounds (bounds, slot, NULL);
	  return true;
	}
    }

  /* Continue traversal even if nothing was found here.  */
  return true;
}
/* Helper function for chkp_finish_incomplete_bounds.
   Marks all incompleted bounds as invalid.  Bounds that were
   completed earlier are left untouched.  */
bool
chkp_mark_invalid_bounds_walker (tree const &bounds,
				 tree *slot ATTRIBUTE_UNUSED,
				 void *res ATTRIBUTE_UNUSED)
{
  if (!chkp_completed_bounds (bounds))
    {
      chkp_mark_invalid_bounds (bounds);
      chkp_mark_completed_bounds (bounds);
    }

  return true;
}
795 /* When all bound phi nodes have all their args computed
796 we have enough info to find valid bounds. We iterate
797 through all incompleted bounds searching for valid
798 bounds. Found valid bounds are marked as completed
799 and all remaining incompleted bounds are recomputed.
800 Process continues until no new valid bounds may be
801 found. All remained incompleted bounds are marked as
802 invalid (i.e. have no valid source of bounds). */
803 static void
804 chkp_finish_incomplete_bounds (void)
806 bool found_valid;
808 while (found_valid)
810 found_valid = false;
812 chkp_incomplete_bounds_map->
813 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
815 if (found_valid)
816 chkp_incomplete_bounds_map->
817 traverse<void *, chkp_recompute_phi_bounds> (NULL);
820 chkp_incomplete_bounds_map->
821 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
822 chkp_incomplete_bounds_map->
823 traverse<void *, chkp_recompute_phi_bounds> (NULL);
825 chkp_erase_completed_bounds ();
826 chkp_erase_incomplete_bounds ();
829 /* Return 1 if type TYPE is a pointer type or a
830 structure having a pointer type as one of its fields.
831 Otherwise return 0. */
832 bool
833 chkp_type_has_pointer (const_tree type)
835 bool res = false;
837 if (BOUNDED_TYPE_P (type))
838 res = true;
839 else if (RECORD_OR_UNION_TYPE_P (type))
841 tree field;
843 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
844 if (TREE_CODE (field) == FIELD_DECL)
845 res = res || chkp_type_has_pointer (TREE_TYPE (field));
847 else if (TREE_CODE (type) == ARRAY_TYPE)
848 res = chkp_type_has_pointer (TREE_TYPE (type));
850 return res;
/* Return the number of bound slots required for type TYPE:
   0 for a null or unbounded type, 1 for a pointer type, and
   for records/unions the number of distinct bound slots found
   by chkp_find_bound_slots.  */
unsigned
chkp_type_bounds_count (const_tree type)
{
  unsigned res = 0;

  if (!type)
    res = 0;
  else if (BOUNDED_TYPE_P (type))
    res = 1;
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      bitmap have_bound;

      /* Collect bound slots into a temporary bitmap and count
	 the set bits.  */
      bitmap_obstack_initialize (NULL);
      have_bound = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (type, have_bound);
      res = bitmap_count_bits (have_bound);
      BITMAP_FREE (have_bound);
      bitmap_obstack_release (NULL);
    }

  return res;
}
877 /* Get bounds associated with NODE via
878 chkp_set_bounds call. */
879 tree
880 chkp_get_bounds (tree node)
882 tree *slot;
884 if (!chkp_bounds_map)
885 return NULL_TREE;
887 slot = chkp_bounds_map->get (node);
888 return slot ? *slot : NULL_TREE;
891 /* Associate bounds VAL with NODE. */
892 void
893 chkp_set_bounds (tree node, tree val)
895 if (!chkp_bounds_map)
896 chkp_bounds_map = new hash_map<tree, tree>;
898 chkp_bounds_map->put (node, val);
/* Check if statically initialized variable VAR require
   static bounds initialization.  If VAR is added into
   bounds initialization list then 1 is returned.  Otherwise
   return 0.  */
extern bool
chkp_register_var_initializer (tree var)
{
  if (!flag_check_pointer_bounds
      || DECL_INITIAL (var) == error_mark_node)
    return false;

  gcc_assert (TREE_CODE (var) == VAR_DECL);
  gcc_assert (DECL_INITIAL (var));

  /* Only static variables whose type contains pointers need
     bounds initialization.  */
  if (TREE_STATIC (var)
      && chkp_type_has_pointer (TREE_TYPE (var)))
    {
      varpool_node::get_create (var)->need_bounds_init = 1;
      return true;
    }

  return false;
}
/* Helper function for chkp_finish_file.

   Add new modification statement (RHS is assigned to LHS)
   into list of static initializer statementes (passed in ARG).
   If statements list becomes too big, emit checker constructor
   and start the new one.  */
static void
chkp_add_modification_to_stmt_list (tree lhs,
				    tree rhs,
				    void *arg)
{
  struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
  tree modify;

  /* Insert a conversion if RHS's type does not match LHS's.  */
  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
    rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);

  modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
  append_to_statement_list (modify, &stmts->stmts);

  /* Account for the statement against the constructor limit.  */
  stmts->avail--;
}
948 /* Build and return ADDR_EXPR for specified object OBJ. */
949 static tree
950 chkp_build_addr_expr (tree obj)
952 return TREE_CODE (obj) == TARGET_MEM_REF
953 ? tree_mem_ref_addr (ptr_type_node, obj)
954 : build_fold_addr_expr (obj);
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      /* String literal: lower bound is its address, size is the
	 string length (SIZE here holds size - 1).  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      /* Build a runtime call to the sizeof builtin.  */
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* If the dynamic size is zero, substitute the maximum
	     possible size instead.  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  /* Upper bound is LB + (size - 1).  */
  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  /* Flush the constructor when the statement budget is exhausted
     and start accumulating a new one.  */
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
/* Return entry block to be used for checker initialization code.
   Create new block if required (by splitting the function's
   entry block); the result is cached in ENTRY_BLOCK.  */
static basic_block
chkp_get_entry_block (void)
{
  if (!entry_block)
    entry_block = split_block (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL)->dest;

  return entry_block;
}
1026 /* Return a bounds var to be used for pointer var PTR_VAR. */
1027 static tree
1028 chkp_get_bounds_var (tree ptr_var)
1030 tree bnd_var;
1031 tree *slot;
1033 slot = chkp_bound_vars->get (ptr_var);
1034 if (slot)
1035 bnd_var = *slot;
1036 else
1038 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1039 CHKP_BOUND_TMP_NAME);
1040 chkp_bound_vars->put (ptr_var, bnd_var);
1043 return bnd_var;
1048 /* Register bounds BND for object PTR in global bounds table.
1049 A copy of bounds may be created for abnormal ssa names.
1050 Returns bounds to use for PTR. */
1051 static tree
1052 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1054 bool abnormal_ptr;
1056 if (!chkp_reg_bounds)
1057 return bnd;
1059 /* Do nothing if bounds are incomplete_bounds
1060 because it means bounds will be recomputed. */
1061 if (bnd == incomplete_bounds)
1062 return bnd;
1064 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1065 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1066 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1068 /* A single bounds value may be reused multiple times for
1069 different pointer values. It may cause coalescing issues
1070 for abnormal SSA names. To avoid it we create a bounds
1071 copy in case it is computed for abnormal SSA name.
1073 We also cannot reuse such created copies for other pointers */
1074 if (abnormal_ptr
1075 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1077 tree bnd_var = NULL_TREE;
1079 if (abnormal_ptr)
1081 if (SSA_NAME_VAR (ptr))
1082 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1084 else
1085 bnd_var = chkp_get_tmp_var ();
1087 /* For abnormal copies we may just find original
1088 bounds and use them. */
1089 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1091 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1092 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1093 bnd = gimple_assign_rhs1 (bnd_def);
1095 /* For undefined values we usually use none bounds
1096 value but in case of abnormal edge it may cause
1097 coalescing failures. Use default definition of
1098 bounds variable instead to avoid it. */
1099 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1100 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1102 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1104 if (dump_file && (dump_flags & TDF_DETAILS))
1106 fprintf (dump_file, "Using default def bounds ");
1107 print_generic_expr (dump_file, bnd, 0);
1108 fprintf (dump_file, " for abnormal default def SSA name ");
1109 print_generic_expr (dump_file, ptr, 0);
1110 fprintf (dump_file, "\n");
1113 else
1115 tree copy;
1116 gimple def = SSA_NAME_DEF_STMT (ptr);
1117 gimple assign;
1118 gimple_stmt_iterator gsi;
1120 if (bnd_var)
1121 copy = make_ssa_name (bnd_var, gimple_build_nop ());
1122 else
1123 copy = make_temp_ssa_name (pointer_bounds_type_node,
1124 gimple_build_nop (),
1125 CHKP_BOUND_TMP_NAME);
1126 assign = gimple_build_assign (copy, bnd);
1128 if (dump_file && (dump_flags & TDF_DETAILS))
1130 fprintf (dump_file, "Creating a copy of bounds ");
1131 print_generic_expr (dump_file, bnd, 0);
1132 fprintf (dump_file, " for abnormal SSA name ");
1133 print_generic_expr (dump_file, ptr, 0);
1134 fprintf (dump_file, "\n");
1137 if (gimple_code (def) == GIMPLE_NOP)
1139 gsi = gsi_last_bb (chkp_get_entry_block ());
1140 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1141 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1142 else
1143 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1145 else
1147 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1148 /* Sometimes (e.g. when we load a pointer from a
1149 memory) bounds are produced later than a pointer.
1150 We need to insert bounds copy appropriately. */
1151 if (gimple_code (bnd_def) != GIMPLE_NOP
1152 && stmt_dominates_stmt_p (def, bnd_def))
1153 gsi = gsi_for_stmt (bnd_def);
1154 else
1155 gsi = gsi_for_stmt (def);
1156 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1159 bnd = copy;
1162 if (abnormal_ptr)
1163 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1166 chkp_reg_bounds->put (ptr, bnd);
1168 if (dump_file && (dump_flags & TDF_DETAILS))
1170 fprintf (dump_file, "Regsitered bound ");
1171 print_generic_expr (dump_file, bnd, 0);
1172 fprintf (dump_file, " for pointer ");
1173 print_generic_expr (dump_file, ptr, 0);
1174 fprintf (dump_file, "\n");
1177 return bnd;
1180 /* Get bounds registered for object PTR in global bounds table. */
1181 static tree
1182 chkp_get_registered_bounds (tree ptr)
1184 tree *slot;
1186 if (!chkp_reg_bounds)
1187 return NULL_TREE;
1189 slot = chkp_reg_bounds->get (ptr);
1190 return slot ? *slot : NULL_TREE;
1193 /* Add bound retvals to return statement pointed by GSI. */
1195 static void
1196 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1198 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1199 tree retval = gimple_return_retval (ret);
1200 tree ret_decl = DECL_RESULT (cfun->decl);
1201 tree bounds;
1203 if (!retval)
1204 return;
1206 if (BOUNDED_P (ret_decl))
1208 bounds = chkp_find_bounds (retval, gsi);
1209 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1210 gimple_return_set_retbnd (ret, bounds);
1213 update_stmt (ret);
1216 /* Force OP to be suitable for using as an argument for call.
1217 New statements (if any) go to SEQ. */
1218 static tree
1219 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1221 gimple_seq stmts;
1222 gimple_stmt_iterator si;
1224 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1226 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1227 chkp_mark_stmt (gsi_stmt (si));
1229 gimple_seq_add_seq (seq, stmts);
1231 return op;
1234 /* Generate lower bound check for memory access by ADDR.
1235 Check is inserted before the position pointed by ITER.
1236 DIRFLAG indicates whether memory access is load or store. */
1237 static void
1238 chkp_check_lower (tree addr, tree bounds,
1239 gimple_stmt_iterator iter,
1240 location_t location,
1241 tree dirflag)
1243 gimple_seq seq;
1244 gimple check;
1245 tree node;
1247 if (bounds == chkp_get_zero_bounds ())
1248 return;
1250 if (dirflag == integer_zero_node
1251 && !flag_chkp_check_read)
1252 return;
1254 if (dirflag == integer_one_node
1255 && !flag_chkp_check_write)
1256 return;
1258 seq = NULL;
1260 node = chkp_force_gimple_call_op (addr, &seq);
1262 check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1263 chkp_mark_stmt (check);
1264 gimple_call_set_with_bounds (check, true);
1265 gimple_set_location (check, location);
1266 gimple_seq_add_stmt (&seq, check);
1268 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1270 if (dump_file && (dump_flags & TDF_DETAILS))
1272 gimple before = gsi_stmt (iter);
1273 fprintf (dump_file, "Generated lower bound check for statement ");
1274 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1275 fprintf (dump_file, " ");
1276 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1280 /* Generate upper bound check for memory access by ADDR.
1281 Check is inserted before the position pointed by ITER.
1282 DIRFLAG indicates whether memory access is load or store. */
1283 static void
1284 chkp_check_upper (tree addr, tree bounds,
1285 gimple_stmt_iterator iter,
1286 location_t location,
1287 tree dirflag)
1289 gimple_seq seq;
1290 gimple check;
1291 tree node;
1293 if (bounds == chkp_get_zero_bounds ())
1294 return;
1296 if (dirflag == integer_zero_node
1297 && !flag_chkp_check_read)
1298 return;
1300 if (dirflag == integer_one_node
1301 && !flag_chkp_check_write)
1302 return;
1304 seq = NULL;
1306 node = chkp_force_gimple_call_op (addr, &seq);
1308 check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1309 chkp_mark_stmt (check);
1310 gimple_call_set_with_bounds (check, true);
1311 gimple_set_location (check, location);
1312 gimple_seq_add_stmt (&seq, check);
1314 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1316 if (dump_file && (dump_flags & TDF_DETAILS))
1318 gimple before = gsi_stmt (iter);
1319 fprintf (dump_file, "Generated upper bound check for statement ");
1320 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1321 fprintf (dump_file, " ");
1322 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1326 /* Generate lower and upper bound checks for memory access
1327 to memory slot [FIRST, LAST] againsr BOUNDS. Checks
1328 are inserted before the position pointed by ITER.
1329 DIRFLAG indicates whether memory access is load or store. */
1330 void
1331 chkp_check_mem_access (tree first, tree last, tree bounds,
1332 gimple_stmt_iterator iter,
1333 location_t location,
1334 tree dirflag)
1336 chkp_check_lower (first, bounds, iter, location, dirflag);
1337 chkp_check_upper (last, bounds, iter, location, dirflag);
1340 /* Replace call to _bnd_chk_* pointed by GSI with
1341 bndcu and bndcl calls. DIRFLAG determines whether
1342 check is for read or write. */
1344 void
1345 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1346 tree dirflag)
1348 gimple_stmt_iterator call_iter = *gsi;
1349 gimple call = gsi_stmt (*gsi);
1350 tree fndecl = gimple_call_fndecl (call);
1351 tree addr = gimple_call_arg (call, 0);
1352 tree bounds = chkp_find_bounds (addr, gsi);
1354 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1355 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1356 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1358 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1359 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1361 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1363 tree size = gimple_call_arg (call, 1);
1364 addr = fold_build_pointer_plus (addr, size);
1365 addr = fold_build_pointer_plus_hwi (addr, -1);
1366 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1369 gsi_remove (&call_iter, true);
1372 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1373 corresponding bounds extract call. */
1375 void
1376 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1378 gimple call = gsi_stmt (*gsi);
1379 tree fndecl = gimple_call_fndecl (call);
1380 tree addr = gimple_call_arg (call, 0);
1381 tree bounds = chkp_find_bounds (addr, gsi);
1382 gimple extract;
1384 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1385 fndecl = chkp_extract_lower_fndecl;
1386 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1387 fndecl = chkp_extract_upper_fndecl;
1388 else
1389 gcc_unreachable ();
1391 extract = gimple_build_call (fndecl, 1, bounds);
1392 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1393 chkp_mark_stmt (extract);
1395 gsi_replace (gsi, extract, false);
1398 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1399 static tree
1400 chkp_build_component_ref (tree obj, tree field)
1402 tree res;
1404 /* If object is TMR then we do not use component_ref but
1405 add offset instead. We need it to be able to get addr
1406 of the reasult later. */
1407 if (TREE_CODE (obj) == TARGET_MEM_REF)
1409 tree offs = TMR_OFFSET (obj);
1410 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1411 offs, DECL_FIELD_OFFSET (field));
1413 gcc_assert (offs);
1415 res = copy_node (obj);
1416 TREE_TYPE (res) = TREE_TYPE (field);
1417 TMR_OFFSET (res) = offs;
1419 else
1420 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1422 return res;
1425 /* Return ARRAY_REF for array ARR and index IDX with
1426 specified element type ETYPE and element size ESIZE. */
1427 static tree
1428 chkp_build_array_ref (tree arr, tree etype, tree esize,
1429 unsigned HOST_WIDE_INT idx)
1431 tree index = build_int_cst (size_type_node, idx);
1432 tree res;
1434 /* If object is TMR then we do not use array_ref but
1435 add offset instead. We need it to be able to get addr
1436 of the reasult later. */
1437 if (TREE_CODE (arr) == TARGET_MEM_REF)
1439 tree offs = TMR_OFFSET (arr);
1441 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1442 esize, index);
1443 gcc_assert(esize);
1445 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1446 offs, esize);
1447 gcc_assert (offs);
1449 res = copy_node (arr);
1450 TREE_TYPE (res) = etype;
1451 TMR_OFFSET (res) = offs;
1453 else
1454 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1456 return res;
1459 /* Helper function for chkp_add_bounds_to_call_stmt.
1460 Fill ALL_BOUNDS output array with created bounds.
1462 OFFS is used for recursive calls and holds basic
1463 offset of TYPE in outer structure in bits.
1465 ITER points a position where bounds are searched.
1467 ALL_BOUNDS[i] is filled with elem bounds if there
1468 is a field in TYPE which has pointer type and offset
1469 equal to i * POINTER_SIZE in bits. */
1470 static void
1471 chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
1472 HOST_WIDE_INT offs,
1473 gimple_stmt_iterator *iter)
1475 tree type = TREE_TYPE (elem);
1477 if (BOUNDED_TYPE_P (type))
1479 if (!all_bounds[offs / POINTER_SIZE])
1481 tree temp = make_temp_ssa_name (type, gimple_build_nop (), "");
1482 gimple assign = gimple_build_assign (temp, elem);
1483 gimple_stmt_iterator gsi;
1485 gsi_insert_before (iter, assign, GSI_SAME_STMT);
1486 gsi = gsi_for_stmt (assign);
1488 all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
1491 else if (RECORD_OR_UNION_TYPE_P (type))
1493 tree field;
1495 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1496 if (TREE_CODE (field) == FIELD_DECL)
1498 tree base = unshare_expr (elem);
1499 tree field_ref = chkp_build_component_ref (base, field);
1500 HOST_WIDE_INT field_offs
1501 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1502 if (DECL_FIELD_OFFSET (field))
1503 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1505 chkp_find_bounds_for_elem (field_ref, all_bounds,
1506 offs + field_offs, iter);
1509 else if (TREE_CODE (type) == ARRAY_TYPE)
1511 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1512 tree etype = TREE_TYPE (type);
1513 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1514 unsigned HOST_WIDE_INT cur;
1516 if (!maxval || integer_minus_onep (maxval))
1517 return;
1519 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1521 tree base = unshare_expr (elem);
1522 tree arr_elem = chkp_build_array_ref (base, etype,
1523 TYPE_SIZE (etype),
1524 cur);
1525 chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
1526 iter);
1531 /* Fill HAVE_BOUND output bitmap with information about
1532 bounds requred for object of type TYPE.
1534 OFFS is used for recursive calls and holds basic
1535 offset of TYPE in outer structure in bits.
1537 HAVE_BOUND[i] is set to 1 if there is a field
1538 in TYPE which has pointer type and offset
1539 equal to i * POINTER_SIZE - OFFS in bits. */
1540 void
1541 chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
1542 HOST_WIDE_INT offs)
1544 if (BOUNDED_TYPE_P (type))
1545 bitmap_set_bit (have_bound, offs / POINTER_SIZE);
1546 else if (RECORD_OR_UNION_TYPE_P (type))
1548 tree field;
1550 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1551 if (TREE_CODE (field) == FIELD_DECL)
1553 HOST_WIDE_INT field_offs
1554 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1555 if (DECL_FIELD_OFFSET (field))
1556 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1557 chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
1558 offs + field_offs);
1561 else if (TREE_CODE (type) == ARRAY_TYPE)
1563 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1564 tree etype = TREE_TYPE (type);
1565 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1566 unsigned HOST_WIDE_INT cur;
1568 if (!maxval || integer_minus_onep (maxval))
1569 return;
1571 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1572 chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
1576 /* Fill bitmap RES with information about bounds for
1577 type TYPE. See chkp_find_bound_slots_1 for more
1578 details. */
1579 void
1580 chkp_find_bound_slots (const_tree type, bitmap res)
1582 bitmap_clear (res);
1583 chkp_find_bound_slots_1 (type, res, 0);
1586 /* Add bound arguments to call statement pointed by GSI.
1587 Also performs a replacement of user checker builtins calls
1588 with internal ones. */
1590 static void
1591 chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
1593 gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
1594 unsigned arg_no = 0;
1595 tree fndecl = gimple_call_fndecl (call);
1596 tree fntype;
1597 tree first_formal_arg;
1598 tree arg;
1599 bool use_fntype = false;
1600 tree op;
1601 ssa_op_iter iter;
1602 gcall *new_call;
1604 /* Do nothing for internal functions. */
1605 if (gimple_call_internal_p (call))
1606 return;
1608 fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));
1610 /* Do nothing if back-end builtin is called. */
1611 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
1612 return;
1614 /* Do nothing for some middle-end builtins. */
1615 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1616 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
1617 return;
1619 /* Donothing for calls to legacy functions. */
1620 if (fndecl
1621 && lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (fndecl)))
1622 return;
1624 /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
1625 and CHKP_COPY_PTR_BOUNDS. */
1626 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1627 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
1628 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
1629 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
1630 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
1631 return;
1633 /* Check user builtins are replaced with checks. */
1634 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1635 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1636 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
1637 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
1639 chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
1640 return;
1643 /* Check user builtins are replaced with bound extract. */
1644 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1645 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
1646 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
1648 chkp_replace_extract_builtin (gsi);
1649 return;
1652 /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
1653 target narrow bounds call. */
1654 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1655 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
1657 tree arg = gimple_call_arg (call, 1);
1658 tree bounds = chkp_find_bounds (arg, gsi);
1660 gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
1661 gimple_call_set_arg (call, 1, bounds);
1662 update_stmt (call);
1664 return;
1667 /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
1668 bndstx call. */
1669 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1670 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
1672 tree addr = gimple_call_arg (call, 0);
1673 tree ptr = gimple_call_arg (call, 1);
1674 tree bounds = chkp_find_bounds (ptr, gsi);
1675 gimple_stmt_iterator iter = gsi_for_stmt (call);
1677 chkp_build_bndstx (addr, ptr, bounds, gsi);
1678 gsi_remove (&iter, true);
1680 return;
1683 if (!flag_chkp_instrument_calls)
1684 return;
1686 /* Avoid instrumented builtin functions for now. Due to IPA
1687 it also means we have to avoid instrumentation of indirect
1688 calls. */
1689 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN)
1690 return;
1692 /* If function decl is available then use it for
1693 formal arguments list. Otherwise use function type. */
1694 if (fndecl && DECL_ARGUMENTS (fndecl))
1695 first_formal_arg = DECL_ARGUMENTS (fndecl);
1696 else
1698 first_formal_arg = TYPE_ARG_TYPES (fntype);
1699 use_fntype = true;
1702 /* Fill vector of new call args. */
1703 vec<tree> new_args = vNULL;
1704 new_args.create (gimple_call_num_args (call));
1705 arg = first_formal_arg;
1706 for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
1708 tree call_arg = gimple_call_arg (call, arg_no);
1709 tree type;
1711 /* Get arg type using formal argument description
1712 or actual argument type. */
1713 if (arg)
1714 if (use_fntype)
1715 if (TREE_VALUE (arg) != void_type_node)
1717 type = TREE_VALUE (arg);
1718 arg = TREE_CHAIN (arg);
1720 else
1721 type = TREE_TYPE (call_arg);
1722 else
1724 type = TREE_TYPE (arg);
1725 arg = TREE_CHAIN (arg);
1727 else
1728 type = TREE_TYPE (call_arg);
1730 new_args.safe_push (call_arg);
1732 if (BOUNDED_TYPE_P (type)
1733 || pass_by_reference (NULL, TYPE_MODE (type), type, true))
1734 new_args.safe_push (chkp_find_bounds (call_arg, gsi));
1735 else if (chkp_type_has_pointer (type))
1737 HOST_WIDE_INT max_bounds
1738 = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
1739 tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
1740 HOST_WIDE_INT bnd_no;
1742 memset (all_bounds, 0, sizeof (tree) * max_bounds);
1744 chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);
1746 for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
1747 if (all_bounds[bnd_no])
1748 new_args.safe_push (all_bounds[bnd_no]);
1750 free (all_bounds);
1754 if (new_args.length () == gimple_call_num_args (call))
1755 new_call = call;
1756 else
1758 new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
1759 gimple_call_set_lhs (new_call, gimple_call_lhs (call));
1760 gimple_call_copy_flags (new_call, call);
1762 new_args.release ();
1764 /* If we call built-in function and pass no bounds then
1765 we do not need to change anything. */
1766 if (new_call == call
1767 && fndecl
1768 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1769 && fndecl == builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
1770 return;
1772 /* For direct calls fndecl is replaced with instrumented version. */
1773 if (fndecl)
1775 tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
1776 gimple_call_set_fndecl (new_call, new_decl);
1777 gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
1779 /* For indirect call we should fix function pointer type if
1780 pass some bounds. */
1781 else if (new_call != call)
1783 tree type = gimple_call_fntype (call);
1784 type = chkp_copy_function_type_adding_bounds (type);
1785 gimple_call_set_fntype (new_call, type);
1788 /* replace old call statement with the new one. */
1789 if (call != new_call)
1791 FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
1793 SSA_NAME_DEF_STMT (op) = new_call;
1795 gsi_replace (gsi, new_call, true);
1797 else
1798 update_stmt (new_call);
1800 gimple_call_set_with_bounds (new_call, true);
1803 /* Return constant static bounds var with specified LB and UB
1804 if such var exists in varpool. Return NULL otherwise. */
1805 static tree
1806 chkp_find_const_bounds_var (HOST_WIDE_INT lb,
1807 HOST_WIDE_INT ub)
1809 tree val = targetm.chkp_make_bounds_constant (lb, ub);
1810 struct varpool_node *node;
1812 /* We expect bounds constant is represented as a complex value
1813 of two pointer sized integers. */
1814 gcc_assert (TREE_CODE (val) == COMPLEX_CST);
1816 FOR_EACH_VARIABLE (node)
1817 if (POINTER_BOUNDS_P (node->decl)
1818 && TREE_READONLY (node->decl)
1819 && DECL_INITIAL (node->decl)
1820 && TREE_CODE (DECL_INITIAL (node->decl)) == COMPLEX_CST
1821 && tree_int_cst_equal (TREE_REALPART (DECL_INITIAL (node->decl)),
1822 TREE_REALPART (val))
1823 && tree_int_cst_equal (TREE_IMAGPART (DECL_INITIAL (node->decl)),
1824 TREE_IMAGPART (val)))
1825 return node->decl;
1827 return NULL;
1830 /* Return constant static bounds var with specified bounds LB and UB.
1831 If such var does not exists then new var is created with specified NAME. */
1832 static tree
1833 chkp_make_static_const_bounds (HOST_WIDE_INT lb,
1834 HOST_WIDE_INT ub,
1835 const char *name)
1837 tree var;
1839 /* With LTO we may have constant bounds already in varpool.
1840 Try to find it. */
1841 var = chkp_find_const_bounds_var (lb, ub);
1843 if (var)
1844 return var;
1846 var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
1847 get_identifier (name), pointer_bounds_type_node);
1849 TREE_PUBLIC (var) = 1;
1850 TREE_USED (var) = 1;
1851 TREE_READONLY (var) = 1;
1852 TREE_STATIC (var) = 1;
1853 TREE_ADDRESSABLE (var) = 0;
1854 DECL_ARTIFICIAL (var) = 1;
1855 DECL_READ_P (var) = 1;
1856 /* We may use this symbol during ctors generation in chkp_finish_file
1857 when all symbols are emitted. Force output to avoid undefined
1858 symbols in ctors. */
1859 if (!in_lto_p)
1861 DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
1862 DECL_COMDAT (var) = 1;
1863 varpool_node::get_create (var)->set_comdat_group (DECL_ASSEMBLER_NAME (var));
1864 varpool_node::get_create (var)->force_output = 1;
1866 else
1867 DECL_EXTERNAL (var) = 1;
1868 varpool_node::finalize_decl (var);
1870 return var;
1873 /* Generate code to make bounds with specified lower bound LB and SIZE.
1874 if AFTER is 1 then code is inserted after position pointed by ITER
1875 otherwise code is inserted before position pointed by ITER.
1876 If ITER is NULL then code is added to entry block. */
1877 static tree
1878 chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
1880 gimple_seq seq;
1881 gimple_stmt_iterator gsi;
1882 gimple stmt;
1883 tree bounds;
1885 if (iter)
1886 gsi = *iter;
1887 else
1888 gsi = gsi_start_bb (chkp_get_entry_block ());
1890 seq = NULL;
1892 lb = chkp_force_gimple_call_op (lb, &seq);
1893 size = chkp_force_gimple_call_op (size, &seq);
1895 stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
1896 chkp_mark_stmt (stmt);
1898 bounds = chkp_get_tmp_reg (stmt);
1899 gimple_call_set_lhs (stmt, bounds);
1901 gimple_seq_add_stmt (&seq, stmt);
1903 if (iter && after)
1904 gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
1905 else
1906 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
1908 if (dump_file && (dump_flags & TDF_DETAILS))
1910 fprintf (dump_file, "Made bounds: ");
1911 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
1912 if (iter)
1914 fprintf (dump_file, " inserted before statement: ");
1915 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
1917 else
1918 fprintf (dump_file, " at function entry\n");
1921 /* update_stmt (stmt); */
1923 return bounds;
1926 /* Return var holding zero bounds. */
1927 tree
1928 chkp_get_zero_bounds_var (void)
1930 if (!chkp_zero_bounds_var)
1932 tree id = get_identifier (CHKP_ZERO_BOUNDS_VAR_NAME);
1933 symtab_node *node = symtab_node::get_for_asmname (id);
1934 if (node)
1935 chkp_zero_bounds_var = node->decl;
1938 if (!chkp_zero_bounds_var)
1939 chkp_zero_bounds_var
1940 = chkp_make_static_const_bounds (0, -1,
1941 CHKP_ZERO_BOUNDS_VAR_NAME);
1942 return chkp_zero_bounds_var;
1945 /* Return var holding none bounds. */
1946 tree
1947 chkp_get_none_bounds_var (void)
1949 if (!chkp_none_bounds_var)
1951 tree id = get_identifier (CHKP_NONE_BOUNDS_VAR_NAME);
1952 symtab_node *node = symtab_node::get_for_asmname (id);
1953 if (node)
1954 chkp_none_bounds_var = node->decl;
1957 if (!chkp_none_bounds_var)
1958 chkp_none_bounds_var
1959 = chkp_make_static_const_bounds (-1, 0,
1960 CHKP_NONE_BOUNDS_VAR_NAME);
1961 return chkp_none_bounds_var;
1964 /* Return SSA_NAME used to represent zero bounds. */
1965 static tree
1966 chkp_get_zero_bounds (void)
1968 if (zero_bounds)
1969 return zero_bounds;
1971 if (dump_file && (dump_flags & TDF_DETAILS))
1972 fprintf (dump_file, "Creating zero bounds...");
1974 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
1975 || flag_chkp_use_static_const_bounds > 0)
1977 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
1978 gimple stmt;
1980 zero_bounds = chkp_get_tmp_reg (gimple_build_nop ());
1981 stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
1982 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
1984 else
1985 zero_bounds = chkp_make_bounds (integer_zero_node,
1986 integer_zero_node,
1987 NULL,
1988 false);
1990 return zero_bounds;
1993 /* Return SSA_NAME used to represent none bounds. */
1994 static tree
1995 chkp_get_none_bounds (void)
1997 if (none_bounds)
1998 return none_bounds;
2000 if (dump_file && (dump_flags & TDF_DETAILS))
2001 fprintf (dump_file, "Creating none bounds...");
2004 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2005 || flag_chkp_use_static_const_bounds > 0)
2007 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2008 gimple stmt;
2010 none_bounds = chkp_get_tmp_reg (gimple_build_nop ());
2011 stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
2012 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2014 else
2015 none_bounds = chkp_make_bounds (integer_minus_one_node,
2016 build_int_cst (size_type_node, 2),
2017 NULL,
2018 false);
2020 return none_bounds;
2023 /* Return bounds to be used as a result of operation which
2024 should not create poiunter (e.g. MULT_EXPR). */
2025 static tree
2026 chkp_get_invalid_op_bounds (void)
2028 return chkp_get_zero_bounds ();
2031 /* Return bounds to be used for loads of non-pointer values. */
2032 static tree
2033 chkp_get_nonpointer_load_bounds (void)
2035 return chkp_get_zero_bounds ();
2038 /* Build bounds returned by CALL. */
2039 static tree
2040 chkp_build_returned_bound (gcall *call)
2042 gimple_stmt_iterator gsi;
2043 tree bounds;
2044 gimple stmt;
2045 tree fndecl = gimple_call_fndecl (call);
2047 /* To avoid fixing alloca expands in targets we handle
2048 it separately. */
2049 if (fndecl
2050 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2051 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
2052 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2054 tree size = gimple_call_arg (call, 0);
2055 tree lb = gimple_call_lhs (call);
2056 gimple_stmt_iterator iter = gsi_for_stmt (call);
2057 bounds = chkp_make_bounds (lb, size, &iter, true);
2059 /* We know bounds returned by set_bounds builtin call. */
2060 else if (fndecl
2061 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2062 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
2064 tree lb = gimple_call_arg (call, 0);
2065 tree size = gimple_call_arg (call, 1);
2066 gimple_stmt_iterator iter = gsi_for_stmt (call);
2067 bounds = chkp_make_bounds (lb, size, &iter, true);
2069 /* Detect bounds initialization calls. */
2070 else if (fndecl
2071 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2072 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
2073 bounds = chkp_get_zero_bounds ();
2074 /* Detect bounds nullification calls. */
2075 else if (fndecl
2076 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2077 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
2078 bounds = chkp_get_none_bounds ();
2079 /* Detect bounds copy calls. */
2080 else if (fndecl
2081 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2082 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
2084 gimple_stmt_iterator iter = gsi_for_stmt (call);
2085 bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
2087 /* Do not use retbnd when returned bounds are equal to some
2088 of passed bounds. */
2089 else if ((gimple_call_return_flags (call) & ERF_RETURNS_ARG)
2090 || gimple_call_builtin_p (call, BUILT_IN_STRCHR))
2092 gimple_stmt_iterator iter = gsi_for_stmt (call);
2093 unsigned int retarg = 0, argno;
2094 if (gimple_call_return_flags (call) & ERF_RETURNS_ARG)
2095 retarg = gimple_call_return_flags (call) & ERF_RETURN_ARG_MASK;
2096 if (gimple_call_with_bounds_p (call))
2098 for (argno = 0; argno < gimple_call_num_args (call); argno++)
2099 if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
2101 if (retarg)
2102 retarg--;
2103 else
2104 break;
2107 else
2108 argno = retarg;
2110 bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
2112 else
2114 gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);
2116 /* In general case build checker builtin call to
2117 obtain returned bounds. */
2118 stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
2119 gimple_call_lhs (call));
2120 chkp_mark_stmt (stmt);
2122 gsi = gsi_for_stmt (call);
2123 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
2125 bounds = chkp_get_tmp_reg (stmt);
2126 gimple_call_set_lhs (stmt, bounds);
2128 update_stmt (stmt);
2131 if (dump_file && (dump_flags & TDF_DETAILS))
2133 fprintf (dump_file, "Built returned bounds (");
2134 print_generic_expr (dump_file, bounds, 0);
2135 fprintf (dump_file, ") for call: ");
2136 print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
2139 bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);
2141 return bounds;
2144 /* Return bounds used as returned by call
2145 which produced SSA name VAL. */
2146 gcall *
2147 chkp_retbnd_call_by_val (tree val)
2149 if (TREE_CODE (val) != SSA_NAME)
2150 return NULL;
2152 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2154 imm_use_iterator use_iter;
2155 use_operand_p use_p;
2156 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2157 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2158 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2159 return as_a <gcall *> (USE_STMT (use_p));
2161 return NULL;
2164 /* Check the next parameter for the given PARM is bounds
2165 and return it's default SSA_NAME (create if required). */
2166 static tree
2167 chkp_get_next_bounds_parm (tree parm)
2169 tree bounds = TREE_CHAIN (parm);
2170 gcc_assert (POINTER_BOUNDS_P (bounds));
2171 bounds = ssa_default_def (cfun, bounds);
2172 if (!bounds)
2174 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2175 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2177 return bounds;
2180 /* Return bounds to be used for input argument PARM. */
2181 static tree
2182 chkp_get_bound_for_parm (tree parm)
2184 tree decl = SSA_NAME_VAR (parm);
2185 tree bounds;
2187 gcc_assert (TREE_CODE (decl) == PARM_DECL);
2189 bounds = chkp_get_registered_bounds (parm);
2191 if (!bounds)
2192 bounds = chkp_get_registered_bounds (decl);
2194 if (!bounds)
2196 tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;
2198 /* For static chain param we return zero bounds
2199 because currently we do not check dereferences
2200 of this pointer. */
2201 if (cfun->static_chain_decl == decl)
2202 bounds = chkp_get_zero_bounds ();
2203 /* If non instrumented runtime is used then it may be useful
2204 to use zero bounds for input arguments of main
2205 function. */
2206 else if (flag_chkp_zero_input_bounds_for_main
2207 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
2208 "main") == 0)
2209 bounds = chkp_get_zero_bounds ();
2210 else if (BOUNDED_P (parm))
2212 bounds = chkp_get_next_bounds_parm (decl);
2213 bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);
2215 if (dump_file && (dump_flags & TDF_DETAILS))
2217 fprintf (dump_file, "Built arg bounds (");
2218 print_generic_expr (dump_file, bounds, 0);
2219 fprintf (dump_file, ") for arg: ");
2220 print_node (dump_file, "", decl, 0);
2223 else
2224 bounds = chkp_get_zero_bounds ();
2227 if (!chkp_get_registered_bounds (parm))
2228 bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);
2230 if (dump_file && (dump_flags & TDF_DETAILS))
2232 fprintf (dump_file, "Using bounds ");
2233 print_generic_expr (dump_file, bounds, 0);
2234 fprintf (dump_file, " for parm ");
2235 print_generic_expr (dump_file, parm, 0);
2236 fprintf (dump_file, " of type ");
2237 print_generic_expr (dump_file, TREE_TYPE (parm), 0);
2238 fprintf (dump_file, ".\n");
2241 return bounds;
2244 /* Build and return CALL_EXPR for bndstx builtin with specified
2245 arguments. */
2246 tree
2247 chkp_build_bndldx_call (tree addr, tree ptr)
2249 tree fn = build1 (ADDR_EXPR,
2250 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2251 chkp_bndldx_fndecl);
2252 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2253 fn, 2, addr, ptr);
2254 CALL_WITH_BOUNDS_P (call) = true;
2255 return call;
2258 /* Insert code to load bounds for PTR located by ADDR.
2259 Code is inserted after position pointed by GSI.
2260 Loaded bounds are returned. */
2261 static tree
2262 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2264 gimple_seq seq;
2265 gimple stmt;
2266 tree bounds;
2268 seq = NULL;
2270 addr = chkp_force_gimple_call_op (addr, &seq);
2271 ptr = chkp_force_gimple_call_op (ptr, &seq);
2273 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2274 chkp_mark_stmt (stmt);
2275 bounds = chkp_get_tmp_reg (stmt);
2276 gimple_call_set_lhs (stmt, bounds);
2278 gimple_seq_add_stmt (&seq, stmt);
2280 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2282 if (dump_file && (dump_flags & TDF_DETAILS))
2284 fprintf (dump_file, "Generated bndldx for pointer ");
2285 print_generic_expr (dump_file, ptr, 0);
2286 fprintf (dump_file, ": ");
2287 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2290 return bounds;
2293 /* Build and return CALL_EXPR for bndstx builtin with specified
2294 arguments. */
2295 tree
2296 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2298 tree fn = build1 (ADDR_EXPR,
2299 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2300 chkp_bndstx_fndecl);
2301 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2302 fn, 3, ptr, bounds, addr);
2303 CALL_WITH_BOUNDS_P (call) = true;
2304 return call;
2307 /* Insert code to store BOUNDS for PTR stored by ADDR.
2308 New statements are inserted after position pointed
2309 by GSI. */
2310 void
2311 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2312 gimple_stmt_iterator *gsi)
2314 gimple_seq seq;
2315 gimple stmt;
2317 seq = NULL;
2319 addr = chkp_force_gimple_call_op (addr, &seq);
2320 ptr = chkp_force_gimple_call_op (ptr, &seq);
2322 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2323 chkp_mark_stmt (stmt);
2324 gimple_call_set_with_bounds (stmt, true);
2326 gimple_seq_add_stmt (&seq, stmt);
2328 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2330 if (dump_file && (dump_flags & TDF_DETAILS))
2332 fprintf (dump_file, "Generated bndstx for pointer store ");
2333 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2334 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2338 /* Compute bounds for pointer NODE which was assigned in
2339 assignment statement ASSIGN. Return computed bounds. */
2340 static tree
2341 chkp_compute_bounds_for_assignment (tree node, gimple assign)
2343 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
2344 tree rhs1 = gimple_assign_rhs1 (assign);
2345 tree bounds = NULL_TREE;
2346 gimple_stmt_iterator iter = gsi_for_stmt (assign);
2348 if (dump_file && (dump_flags & TDF_DETAILS))
2350 fprintf (dump_file, "Computing bounds for assignment: ");
2351 print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
2354 switch (rhs_code)
2356 case MEM_REF:
2357 case TARGET_MEM_REF:
2358 case COMPONENT_REF:
2359 case ARRAY_REF:
2360 /* We need to load bounds from the bounds table. */
2361 bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
2362 break;
2364 case VAR_DECL:
2365 case SSA_NAME:
2366 case ADDR_EXPR:
2367 case POINTER_PLUS_EXPR:
2368 case NOP_EXPR:
2369 case CONVERT_EXPR:
2370 case INTEGER_CST:
2371 /* Bounds are just propagated from RHS. */
2372 bounds = chkp_find_bounds (rhs1, &iter);
2373 break;
2375 case VIEW_CONVERT_EXPR:
2376 /* Bounds are just propagated from RHS. */
2377 bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
2378 break;
2380 case PARM_DECL:
2381 if (BOUNDED_P (rhs1))
2383 /* We need to load bounds from the bounds table. */
2384 bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
2385 node, &iter);
2386 TREE_ADDRESSABLE (rhs1) = 1;
2388 else
2389 bounds = chkp_get_nonpointer_load_bounds ();
2390 break;
2392 case MINUS_EXPR:
2393 case PLUS_EXPR:
2394 case BIT_AND_EXPR:
2395 case BIT_IOR_EXPR:
2396 case BIT_XOR_EXPR:
2398 tree rhs2 = gimple_assign_rhs2 (assign);
2399 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2400 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2402 /* First we try to check types of operands. If it
2403 does not help then look at bound values.
2405 If some bounds are incomplete and other are
2406 not proven to be valid (i.e. also incomplete
2407 or invalid because value is not pointer) then
2408 resulting value is incomplete and will be
2409 recomputed later in chkp_finish_incomplete_bounds. */
2410 if (BOUNDED_P (rhs1)
2411 && !BOUNDED_P (rhs2))
2412 bounds = bnd1;
2413 else if (BOUNDED_P (rhs2)
2414 && !BOUNDED_P (rhs1)
2415 && rhs_code != MINUS_EXPR)
2416 bounds = bnd2;
2417 else if (chkp_incomplete_bounds (bnd1))
2418 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
2419 && !chkp_incomplete_bounds (bnd2))
2420 bounds = bnd2;
2421 else
2422 bounds = incomplete_bounds;
2423 else if (chkp_incomplete_bounds (bnd2))
2424 if (chkp_valid_bounds (bnd1)
2425 && !chkp_incomplete_bounds (bnd1))
2426 bounds = bnd1;
2427 else
2428 bounds = incomplete_bounds;
2429 else if (!chkp_valid_bounds (bnd1))
2430 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
2431 bounds = bnd2;
2432 else if (bnd2 == chkp_get_zero_bounds ())
2433 bounds = bnd2;
2434 else
2435 bounds = bnd1;
2436 else if (!chkp_valid_bounds (bnd2))
2437 bounds = bnd1;
2438 else
2439 /* Seems both operands may have valid bounds
2440 (e.g. pointer minus pointer). In such case
2441 use default invalid op bounds. */
2442 bounds = chkp_get_invalid_op_bounds ();
2444 break;
2446 case BIT_NOT_EXPR:
2447 case NEGATE_EXPR:
2448 case LSHIFT_EXPR:
2449 case RSHIFT_EXPR:
2450 case LROTATE_EXPR:
2451 case RROTATE_EXPR:
2452 case EQ_EXPR:
2453 case NE_EXPR:
2454 case LT_EXPR:
2455 case LE_EXPR:
2456 case GT_EXPR:
2457 case GE_EXPR:
2458 case MULT_EXPR:
2459 case RDIV_EXPR:
2460 case TRUNC_DIV_EXPR:
2461 case FLOOR_DIV_EXPR:
2462 case CEIL_DIV_EXPR:
2463 case ROUND_DIV_EXPR:
2464 case TRUNC_MOD_EXPR:
2465 case FLOOR_MOD_EXPR:
2466 case CEIL_MOD_EXPR:
2467 case ROUND_MOD_EXPR:
2468 case EXACT_DIV_EXPR:
2469 case FIX_TRUNC_EXPR:
2470 case FLOAT_EXPR:
2471 case REALPART_EXPR:
2472 case IMAGPART_EXPR:
2473 /* No valid bounds may be produced by these exprs. */
2474 bounds = chkp_get_invalid_op_bounds ();
2475 break;
2477 case COND_EXPR:
2479 tree val1 = gimple_assign_rhs2 (assign);
2480 tree val2 = gimple_assign_rhs3 (assign);
2481 tree bnd1 = chkp_find_bounds (val1, &iter);
2482 tree bnd2 = chkp_find_bounds (val2, &iter);
2483 gimple stmt;
2485 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2486 bounds = incomplete_bounds;
2487 else if (bnd1 == bnd2)
2488 bounds = bnd1;
2489 else
2491 rhs1 = unshare_expr (rhs1);
2493 bounds = chkp_get_tmp_reg (assign);
2494 stmt = gimple_build_assign_with_ops (COND_EXPR, bounds,
2495 rhs1, bnd1, bnd2);
2496 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2498 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2499 chkp_mark_invalid_bounds (bounds);
2502 break;
2504 case MAX_EXPR:
2505 case MIN_EXPR:
2507 tree rhs2 = gimple_assign_rhs2 (assign);
2508 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2509 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2511 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2512 bounds = incomplete_bounds;
2513 else if (bnd1 == bnd2)
2514 bounds = bnd1;
2515 else
2517 gimple stmt;
2518 tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
2519 boolean_type_node, rhs1, rhs2);
2520 bounds = chkp_get_tmp_reg (assign);
2521 stmt = gimple_build_assign_with_ops (COND_EXPR, bounds,
2522 cond, bnd1, bnd2);
2524 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2526 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2527 chkp_mark_invalid_bounds (bounds);
2530 break;
2532 default:
2533 bounds = chkp_get_zero_bounds ();
2534 warning (0, "pointer bounds were lost due to unexpected expression %s",
2535 get_tree_code_name (rhs_code));
2538 gcc_assert (bounds);
2540 if (node)
2541 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2543 return bounds;
2546 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2548 There are just few statement codes allowed: NOP (for default ssa names),
2549 ASSIGN, CALL, PHI, ASM.
2551 Return computed bounds. */
2552 static tree
2553 chkp_get_bounds_by_definition (tree node, gimple def_stmt,
2554 gphi_iterator *iter)
2556 tree var, bounds;
2557 enum gimple_code code = gimple_code (def_stmt);
2558 gphi *stmt;
2560 if (dump_file && (dump_flags & TDF_DETAILS))
2562 fprintf (dump_file, "Searching for bounds for node: ");
2563 print_generic_expr (dump_file, node, 0);
2565 fprintf (dump_file, " using its definition: ");
2566 print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2569 switch (code)
2571 case GIMPLE_NOP:
2572 var = SSA_NAME_VAR (node);
2573 switch (TREE_CODE (var))
2575 case PARM_DECL:
2576 bounds = chkp_get_bound_for_parm (node);
2577 break;
2579 case VAR_DECL:
2580 /* For uninitialized pointers use none bounds. */
2581 bounds = chkp_get_none_bounds ();
2582 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2583 break;
2585 case RESULT_DECL:
2587 tree base_type;
2589 gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2591 base_type = TREE_TYPE (TREE_TYPE (node));
2593 gcc_assert (TYPE_SIZE (base_type)
2594 && TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2595 && tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2597 bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2598 NULL, false);
2599 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2601 break;
2603 default:
2604 if (dump_file && (dump_flags & TDF_DETAILS))
2606 fprintf (dump_file, "Unexpected var with no definition\n");
2607 print_generic_expr (dump_file, var, 0);
2609 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2610 get_tree_code_name (TREE_CODE (var)));
2612 break;
2614 case GIMPLE_ASSIGN:
2615 bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2616 break;
2618 case GIMPLE_CALL:
2619 bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
2620 break;
2622 case GIMPLE_PHI:
2623 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
2624 if (SSA_NAME_VAR (node))
2625 var = chkp_get_bounds_var (SSA_NAME_VAR (node));
2626 else
2627 var = make_temp_ssa_name (pointer_bounds_type_node,
2628 gimple_build_nop (),
2629 CHKP_BOUND_TMP_NAME);
2630 else
2631 var = chkp_get_tmp_var ();
2632 stmt = create_phi_node (var, gimple_bb (def_stmt));
2633 bounds = gimple_phi_result (stmt);
2634 *iter = gsi_for_phi (stmt);
2636 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2638 /* Created bounds do not have all phi args computed and
2639 therefore we do not know if there is a valid source
2640 of bounds for that node. Therefore we mark bounds
2641 as incomplete and then recompute them when all phi
2642 args are computed. */
2643 chkp_register_incomplete_bounds (bounds, node);
2644 break;
2646 case GIMPLE_ASM:
2647 bounds = chkp_get_zero_bounds ();
2648 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2649 break;
2651 default:
2652 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2653 gimple_code_name[code]);
2656 return bounds;
2659 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2660 tree
2661 chkp_build_make_bounds_call (tree lower_bound, tree size)
2663 tree call = build1 (ADDR_EXPR,
2664 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2665 chkp_bndmk_fndecl);
2666 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2667 call, 2, lower_bound, size);
2670 /* Create static bounds var of specfified OBJ which is
2671 is either VAR_DECL or string constant. */
2672 static tree
2673 chkp_make_static_bounds (tree obj)
2675 static int string_id = 1;
2676 static int var_id = 1;
2677 tree *slot;
2678 const char *var_name;
2679 char *bnd_var_name;
2680 tree bnd_var;
2682 /* First check if we already have required var. */
2683 if (chkp_static_var_bounds)
2685 slot = chkp_static_var_bounds->get (obj);
2686 if (slot)
2687 return *slot;
2690 /* Build decl for bounds var. */
2691 if (TREE_CODE (obj) == VAR_DECL)
2693 if (DECL_IGNORED_P (obj))
2695 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2696 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2698 else
2700 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2702 /* For hidden symbols we want to skip first '*' char. */
2703 if (*var_name == '*')
2704 var_name++;
2706 bnd_var_name = (char *) xmalloc (strlen (var_name)
2707 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2708 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2709 strcat (bnd_var_name, var_name);
2712 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2713 get_identifier (bnd_var_name),
2714 pointer_bounds_type_node);
2716 /* Address of the obj will be used as lower bound. */
2717 TREE_ADDRESSABLE (obj) = 1;
2719 else
2721 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2722 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2724 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2725 get_identifier (bnd_var_name),
2726 pointer_bounds_type_node);
2729 TREE_PUBLIC (bnd_var) = 0;
2730 TREE_USED (bnd_var) = 1;
2731 TREE_READONLY (bnd_var) = 0;
2732 TREE_STATIC (bnd_var) = 1;
2733 TREE_ADDRESSABLE (bnd_var) = 0;
2734 DECL_ARTIFICIAL (bnd_var) = 1;
2735 DECL_COMMON (bnd_var) = 1;
2736 DECL_COMDAT (bnd_var) = 1;
2737 DECL_READ_P (bnd_var) = 1;
2738 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2739 /* Force output similar to constant bounds.
2740 See chkp_make_static_const_bounds. */
2741 varpool_node::get_create (bnd_var)->force_output = 1;
2742 /* Mark symbol as requiring bounds initialization. */
2743 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2744 varpool_node::finalize_decl (bnd_var);
2746 /* Add created var to the map to use it for other references
2747 to obj. */
2748 if (!chkp_static_var_bounds)
2749 chkp_static_var_bounds = new hash_map<tree, tree>;
2751 chkp_static_var_bounds->put (obj, bnd_var);
2753 return bnd_var;
2756 /* When var has incomplete type we cannot get size to
2757 compute its bounds. In such cases we use checker
2758 builtin call which determines object size at runtime. */
2759 static tree
2760 chkp_generate_extern_var_bounds (tree var)
2762 tree bounds, size_reloc, lb, size, max_size, cond;
2763 gimple_stmt_iterator gsi;
2764 gimple_seq seq = NULL;
2765 gimple stmt;
2767 /* If instrumentation is not enabled for vars having
2768 incomplete type then just return zero bounds to avoid
2769 checks for this var. */
2770 if (!flag_chkp_incomplete_type)
2771 return chkp_get_zero_bounds ();
2773 if (dump_file && (dump_flags & TDF_DETAILS))
2775 fprintf (dump_file, "Generating bounds for extern symbol '");
2776 print_generic_expr (dump_file, var, 0);
2777 fprintf (dump_file, "'\n");
2780 stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
2782 size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
2783 gimple_call_set_lhs (stmt, size_reloc);
2785 gimple_seq_add_stmt (&seq, stmt);
2787 lb = chkp_build_addr_expr (var);
2788 size = make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());
2790 if (flag_chkp_zero_dynamic_size_as_infinite)
2792 /* We should check that size relocation was resolved.
2793 If it was not then use maximum possible size for the var. */
2794 max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
2795 fold_convert (chkp_uintptr_type, lb));
2796 max_size = chkp_force_gimple_call_op (max_size, &seq);
2798 cond = build2 (NE_EXPR, boolean_type_node, size_reloc, integer_zero_node);
2799 stmt = gimple_build_assign_with_ops (COND_EXPR, size,
2800 cond, size_reloc, max_size);
2801 gimple_seq_add_stmt (&seq, stmt);
2803 else
2805 stmt = gimple_build_assign (size, size_reloc);
2806 gimple_seq_add_stmt (&seq, stmt);
2809 gsi = gsi_start_bb (chkp_get_entry_block ());
2810 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2812 bounds = chkp_make_bounds (lb, size, &gsi, true);
2814 return bounds;
2817 /* Return 1 if TYPE has fields with zero size or fields
2818 marked with chkp_variable_size attribute. */
2819 bool
2820 chkp_variable_size_type (tree type)
2822 bool res = false;
2823 tree field;
2825 if (RECORD_OR_UNION_TYPE_P (type))
2826 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2828 if (TREE_CODE (field) == FIELD_DECL)
2829 res = res
2830 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
2831 || chkp_variable_size_type (TREE_TYPE (field));
2833 else
2834 res = !TYPE_SIZE (type)
2835 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
2836 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
2838 return res;
2841 /* Compute and return bounds for address of DECL which is
2842 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
2843 static tree
2844 chkp_get_bounds_for_decl_addr (tree decl)
2846 tree bounds;
2848 gcc_assert (TREE_CODE (decl) == VAR_DECL
2849 || TREE_CODE (decl) == PARM_DECL
2850 || TREE_CODE (decl) == RESULT_DECL);
2852 bounds = chkp_get_registered_addr_bounds (decl);
2854 if (bounds)
2855 return bounds;
2857 if (dump_file && (dump_flags & TDF_DETAILS))
2859 fprintf (dump_file, "Building bounds for address of decl ");
2860 print_generic_expr (dump_file, decl, 0);
2861 fprintf (dump_file, "\n");
2864 /* Use zero bounds if size is unknown and checks for
2865 unknown sizes are restricted. */
2866 if ((!DECL_SIZE (decl)
2867 || (chkp_variable_size_type (TREE_TYPE (decl))
2868 && (TREE_STATIC (decl)
2869 || DECL_EXTERNAL (decl)
2870 || TREE_PUBLIC (decl))))
2871 && !flag_chkp_incomplete_type)
2872 return chkp_get_zero_bounds ();
2874 if (flag_chkp_use_static_bounds
2875 && TREE_CODE (decl) == VAR_DECL
2876 && (TREE_STATIC (decl)
2877 || DECL_EXTERNAL (decl)
2878 || TREE_PUBLIC (decl))
2879 && !DECL_THREAD_LOCAL_P (decl))
2881 tree bnd_var = chkp_make_static_bounds (decl);
2882 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2883 gimple stmt;
2885 bounds = chkp_get_tmp_reg (gimple_build_nop ());
2886 stmt = gimple_build_assign (bounds, bnd_var);
2887 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2889 else if (!DECL_SIZE (decl)
2890 || (chkp_variable_size_type (TREE_TYPE (decl))
2891 && (TREE_STATIC (decl)
2892 || DECL_EXTERNAL (decl)
2893 || TREE_PUBLIC (decl))))
2895 gcc_assert (TREE_CODE (decl) == VAR_DECL);
2896 bounds = chkp_generate_extern_var_bounds (decl);
2898 else
2900 tree lb = chkp_build_addr_expr (decl);
2901 bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
2904 return bounds;
2907 /* Compute and return bounds for constant string. */
2908 static tree
2909 chkp_get_bounds_for_string_cst (tree cst)
2911 tree bounds;
2912 tree lb;
2913 tree size;
2915 gcc_assert (TREE_CODE (cst) == STRING_CST);
2917 bounds = chkp_get_registered_bounds (cst);
2919 if (bounds)
2920 return bounds;
2922 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2923 || flag_chkp_use_static_const_bounds > 0)
2925 tree bnd_var = chkp_make_static_bounds (cst);
2926 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2927 gimple stmt;
2929 bounds = chkp_get_tmp_reg (gimple_build_nop ());
2930 stmt = gimple_build_assign (bounds, bnd_var);
2931 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2933 else
2935 lb = chkp_build_addr_expr (cst);
2936 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
2937 bounds = chkp_make_bounds (lb, size, NULL, false);
2940 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
2942 return bounds;
2945 /* Generate code to instersect bounds BOUNDS1 and BOUNDS2 and
2946 return the result. if ITER is not NULL then Code is inserted
2947 before position pointed by ITER. Otherwise code is added to
2948 entry block. */
2949 static tree
2950 chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
2952 if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
2953 return bounds2 ? bounds2 : bounds1;
2954 else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
2955 return bounds1;
2956 else
2958 gimple_seq seq;
2959 gimple stmt;
2960 tree bounds;
2962 seq = NULL;
2964 stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
2965 chkp_mark_stmt (stmt);
2967 bounds = chkp_get_tmp_reg (stmt);
2968 gimple_call_set_lhs (stmt, bounds);
2970 gimple_seq_add_stmt (&seq, stmt);
2972 /* We are probably doing narrowing for constant expression.
2973 In such case iter may be undefined. */
2974 if (!iter)
2976 gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
2977 iter = &gsi;
2978 gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
2980 else
2981 gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
2983 if (dump_file && (dump_flags & TDF_DETAILS))
2985 fprintf (dump_file, "Bounds intersection: ");
2986 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2987 fprintf (dump_file, " inserted before statement: ");
2988 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
2989 TDF_VOPS|TDF_MEMSYMS);
2992 return bounds;
2996 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
2997 and 0 othersize. */
2998 static bool
2999 chkp_may_narrow_to_field (tree field)
3001 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3002 && tree_to_uhwi (DECL_SIZE (field)) != 0
3003 && (!DECL_FIELD_OFFSET (field)
3004 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3005 && (!DECL_FIELD_BIT_OFFSET (field)
3006 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3007 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3008 && !chkp_variable_size_type (TREE_TYPE (field));
3011 /* Return 1 if bounds for FIELD should be narrowed to
3012 field's own size. */
3013 static bool
3014 chkp_narrow_bounds_for_field (tree field)
3016 HOST_WIDE_INT offs;
3017 HOST_WIDE_INT bit_offs;
3019 if (!chkp_may_narrow_to_field (field))
3020 return false;
3022 /* Accesse to compiler generated fields should not cause
3023 bounds narrowing. */
3024 if (DECL_ARTIFICIAL (field))
3025 return false;
3027 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3028 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3030 return (flag_chkp_narrow_bounds
3031 && (flag_chkp_first_field_has_own_bounds
3032 || offs
3033 || bit_offs));
3036 /* Perform narrowing for BOUNDS using bounds computed for field
3037 access COMPONENT. ITER meaning is the same as for
3038 chkp_intersect_bounds. */
3039 static tree
3040 chkp_narrow_bounds_to_field (tree bounds, tree component,
3041 gimple_stmt_iterator *iter)
3043 tree field = TREE_OPERAND (component, 1);
3044 tree size = DECL_SIZE_UNIT (field);
3045 tree field_ptr = chkp_build_addr_expr (component);
3046 tree field_bounds;
3048 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3050 return chkp_intersect_bounds (field_bounds, bounds, iter);
3053 /* Parse field or array access NODE.
3055 PTR ouput parameter holds a pointer to the outermost
3056 object.
3058 BITFIELD output parameter is set to 1 if bitfield is
3059 accessed and to 0 otherwise. If it is 1 then ELT holds
3060 outer component for accessed bit field.
3062 SAFE outer parameter is set to 1 if access is safe and
3063 checks are not required.
3065 BOUNDS outer parameter holds bounds to be used to check
3066 access (may be NULL).
3068 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3069 innermost accessed component. */
3070 static void
3071 chkp_parse_array_and_component_ref (tree node, tree *ptr,
3072 tree *elt, bool *safe,
3073 bool *bitfield,
3074 tree *bounds,
3075 gimple_stmt_iterator *iter,
3076 bool innermost_bounds)
3078 tree comp_to_narrow = NULL_TREE;
3079 tree last_comp = NULL_TREE;
3080 bool array_ref_found = false;
3081 tree *nodes;
3082 tree var;
3083 int len;
3084 int i;
3086 /* Compute tree height for expression. */
3087 var = node;
3088 len = 1;
3089 while (TREE_CODE (var) == COMPONENT_REF
3090 || TREE_CODE (var) == ARRAY_REF
3091 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
3093 var = TREE_OPERAND (var, 0);
3094 len++;
3097 gcc_assert (len > 1);
3099 /* It is more convenient for us to scan left-to-right,
3100 so walk tree again and put all node to nodes vector
3101 in reversed order. */
3102 nodes = XALLOCAVEC (tree, len);
3103 nodes[len - 1] = node;
3104 for (i = len - 2; i >= 0; i--)
3105 nodes[i] = TREE_OPERAND (nodes[i + 1], 0);
3107 if (bounds)
3108 *bounds = NULL;
3109 *safe = true;
3110 *bitfield = (TREE_CODE (node) == COMPONENT_REF
3111 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
3112 /* To get bitfield address we will need outer elemnt. */
3113 if (*bitfield)
3114 *elt = nodes[len - 2];
3115 else
3116 *elt = NULL_TREE;
3118 /* If we have indirection in expression then compute
3119 outermost structure bounds. Computed bounds may be
3120 narrowed later. */
3121 if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
3123 *safe = false;
3124 *ptr = TREE_OPERAND (nodes[0], 0);
3125 if (bounds)
3126 *bounds = chkp_find_bounds (*ptr, iter);
3128 else
3130 gcc_assert (TREE_CODE (var) == VAR_DECL
3131 || TREE_CODE (var) == PARM_DECL
3132 || TREE_CODE (var) == RESULT_DECL
3133 || TREE_CODE (var) == STRING_CST
3134 || TREE_CODE (var) == SSA_NAME);
3136 *ptr = chkp_build_addr_expr (var);
3139 /* In this loop we are trying to find a field access
3140 requiring narrowing. There are two simple rules
3141 for search:
3142 1. Leftmost array_ref is chosen if any.
3143 2. Rightmost suitable component_ref is chosen if innermost
3144 bounds are required and no array_ref exists. */
3145 for (i = 1; i < len; i++)
3147 var = nodes[i];
3149 if (TREE_CODE (var) == ARRAY_REF)
3151 *safe = false;
3152 array_ref_found = true;
3153 if (flag_chkp_narrow_bounds
3154 && !flag_chkp_narrow_to_innermost_arrray
3155 && (!last_comp
3156 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
3158 comp_to_narrow = last_comp;
3159 break;
3162 else if (TREE_CODE (var) == COMPONENT_REF)
3164 tree field = TREE_OPERAND (var, 1);
3166 if (innermost_bounds
3167 && !array_ref_found
3168 && chkp_narrow_bounds_for_field (field))
3169 comp_to_narrow = var;
3170 last_comp = var;
3172 if (flag_chkp_narrow_bounds
3173 && flag_chkp_narrow_to_innermost_arrray
3174 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
3176 if (bounds)
3177 *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
3178 comp_to_narrow = NULL;
3181 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
3182 /* Nothing to do for it. */
3184 else
3185 gcc_unreachable ();
3188 if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
3189 *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);
3191 if (innermost_bounds && bounds && !*bounds)
3192 *bounds = chkp_find_bounds (*ptr, iter);
3195 /* Compute and return bounds for address of OBJ. */
3196 static tree
3197 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
3199 tree bounds = chkp_get_registered_addr_bounds (obj);
3201 if (bounds)
3202 return bounds;
3204 switch (TREE_CODE (obj))
3206 case VAR_DECL:
3207 case PARM_DECL:
3208 case RESULT_DECL:
3209 bounds = chkp_get_bounds_for_decl_addr (obj);
3210 break;
3212 case STRING_CST:
3213 bounds = chkp_get_bounds_for_string_cst (obj);
3214 break;
3216 case ARRAY_REF:
3217 case COMPONENT_REF:
3219 tree elt;
3220 tree ptr;
3221 bool safe;
3222 bool bitfield;
3224 chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
3225 &bitfield, &bounds, iter, true);
3227 gcc_assert (bounds);
3229 break;
3231 case FUNCTION_DECL:
3232 case LABEL_DECL:
3233 bounds = chkp_get_zero_bounds ();
3234 break;
3236 case MEM_REF:
3237 bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
3238 break;
3240 case REALPART_EXPR:
3241 case IMAGPART_EXPR:
3242 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
3243 break;
3245 default:
3246 if (dump_file && (dump_flags & TDF_DETAILS))
3248 fprintf (dump_file, "chkp_make_addressed_object_bounds: "
3249 "unexpected object of type %s\n",
3250 get_tree_code_name (TREE_CODE (obj)));
3251 print_node (dump_file, "", obj, 0);
3253 internal_error ("chkp_make_addressed_object_bounds: "
3254 "Unexpected tree code %s",
3255 get_tree_code_name (TREE_CODE (obj)));
3258 chkp_register_addr_bounds (obj, bounds);
3260 return bounds;
3263 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3264 to compute bounds if required. Computed bounds should be available at
3265 position pointed by ITER.
3267 If PTR_SRC is NULL_TREE then pointer definition is identified.
3269 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3270 PTR. If PTR is a any memory reference then ITER points to a statement
3271 after which bndldx will be inserterd. In both cases ITER will be updated
3272 to point to the inserted bndldx statement. */
3274 static tree
3275 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3277 tree addr = NULL_TREE;
3278 tree bounds = NULL_TREE;
3280 if (!ptr_src)
3281 ptr_src = ptr;
3283 bounds = chkp_get_registered_bounds (ptr_src);
3285 if (bounds)
3286 return bounds;
3288 switch (TREE_CODE (ptr_src))
3290 case MEM_REF:
3291 case VAR_DECL:
3292 if (BOUNDED_P (ptr_src))
3293 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3294 bounds = chkp_get_zero_bounds ();
3295 else
3297 addr = chkp_build_addr_expr (ptr_src);
3298 bounds = chkp_build_bndldx (addr, ptr, iter);
3300 else
3301 bounds = chkp_get_nonpointer_load_bounds ();
3302 break;
3304 case ARRAY_REF:
3305 case COMPONENT_REF:
3306 addr = get_base_address (ptr_src);
3307 if (DECL_P (addr)
3308 || TREE_CODE (addr) == MEM_REF
3309 || TREE_CODE (addr) == TARGET_MEM_REF)
3311 if (BOUNDED_P (ptr_src))
3312 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3313 bounds = chkp_get_zero_bounds ();
3314 else
3316 addr = chkp_build_addr_expr (ptr_src);
3317 bounds = chkp_build_bndldx (addr, ptr, iter);
3319 else
3320 bounds = chkp_get_nonpointer_load_bounds ();
3322 else
3324 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3325 bounds = chkp_find_bounds (addr, iter);
3327 break;
3329 case PARM_DECL:
3330 gcc_unreachable ();
3331 bounds = chkp_get_bound_for_parm (ptr_src);
3332 break;
3334 case TARGET_MEM_REF:
3335 addr = chkp_build_addr_expr (ptr_src);
3336 bounds = chkp_build_bndldx (addr, ptr, iter);
3337 break;
3339 case SSA_NAME:
3340 bounds = chkp_get_registered_bounds (ptr_src);
3341 if (!bounds)
3343 gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3344 gphi_iterator phi_iter;
3346 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3348 gcc_assert (bounds);
3350 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3352 unsigned i;
3354 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3356 tree arg = gimple_phi_arg_def (def_phi, i);
3357 tree arg_bnd;
3358 gphi *phi_bnd;
3360 arg_bnd = chkp_find_bounds (arg, NULL);
3362 /* chkp_get_bounds_by_definition created new phi
3363 statement and phi_iter points to it.
3365 Previous call to chkp_find_bounds could create
3366 new basic block and therefore change phi statement
3367 phi_iter points to. */
3368 phi_bnd = phi_iter.phi ();
3370 add_phi_arg (phi_bnd, arg_bnd,
3371 gimple_phi_arg_edge (def_phi, i),
3372 UNKNOWN_LOCATION);
3375 /* If all bound phi nodes have their arg computed
3376 then we may finish its computation. See
3377 chkp_finish_incomplete_bounds for more details. */
3378 if (chkp_may_finish_incomplete_bounds ())
3379 chkp_finish_incomplete_bounds ();
3382 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3383 || chkp_incomplete_bounds (bounds));
3385 break;
3387 case ADDR_EXPR:
3388 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3389 break;
3391 case INTEGER_CST:
3392 if (integer_zerop (ptr_src))
3393 bounds = chkp_get_none_bounds ();
3394 else
3395 bounds = chkp_get_invalid_op_bounds ();
3396 break;
3398 default:
3399 if (dump_file && (dump_flags & TDF_DETAILS))
3401 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3402 get_tree_code_name (TREE_CODE (ptr_src)));
3403 print_node (dump_file, "", ptr_src, 0);
3405 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3406 get_tree_code_name (TREE_CODE (ptr_src)));
3409 if (!bounds)
3411 if (dump_file && (dump_flags & TDF_DETAILS))
3413 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3414 print_node (dump_file, "", ptr_src, 0);
3416 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3419 return bounds;
3422 /* Normal case for bounds search without forced narrowing. */
3423 static tree
3424 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3426 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3429 /* Search bounds for pointer PTR loaded from PTR_SRC
3430 by statement *ITER points to. */
3431 static tree
3432 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3434 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3437 /* Helper function which checks type of RHS and finds all pointers in
3438 it. For each found pointer we build it's accesses in LHS and RHS
3439 objects and then call HANDLER for them. Function is used to copy
3440 or initilize bounds for copied object. */
3441 static void
3442 chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
3443 assign_handler handler)
3445 tree type = TREE_TYPE (lhs);
3447 /* We have nothing to do with clobbers. */
3448 if (TREE_CLOBBER_P (rhs))
3449 return;
3451 if (BOUNDED_TYPE_P (type))
3452 handler (lhs, rhs, arg);
3453 else if (RECORD_OR_UNION_TYPE_P (type))
3455 tree field;
3457 if (TREE_CODE (rhs) == CONSTRUCTOR)
3459 unsigned HOST_WIDE_INT cnt;
3460 tree val;
3462 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
3464 if (chkp_type_has_pointer (TREE_TYPE (field)))
3466 tree lhs_field = chkp_build_component_ref (lhs, field);
3467 chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
3471 else
3472 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3473 if (TREE_CODE (field) == FIELD_DECL
3474 && chkp_type_has_pointer (TREE_TYPE (field)))
3476 tree rhs_field = chkp_build_component_ref (rhs, field);
3477 tree lhs_field = chkp_build_component_ref (lhs, field);
3478 chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
3481 else if (TREE_CODE (type) == ARRAY_TYPE)
3483 unsigned HOST_WIDE_INT cur = 0;
3484 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3485 tree etype = TREE_TYPE (type);
3486 tree esize = TYPE_SIZE (etype);
3488 if (TREE_CODE (rhs) == CONSTRUCTOR)
3490 unsigned HOST_WIDE_INT cnt;
3491 tree purp, val, lhs_elem;
3493 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
3495 if (purp && TREE_CODE (purp) == RANGE_EXPR)
3497 tree lo_index = TREE_OPERAND (purp, 0);
3498 tree hi_index = TREE_OPERAND (purp, 1);
3500 for (cur = (unsigned)tree_to_uhwi (lo_index);
3501 cur <= (unsigned)tree_to_uhwi (hi_index);
3502 cur++)
3504 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3505 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3508 else
3510 if (purp)
3512 gcc_assert (TREE_CODE (purp) == INTEGER_CST);
3513 cur = tree_to_uhwi (purp);
3516 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
3518 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3522 /* Copy array only when size is known. */
3523 else if (maxval && !integer_minus_onep (maxval))
3524 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
3526 tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3527 tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
3528 chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
3531 else
3532 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3533 get_tree_code_name (TREE_CODE (type)));
3536 /* Add code to copy bounds for assignment of RHS to LHS.
3537 ARG is an iterator pointing ne code position. */
3538 static void
3539 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3541 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3542 tree bounds = chkp_find_bounds (rhs, iter);
3543 tree addr = chkp_build_addr_expr(lhs);
3545 chkp_build_bndstx (addr, rhs, bounds, iter);
3548 /* Emit static bound initilizers and size vars. */
3549 void
3550 chkp_finish_file (void)
3552 struct varpool_node *node;
3553 struct chkp_ctor_stmt_list stmts;
3555 if (seen_error ())
3556 return;
3558 /* Iterate through varpool and generate bounds initialization
3559 constructors for all statically initialized pointers. */
3560 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3561 stmts.stmts = NULL;
3562 FOR_EACH_VARIABLE (node)
3563 /* Check that var is actually emitted and we need and may initialize
3564 its bounds. */
3565 if (node->need_bounds_init
3566 && !POINTER_BOUNDS_P (node->decl)
3567 && DECL_RTL (node->decl)
3568 && MEM_P (DECL_RTL (node->decl))
3569 && TREE_ASM_WRITTEN (node->decl))
3571 chkp_walk_pointer_assignments (node->decl,
3572 DECL_INITIAL (node->decl),
3573 &stmts,
3574 chkp_add_modification_to_stmt_list);
3576 if (stmts.avail <= 0)
3578 cgraph_build_static_cdtor ('P', stmts.stmts,
3579 MAX_RESERVED_INIT_PRIORITY + 3);
3580 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3581 stmts.stmts = NULL;
3585 if (stmts.stmts)
3586 cgraph_build_static_cdtor ('P', stmts.stmts,
3587 MAX_RESERVED_INIT_PRIORITY + 3);
3589 /* Iterate through varpool and generate bounds initialization
3590 constructors for all static bounds vars. */
3591 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3592 stmts.stmts = NULL;
3593 FOR_EACH_VARIABLE (node)
3594 if (node->need_bounds_init
3595 && POINTER_BOUNDS_P (node->decl)
3596 && TREE_ASM_WRITTEN (node->decl))
3598 tree bnd = node->decl;
3599 tree var;
3601 gcc_assert (DECL_INITIAL (bnd)
3602 && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);
3604 var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
3605 chkp_output_static_bounds (bnd, var, &stmts);
3608 if (stmts.stmts)
3609 cgraph_build_static_cdtor ('B', stmts.stmts,
3610 MAX_RESERVED_INIT_PRIORITY + 2);
3612 delete chkp_static_var_bounds;
3613 delete chkp_bounds_map;
3616 /* An instrumentation function which is called for each statement
3617 having memory access we want to instrument. It inserts check
3618 code and bounds copy code.
3620 ITER points to statement to instrument.
3622 NODE holds memory access in statement to check.
3624 LOC holds the location information for statement.
3626 DIRFLAGS determines whether access is read or write.
3628 ACCESS_OFFS should be added to address used in NODE
3629 before check.
3631 ACCESS_SIZE holds size of checked access.
3633 SAFE indicates if NODE access is safe and should not be
3634 checked. */
3635 static void
3636 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3637 location_t loc, tree dirflag,
3638 tree access_offs, tree access_size,
3639 bool safe)
3641 tree node_type = TREE_TYPE (node);
3642 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3643 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3644 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3645 tree ptr = NULL_TREE; /* a pointer used for dereference */
3646 tree bounds = NULL_TREE;
3648 /* We do not need instrumentation for clobbers. */
3649 if (dirflag == integer_one_node
3650 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3651 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3652 return;
3654 switch (TREE_CODE (node))
3656 case ARRAY_REF:
3657 case COMPONENT_REF:
3659 bool bitfield;
3660 tree elt;
3662 if (safe)
3664 /* We are not going to generate any checks, so do not
3665 generate bounds as well. */
3666 addr_first = chkp_build_addr_expr (node);
3667 break;
3670 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3671 &bitfield, &bounds, iter, false);
3673 /* Break if there is no dereference and operation is safe. */
3675 if (bitfield)
3677 tree field = TREE_OPERAND (node, 1);
3679 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3680 size = DECL_SIZE_UNIT (field);
3682 if (elt)
3683 elt = chkp_build_addr_expr (elt);
3684 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3685 addr_first = fold_build_pointer_plus_loc (loc,
3686 addr_first,
3687 byte_position (field));
3689 else
3690 addr_first = chkp_build_addr_expr (node);
3692 break;
3694 case INDIRECT_REF:
3695 ptr = TREE_OPERAND (node, 0);
3696 addr_first = ptr;
3697 break;
3699 case MEM_REF:
3700 ptr = TREE_OPERAND (node, 0);
3701 addr_first = chkp_build_addr_expr (node);
3702 break;
3704 case TARGET_MEM_REF:
3705 ptr = TMR_BASE (node);
3706 addr_first = chkp_build_addr_expr (node);
3707 break;
3709 case ARRAY_RANGE_REF:
3710 printf("ARRAY_RANGE_REF\n");
3711 debug_gimple_stmt(gsi_stmt(*iter));
3712 debug_tree(node);
3713 gcc_unreachable ();
3714 break;
3716 case BIT_FIELD_REF:
3718 tree offs, rem, bpu;
3720 gcc_assert (!access_offs);
3721 gcc_assert (!access_size);
3723 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3724 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3725 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3726 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3728 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3729 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3730 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3731 size = fold_convert (size_type_node, size);
3733 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3734 dirflag, offs, size, safe);
3735 return;
3737 break;
3739 case VAR_DECL:
3740 case RESULT_DECL:
3741 case PARM_DECL:
3742 if (dirflag != integer_one_node
3743 || DECL_REGISTER (node))
3744 return;
3746 safe = true;
3747 addr_first = chkp_build_addr_expr (node);
3748 break;
3750 default:
3751 return;
3754 /* If addr_last was not computed then use (addr_first + size - 1)
3755 expression to compute it. */
3756 if (!addr_last)
3758 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3759 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3762 /* Shift both first_addr and last_addr by access_offs if specified. */
3763 if (access_offs)
3765 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3766 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3769 /* Generate bndcl/bndcu checks if memory access is not safe. */
3770 if (!safe)
3772 gimple_stmt_iterator stmt_iter = *iter;
3774 if (!bounds)
3775 bounds = chkp_find_bounds (ptr, iter);
3777 chkp_check_mem_access (addr_first, addr_last, bounds,
3778 stmt_iter, loc, dirflag);
3781 /* We need to store bounds in case pointer is stored. */
3782 if (dirflag == integer_one_node
3783 && chkp_type_has_pointer (node_type)
3784 && flag_chkp_store_bounds)
3786 gimple stmt = gsi_stmt (*iter);
3787 tree rhs1 = gimple_assign_rhs1 (stmt);
3788 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3790 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3791 chkp_walk_pointer_assignments (node, rhs1, iter,
3792 chkp_copy_bounds_for_elem);
3793 else
3795 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3796 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
3801 /* Add code to copy bounds for all pointers copied
3802 in ASSIGN created during inline of EDGE. */
3803 void
3804 chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
3806 tree lhs = gimple_assign_lhs (assign);
3807 tree rhs = gimple_assign_rhs1 (assign);
3808 gimple_stmt_iterator iter = gsi_for_stmt (assign);
3810 if (!flag_chkp_store_bounds)
3811 return;
3813 chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);
3815 /* We should create edges for all created calls to bndldx and bndstx. */
3816 while (gsi_stmt (iter) != assign)
3818 gimple stmt = gsi_stmt (iter);
3819 if (gimple_code (stmt) == GIMPLE_CALL)
3821 tree fndecl = gimple_call_fndecl (stmt);
3822 struct cgraph_node *callee = cgraph_node::get_create (fndecl);
3823 struct cgraph_edge *new_edge;
3825 gcc_assert (fndecl == chkp_bndstx_fndecl
3826 || fndecl == chkp_bndldx_fndecl
3827 || fndecl == chkp_ret_bnd_fndecl);
3829 new_edge = edge->caller->create_edge (callee,
3830 as_a <gcall *> (stmt),
3831 edge->count,
3832 edge->frequency);
3833 new_edge->frequency = compute_call_stmt_bb_frequency
3834 (edge->caller->decl, gimple_bb (stmt));
3836 gsi_prev (&iter);
3840 /* Some code transformation made during instrumentation pass
3841 may put code into inconsistent state. Here we find and fix
3842 such flaws. */
3843 void
3844 chkp_fix_cfg ()
3846 basic_block bb;
3847 gimple_stmt_iterator i;
3849 /* We could insert some code right after stmt which ends bb.
3850 We wanted to put this code on fallthru edge but did not
3851 add new edges from the beginning because it may cause new
3852 phi node creation which may be incorrect due to incomplete
3853 bound phi nodes. */
3854 FOR_ALL_BB_FN (bb, cfun)
3855 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
3857 gimple stmt = gsi_stmt (i);
3858 gimple_stmt_iterator next = i;
3860 gsi_next (&next);
3862 if (stmt_ends_bb_p (stmt)
3863 && !gsi_end_p (next))
3865 edge fall = find_fallthru_edge (bb->succs);
3866 basic_block dest = NULL;
3867 int flags = 0;
3869 gcc_assert (fall);
3871 /* We cannot split abnormal edge. Therefore we
3872 store its params, make it regular and then
3873 rebuild abnormal edge after split. */
3874 if (fall->flags & EDGE_ABNORMAL)
3876 flags = fall->flags & ~EDGE_FALLTHRU;
3877 dest = fall->dest;
3879 fall->flags &= ~EDGE_COMPLEX;
3882 while (!gsi_end_p (next))
3884 gimple next_stmt = gsi_stmt (next);
3885 gsi_remove (&next, false);
3886 gsi_insert_on_edge (fall, next_stmt);
3889 gsi_commit_edge_inserts ();
3891 /* Re-create abnormal edge. */
3892 if (dest)
3893 make_edge (bb, dest, flags);
3898 /* Walker callback for chkp_replace_function_pointers. Replaces
3899 function pointer in the specified operand with pointer to the
3900 instrumented function version. */
3901 static tree
3902 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
3903 void *data ATTRIBUTE_UNUSED)
3905 if (TREE_CODE (*op) == FUNCTION_DECL
3906 && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op))
3907 /* Do not replace builtins for now. */
3908 && DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN)
3910 struct cgraph_node *node = cgraph_node::get_create (*op);
3912 if (!node->instrumentation_clone)
3913 chkp_maybe_create_clone (*op);
3915 *op = node->instrumented_version->decl;
3916 *walk_subtrees = 0;
3919 return NULL;
3922 /* This function searches for function pointers in statement
3923 pointed by GSI and replaces them with pointers to instrumented
3924 function versions. */
3925 static void
3926 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
3928 gimple stmt = gsi_stmt (*gsi);
3929 /* For calls we want to walk call args only. */
3930 if (gimple_code (stmt) == GIMPLE_CALL)
3932 unsigned i;
3933 for (i = 0; i < gimple_call_num_args (stmt); i++)
3934 walk_tree (gimple_call_arg_ptr (stmt, i),
3935 chkp_replace_function_pointer, NULL, NULL);
3937 else
3938 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
3941 /* This function instruments all statements working with memory,
3942 calls and rets.
3944 It also removes excess statements from static initializers. */
3945 static void
3946 chkp_instrument_function (void)
3948 basic_block bb, next;
3949 gimple_stmt_iterator i;
3950 enum gimple_rhs_class grhs_class;
3951 bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
3953 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
3956 next = bb->next_bb;
3957 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3959 gimple s = gsi_stmt (i);
3961 /* Skip statement marked to not be instrumented. */
3962 if (chkp_marked_stmt_p (s))
3964 gsi_next (&i);
3965 continue;
3968 chkp_replace_function_pointers (&i);
3970 switch (gimple_code (s))
3972 case GIMPLE_ASSIGN:
3973 chkp_process_stmt (&i, gimple_assign_lhs (s),
3974 gimple_location (s), integer_one_node,
3975 NULL_TREE, NULL_TREE, safe);
3976 chkp_process_stmt (&i, gimple_assign_rhs1 (s),
3977 gimple_location (s), integer_zero_node,
3978 NULL_TREE, NULL_TREE, safe);
3979 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
3980 if (grhs_class == GIMPLE_BINARY_RHS)
3981 chkp_process_stmt (&i, gimple_assign_rhs2 (s),
3982 gimple_location (s), integer_zero_node,
3983 NULL_TREE, NULL_TREE, safe);
3984 break;
3986 case GIMPLE_RETURN:
3988 greturn *r = as_a <greturn *> (s);
3989 if (gimple_return_retval (r) != NULL_TREE)
3991 chkp_process_stmt (&i, gimple_return_retval (r),
3992 gimple_location (r),
3993 integer_zero_node,
3994 NULL_TREE, NULL_TREE, safe);
3996 /* Additionally we need to add bounds
3997 to return statement. */
3998 chkp_add_bounds_to_ret_stmt (&i);
4001 break;
4003 case GIMPLE_CALL:
4004 chkp_add_bounds_to_call_stmt (&i);
4005 break;
4007 default:
4011 gsi_next (&i);
4013 /* We do not need any actual pointer stores in checker
4014 static initializer. */
4015 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4016 && gimple_code (s) == GIMPLE_ASSIGN
4017 && gimple_store_p (s))
4019 gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4020 gsi_remove (&del_iter, true);
4021 unlink_stmt_vdef (s);
4022 release_defs(s);
4025 bb = next;
4027 while (bb);
4029 /* Some input params may have bounds and be address taken. In this case
4030 we should store incoming bounds into bounds table. */
4031 tree arg;
4032 if (flag_chkp_store_bounds)
4033 for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4034 if (TREE_ADDRESSABLE (arg))
4036 if (BOUNDED_P (arg))
4038 tree bounds = chkp_get_next_bounds_parm (arg);
4039 tree def_ptr = ssa_default_def (cfun, arg);
4040 gimple_stmt_iterator iter
4041 = gsi_start_bb (chkp_get_entry_block ());
4042 chkp_build_bndstx (chkp_build_addr_expr (arg),
4043 def_ptr ? def_ptr : arg,
4044 bounds, &iter);
4046 /* Skip bounds arg. */
4047 arg = TREE_CHAIN (arg);
4049 else if (chkp_type_has_pointer (TREE_TYPE (arg)))
4051 tree orig_arg = arg;
4052 bitmap slots = BITMAP_ALLOC (NULL);
4053 gimple_stmt_iterator iter
4054 = gsi_start_bb (chkp_get_entry_block ());
4055 bitmap_iterator bi;
4056 unsigned bnd_no;
4058 chkp_find_bound_slots (TREE_TYPE (arg), slots);
4060 EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4062 tree bounds = chkp_get_next_bounds_parm (arg);
4063 HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4064 tree addr = chkp_build_addr_expr (orig_arg);
4065 tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4066 build_int_cst (ptr_type_node, offs));
4067 chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4068 bounds, &iter);
4070 arg = DECL_CHAIN (arg);
4072 BITMAP_FREE (slots);
4077 /* Find init/null/copy_ptr_bounds calls and replace them
4078 with assignments. It should allow better code
4079 optimization. */
4081 static void
4082 chkp_remove_useless_builtins ()
4084 basic_block bb;
4085 gimple_stmt_iterator gsi;
4087 FOR_EACH_BB_FN (bb, cfun)
4089 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4091 gimple stmt = gsi_stmt (gsi);
4092 tree fndecl;
4093 enum built_in_function fcode;
4095 /* Find builtins returning first arg and replace
4096 them with assignments. */
4097 if (gimple_code (stmt) == GIMPLE_CALL
4098 && (fndecl = gimple_call_fndecl (stmt))
4099 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4100 && (fcode = DECL_FUNCTION_CODE (fndecl))
4101 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4102 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4103 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4104 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4106 tree res = gimple_call_arg (stmt, 0);
4107 update_call_from_tree (&gsi, res);
4108 stmt = gsi_stmt (gsi);
4109 update_stmt (stmt);
4115 /* Initialize pass. */
4116 static void
4117 chkp_init (void)
4119 basic_block bb;
4120 gimple_stmt_iterator i;
4122 in_chkp_pass = true;
4124 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4125 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4126 chkp_unmark_stmt (gsi_stmt (i));
4128 chkp_invalid_bounds = new hash_set<tree>;
4129 chkp_completed_bounds_set = new hash_set<tree>;
4130 delete chkp_reg_bounds;
4131 chkp_reg_bounds = new hash_map<tree, tree>;
4132 delete chkp_bound_vars;
4133 chkp_bound_vars = new hash_map<tree, tree>;
4134 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4135 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4136 delete chkp_bounds_map;
4137 chkp_bounds_map = new hash_map<tree, tree>;
4138 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
4140 entry_block = NULL;
4141 zero_bounds = NULL_TREE;
4142 none_bounds = NULL_TREE;
4143 incomplete_bounds = integer_zero_node;
4144 tmp_var = NULL_TREE;
4145 size_tmp_var = NULL_TREE;
4147 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4149 /* We create these constant bounds once for each object file.
4150 These symbols go to comdat section and result in single copy
4151 of each one in the final binary. */
4152 chkp_get_zero_bounds_var ();
4153 chkp_get_none_bounds_var ();
4155 calculate_dominance_info (CDI_DOMINATORS);
4156 calculate_dominance_info (CDI_POST_DOMINATORS);
4158 bitmap_obstack_initialize (NULL);
4161 /* Finalize instrumentation pass. */
4162 static void
4163 chkp_fini (void)
4165 in_chkp_pass = false;
4167 delete chkp_invalid_bounds;
4168 delete chkp_completed_bounds_set;
4169 delete chkp_reg_addr_bounds;
4170 delete chkp_incomplete_bounds_map;
4172 free_dominance_info (CDI_DOMINATORS);
4173 free_dominance_info (CDI_POST_DOMINATORS);
4175 bitmap_obstack_release (NULL);
4178 /* Main instrumentation pass function. */
4179 static unsigned int
4180 chkp_execute (void)
4182 chkp_init ();
4184 chkp_instrument_function ();
4186 chkp_remove_useless_builtins ();
4188 chkp_function_mark_instrumented (cfun->decl);
4190 chkp_fix_cfg ();
4192 chkp_fini ();
4194 return 0;
4197 /* Instrumentation pass gate. */
4198 static bool
4199 chkp_gate (void)
4201 return cgraph_node::get (cfun->decl)->instrumentation_clone
4202 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4205 namespace {
4207 const pass_data pass_data_chkp =
4209 GIMPLE_PASS, /* type */
4210 "chkp", /* name */
4211 OPTGROUP_NONE, /* optinfo_flags */
4212 TV_NONE, /* tv_id */
4213 PROP_ssa | PROP_cfg, /* properties_required */
4214 0, /* properties_provided */
4215 0, /* properties_destroyed */
4216 0, /* todo_flags_start */
4217 TODO_verify_il
4218 | TODO_update_ssa /* todo_flags_finish */
4221 class pass_chkp : public gimple_opt_pass
4223 public:
4224 pass_chkp (gcc::context *ctxt)
4225 : gimple_opt_pass (pass_data_chkp, ctxt)
4228 /* opt_pass methods: */
4229 virtual opt_pass * clone ()
4231 return new pass_chkp (m_ctxt);
4234 virtual bool gate (function *)
4236 return chkp_gate ();
4239 virtual unsigned int execute (function *)
4241 return chkp_execute ();
4244 }; // class pass_chkp
4246 } // anon namespace
4248 gimple_opt_pass *
4249 make_pass_chkp (gcc::context *ctxt)
4251 return new pass_chkp (ctxt);
4254 #include "gt-tree-chkp.h"