* lib/ubsan-dg.exp (check_effective_target_fsanitize_undefined):
[official-gcc.git] / gcc / tree-chkp.c
blobe7b9bbf602ec457e7dd0a7f73fa108476d1899cd
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree-core.h"
25 #include "stor-layout.h"
26 #include "varasm.h"
27 #include "tree.h"
28 #include "target.h"
29 #include "tree-iterator.h"
30 #include "tree-cfg.h"
31 #include "langhooks.h"
32 #include "tree-pass.h"
33 #include "diagnostic.h"
34 #include "ggc.h"
35 #include "is-a.h"
36 #include "cfgloop.h"
37 #include "stringpool.h"
38 #include "tree-ssa-alias.h"
39 #include "tree-ssanames.h"
40 #include "tree-ssa-operands.h"
41 #include "tree-ssa-address.h"
42 #include "tree-ssa.h"
43 #include "predict.h"
44 #include "dominance.h"
45 #include "cfg.h"
46 #include "basic-block.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "gimple-expr.h"
49 #include "gimple.h"
50 #include "tree-phinodes.h"
51 #include "gimple-ssa.h"
52 #include "ssa-iterators.h"
53 #include "gimple-pretty-print.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "gimplify-me.h"
57 #include "print-tree.h"
58 #include "expr.h"
59 #include "tree-ssa-propagate.h"
60 #include "gimple-fold.h"
61 #include "tree-chkp.h"
62 #include "gimple-walk.h"
63 #include "rtl.h" /* For MEM_P, assign_temp. */
64 #include "tree-dfa.h"
65 #include "ipa-ref.h"
66 #include "lto-streamer.h"
67 #include "cgraph.h"
68 #include "ipa-chkp.h"
69 #include "params.h"
70 #include "ipa-chkp.h"
71 #include "params.h"
73 /* Pointer Bounds Checker instruments code with memory checks to find
74 out-of-bounds memory accesses. Checks are performed by computing
75 bounds for each pointer and then comparing address of accessed
76 memory before pointer dereferencing.
78 1. Function clones.
80 See ipa-chkp.c.
82 2. Instrumentation.
84 There are few things to instrument:
86 a) Memory accesses - add checker calls to check address of accessed memory
87 against bounds of dereferenced pointer. Obviously safe memory
88 accesses like static variable access does not have to be instrumented
89 with checks.
91 Example:
93 val_2 = *p_1;
95 with 4 bytes access is transformed into:
97 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
98 D.1_4 = p_1 + 3;
99 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
100 val_2 = *p_1;
102 where __bound_tmp.1_3 are bounds computed for pointer p_1,
103 __builtin___chkp_bndcl is a lower bound check and
104 __builtin___chkp_bndcu is an upper bound check.
106 b) Pointer stores.
108 When pointer is stored in memory we need to store its bounds. To
109 achieve compatibility of instrumented code with regular codes
110 we have to keep data layout and store bounds in special bound tables
111 via special checker call. Implementation of bounds table may vary for
112 different platforms. It has to associate pointer value and its
113 location (it is required because we may have two equal pointers
114 with different bounds stored in different places) with bounds.
115 Another checker builtin allows to get bounds for specified pointer
116 loaded from specified location.
118 Example:
120 buf1[i_1] = &buf2;
122 is transformed into:
124 buf1[i_1] = &buf2;
125 D.1_2 = &buf1[i_1];
126 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
128 where __bound_tmp.1_2 are bounds of &buf2.
130 c) Static initialization.
132 The special case of pointer store is static pointer initialization.
133 Bounds initialization is performed in a few steps:
134 - register all static initializations in front-end using
135 chkp_register_var_initializer
136 - when file compilation finishes we create functions with special
137 attribute 'chkp ctor' and put explicit initialization code
138 (assignments) for all statically initialized pointers.
139 - when checker constructor is compiled checker pass adds required
140 bounds initialization for all statically initialized pointers
141 - since we do not actually need excess pointers initialization
142 in checker constructor we remove such assignments from them
144 d) Calls.
146 For each call in the code we add additional arguments to pass
147 bounds for pointer arguments. We determine type of call arguments
148 using arguments list from function declaration; if function
149 declaration is not available we use function type; otherwise
150 (e.g. for unnamed arguments) we use type of passed value. Function
151 declaration/type is replaced with the instrumented one.
153 Example:
155 val_1 = foo (&buf1, &buf2, &buf1, 0);
157 is translated into:
159 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
160 &buf1, __bound_tmp.1_2, 0);
162 e) Returns.
164 If function returns a pointer value we have to return bounds also.
165 A new operand was added for return statement to hold returned bounds.
167 Example:
169 return &_buf1;
171 is transformed into
173 return &_buf1, __bound_tmp.1_1;
175 3. Bounds computation.
177 Compiler is fully responsible for computing bounds to be used for each
178 memory access. The first step for bounds computation is to find the
179 origin of pointer dereferenced for memory access. Basing on pointer
180 origin we define a way to compute its bounds. There are just few
181 possible cases:
183 a) Pointer is returned by call.
185 In this case we use corresponding checker builtin method to obtain returned
186 bounds.
188 Example:
190 buf_1 = malloc (size_2);
191 foo (buf_1);
193 is translated into:
195 buf_1 = malloc (size_2);
196 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
197 foo (buf_1, __bound_tmp.1_3);
199 b) Pointer is an address of an object.
201 In this case compiler tries to compute objects size and create corresponding
202 bounds. If object has incomplete type then special checker builtin is used to
203 obtain its size at runtime.
205 Example:
207 foo ()
209 <unnamed type> __bound_tmp.3;
210 static int buf[100];
212 <bb 3>:
213 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
215 <bb 2>:
216 return &buf, __bound_tmp.3_2;
219 Example:
221 Address of an object 'extern int buf[]' with incomplete type is
222 returned.
224 foo ()
226 <unnamed type> __bound_tmp.4;
227 long unsigned int __size_tmp.3;
229 <bb 3>:
230 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
231 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
233 <bb 2>:
234 return &buf, __bound_tmp.4_3;
237 c) Pointer is the result of object narrowing.
239 It happens when we use pointer to an object to compute pointer to a part
240 of an object. E.g. we take pointer to a field of a structure. In this
241 case we perform bounds intersection using bounds of original object and
242 bounds of object's part (which are computed basing on its type).
244 There may be some debatable questions about when narrowing should occur
245 and when it should not. To avoid false bound violations in correct
246 programs we do not perform narrowing when address of an array element is
247 obtained (it has address of the whole array) and when address of the first
248 structure field is obtained (because it is guaranteed to be equal to
249 address of the whole structure and it is legal to cast it back to structure).
251 Default narrowing behavior may be changed using compiler flags.
253 Example:
255 In this example address of the second structure field is returned.
257 foo (struct A * p, __bounds_type __bounds_of_p)
259 <unnamed type> __bound_tmp.3;
260 int * _2;
261 int * _5;
263 <bb 2>:
264 _5 = &p_1(D)->second_field;
265 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
266 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
267 __bounds_of_p_3(D));
268 _2 = &p_1(D)->second_field;
269 return _2, __bound_tmp.3_8;
272 Example:
274 In this example address of the first field of array element is returned.
276 foo (struct A * p, __bounds_type __bounds_of_p, int i)
278 long unsigned int _3;
279 long unsigned int _4;
280 struct A * _6;
281 int * _7;
283 <bb 2>:
284 _3 = (long unsigned int) i_1(D);
285 _4 = _3 * 8;
286 _6 = p_5(D) + _4;
287 _7 = &_6->first_field;
288 return _7, __bounds_of_p_2(D);
292 d) Pointer is the result of pointer arithmetic or type cast.
294 In this case bounds of the base pointer are used. In case of binary
295 operation producing a pointer we are analyzing data flow further
296 looking for operand's bounds. One operand is considered as a base
297 if it has some valid bounds. If we fall into a case when none of
298 operands (or both of them) has valid bounds, a default bounds value
299 is used.
301 Trying to find out bounds for binary operations we may fall into
302 cyclic dependencies for pointers. To avoid infinite recursion all
303 walked phi nodes instantly obtain corresponding bounds but created
304 bounds are marked as incomplete. It helps us to stop DF walk during
305 bounds search.
307 When we reach pointer source, some args of incomplete bounds phi obtain
308 valid bounds and those values are propagated further through phi nodes.
309 If no valid bounds were found for phi node then we mark its result as
310 invalid bounds. Process stops when all incomplete bounds become either
311 valid or invalid and we are able to choose a pointer base.
313 e) Pointer is loaded from the memory.
315 In this case we just need to load bounds from the bounds table.
317 Example:
319 foo ()
321 <unnamed type> __bound_tmp.3;
322 static int * buf;
323 int * _2;
325 <bb 2>:
326 _2 = buf;
327 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
328 return _2, __bound_tmp.3_4;
333 typedef void (*assign_handler)(tree, tree, void *);
335 static tree chkp_get_zero_bounds ();
336 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
337 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
338 gimple_stmt_iterator *iter);
339 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
340 tree *elt, bool *safe,
341 bool *bitfield,
342 tree *bounds,
343 gimple_stmt_iterator *iter,
344 bool innermost_bounds);
346 #define chkp_bndldx_fndecl \
347 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
348 #define chkp_bndstx_fndecl \
349 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
350 #define chkp_checkl_fndecl \
351 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
352 #define chkp_checku_fndecl \
353 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
354 #define chkp_bndmk_fndecl \
355 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
356 #define chkp_ret_bnd_fndecl \
357 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
358 #define chkp_intersect_fndecl \
359 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
360 #define chkp_narrow_bounds_fndecl \
361 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
362 #define chkp_sizeof_fndecl \
363 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
364 #define chkp_extract_lower_fndecl \
365 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
366 #define chkp_extract_upper_fndecl \
367 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
369 static GTY (()) tree chkp_uintptr_type;
371 static GTY (()) tree chkp_zero_bounds_var;
372 static GTY (()) tree chkp_none_bounds_var;
374 static GTY (()) basic_block entry_block;
375 static GTY (()) tree zero_bounds;
376 static GTY (()) tree none_bounds;
377 static GTY (()) tree incomplete_bounds;
378 static GTY (()) tree tmp_var;
379 static GTY (()) tree size_tmp_var;
380 static GTY (()) bitmap chkp_abnormal_copies;
382 struct hash_set<tree> *chkp_invalid_bounds;
383 struct hash_set<tree> *chkp_completed_bounds_set;
384 struct hash_map<tree, tree> *chkp_reg_bounds;
385 struct hash_map<tree, tree> *chkp_bound_vars;
386 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
387 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
388 struct hash_map<tree, tree> *chkp_bounds_map;
389 struct hash_map<tree, tree> *chkp_static_var_bounds;
391 static bool in_chkp_pass;
393 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
394 #define CHKP_SIZE_TMP_NAME "__size_tmp"
395 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
396 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
397 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
398 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
399 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
401 /* Static checker constructors may become very large and their
402 compilation with optimization may take too much time.
403 Therefore we put a limit to number of statements in one
404 constructor. Tests with 100 000 statically initialized
405 pointers showed following compilation times on Sandy Bridge
406 server (used -O2):
407 limit 100 => ~18 sec.
408 limit 300 => ~22 sec.
409 limit 1000 => ~30 sec.
410 limit 3000 => ~49 sec.
411 limit 5000 => ~55 sec.
412 limit 10000 => ~76 sec.
413 limit 100000 => ~532 sec. */
414 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
/* Accumulator for statements of a static checker constructor being
   built.  AVAIL counts how many more statements may be appended before
   the MAX_STMTS_IN_STATIC_CHKP_CTOR limit forces emission of the
   constructor and a fresh list is started.  */
struct chkp_ctor_stmt_list
{
  tree stmts;   /* Statement list being accumulated.  */
  int avail;    /* Remaining statement budget for this constructor.  */
};
422 /* Return 1 if function FNDECL is instrumented by Pointer
423 Bounds Checker. */
424 bool
425 chkp_function_instrumented_p (tree fndecl)
427 return fndecl
428 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
/* Mark function FNDECL as instrumented by prepending the
   "chkp instrumented" attribute to its attribute list.
   Idempotent: does nothing if the attribute is already present.  */
void
chkp_function_mark_instrumented (tree fndecl)
{
  if (chkp_function_instrumented_p (fndecl))
    return;

  DECL_ATTRIBUTES (fndecl)
    = tree_cons (get_identifier ("chkp instrumented"), NULL,
		 DECL_ATTRIBUTES (fndecl));
}
443 /* Return true when STMT is builtin call to instrumentation function
444 corresponding to CODE. */
446 bool
447 chkp_gimple_call_builtin_p (gimple call,
448 enum built_in_function code)
450 tree fndecl;
451 if (is_gimple_call (call)
452 && (fndecl = targetm.builtin_chkp_function (code))
453 && gimple_call_fndecl (call) == fndecl)
454 return true;
455 return false;
/* Emit code (at RTL expansion time) to store zero bounds for pointer
   PTR located at MEM.  Builds a bndstx call storing a zero-bounds
   value into the bounds table entry for MEM's address.  */
void
chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
{
  tree zero_bnd, bnd, addr, bndstx;

  /* Either reuse the static zero-bounds variable or build an explicit
     bndmk (0, 0) call, depending on the flag.  */
  if (flag_chkp_use_static_const_bounds)
    zero_bnd = chkp_get_zero_bounds_var ();
  else
    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
					    integer_zero_node);
  /* Temporary to hold the zero bounds during expansion.  */
  bnd = make_tree (pointer_bounds_type_node,
		   assign_temp (pointer_bounds_type_node, 0, 1));
  addr = build1 (ADDR_EXPR,
		 build_pointer_type (TREE_TYPE (mem)), mem);
  bndstx = chkp_build_bndstx_call (addr, ptr, bnd);

  /* Assign zero bounds to the temp first, then expand the store.  */
  expand_assignment (bnd, zero_bnd, false);
  expand_normal (bndstx);
}
/* Mark statement S to not be instrumented (uses pass-local flag 1).  */
static void
chkp_mark_stmt (gimple s)
{
  gimple_set_plf (s, GF_PLF_1, true);
}
/* Mark statement S to be instrumented (clears pass-local flag 1).  */
static void
chkp_unmark_stmt (gimple s)
{
  gimple_set_plf (s, GF_PLF_1, false);
}
/* Return true if statement S should not be instrumented
   (pass-local flag 1 is set).  */
static bool
chkp_marked_stmt_p (gimple s)
{
  return gimple_plf (s, GF_PLF_1);
}
500 /* Get var to be used for bound temps. */
501 static tree
502 chkp_get_tmp_var (void)
504 if (!tmp_var)
505 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
507 return tmp_var;
510 /* Get SSA_NAME to be used as temp. */
511 static tree
512 chkp_get_tmp_reg (gimple stmt)
514 if (in_chkp_pass)
515 return make_ssa_name (chkp_get_tmp_var (), stmt);
517 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
518 CHKP_BOUND_TMP_NAME);
/* Return the (lazily created) variable used for size temporaries.  */
static tree
chkp_get_size_tmp_var (void)
{
  if (!size_tmp_var)
    size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);

  return size_tmp_var;
}
531 /* Register bounds BND for address of OBJ. */
532 static void
533 chkp_register_addr_bounds (tree obj, tree bnd)
535 if (bnd == incomplete_bounds)
536 return;
538 chkp_reg_addr_bounds->put (obj, bnd);
540 if (dump_file && (dump_flags & TDF_DETAILS))
542 fprintf (dump_file, "Regsitered bound ");
543 print_generic_expr (dump_file, bnd, 0);
544 fprintf (dump_file, " for address of ");
545 print_generic_expr (dump_file, obj, 0);
546 fprintf (dump_file, "\n");
550 /* Return bounds registered for address of OBJ. */
551 static tree
552 chkp_get_registered_addr_bounds (tree obj)
554 tree *slot = chkp_reg_addr_bounds->get (obj);
555 return slot ? *slot : NULL_TREE;
/* Mark BOUNDS as completed (added to chkp_completed_bounds_set).  */
static void
chkp_mark_completed_bounds (tree bounds)
{
  chkp_completed_bounds_set->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " as completed\n");
    }
}
/* Return true if BOUNDS were marked as completed and false otherwise.  */
static bool
chkp_completed_bounds (tree bounds)
{
  return chkp_completed_bounds_set->contains (bounds);
}
/* Clear completed bound marks by recreating the set.  */
static void
chkp_erase_completed_bounds (void)
{
  delete chkp_completed_bounds_set;
  chkp_completed_bounds_set = new hash_set<tree>;
}
587 /* Mark BOUNDS associated with PTR as incomplete. */
588 static void
589 chkp_register_incomplete_bounds (tree bounds, tree ptr)
591 chkp_incomplete_bounds_map->put (bounds, ptr);
593 if (dump_file && (dump_flags & TDF_DETAILS))
595 fprintf (dump_file, "Regsitered incomplete bounds ");
596 print_generic_expr (dump_file, bounds, 0);
597 fprintf (dump_file, " for ");
598 print_generic_expr (dump_file, ptr, 0);
599 fprintf (dump_file, "\n");
603 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
604 static bool
605 chkp_incomplete_bounds (tree bounds)
607 if (bounds == incomplete_bounds)
608 return true;
610 if (chkp_completed_bounds (bounds))
611 return false;
613 return chkp_incomplete_bounds_map->get (bounds) != NULL;
/* Clear incomplete bound marks by recreating the map.  */
static void
chkp_erase_incomplete_bounds (void)
{
  delete chkp_incomplete_bounds_map;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
}
/* Build and return a bndmk call which creates bounds for the structure
   pointed to by PTR.  The structure must have complete type (its
   TYPE_SIZE must be known).  */
tree
chkp_make_bounds_for_struct_addr (tree ptr)
{
  tree type = TREE_TYPE (ptr);
  tree size;

  gcc_assert (POINTER_TYPE_P (type));

  /* Size in bits of the pointed-to structure.  */
  size = TYPE_SIZE (TREE_TYPE (type));

  gcc_assert (size);

  return build_call_nary (pointer_bounds_type_node,
			  build_fold_addr_expr (chkp_bndmk_fndecl),
			  2, ptr, size);
}
/* Traversal function for chkp_may_finish_incomplete_bounds.
   Set *RES to false if at least one argument of the phi statement
   defining BOUNDS (the map key) is unknown (NULL).
   Returning false stops the hash-map traversal at the first
   unknown phi argument.  */
bool
chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
			      bool *res)
{
  gimple phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);
      if (!phi_arg)
	{
	  *res = false;
	  /* Do not need to traverse further.  */
	  return false;
	}
    }

  return true;
}
/* Return true if all phi nodes created for bounds have all their
   arguments computed (no NULL phi args remain).  */
static bool
chkp_may_finish_incomplete_bounds (void)
{
  bool res = true;

  /* chkp_may_complete_phi_bounds clears RES and stops the traversal
     on the first phi with an unknown argument.  */
  chkp_incomplete_bounds_map
    ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);

  return res;
}
/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for bounds phi node.  BOUNDS is the phi result
   (map key); *SLOT holds the pointer SSA name the bounds were
   created for.  For each arg of the pointer's phi, bounds are
   looked up and appended to the bounds phi on the matching edge.  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
			   void *res ATTRIBUTE_UNUSED)
{
  tree ptr = *slot;
  gphi *bounds_phi;
  gphi *ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
  ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));

  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      add_phi_arg (bounds_phi, bound_arg,
		   gimple_phi_arg_edge (ptr_phi, i),
		   UNKNOWN_LOCATION);
    }

  return true;
}
/* Mark BOUNDS as invalid (added to chkp_invalid_bounds set).  */
static void
chkp_mark_invalid_bounds (tree bounds)
{
  chkp_invalid_bounds->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " as invalid\n");
    }
}
731 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
732 static bool
733 chkp_valid_bounds (tree bounds)
735 if (bounds == zero_bounds || bounds == none_bounds)
736 return false;
738 return !chkp_invalid_bounds->contains (bounds);
/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of the phi node defining BOUNDS, trying to find
   a valid completed bound.  If at least one such arg exists, the
   bounds produced by the phi node are marked as valid completed
   bounds, *RES is set, and all phi args are recomputed.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  /* Already decided on an earlier iteration.  */
  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
	{
	  /* One valid source is enough; complete this phi and
	     recompute its args.  */
	  *res = true;
	  chkp_mark_completed_bounds (bounds);
	  chkp_recompute_phi_bounds (bounds, slot, NULL);
	  return true;
	}
    }

  return true;
}
/* Helper function for chkp_finish_incomplete_bounds.
   Marks all bounds that are still not completed as invalid
   (and then as completed so they are not revisited).  */
bool
chkp_mark_invalid_bounds_walker (tree const &bounds,
				 tree *slot ATTRIBUTE_UNUSED,
				 void *res ATTRIBUTE_UNUSED)
{
  if (!chkp_completed_bounds (bounds))
    {
      chkp_mark_invalid_bounds (bounds);
      chkp_mark_completed_bounds (bounds);
    }
  return true;
}
795 /* When all bound phi nodes have all their args computed
796 we have enough info to find valid bounds. We iterate
797 through all incompleted bounds searching for valid
798 bounds. Found valid bounds are marked as completed
799 and all remaining incompleted bounds are recomputed.
800 Process continues until no new valid bounds may be
801 found. All remained incompleted bounds are marked as
802 invalid (i.e. have no valid source of bounds). */
803 static void
804 chkp_finish_incomplete_bounds (void)
806 bool found_valid;
808 while (found_valid)
810 found_valid = false;
812 chkp_incomplete_bounds_map->
813 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
815 if (found_valid)
816 chkp_incomplete_bounds_map->
817 traverse<void *, chkp_recompute_phi_bounds> (NULL);
820 chkp_incomplete_bounds_map->
821 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
822 chkp_incomplete_bounds_map->
823 traverse<void *, chkp_recompute_phi_bounds> (NULL);
825 chkp_erase_completed_bounds ();
826 chkp_erase_incomplete_bounds ();
829 /* Return 1 if type TYPE is a pointer type or a
830 structure having a pointer type as one of its fields.
831 Otherwise return 0. */
832 bool
833 chkp_type_has_pointer (const_tree type)
835 bool res = false;
837 if (BOUNDED_TYPE_P (type))
838 res = true;
839 else if (RECORD_OR_UNION_TYPE_P (type))
841 tree field;
843 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
844 if (TREE_CODE (field) == FIELD_DECL)
845 res = res || chkp_type_has_pointer (TREE_TYPE (field));
847 else if (TREE_CODE (type) == ARRAY_TYPE)
848 res = chkp_type_has_pointer (TREE_TYPE (type));
850 return res;
/* Return the number of bounds required for type TYPE:
   0 for NULL or unbounded types, 1 for a pointer type, and for
   records/unions the number of distinct bound slots found by
   chkp_find_bound_slots.  */
unsigned
chkp_type_bounds_count (const_tree type)
{
  unsigned res = 0;

  if (!type)
    res = 0;
  else if (BOUNDED_TYPE_P (type))
    res = 1;
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      bitmap have_bound;

      /* Temporary bitmap of bound slots; released before return.  */
      bitmap_obstack_initialize (NULL);
      have_bound = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (type, have_bound);
      res = bitmap_count_bits (have_bound);
      BITMAP_FREE (have_bound);
      bitmap_obstack_release (NULL);
    }

  return res;
}
877 /* Get bounds associated with NODE via
878 chkp_set_bounds call. */
879 tree
880 chkp_get_bounds (tree node)
882 tree *slot;
884 if (!chkp_bounds_map)
885 return NULL_TREE;
887 slot = chkp_bounds_map->get (node);
888 return slot ? *slot : NULL_TREE;
/* Associate bounds VAL with NODE, creating the map lazily.  */
void
chkp_set_bounds (tree node, tree val)
{
  if (!chkp_bounds_map)
    chkp_bounds_map = new hash_map<tree, tree>;

  chkp_bounds_map->put (node, val);
}
/* Check if statically initialized variable VAR requires
   static bounds initialization.  If VAR is added into the
   bounds initialization list then true is returned.  Otherwise
   return false.  */
extern bool
chkp_register_var_initializer (tree var)
{
  if (!flag_check_pointer_bounds
      || DECL_INITIAL (var) == error_mark_node)
    return false;

  gcc_assert (TREE_CODE (var) == VAR_DECL);
  gcc_assert (DECL_INITIAL (var));

  /* Only static variables whose type contains pointers need
     bounds initialization.  */
  if (TREE_STATIC (var)
      && chkp_type_has_pointer (TREE_TYPE (var)))
    {
      varpool_node::get_create (var)->need_bounds_init = 1;
      return true;
    }

  return false;
}
/* Helper function for chkp_finish_file.

   Add a new modification statement (RHS is assigned to LHS)
   into the list of static initializer statements (passed in ARG)
   and decrement the remaining statement budget.  NOTE(review): this
   helper only decrements STMTS->avail; flushing the constructor when
   the list becomes too big is done by the caller.  */
static void
chkp_add_modification_to_stmt_list (tree lhs,
				    tree rhs,
				    void *arg)
{
  struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
  tree modify;

  /* Insert a conversion when LHS and RHS types differ.  */
  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
    rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);

  modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
  append_to_statement_list (modify, &stmts->stmts);

  stmts->avail--;
}
948 /* Build and return ADDR_EXPR for specified object OBJ. */
949 static tree
950 chkp_build_addr_expr (tree obj)
952 return TREE_CODE (obj) == TARGET_MEM_REF
953 ? tree_mem_ref_addr (ptr_type_node, obj)
954 : build_fold_addr_expr (obj);
/* Helper function for chkp_finish_file.
   Append initialization of bound variable BND_VAR with bounds of
   variable VAR to statements list STMTS.  If the statements list
   becomes too big, emit a checker constructor and start a new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      /* TREE_STRING_LENGTH includes the terminating NUL; SIZE here is
	 the offset of the last byte.  */
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size (a call to the checker
	 sizeof builtin).  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      /* Optionally treat a zero dynamic size as "infinite" bounds:
	 size becomes (0 - lb), i.e. reaching the end of the address
	 space.  */
      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  /* Flush the constructor when the statement budget is exhausted
     (see MAX_STMTS_IN_STATIC_CHKP_CTOR).  */
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
/* Return the entry block to be used for checker initialization code.
   Create a new block (by splitting the function entry) if required.  */
static basic_block
chkp_get_entry_block (void)
{
  if (!entry_block)
    entry_block = split_block (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL)->dest;

  return entry_block;
}
1026 /* Return a bounds var to be used for pointer var PTR_VAR. */
1027 static tree
1028 chkp_get_bounds_var (tree ptr_var)
1030 tree bnd_var;
1031 tree *slot;
1033 slot = chkp_bound_vars->get (ptr_var);
1034 if (slot)
1035 bnd_var = *slot;
1036 else
1038 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1039 CHKP_BOUND_TMP_NAME);
1040 chkp_bound_vars->put (ptr_var, bnd_var);
1043 return bnd_var;
1048 /* Register bounds BND for object PTR in global bounds table.
1049 A copy of bounds may be created for abnormal ssa names.
1050 Returns bounds to use for PTR. */
1051 static tree
1052 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1054 bool abnormal_ptr;
1056 if (!chkp_reg_bounds)
1057 return bnd;
1059 /* Do nothing if bounds are incomplete_bounds
1060 because it means bounds will be recomputed. */
1061 if (bnd == incomplete_bounds)
1062 return bnd;
1064 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1065 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1066 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1068 /* A single bounds value may be reused multiple times for
1069 different pointer values. It may cause coalescing issues
1070 for abnormal SSA names. To avoid it we create a bounds
1071 copy in case it is computed for abnormal SSA name.
1073 We also cannot reuse such created copies for other pointers */
1074 if (abnormal_ptr
1075 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1077 tree bnd_var = NULL_TREE;
1079 if (abnormal_ptr)
1081 if (SSA_NAME_VAR (ptr))
1082 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1084 else
1085 bnd_var = chkp_get_tmp_var ();
1087 /* For abnormal copies we may just find original
1088 bounds and use them. */
1089 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1091 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1092 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1093 bnd = gimple_assign_rhs1 (bnd_def);
1095 /* For undefined values we usually use none bounds
1096 value but in case of abnormal edge it may cause
1097 coalescing failures. Use default definition of
1098 bounds variable instead to avoid it. */
1099 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1100 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1102 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1104 if (dump_file && (dump_flags & TDF_DETAILS))
1106 fprintf (dump_file, "Using default def bounds ");
1107 print_generic_expr (dump_file, bnd, 0);
1108 fprintf (dump_file, " for abnormal default def SSA name ");
1109 print_generic_expr (dump_file, ptr, 0);
1110 fprintf (dump_file, "\n");
1113 else
1115 tree copy;
1116 gimple def = SSA_NAME_DEF_STMT (ptr);
1117 gimple assign;
1118 gimple_stmt_iterator gsi;
1120 if (bnd_var)
1121 copy = make_ssa_name (bnd_var, gimple_build_nop ());
1122 else
1123 copy = make_temp_ssa_name (pointer_bounds_type_node,
1124 gimple_build_nop (),
1125 CHKP_BOUND_TMP_NAME);
1126 assign = gimple_build_assign (copy, bnd);
1128 if (dump_file && (dump_flags & TDF_DETAILS))
1130 fprintf (dump_file, "Creating a copy of bounds ");
1131 print_generic_expr (dump_file, bnd, 0);
1132 fprintf (dump_file, " for abnormal SSA name ");
1133 print_generic_expr (dump_file, ptr, 0);
1134 fprintf (dump_file, "\n");
1137 if (gimple_code (def) == GIMPLE_NOP)
1139 gsi = gsi_last_bb (chkp_get_entry_block ());
1140 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1141 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1142 else
1143 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1145 else
1147 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1148 /* Sometimes (e.g. when we load a pointer from a
1149 memory) bounds are produced later than a pointer.
1150 We need to insert bounds copy appropriately. */
1151 if (gimple_code (bnd_def) != GIMPLE_NOP
1152 && stmt_dominates_stmt_p (def, bnd_def))
1153 gsi = gsi_for_stmt (bnd_def);
1154 else
1155 gsi = gsi_for_stmt (def);
1156 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1159 bnd = copy;
1162 if (abnormal_ptr)
1163 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1166 chkp_reg_bounds->put (ptr, bnd);
1168 if (dump_file && (dump_flags & TDF_DETAILS))
1170 fprintf (dump_file, "Regsitered bound ");
1171 print_generic_expr (dump_file, bnd, 0);
1172 fprintf (dump_file, " for pointer ");
1173 print_generic_expr (dump_file, ptr, 0);
1174 fprintf (dump_file, "\n");
1177 return bnd;
1180 /* Get bounds registered for object PTR in global bounds table. */
1181 static tree
1182 chkp_get_registered_bounds (tree ptr)
1184 tree *slot;
1186 if (!chkp_reg_bounds)
1187 return NULL_TREE;
1189 slot = chkp_reg_bounds->get (ptr);
1190 return slot ? *slot : NULL_TREE;
1193 /* Add bound retvals to return statement pointed by GSI. */
1195 static void
1196 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1198 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1199 tree retval = gimple_return_retval (ret);
1200 tree ret_decl = DECL_RESULT (cfun->decl);
1201 tree bounds;
1203 if (!retval)
1204 return;
1206 if (BOUNDED_P (ret_decl))
1208 bounds = chkp_find_bounds (retval, gsi);
1209 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1210 gimple_return_set_retbnd (ret, bounds);
1213 update_stmt (ret);
1216 /* Force OP to be suitable for using as an argument for call.
1217 New statements (if any) go to SEQ. */
1218 static tree
1219 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1221 gimple_seq stmts;
1222 gimple_stmt_iterator si;
1224 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1226 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1227 chkp_mark_stmt (gsi_stmt (si));
1229 gimple_seq_add_seq (seq, stmts);
1231 return op;
1234 /* Generate lower bound check for memory access by ADDR.
1235 Check is inserted before the position pointed by ITER.
1236 DIRFLAG indicates whether memory access is load or store. */
1237 static void
1238 chkp_check_lower (tree addr, tree bounds,
1239 gimple_stmt_iterator iter,
1240 location_t location,
1241 tree dirflag)
1243 gimple_seq seq;
1244 gimple check;
1245 tree node;
1247 if (bounds == chkp_get_zero_bounds ())
1248 return;
1250 if (dirflag == integer_zero_node
1251 && !flag_chkp_check_read)
1252 return;
1254 if (dirflag == integer_one_node
1255 && !flag_chkp_check_write)
1256 return;
1258 seq = NULL;
1260 node = chkp_force_gimple_call_op (addr, &seq);
1262 check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1263 chkp_mark_stmt (check);
1264 gimple_call_set_with_bounds (check, true);
1265 gimple_set_location (check, location);
1266 gimple_seq_add_stmt (&seq, check);
1268 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1270 if (dump_file && (dump_flags & TDF_DETAILS))
1272 gimple before = gsi_stmt (iter);
1273 fprintf (dump_file, "Generated lower bound check for statement ");
1274 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1275 fprintf (dump_file, " ");
1276 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1280 /* Generate upper bound check for memory access by ADDR.
1281 Check is inserted before the position pointed by ITER.
1282 DIRFLAG indicates whether memory access is load or store. */
1283 static void
1284 chkp_check_upper (tree addr, tree bounds,
1285 gimple_stmt_iterator iter,
1286 location_t location,
1287 tree dirflag)
1289 gimple_seq seq;
1290 gimple check;
1291 tree node;
1293 if (bounds == chkp_get_zero_bounds ())
1294 return;
1296 if (dirflag == integer_zero_node
1297 && !flag_chkp_check_read)
1298 return;
1300 if (dirflag == integer_one_node
1301 && !flag_chkp_check_write)
1302 return;
1304 seq = NULL;
1306 node = chkp_force_gimple_call_op (addr, &seq);
1308 check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1309 chkp_mark_stmt (check);
1310 gimple_call_set_with_bounds (check, true);
1311 gimple_set_location (check, location);
1312 gimple_seq_add_stmt (&seq, check);
1314 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1316 if (dump_file && (dump_flags & TDF_DETAILS))
1318 gimple before = gsi_stmt (iter);
1319 fprintf (dump_file, "Generated upper bound check for statement ");
1320 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1321 fprintf (dump_file, " ");
1322 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1326 /* Generate lower and upper bound checks for memory access
1327 to memory slot [FIRST, LAST] againsr BOUNDS. Checks
1328 are inserted before the position pointed by ITER.
1329 DIRFLAG indicates whether memory access is load or store. */
1330 void
1331 chkp_check_mem_access (tree first, tree last, tree bounds,
1332 gimple_stmt_iterator iter,
1333 location_t location,
1334 tree dirflag)
1336 chkp_check_lower (first, bounds, iter, location, dirflag);
1337 chkp_check_upper (last, bounds, iter, location, dirflag);
1340 /* Replace call to _bnd_chk_* pointed by GSI with
1341 bndcu and bndcl calls. DIRFLAG determines whether
1342 check is for read or write. */
1344 void
1345 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1346 tree dirflag)
1348 gimple_stmt_iterator call_iter = *gsi;
1349 gimple call = gsi_stmt (*gsi);
1350 tree fndecl = gimple_call_fndecl (call);
1351 tree addr = gimple_call_arg (call, 0);
1352 tree bounds = chkp_find_bounds (addr, gsi);
1354 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1355 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1356 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1358 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1359 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1361 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1363 tree size = gimple_call_arg (call, 1);
1364 addr = fold_build_pointer_plus (addr, size);
1365 addr = fold_build_pointer_plus_hwi (addr, -1);
1366 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1369 gsi_remove (&call_iter, true);
1372 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1373 corresponding bounds extract call. */
1375 void
1376 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1378 gimple call = gsi_stmt (*gsi);
1379 tree fndecl = gimple_call_fndecl (call);
1380 tree addr = gimple_call_arg (call, 0);
1381 tree bounds = chkp_find_bounds (addr, gsi);
1382 gimple extract;
1384 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1385 fndecl = chkp_extract_lower_fndecl;
1386 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1387 fndecl = chkp_extract_upper_fndecl;
1388 else
1389 gcc_unreachable ();
1391 extract = gimple_build_call (fndecl, 1, bounds);
1392 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1393 chkp_mark_stmt (extract);
1395 gsi_replace (gsi, extract, false);
1398 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1399 static tree
1400 chkp_build_component_ref (tree obj, tree field)
1402 tree res;
1404 /* If object is TMR then we do not use component_ref but
1405 add offset instead. We need it to be able to get addr
1406 of the reasult later. */
1407 if (TREE_CODE (obj) == TARGET_MEM_REF)
1409 tree offs = TMR_OFFSET (obj);
1410 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1411 offs, DECL_FIELD_OFFSET (field));
1413 gcc_assert (offs);
1415 res = copy_node (obj);
1416 TREE_TYPE (res) = TREE_TYPE (field);
1417 TMR_OFFSET (res) = offs;
1419 else
1420 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1422 return res;
1425 /* Return ARRAY_REF for array ARR and index IDX with
1426 specified element type ETYPE and element size ESIZE. */
1427 static tree
1428 chkp_build_array_ref (tree arr, tree etype, tree esize,
1429 unsigned HOST_WIDE_INT idx)
1431 tree index = build_int_cst (size_type_node, idx);
1432 tree res;
1434 /* If object is TMR then we do not use array_ref but
1435 add offset instead. We need it to be able to get addr
1436 of the reasult later. */
1437 if (TREE_CODE (arr) == TARGET_MEM_REF)
1439 tree offs = TMR_OFFSET (arr);
1441 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1442 esize, index);
1443 gcc_assert(esize);
1445 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1446 offs, esize);
1447 gcc_assert (offs);
1449 res = copy_node (arr);
1450 TREE_TYPE (res) = etype;
1451 TMR_OFFSET (res) = offs;
1453 else
1454 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1456 return res;
1459 /* Helper function for chkp_add_bounds_to_call_stmt.
1460 Fill ALL_BOUNDS output array with created bounds.
1462 OFFS is used for recursive calls and holds basic
1463 offset of TYPE in outer structure in bits.
1465 ITER points a position where bounds are searched.
1467 ALL_BOUNDS[i] is filled with elem bounds if there
1468 is a field in TYPE which has pointer type and offset
1469 equal to i * POINTER_SIZE in bits. */
1470 static void
1471 chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
1472 HOST_WIDE_INT offs,
1473 gimple_stmt_iterator *iter)
1475 tree type = TREE_TYPE (elem);
1477 if (BOUNDED_TYPE_P (type))
1479 if (!all_bounds[offs / POINTER_SIZE])
1481 tree temp = make_temp_ssa_name (type, gimple_build_nop (), "");
1482 gimple assign = gimple_build_assign (temp, elem);
1483 gimple_stmt_iterator gsi;
1485 gsi_insert_before (iter, assign, GSI_SAME_STMT);
1486 gsi = gsi_for_stmt (assign);
1488 all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
1491 else if (RECORD_OR_UNION_TYPE_P (type))
1493 tree field;
1495 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1496 if (TREE_CODE (field) == FIELD_DECL)
1498 tree base = unshare_expr (elem);
1499 tree field_ref = chkp_build_component_ref (base, field);
1500 HOST_WIDE_INT field_offs
1501 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1502 if (DECL_FIELD_OFFSET (field))
1503 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1505 chkp_find_bounds_for_elem (field_ref, all_bounds,
1506 offs + field_offs, iter);
1509 else if (TREE_CODE (type) == ARRAY_TYPE)
1511 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1512 tree etype = TREE_TYPE (type);
1513 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1514 unsigned HOST_WIDE_INT cur;
1516 if (!maxval || integer_minus_onep (maxval))
1517 return;
1519 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1521 tree base = unshare_expr (elem);
1522 tree arr_elem = chkp_build_array_ref (base, etype,
1523 TYPE_SIZE (etype),
1524 cur);
1525 chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
1526 iter);
1531 /* Fill HAVE_BOUND output bitmap with information about
1532 bounds requred for object of type TYPE.
1534 OFFS is used for recursive calls and holds basic
1535 offset of TYPE in outer structure in bits.
1537 HAVE_BOUND[i] is set to 1 if there is a field
1538 in TYPE which has pointer type and offset
1539 equal to i * POINTER_SIZE - OFFS in bits. */
1540 void
1541 chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
1542 HOST_WIDE_INT offs)
1544 if (BOUNDED_TYPE_P (type))
1545 bitmap_set_bit (have_bound, offs / POINTER_SIZE);
1546 else if (RECORD_OR_UNION_TYPE_P (type))
1548 tree field;
1550 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1551 if (TREE_CODE (field) == FIELD_DECL)
1553 HOST_WIDE_INT field_offs
1554 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1555 if (DECL_FIELD_OFFSET (field))
1556 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1557 chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
1558 offs + field_offs);
1561 else if (TREE_CODE (type) == ARRAY_TYPE)
1563 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1564 tree etype = TREE_TYPE (type);
1565 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1566 unsigned HOST_WIDE_INT cur;
1568 if (!maxval
1569 || TREE_CODE (maxval) != INTEGER_CST
1570 || integer_minus_onep (maxval))
1571 return;
1573 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1574 chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
1578 /* Fill bitmap RES with information about bounds for
1579 type TYPE. See chkp_find_bound_slots_1 for more
1580 details. */
1581 void
1582 chkp_find_bound_slots (const_tree type, bitmap res)
1584 bitmap_clear (res);
1585 chkp_find_bound_slots_1 (type, res, 0);
1588 /* Return 1 if call to FNDECL should be instrumented
1589 and 0 otherwise. */
1591 static bool
1592 chkp_instrument_normal_builtin (tree fndecl)
1594 switch (DECL_FUNCTION_CODE (fndecl))
1596 case BUILT_IN_STRLEN:
1597 case BUILT_IN_STRCPY:
1598 case BUILT_IN_STRNCPY:
1599 case BUILT_IN_STPCPY:
1600 case BUILT_IN_STPNCPY:
1601 case BUILT_IN_STRCAT:
1602 case BUILT_IN_STRNCAT:
1603 case BUILT_IN_MEMCPY:
1604 case BUILT_IN_MEMPCPY:
1605 case BUILT_IN_MEMSET:
1606 case BUILT_IN_MEMMOVE:
1607 case BUILT_IN_BZERO:
1608 case BUILT_IN_STRCMP:
1609 case BUILT_IN_STRNCMP:
1610 case BUILT_IN_BCMP:
1611 case BUILT_IN_MEMCMP:
1612 case BUILT_IN_MEMCPY_CHK:
1613 case BUILT_IN_MEMPCPY_CHK:
1614 case BUILT_IN_MEMMOVE_CHK:
1615 case BUILT_IN_MEMSET_CHK:
1616 case BUILT_IN_STRCPY_CHK:
1617 case BUILT_IN_STRNCPY_CHK:
1618 case BUILT_IN_STPCPY_CHK:
1619 case BUILT_IN_STPNCPY_CHK:
1620 case BUILT_IN_STRCAT_CHK:
1621 case BUILT_IN_STRNCAT_CHK:
1622 case BUILT_IN_MALLOC:
1623 case BUILT_IN_CALLOC:
1624 case BUILT_IN_REALLOC:
1625 return 1;
1627 default:
1628 return 0;
1632 /* Add bound arguments to call statement pointed by GSI.
1633 Also performs a replacement of user checker builtins calls
1634 with internal ones. */
1636 static void
1637 chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
1639 gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
1640 unsigned arg_no = 0;
1641 tree fndecl = gimple_call_fndecl (call);
1642 tree fntype;
1643 tree first_formal_arg;
1644 tree arg;
1645 bool use_fntype = false;
1646 tree op;
1647 ssa_op_iter iter;
1648 gcall *new_call;
1650 /* Do nothing for internal functions. */
1651 if (gimple_call_internal_p (call))
1652 return;
1654 fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));
1656 /* Do nothing if back-end builtin is called. */
1657 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
1658 return;
1660 /* Do nothing for some middle-end builtins. */
1661 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1662 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
1663 return;
1665 /* Do nothing for calls to legacy functions. */
1666 if (fndecl
1667 && lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (fndecl)))
1668 return;
1670 /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
1671 and CHKP_COPY_PTR_BOUNDS. */
1672 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1673 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
1674 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
1675 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
1676 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
1677 return;
1679 /* Check user builtins are replaced with checks. */
1680 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1681 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1682 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
1683 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
1685 chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
1686 return;
1689 /* Check user builtins are replaced with bound extract. */
1690 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1691 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
1692 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
1694 chkp_replace_extract_builtin (gsi);
1695 return;
1698 /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
1699 target narrow bounds call. */
1700 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1701 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
1703 tree arg = gimple_call_arg (call, 1);
1704 tree bounds = chkp_find_bounds (arg, gsi);
1706 gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
1707 gimple_call_set_arg (call, 1, bounds);
1708 update_stmt (call);
1710 return;
1713 /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
1714 bndstx call. */
1715 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1716 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
1718 tree addr = gimple_call_arg (call, 0);
1719 tree ptr = gimple_call_arg (call, 1);
1720 tree bounds = chkp_find_bounds (ptr, gsi);
1721 gimple_stmt_iterator iter = gsi_for_stmt (call);
1723 chkp_build_bndstx (addr, ptr, bounds, gsi);
1724 gsi_remove (&iter, true);
1726 return;
1729 if (!flag_chkp_instrument_calls)
1730 return;
1732 /* We instrument only some subset of builtins. We also instrument
1733 builtin calls to be inlined. */
1734 if (fndecl
1735 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1736 && !chkp_instrument_normal_builtin (fndecl))
1738 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
1739 return;
1741 struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
1742 if (!clone
1743 || !gimple_has_body_p (clone->decl))
1744 return;
1747 /* If function decl is available then use it for
1748 formal arguments list. Otherwise use function type. */
1749 if (fndecl && DECL_ARGUMENTS (fndecl))
1750 first_formal_arg = DECL_ARGUMENTS (fndecl);
1751 else
1753 first_formal_arg = TYPE_ARG_TYPES (fntype);
1754 use_fntype = true;
1757 /* Fill vector of new call args. */
1758 vec<tree> new_args = vNULL;
1759 new_args.create (gimple_call_num_args (call));
1760 arg = first_formal_arg;
1761 for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
1763 tree call_arg = gimple_call_arg (call, arg_no);
1764 tree type;
1766 /* Get arg type using formal argument description
1767 or actual argument type. */
1768 if (arg)
1769 if (use_fntype)
1770 if (TREE_VALUE (arg) != void_type_node)
1772 type = TREE_VALUE (arg);
1773 arg = TREE_CHAIN (arg);
1775 else
1776 type = TREE_TYPE (call_arg);
1777 else
1779 type = TREE_TYPE (arg);
1780 arg = TREE_CHAIN (arg);
1782 else
1783 type = TREE_TYPE (call_arg);
1785 new_args.safe_push (call_arg);
1787 if (BOUNDED_TYPE_P (type)
1788 || pass_by_reference (NULL, TYPE_MODE (type), type, true))
1789 new_args.safe_push (chkp_find_bounds (call_arg, gsi));
1790 else if (chkp_type_has_pointer (type))
1792 HOST_WIDE_INT max_bounds
1793 = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
1794 tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
1795 HOST_WIDE_INT bnd_no;
1797 memset (all_bounds, 0, sizeof (tree) * max_bounds);
1799 chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);
1801 for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
1802 if (all_bounds[bnd_no])
1803 new_args.safe_push (all_bounds[bnd_no]);
1805 free (all_bounds);
1809 if (new_args.length () == gimple_call_num_args (call))
1810 new_call = call;
1811 else
1813 new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
1814 gimple_call_set_lhs (new_call, gimple_call_lhs (call));
1815 gimple_call_copy_flags (new_call, call);
1817 new_args.release ();
1819 /* For direct calls fndecl is replaced with instrumented version. */
1820 if (fndecl)
1822 tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
1823 gimple_call_set_fndecl (new_call, new_decl);
1824 gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
1826 /* For indirect call we should fix function pointer type if
1827 pass some bounds. */
1828 else if (new_call != call)
1830 tree type = gimple_call_fntype (call);
1831 type = chkp_copy_function_type_adding_bounds (type);
1832 gimple_call_set_fntype (new_call, type);
1835 /* replace old call statement with the new one. */
1836 if (call != new_call)
1838 FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
1840 SSA_NAME_DEF_STMT (op) = new_call;
1842 gsi_replace (gsi, new_call, true);
1844 else
1845 update_stmt (new_call);
1847 gimple_call_set_with_bounds (new_call, true);
1850 /* Return constant static bounds var with specified LB and UB
1851 if such var exists in varpool. Return NULL otherwise. */
1852 static tree
1853 chkp_find_const_bounds_var (HOST_WIDE_INT lb,
1854 HOST_WIDE_INT ub)
1856 tree val = targetm.chkp_make_bounds_constant (lb, ub);
1857 struct varpool_node *node;
1859 /* We expect bounds constant is represented as a complex value
1860 of two pointer sized integers. */
1861 gcc_assert (TREE_CODE (val) == COMPLEX_CST);
1863 FOR_EACH_VARIABLE (node)
1864 if (POINTER_BOUNDS_P (node->decl)
1865 && TREE_READONLY (node->decl)
1866 && DECL_INITIAL (node->decl)
1867 && TREE_CODE (DECL_INITIAL (node->decl)) == COMPLEX_CST
1868 && tree_int_cst_equal (TREE_REALPART (DECL_INITIAL (node->decl)),
1869 TREE_REALPART (val))
1870 && tree_int_cst_equal (TREE_IMAGPART (DECL_INITIAL (node->decl)),
1871 TREE_IMAGPART (val)))
1872 return node->decl;
1874 return NULL;
1877 /* Return constant static bounds var with specified bounds LB and UB.
1878 If such var does not exists then new var is created with specified NAME. */
1879 static tree
1880 chkp_make_static_const_bounds (HOST_WIDE_INT lb,
1881 HOST_WIDE_INT ub,
1882 const char *name)
1884 tree var;
1886 /* With LTO we may have constant bounds already in varpool.
1887 Try to find it. */
1888 var = chkp_find_const_bounds_var (lb, ub);
1890 if (var)
1891 return var;
1893 var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
1894 get_identifier (name), pointer_bounds_type_node);
1896 TREE_PUBLIC (var) = 1;
1897 TREE_USED (var) = 1;
1898 TREE_READONLY (var) = 1;
1899 TREE_STATIC (var) = 1;
1900 TREE_ADDRESSABLE (var) = 0;
1901 DECL_ARTIFICIAL (var) = 1;
1902 DECL_READ_P (var) = 1;
1903 /* We may use this symbol during ctors generation in chkp_finish_file
1904 when all symbols are emitted. Force output to avoid undefined
1905 symbols in ctors. */
1906 if (!in_lto_p)
1908 DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
1909 DECL_COMDAT (var) = 1;
1910 varpool_node::get_create (var)->set_comdat_group (DECL_ASSEMBLER_NAME (var));
1911 varpool_node::get_create (var)->force_output = 1;
1913 else
1914 DECL_EXTERNAL (var) = 1;
1915 varpool_node::finalize_decl (var);
1917 return var;
1920 /* Generate code to make bounds with specified lower bound LB and SIZE.
1921 if AFTER is 1 then code is inserted after position pointed by ITER
1922 otherwise code is inserted before position pointed by ITER.
1923 If ITER is NULL then code is added to entry block. */
1924 static tree
1925 chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
1927 gimple_seq seq;
1928 gimple_stmt_iterator gsi;
1929 gimple stmt;
1930 tree bounds;
1932 if (iter)
1933 gsi = *iter;
1934 else
1935 gsi = gsi_start_bb (chkp_get_entry_block ());
1937 seq = NULL;
1939 lb = chkp_force_gimple_call_op (lb, &seq);
1940 size = chkp_force_gimple_call_op (size, &seq);
1942 stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
1943 chkp_mark_stmt (stmt);
1945 bounds = chkp_get_tmp_reg (stmt);
1946 gimple_call_set_lhs (stmt, bounds);
1948 gimple_seq_add_stmt (&seq, stmt);
1950 if (iter && after)
1951 gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
1952 else
1953 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
1955 if (dump_file && (dump_flags & TDF_DETAILS))
1957 fprintf (dump_file, "Made bounds: ");
1958 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
1959 if (iter)
1961 fprintf (dump_file, " inserted before statement: ");
1962 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
1964 else
1965 fprintf (dump_file, " at function entry\n");
1968 /* update_stmt (stmt); */
1970 return bounds;
1973 /* Return var holding zero bounds. */
1974 tree
1975 chkp_get_zero_bounds_var (void)
1977 if (!chkp_zero_bounds_var)
1979 tree id = get_identifier (CHKP_ZERO_BOUNDS_VAR_NAME);
1980 symtab_node *node = symtab_node::get_for_asmname (id);
1981 if (node)
1982 chkp_zero_bounds_var = node->decl;
1985 if (!chkp_zero_bounds_var)
1986 chkp_zero_bounds_var
1987 = chkp_make_static_const_bounds (0, -1,
1988 CHKP_ZERO_BOUNDS_VAR_NAME);
1989 return chkp_zero_bounds_var;
1992 /* Return var holding none bounds. */
1993 tree
1994 chkp_get_none_bounds_var (void)
1996 if (!chkp_none_bounds_var)
1998 tree id = get_identifier (CHKP_NONE_BOUNDS_VAR_NAME);
1999 symtab_node *node = symtab_node::get_for_asmname (id);
2000 if (node)
2001 chkp_none_bounds_var = node->decl;
2004 if (!chkp_none_bounds_var)
2005 chkp_none_bounds_var
2006 = chkp_make_static_const_bounds (-1, 0,
2007 CHKP_NONE_BOUNDS_VAR_NAME);
2008 return chkp_none_bounds_var;
2011 /* Return SSA_NAME used to represent zero bounds. */
2012 static tree
2013 chkp_get_zero_bounds (void)
2015 if (zero_bounds)
2016 return zero_bounds;
2018 if (dump_file && (dump_flags & TDF_DETAILS))
2019 fprintf (dump_file, "Creating zero bounds...");
2021 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2022 || flag_chkp_use_static_const_bounds > 0)
2024 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2025 gimple stmt;
2027 zero_bounds = chkp_get_tmp_reg (gimple_build_nop ());
2028 stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
2029 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2031 else
2032 zero_bounds = chkp_make_bounds (integer_zero_node,
2033 integer_zero_node,
2034 NULL,
2035 false);
2037 return zero_bounds;
2040 /* Return SSA_NAME used to represent none bounds. */
2041 static tree
2042 chkp_get_none_bounds (void)
2044 if (none_bounds)
2045 return none_bounds;
2047 if (dump_file && (dump_flags & TDF_DETAILS))
2048 fprintf (dump_file, "Creating none bounds...");
2051 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2052 || flag_chkp_use_static_const_bounds > 0)
2054 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2055 gimple stmt;
2057 none_bounds = chkp_get_tmp_reg (gimple_build_nop ());
2058 stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
2059 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2061 else
2062 none_bounds = chkp_make_bounds (integer_minus_one_node,
2063 build_int_cst (size_type_node, 2),
2064 NULL,
2065 false);
2067 return none_bounds;
2070 /* Return bounds to be used as a result of operation which
2071 should not create poiunter (e.g. MULT_EXPR). */
2072 static tree
2073 chkp_get_invalid_op_bounds (void)
2075 return chkp_get_zero_bounds ();
2078 /* Return bounds to be used for loads of non-pointer values. */
2079 static tree
2080 chkp_get_nonpointer_load_bounds (void)
2082 return chkp_get_zero_bounds ();
2085 /* Return 1 if may use bndret call to get bounds for pointer
2086 returned by CALL. */
2087 static bool
2088 chkp_call_returns_bounds_p (gcall *call)
2090 if (gimple_call_internal_p (call))
2091 return false;
2093 tree fndecl = gimple_call_fndecl (call);
2095 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
2096 return false;
2098 if (fndecl
2099 && lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (fndecl)))
2100 return false;
2102 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2104 if (chkp_instrument_normal_builtin (fndecl))
2105 return true;
2107 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
2108 return false;
2110 struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
2111 return (clone && gimple_has_body_p (clone->decl));
2114 return true;
2117 /* Build bounds returned by CALL. */
2118 static tree
2119 chkp_build_returned_bound (gcall *call)
2121 gimple_stmt_iterator gsi;
2122 tree bounds;
2123 gimple stmt;
2124 tree fndecl = gimple_call_fndecl (call);
2126 /* To avoid fixing alloca expands in targets we handle
2127 it separately. */
2128 if (fndecl
2129 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2130 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
2131 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2133 tree size = gimple_call_arg (call, 0);
2134 tree lb = gimple_call_lhs (call);
2135 gimple_stmt_iterator iter = gsi_for_stmt (call);
2136 bounds = chkp_make_bounds (lb, size, &iter, true);
2138 /* We know bounds returned by set_bounds builtin call. */
2139 else if (fndecl
2140 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2141 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
2143 tree lb = gimple_call_arg (call, 0);
2144 tree size = gimple_call_arg (call, 1);
2145 gimple_stmt_iterator iter = gsi_for_stmt (call);
2146 bounds = chkp_make_bounds (lb, size, &iter, true);
2148 /* Detect bounds initialization calls. */
2149 else if (fndecl
2150 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2151 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
2152 bounds = chkp_get_zero_bounds ();
2153 /* Detect bounds nullification calls. */
2154 else if (fndecl
2155 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2156 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
2157 bounds = chkp_get_none_bounds ();
2158 /* Detect bounds copy calls. */
2159 else if (fndecl
2160 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2161 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
2163 gimple_stmt_iterator iter = gsi_for_stmt (call);
2164 bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
2166 /* Do not use retbnd when returned bounds are equal to some
2167 of passed bounds. */
2168 else if (gimple_call_return_flags (call) & ERF_RETURNS_ARG)
2170 gimple_stmt_iterator iter = gsi_for_stmt (call);
2171 unsigned int retarg = 0, argno;
2172 if (gimple_call_return_flags (call) & ERF_RETURNS_ARG)
2173 retarg = gimple_call_return_flags (call) & ERF_RETURN_ARG_MASK;
2174 if (gimple_call_with_bounds_p (call))
2176 for (argno = 0; argno < gimple_call_num_args (call); argno++)
2177 if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
2179 if (retarg)
2180 retarg--;
2181 else
2182 break;
2185 else
2186 argno = retarg;
2188 bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
2190 else if (chkp_call_returns_bounds_p (call))
2192 gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);
2194 /* In general case build checker builtin call to
2195 obtain returned bounds. */
2196 stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
2197 gimple_call_lhs (call));
2198 chkp_mark_stmt (stmt);
2200 gsi = gsi_for_stmt (call);
2201 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
2203 bounds = chkp_get_tmp_reg (stmt);
2204 gimple_call_set_lhs (stmt, bounds);
2206 update_stmt (stmt);
2208 else
2209 bounds = chkp_get_zero_bounds ();
2211 if (dump_file && (dump_flags & TDF_DETAILS))
2213 fprintf (dump_file, "Built returned bounds (");
2214 print_generic_expr (dump_file, bounds, 0);
2215 fprintf (dump_file, ") for call: ");
2216 print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
2219 bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);
2221 return bounds;
2224 /* Return bounds used as returned by call
2225 which produced SSA name VAL. */
2226 gcall *
2227 chkp_retbnd_call_by_val (tree val)
2229 if (TREE_CODE (val) != SSA_NAME)
2230 return NULL;
2232 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2234 imm_use_iterator use_iter;
2235 use_operand_p use_p;
2236 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2237 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2238 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2239 return as_a <gcall *> (USE_STMT (use_p));
2241 return NULL;
2244 /* Check the next parameter for the given PARM is bounds
2245 and return it's default SSA_NAME (create if required). */
2246 static tree
2247 chkp_get_next_bounds_parm (tree parm)
2249 tree bounds = TREE_CHAIN (parm);
2250 gcc_assert (POINTER_BOUNDS_P (bounds));
2251 bounds = ssa_default_def (cfun, bounds);
2252 if (!bounds)
2254 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2255 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2257 return bounds;
2260 /* Return bounds to be used for input argument PARM. */
2261 static tree
2262 chkp_get_bound_for_parm (tree parm)
2264 tree decl = SSA_NAME_VAR (parm);
2265 tree bounds;
2267 gcc_assert (TREE_CODE (decl) == PARM_DECL);
2269 bounds = chkp_get_registered_bounds (parm);
2271 if (!bounds)
2272 bounds = chkp_get_registered_bounds (decl);
2274 if (!bounds)
2276 tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;
2278 /* For static chain param we return zero bounds
2279 because currently we do not check dereferences
2280 of this pointer. */
2281 if (cfun->static_chain_decl == decl)
2282 bounds = chkp_get_zero_bounds ();
2283 /* If non instrumented runtime is used then it may be useful
2284 to use zero bounds for input arguments of main
2285 function. */
2286 else if (flag_chkp_zero_input_bounds_for_main
2287 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
2288 "main") == 0)
2289 bounds = chkp_get_zero_bounds ();
2290 else if (BOUNDED_P (parm))
2292 bounds = chkp_get_next_bounds_parm (decl);
2293 bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);
2295 if (dump_file && (dump_flags & TDF_DETAILS))
2297 fprintf (dump_file, "Built arg bounds (");
2298 print_generic_expr (dump_file, bounds, 0);
2299 fprintf (dump_file, ") for arg: ");
2300 print_node (dump_file, "", decl, 0);
2303 else
2304 bounds = chkp_get_zero_bounds ();
2307 if (!chkp_get_registered_bounds (parm))
2308 bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);
2310 if (dump_file && (dump_flags & TDF_DETAILS))
2312 fprintf (dump_file, "Using bounds ");
2313 print_generic_expr (dump_file, bounds, 0);
2314 fprintf (dump_file, " for parm ");
2315 print_generic_expr (dump_file, parm, 0);
2316 fprintf (dump_file, " of type ");
2317 print_generic_expr (dump_file, TREE_TYPE (parm), 0);
2318 fprintf (dump_file, ".\n");
2321 return bounds;
2324 /* Build and return CALL_EXPR for bndstx builtin with specified
2325 arguments. */
2326 tree
2327 chkp_build_bndldx_call (tree addr, tree ptr)
2329 tree fn = build1 (ADDR_EXPR,
2330 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2331 chkp_bndldx_fndecl);
2332 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2333 fn, 2, addr, ptr);
2334 CALL_WITH_BOUNDS_P (call) = true;
2335 return call;
2338 /* Insert code to load bounds for PTR located by ADDR.
2339 Code is inserted after position pointed by GSI.
2340 Loaded bounds are returned. */
2341 static tree
2342 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2344 gimple_seq seq;
2345 gimple stmt;
2346 tree bounds;
2348 seq = NULL;
2350 addr = chkp_force_gimple_call_op (addr, &seq);
2351 ptr = chkp_force_gimple_call_op (ptr, &seq);
2353 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2354 chkp_mark_stmt (stmt);
2355 bounds = chkp_get_tmp_reg (stmt);
2356 gimple_call_set_lhs (stmt, bounds);
2358 gimple_seq_add_stmt (&seq, stmt);
2360 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2362 if (dump_file && (dump_flags & TDF_DETAILS))
2364 fprintf (dump_file, "Generated bndldx for pointer ");
2365 print_generic_expr (dump_file, ptr, 0);
2366 fprintf (dump_file, ": ");
2367 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2370 return bounds;
2373 /* Build and return CALL_EXPR for bndstx builtin with specified
2374 arguments. */
2375 tree
2376 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2378 tree fn = build1 (ADDR_EXPR,
2379 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2380 chkp_bndstx_fndecl);
2381 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2382 fn, 3, ptr, bounds, addr);
2383 CALL_WITH_BOUNDS_P (call) = true;
2384 return call;
2387 /* Insert code to store BOUNDS for PTR stored by ADDR.
2388 New statements are inserted after position pointed
2389 by GSI. */
2390 void
2391 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2392 gimple_stmt_iterator *gsi)
2394 gimple_seq seq;
2395 gimple stmt;
2397 seq = NULL;
2399 addr = chkp_force_gimple_call_op (addr, &seq);
2400 ptr = chkp_force_gimple_call_op (ptr, &seq);
2402 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2403 chkp_mark_stmt (stmt);
2404 gimple_call_set_with_bounds (stmt, true);
2406 gimple_seq_add_stmt (&seq, stmt);
2408 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2410 if (dump_file && (dump_flags & TDF_DETAILS))
2412 fprintf (dump_file, "Generated bndstx for pointer store ");
2413 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2414 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2418 /* Compute bounds for pointer NODE which was assigned in
2419 assignment statement ASSIGN. Return computed bounds. */
2420 static tree
2421 chkp_compute_bounds_for_assignment (tree node, gimple assign)
2423 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
2424 tree rhs1 = gimple_assign_rhs1 (assign);
2425 tree bounds = NULL_TREE;
2426 gimple_stmt_iterator iter = gsi_for_stmt (assign);
2428 if (dump_file && (dump_flags & TDF_DETAILS))
2430 fprintf (dump_file, "Computing bounds for assignment: ");
2431 print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
2434 switch (rhs_code)
2436 case MEM_REF:
2437 case TARGET_MEM_REF:
2438 case COMPONENT_REF:
2439 case ARRAY_REF:
2440 /* We need to load bounds from the bounds table. */
2441 bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
2442 break;
2444 case VAR_DECL:
2445 case SSA_NAME:
2446 case ADDR_EXPR:
2447 case POINTER_PLUS_EXPR:
2448 case NOP_EXPR:
2449 case CONVERT_EXPR:
2450 case INTEGER_CST:
2451 /* Bounds are just propagated from RHS. */
2452 bounds = chkp_find_bounds (rhs1, &iter);
2453 break;
2455 case VIEW_CONVERT_EXPR:
2456 /* Bounds are just propagated from RHS. */
2457 bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
2458 break;
2460 case PARM_DECL:
2461 if (BOUNDED_P (rhs1))
2463 /* We need to load bounds from the bounds table. */
2464 bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
2465 node, &iter);
2466 TREE_ADDRESSABLE (rhs1) = 1;
2468 else
2469 bounds = chkp_get_nonpointer_load_bounds ();
2470 break;
2472 case MINUS_EXPR:
2473 case PLUS_EXPR:
2474 case BIT_AND_EXPR:
2475 case BIT_IOR_EXPR:
2476 case BIT_XOR_EXPR:
2478 tree rhs2 = gimple_assign_rhs2 (assign);
2479 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2480 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2482 /* First we try to check types of operands. If it
2483 does not help then look at bound values.
2485 If some bounds are incomplete and other are
2486 not proven to be valid (i.e. also incomplete
2487 or invalid because value is not pointer) then
2488 resulting value is incomplete and will be
2489 recomputed later in chkp_finish_incomplete_bounds. */
2490 if (BOUNDED_P (rhs1)
2491 && !BOUNDED_P (rhs2))
2492 bounds = bnd1;
2493 else if (BOUNDED_P (rhs2)
2494 && !BOUNDED_P (rhs1)
2495 && rhs_code != MINUS_EXPR)
2496 bounds = bnd2;
2497 else if (chkp_incomplete_bounds (bnd1))
2498 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
2499 && !chkp_incomplete_bounds (bnd2))
2500 bounds = bnd2;
2501 else
2502 bounds = incomplete_bounds;
2503 else if (chkp_incomplete_bounds (bnd2))
2504 if (chkp_valid_bounds (bnd1)
2505 && !chkp_incomplete_bounds (bnd1))
2506 bounds = bnd1;
2507 else
2508 bounds = incomplete_bounds;
2509 else if (!chkp_valid_bounds (bnd1))
2510 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
2511 bounds = bnd2;
2512 else if (bnd2 == chkp_get_zero_bounds ())
2513 bounds = bnd2;
2514 else
2515 bounds = bnd1;
2516 else if (!chkp_valid_bounds (bnd2))
2517 bounds = bnd1;
2518 else
2519 /* Seems both operands may have valid bounds
2520 (e.g. pointer minus pointer). In such case
2521 use default invalid op bounds. */
2522 bounds = chkp_get_invalid_op_bounds ();
2524 break;
2526 case BIT_NOT_EXPR:
2527 case NEGATE_EXPR:
2528 case LSHIFT_EXPR:
2529 case RSHIFT_EXPR:
2530 case LROTATE_EXPR:
2531 case RROTATE_EXPR:
2532 case EQ_EXPR:
2533 case NE_EXPR:
2534 case LT_EXPR:
2535 case LE_EXPR:
2536 case GT_EXPR:
2537 case GE_EXPR:
2538 case MULT_EXPR:
2539 case RDIV_EXPR:
2540 case TRUNC_DIV_EXPR:
2541 case FLOOR_DIV_EXPR:
2542 case CEIL_DIV_EXPR:
2543 case ROUND_DIV_EXPR:
2544 case TRUNC_MOD_EXPR:
2545 case FLOOR_MOD_EXPR:
2546 case CEIL_MOD_EXPR:
2547 case ROUND_MOD_EXPR:
2548 case EXACT_DIV_EXPR:
2549 case FIX_TRUNC_EXPR:
2550 case FLOAT_EXPR:
2551 case REALPART_EXPR:
2552 case IMAGPART_EXPR:
2553 /* No valid bounds may be produced by these exprs. */
2554 bounds = chkp_get_invalid_op_bounds ();
2555 break;
2557 case COND_EXPR:
2559 tree val1 = gimple_assign_rhs2 (assign);
2560 tree val2 = gimple_assign_rhs3 (assign);
2561 tree bnd1 = chkp_find_bounds (val1, &iter);
2562 tree bnd2 = chkp_find_bounds (val2, &iter);
2563 gimple stmt;
2565 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2566 bounds = incomplete_bounds;
2567 else if (bnd1 == bnd2)
2568 bounds = bnd1;
2569 else
2571 rhs1 = unshare_expr (rhs1);
2573 bounds = chkp_get_tmp_reg (assign);
2574 stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
2575 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2577 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2578 chkp_mark_invalid_bounds (bounds);
2581 break;
2583 case MAX_EXPR:
2584 case MIN_EXPR:
2586 tree rhs2 = gimple_assign_rhs2 (assign);
2587 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2588 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2590 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2591 bounds = incomplete_bounds;
2592 else if (bnd1 == bnd2)
2593 bounds = bnd1;
2594 else
2596 gimple stmt;
2597 tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
2598 boolean_type_node, rhs1, rhs2);
2599 bounds = chkp_get_tmp_reg (assign);
2600 stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);
2602 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2604 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2605 chkp_mark_invalid_bounds (bounds);
2608 break;
2610 default:
2611 bounds = chkp_get_zero_bounds ();
2612 warning (0, "pointer bounds were lost due to unexpected expression %s",
2613 get_tree_code_name (rhs_code));
2616 gcc_assert (bounds);
2618 if (node)
2619 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2621 return bounds;
2624 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2626 There are just few statement codes allowed: NOP (for default ssa names),
2627 ASSIGN, CALL, PHI, ASM.
2629 Return computed bounds. */
2630 static tree
2631 chkp_get_bounds_by_definition (tree node, gimple def_stmt,
2632 gphi_iterator *iter)
2634 tree var, bounds;
2635 enum gimple_code code = gimple_code (def_stmt);
2636 gphi *stmt;
2638 if (dump_file && (dump_flags & TDF_DETAILS))
2640 fprintf (dump_file, "Searching for bounds for node: ");
2641 print_generic_expr (dump_file, node, 0);
2643 fprintf (dump_file, " using its definition: ");
2644 print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2647 switch (code)
2649 case GIMPLE_NOP:
2650 var = SSA_NAME_VAR (node);
2651 switch (TREE_CODE (var))
2653 case PARM_DECL:
2654 bounds = chkp_get_bound_for_parm (node);
2655 break;
2657 case VAR_DECL:
2658 /* For uninitialized pointers use none bounds. */
2659 bounds = chkp_get_none_bounds ();
2660 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2661 break;
2663 case RESULT_DECL:
2665 tree base_type;
2667 gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2669 base_type = TREE_TYPE (TREE_TYPE (node));
2671 gcc_assert (TYPE_SIZE (base_type)
2672 && TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2673 && tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2675 bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2676 NULL, false);
2677 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2679 break;
2681 default:
2682 if (dump_file && (dump_flags & TDF_DETAILS))
2684 fprintf (dump_file, "Unexpected var with no definition\n");
2685 print_generic_expr (dump_file, var, 0);
2687 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2688 get_tree_code_name (TREE_CODE (var)));
2690 break;
2692 case GIMPLE_ASSIGN:
2693 bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2694 break;
2696 case GIMPLE_CALL:
2697 bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
2698 break;
2700 case GIMPLE_PHI:
2701 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
2702 if (SSA_NAME_VAR (node))
2703 var = chkp_get_bounds_var (SSA_NAME_VAR (node));
2704 else
2705 var = make_temp_ssa_name (pointer_bounds_type_node,
2706 gimple_build_nop (),
2707 CHKP_BOUND_TMP_NAME);
2708 else
2709 var = chkp_get_tmp_var ();
2710 stmt = create_phi_node (var, gimple_bb (def_stmt));
2711 bounds = gimple_phi_result (stmt);
2712 *iter = gsi_for_phi (stmt);
2714 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2716 /* Created bounds do not have all phi args computed and
2717 therefore we do not know if there is a valid source
2718 of bounds for that node. Therefore we mark bounds
2719 as incomplete and then recompute them when all phi
2720 args are computed. */
2721 chkp_register_incomplete_bounds (bounds, node);
2722 break;
2724 case GIMPLE_ASM:
2725 bounds = chkp_get_zero_bounds ();
2726 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2727 break;
2729 default:
2730 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2731 gimple_code_name[code]);
2734 return bounds;
2737 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2738 tree
2739 chkp_build_make_bounds_call (tree lower_bound, tree size)
2741 tree call = build1 (ADDR_EXPR,
2742 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2743 chkp_bndmk_fndecl);
2744 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2745 call, 2, lower_bound, size);
2748 /* Create static bounds var of specfified OBJ which is
2749 is either VAR_DECL or string constant. */
2750 static tree
2751 chkp_make_static_bounds (tree obj)
2753 static int string_id = 1;
2754 static int var_id = 1;
2755 tree *slot;
2756 const char *var_name;
2757 char *bnd_var_name;
2758 tree bnd_var;
2760 /* First check if we already have required var. */
2761 if (chkp_static_var_bounds)
2763 /* For vars we use assembler name as a key in
2764 chkp_static_var_bounds map. It allows to
2765 avoid duplicating bound vars for decls
2766 sharing assembler name. */
2767 if (TREE_CODE (obj) == VAR_DECL)
2769 tree name = DECL_ASSEMBLER_NAME (obj);
2770 slot = chkp_static_var_bounds->get (name);
2771 if (slot)
2772 return *slot;
2774 else
2776 slot = chkp_static_var_bounds->get (obj);
2777 if (slot)
2778 return *slot;
2782 /* Build decl for bounds var. */
2783 if (TREE_CODE (obj) == VAR_DECL)
2785 if (DECL_IGNORED_P (obj))
2787 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2788 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2790 else
2792 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2794 /* For hidden symbols we want to skip first '*' char. */
2795 if (*var_name == '*')
2796 var_name++;
2798 bnd_var_name = (char *) xmalloc (strlen (var_name)
2799 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2800 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2801 strcat (bnd_var_name, var_name);
2804 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2805 get_identifier (bnd_var_name),
2806 pointer_bounds_type_node);
2808 /* Address of the obj will be used as lower bound. */
2809 TREE_ADDRESSABLE (obj) = 1;
2811 else
2813 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2814 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2816 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2817 get_identifier (bnd_var_name),
2818 pointer_bounds_type_node);
2821 TREE_PUBLIC (bnd_var) = 0;
2822 TREE_USED (bnd_var) = 1;
2823 TREE_READONLY (bnd_var) = 0;
2824 TREE_STATIC (bnd_var) = 1;
2825 TREE_ADDRESSABLE (bnd_var) = 0;
2826 DECL_ARTIFICIAL (bnd_var) = 1;
2827 DECL_COMMON (bnd_var) = 1;
2828 DECL_COMDAT (bnd_var) = 1;
2829 DECL_READ_P (bnd_var) = 1;
2830 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2831 /* Force output similar to constant bounds.
2832 See chkp_make_static_const_bounds. */
2833 varpool_node::get_create (bnd_var)->force_output = 1;
2834 /* Mark symbol as requiring bounds initialization. */
2835 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2836 varpool_node::finalize_decl (bnd_var);
2838 /* Add created var to the map to use it for other references
2839 to obj. */
2840 if (!chkp_static_var_bounds)
2841 chkp_static_var_bounds = new hash_map<tree, tree>;
2843 if (TREE_CODE (obj) == VAR_DECL)
2845 tree name = DECL_ASSEMBLER_NAME (obj);
2846 chkp_static_var_bounds->put (name, bnd_var);
2848 else
2849 chkp_static_var_bounds->put (obj, bnd_var);
2851 return bnd_var;
2854 /* When var has incomplete type we cannot get size to
2855 compute its bounds. In such cases we use checker
2856 builtin call which determines object size at runtime. */
2857 static tree
2858 chkp_generate_extern_var_bounds (tree var)
2860 tree bounds, size_reloc, lb, size, max_size, cond;
2861 gimple_stmt_iterator gsi;
2862 gimple_seq seq = NULL;
2863 gimple stmt;
2865 /* If instrumentation is not enabled for vars having
2866 incomplete type then just return zero bounds to avoid
2867 checks for this var. */
2868 if (!flag_chkp_incomplete_type)
2869 return chkp_get_zero_bounds ();
2871 if (dump_file && (dump_flags & TDF_DETAILS))
2873 fprintf (dump_file, "Generating bounds for extern symbol '");
2874 print_generic_expr (dump_file, var, 0);
2875 fprintf (dump_file, "'\n");
2878 stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
2880 size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
2881 gimple_call_set_lhs (stmt, size_reloc);
2883 gimple_seq_add_stmt (&seq, stmt);
2885 lb = chkp_build_addr_expr (var);
2886 size = make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());
2888 if (flag_chkp_zero_dynamic_size_as_infinite)
2890 /* We should check that size relocation was resolved.
2891 If it was not then use maximum possible size for the var. */
2892 max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
2893 fold_convert (chkp_uintptr_type, lb));
2894 max_size = chkp_force_gimple_call_op (max_size, &seq);
2896 cond = build2 (NE_EXPR, boolean_type_node,
2897 size_reloc, integer_zero_node);
2898 stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
2899 gimple_seq_add_stmt (&seq, stmt);
2901 else
2903 stmt = gimple_build_assign (size, size_reloc);
2904 gimple_seq_add_stmt (&seq, stmt);
2907 gsi = gsi_start_bb (chkp_get_entry_block ());
2908 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2910 bounds = chkp_make_bounds (lb, size, &gsi, true);
2912 return bounds;
2915 /* Return 1 if TYPE has fields with zero size or fields
2916 marked with chkp_variable_size attribute. */
2917 bool
2918 chkp_variable_size_type (tree type)
2920 bool res = false;
2921 tree field;
2923 if (RECORD_OR_UNION_TYPE_P (type))
2924 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2926 if (TREE_CODE (field) == FIELD_DECL)
2927 res = res
2928 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
2929 || chkp_variable_size_type (TREE_TYPE (field));
2931 else
2932 res = !TYPE_SIZE (type)
2933 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
2934 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
2936 return res;
2939 /* Compute and return bounds for address of DECL which is
2940 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
2941 static tree
2942 chkp_get_bounds_for_decl_addr (tree decl)
2944 tree bounds;
2946 gcc_assert (TREE_CODE (decl) == VAR_DECL
2947 || TREE_CODE (decl) == PARM_DECL
2948 || TREE_CODE (decl) == RESULT_DECL);
2950 bounds = chkp_get_registered_addr_bounds (decl);
2952 if (bounds)
2953 return bounds;
2955 if (dump_file && (dump_flags & TDF_DETAILS))
2957 fprintf (dump_file, "Building bounds for address of decl ");
2958 print_generic_expr (dump_file, decl, 0);
2959 fprintf (dump_file, "\n");
2962 /* Use zero bounds if size is unknown and checks for
2963 unknown sizes are restricted. */
2964 if ((!DECL_SIZE (decl)
2965 || (chkp_variable_size_type (TREE_TYPE (decl))
2966 && (TREE_STATIC (decl)
2967 || DECL_EXTERNAL (decl)
2968 || TREE_PUBLIC (decl))))
2969 && !flag_chkp_incomplete_type)
2970 return chkp_get_zero_bounds ();
2972 if (flag_chkp_use_static_bounds
2973 && TREE_CODE (decl) == VAR_DECL
2974 && (TREE_STATIC (decl)
2975 || DECL_EXTERNAL (decl)
2976 || TREE_PUBLIC (decl))
2977 && !DECL_THREAD_LOCAL_P (decl))
2979 tree bnd_var = chkp_make_static_bounds (decl);
2980 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2981 gimple stmt;
2983 bounds = chkp_get_tmp_reg (gimple_build_nop ());
2984 stmt = gimple_build_assign (bounds, bnd_var);
2985 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2987 else if (!DECL_SIZE (decl)
2988 || (chkp_variable_size_type (TREE_TYPE (decl))
2989 && (TREE_STATIC (decl)
2990 || DECL_EXTERNAL (decl)
2991 || TREE_PUBLIC (decl))))
2993 gcc_assert (TREE_CODE (decl) == VAR_DECL);
2994 bounds = chkp_generate_extern_var_bounds (decl);
2996 else
2998 tree lb = chkp_build_addr_expr (decl);
2999 bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3002 return bounds;
3005 /* Compute and return bounds for constant string. */
3006 static tree
3007 chkp_get_bounds_for_string_cst (tree cst)
3009 tree bounds;
3010 tree lb;
3011 tree size;
3013 gcc_assert (TREE_CODE (cst) == STRING_CST);
3015 bounds = chkp_get_registered_bounds (cst);
3017 if (bounds)
3018 return bounds;
3020 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3021 || flag_chkp_use_static_const_bounds > 0)
3023 tree bnd_var = chkp_make_static_bounds (cst);
3024 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3025 gimple stmt;
3027 bounds = chkp_get_tmp_reg (gimple_build_nop ());
3028 stmt = gimple_build_assign (bounds, bnd_var);
3029 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3031 else
3033 lb = chkp_build_addr_expr (cst);
3034 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3035 bounds = chkp_make_bounds (lb, size, NULL, false);
3038 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3040 return bounds;
3043 /* Generate code to instersect bounds BOUNDS1 and BOUNDS2 and
3044 return the result. if ITER is not NULL then Code is inserted
3045 before position pointed by ITER. Otherwise code is added to
3046 entry block. */
3047 static tree
3048 chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
3050 if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
3051 return bounds2 ? bounds2 : bounds1;
3052 else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
3053 return bounds1;
3054 else
3056 gimple_seq seq;
3057 gimple stmt;
3058 tree bounds;
3060 seq = NULL;
3062 stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
3063 chkp_mark_stmt (stmt);
3065 bounds = chkp_get_tmp_reg (stmt);
3066 gimple_call_set_lhs (stmt, bounds);
3068 gimple_seq_add_stmt (&seq, stmt);
3070 /* We are probably doing narrowing for constant expression.
3071 In such case iter may be undefined. */
3072 if (!iter)
3074 gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
3075 iter = &gsi;
3076 gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
3078 else
3079 gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
3081 if (dump_file && (dump_flags & TDF_DETAILS))
3083 fprintf (dump_file, "Bounds intersection: ");
3084 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
3085 fprintf (dump_file, " inserted before statement: ");
3086 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
3087 TDF_VOPS|TDF_MEMSYMS);
3090 return bounds;
3094 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3095 and 0 othersize. */
3096 static bool
3097 chkp_may_narrow_to_field (tree field)
3099 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3100 && tree_to_uhwi (DECL_SIZE (field)) != 0
3101 && (!DECL_FIELD_OFFSET (field)
3102 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3103 && (!DECL_FIELD_BIT_OFFSET (field)
3104 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3105 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3106 && !chkp_variable_size_type (TREE_TYPE (field));
3109 /* Return 1 if bounds for FIELD should be narrowed to
3110 field's own size. */
3111 static bool
3112 chkp_narrow_bounds_for_field (tree field)
3114 HOST_WIDE_INT offs;
3115 HOST_WIDE_INT bit_offs;
3117 if (!chkp_may_narrow_to_field (field))
3118 return false;
3120 /* Accesse to compiler generated fields should not cause
3121 bounds narrowing. */
3122 if (DECL_ARTIFICIAL (field))
3123 return false;
3125 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3126 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3128 return (flag_chkp_narrow_bounds
3129 && (flag_chkp_first_field_has_own_bounds
3130 || offs
3131 || bit_offs));
3134 /* Perform narrowing for BOUNDS using bounds computed for field
3135 access COMPONENT. ITER meaning is the same as for
3136 chkp_intersect_bounds. */
3137 static tree
3138 chkp_narrow_bounds_to_field (tree bounds, tree component,
3139 gimple_stmt_iterator *iter)
3141 tree field = TREE_OPERAND (component, 1);
3142 tree size = DECL_SIZE_UNIT (field);
3143 tree field_ptr = chkp_build_addr_expr (component);
3144 tree field_bounds;
3146 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3148 return chkp_intersect_bounds (field_bounds, bounds, iter);
3151 /* Parse field or array access NODE.
3153 PTR ouput parameter holds a pointer to the outermost
3154 object.
3156 BITFIELD output parameter is set to 1 if bitfield is
3157 accessed and to 0 otherwise. If it is 1 then ELT holds
3158 outer component for accessed bit field.
3160 SAFE outer parameter is set to 1 if access is safe and
3161 checks are not required.
3163 BOUNDS outer parameter holds bounds to be used to check
3164 access (may be NULL).
3166 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3167 innermost accessed component. */
3168 static void
3169 chkp_parse_array_and_component_ref (tree node, tree *ptr,
3170 tree *elt, bool *safe,
3171 bool *bitfield,
3172 tree *bounds,
3173 gimple_stmt_iterator *iter,
3174 bool innermost_bounds)
3176 tree comp_to_narrow = NULL_TREE;
3177 tree last_comp = NULL_TREE;
3178 bool array_ref_found = false;
3179 tree *nodes;
3180 tree var;
3181 int len;
3182 int i;
3184 /* Compute tree height for expression. */
3185 var = node;
3186 len = 1;
3187 while (TREE_CODE (var) == COMPONENT_REF
3188 || TREE_CODE (var) == ARRAY_REF
3189 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
3191 var = TREE_OPERAND (var, 0);
3192 len++;
3195 gcc_assert (len > 1);
3197 /* It is more convenient for us to scan left-to-right,
3198 so walk tree again and put all node to nodes vector
3199 in reversed order. */
3200 nodes = XALLOCAVEC (tree, len);
3201 nodes[len - 1] = node;
3202 for (i = len - 2; i >= 0; i--)
3203 nodes[i] = TREE_OPERAND (nodes[i + 1], 0);
3205 if (bounds)
3206 *bounds = NULL;
3207 *safe = true;
3208 *bitfield = (TREE_CODE (node) == COMPONENT_REF
3209 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
3210 /* To get bitfield address we will need outer elemnt. */
3211 if (*bitfield)
3212 *elt = nodes[len - 2];
3213 else
3214 *elt = NULL_TREE;
3216 /* If we have indirection in expression then compute
3217 outermost structure bounds. Computed bounds may be
3218 narrowed later. */
3219 if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
3221 *safe = false;
3222 *ptr = TREE_OPERAND (nodes[0], 0);
3223 if (bounds)
3224 *bounds = chkp_find_bounds (*ptr, iter);
3226 else
3228 gcc_assert (TREE_CODE (var) == VAR_DECL
3229 || TREE_CODE (var) == PARM_DECL
3230 || TREE_CODE (var) == RESULT_DECL
3231 || TREE_CODE (var) == STRING_CST
3232 || TREE_CODE (var) == SSA_NAME);
3234 *ptr = chkp_build_addr_expr (var);
3237 /* In this loop we are trying to find a field access
3238 requiring narrowing. There are two simple rules
3239 for search:
3240 1. Leftmost array_ref is chosen if any.
3241 2. Rightmost suitable component_ref is chosen if innermost
3242 bounds are required and no array_ref exists. */
3243 for (i = 1; i < len; i++)
3245 var = nodes[i];
3247 if (TREE_CODE (var) == ARRAY_REF)
3249 *safe = false;
3250 array_ref_found = true;
3251 if (flag_chkp_narrow_bounds
3252 && !flag_chkp_narrow_to_innermost_arrray
3253 && (!last_comp
3254 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
3256 comp_to_narrow = last_comp;
3257 break;
3260 else if (TREE_CODE (var) == COMPONENT_REF)
3262 tree field = TREE_OPERAND (var, 1);
3264 if (innermost_bounds
3265 && !array_ref_found
3266 && chkp_narrow_bounds_for_field (field))
3267 comp_to_narrow = var;
3268 last_comp = var;
3270 if (flag_chkp_narrow_bounds
3271 && flag_chkp_narrow_to_innermost_arrray
3272 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
3274 if (bounds)
3275 *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
3276 comp_to_narrow = NULL;
3279 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
3280 /* Nothing to do for it. */
3282 else
3283 gcc_unreachable ();
3286 if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
3287 *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);
3289 if (innermost_bounds && bounds && !*bounds)
3290 *bounds = chkp_find_bounds (*ptr, iter);
3293 /* Compute and return bounds for address of OBJ. */
3294 static tree
3295 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
3297 tree bounds = chkp_get_registered_addr_bounds (obj);
3299 if (bounds)
3300 return bounds;
3302 switch (TREE_CODE (obj))
3304 case VAR_DECL:
3305 case PARM_DECL:
3306 case RESULT_DECL:
3307 bounds = chkp_get_bounds_for_decl_addr (obj);
3308 break;
3310 case STRING_CST:
3311 bounds = chkp_get_bounds_for_string_cst (obj);
3312 break;
3314 case ARRAY_REF:
3315 case COMPONENT_REF:
3317 tree elt;
3318 tree ptr;
3319 bool safe;
3320 bool bitfield;
3322 chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
3323 &bitfield, &bounds, iter, true);
3325 gcc_assert (bounds);
3327 break;
3329 case FUNCTION_DECL:
3330 case LABEL_DECL:
3331 bounds = chkp_get_zero_bounds ();
3332 break;
3334 case MEM_REF:
3335 bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
3336 break;
3338 case REALPART_EXPR:
3339 case IMAGPART_EXPR:
3340 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
3341 break;
3343 default:
3344 if (dump_file && (dump_flags & TDF_DETAILS))
3346 fprintf (dump_file, "chkp_make_addressed_object_bounds: "
3347 "unexpected object of type %s\n",
3348 get_tree_code_name (TREE_CODE (obj)));
3349 print_node (dump_file, "", obj, 0);
3351 internal_error ("chkp_make_addressed_object_bounds: "
3352 "Unexpected tree code %s",
3353 get_tree_code_name (TREE_CODE (obj)));
3356 chkp_register_addr_bounds (obj, bounds);
3358 return bounds;
3361 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3362 to compute bounds if required. Computed bounds should be available at
3363 position pointed by ITER.
3365 If PTR_SRC is NULL_TREE then pointer definition is identified.
3367 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3368 PTR. If PTR is a any memory reference then ITER points to a statement
3369 after which bndldx will be inserterd. In both cases ITER will be updated
3370 to point to the inserted bndldx statement. */
3372 static tree
3373 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3375 tree addr = NULL_TREE;
3376 tree bounds = NULL_TREE;
3378 if (!ptr_src)
3379 ptr_src = ptr;
3381 bounds = chkp_get_registered_bounds (ptr_src);
3383 if (bounds)
3384 return bounds;
3386 switch (TREE_CODE (ptr_src))
3388 case MEM_REF:
3389 case VAR_DECL:
3390 if (BOUNDED_P (ptr_src))
3391 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3392 bounds = chkp_get_zero_bounds ();
3393 else
3395 addr = chkp_build_addr_expr (ptr_src);
3396 bounds = chkp_build_bndldx (addr, ptr, iter);
3398 else
3399 bounds = chkp_get_nonpointer_load_bounds ();
3400 break;
3402 case ARRAY_REF:
3403 case COMPONENT_REF:
3404 addr = get_base_address (ptr_src);
3405 if (DECL_P (addr)
3406 || TREE_CODE (addr) == MEM_REF
3407 || TREE_CODE (addr) == TARGET_MEM_REF)
3409 if (BOUNDED_P (ptr_src))
3410 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3411 bounds = chkp_get_zero_bounds ();
3412 else
3414 addr = chkp_build_addr_expr (ptr_src);
3415 bounds = chkp_build_bndldx (addr, ptr, iter);
3417 else
3418 bounds = chkp_get_nonpointer_load_bounds ();
3420 else
3422 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3423 bounds = chkp_find_bounds (addr, iter);
3425 break;
3427 case PARM_DECL:
3428 gcc_unreachable ();
3429 bounds = chkp_get_bound_for_parm (ptr_src);
3430 break;
3432 case TARGET_MEM_REF:
3433 addr = chkp_build_addr_expr (ptr_src);
3434 bounds = chkp_build_bndldx (addr, ptr, iter);
3435 break;
3437 case SSA_NAME:
3438 bounds = chkp_get_registered_bounds (ptr_src);
3439 if (!bounds)
3441 gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3442 gphi_iterator phi_iter;
3444 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3446 gcc_assert (bounds);
3448 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3450 unsigned i;
3452 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3454 tree arg = gimple_phi_arg_def (def_phi, i);
3455 tree arg_bnd;
3456 gphi *phi_bnd;
3458 arg_bnd = chkp_find_bounds (arg, NULL);
3460 /* chkp_get_bounds_by_definition created new phi
3461 statement and phi_iter points to it.
3463 Previous call to chkp_find_bounds could create
3464 new basic block and therefore change phi statement
3465 phi_iter points to. */
3466 phi_bnd = phi_iter.phi ();
3468 add_phi_arg (phi_bnd, arg_bnd,
3469 gimple_phi_arg_edge (def_phi, i),
3470 UNKNOWN_LOCATION);
3473 /* If all bound phi nodes have their arg computed
3474 then we may finish its computation. See
3475 chkp_finish_incomplete_bounds for more details. */
3476 if (chkp_may_finish_incomplete_bounds ())
3477 chkp_finish_incomplete_bounds ();
3480 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3481 || chkp_incomplete_bounds (bounds));
3483 break;
3485 case ADDR_EXPR:
3486 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3487 break;
3489 case INTEGER_CST:
3490 if (integer_zerop (ptr_src))
3491 bounds = chkp_get_none_bounds ();
3492 else
3493 bounds = chkp_get_invalid_op_bounds ();
3494 break;
3496 default:
3497 if (dump_file && (dump_flags & TDF_DETAILS))
3499 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3500 get_tree_code_name (TREE_CODE (ptr_src)));
3501 print_node (dump_file, "", ptr_src, 0);
3503 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3504 get_tree_code_name (TREE_CODE (ptr_src)));
3507 if (!bounds)
3509 if (dump_file && (dump_flags & TDF_DETAILS))
3511 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3512 print_node (dump_file, "", ptr_src, 0);
3514 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3517 return bounds;
3520 /* Normal case for bounds search without forced narrowing. */
3521 static tree
3522 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3524 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3527 /* Search bounds for pointer PTR loaded from PTR_SRC
3528 by statement *ITER points to. */
3529 static tree
3530 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3532 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3535 /* Helper function which checks type of RHS and finds all pointers in
3536 it. For each found pointer we build it's accesses in LHS and RHS
3537 objects and then call HANDLER for them. Function is used to copy
3538 or initilize bounds for copied object. */
3539 static void
3540 chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
3541 assign_handler handler)
3543 tree type = TREE_TYPE (lhs);
3545 /* We have nothing to do with clobbers. */
3546 if (TREE_CLOBBER_P (rhs))
3547 return;
3549 if (BOUNDED_TYPE_P (type))
3550 handler (lhs, rhs, arg);
3551 else if (RECORD_OR_UNION_TYPE_P (type))
3553 tree field;
3555 if (TREE_CODE (rhs) == CONSTRUCTOR)
3557 unsigned HOST_WIDE_INT cnt;
3558 tree val;
3560 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
3562 if (chkp_type_has_pointer (TREE_TYPE (field)))
3564 tree lhs_field = chkp_build_component_ref (lhs, field);
3565 chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
3569 else
3570 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3571 if (TREE_CODE (field) == FIELD_DECL
3572 && chkp_type_has_pointer (TREE_TYPE (field)))
3574 tree rhs_field = chkp_build_component_ref (rhs, field);
3575 tree lhs_field = chkp_build_component_ref (lhs, field);
3576 chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
3579 else if (TREE_CODE (type) == ARRAY_TYPE)
3581 unsigned HOST_WIDE_INT cur = 0;
3582 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3583 tree etype = TREE_TYPE (type);
3584 tree esize = TYPE_SIZE (etype);
3586 if (TREE_CODE (rhs) == CONSTRUCTOR)
3588 unsigned HOST_WIDE_INT cnt;
3589 tree purp, val, lhs_elem;
3591 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
3593 if (purp && TREE_CODE (purp) == RANGE_EXPR)
3595 tree lo_index = TREE_OPERAND (purp, 0);
3596 tree hi_index = TREE_OPERAND (purp, 1);
3598 for (cur = (unsigned)tree_to_uhwi (lo_index);
3599 cur <= (unsigned)tree_to_uhwi (hi_index);
3600 cur++)
3602 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3603 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3606 else
3608 if (purp)
3610 gcc_assert (TREE_CODE (purp) == INTEGER_CST);
3611 cur = tree_to_uhwi (purp);
3614 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
3616 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3620 /* Copy array only when size is known. */
3621 else if (maxval && !integer_minus_onep (maxval))
3622 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
3624 tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3625 tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
3626 chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
3629 else
3630 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3631 get_tree_code_name (TREE_CODE (type)));
3634 /* Add code to copy bounds for assignment of RHS to LHS.
3635 ARG is an iterator pointing ne code position. */
3636 static void
3637 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3639 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3640 tree bounds = chkp_find_bounds (rhs, iter);
3641 tree addr = chkp_build_addr_expr(lhs);
3643 chkp_build_bndstx (addr, rhs, bounds, iter);
3646 /* Emit static bound initilizers and size vars. */
3647 void
3648 chkp_finish_file (void)
3650 struct varpool_node *node;
3651 struct chkp_ctor_stmt_list stmts;
3653 if (seen_error ())
3654 return;
3656 /* Iterate through varpool and generate bounds initialization
3657 constructors for all statically initialized pointers. */
3658 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3659 stmts.stmts = NULL;
3660 FOR_EACH_VARIABLE (node)
3661 /* Check that var is actually emitted and we need and may initialize
3662 its bounds. */
3663 if (node->need_bounds_init
3664 && !POINTER_BOUNDS_P (node->decl)
3665 && DECL_RTL (node->decl)
3666 && MEM_P (DECL_RTL (node->decl))
3667 && TREE_ASM_WRITTEN (node->decl))
3669 chkp_walk_pointer_assignments (node->decl,
3670 DECL_INITIAL (node->decl),
3671 &stmts,
3672 chkp_add_modification_to_stmt_list);
3674 if (stmts.avail <= 0)
3676 cgraph_build_static_cdtor ('P', stmts.stmts,
3677 MAX_RESERVED_INIT_PRIORITY + 3);
3678 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3679 stmts.stmts = NULL;
3683 if (stmts.stmts)
3684 cgraph_build_static_cdtor ('P', stmts.stmts,
3685 MAX_RESERVED_INIT_PRIORITY + 3);
3687 /* Iterate through varpool and generate bounds initialization
3688 constructors for all static bounds vars. */
3689 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3690 stmts.stmts = NULL;
3691 FOR_EACH_VARIABLE (node)
3692 if (node->need_bounds_init
3693 && POINTER_BOUNDS_P (node->decl)
3694 && TREE_ASM_WRITTEN (node->decl))
3696 tree bnd = node->decl;
3697 tree var;
3699 gcc_assert (DECL_INITIAL (bnd)
3700 && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);
3702 var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
3703 chkp_output_static_bounds (bnd, var, &stmts);
3706 if (stmts.stmts)
3707 cgraph_build_static_cdtor ('B', stmts.stmts,
3708 MAX_RESERVED_INIT_PRIORITY + 2);
3710 delete chkp_static_var_bounds;
3711 delete chkp_bounds_map;
3714 /* An instrumentation function which is called for each statement
3715 having memory access we want to instrument. It inserts check
3716 code and bounds copy code.
3718 ITER points to statement to instrument.
3720 NODE holds memory access in statement to check.
3722 LOC holds the location information for statement.
3724 DIRFLAGS determines whether access is read or write.
3726 ACCESS_OFFS should be added to address used in NODE
3727 before check.
3729 ACCESS_SIZE holds size of checked access.
3731 SAFE indicates if NODE access is safe and should not be
3732 checked. */
3733 static void
3734 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3735 location_t loc, tree dirflag,
3736 tree access_offs, tree access_size,
3737 bool safe)
3739 tree node_type = TREE_TYPE (node);
3740 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3741 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3742 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3743 tree ptr = NULL_TREE; /* a pointer used for dereference */
3744 tree bounds = NULL_TREE;
3746 /* We do not need instrumentation for clobbers. */
3747 if (dirflag == integer_one_node
3748 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3749 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3750 return;
3752 switch (TREE_CODE (node))
3754 case ARRAY_REF:
3755 case COMPONENT_REF:
3757 bool bitfield;
3758 tree elt;
3760 if (safe)
3762 /* We are not going to generate any checks, so do not
3763 generate bounds as well. */
3764 addr_first = chkp_build_addr_expr (node);
3765 break;
3768 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3769 &bitfield, &bounds, iter, false);
3771 /* Break if there is no dereference and operation is safe. */
3773 if (bitfield)
3775 tree field = TREE_OPERAND (node, 1);
3777 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3778 size = DECL_SIZE_UNIT (field);
3780 if (elt)
3781 elt = chkp_build_addr_expr (elt);
3782 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3783 addr_first = fold_build_pointer_plus_loc (loc,
3784 addr_first,
3785 byte_position (field));
3787 else
3788 addr_first = chkp_build_addr_expr (node);
3790 break;
3792 case INDIRECT_REF:
3793 ptr = TREE_OPERAND (node, 0);
3794 addr_first = ptr;
3795 break;
3797 case MEM_REF:
3798 ptr = TREE_OPERAND (node, 0);
3799 addr_first = chkp_build_addr_expr (node);
3800 break;
3802 case TARGET_MEM_REF:
3803 ptr = TMR_BASE (node);
3804 addr_first = chkp_build_addr_expr (node);
3805 break;
3807 case ARRAY_RANGE_REF:
3808 printf("ARRAY_RANGE_REF\n");
3809 debug_gimple_stmt(gsi_stmt(*iter));
3810 debug_tree(node);
3811 gcc_unreachable ();
3812 break;
3814 case BIT_FIELD_REF:
3816 tree offs, rem, bpu;
3818 gcc_assert (!access_offs);
3819 gcc_assert (!access_size);
3821 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3822 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3823 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3824 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3826 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3827 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3828 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3829 size = fold_convert (size_type_node, size);
3831 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3832 dirflag, offs, size, safe);
3833 return;
3835 break;
3837 case VAR_DECL:
3838 case RESULT_DECL:
3839 case PARM_DECL:
3840 if (dirflag != integer_one_node
3841 || DECL_REGISTER (node))
3842 return;
3844 safe = true;
3845 addr_first = chkp_build_addr_expr (node);
3846 break;
3848 default:
3849 return;
3852 /* If addr_last was not computed then use (addr_first + size - 1)
3853 expression to compute it. */
3854 if (!addr_last)
3856 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3857 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3860 /* Shift both first_addr and last_addr by access_offs if specified. */
3861 if (access_offs)
3863 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3864 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3867 /* Generate bndcl/bndcu checks if memory access is not safe. */
3868 if (!safe)
3870 gimple_stmt_iterator stmt_iter = *iter;
3872 if (!bounds)
3873 bounds = chkp_find_bounds (ptr, iter);
3875 chkp_check_mem_access (addr_first, addr_last, bounds,
3876 stmt_iter, loc, dirflag);
3879 /* We need to store bounds in case pointer is stored. */
3880 if (dirflag == integer_one_node
3881 && chkp_type_has_pointer (node_type)
3882 && flag_chkp_store_bounds)
3884 gimple stmt = gsi_stmt (*iter);
3885 tree rhs1 = gimple_assign_rhs1 (stmt);
3886 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3888 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3889 chkp_walk_pointer_assignments (node, rhs1, iter,
3890 chkp_copy_bounds_for_elem);
3891 else
3893 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3894 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
3899 /* Add code to copy bounds for all pointers copied
3900 in ASSIGN created during inline of EDGE. */
3901 void
3902 chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
3904 tree lhs = gimple_assign_lhs (assign);
3905 tree rhs = gimple_assign_rhs1 (assign);
3906 gimple_stmt_iterator iter = gsi_for_stmt (assign);
3908 if (!flag_chkp_store_bounds)
3909 return;
3911 chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);
3913 /* We should create edges for all created calls to bndldx and bndstx. */
3914 while (gsi_stmt (iter) != assign)
3916 gimple stmt = gsi_stmt (iter);
3917 if (gimple_code (stmt) == GIMPLE_CALL)
3919 tree fndecl = gimple_call_fndecl (stmt);
3920 struct cgraph_node *callee = cgraph_node::get_create (fndecl);
3921 struct cgraph_edge *new_edge;
3923 gcc_assert (fndecl == chkp_bndstx_fndecl
3924 || fndecl == chkp_bndldx_fndecl
3925 || fndecl == chkp_ret_bnd_fndecl);
3927 new_edge = edge->caller->create_edge (callee,
3928 as_a <gcall *> (stmt),
3929 edge->count,
3930 edge->frequency);
3931 new_edge->frequency = compute_call_stmt_bb_frequency
3932 (edge->caller->decl, gimple_bb (stmt));
3934 gsi_prev (&iter);
3938 /* Some code transformation made during instrumentation pass
3939 may put code into inconsistent state. Here we find and fix
3940 such flaws. */
3941 void
3942 chkp_fix_cfg ()
3944 basic_block bb;
3945 gimple_stmt_iterator i;
3947 /* We could insert some code right after stmt which ends bb.
3948 We wanted to put this code on fallthru edge but did not
3949 add new edges from the beginning because it may cause new
3950 phi node creation which may be incorrect due to incomplete
3951 bound phi nodes. */
3952 FOR_ALL_BB_FN (bb, cfun)
3953 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
3955 gimple stmt = gsi_stmt (i);
3956 gimple_stmt_iterator next = i;
3958 gsi_next (&next);
3960 if (stmt_ends_bb_p (stmt)
3961 && !gsi_end_p (next))
3963 edge fall = find_fallthru_edge (bb->succs);
3964 basic_block dest = NULL;
3965 int flags = 0;
3967 gcc_assert (fall);
3969 /* We cannot split abnormal edge. Therefore we
3970 store its params, make it regular and then
3971 rebuild abnormal edge after split. */
3972 if (fall->flags & EDGE_ABNORMAL)
3974 flags = fall->flags & ~EDGE_FALLTHRU;
3975 dest = fall->dest;
3977 fall->flags &= ~EDGE_COMPLEX;
3980 while (!gsi_end_p (next))
3982 gimple next_stmt = gsi_stmt (next);
3983 gsi_remove (&next, false);
3984 gsi_insert_on_edge (fall, next_stmt);
3987 gsi_commit_edge_inserts ();
3989 /* Re-create abnormal edge. */
3990 if (dest)
3991 make_edge (bb, dest, flags);
3996 /* Walker callback for chkp_replace_function_pointers. Replaces
3997 function pointer in the specified operand with pointer to the
3998 instrumented function version. */
3999 static tree
4000 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4001 void *data ATTRIBUTE_UNUSED)
4003 if (TREE_CODE (*op) == FUNCTION_DECL
4004 && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op))
4005 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4006 /* For builtins we replace pointers only for selected
4007 function and functions having definitions. */
4008 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4009 && (chkp_instrument_normal_builtin (*op)
4010 || gimple_has_body_p (*op)))))
4012 struct cgraph_node *node = cgraph_node::get_create (*op);
4013 struct cgraph_node *clone = NULL;
4015 if (!node->instrumentation_clone)
4016 clone = chkp_maybe_create_clone (*op);
4018 if (clone)
4019 *op = clone->decl;
4020 *walk_subtrees = 0;
4023 return NULL;
4026 /* This function searches for function pointers in statement
4027 pointed by GSI and replaces them with pointers to instrumented
4028 function versions. */
4029 static void
4030 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4032 gimple stmt = gsi_stmt (*gsi);
4033 /* For calls we want to walk call args only. */
4034 if (gimple_code (stmt) == GIMPLE_CALL)
4036 unsigned i;
4037 for (i = 0; i < gimple_call_num_args (stmt); i++)
4038 walk_tree (gimple_call_arg_ptr (stmt, i),
4039 chkp_replace_function_pointer, NULL, NULL);
4041 else
4042 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4045 /* This function instruments all statements working with memory,
4046 calls and rets.
4048 It also removes excess statements from static initializers. */
4049 static void
4050 chkp_instrument_function (void)
4052 basic_block bb, next;
4053 gimple_stmt_iterator i;
4054 enum gimple_rhs_class grhs_class;
4055 bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4057 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4060 next = bb->next_bb;
4061 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4063 gimple s = gsi_stmt (i);
4065 /* Skip statement marked to not be instrumented. */
4066 if (chkp_marked_stmt_p (s))
4068 gsi_next (&i);
4069 continue;
4072 chkp_replace_function_pointers (&i);
4074 switch (gimple_code (s))
4076 case GIMPLE_ASSIGN:
4077 chkp_process_stmt (&i, gimple_assign_lhs (s),
4078 gimple_location (s), integer_one_node,
4079 NULL_TREE, NULL_TREE, safe);
4080 chkp_process_stmt (&i, gimple_assign_rhs1 (s),
4081 gimple_location (s), integer_zero_node,
4082 NULL_TREE, NULL_TREE, safe);
4083 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
4084 if (grhs_class == GIMPLE_BINARY_RHS)
4085 chkp_process_stmt (&i, gimple_assign_rhs2 (s),
4086 gimple_location (s), integer_zero_node,
4087 NULL_TREE, NULL_TREE, safe);
4088 break;
4090 case GIMPLE_RETURN:
4092 greturn *r = as_a <greturn *> (s);
4093 if (gimple_return_retval (r) != NULL_TREE)
4095 chkp_process_stmt (&i, gimple_return_retval (r),
4096 gimple_location (r),
4097 integer_zero_node,
4098 NULL_TREE, NULL_TREE, safe);
4100 /* Additionally we need to add bounds
4101 to return statement. */
4102 chkp_add_bounds_to_ret_stmt (&i);
4105 break;
4107 case GIMPLE_CALL:
4108 chkp_add_bounds_to_call_stmt (&i);
4109 break;
4111 default:
4115 gsi_next (&i);
4117 /* We do not need any actual pointer stores in checker
4118 static initializer. */
4119 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4120 && gimple_code (s) == GIMPLE_ASSIGN
4121 && gimple_store_p (s))
4123 gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4124 gsi_remove (&del_iter, true);
4125 unlink_stmt_vdef (s);
4126 release_defs(s);
4129 bb = next;
4131 while (bb);
4133 /* Some input params may have bounds and be address taken. In this case
4134 we should store incoming bounds into bounds table. */
4135 tree arg;
4136 if (flag_chkp_store_bounds)
4137 for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4138 if (TREE_ADDRESSABLE (arg))
4140 if (BOUNDED_P (arg))
4142 tree bounds = chkp_get_next_bounds_parm (arg);
4143 tree def_ptr = ssa_default_def (cfun, arg);
4144 gimple_stmt_iterator iter
4145 = gsi_start_bb (chkp_get_entry_block ());
4146 chkp_build_bndstx (chkp_build_addr_expr (arg),
4147 def_ptr ? def_ptr : arg,
4148 bounds, &iter);
4150 /* Skip bounds arg. */
4151 arg = TREE_CHAIN (arg);
4153 else if (chkp_type_has_pointer (TREE_TYPE (arg)))
4155 tree orig_arg = arg;
4156 bitmap slots = BITMAP_ALLOC (NULL);
4157 gimple_stmt_iterator iter
4158 = gsi_start_bb (chkp_get_entry_block ());
4159 bitmap_iterator bi;
4160 unsigned bnd_no;
4162 chkp_find_bound_slots (TREE_TYPE (arg), slots);
4164 EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4166 tree bounds = chkp_get_next_bounds_parm (arg);
4167 HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4168 tree addr = chkp_build_addr_expr (orig_arg);
4169 tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4170 build_int_cst (ptr_type_node, offs));
4171 chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4172 bounds, &iter);
4174 arg = DECL_CHAIN (arg);
4176 BITMAP_FREE (slots);
4181 /* Find init/null/copy_ptr_bounds calls and replace them
4182 with assignments. It should allow better code
4183 optimization. */
4185 static void
4186 chkp_remove_useless_builtins ()
4188 basic_block bb;
4189 gimple_stmt_iterator gsi;
4191 FOR_EACH_BB_FN (bb, cfun)
4193 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4195 gimple stmt = gsi_stmt (gsi);
4196 tree fndecl;
4197 enum built_in_function fcode;
4199 /* Find builtins returning first arg and replace
4200 them with assignments. */
4201 if (gimple_code (stmt) == GIMPLE_CALL
4202 && (fndecl = gimple_call_fndecl (stmt))
4203 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4204 && (fcode = DECL_FUNCTION_CODE (fndecl))
4205 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4206 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4207 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4208 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4210 tree res = gimple_call_arg (stmt, 0);
4211 update_call_from_tree (&gsi, res);
4212 stmt = gsi_stmt (gsi);
4213 update_stmt (stmt);
4219 /* Initialize pass. */
4220 static void
4221 chkp_init (void)
4223 basic_block bb;
4224 gimple_stmt_iterator i;
4226 in_chkp_pass = true;
4228 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4229 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4230 chkp_unmark_stmt (gsi_stmt (i));
4232 chkp_invalid_bounds = new hash_set<tree>;
4233 chkp_completed_bounds_set = new hash_set<tree>;
4234 delete chkp_reg_bounds;
4235 chkp_reg_bounds = new hash_map<tree, tree>;
4236 delete chkp_bound_vars;
4237 chkp_bound_vars = new hash_map<tree, tree>;
4238 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4239 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4240 delete chkp_bounds_map;
4241 chkp_bounds_map = new hash_map<tree, tree>;
4242 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
4244 entry_block = NULL;
4245 zero_bounds = NULL_TREE;
4246 none_bounds = NULL_TREE;
4247 incomplete_bounds = integer_zero_node;
4248 tmp_var = NULL_TREE;
4249 size_tmp_var = NULL_TREE;
4251 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4253 /* We create these constant bounds once for each object file.
4254 These symbols go to comdat section and result in single copy
4255 of each one in the final binary. */
4256 chkp_get_zero_bounds_var ();
4257 chkp_get_none_bounds_var ();
4259 calculate_dominance_info (CDI_DOMINATORS);
4260 calculate_dominance_info (CDI_POST_DOMINATORS);
4262 bitmap_obstack_initialize (NULL);
4265 /* Finalize instrumentation pass. */
4266 static void
4267 chkp_fini (void)
4269 in_chkp_pass = false;
4271 delete chkp_invalid_bounds;
4272 delete chkp_completed_bounds_set;
4273 delete chkp_reg_addr_bounds;
4274 delete chkp_incomplete_bounds_map;
4276 free_dominance_info (CDI_DOMINATORS);
4277 free_dominance_info (CDI_POST_DOMINATORS);
4279 bitmap_obstack_release (NULL);
4282 /* Main instrumentation pass function. */
4283 static unsigned int
4284 chkp_execute (void)
4286 chkp_init ();
4288 chkp_instrument_function ();
4290 chkp_remove_useless_builtins ();
4292 chkp_function_mark_instrumented (cfun->decl);
4294 chkp_fix_cfg ();
4296 chkp_fini ();
4298 return 0;
4301 /* Instrumentation pass gate. */
4302 static bool
4303 chkp_gate (void)
4305 return cgraph_node::get (cfun->decl)->instrumentation_clone
4306 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4309 namespace {
4311 const pass_data pass_data_chkp =
4313 GIMPLE_PASS, /* type */
4314 "chkp", /* name */
4315 OPTGROUP_NONE, /* optinfo_flags */
4316 TV_NONE, /* tv_id */
4317 PROP_ssa | PROP_cfg, /* properties_required */
4318 0, /* properties_provided */
4319 0, /* properties_destroyed */
4320 0, /* todo_flags_start */
4321 TODO_verify_il
4322 | TODO_update_ssa /* todo_flags_finish */
4325 class pass_chkp : public gimple_opt_pass
4327 public:
4328 pass_chkp (gcc::context *ctxt)
4329 : gimple_opt_pass (pass_data_chkp, ctxt)
4332 /* opt_pass methods: */
4333 virtual opt_pass * clone ()
4335 return new pass_chkp (m_ctxt);
4338 virtual bool gate (function *)
4340 return chkp_gate ();
4343 virtual unsigned int execute (function *)
4345 return chkp_execute ();
4348 }; // class pass_chkp
4350 } // anon namespace
4352 gimple_opt_pass *
4353 make_pass_chkp (gcc::context *ctxt)
4355 return new pass_chkp (ctxt);
4358 #include "gt-tree-chkp.h"