runtime: GOARCH values for ppc64 BE & LE
[official-gcc.git] / gcc / tree-chkp.c
blob3e386918e014bd420e8cd0b5fe95231be91d6b33
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree-core.h"
25 #include "stor-layout.h"
26 #include "varasm.h"
27 #include "tree.h"
28 #include "target.h"
29 #include "tree-iterator.h"
30 #include "tree-cfg.h"
31 #include "langhooks.h"
32 #include "tree-pass.h"
33 #include "diagnostic.h"
34 #include "ggc.h"
35 #include "is-a.h"
36 #include "cfgloop.h"
37 #include "stringpool.h"
38 #include "tree-ssa-alias.h"
39 #include "tree-ssanames.h"
40 #include "tree-ssa-operands.h"
41 #include "tree-ssa-address.h"
42 #include "tree-ssa.h"
43 #include "predict.h"
44 #include "dominance.h"
45 #include "cfg.h"
46 #include "basic-block.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "gimple-expr.h"
49 #include "gimple.h"
50 #include "tree-phinodes.h"
51 #include "gimple-ssa.h"
52 #include "ssa-iterators.h"
53 #include "gimple-pretty-print.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "gimplify-me.h"
57 #include "print-tree.h"
58 #include "expr.h"
59 #include "tree-ssa-propagate.h"
60 #include "gimple-fold.h"
61 #include "tree-chkp.h"
62 #include "gimple-walk.h"
63 #include "rtl.h" /* For MEM_P, assign_temp. */
64 #include "tree-dfa.h"
65 #include "ipa-ref.h"
66 #include "lto-streamer.h"
67 #include "cgraph.h"
68 #include "ipa-chkp.h"
69 #include "params.h"
70 #include "ipa-chkp.h"
71 #include "params.h"
73 /* Pointer Bounds Checker instruments code with memory checks to find
74 out-of-bounds memory accesses. Checks are performed by computing
75 bounds for each pointer and then comparing address of accessed
76 memory before pointer dereferencing.
78 1. Function clones.
80 See ipa-chkp.c.
82 2. Instrumentation.
84 There are few things to instrument:
86 a) Memory accesses - add checker calls to check address of accessed memory
87 against bounds of dereferenced pointer. Obviously safe memory
88 accesses like static variable access does not have to be instrumented
89 with checks.
91 Example:
93 val_2 = *p_1;
95 with 4 bytes access is transformed into:
97 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
98 D.1_4 = p_1 + 3;
99 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
100 val_2 = *p_1;
102 where __bound_tmp.1_3 are bounds computed for pointer p_1,
103 __builtin___chkp_bndcl is a lower bound check and
104 __builtin___chkp_bndcu is an upper bound check.
106 b) Pointer stores.
108 When pointer is stored in memory we need to store its bounds. To
109 achieve compatibility of instrumented code with regular codes
110 we have to keep data layout and store bounds in special bound tables
111 via special checker call. Implementation of bounds table may vary for
112 different platforms. It has to associate pointer value and its
113 location (it is required because we may have two equal pointers
114 with different bounds stored in different places) with bounds.
115 Another checker builtin allows to get bounds for specified pointer
116 loaded from specified location.
118 Example:
120 buf1[i_1] = &buf2;
122 is transformed into:
124 buf1[i_1] = &buf2;
125 D.1_2 = &buf1[i_1];
126 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
128 where __bound_tmp.1_2 are bounds of &buf2.
130 c) Static initialization.
132 The special case of pointer store is static pointer initialization.
133 Bounds initialization is performed in a few steps:
134 - register all static initializations in front-end using
135 chkp_register_var_initializer
136 - when file compilation finishes we create functions with special
137 attribute 'chkp ctor' and put explicit initialization code
138 (assignments) for all statically initialized pointers.
139 - when checker constructor is compiled checker pass adds required
140 bounds initialization for all statically initialized pointers
141 - since we do not actually need excess pointers initialization
142 in checker constructor we remove such assignments from them
144 d) Calls.
146 For each call in the code we add additional arguments to pass
147 bounds for pointer arguments. We determine type of call arguments
148 using arguments list from function declaration; if function
149 declaration is not available we use function type; otherwise
150 (e.g. for unnamed arguments) we use type of passed value. Function
151 declaration/type is replaced with the instrumented one.
153 Example:
155 val_1 = foo (&buf1, &buf2, &buf1, 0);
157 is translated into:
159 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
160 &buf1, __bound_tmp.1_2, 0);
162 e) Returns.
164 If function returns a pointer value we have to return bounds also.
165 A new operand was added for return statement to hold returned bounds.
167 Example:
169 return &_buf1;
171 is transformed into
173 return &_buf1, __bound_tmp.1_1;
175 3. Bounds computation.
177 Compiler is fully responsible for computing bounds to be used for each
178 memory access. The first step for bounds computation is to find the
179 origin of pointer dereferenced for memory access. Basing on pointer
180 origin we define a way to compute its bounds. There are just few
181 possible cases:
183 a) Pointer is returned by call.
185 In this case we use corresponding checker builtin method to obtain returned
186 bounds.
188 Example:
190 buf_1 = malloc (size_2);
191 foo (buf_1);
193 is translated into:
195 buf_1 = malloc (size_2);
196 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
197 foo (buf_1, __bound_tmp.1_3);
199 b) Pointer is an address of an object.
201 In this case compiler tries to compute objects size and create corresponding
202 bounds. If object has incomplete type then special checker builtin is used to
203 obtain its size at runtime.
205 Example:
207 foo ()
209 <unnamed type> __bound_tmp.3;
210 static int buf[100];
212 <bb 3>:
213 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
215 <bb 2>:
216 return &buf, __bound_tmp.3_2;
219 Example:
221 Address of an object 'extern int buf[]' with incomplete type is
222 returned.
224 foo ()
226 <unnamed type> __bound_tmp.4;
227 long unsigned int __size_tmp.3;
229 <bb 3>:
230 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
231 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
233 <bb 2>:
234 return &buf, __bound_tmp.4_3;
237 c) Pointer is the result of object narrowing.
239 It happens when we use pointer to an object to compute pointer to a part
240 of an object. E.g. we take pointer to a field of a structure. In this
241 case we perform bounds intersection using bounds of original object and
242 bounds of object's part (which are computed basing on its type).
244 There may be some debatable questions about when narrowing should occur
245 and when it should not. To avoid false bound violations in correct
246 programs we do not perform narrowing when address of an array element is
247 obtained (it has address of the whole array) and when address of the first
248 structure field is obtained (because it is guaranteed to be equal to
249 address of the whole structure and it is legal to cast it back to structure).
251 Default narrowing behavior may be changed using compiler flags.
253 Example:
255 In this example address of the second structure field is returned.
257 foo (struct A * p, __bounds_type __bounds_of_p)
259 <unnamed type> __bound_tmp.3;
260 int * _2;
261 int * _5;
263 <bb 2>:
264 _5 = &p_1(D)->second_field;
265 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
266 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
267 __bounds_of_p_3(D));
268 _2 = &p_1(D)->second_field;
269 return _2, __bound_tmp.3_8;
272 Example:
274 In this example address of the first field of array element is returned.
276 foo (struct A * p, __bounds_type __bounds_of_p, int i)
278 long unsigned int _3;
279 long unsigned int _4;
280 struct A * _6;
281 int * _7;
283 <bb 2>:
284 _3 = (long unsigned int) i_1(D);
285 _4 = _3 * 8;
286 _6 = p_5(D) + _4;
287 _7 = &_6->first_field;
288 return _7, __bounds_of_p_2(D);
292 d) Pointer is the result of pointer arithmetic or type cast.
294 In this case bounds of the base pointer are used. In case of binary
295 operation producing a pointer we are analyzing data flow further
296 looking for operand's bounds. One operand is considered as a base
297 if it has some valid bounds. If we fall into a case when none of
298 operands (or both of them) has valid bounds, a default bounds value
299 is used.
301 Trying to find out bounds for binary operations we may fall into
302 cyclic dependencies for pointers. To avoid infinite recursion all
303 walked phi nodes instantly obtain corresponding bounds but created
304 bounds are marked as incomplete. It helps us to stop DF walk during
305 bounds search.
307 When we reach pointer source, some args of incomplete bounds phi obtain
308 valid bounds and those values are propagated further through phi nodes.
309 If no valid bounds were found for phi node then we mark its result as
310 invalid bounds. Process stops when all incomplete bounds become either
311 valid or invalid and we are able to choose a pointer base.
313 e) Pointer is loaded from the memory.
315 In this case we just need to load bounds from the bounds table.
317 Example:
319 foo ()
321 <unnamed type> __bound_tmp.3;
322 static int * buf;
323 int * _2;
325 <bb 2>:
326 _2 = buf;
327 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
328 return _2, __bound_tmp.3_4;
333 typedef void (*assign_handler)(tree, tree, void *);
335 static tree chkp_get_zero_bounds ();
336 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
337 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
338 gimple_stmt_iterator *iter);
339 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
340 tree *elt, bool *safe,
341 bool *bitfield,
342 tree *bounds,
343 gimple_stmt_iterator *iter,
344 bool innermost_bounds);
346 #define chkp_bndldx_fndecl \
347 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
348 #define chkp_bndstx_fndecl \
349 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
350 #define chkp_checkl_fndecl \
351 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
352 #define chkp_checku_fndecl \
353 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
354 #define chkp_bndmk_fndecl \
355 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
356 #define chkp_ret_bnd_fndecl \
357 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
358 #define chkp_intersect_fndecl \
359 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
360 #define chkp_narrow_bounds_fndecl \
361 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
362 #define chkp_sizeof_fndecl \
363 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
364 #define chkp_extract_lower_fndecl \
365 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
366 #define chkp_extract_upper_fndecl \
367 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
369 static GTY (()) tree chkp_uintptr_type;
371 static GTY (()) tree chkp_zero_bounds_var;
372 static GTY (()) tree chkp_none_bounds_var;
374 static GTY (()) basic_block entry_block;
375 static GTY (()) tree zero_bounds;
376 static GTY (()) tree none_bounds;
377 static GTY (()) tree incomplete_bounds;
378 static GTY (()) tree tmp_var;
379 static GTY (()) tree size_tmp_var;
380 static GTY (()) bitmap chkp_abnormal_copies;
382 struct hash_set<tree> *chkp_invalid_bounds;
383 struct hash_set<tree> *chkp_completed_bounds_set;
384 struct hash_map<tree, tree> *chkp_reg_bounds;
385 struct hash_map<tree, tree> *chkp_bound_vars;
386 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
387 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
388 struct hash_map<tree, tree> *chkp_bounds_map;
389 struct hash_map<tree, tree> *chkp_static_var_bounds;
391 static bool in_chkp_pass;
393 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
394 #define CHKP_SIZE_TMP_NAME "__size_tmp"
395 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
396 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
397 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
398 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
399 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
401 /* Static checker constructors may become very large and their
402 compilation with optimization may take too much time.
403 Therefore we put a limit to number of statements in one
404 constructor. Tests with 100 000 statically initialized
405 pointers showed following compilation times on Sandy Bridge
406 server (used -O2):
407 limit 100 => ~18 sec.
408 limit 300 => ~22 sec.
409 limit 1000 => ~30 sec.
410 limit 3000 => ~49 sec.
411 limit 5000 => ~55 sec.
412 limit 10000 => ~76 sec.
413 limit 100000 => ~532 sec. */
414 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
416 struct chkp_ctor_stmt_list
418 tree stmts;
419 int avail;
422 /* Return 1 if function FNDECL is instrumented by Pointer
423 Bounds Checker. */
424 bool
425 chkp_function_instrumented_p (tree fndecl)
427 return fndecl
428 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
431 /* Mark function FNDECL as instrumented. */
432 void
433 chkp_function_mark_instrumented (tree fndecl)
435 if (chkp_function_instrumented_p (fndecl))
436 return;
438 DECL_ATTRIBUTES (fndecl)
439 = tree_cons (get_identifier ("chkp instrumented"), NULL,
440 DECL_ATTRIBUTES (fndecl));
443 /* Return true when STMT is builtin call to instrumentation function
444 corresponding to CODE. */
446 bool
447 chkp_gimple_call_builtin_p (gimple call,
448 enum built_in_function code)
450 tree fndecl;
451 if (is_gimple_call (call)
452 && (fndecl = targetm.builtin_chkp_function (code))
453 && gimple_call_fndecl (call) == fndecl)
454 return true;
455 return false;
458 /* Emit code to store zero bounds for PTR located at MEM. */
459 void
460 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
462 tree zero_bnd, bnd, addr, bndstx;
464 if (flag_chkp_use_static_const_bounds)
465 zero_bnd = chkp_get_zero_bounds_var ();
466 else
467 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
468 integer_zero_node);
469 bnd = make_tree (pointer_bounds_type_node,
470 assign_temp (pointer_bounds_type_node, 0, 1));
471 addr = build1 (ADDR_EXPR,
472 build_pointer_type (TREE_TYPE (mem)), mem);
473 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
475 expand_assignment (bnd, zero_bnd, false);
476 expand_normal (bndstx);
479 /* Mark statement S to not be instrumented. */
480 static void
481 chkp_mark_stmt (gimple s)
483 gimple_set_plf (s, GF_PLF_1, true);
486 /* Mark statement S to be instrumented. */
487 static void
488 chkp_unmark_stmt (gimple s)
490 gimple_set_plf (s, GF_PLF_1, false);
493 /* Return 1 if statement S should not be instrumented. */
494 static bool
495 chkp_marked_stmt_p (gimple s)
497 return gimple_plf (s, GF_PLF_1);
500 /* Get var to be used for bound temps. */
501 static tree
502 chkp_get_tmp_var (void)
504 if (!tmp_var)
505 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
507 return tmp_var;
510 /* Get SSA_NAME to be used as temp. */
511 static tree
512 chkp_get_tmp_reg (gimple stmt)
514 if (in_chkp_pass)
515 return make_ssa_name (chkp_get_tmp_var (), stmt);
517 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
518 CHKP_BOUND_TMP_NAME);
521 /* Get var to be used for size temps. */
522 static tree
523 chkp_get_size_tmp_var (void)
525 if (!size_tmp_var)
526 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
528 return size_tmp_var;
531 /* Register bounds BND for address of OBJ. */
532 static void
533 chkp_register_addr_bounds (tree obj, tree bnd)
535 if (bnd == incomplete_bounds)
536 return;
538 chkp_reg_addr_bounds->put (obj, bnd);
540 if (dump_file && (dump_flags & TDF_DETAILS))
542 fprintf (dump_file, "Regsitered bound ");
543 print_generic_expr (dump_file, bnd, 0);
544 fprintf (dump_file, " for address of ");
545 print_generic_expr (dump_file, obj, 0);
546 fprintf (dump_file, "\n");
550 /* Return bounds registered for address of OBJ. */
551 static tree
552 chkp_get_registered_addr_bounds (tree obj)
554 tree *slot = chkp_reg_addr_bounds->get (obj);
555 return slot ? *slot : NULL_TREE;
558 /* Mark BOUNDS as completed. */
559 static void
560 chkp_mark_completed_bounds (tree bounds)
562 chkp_completed_bounds_set->add (bounds);
564 if (dump_file && (dump_flags & TDF_DETAILS))
566 fprintf (dump_file, "Marked bounds ");
567 print_generic_expr (dump_file, bounds, 0);
568 fprintf (dump_file, " as completed\n");
572 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
573 static bool
574 chkp_completed_bounds (tree bounds)
576 return chkp_completed_bounds_set->contains (bounds);
579 /* Clear comleted bound marks. */
580 static void
581 chkp_erase_completed_bounds (void)
583 delete chkp_completed_bounds_set;
584 chkp_completed_bounds_set = new hash_set<tree>;
587 /* Mark BOUNDS associated with PTR as incomplete. */
588 static void
589 chkp_register_incomplete_bounds (tree bounds, tree ptr)
591 chkp_incomplete_bounds_map->put (bounds, ptr);
593 if (dump_file && (dump_flags & TDF_DETAILS))
595 fprintf (dump_file, "Regsitered incomplete bounds ");
596 print_generic_expr (dump_file, bounds, 0);
597 fprintf (dump_file, " for ");
598 print_generic_expr (dump_file, ptr, 0);
599 fprintf (dump_file, "\n");
603 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
604 static bool
605 chkp_incomplete_bounds (tree bounds)
607 if (bounds == incomplete_bounds)
608 return true;
610 if (chkp_completed_bounds (bounds))
611 return false;
613 return chkp_incomplete_bounds_map->get (bounds) != NULL;
616 /* Clear incomleted bound marks. */
617 static void
618 chkp_erase_incomplete_bounds (void)
620 delete chkp_incomplete_bounds_map;
621 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
624 /* Build and return bndmk call which creates bounds for structure
625 pointed by PTR. Structure should have complete type. */
626 tree
627 chkp_make_bounds_for_struct_addr (tree ptr)
629 tree type = TREE_TYPE (ptr);
630 tree size;
632 gcc_assert (POINTER_TYPE_P (type));
634 size = TYPE_SIZE (TREE_TYPE (type));
636 gcc_assert (size);
638 return build_call_nary (pointer_bounds_type_node,
639 build_fold_addr_expr (chkp_bndmk_fndecl),
640 2, ptr, size);
643 /* Traversal function for chkp_may_finish_incomplete_bounds.
644 Set RES to 0 if at least one argument of phi statement
645 defining bounds (passed in KEY arg) is unknown.
646 Traversal stops when first unknown phi argument is found. */
647 bool
648 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
649 bool *res)
651 gimple phi;
652 unsigned i;
654 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
656 phi = SSA_NAME_DEF_STMT (bounds);
658 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
660 for (i = 0; i < gimple_phi_num_args (phi); i++)
662 tree phi_arg = gimple_phi_arg_def (phi, i);
663 if (!phi_arg)
665 *res = false;
666 /* Do not need to traverse further. */
667 return false;
671 return true;
674 /* Return 1 if all phi nodes created for bounds have their
675 arguments computed. */
676 static bool
677 chkp_may_finish_incomplete_bounds (void)
679 bool res = true;
681 chkp_incomplete_bounds_map
682 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
684 return res;
687 /* Helper function for chkp_finish_incomplete_bounds.
688 Recompute args for bounds phi node. */
689 bool
690 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
691 void *res ATTRIBUTE_UNUSED)
693 tree ptr = *slot;
694 gphi *bounds_phi;
695 gphi *ptr_phi;
696 unsigned i;
698 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
699 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
701 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
702 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
704 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
706 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
707 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
709 add_phi_arg (bounds_phi, bound_arg,
710 gimple_phi_arg_edge (ptr_phi, i),
711 UNKNOWN_LOCATION);
714 return true;
717 /* Mark BOUNDS as invalid. */
718 static void
719 chkp_mark_invalid_bounds (tree bounds)
721 chkp_invalid_bounds->add (bounds);
723 if (dump_file && (dump_flags & TDF_DETAILS))
725 fprintf (dump_file, "Marked bounds ");
726 print_generic_expr (dump_file, bounds, 0);
727 fprintf (dump_file, " as invalid\n");
731 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
732 static bool
733 chkp_valid_bounds (tree bounds)
735 if (bounds == zero_bounds || bounds == none_bounds)
736 return false;
738 return !chkp_invalid_bounds->contains (bounds);
741 /* Helper function for chkp_finish_incomplete_bounds.
742 Check all arguments of phi nodes trying to find
743 valid completed bounds. If there is at least one
744 such arg then bounds produced by phi node are marked
745 as valid completed bounds and all phi args are
746 recomputed. */
747 bool
748 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
750 gimple phi;
751 unsigned i;
753 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
755 if (chkp_completed_bounds (bounds))
756 return true;
758 phi = SSA_NAME_DEF_STMT (bounds);
760 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
762 for (i = 0; i < gimple_phi_num_args (phi); i++)
764 tree phi_arg = gimple_phi_arg_def (phi, i);
766 gcc_assert (phi_arg);
768 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
770 *res = true;
771 chkp_mark_completed_bounds (bounds);
772 chkp_recompute_phi_bounds (bounds, slot, NULL);
773 return true;
777 return true;
780 /* Helper function for chkp_finish_incomplete_bounds.
781 Marks all incompleted bounds as invalid. */
782 bool
783 chkp_mark_invalid_bounds_walker (tree const &bounds,
784 tree *slot ATTRIBUTE_UNUSED,
785 void *res ATTRIBUTE_UNUSED)
787 if (!chkp_completed_bounds (bounds))
789 chkp_mark_invalid_bounds (bounds);
790 chkp_mark_completed_bounds (bounds);
792 return true;
795 /* When all bound phi nodes have all their args computed
796 we have enough info to find valid bounds. We iterate
797 through all incompleted bounds searching for valid
798 bounds. Found valid bounds are marked as completed
799 and all remaining incompleted bounds are recomputed.
800 Process continues until no new valid bounds may be
801 found. All remained incompleted bounds are marked as
802 invalid (i.e. have no valid source of bounds). */
803 static void
804 chkp_finish_incomplete_bounds (void)
806 bool found_valid;
808 while (found_valid)
810 found_valid = false;
812 chkp_incomplete_bounds_map->
813 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
815 if (found_valid)
816 chkp_incomplete_bounds_map->
817 traverse<void *, chkp_recompute_phi_bounds> (NULL);
820 chkp_incomplete_bounds_map->
821 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
822 chkp_incomplete_bounds_map->
823 traverse<void *, chkp_recompute_phi_bounds> (NULL);
825 chkp_erase_completed_bounds ();
826 chkp_erase_incomplete_bounds ();
829 /* Return 1 if type TYPE is a pointer type or a
830 structure having a pointer type as one of its fields.
831 Otherwise return 0. */
832 bool
833 chkp_type_has_pointer (const_tree type)
835 bool res = false;
837 if (BOUNDED_TYPE_P (type))
838 res = true;
839 else if (RECORD_OR_UNION_TYPE_P (type))
841 tree field;
843 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
844 if (TREE_CODE (field) == FIELD_DECL)
845 res = res || chkp_type_has_pointer (TREE_TYPE (field));
847 else if (TREE_CODE (type) == ARRAY_TYPE)
848 res = chkp_type_has_pointer (TREE_TYPE (type));
850 return res;
853 unsigned
854 chkp_type_bounds_count (const_tree type)
856 unsigned res = 0;
858 if (!type)
859 res = 0;
860 else if (BOUNDED_TYPE_P (type))
861 res = 1;
862 else if (RECORD_OR_UNION_TYPE_P (type))
864 bitmap have_bound;
866 bitmap_obstack_initialize (NULL);
867 have_bound = BITMAP_ALLOC (NULL);
868 chkp_find_bound_slots (type, have_bound);
869 res = bitmap_count_bits (have_bound);
870 BITMAP_FREE (have_bound);
871 bitmap_obstack_release (NULL);
874 return res;
877 /* Get bounds associated with NODE via
878 chkp_set_bounds call. */
879 tree
880 chkp_get_bounds (tree node)
882 tree *slot;
884 if (!chkp_bounds_map)
885 return NULL_TREE;
887 slot = chkp_bounds_map->get (node);
888 return slot ? *slot : NULL_TREE;
891 /* Associate bounds VAL with NODE. */
892 void
893 chkp_set_bounds (tree node, tree val)
895 if (!chkp_bounds_map)
896 chkp_bounds_map = new hash_map<tree, tree>;
898 chkp_bounds_map->put (node, val);
901 /* Check if statically initialized variable VAR require
902 static bounds initialization. If VAR is added into
903 bounds initlization list then 1 is returned. Otherwise
904 return 0. */
905 extern bool
906 chkp_register_var_initializer (tree var)
908 if (!flag_check_pointer_bounds
909 || DECL_INITIAL (var) == error_mark_node)
910 return false;
912 gcc_assert (TREE_CODE (var) == VAR_DECL);
913 gcc_assert (DECL_INITIAL (var));
915 if (TREE_STATIC (var)
916 && chkp_type_has_pointer (TREE_TYPE (var)))
918 varpool_node::get_create (var)->need_bounds_init = 1;
919 return true;
922 return false;
925 /* Helper function for chkp_finish_file.
927 Add new modification statement (RHS is assigned to LHS)
928 into list of static initializer statementes (passed in ARG).
929 If statements list becomes too big, emit checker constructor
930 and start the new one. */
931 static void
932 chkp_add_modification_to_stmt_list (tree lhs,
933 tree rhs,
934 void *arg)
936 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
937 tree modify;
939 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
940 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
942 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
943 append_to_statement_list (modify, &stmts->stmts);
945 stmts->avail--;
948 /* Build and return ADDR_EXPR for specified object OBJ. */
949 static tree
950 chkp_build_addr_expr (tree obj)
952 return TREE_CODE (obj) == TARGET_MEM_REF
953 ? tree_mem_ref_addr (ptr_type_node, obj)
954 : build_fold_addr_expr (obj);
957 /* Helper function for chkp_finish_file.
958 Initialize bound variable BND_VAR with bounds of variable
959 VAR to statements list STMTS. If statements list becomes
960 too big, emit checker constructor and start the new one. */
961 static void
962 chkp_output_static_bounds (tree bnd_var, tree var,
963 struct chkp_ctor_stmt_list *stmts)
965 tree lb, ub, size;
967 if (TREE_CODE (var) == STRING_CST)
969 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
970 size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
972 else if (DECL_SIZE (var)
973 && !chkp_variable_size_type (TREE_TYPE (var)))
975 /* Compute bounds using statically known size. */
976 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
977 size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
979 else
981 /* Compute bounds using dynamic size. */
982 tree call;
984 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
985 call = build1 (ADDR_EXPR,
986 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
987 chkp_sizeof_fndecl);
988 size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
989 call, 1, var);
991 if (flag_chkp_zero_dynamic_size_as_infinite)
993 tree max_size, cond;
995 max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
996 cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
997 size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
1000 size = size_binop (MINUS_EXPR, size, size_one_node);
1003 ub = size_binop (PLUS_EXPR, lb, size);
1004 stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
1005 &stmts->stmts);
1006 if (stmts->avail <= 0)
1008 cgraph_build_static_cdtor ('B', stmts->stmts,
1009 MAX_RESERVED_INIT_PRIORITY + 2);
1010 stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
1011 stmts->stmts = NULL;
1015 /* Return entry block to be used for checker initilization code.
1016 Create new block if required. */
1017 static basic_block
1018 chkp_get_entry_block (void)
1020 if (!entry_block)
1021 entry_block = split_block (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL)->dest;
1023 return entry_block;
1026 /* Return a bounds var to be used for pointer var PTR_VAR. */
1027 static tree
1028 chkp_get_bounds_var (tree ptr_var)
1030 tree bnd_var;
1031 tree *slot;
1033 slot = chkp_bound_vars->get (ptr_var);
1034 if (slot)
1035 bnd_var = *slot;
1036 else
1038 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1039 CHKP_BOUND_TMP_NAME);
1040 chkp_bound_vars->put (ptr_var, bnd_var);
1043 return bnd_var;
1048 /* Register bounds BND for object PTR in global bounds table.
1049 A copy of bounds may be created for abnormal ssa names.
1050 Returns bounds to use for PTR. */
1051 static tree
1052 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1054 bool abnormal_ptr;
1056 if (!chkp_reg_bounds)
1057 return bnd;
1059 /* Do nothing if bounds are incomplete_bounds
1060 because it means bounds will be recomputed. */
1061 if (bnd == incomplete_bounds)
1062 return bnd;
1064 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1065 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1066 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1068 /* A single bounds value may be reused multiple times for
1069 different pointer values. It may cause coalescing issues
1070 for abnormal SSA names. To avoid it we create a bounds
1071 copy in case it is computed for abnormal SSA name.
1073 We also cannot reuse such created copies for other pointers */
1074 if (abnormal_ptr
1075 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1077 tree bnd_var = NULL_TREE;
1079 if (abnormal_ptr)
1081 if (SSA_NAME_VAR (ptr))
1082 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1084 else
1085 bnd_var = chkp_get_tmp_var ();
1087 /* For abnormal copies we may just find original
1088 bounds and use them. */
1089 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1091 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1092 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1093 bnd = gimple_assign_rhs1 (bnd_def);
1095 /* For undefined values we usually use none bounds
1096 value but in case of abnormal edge it may cause
1097 coalescing failures. Use default definition of
1098 bounds variable instead to avoid it. */
1099 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1100 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1102 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1104 if (dump_file && (dump_flags & TDF_DETAILS))
1106 fprintf (dump_file, "Using default def bounds ");
1107 print_generic_expr (dump_file, bnd, 0);
1108 fprintf (dump_file, " for abnormal default def SSA name ");
1109 print_generic_expr (dump_file, ptr, 0);
1110 fprintf (dump_file, "\n");
1113 else
1115 tree copy;
1116 gimple def = SSA_NAME_DEF_STMT (ptr);
1117 gimple assign;
1118 gimple_stmt_iterator gsi;
1120 if (bnd_var)
1121 copy = make_ssa_name (bnd_var, gimple_build_nop ());
1122 else
1123 copy = make_temp_ssa_name (pointer_bounds_type_node,
1124 gimple_build_nop (),
1125 CHKP_BOUND_TMP_NAME);
1126 assign = gimple_build_assign (copy, bnd);
1128 if (dump_file && (dump_flags & TDF_DETAILS))
1130 fprintf (dump_file, "Creating a copy of bounds ");
1131 print_generic_expr (dump_file, bnd, 0);
1132 fprintf (dump_file, " for abnormal SSA name ");
1133 print_generic_expr (dump_file, ptr, 0);
1134 fprintf (dump_file, "\n");
1137 if (gimple_code (def) == GIMPLE_NOP)
1139 gsi = gsi_last_bb (chkp_get_entry_block ());
1140 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1141 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1142 else
1143 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1145 else
1147 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1148 /* Sometimes (e.g. when we load a pointer from a
1149 memory) bounds are produced later than a pointer.
1150 We need to insert bounds copy appropriately. */
1151 if (gimple_code (bnd_def) != GIMPLE_NOP
1152 && stmt_dominates_stmt_p (def, bnd_def))
1153 gsi = gsi_for_stmt (bnd_def);
1154 else
1155 gsi = gsi_for_stmt (def);
1156 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1159 bnd = copy;
1162 if (abnormal_ptr)
1163 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1166 chkp_reg_bounds->put (ptr, bnd);
1168 if (dump_file && (dump_flags & TDF_DETAILS))
1170 fprintf (dump_file, "Regsitered bound ");
1171 print_generic_expr (dump_file, bnd, 0);
1172 fprintf (dump_file, " for pointer ");
1173 print_generic_expr (dump_file, ptr, 0);
1174 fprintf (dump_file, "\n");
1177 return bnd;
1180 /* Get bounds registered for object PTR in global bounds table. */
1181 static tree
1182 chkp_get_registered_bounds (tree ptr)
1184 tree *slot;
1186 if (!chkp_reg_bounds)
1187 return NULL_TREE;
1189 slot = chkp_reg_bounds->get (ptr);
1190 return slot ? *slot : NULL_TREE;
/* Add bound retvals to return statement pointed by GSI.

   If the function's DECL_RESULT has a bounded (pointer) type,
   compute bounds for the returned value, register them for the
   result decl and attach them to the GIMPLE_RETURN via retbnd.  */

static void
chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
{
  greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
  tree retval = gimple_return_retval (ret);
  tree ret_decl = DECL_RESULT (cfun->decl);
  tree bounds;

  /* Nothing to do for "return;" with no value.  */
  if (!retval)
    return;

  if (BOUNDED_P (ret_decl))
    {
      bounds = chkp_find_bounds (retval, gsi);
      bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
      gimple_return_set_retbnd (ret, bounds);
    }

  update_stmt (ret);
}
1216 /* Force OP to be suitable for using as an argument for call.
1217 New statements (if any) go to SEQ. */
1218 static tree
1219 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1221 gimple_seq stmts;
1222 gimple_stmt_iterator si;
1224 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1226 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1227 chkp_mark_stmt (gsi_stmt (si));
1229 gimple_seq_add_seq (seq, stmts);
1231 return op;
/* Generate lower bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store
   (integer_zero_node = read, integer_one_node = write).  */

static void
chkp_check_lower (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple check;
  tree node;

  /* Zero bounds can never fail a check; skip it entirely.  */
  if (bounds == chkp_get_zero_bounds ())
    return;

  /* Honor -fchkp-check-read / -fchkp-check-write.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* ADDR may be an arbitrary expression; gimplify it first.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, " ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
/* Generate upper bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store
   (integer_zero_node = read, integer_one_node = write).
   Mirror image of chkp_check_lower using chkp_checku_fndecl.  */

static void
chkp_check_upper (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple check;
  tree node;

  /* Zero bounds can never fail a check; skip it entirely.  */
  if (bounds == chkp_get_zero_bounds ())
    return;

  /* Honor -fchkp-check-read / -fchkp-check-write.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* ADDR may be an arbitrary expression; gimplify it first.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, " ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */

void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  /* Lower check against the first accessed byte, upper check
     against the last one.  */
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
/* Replace call to _bnd_chk_* pointed by GSI with
   bndcu and bndcl calls.  DIRFLAG determines whether
   check is for read or write.  */

void
chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
				    tree dirflag)
{
  /* Keep a copy of the iterator: chkp_find_bounds may insert
     statements and GSI may be advanced, but we must remove the
     original builtin call at the end.  */
  gimple_stmt_iterator call_iter = *gsi;
  gimple call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);

  /* CHECK_PTR_LBOUNDS and CHECK_PTR_BOUNDS both need a lower check
     on the start address.  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
    chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);

  /* CHECK_PTR_BOUNDS takes a size argument: the upper check is
     performed on the last accessed byte, addr + size - 1.  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    {
      tree size = gimple_call_arg (call, 1);
      addr = fold_build_pointer_plus (addr, size);
      addr = fold_build_pointer_plus_hwi (addr, -1);
      chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
    }

  /* The user builtin itself is no longer needed.  */
  gsi_remove (&call_iter, true);
}
/* Replace call to _bnd_get_ptr_* pointed by GSI with
   corresponding bounds extract call.  */

void
chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
{
  gimple call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);
  gimple extract;

  /* Map the user-visible builtin onto the internal extract decl.  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
    fndecl = chkp_extract_lower_fndecl;
  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
    fndecl = chkp_extract_upper_fndecl;
  else
    gcc_unreachable ();

  /* The extract call takes the bounds value, not the pointer,
     and inherits the original call's LHS.  */
  extract = gimple_build_call (fndecl, 1, bounds);
  gimple_call_set_lhs (extract, gimple_call_lhs (call));
  chkp_mark_stmt (extract);

  gsi_replace (gsi, extract, false);
}
/* Return COMPONENT_REF accessing FIELD in OBJ.  */

static tree
chkp_build_component_ref (tree obj, tree field)
{
  tree res;

  /* If object is TMR then we do not use component_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (obj) == TARGET_MEM_REF)
    {
      /* Fold the field's byte offset into the TMR offset.  */
      tree offs = TMR_OFFSET (obj);
      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, DECL_FIELD_OFFSET (field));
      gcc_assert (offs);

      res = copy_node (obj);
      TREE_TYPE (res) = TREE_TYPE (field);
      TMR_OFFSET (res) = offs;
    }
  else
    res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);

  return res;
}
1425 /* Return ARRAY_REF for array ARR and index IDX with
1426 specified element type ETYPE and element size ESIZE. */
1427 static tree
1428 chkp_build_array_ref (tree arr, tree etype, tree esize,
1429 unsigned HOST_WIDE_INT idx)
1431 tree index = build_int_cst (size_type_node, idx);
1432 tree res;
1434 /* If object is TMR then we do not use array_ref but
1435 add offset instead. We need it to be able to get addr
1436 of the reasult later. */
1437 if (TREE_CODE (arr) == TARGET_MEM_REF)
1439 tree offs = TMR_OFFSET (arr);
1441 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1442 esize, index);
1443 gcc_assert(esize);
1445 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1446 offs, esize);
1447 gcc_assert (offs);
1449 res = copy_node (arr);
1450 TREE_TYPE (res) = etype;
1451 TMR_OFFSET (res) = offs;
1453 else
1454 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1456 return res;
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   ITER points a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.  */

static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      /* Pointer slot: compute its bounds once; a later duplicate
	 at the same offset (e.g. through a union) is ignored.  */
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  /* Load the pointer value into a temporary SSA name so
	     chkp_find_bounds can be applied to it.  */
	  tree temp = make_temp_ssa_name (type, gimple_build_nop (), "");
	  gimple assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into each field at its bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays with unknown or zero length ([-1] domain).  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */

void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    /* Pointer slot: record its pointer-sized index.  */
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into each field at its bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays with unknown or zero length ([-1] domain).  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
/* Fill bitmap RES with information about bounds for
   type TYPE.  See chkp_find_bound_slots_1 for more
   details.  */

void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}
1586 /* Return 1 if call to FNDECL should be instrumented
1587 and 0 otherwise. */
1589 static bool
1590 chkp_instrument_normal_builtin (tree fndecl)
1592 switch (DECL_FUNCTION_CODE (fndecl))
1594 case BUILT_IN_STRLEN:
1595 case BUILT_IN_STRCPY:
1596 case BUILT_IN_STRNCPY:
1597 case BUILT_IN_STPCPY:
1598 case BUILT_IN_STPNCPY:
1599 case BUILT_IN_STRCAT:
1600 case BUILT_IN_STRNCAT:
1601 case BUILT_IN_MEMCPY:
1602 case BUILT_IN_MEMPCPY:
1603 case BUILT_IN_MEMSET:
1604 case BUILT_IN_MEMMOVE:
1605 case BUILT_IN_BZERO:
1606 case BUILT_IN_STRCMP:
1607 case BUILT_IN_STRNCMP:
1608 case BUILT_IN_BCMP:
1609 case BUILT_IN_MEMCMP:
1610 case BUILT_IN_MEMCPY_CHK:
1611 case BUILT_IN_MEMPCPY_CHK:
1612 case BUILT_IN_MEMMOVE_CHK:
1613 case BUILT_IN_MEMSET_CHK:
1614 case BUILT_IN_STRCPY_CHK:
1615 case BUILT_IN_STRNCPY_CHK:
1616 case BUILT_IN_STPCPY_CHK:
1617 case BUILT_IN_STPNCPY_CHK:
1618 case BUILT_IN_STRCAT_CHK:
1619 case BUILT_IN_STRNCAT_CHK:
1620 case BUILT_IN_MALLOC:
1621 case BUILT_IN_CALLOC:
1622 case BUILT_IN_REALLOC:
1623 return 1;
1625 default:
1626 return 0;
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.

   The function first filters out calls that must not be touched
   (internal fns, back-end builtins, legacy functions), then
   rewrites the chkp user builtins into internal equivalents, and
   finally — for remaining calls — builds a new call with a bounds
   argument appended after every bounded (or by-reference) actual
   argument.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to legacy functions (bnd_legacy attribute
     marks code that must not be instrumented).  */
  if (fndecl
      && lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (fndecl)))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      /* always_inline builtins get a clone; bail out if the clone
	 has no body to inline.  */
      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    /* Past the void terminator: varargs — fall back to the
	       actual argument's type.  */
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	/* A pointer (or by-reference) argument gets one bounds
	   argument right after it.  */
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  /* Aggregate passed by value: append bounds for every
	     pointer-sized slot that holds a pointer.  */
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds were added, keep the original statement.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA defs of the old call to the new statement
	 before replacing it.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
/* Return constant static bounds var with specified LB and UB
   if such var exists in varpool.  Return NULL otherwise.  */

static tree
chkp_find_const_bounds_var (HOST_WIDE_INT lb,
			    HOST_WIDE_INT ub)
{
  tree val = targetm.chkp_make_bounds_constant (lb, ub);
  struct varpool_node *node;

  /* We expect bounds constant is represented as a complex value
     of two pointer sized integers.  */
  gcc_assert (TREE_CODE (val) == COMPLEX_CST);

  /* Linear scan over all variables; acceptable because constant
     bounds vars are rare (zero/none bounds).  */
  FOR_EACH_VARIABLE (node)
    if (POINTER_BOUNDS_P (node->decl)
	&& TREE_READONLY (node->decl)
	&& DECL_INITIAL (node->decl)
	&& TREE_CODE (DECL_INITIAL (node->decl)) == COMPLEX_CST
	&& tree_int_cst_equal (TREE_REALPART (DECL_INITIAL (node->decl)),
			       TREE_REALPART (val))
	&& tree_int_cst_equal (TREE_IMAGPART (DECL_INITIAL (node->decl)),
			       TREE_IMAGPART (val)))
      return node->decl;

  return NULL;
}
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exist then new var is created with specified NAME.  */

static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree var;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  var = chkp_find_const_bounds_var (lb, ub);

  if (var)
    return var;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		    get_identifier (name), pointer_bounds_type_node);

  TREE_PUBLIC (var) = 1;
  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_STATIC (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  if (!in_lto_p)
    {
      DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
      /* Emit as COMDAT so duplicate definitions across units merge.  */
      DECL_COMDAT (var) = 1;
      varpool_node::get_create (var)->set_comdat_group (DECL_ASSEMBLER_NAME (var));
      varpool_node::get_create (var)->force_output = 1;
    }
  else
    /* In LTO the definition comes from another unit.  */
    DECL_EXTERNAL (var) = 1;
  varpool_node::finalize_decl (var);

  return var;
}
/* Generate code to make bounds with specified lower bound LB and SIZE.
   If AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.  */

static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* LB and SIZE may be arbitrary expressions; gimplify them and
     collect helper statements into SEQ.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER is only honored when ITER was given; otherwise the
     sequence goes to the start of the entry block.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, " inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, " at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
/* Return var holding zero bounds.  Lazily created: first try to
   reuse an existing symbol with the well-known assembler name,
   then fall back to creating a new static const bounds var
   covering [0, -1] (i.e. the whole address space).  */

tree
chkp_get_zero_bounds_var (void)
{
  if (!chkp_zero_bounds_var)
    {
      tree id = get_identifier (CHKP_ZERO_BOUNDS_VAR_NAME);
      symtab_node *node = symtab_node::get_for_asmname (id);
      if (node)
	chkp_zero_bounds_var = node->decl;
    }

  if (!chkp_zero_bounds_var)
    chkp_zero_bounds_var
      = chkp_make_static_const_bounds (0, -1,
				       CHKP_ZERO_BOUNDS_VAR_NAME);
  return chkp_zero_bounds_var;
}
/* Return var holding none bounds.  Lazily created the same way
   as chkp_get_zero_bounds_var, but with the (-1, 0) constant.  */

tree
chkp_get_none_bounds_var (void)
{
  if (!chkp_none_bounds_var)
    {
      tree id = get_identifier (CHKP_NONE_BOUNDS_VAR_NAME);
      symtab_node *node = symtab_node::get_for_asmname (id);
      if (node)
	chkp_none_bounds_var = node->decl;
    }

  if (!chkp_none_bounds_var)
    chkp_none_bounds_var
      = chkp_make_static_const_bounds (-1, 0,
				       CHKP_NONE_BOUNDS_VAR_NAME);
  return chkp_none_bounds_var;
}
/* Return SSA_NAME used to represent zero bounds.  The value is
   cached in the file-level ZERO_BOUNDS and materialized once per
   function: either as a load from the static zero-bounds var or
   as a bndmk call in the entry block.  */

static tree
chkp_get_zero_bounds (void)
{
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      zero_bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* bndmk with lb = 0 and size = 0.  */
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
/* Return SSA_NAME used to represent none bounds.  Cached in the
   file-level NONE_BOUNDS; materialized like zero bounds but with
   lb = -1 and size = 2 for the bndmk variant.  */

static tree
chkp_get_none_bounds (void)
{
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      none_bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
/* Return bounds to be used as a result of operation which
   should not create pointer (e.g. MULT_EXPR).  Zero bounds
   are used, which never fail a check.  */

static tree
chkp_get_invalid_op_bounds (void)
{
  return chkp_get_zero_bounds ();
}
/* Return bounds to be used for loads of non-pointer values.
   Zero bounds are used, which never fail a check.  */

static tree
chkp_get_nonpointer_load_bounds (void)
{
  return chkp_get_zero_bounds ();
}
/* Build bounds returned by CALL.

   Several special cases are handled without a retbnd call:
   alloca (bounds made from the result and the requested size),
   the chkp set/init/null/copy builtins (bounds known statically
   or taken from an argument), and calls that return one of their
   arguments (bounds of that argument are reused).  In the general
   case a __chkp_ret_bnd call is emitted after CALL.  */

static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple stmt;
  tree fndecl = gimple_call_fndecl (call);

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if ((gimple_call_return_flags (call) & ERF_RETURNS_ARG)
	   || gimple_call_builtin_p (call, BUILT_IN_STRCHR))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = 0, argno;
      if (gimple_call_return_flags (call) & ERF_RETURNS_ARG)
	retarg = gimple_call_return_flags (call) & ERF_RETURN_ARG_MASK;
      if (gimple_call_with_bounds_p (call))
	{
	  /* The call already carries bounds args: RETARG counts
	     non-bounds arguments, so skip bounds slots while
	     locating the returned argument.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2189 /* Return bounds used as returned by call
2190 which produced SSA name VAL. */
2191 gcall *
2192 chkp_retbnd_call_by_val (tree val)
2194 if (TREE_CODE (val) != SSA_NAME)
2195 return NULL;
2197 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2199 imm_use_iterator use_iter;
2200 use_operand_p use_p;
2201 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2202 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2203 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2204 return as_a <gcall *> (USE_STMT (use_p));
2206 return NULL;
2209 /* Check the next parameter for the given PARM is bounds
2210 and return it's default SSA_NAME (create if required). */
2211 static tree
2212 chkp_get_next_bounds_parm (tree parm)
2214 tree bounds = TREE_CHAIN (parm);
2215 gcc_assert (POINTER_BOUNDS_P (bounds));
2216 bounds = ssa_default_def (cfun, bounds);
2217 if (!bounds)
2219 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2220 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2222 return bounds;
/* Return bounds to be used for input argument PARM.

   Checks registered bounds for the SSA name and its PARM_DECL
   first; otherwise derives bounds from the parameter kind
   (static chain, main's args, bounded parm with a bounds
   sibling, or zero bounds) and registers the result.  */

static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
			  "main") == 0)
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* Bounded parm: its bounds arrive in the following
	     bounds parameter added by instrumentation.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds, 0);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	bounds = chkp_get_zero_bounds ();
    }

  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm), 0);
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
/* Build and return CALL_EXPR for bndldx builtin with specified
   arguments: ADDR is the location the pointer value was stored at,
   PTR is the pointer value itself.  */
tree
chkp_build_bndldx_call (tree addr, tree ptr)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
		    chkp_bndldx_fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
			       fn, 2, addr, ptr);
  /* Mark the call as a bounds operation so it is not instrumented
     again.  */
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
/* Insert code to load bounds for PTR located by ADDR.
   Code is inserted after position pointed by GSI.
   Loaded bounds are returned.  */
static tree
chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;
  tree bounds;

  seq = NULL;

  /* Both operands must be valid gimple call operands; force them
     into temporaries if needed (any fixup statements go into SEQ).  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
  chkp_mark_stmt (stmt);
  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndldx for pointer ");
      print_generic_expr (dump_file, ptr, 0);
      fprintf (dump_file, ": ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return bounds;
}
2338 /* Build and return CALL_EXPR for bndstx builtin with specified
2339 arguments. */
2340 tree
2341 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2343 tree fn = build1 (ADDR_EXPR,
2344 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2345 chkp_bndstx_fndecl);
2346 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2347 fn, 3, ptr, bounds, addr);
2348 CALL_WITH_BOUNDS_P (call) = true;
2349 return call;
/* Insert code to store BOUNDS for PTR stored by ADDR.
   New statements are inserted after position pointed
   by GSI.  */
void
chkp_build_bndstx (tree addr, tree ptr, tree bounds,
		   gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;

  seq = NULL;

  /* Both operands must be valid gimple call operands; force them
     into temporaries if needed (any fixup statements go into SEQ).  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  /* Note the builtin's argument order: pointer, bounds, address.  */
  stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
  chkp_mark_stmt (stmt);
  gimple_call_set_with_bounds (stmt, true);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndstx for pointer store ");
      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
      print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
    }
}
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.
   If NODE is non-NULL, the computed bounds are also registered
   for it.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  /* bndldx takes the parameter's address, so it must be
	     addressable.  */
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.

	   Note: for MINUS_EXPR the second operand's bounds are
	   never chosen — subtracting a pointer cannot yield a
	   pointer into its object.  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select between the two bounds with the same condition
	       the original assignment uses.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign_with_ops (COND_EXPR, bounds,
						 rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Pick the bounds of whichever operand wins the
	       MIN/MAX comparison.  */
	    gimple stmt;
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign_with_ops (COND_EXPR, bounds,
						 cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
/* Compute bounds for ssa name NODE defined by DEF_STMT.

   There are just few statement codes allowed: NOP (for default ssa
   names), ASSIGN, CALL, PHI, ASM.

   For a PHI definition, *ITER is set to point to the newly created
   bounds phi node whose arguments are filled in later by the caller.

   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node, 0);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition: bounds depend on the kind of decl.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced type must have a known non-zero size.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var, 0);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    gimple_build_nop (),
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      /* Bounds cannot flow through asm statements; use zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
2704 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2705 tree
2706 chkp_build_make_bounds_call (tree lower_bound, tree size)
2708 tree call = build1 (ADDR_EXPR,
2709 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2710 chkp_bndmk_fndecl);
2711 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2712 call, 2, lower_bound, size);
2715 /* Create static bounds var of specfified OBJ which is
2716 is either VAR_DECL or string constant. */
2717 static tree
2718 chkp_make_static_bounds (tree obj)
2720 static int string_id = 1;
2721 static int var_id = 1;
2722 tree *slot;
2723 const char *var_name;
2724 char *bnd_var_name;
2725 tree bnd_var;
2727 /* First check if we already have required var. */
2728 if (chkp_static_var_bounds)
2730 slot = chkp_static_var_bounds->get (obj);
2731 if (slot)
2732 return *slot;
2735 /* Build decl for bounds var. */
2736 if (TREE_CODE (obj) == VAR_DECL)
2738 if (DECL_IGNORED_P (obj))
2740 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2741 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2743 else
2745 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2747 /* For hidden symbols we want to skip first '*' char. */
2748 if (*var_name == '*')
2749 var_name++;
2751 bnd_var_name = (char *) xmalloc (strlen (var_name)
2752 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2753 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2754 strcat (bnd_var_name, var_name);
2757 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2758 get_identifier (bnd_var_name),
2759 pointer_bounds_type_node);
2761 /* Address of the obj will be used as lower bound. */
2762 TREE_ADDRESSABLE (obj) = 1;
2764 else
2766 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2767 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2769 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2770 get_identifier (bnd_var_name),
2771 pointer_bounds_type_node);
2774 TREE_PUBLIC (bnd_var) = 0;
2775 TREE_USED (bnd_var) = 1;
2776 TREE_READONLY (bnd_var) = 0;
2777 TREE_STATIC (bnd_var) = 1;
2778 TREE_ADDRESSABLE (bnd_var) = 0;
2779 DECL_ARTIFICIAL (bnd_var) = 1;
2780 DECL_COMMON (bnd_var) = 1;
2781 DECL_COMDAT (bnd_var) = 1;
2782 DECL_READ_P (bnd_var) = 1;
2783 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2784 /* Force output similar to constant bounds.
2785 See chkp_make_static_const_bounds. */
2786 varpool_node::get_create (bnd_var)->force_output = 1;
2787 /* Mark symbol as requiring bounds initialization. */
2788 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2789 varpool_node::finalize_decl (bnd_var);
2791 /* Add created var to the map to use it for other references
2792 to obj. */
2793 if (!chkp_static_var_bounds)
2794 chkp_static_var_bounds = new hash_map<tree, tree>;
2796 chkp_static_var_bounds->put (obj, bnd_var);
2798 return bnd_var;
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "'\n");
    }

  /* Runtime size lookup: __chkp_sizeof resolves VAR's size via a
     size relocation.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.
	 (0 - lb) gives the largest size reaching the end of the
	 address space from LB.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      cond = build2 (NE_EXPR, boolean_type_node, size_reloc, integer_zero_node);
      stmt = gimple_build_assign_with_ops (COND_EXPR, size,
					   cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  /* Emit the size computation at the start of the entry block so the
     bounds are available everywhere in the function.  */
  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
2862 /* Return 1 if TYPE has fields with zero size or fields
2863 marked with chkp_variable_size attribute. */
2864 bool
2865 chkp_variable_size_type (tree type)
2867 bool res = false;
2868 tree field;
2870 if (RECORD_OR_UNION_TYPE_P (type))
2871 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2873 if (TREE_CODE (field) == FIELD_DECL)
2874 res = res
2875 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
2876 || chkp_variable_size_type (TREE_TYPE (field));
2878 else
2879 res = !TYPE_SIZE (type)
2880 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
2881 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
2883 return res;
/* Compute and return bounds for address of DECL which is
   one of VAR_DECL, PARM_DECL, RESULT_DECL.  */
static tree
chkp_get_bounds_for_decl_addr (tree decl)
{
  tree bounds;

  gcc_assert (TREE_CODE (decl) == VAR_DECL
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  bounds = chkp_get_registered_addr_bounds (decl);

  if (bounds)
    return bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Building bounds for address of decl ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, "\n");
    }

  /* Use zero bounds if size is unknown and checks for
     unknown sizes are restricted.  */
  if ((!DECL_SIZE (decl)
       || (chkp_variable_size_type (TREE_TYPE (decl))
	   && (TREE_STATIC (decl)
	       || DECL_EXTERNAL (decl)
	       || TREE_PUBLIC (decl))))
      && !flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (flag_chkp_use_static_bounds
      && TREE_CODE (decl) == VAR_DECL
      && (TREE_STATIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_PUBLIC (decl))
      && !DECL_THREAD_LOCAL_P (decl))
    {
      /* For static/global vars load bounds from a static bounds
	 var initialized at startup; the load goes into the entry
	 block.  */
      tree bnd_var = chkp_make_static_bounds (decl);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else if (!DECL_SIZE (decl)
	   || (chkp_variable_size_type (TREE_TYPE (decl))
	       && (TREE_STATIC (decl)
		   || DECL_EXTERNAL (decl)
		   || TREE_PUBLIC (decl))))
    {
      /* Size is unknown at compile time; compute it at runtime.  */
      gcc_assert (TREE_CODE (decl) == VAR_DECL);
      bounds = chkp_generate_extern_var_bounds (decl);
    }
  else
    {
      /* Regular case: [&decl, &decl + size).  */
      tree lb = chkp_build_addr_expr (decl);
      bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
    }

  return bounds;
}
/* Compute and return bounds for constant string CST.  */
static tree
chkp_get_bounds_for_string_cst (tree cst)
{
  tree bounds;
  tree lb;
  tree size;

  gcc_assert (TREE_CODE (cst) == STRING_CST);

  bounds = chkp_get_registered_bounds (cst);

  if (bounds)
    return bounds;

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load bounds from a static bounds var initialized at
	 startup; the load goes into the entry block.  */
      tree bnd_var = chkp_make_static_bounds (cst);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    {
      /* Build bounds directly from the string's address and
	 length.  */
      lb = chkp_build_addr_expr (cst);
      size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
      bounds = chkp_make_bounds (lb, size, NULL, false);
    }

  /* Register bounds so further references to CST reuse them.  */
  bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);

  return bounds;
}
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   entry block.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  /* Zero (or missing) bounds on either side make the intersection
     trivially the other operand.  */
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple stmt;
      tree bounds;

      seq = NULL;

      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
3041 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3042 and 0 othersize. */
3043 static bool
3044 chkp_may_narrow_to_field (tree field)
3046 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3047 && tree_to_uhwi (DECL_SIZE (field)) != 0
3048 && (!DECL_FIELD_OFFSET (field)
3049 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3050 && (!DECL_FIELD_BIT_OFFSET (field)
3051 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3052 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3053 && !chkp_variable_size_type (TREE_TYPE (field));
/* Return 1 if bounds for FIELD should be narrowed to
   field's own size.  */
static bool
chkp_narrow_bounds_for_field (tree field)
{
  HOST_WIDE_INT offs;
  HOST_WIDE_INT bit_offs;

  if (!chkp_may_narrow_to_field (field))
    return false;

  /* Access to compiler generated fields should not cause
     bounds narrowing.  */
  if (DECL_ARTIFICIAL (field))
    return false;

  /* chkp_may_narrow_to_field guaranteed constant offsets above.  */
  offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
  bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));

  /* Narrow only when enabled, and for the first field (offset 0)
     only when it is requested to have its own bounds.  */
  return (flag_chkp_narrow_bounds
	  && (flag_chkp_first_field_has_own_bounds
	      || offs
	      || bit_offs));
}
3081 /* Perform narrowing for BOUNDS using bounds computed for field
3082 access COMPONENT. ITER meaning is the same as for
3083 chkp_intersect_bounds. */
3084 static tree
3085 chkp_narrow_bounds_to_field (tree bounds, tree component,
3086 gimple_stmt_iterator *iter)
3088 tree field = TREE_OPERAND (component, 1);
3089 tree size = DECL_SIZE_UNIT (field);
3090 tree field_ptr = chkp_build_addr_expr (component);
3091 tree field_bounds;
3093 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3095 return chkp_intersect_bounds (field_bounds, bounds, iter);
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE outer parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS outer parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all node to nodes vector
     in reversed order.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = (TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      gcc_assert (TREE_CODE (var) == VAR_DECL
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
     bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  /* Array indexing is never statically safe here.  */
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
	    {
	      /* Rule 1: narrow to the component preceding the
		 leftmost array_ref (if any) and stop scanning.  */
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  /* Rule 2: remember the rightmost suitable component
	     seen before any array_ref.  */
	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (field))
	    comp_to_narrow = var;
	  last_comp = var;

	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      /* Narrow to each array-typed field as we meet it and
		 reset any pending component narrowing.  */
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  /* Fall back to the outermost object's bounds when nothing
     narrower was computed.  */
  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
/* Compute and return bounds for address of OBJ.  Bounds are
   registered for OBJ so repeated queries reuse them.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Code addresses are never dereferenced as data; zero bounds
	 disable checking.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* Address of *p has the bounds of p itself.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Recurse to the underlying complex object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3308 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3309 to compute bounds if required. Computed bounds should be available at
3310 position pointed by ITER.
3312 If PTR_SRC is NULL_TREE then pointer definition is identified.
3314 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3315 PTR. If PTR is a any memory reference then ITER points to a statement
3316 after which bndldx will be inserterd. In both cases ITER will be updated
3317 to point to the inserted bndldx statement. */
3319 static tree
3320 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3322 tree addr = NULL_TREE;
3323 tree bounds = NULL_TREE;
3325 if (!ptr_src)
3326 ptr_src = ptr;
3328 bounds = chkp_get_registered_bounds (ptr_src);
3330 if (bounds)
3331 return bounds;
3333 switch (TREE_CODE (ptr_src))
3335 case MEM_REF:
3336 case VAR_DECL:
3337 if (BOUNDED_P (ptr_src))
3338 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3339 bounds = chkp_get_zero_bounds ();
3340 else
3342 addr = chkp_build_addr_expr (ptr_src);
3343 bounds = chkp_build_bndldx (addr, ptr, iter);
3345 else
3346 bounds = chkp_get_nonpointer_load_bounds ();
3347 break;
3349 case ARRAY_REF:
3350 case COMPONENT_REF:
3351 addr = get_base_address (ptr_src);
3352 if (DECL_P (addr)
3353 || TREE_CODE (addr) == MEM_REF
3354 || TREE_CODE (addr) == TARGET_MEM_REF)
3356 if (BOUNDED_P (ptr_src))
3357 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3358 bounds = chkp_get_zero_bounds ();
3359 else
3361 addr = chkp_build_addr_expr (ptr_src);
3362 bounds = chkp_build_bndldx (addr, ptr, iter);
3364 else
3365 bounds = chkp_get_nonpointer_load_bounds ();
3367 else
3369 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3370 bounds = chkp_find_bounds (addr, iter);
3372 break;
3374 case PARM_DECL:
3375 gcc_unreachable ();
3376 bounds = chkp_get_bound_for_parm (ptr_src);
3377 break;
3379 case TARGET_MEM_REF:
3380 addr = chkp_build_addr_expr (ptr_src);
3381 bounds = chkp_build_bndldx (addr, ptr, iter);
3382 break;
3384 case SSA_NAME:
3385 bounds = chkp_get_registered_bounds (ptr_src);
3386 if (!bounds)
3388 gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3389 gphi_iterator phi_iter;
3391 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3393 gcc_assert (bounds);
3395 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3397 unsigned i;
3399 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3401 tree arg = gimple_phi_arg_def (def_phi, i);
3402 tree arg_bnd;
3403 gphi *phi_bnd;
3405 arg_bnd = chkp_find_bounds (arg, NULL);
3407 /* chkp_get_bounds_by_definition created new phi
3408 statement and phi_iter points to it.
3410 Previous call to chkp_find_bounds could create
3411 new basic block and therefore change phi statement
3412 phi_iter points to. */
3413 phi_bnd = phi_iter.phi ();
3415 add_phi_arg (phi_bnd, arg_bnd,
3416 gimple_phi_arg_edge (def_phi, i),
3417 UNKNOWN_LOCATION);
3420 /* If all bound phi nodes have their arg computed
3421 then we may finish its computation. See
3422 chkp_finish_incomplete_bounds for more details. */
3423 if (chkp_may_finish_incomplete_bounds ())
3424 chkp_finish_incomplete_bounds ();
3427 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3428 || chkp_incomplete_bounds (bounds));
3430 break;
3432 case ADDR_EXPR:
3433 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3434 break;
3436 case INTEGER_CST:
3437 if (integer_zerop (ptr_src))
3438 bounds = chkp_get_none_bounds ();
3439 else
3440 bounds = chkp_get_invalid_op_bounds ();
3441 break;
3443 default:
3444 if (dump_file && (dump_flags & TDF_DETAILS))
3446 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3447 get_tree_code_name (TREE_CODE (ptr_src)));
3448 print_node (dump_file, "", ptr_src, 0);
3450 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3451 get_tree_code_name (TREE_CODE (ptr_src)));
3454 if (!bounds)
3456 if (dump_file && (dump_flags & TDF_DETAILS))
3458 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3459 print_node (dump_file, "", ptr_src, 0);
3461 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3464 return bounds;
3467 /* Normal case for bounds search without forced narrowing. */
3468 static tree
3469 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3471 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3474 /* Search bounds for pointer PTR loaded from PTR_SRC
3475 by statement *ITER points to. */
3476 static tree
3477 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3479 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3482 /* Helper function which checks type of RHS and finds all pointers in
3483 it. For each found pointer we build it's accesses in LHS and RHS
3484 objects and then call HANDLER for them. Function is used to copy
3485 or initilize bounds for copied object. */
3486 static void
3487 chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
3488 assign_handler handler)
3490 tree type = TREE_TYPE (lhs);
3492 /* We have nothing to do with clobbers. */
3493 if (TREE_CLOBBER_P (rhs))
3494 return;
3496 if (BOUNDED_TYPE_P (type))
3497 handler (lhs, rhs, arg);
3498 else if (RECORD_OR_UNION_TYPE_P (type))
3500 tree field;
3502 if (TREE_CODE (rhs) == CONSTRUCTOR)
3504 unsigned HOST_WIDE_INT cnt;
3505 tree val;
3507 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
3509 if (chkp_type_has_pointer (TREE_TYPE (field)))
3511 tree lhs_field = chkp_build_component_ref (lhs, field);
3512 chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
3516 else
3517 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3518 if (TREE_CODE (field) == FIELD_DECL
3519 && chkp_type_has_pointer (TREE_TYPE (field)))
3521 tree rhs_field = chkp_build_component_ref (rhs, field);
3522 tree lhs_field = chkp_build_component_ref (lhs, field);
3523 chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
3526 else if (TREE_CODE (type) == ARRAY_TYPE)
3528 unsigned HOST_WIDE_INT cur = 0;
3529 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3530 tree etype = TREE_TYPE (type);
3531 tree esize = TYPE_SIZE (etype);
3533 if (TREE_CODE (rhs) == CONSTRUCTOR)
3535 unsigned HOST_WIDE_INT cnt;
3536 tree purp, val, lhs_elem;
3538 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
3540 if (purp && TREE_CODE (purp) == RANGE_EXPR)
3542 tree lo_index = TREE_OPERAND (purp, 0);
3543 tree hi_index = TREE_OPERAND (purp, 1);
3545 for (cur = (unsigned)tree_to_uhwi (lo_index);
3546 cur <= (unsigned)tree_to_uhwi (hi_index);
3547 cur++)
3549 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3550 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3553 else
3555 if (purp)
3557 gcc_assert (TREE_CODE (purp) == INTEGER_CST);
3558 cur = tree_to_uhwi (purp);
3561 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
3563 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3567 /* Copy array only when size is known. */
3568 else if (maxval && !integer_minus_onep (maxval))
3569 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
3571 tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3572 tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
3573 chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
3576 else
3577 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3578 get_tree_code_name (TREE_CODE (type)));
3581 /* Add code to copy bounds for assignment of RHS to LHS.
3582 ARG is an iterator pointing ne code position. */
3583 static void
3584 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3586 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3587 tree bounds = chkp_find_bounds (rhs, iter);
3588 tree addr = chkp_build_addr_expr(lhs);
3590 chkp_build_bndstx (addr, rhs, bounds, iter);
3593 /* Emit static bound initilizers and size vars. */
3594 void
3595 chkp_finish_file (void)
3597 struct varpool_node *node;
3598 struct chkp_ctor_stmt_list stmts;
3600 if (seen_error ())
3601 return;
3603 /* Iterate through varpool and generate bounds initialization
3604 constructors for all statically initialized pointers. */
3605 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3606 stmts.stmts = NULL;
3607 FOR_EACH_VARIABLE (node)
3608 /* Check that var is actually emitted and we need and may initialize
3609 its bounds. */
3610 if (node->need_bounds_init
3611 && !POINTER_BOUNDS_P (node->decl)
3612 && DECL_RTL (node->decl)
3613 && MEM_P (DECL_RTL (node->decl))
3614 && TREE_ASM_WRITTEN (node->decl))
3616 chkp_walk_pointer_assignments (node->decl,
3617 DECL_INITIAL (node->decl),
3618 &stmts,
3619 chkp_add_modification_to_stmt_list);
3621 if (stmts.avail <= 0)
3623 cgraph_build_static_cdtor ('P', stmts.stmts,
3624 MAX_RESERVED_INIT_PRIORITY + 3);
3625 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3626 stmts.stmts = NULL;
3630 if (stmts.stmts)
3631 cgraph_build_static_cdtor ('P', stmts.stmts,
3632 MAX_RESERVED_INIT_PRIORITY + 3);
3634 /* Iterate through varpool and generate bounds initialization
3635 constructors for all static bounds vars. */
3636 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3637 stmts.stmts = NULL;
3638 FOR_EACH_VARIABLE (node)
3639 if (node->need_bounds_init
3640 && POINTER_BOUNDS_P (node->decl)
3641 && TREE_ASM_WRITTEN (node->decl))
3643 tree bnd = node->decl;
3644 tree var;
3646 gcc_assert (DECL_INITIAL (bnd)
3647 && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);
3649 var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
3650 chkp_output_static_bounds (bnd, var, &stmts);
3653 if (stmts.stmts)
3654 cgraph_build_static_cdtor ('B', stmts.stmts,
3655 MAX_RESERVED_INIT_PRIORITY + 2);
3657 delete chkp_static_var_bounds;
3658 delete chkp_bounds_map;
3661 /* An instrumentation function which is called for each statement
3662 having memory access we want to instrument. It inserts check
3663 code and bounds copy code.
3665 ITER points to statement to instrument.
3667 NODE holds memory access in statement to check.
3669 LOC holds the location information for statement.
3671 DIRFLAGS determines whether access is read or write.
3673 ACCESS_OFFS should be added to address used in NODE
3674 before check.
3676 ACCESS_SIZE holds size of checked access.
3678 SAFE indicates if NODE access is safe and should not be
3679 checked. */
3680 static void
3681 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3682 location_t loc, tree dirflag,
3683 tree access_offs, tree access_size,
3684 bool safe)
3686 tree node_type = TREE_TYPE (node);
3687 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3688 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3689 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3690 tree ptr = NULL_TREE; /* a pointer used for dereference */
3691 tree bounds = NULL_TREE;
3693 /* We do not need instrumentation for clobbers. */
3694 if (dirflag == integer_one_node
3695 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3696 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3697 return;
3699 switch (TREE_CODE (node))
3701 case ARRAY_REF:
3702 case COMPONENT_REF:
3704 bool bitfield;
3705 tree elt;
3707 if (safe)
3709 /* We are not going to generate any checks, so do not
3710 generate bounds as well. */
3711 addr_first = chkp_build_addr_expr (node);
3712 break;
3715 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3716 &bitfield, &bounds, iter, false);
3718 /* Break if there is no dereference and operation is safe. */
3720 if (bitfield)
3722 tree field = TREE_OPERAND (node, 1);
3724 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3725 size = DECL_SIZE_UNIT (field);
3727 if (elt)
3728 elt = chkp_build_addr_expr (elt);
3729 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3730 addr_first = fold_build_pointer_plus_loc (loc,
3731 addr_first,
3732 byte_position (field));
3734 else
3735 addr_first = chkp_build_addr_expr (node);
3737 break;
3739 case INDIRECT_REF:
3740 ptr = TREE_OPERAND (node, 0);
3741 addr_first = ptr;
3742 break;
3744 case MEM_REF:
3745 ptr = TREE_OPERAND (node, 0);
3746 addr_first = chkp_build_addr_expr (node);
3747 break;
3749 case TARGET_MEM_REF:
3750 ptr = TMR_BASE (node);
3751 addr_first = chkp_build_addr_expr (node);
3752 break;
3754 case ARRAY_RANGE_REF:
3755 printf("ARRAY_RANGE_REF\n");
3756 debug_gimple_stmt(gsi_stmt(*iter));
3757 debug_tree(node);
3758 gcc_unreachable ();
3759 break;
3761 case BIT_FIELD_REF:
3763 tree offs, rem, bpu;
3765 gcc_assert (!access_offs);
3766 gcc_assert (!access_size);
3768 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3769 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3770 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3771 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3773 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3774 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3775 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3776 size = fold_convert (size_type_node, size);
3778 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3779 dirflag, offs, size, safe);
3780 return;
3782 break;
3784 case VAR_DECL:
3785 case RESULT_DECL:
3786 case PARM_DECL:
3787 if (dirflag != integer_one_node
3788 || DECL_REGISTER (node))
3789 return;
3791 safe = true;
3792 addr_first = chkp_build_addr_expr (node);
3793 break;
3795 default:
3796 return;
3799 /* If addr_last was not computed then use (addr_first + size - 1)
3800 expression to compute it. */
3801 if (!addr_last)
3803 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3804 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3807 /* Shift both first_addr and last_addr by access_offs if specified. */
3808 if (access_offs)
3810 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3811 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3814 /* Generate bndcl/bndcu checks if memory access is not safe. */
3815 if (!safe)
3817 gimple_stmt_iterator stmt_iter = *iter;
3819 if (!bounds)
3820 bounds = chkp_find_bounds (ptr, iter);
3822 chkp_check_mem_access (addr_first, addr_last, bounds,
3823 stmt_iter, loc, dirflag);
3826 /* We need to store bounds in case pointer is stored. */
3827 if (dirflag == integer_one_node
3828 && chkp_type_has_pointer (node_type)
3829 && flag_chkp_store_bounds)
3831 gimple stmt = gsi_stmt (*iter);
3832 tree rhs1 = gimple_assign_rhs1 (stmt);
3833 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3835 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3836 chkp_walk_pointer_assignments (node, rhs1, iter,
3837 chkp_copy_bounds_for_elem);
3838 else
3840 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3841 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
3846 /* Add code to copy bounds for all pointers copied
3847 in ASSIGN created during inline of EDGE. */
3848 void
3849 chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
3851 tree lhs = gimple_assign_lhs (assign);
3852 tree rhs = gimple_assign_rhs1 (assign);
3853 gimple_stmt_iterator iter = gsi_for_stmt (assign);
3855 if (!flag_chkp_store_bounds)
3856 return;
3858 chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);
3860 /* We should create edges for all created calls to bndldx and bndstx. */
3861 while (gsi_stmt (iter) != assign)
3863 gimple stmt = gsi_stmt (iter);
3864 if (gimple_code (stmt) == GIMPLE_CALL)
3866 tree fndecl = gimple_call_fndecl (stmt);
3867 struct cgraph_node *callee = cgraph_node::get_create (fndecl);
3868 struct cgraph_edge *new_edge;
3870 gcc_assert (fndecl == chkp_bndstx_fndecl
3871 || fndecl == chkp_bndldx_fndecl
3872 || fndecl == chkp_ret_bnd_fndecl);
3874 new_edge = edge->caller->create_edge (callee,
3875 as_a <gcall *> (stmt),
3876 edge->count,
3877 edge->frequency);
3878 new_edge->frequency = compute_call_stmt_bb_frequency
3879 (edge->caller->decl, gimple_bb (stmt));
3881 gsi_prev (&iter);
3885 /* Some code transformation made during instrumentation pass
3886 may put code into inconsistent state. Here we find and fix
3887 such flaws. */
3888 void
3889 chkp_fix_cfg ()
3891 basic_block bb;
3892 gimple_stmt_iterator i;
3894 /* We could insert some code right after stmt which ends bb.
3895 We wanted to put this code on fallthru edge but did not
3896 add new edges from the beginning because it may cause new
3897 phi node creation which may be incorrect due to incomplete
3898 bound phi nodes. */
3899 FOR_ALL_BB_FN (bb, cfun)
3900 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
3902 gimple stmt = gsi_stmt (i);
3903 gimple_stmt_iterator next = i;
3905 gsi_next (&next);
3907 if (stmt_ends_bb_p (stmt)
3908 && !gsi_end_p (next))
3910 edge fall = find_fallthru_edge (bb->succs);
3911 basic_block dest = NULL;
3912 int flags = 0;
3914 gcc_assert (fall);
3916 /* We cannot split abnormal edge. Therefore we
3917 store its params, make it regular and then
3918 rebuild abnormal edge after split. */
3919 if (fall->flags & EDGE_ABNORMAL)
3921 flags = fall->flags & ~EDGE_FALLTHRU;
3922 dest = fall->dest;
3924 fall->flags &= ~EDGE_COMPLEX;
3927 while (!gsi_end_p (next))
3929 gimple next_stmt = gsi_stmt (next);
3930 gsi_remove (&next, false);
3931 gsi_insert_on_edge (fall, next_stmt);
3934 gsi_commit_edge_inserts ();
3936 /* Re-create abnormal edge. */
3937 if (dest)
3938 make_edge (bb, dest, flags);
3943 /* Walker callback for chkp_replace_function_pointers. Replaces
3944 function pointer in the specified operand with pointer to the
3945 instrumented function version. */
3946 static tree
3947 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
3948 void *data ATTRIBUTE_UNUSED)
3950 if (TREE_CODE (*op) == FUNCTION_DECL
3951 && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op))
3952 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
3953 /* For builtins we replace pointers only for selected
3954 function and functions having definitions. */
3955 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
3956 && (chkp_instrument_normal_builtin (*op)
3957 || gimple_has_body_p (*op)))))
3959 struct cgraph_node *node = cgraph_node::get_create (*op);
3960 struct cgraph_node *clone = NULL;
3962 if (!node->instrumentation_clone)
3963 clone = chkp_maybe_create_clone (*op);
3965 if (clone)
3966 *op = clone->decl;
3967 *walk_subtrees = 0;
3970 return NULL;
3973 /* This function searches for function pointers in statement
3974 pointed by GSI and replaces them with pointers to instrumented
3975 function versions. */
3976 static void
3977 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
3979 gimple stmt = gsi_stmt (*gsi);
3980 /* For calls we want to walk call args only. */
3981 if (gimple_code (stmt) == GIMPLE_CALL)
3983 unsigned i;
3984 for (i = 0; i < gimple_call_num_args (stmt); i++)
3985 walk_tree (gimple_call_arg_ptr (stmt, i),
3986 chkp_replace_function_pointer, NULL, NULL);
3988 else
3989 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
3992 /* This function instruments all statements working with memory,
3993 calls and rets.
3995 It also removes excess statements from static initializers. */
3996 static void
3997 chkp_instrument_function (void)
3999 basic_block bb, next;
4000 gimple_stmt_iterator i;
4001 enum gimple_rhs_class grhs_class;
4002 bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4004 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4007 next = bb->next_bb;
4008 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4010 gimple s = gsi_stmt (i);
4012 /* Skip statement marked to not be instrumented. */
4013 if (chkp_marked_stmt_p (s))
4015 gsi_next (&i);
4016 continue;
4019 chkp_replace_function_pointers (&i);
4021 switch (gimple_code (s))
4023 case GIMPLE_ASSIGN:
4024 chkp_process_stmt (&i, gimple_assign_lhs (s),
4025 gimple_location (s), integer_one_node,
4026 NULL_TREE, NULL_TREE, safe);
4027 chkp_process_stmt (&i, gimple_assign_rhs1 (s),
4028 gimple_location (s), integer_zero_node,
4029 NULL_TREE, NULL_TREE, safe);
4030 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
4031 if (grhs_class == GIMPLE_BINARY_RHS)
4032 chkp_process_stmt (&i, gimple_assign_rhs2 (s),
4033 gimple_location (s), integer_zero_node,
4034 NULL_TREE, NULL_TREE, safe);
4035 break;
4037 case GIMPLE_RETURN:
4039 greturn *r = as_a <greturn *> (s);
4040 if (gimple_return_retval (r) != NULL_TREE)
4042 chkp_process_stmt (&i, gimple_return_retval (r),
4043 gimple_location (r),
4044 integer_zero_node,
4045 NULL_TREE, NULL_TREE, safe);
4047 /* Additionally we need to add bounds
4048 to return statement. */
4049 chkp_add_bounds_to_ret_stmt (&i);
4052 break;
4054 case GIMPLE_CALL:
4055 chkp_add_bounds_to_call_stmt (&i);
4056 break;
4058 default:
4062 gsi_next (&i);
4064 /* We do not need any actual pointer stores in checker
4065 static initializer. */
4066 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4067 && gimple_code (s) == GIMPLE_ASSIGN
4068 && gimple_store_p (s))
4070 gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4071 gsi_remove (&del_iter, true);
4072 unlink_stmt_vdef (s);
4073 release_defs(s);
4076 bb = next;
4078 while (bb);
4080 /* Some input params may have bounds and be address taken. In this case
4081 we should store incoming bounds into bounds table. */
4082 tree arg;
4083 if (flag_chkp_store_bounds)
4084 for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4085 if (TREE_ADDRESSABLE (arg))
4087 if (BOUNDED_P (arg))
4089 tree bounds = chkp_get_next_bounds_parm (arg);
4090 tree def_ptr = ssa_default_def (cfun, arg);
4091 gimple_stmt_iterator iter
4092 = gsi_start_bb (chkp_get_entry_block ());
4093 chkp_build_bndstx (chkp_build_addr_expr (arg),
4094 def_ptr ? def_ptr : arg,
4095 bounds, &iter);
4097 /* Skip bounds arg. */
4098 arg = TREE_CHAIN (arg);
4100 else if (chkp_type_has_pointer (TREE_TYPE (arg)))
4102 tree orig_arg = arg;
4103 bitmap slots = BITMAP_ALLOC (NULL);
4104 gimple_stmt_iterator iter
4105 = gsi_start_bb (chkp_get_entry_block ());
4106 bitmap_iterator bi;
4107 unsigned bnd_no;
4109 chkp_find_bound_slots (TREE_TYPE (arg), slots);
4111 EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4113 tree bounds = chkp_get_next_bounds_parm (arg);
4114 HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4115 tree addr = chkp_build_addr_expr (orig_arg);
4116 tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4117 build_int_cst (ptr_type_node, offs));
4118 chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4119 bounds, &iter);
4121 arg = DECL_CHAIN (arg);
4123 BITMAP_FREE (slots);
4128 /* Find init/null/copy_ptr_bounds calls and replace them
4129 with assignments. It should allow better code
4130 optimization. */
4132 static void
4133 chkp_remove_useless_builtins ()
4135 basic_block bb;
4136 gimple_stmt_iterator gsi;
4138 FOR_EACH_BB_FN (bb, cfun)
4140 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4142 gimple stmt = gsi_stmt (gsi);
4143 tree fndecl;
4144 enum built_in_function fcode;
4146 /* Find builtins returning first arg and replace
4147 them with assignments. */
4148 if (gimple_code (stmt) == GIMPLE_CALL
4149 && (fndecl = gimple_call_fndecl (stmt))
4150 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4151 && (fcode = DECL_FUNCTION_CODE (fndecl))
4152 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4153 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4154 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4155 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4157 tree res = gimple_call_arg (stmt, 0);
4158 update_call_from_tree (&gsi, res);
4159 stmt = gsi_stmt (gsi);
4160 update_stmt (stmt);
4166 /* Initialize pass. */
4167 static void
4168 chkp_init (void)
4170 basic_block bb;
4171 gimple_stmt_iterator i;
4173 in_chkp_pass = true;
4175 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4176 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4177 chkp_unmark_stmt (gsi_stmt (i));
4179 chkp_invalid_bounds = new hash_set<tree>;
4180 chkp_completed_bounds_set = new hash_set<tree>;
4181 delete chkp_reg_bounds;
4182 chkp_reg_bounds = new hash_map<tree, tree>;
4183 delete chkp_bound_vars;
4184 chkp_bound_vars = new hash_map<tree, tree>;
4185 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4186 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4187 delete chkp_bounds_map;
4188 chkp_bounds_map = new hash_map<tree, tree>;
4189 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
4191 entry_block = NULL;
4192 zero_bounds = NULL_TREE;
4193 none_bounds = NULL_TREE;
4194 incomplete_bounds = integer_zero_node;
4195 tmp_var = NULL_TREE;
4196 size_tmp_var = NULL_TREE;
4198 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4200 /* We create these constant bounds once for each object file.
4201 These symbols go to comdat section and result in single copy
4202 of each one in the final binary. */
4203 chkp_get_zero_bounds_var ();
4204 chkp_get_none_bounds_var ();
4206 calculate_dominance_info (CDI_DOMINATORS);
4207 calculate_dominance_info (CDI_POST_DOMINATORS);
4209 bitmap_obstack_initialize (NULL);
4212 /* Finalize instrumentation pass. */
4213 static void
4214 chkp_fini (void)
4216 in_chkp_pass = false;
4218 delete chkp_invalid_bounds;
4219 delete chkp_completed_bounds_set;
4220 delete chkp_reg_addr_bounds;
4221 delete chkp_incomplete_bounds_map;
4223 free_dominance_info (CDI_DOMINATORS);
4224 free_dominance_info (CDI_POST_DOMINATORS);
4226 bitmap_obstack_release (NULL);
4229 /* Main instrumentation pass function. */
4230 static unsigned int
4231 chkp_execute (void)
4233 chkp_init ();
4235 chkp_instrument_function ();
4237 chkp_remove_useless_builtins ();
4239 chkp_function_mark_instrumented (cfun->decl);
4241 chkp_fix_cfg ();
4243 chkp_fini ();
4245 return 0;
4248 /* Instrumentation pass gate. */
4249 static bool
4250 chkp_gate (void)
4252 return cgraph_node::get (cfun->decl)->instrumentation_clone
4253 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4256 namespace {
4258 const pass_data pass_data_chkp =
4260 GIMPLE_PASS, /* type */
4261 "chkp", /* name */
4262 OPTGROUP_NONE, /* optinfo_flags */
4263 TV_NONE, /* tv_id */
4264 PROP_ssa | PROP_cfg, /* properties_required */
4265 0, /* properties_provided */
4266 0, /* properties_destroyed */
4267 0, /* todo_flags_start */
4268 TODO_verify_il
4269 | TODO_update_ssa /* todo_flags_finish */
4272 class pass_chkp : public gimple_opt_pass
4274 public:
4275 pass_chkp (gcc::context *ctxt)
4276 : gimple_opt_pass (pass_data_chkp, ctxt)
4279 /* opt_pass methods: */
4280 virtual opt_pass * clone ()
4282 return new pass_chkp (m_ctxt);
4285 virtual bool gate (function *)
4287 return chkp_gate ();
4290 virtual unsigned int execute (function *)
4292 return chkp_execute ();
4295 }; // class pass_chkp
4297 } // anon namespace
4299 gimple_opt_pass *
4300 make_pass_chkp (gcc::context *ctxt)
4302 return new pass_chkp (ctxt);
4305 #include "gt-tree-chkp.h"