1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-set.h"
25 #include "machmode.h"
26 #include "vec.h"
27 #include "double-int.h"
28 #include "input.h"
29 #include "alias.h"
30 #include "symtab.h"
31 #include "options.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stor-layout.h"
37 #include "varasm.h"
38 #include "target.h"
39 #include "tree-iterator.h"
40 #include "tree-cfg.h"
41 #include "langhooks.h"
42 #include "tree-pass.h"
43 #include "diagnostic.h"
44 #include "ggc.h"
45 #include "is-a.h"
46 #include "cfgloop.h"
47 #include "stringpool.h"
48 #include "tree-ssa-alias.h"
49 #include "tree-ssanames.h"
50 #include "tree-ssa-operands.h"
51 #include "tree-ssa-address.h"
52 #include "tree-ssa.h"
53 #include "predict.h"
54 #include "dominance.h"
55 #include "cfg.h"
56 #include "basic-block.h"
57 #include "tree-ssa-loop-niter.h"
58 #include "gimple-expr.h"
59 #include "gimple.h"
60 #include "tree-phinodes.h"
61 #include "gimple-ssa.h"
62 #include "ssa-iterators.h"
63 #include "gimple-pretty-print.h"
64 #include "gimple-iterator.h"
65 #include "gimplify.h"
66 #include "gimplify-me.h"
67 #include "print-tree.h"
68 #include "hashtab.h"
69 #include "tm.h"
70 #include "hard-reg-set.h"
71 #include "function.h"
72 #include "rtl.h"
73 #include "flags.h"
74 #include "statistics.h"
75 #include "real.h"
76 #include "fixed-value.h"
77 #include "insn-config.h"
78 #include "expmed.h"
79 #include "dojump.h"
80 #include "explow.h"
81 #include "calls.h"
82 #include "emit-rtl.h"
83 #include "stmt.h"
84 #include "expr.h"
85 #include "tree-ssa-propagate.h"
86 #include "gimple-fold.h"
87 #include "tree-chkp.h"
88 #include "gimple-walk.h"
89 #include "rtl.h" /* For MEM_P, assign_temp. */
90 #include "tree-dfa.h"
91 #include "ipa-ref.h"
92 #include "lto-streamer.h"
93 #include "cgraph.h"
94 #include "ipa-chkp.h"
95 #include "params.h"
97 /* Pointer Bounds Checker instruments code with memory checks to find
98 out-of-bounds memory accesses.  Checks are performed by computing
99 bounds for each pointer and then comparing the address of the accessed
100 memory against those bounds before the pointer is dereferenced.
102 1. Function clones.
104 See ipa-chkp.c.
106 2. Instrumentation.
108 There are a few things to instrument:
110 a) Memory accesses - add checker calls to check the address of accessed
111 memory against the bounds of the dereferenced pointer.  Obviously safe
112 memory accesses, such as static variable accesses, do not have to be
113 instrumented with checks.
115 Example:
117 val_2 = *p_1;
119 with a 4-byte access is transformed into:
121 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
122 D.1_4 = p_1 + 3;
123 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
124 val_2 = *p_1;
126 where __bound_tmp.1_3 are bounds computed for pointer p_1,
127 __builtin___chkp_bndcl is a lower bound check and
128 __builtin___chkp_bndcu is an upper bound check.
130 b) Pointer stores.
132 When a pointer is stored in memory we need to store its bounds.  To
133 keep the instrumented code compatible with regular code we have to
134 preserve the data layout and store bounds in special bounds tables
135 via a special checker call.  The implementation of the bounds table
136 may vary between platforms.  It has to associate bounds with both the
137 pointer value and its location (this is required because two equal
138 pointers with different bounds may be stored in different places).
139 Another checker builtin allows us to get bounds for a specified
140 pointer loaded from a specified location.
142 Example:
144 buf1[i_1] = &buf2;
146 is transformed into:
148 buf1[i_1] = &buf2;
149 D.1_2 = &buf1[i_1];
150 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
152 where __bound_tmp.1_2 are bounds of &buf2.
154 c) Static initialization.
156 A special case of pointer store is static pointer initialization.
157 Bounds initialization is performed in a few steps:
158 - register all static initializations in the front-end using
159 chkp_register_var_initializer
160 - when file compilation finishes we create functions with the special
161 attribute 'chkp ctor' and put explicit initialization code
162 (assignments) into them for all statically initialized pointers.
163 - when a checker constructor is compiled the checker pass adds the
164 required bounds initialization for all statically initialized pointers
165 - since we do not actually need the excess pointer initializations
166 in the checker constructor we remove such assignments from it (see the sketch below).
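
     Example (an illustrative sketch; exact temporaries and the split of
     statements between constructors depend on the target and on the
     constructor size limit):

       int x;
       int *p = &x;

     leads to a constructor with the 'chkp ctor' attribute which
     conceptually performs

       p = &x;
       __builtin___chkp_bndstx (&p, &x, __bound_tmp.1_2);

     where __bound_tmp.1_2 are bounds created for &x.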
168 d) Calls.
170 For each call in the code we add additional arguments to pass
171 bounds for pointer arguments.  We determine the type of each call
172 argument using the argument list from the function declaration; if the
173 declaration is not available we use the function type; for arguments
174 covered by neither (e.g. unnamed arguments) we use the type of the passed
175 value.  The function declaration/type is replaced with the instrumented one.
177 Example:
179 val_1 = foo (&buf1, &buf2, &buf1, 0);
181 is translated into:
183 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
184 &buf1, __bound_tmp.1_2, 0);
186 e) Returns.
188 If a function returns a pointer value we have to return its bounds as well.
189 A new operand was added to the return statement to hold the returned bounds.
191 Example:
193 return &_buf1;
195 is transformed into
197 return &_buf1, __bound_tmp.1_1;
199 3. Bounds computation.
201 The compiler is fully responsible for computing the bounds to be used for
202 each memory access.  The first step of bounds computation is to find the
203 origin of the pointer dereferenced by the memory access.  Based on the
204 pointer origin we define a way to compute its bounds.  There are just a
205 few possible cases:
207 a) Pointer is returned by call.
209 In this case we use corresponding checker builtin method to obtain returned
210 bounds.
212 Example:
214 buf_1 = malloc (size_2);
215 foo (buf_1);
217 is translated into:
219 buf_1 = malloc (size_2);
220 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
221 foo (buf_1, __bound_tmp.1_3);
223 b) Pointer is an address of an object.
225 In this case the compiler tries to compute the object's size and create
226 corresponding bounds.  If the object has an incomplete type then a special
227 checker builtin is used to obtain its size at runtime.
229 Example:
231 foo ()
233 <unnamed type> __bound_tmp.3;
234 static int buf[100];
236 <bb 3>:
237 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
239 <bb 2>:
240 return &buf, __bound_tmp.3_2;
243 Example:
245 Address of an object 'extern int buf[]' with incomplete type is
246 returned.
248 foo ()
250 <unnamed type> __bound_tmp.4;
251 long unsigned int __size_tmp.3;
253 <bb 3>:
254 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
255 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
257 <bb 2>:
258 return &buf, __bound_tmp.4_3;
261 c) Pointer is the result of object narrowing.
263 This happens when we use a pointer to an object to compute a pointer to a part
264 of that object, e.g. when we take a pointer to a field of a structure.  In this
265 case we perform a bounds intersection using the bounds of the original object
266 and the bounds of the object's part (which are computed based on its type).
268 There may be some debatable questions about when narrowing should occur
269 and when it should not.  To avoid false bound violations in correct
270 programs we do not perform narrowing when the address of an array element is
271 obtained (it keeps the bounds of the whole array) or when the address of the
272 first structure field is obtained (because it is guaranteed to be equal to the
273 address of the whole structure and it is legal to cast it back to the structure).
275 Default narrowing behavior may be changed using compiler flags.
277 Example:
279 In this example address of the second structure field is returned.
281 foo (struct A * p, __bounds_type __bounds_of_p)
283 <unnamed type> __bound_tmp.3;
284 int * _2;
285 int * _5;
287 <bb 2>:
288 _5 = &p_1(D)->second_field;
289 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
290 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
291 __bounds_of_p_3(D));
292 _2 = &p_1(D)->second_field;
293 return _2, __bound_tmp.3_8;
296 Example:
298 In this example address of the first field of array element is returned.
300 foo (struct A * p, __bounds_type __bounds_of_p, int i)
302 long unsigned int _3;
303 long unsigned int _4;
304 struct A * _6;
305 int * _7;
307 <bb 2>:
308 _3 = (long unsigned int) i_1(D);
309 _4 = _3 * 8;
310 _6 = p_5(D) + _4;
311 _7 = &_6->first_field;
312 return _7, __bounds_of_p_2(D);
316 d) Pointer is the result of pointer arithmetic or type cast.
318 In this case the bounds of the base pointer are used.  For a binary
319 operation producing a pointer we analyze the data flow further,
320 looking for the operands' bounds.  One operand is considered the base
321 if it has some valid bounds.  If we hit a case where none of the
322 operands (or both of them) have valid bounds, a default bounds value
323 is used.
325 While searching for bounds of binary operations we may run into
326 cyclic dependencies between pointers.  To avoid infinite recursion every
327 visited phi node immediately obtains corresponding bounds, but the created
328 bounds are marked as incomplete.  This lets us stop the data-flow walk
329 during the bounds search.
331 When we reach a pointer source, some args of the incomplete bounds phi
332 obtain valid bounds and those values are propagated further through phi
333 nodes.  If no valid bounds were found for a phi node then we mark its
334 result as invalid bounds.  The process stops when all incomplete bounds
335 become either valid or invalid and we are able to choose a pointer base.
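
     Example (an illustrative sketch; SSA names are made up):

       p_3 = PHI <p_1(2), p_2(3)>;
       val_4 = *p_3;

     gets a bounds phi node of the same shape

       __bound_tmp.1_3 = PHI <__bound_tmp.1_1(2), __bound_tmp.1_2(3)>;

     whose arguments are initially incomplete and are filled in once the
     bounds of p_1 and p_2 become known.  If no argument ever receives
     valid bounds, the phi result is marked as invalid bounds.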
337 e) Pointer is loaded from the memory.
339 In this case we just need to load bounds from the bounds table.
341 Example:
343 foo ()
345 <unnamed type> __bound_tmp.3;
346 static int * buf;
347 int * _2;
349 <bb 2>:
350 _2 = buf;
351 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
352 return _2, __bound_tmp.3_4;
357 typedef void (*assign_handler)(tree, tree, void *);
359 static tree chkp_get_zero_bounds ();
360 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
361 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
362 gimple_stmt_iterator *iter);
363 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
364 tree *elt, bool *safe,
365 bool *bitfield,
366 tree *bounds,
367 gimple_stmt_iterator *iter,
368 bool innermost_bounds);
370 #define chkp_bndldx_fndecl \
371 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
372 #define chkp_bndstx_fndecl \
373 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
374 #define chkp_checkl_fndecl \
375 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
376 #define chkp_checku_fndecl \
377 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
378 #define chkp_bndmk_fndecl \
379 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
380 #define chkp_ret_bnd_fndecl \
381 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
382 #define chkp_intersect_fndecl \
383 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
384 #define chkp_narrow_bounds_fndecl \
385 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
386 #define chkp_sizeof_fndecl \
387 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
388 #define chkp_extract_lower_fndecl \
389 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
390 #define chkp_extract_upper_fndecl \
391 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
393 static GTY (()) tree chkp_uintptr_type;
395 static GTY (()) tree chkp_zero_bounds_var;
396 static GTY (()) tree chkp_none_bounds_var;
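/* Per-function instrumentation state: the entry block used to emit
   bounds initialization code, cached zero/none/incomplete bounds
   values, temporary variables used for bounds and sizes, and a bitmap
   of bound copies created for abnormal SSA names.  */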
398 static GTY (()) basic_block entry_block;
399 static GTY (()) tree zero_bounds;
400 static GTY (()) tree none_bounds;
401 static GTY (()) tree incomplete_bounds;
402 static GTY (()) tree tmp_var;
403 static GTY (()) tree size_tmp_var;
404 static GTY (()) bitmap chkp_abnormal_copies;
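/* Maps and sets used while computing bounds: bounds marked as invalid
   or completed, bounds registered for pointers and for addresses of
   objects, bounds variables created for pointer variables, incomplete
   bounds mapped to their pointers, bounds associated with nodes via
   chkp_set_bounds, and bounds symbols created for static variables.  */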
406 struct hash_set<tree> *chkp_invalid_bounds;
407 struct hash_set<tree> *chkp_completed_bounds_set;
408 struct hash_map<tree, tree> *chkp_reg_bounds;
409 struct hash_map<tree, tree> *chkp_bound_vars;
410 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
411 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
412 struct hash_map<tree, tree> *chkp_bounds_map;
413 struct hash_map<tree, tree> *chkp_static_var_bounds;
415 static bool in_chkp_pass;
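/* Names and name prefixes used for compiler-generated bounds symbols
   and temporaries.  */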
417 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
418 #define CHKP_SIZE_TMP_NAME "__size_tmp"
419 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
420 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
421 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
422 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
423 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
425 /* Static checker constructors may become very large and their
426 compilation with optimization may take too much time.
427 Therefore we put a limit on the number of statements in one
428 constructor.  Tests with 100 000 statically initialized
429 pointers showed the following compilation times on a Sandy Bridge
430 server (using -O2):
431 limit 100 => ~18 sec.
432 limit 300 => ~22 sec.
433 limit 1000 => ~30 sec.
434 limit 3000 => ~49 sec.
435 limit 5000 => ~55 sec.
436 limit 10000 => ~76 sec.
437 limit 100000 => ~532 sec. */
438 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
440 struct chkp_ctor_stmt_list
442 tree stmts;
443 int avail;
446 /* Return 1 if function FNDECL is instrumented by Pointer
447 Bounds Checker. */
448 bool
449 chkp_function_instrumented_p (tree fndecl)
451 return fndecl
452 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
455 /* Mark function FNDECL as instrumented. */
456 void
457 chkp_function_mark_instrumented (tree fndecl)
459 if (chkp_function_instrumented_p (fndecl))
460 return;
462 DECL_ATTRIBUTES (fndecl)
463 = tree_cons (get_identifier ("chkp instrumented"), NULL,
464 DECL_ATTRIBUTES (fndecl));
467 /* Return true when CALL is a builtin call to the instrumentation function
468 corresponding to CODE. */
470 bool
471 chkp_gimple_call_builtin_p (gimple call,
472 enum built_in_function code)
474 tree fndecl;
475 if (is_gimple_call (call)
476 && (fndecl = targetm.builtin_chkp_function (code))
477 && gimple_call_fndecl (call) == fndecl)
478 return true;
479 return false;
482 /* Emit code to store zero bounds for PTR located at MEM. */
483 void
484 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
486 tree zero_bnd, bnd, addr, bndstx;
488 if (flag_chkp_use_static_const_bounds)
489 zero_bnd = chkp_get_zero_bounds_var ();
490 else
491 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
492 integer_zero_node);
493 bnd = make_tree (pointer_bounds_type_node,
494 assign_temp (pointer_bounds_type_node, 0, 1));
495 addr = build1 (ADDR_EXPR,
496 build_pointer_type (TREE_TYPE (mem)), mem);
497 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
499 expand_assignment (bnd, zero_bnd, false);
500 expand_normal (bndstx);
503 /* Mark statement S to not be instrumented. */
504 static void
505 chkp_mark_stmt (gimple s)
507 gimple_set_plf (s, GF_PLF_1, true);
510 /* Mark statement S to be instrumented. */
511 static void
512 chkp_unmark_stmt (gimple s)
514 gimple_set_plf (s, GF_PLF_1, false);
517 /* Return 1 if statement S should not be instrumented. */
518 static bool
519 chkp_marked_stmt_p (gimple s)
521 return gimple_plf (s, GF_PLF_1);
524 /* Get var to be used for bound temps. */
525 static tree
526 chkp_get_tmp_var (void)
528 if (!tmp_var)
529 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
531 return tmp_var;
534 /* Get SSA_NAME to be used as temp. */
535 static tree
536 chkp_get_tmp_reg (gimple stmt)
538 if (in_chkp_pass)
539 return make_ssa_name (chkp_get_tmp_var (), stmt);
541 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
542 CHKP_BOUND_TMP_NAME);
545 /* Get var to be used for size temps. */
546 static tree
547 chkp_get_size_tmp_var (void)
549 if (!size_tmp_var)
550 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
552 return size_tmp_var;
555 /* Register bounds BND for address of OBJ. */
556 static void
557 chkp_register_addr_bounds (tree obj, tree bnd)
559 if (bnd == incomplete_bounds)
560 return;
562 chkp_reg_addr_bounds->put (obj, bnd);
564 if (dump_file && (dump_flags & TDF_DETAILS))
566 fprintf (dump_file, "Registered bound ");
567 print_generic_expr (dump_file, bnd, 0);
568 fprintf (dump_file, " for address of ");
569 print_generic_expr (dump_file, obj, 0);
570 fprintf (dump_file, "\n");
574 /* Return bounds registered for address of OBJ. */
575 static tree
576 chkp_get_registered_addr_bounds (tree obj)
578 tree *slot = chkp_reg_addr_bounds->get (obj);
579 return slot ? *slot : NULL_TREE;
582 /* Mark BOUNDS as completed. */
583 static void
584 chkp_mark_completed_bounds (tree bounds)
586 chkp_completed_bounds_set->add (bounds);
588 if (dump_file && (dump_flags & TDF_DETAILS))
590 fprintf (dump_file, "Marked bounds ");
591 print_generic_expr (dump_file, bounds, 0);
592 fprintf (dump_file, " as completed\n");
596 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
597 static bool
598 chkp_completed_bounds (tree bounds)
600 return chkp_completed_bounds_set->contains (bounds);
603 /* Clear completed bound marks. */
604 static void
605 chkp_erase_completed_bounds (void)
607 delete chkp_completed_bounds_set;
608 chkp_completed_bounds_set = new hash_set<tree>;
611 /* Mark BOUNDS associated with PTR as incomplete. */
612 static void
613 chkp_register_incomplete_bounds (tree bounds, tree ptr)
615 chkp_incomplete_bounds_map->put (bounds, ptr);
617 if (dump_file && (dump_flags & TDF_DETAILS))
619 fprintf (dump_file, "Registered incomplete bounds ");
620 print_generic_expr (dump_file, bounds, 0);
621 fprintf (dump_file, " for ");
622 print_generic_expr (dump_file, ptr, 0);
623 fprintf (dump_file, "\n");
627 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
628 static bool
629 chkp_incomplete_bounds (tree bounds)
631 if (bounds == incomplete_bounds)
632 return true;
634 if (chkp_completed_bounds (bounds))
635 return false;
637 return chkp_incomplete_bounds_map->get (bounds) != NULL;
640 /* Clear incomplete bound marks. */
641 static void
642 chkp_erase_incomplete_bounds (void)
644 delete chkp_incomplete_bounds_map;
645 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
648 /* Build and return bndmk call which creates bounds for structure
649 pointed by PTR. Structure should have complete type. */
650 tree
651 chkp_make_bounds_for_struct_addr (tree ptr)
653 tree type = TREE_TYPE (ptr);
654 tree size;
656 gcc_assert (POINTER_TYPE_P (type));
658 size = TYPE_SIZE (TREE_TYPE (type));
660 gcc_assert (size);
662 return build_call_nary (pointer_bounds_type_node,
663 build_fold_addr_expr (chkp_bndmk_fndecl),
664 2, ptr, size);
667 /* Traversal function for chkp_may_finish_incomplete_bounds.
668 Set RES to 0 if at least one argument of phi statement
669 defining bounds (passed in KEY arg) is unknown.
670 Traversal stops when first unknown phi argument is found. */
671 bool
672 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
673 bool *res)
675 gimple phi;
676 unsigned i;
678 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
680 phi = SSA_NAME_DEF_STMT (bounds);
682 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
684 for (i = 0; i < gimple_phi_num_args (phi); i++)
686 tree phi_arg = gimple_phi_arg_def (phi, i);
687 if (!phi_arg)
689 *res = false;
690 /* Do not need to traverse further. */
691 return false;
695 return true;
698 /* Return 1 if all phi nodes created for bounds have their
699 arguments computed. */
700 static bool
701 chkp_may_finish_incomplete_bounds (void)
703 bool res = true;
705 chkp_incomplete_bounds_map
706 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
708 return res;
711 /* Helper function for chkp_finish_incomplete_bounds.
712 Recompute args for bounds phi node. */
713 bool
714 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
715 void *res ATTRIBUTE_UNUSED)
717 tree ptr = *slot;
718 gphi *bounds_phi;
719 gphi *ptr_phi;
720 unsigned i;
722 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
723 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
725 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
726 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
728 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
730 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
731 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
733 add_phi_arg (bounds_phi, bound_arg,
734 gimple_phi_arg_edge (ptr_phi, i),
735 UNKNOWN_LOCATION);
738 return true;
741 /* Mark BOUNDS as invalid. */
742 static void
743 chkp_mark_invalid_bounds (tree bounds)
745 chkp_invalid_bounds->add (bounds);
747 if (dump_file && (dump_flags & TDF_DETAILS))
749 fprintf (dump_file, "Marked bounds ");
750 print_generic_expr (dump_file, bounds, 0);
751 fprintf (dump_file, " as invalid\n");
755 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
756 static bool
757 chkp_valid_bounds (tree bounds)
759 if (bounds == zero_bounds || bounds == none_bounds)
760 return false;
762 return !chkp_invalid_bounds->contains (bounds);
765 /* Helper function for chkp_finish_incomplete_bounds.
766 Check all arguments of phi nodes trying to find
767 valid completed bounds. If there is at least one
768 such arg then bounds produced by phi node are marked
769 as valid completed bounds and all phi args are
770 recomputed. */
771 bool
772 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
774 gimple phi;
775 unsigned i;
777 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
779 if (chkp_completed_bounds (bounds))
780 return true;
782 phi = SSA_NAME_DEF_STMT (bounds);
784 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
786 for (i = 0; i < gimple_phi_num_args (phi); i++)
788 tree phi_arg = gimple_phi_arg_def (phi, i);
790 gcc_assert (phi_arg);
792 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
794 *res = true;
795 chkp_mark_completed_bounds (bounds);
796 chkp_recompute_phi_bounds (bounds, slot, NULL);
797 return true;
801 return true;
804 /* Helper function for chkp_finish_incomplete_bounds.
805 Mark all incomplete bounds as invalid. */
806 bool
807 chkp_mark_invalid_bounds_walker (tree const &bounds,
808 tree *slot ATTRIBUTE_UNUSED,
809 void *res ATTRIBUTE_UNUSED)
811 if (!chkp_completed_bounds (bounds))
813 chkp_mark_invalid_bounds (bounds);
814 chkp_mark_completed_bounds (bounds);
816 return true;
819 /* When all bound phi nodes have all their args computed
820 we have enough info to find valid bounds. We iterate
821 through all incomplete bounds searching for valid
822 bounds.  Found valid bounds are marked as completed
823 and all remaining incomplete bounds are recomputed.
824 The process continues until no new valid bounds may be
825 found.  All remaining incomplete bounds are then marked as
826 invalid (i.e. they have no valid source of bounds). */
827 static void
828 chkp_finish_incomplete_bounds (void)
830 bool found_valid = true;
832 while (found_valid)
834 found_valid = false;
836 chkp_incomplete_bounds_map->
837 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
839 if (found_valid)
840 chkp_incomplete_bounds_map->
841 traverse<void *, chkp_recompute_phi_bounds> (NULL);
844 chkp_incomplete_bounds_map->
845 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
846 chkp_incomplete_bounds_map->
847 traverse<void *, chkp_recompute_phi_bounds> (NULL);
849 chkp_erase_completed_bounds ();
850 chkp_erase_incomplete_bounds ();
853 /* Return 1 if type TYPE is a pointer type or a
854 structure or array having a pointer type as one of its
855 fields or elements.  Otherwise return 0. */
856 bool
857 chkp_type_has_pointer (const_tree type)
859 bool res = false;
861 if (BOUNDED_TYPE_P (type))
862 res = true;
863 else if (RECORD_OR_UNION_TYPE_P (type))
865 tree field;
867 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
868 if (TREE_CODE (field) == FIELD_DECL)
869 res = res || chkp_type_has_pointer (TREE_TYPE (field));
871 else if (TREE_CODE (type) == ARRAY_TYPE)
872 res = chkp_type_has_pointer (TREE_TYPE (type));
874 return res;
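/* Return the number of bounds needed to pass a value of type TYPE:
   0 if TYPE is NULL or contains no pointers, 1 for a pointer type and
   the number of occupied bound slots for a record or union type.  */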
877 unsigned
878 chkp_type_bounds_count (const_tree type)
880 unsigned res = 0;
882 if (!type)
883 res = 0;
884 else if (BOUNDED_TYPE_P (type))
885 res = 1;
886 else if (RECORD_OR_UNION_TYPE_P (type))
888 bitmap have_bound;
890 bitmap_obstack_initialize (NULL);
891 have_bound = BITMAP_ALLOC (NULL);
892 chkp_find_bound_slots (type, have_bound);
893 res = bitmap_count_bits (have_bound);
894 BITMAP_FREE (have_bound);
895 bitmap_obstack_release (NULL);
898 return res;
901 /* Get bounds associated with NODE via
902 chkp_set_bounds call. */
903 tree
904 chkp_get_bounds (tree node)
906 tree *slot;
908 if (!chkp_bounds_map)
909 return NULL_TREE;
911 slot = chkp_bounds_map->get (node);
912 return slot ? *slot : NULL_TREE;
915 /* Associate bounds VAL with NODE. */
916 void
917 chkp_set_bounds (tree node, tree val)
919 if (!chkp_bounds_map)
920 chkp_bounds_map = new hash_map<tree, tree>;
922 chkp_bounds_map->put (node, val);
925 /* Check if statically initialized variable VAR requires
926 static bounds initialization.  If VAR is added into the
927 bounds initialization list then 1 is returned.  Otherwise
928 return 0. */
929 extern bool
930 chkp_register_var_initializer (tree var)
932 if (!flag_check_pointer_bounds
933 || DECL_INITIAL (var) == error_mark_node)
934 return false;
936 gcc_assert (TREE_CODE (var) == VAR_DECL);
937 gcc_assert (DECL_INITIAL (var));
939 if (TREE_STATIC (var)
940 && chkp_type_has_pointer (TREE_TYPE (var)))
942 varpool_node::get_create (var)->need_bounds_init = 1;
943 return true;
946 return false;
949 /* Helper function for chkp_finish_file.
951 Add a new modification statement (RHS is assigned to LHS)
952 to the list of static initializer statements (passed in ARG).
953 If the statements list becomes too big, emit a checker constructor
954 and start a new one. */
955 static void
956 chkp_add_modification_to_stmt_list (tree lhs,
957 tree rhs,
958 void *arg)
960 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
961 tree modify;
963 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
964 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
966 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
967 append_to_statement_list (modify, &stmts->stmts);
969 stmts->avail--;
972 /* Build and return ADDR_EXPR for specified object OBJ. */
973 static tree
974 chkp_build_addr_expr (tree obj)
976 return TREE_CODE (obj) == TARGET_MEM_REF
977 ? tree_mem_ref_addr (ptr_type_node, obj)
978 : build_fold_addr_expr (obj);
981 /* Helper function for chkp_finish_file.
982 Add initialization of bound variable BND_VAR with bounds of
983 variable VAR to statements list STMTS.  If the statements list
984 becomes too big, emit a checker constructor and start a new one. */
985 static void
986 chkp_output_static_bounds (tree bnd_var, tree var,
987 struct chkp_ctor_stmt_list *stmts)
989 tree lb, ub, size;
991 if (TREE_CODE (var) == STRING_CST)
993 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
994 size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
996 else if (DECL_SIZE (var)
997 && !chkp_variable_size_type (TREE_TYPE (var)))
999 /* Compute bounds using statically known size. */
1000 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1001 size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
1003 else
1005 /* Compute bounds using dynamic size. */
1006 tree call;
1008 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1009 call = build1 (ADDR_EXPR,
1010 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
1011 chkp_sizeof_fndecl);
1012 size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
1013 call, 1, var);
1015 if (flag_chkp_zero_dynamic_size_as_infinite)
1017 tree max_size, cond;
1019 max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
1020 cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
1021 size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
1024 size = size_binop (MINUS_EXPR, size, size_one_node);
1027 ub = size_binop (PLUS_EXPR, lb, size);
1028 stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
1029 &stmts->stmts);
1030 if (stmts->avail <= 0)
1032 cgraph_build_static_cdtor ('B', stmts->stmts,
1033 MAX_RESERVED_INIT_PRIORITY + 2);
1034 stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
1035 stmts->stmts = NULL;
1039 /* Return the entry block to be used for checker initialization code.
1040 Create a new block if required. */
1041 static basic_block
1042 chkp_get_entry_block (void)
1044 if (!entry_block)
1045 entry_block = split_block (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL)->dest;
1047 return entry_block;
1050 /* Return a bounds var to be used for pointer var PTR_VAR. */
1051 static tree
1052 chkp_get_bounds_var (tree ptr_var)
1054 tree bnd_var;
1055 tree *slot;
1057 slot = chkp_bound_vars->get (ptr_var);
1058 if (slot)
1059 bnd_var = *slot;
1060 else
1062 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1063 CHKP_BOUND_TMP_NAME);
1064 chkp_bound_vars->put (ptr_var, bnd_var);
1067 return bnd_var;
1072 /* Register bounds BND for object PTR in global bounds table.
1073 A copy of bounds may be created for abnormal ssa names.
1074 Returns bounds to use for PTR. */
1075 static tree
1076 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1078 bool abnormal_ptr;
1080 if (!chkp_reg_bounds)
1081 return bnd;
1083 /* Do nothing if bounds are incomplete_bounds
1084 because it means bounds will be recomputed. */
1085 if (bnd == incomplete_bounds)
1086 return bnd;
1088 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1089 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1090 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1092 /* A single bounds value may be reused multiple times for
1093 different pointer values. It may cause coalescing issues
1094 for abnormal SSA names. To avoid it we create a bounds
1095 copy in case it is computed for abnormal SSA name.
1097 We also cannot reuse such created copies for other pointers. */
1098 if (abnormal_ptr
1099 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1101 tree bnd_var = NULL_TREE;
1103 if (abnormal_ptr)
1105 if (SSA_NAME_VAR (ptr))
1106 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1108 else
1109 bnd_var = chkp_get_tmp_var ();
1111 /* For abnormal copies we may just find original
1112 bounds and use them. */
1113 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1115 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1116 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1117 bnd = gimple_assign_rhs1 (bnd_def);
1119 /* For undefined values we usually use none bounds
1120 value but in case of abnormal edge it may cause
1121 coalescing failures. Use default definition of
1122 bounds variable instead to avoid it. */
1123 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1124 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1126 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1128 if (dump_file && (dump_flags & TDF_DETAILS))
1130 fprintf (dump_file, "Using default def bounds ");
1131 print_generic_expr (dump_file, bnd, 0);
1132 fprintf (dump_file, " for abnormal default def SSA name ");
1133 print_generic_expr (dump_file, ptr, 0);
1134 fprintf (dump_file, "\n");
1137 else
1139 tree copy;
1140 gimple def = SSA_NAME_DEF_STMT (ptr);
1141 gimple assign;
1142 gimple_stmt_iterator gsi;
1144 if (bnd_var)
1145 copy = make_ssa_name (bnd_var, gimple_build_nop ());
1146 else
1147 copy = make_temp_ssa_name (pointer_bounds_type_node,
1148 gimple_build_nop (),
1149 CHKP_BOUND_TMP_NAME);
1150 assign = gimple_build_assign (copy, bnd);
1152 if (dump_file && (dump_flags & TDF_DETAILS))
1154 fprintf (dump_file, "Creating a copy of bounds ");
1155 print_generic_expr (dump_file, bnd, 0);
1156 fprintf (dump_file, " for abnormal SSA name ");
1157 print_generic_expr (dump_file, ptr, 0);
1158 fprintf (dump_file, "\n");
1161 if (gimple_code (def) == GIMPLE_NOP)
1163 gsi = gsi_last_bb (chkp_get_entry_block ());
1164 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1165 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1166 else
1167 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1169 else
1171 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1172 /* Sometimes (e.g. when we load a pointer from
1173 memory) bounds are produced later than the pointer.
1174 We need to insert the bounds copy appropriately. */
1175 if (gimple_code (bnd_def) != GIMPLE_NOP
1176 && stmt_dominates_stmt_p (def, bnd_def))
1177 gsi = gsi_for_stmt (bnd_def);
1178 else
1179 gsi = gsi_for_stmt (def);
1180 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1183 bnd = copy;
1186 if (abnormal_ptr)
1187 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1190 chkp_reg_bounds->put (ptr, bnd);
1192 if (dump_file && (dump_flags & TDF_DETAILS))
1194 fprintf (dump_file, "Registered bound ");
1195 print_generic_expr (dump_file, bnd, 0);
1196 fprintf (dump_file, " for pointer ");
1197 print_generic_expr (dump_file, ptr, 0);
1198 fprintf (dump_file, "\n");
1201 return bnd;
1204 /* Get bounds registered for object PTR in global bounds table. */
1205 static tree
1206 chkp_get_registered_bounds (tree ptr)
1208 tree *slot;
1210 if (!chkp_reg_bounds)
1211 return NULL_TREE;
1213 slot = chkp_reg_bounds->get (ptr);
1214 return slot ? *slot : NULL_TREE;
1217 /* Add bound retvals to return statement pointed by GSI. */
1219 static void
1220 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1222 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1223 tree retval = gimple_return_retval (ret);
1224 tree ret_decl = DECL_RESULT (cfun->decl);
1225 tree bounds;
1227 if (!retval)
1228 return;
1230 if (BOUNDED_P (ret_decl))
1232 bounds = chkp_find_bounds (retval, gsi);
1233 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1234 gimple_return_set_retbnd (ret, bounds);
1237 update_stmt (ret);
1240 /* Force OP to be suitable for using as an argument for call.
1241 New statements (if any) go to SEQ. */
1242 static tree
1243 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1245 gimple_seq stmts;
1246 gimple_stmt_iterator si;
1248 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1250 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1251 chkp_mark_stmt (gsi_stmt (si));
1253 gimple_seq_add_seq (seq, stmts);
1255 return op;
1258 /* Generate lower bound check for memory access by ADDR.
1259 Check is inserted before the position pointed by ITER.
1260 DIRFLAG indicates whether memory access is load or store. */
1261 static void
1262 chkp_check_lower (tree addr, tree bounds,
1263 gimple_stmt_iterator iter,
1264 location_t location,
1265 tree dirflag)
1267 gimple_seq seq;
1268 gimple check;
1269 tree node;
1271 if (bounds == chkp_get_zero_bounds ())
1272 return;
1274 if (dirflag == integer_zero_node
1275 && !flag_chkp_check_read)
1276 return;
1278 if (dirflag == integer_one_node
1279 && !flag_chkp_check_write)
1280 return;
1282 seq = NULL;
1284 node = chkp_force_gimple_call_op (addr, &seq);
1286 check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1287 chkp_mark_stmt (check);
1288 gimple_call_set_with_bounds (check, true);
1289 gimple_set_location (check, location);
1290 gimple_seq_add_stmt (&seq, check);
1292 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1294 if (dump_file && (dump_flags & TDF_DETAILS))
1296 gimple before = gsi_stmt (iter);
1297 fprintf (dump_file, "Generated lower bound check for statement ");
1298 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1299 fprintf (dump_file, " ");
1300 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1304 /* Generate upper bound check for memory access by ADDR.
1305 Check is inserted before the position pointed by ITER.
1306 DIRFLAG indicates whether memory access is load or store. */
1307 static void
1308 chkp_check_upper (tree addr, tree bounds,
1309 gimple_stmt_iterator iter,
1310 location_t location,
1311 tree dirflag)
1313 gimple_seq seq;
1314 gimple check;
1315 tree node;
1317 if (bounds == chkp_get_zero_bounds ())
1318 return;
1320 if (dirflag == integer_zero_node
1321 && !flag_chkp_check_read)
1322 return;
1324 if (dirflag == integer_one_node
1325 && !flag_chkp_check_write)
1326 return;
1328 seq = NULL;
1330 node = chkp_force_gimple_call_op (addr, &seq);
1332 check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1333 chkp_mark_stmt (check);
1334 gimple_call_set_with_bounds (check, true);
1335 gimple_set_location (check, location);
1336 gimple_seq_add_stmt (&seq, check);
1338 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1340 if (dump_file && (dump_flags & TDF_DETAILS))
1342 gimple before = gsi_stmt (iter);
1343 fprintf (dump_file, "Generated upper bound check for statement ");
1344 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1345 fprintf (dump_file, " ");
1346 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1350 /* Generate lower and upper bound checks for memory access
1351 to memory slot [FIRST, LAST] against BOUNDS.  Checks
1352 are inserted before the position pointed by ITER.
1353 DIRFLAG indicates whether memory access is load or store. */
1354 void
1355 chkp_check_mem_access (tree first, tree last, tree bounds,
1356 gimple_stmt_iterator iter,
1357 location_t location,
1358 tree dirflag)
1360 chkp_check_lower (first, bounds, iter, location, dirflag);
1361 chkp_check_upper (last, bounds, iter, location, dirflag);
1364 /* Replace call to _bnd_chk_* pointed by GSI with
1365 bndcu and bndcl calls. DIRFLAG determines whether
1366 check is for read or write. */
1368 void
1369 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1370 tree dirflag)
1372 gimple_stmt_iterator call_iter = *gsi;
1373 gimple call = gsi_stmt (*gsi);
1374 tree fndecl = gimple_call_fndecl (call);
1375 tree addr = gimple_call_arg (call, 0);
1376 tree bounds = chkp_find_bounds (addr, gsi);
1378 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1379 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1380 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1382 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1383 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1385 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1387 tree size = gimple_call_arg (call, 1);
1388 addr = fold_build_pointer_plus (addr, size);
1389 addr = fold_build_pointer_plus_hwi (addr, -1);
1390 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1393 gsi_remove (&call_iter, true);
1396 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1397 corresponding bounds extract call. */
1399 void
1400 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1402 gimple call = gsi_stmt (*gsi);
1403 tree fndecl = gimple_call_fndecl (call);
1404 tree addr = gimple_call_arg (call, 0);
1405 tree bounds = chkp_find_bounds (addr, gsi);
1406 gimple extract;
1408 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1409 fndecl = chkp_extract_lower_fndecl;
1410 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1411 fndecl = chkp_extract_upper_fndecl;
1412 else
1413 gcc_unreachable ();
1415 extract = gimple_build_call (fndecl, 1, bounds);
1416 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1417 chkp_mark_stmt (extract);
1419 gsi_replace (gsi, extract, false);
1422 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1423 static tree
1424 chkp_build_component_ref (tree obj, tree field)
1426 tree res;
1428 /* If the object is a TMR then we do not use a component_ref but
1429 add an offset instead.  We need this to be able to get the address
1430 of the result later. */
1431 if (TREE_CODE (obj) == TARGET_MEM_REF)
1433 tree offs = TMR_OFFSET (obj);
1434 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1435 offs, DECL_FIELD_OFFSET (field));
1437 gcc_assert (offs);
1439 res = copy_node (obj);
1440 TREE_TYPE (res) = TREE_TYPE (field);
1441 TMR_OFFSET (res) = offs;
1443 else
1444 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1446 return res;
1449 /* Return ARRAY_REF for array ARR and index IDX with
1450 specified element type ETYPE and element size ESIZE. */
1451 static tree
1452 chkp_build_array_ref (tree arr, tree etype, tree esize,
1453 unsigned HOST_WIDE_INT idx)
1455 tree index = build_int_cst (size_type_node, idx);
1456 tree res;
1458 /* If the object is a TMR then we do not use an array_ref but
1459 add an offset instead.  We need this to be able to get the address
1460 of the result later. */
1461 if (TREE_CODE (arr) == TARGET_MEM_REF)
1463 tree offs = TMR_OFFSET (arr);
1465 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1466 esize, index);
1467 gcc_assert(esize);
1469 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1470 offs, esize);
1471 gcc_assert (offs);
1473 res = copy_node (arr);
1474 TREE_TYPE (res) = etype;
1475 TMR_OFFSET (res) = offs;
1477 else
1478 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1480 return res;
1483 /* Helper function for chkp_add_bounds_to_call_stmt.
1484 Fill ALL_BOUNDS output array with created bounds.
1486 OFFS is used for recursive calls and holds the
1487 offset of TYPE in the outer structure, in bits.
1489 ITER points to a position where bounds are searched for.
1491 ALL_BOUNDS[i] is filled with elem bounds if there
1492 is a field in TYPE which has pointer type and offset
1493 equal to i * POINTER_SIZE in bits. */
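/* For example (an illustrative case assuming 64-bit pointers), for an
   argument of type 'struct { int *p; long l; char *q; }' the slots
   ALL_BOUNDS[0] and ALL_BOUNDS[2] would be filled with the bounds of
   the 'p' and 'q' fields respectively.  */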
1494 static void
1495 chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
1496 HOST_WIDE_INT offs,
1497 gimple_stmt_iterator *iter)
1499 tree type = TREE_TYPE (elem);
1501 if (BOUNDED_TYPE_P (type))
1503 if (!all_bounds[offs / POINTER_SIZE])
1505 tree temp = make_temp_ssa_name (type, gimple_build_nop (), "");
1506 gimple assign = gimple_build_assign (temp, elem);
1507 gimple_stmt_iterator gsi;
1509 gsi_insert_before (iter, assign, GSI_SAME_STMT);
1510 gsi = gsi_for_stmt (assign);
1512 all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
1515 else if (RECORD_OR_UNION_TYPE_P (type))
1517 tree field;
1519 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1520 if (TREE_CODE (field) == FIELD_DECL)
1522 tree base = unshare_expr (elem);
1523 tree field_ref = chkp_build_component_ref (base, field);
1524 HOST_WIDE_INT field_offs
1525 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1526 if (DECL_FIELD_OFFSET (field))
1527 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1529 chkp_find_bounds_for_elem (field_ref, all_bounds,
1530 offs + field_offs, iter);
1533 else if (TREE_CODE (type) == ARRAY_TYPE)
1535 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1536 tree etype = TREE_TYPE (type);
1537 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1538 unsigned HOST_WIDE_INT cur;
1540 if (!maxval || integer_minus_onep (maxval))
1541 return;
1543 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1545 tree base = unshare_expr (elem);
1546 tree arr_elem = chkp_build_array_ref (base, etype,
1547 TYPE_SIZE (etype),
1548 cur);
1549 chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
1550 iter);
1555 /* Fill HAVE_BOUND output bitmap with information about
1556 bounds required for an object of type TYPE.
1558 OFFS is used for recursive calls and holds the
1559 offset of TYPE in the outer structure, in bits.
1561 HAVE_BOUND[i] is set to 1 if there is a field
1562 in TYPE which has pointer type and offset
1563 equal to i * POINTER_SIZE - OFFS in bits. */
1564 void
1565 chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
1566 HOST_WIDE_INT offs)
1568 if (BOUNDED_TYPE_P (type))
1569 bitmap_set_bit (have_bound, offs / POINTER_SIZE);
1570 else if (RECORD_OR_UNION_TYPE_P (type))
1572 tree field;
1574 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1575 if (TREE_CODE (field) == FIELD_DECL)
1577 HOST_WIDE_INT field_offs
1578 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1579 if (DECL_FIELD_OFFSET (field))
1580 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1581 chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
1582 offs + field_offs);
1585 else if (TREE_CODE (type) == ARRAY_TYPE)
1587 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1588 tree etype = TREE_TYPE (type);
1589 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1590 unsigned HOST_WIDE_INT cur;
1592 if (!maxval
1593 || TREE_CODE (maxval) != INTEGER_CST
1594 || integer_minus_onep (maxval))
1595 return;
1597 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1598 chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
1602 /* Fill bitmap RES with information about bounds for
1603 type TYPE. See chkp_find_bound_slots_1 for more
1604 details. */
1605 void
1606 chkp_find_bound_slots (const_tree type, bitmap res)
1608 bitmap_clear (res);
1609 chkp_find_bound_slots_1 (type, res, 0);
1612 /* Return 1 if call to FNDECL should be instrumented
1613 and 0 otherwise. */
1615 static bool
1616 chkp_instrument_normal_builtin (tree fndecl)
1618 switch (DECL_FUNCTION_CODE (fndecl))
1620 case BUILT_IN_STRLEN:
1621 case BUILT_IN_STRCPY:
1622 case BUILT_IN_STRNCPY:
1623 case BUILT_IN_STPCPY:
1624 case BUILT_IN_STPNCPY:
1625 case BUILT_IN_STRCAT:
1626 case BUILT_IN_STRNCAT:
1627 case BUILT_IN_MEMCPY:
1628 case BUILT_IN_MEMPCPY:
1629 case BUILT_IN_MEMSET:
1630 case BUILT_IN_MEMMOVE:
1631 case BUILT_IN_BZERO:
1632 case BUILT_IN_STRCMP:
1633 case BUILT_IN_STRNCMP:
1634 case BUILT_IN_BCMP:
1635 case BUILT_IN_MEMCMP:
1636 case BUILT_IN_MEMCPY_CHK:
1637 case BUILT_IN_MEMPCPY_CHK:
1638 case BUILT_IN_MEMMOVE_CHK:
1639 case BUILT_IN_MEMSET_CHK:
1640 case BUILT_IN_STRCPY_CHK:
1641 case BUILT_IN_STRNCPY_CHK:
1642 case BUILT_IN_STPCPY_CHK:
1643 case BUILT_IN_STPNCPY_CHK:
1644 case BUILT_IN_STRCAT_CHK:
1645 case BUILT_IN_STRNCAT_CHK:
1646 case BUILT_IN_MALLOC:
1647 case BUILT_IN_CALLOC:
1648 case BUILT_IN_REALLOC:
1649 return 1;
1651 default:
1652 return 0;
1656 /* Add bound arguments to call statement pointed by GSI.
1657 Also performs a replacement of user checker builtin calls
1658 with internal ones. */
1660 static void
1661 chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
1663 gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
1664 unsigned arg_no = 0;
1665 tree fndecl = gimple_call_fndecl (call);
1666 tree fntype;
1667 tree first_formal_arg;
1668 tree arg;
1669 bool use_fntype = false;
1670 tree op;
1671 ssa_op_iter iter;
1672 gcall *new_call;
1674 /* Do nothing for internal functions. */
1675 if (gimple_call_internal_p (call))
1676 return;
1678 fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));
1680 /* Do nothing if back-end builtin is called. */
1681 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
1682 return;
1684 /* Do nothing for some middle-end builtins. */
1685 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1686 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
1687 return;
1689 /* Do nothing for calls to non-instrumentable functions. */
1690 if (fndecl && !chkp_instrumentable_p (fndecl))
1691 return;
1693 /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
1694 and CHKP_COPY_PTR_BOUNDS. */
1695 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1696 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
1697 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
1698 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
1699 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
1700 return;
1702 /* Check user builtins are replaced with checks. */
1703 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1704 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1705 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
1706 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
1708 chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
1709 return;
1712 /* Check user builtins are replaced with bound extract. */
1713 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1714 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
1715 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
1717 chkp_replace_extract_builtin (gsi);
1718 return;
1721 /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
1722 target narrow bounds call. */
1723 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1724 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
1726 tree arg = gimple_call_arg (call, 1);
1727 tree bounds = chkp_find_bounds (arg, gsi);
1729 gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
1730 gimple_call_set_arg (call, 1, bounds);
1731 update_stmt (call);
1733 return;
1736 /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
1737 bndstx call. */
1738 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1739 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
1741 tree addr = gimple_call_arg (call, 0);
1742 tree ptr = gimple_call_arg (call, 1);
1743 tree bounds = chkp_find_bounds (ptr, gsi);
1744 gimple_stmt_iterator iter = gsi_for_stmt (call);
1746 chkp_build_bndstx (addr, ptr, bounds, gsi);
1747 gsi_remove (&iter, true);
1749 return;
1752 if (!flag_chkp_instrument_calls)
1753 return;
1755 /* We instrument only a subset of builtins.  We also instrument
1756 builtin calls that are going to be inlined. */
1757 if (fndecl
1758 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1759 && !chkp_instrument_normal_builtin (fndecl))
1761 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
1762 return;
1764 struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
1765 if (!clone
1766 || !gimple_has_body_p (clone->decl))
1767 return;
1770 /* If function decl is available then use it for
1771 formal arguments list. Otherwise use function type. */
1772 if (fndecl && DECL_ARGUMENTS (fndecl))
1773 first_formal_arg = DECL_ARGUMENTS (fndecl);
1774 else
1776 first_formal_arg = TYPE_ARG_TYPES (fntype);
1777 use_fntype = true;
1780 /* Fill vector of new call args. */
1781 vec<tree> new_args = vNULL;
1782 new_args.create (gimple_call_num_args (call));
1783 arg = first_formal_arg;
1784 for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
1786 tree call_arg = gimple_call_arg (call, arg_no);
1787 tree type;
1789 /* Get arg type using formal argument description
1790 or actual argument type. */
1791 if (arg)
1792 if (use_fntype)
1793 if (TREE_VALUE (arg) != void_type_node)
1795 type = TREE_VALUE (arg);
1796 arg = TREE_CHAIN (arg);
1798 else
1799 type = TREE_TYPE (call_arg);
1800 else
1802 type = TREE_TYPE (arg);
1803 arg = TREE_CHAIN (arg);
1805 else
1806 type = TREE_TYPE (call_arg);
1808 new_args.safe_push (call_arg);
1810 if (BOUNDED_TYPE_P (type)
1811 || pass_by_reference (NULL, TYPE_MODE (type), type, true))
1812 new_args.safe_push (chkp_find_bounds (call_arg, gsi));
1813 else if (chkp_type_has_pointer (type))
1815 HOST_WIDE_INT max_bounds
1816 = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
1817 tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
1818 HOST_WIDE_INT bnd_no;
1820 memset (all_bounds, 0, sizeof (tree) * max_bounds);
1822 chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);
1824 for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
1825 if (all_bounds[bnd_no])
1826 new_args.safe_push (all_bounds[bnd_no]);
1828 free (all_bounds);
1832 if (new_args.length () == gimple_call_num_args (call))
1833 new_call = call;
1834 else
1836 new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
1837 gimple_call_set_lhs (new_call, gimple_call_lhs (call));
1838 gimple_call_copy_flags (new_call, call);
1840 new_args.release ();
1842 /* For direct calls fndecl is replaced with instrumented version. */
1843 if (fndecl)
1845 tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
1846 gimple_call_set_fndecl (new_call, new_decl);
1847 gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
1849 /* For an indirect call we should fix the function pointer type if
1850 we pass some bounds. */
1851 else if (new_call != call)
1853 tree type = gimple_call_fntype (call);
1854 type = chkp_copy_function_type_adding_bounds (type);
1855 gimple_call_set_fntype (new_call, type);
1858 /* Replace the old call statement with the new one. */
1859 if (call != new_call)
1861 FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
1863 SSA_NAME_DEF_STMT (op) = new_call;
1865 gsi_replace (gsi, new_call, true);
1867 else
1868 update_stmt (new_call);
1870 gimple_call_set_with_bounds (new_call, true);
1873 /* Return constant static bounds var with specified LB and UB
1874 if such var exists in varpool. Return NULL otherwise. */
1875 static tree
1876 chkp_find_const_bounds_var (HOST_WIDE_INT lb,
1877 HOST_WIDE_INT ub)
1879 tree val = targetm.chkp_make_bounds_constant (lb, ub);
1880 struct varpool_node *node;
1882 /* We expect the bounds constant to be represented as a complex value
1883 of two pointer-sized integers. */
1884 gcc_assert (TREE_CODE (val) == COMPLEX_CST);
1886 FOR_EACH_VARIABLE (node)
1887 if (POINTER_BOUNDS_P (node->decl)
1888 && TREE_READONLY (node->decl)
1889 && DECL_INITIAL (node->decl)
1890 && TREE_CODE (DECL_INITIAL (node->decl)) == COMPLEX_CST
1891 && tree_int_cst_equal (TREE_REALPART (DECL_INITIAL (node->decl)),
1892 TREE_REALPART (val))
1893 && tree_int_cst_equal (TREE_IMAGPART (DECL_INITIAL (node->decl)),
1894 TREE_IMAGPART (val)))
1895 return node->decl;
1897 return NULL;
1900 /* Return constant static bounds var with specified bounds LB and UB.
1901 If such a var does not exist then a new var is created with the specified NAME. */
1902 static tree
1903 chkp_make_static_const_bounds (HOST_WIDE_INT lb,
1904 HOST_WIDE_INT ub,
1905 const char *name)
1907 tree var;
1909 /* With LTO we may have constant bounds already in varpool.
1910 Try to find it. */
1911 var = chkp_find_const_bounds_var (lb, ub);
1913 if (var)
1914 return var;
1916 var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
1917 get_identifier (name), pointer_bounds_type_node);
1919 TREE_PUBLIC (var) = 1;
1920 TREE_USED (var) = 1;
1921 TREE_READONLY (var) = 1;
1922 TREE_STATIC (var) = 1;
1923 TREE_ADDRESSABLE (var) = 0;
1924 DECL_ARTIFICIAL (var) = 1;
1925 DECL_READ_P (var) = 1;
1926 /* We may use this symbol during ctors generation in chkp_finish_file
1927 when all symbols are emitted. Force output to avoid undefined
1928 symbols in ctors. */
1929 if (!in_lto_p)
1931 DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
1932 DECL_COMDAT (var) = 1;
1933 varpool_node::get_create (var)->set_comdat_group (DECL_ASSEMBLER_NAME (var));
1934 varpool_node::get_create (var)->force_output = 1;
1936 else
1937 DECL_EXTERNAL (var) = 1;
1938 varpool_node::finalize_decl (var);
1940 return var;
1943 /* Generate code to make bounds with the specified lower bound LB and SIZE.
1944 If AFTER is 1 then the code is inserted after the position pointed to by ITER,
1945 otherwise the code is inserted before the position pointed to by ITER.
1946 If ITER is NULL then the code is added to the entry block. */
1947 static tree
1948 chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
1950 gimple_seq seq;
1951 gimple_stmt_iterator gsi;
1952 gimple stmt;
1953 tree bounds;
1955 if (iter)
1956 gsi = *iter;
1957 else
1958 gsi = gsi_start_bb (chkp_get_entry_block ());
1960 seq = NULL;
1962 lb = chkp_force_gimple_call_op (lb, &seq);
1963 size = chkp_force_gimple_call_op (size, &seq);
1965 stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
1966 chkp_mark_stmt (stmt);
1968 bounds = chkp_get_tmp_reg (stmt);
1969 gimple_call_set_lhs (stmt, bounds);
1971 gimple_seq_add_stmt (&seq, stmt);
1973 if (iter && after)
1974 gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
1975 else
1976 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
1978 if (dump_file && (dump_flags & TDF_DETAILS))
1980 fprintf (dump_file, "Made bounds: ");
1981 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
1982 if (iter)
1984 fprintf (dump_file, " inserted before statement: ");
1985 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
1987 else
1988 fprintf (dump_file, " at function entry\n");
1991 /* update_stmt (stmt); */
1993 return bounds;
1996 /* Return var holding zero bounds. */
1997 tree
1998 chkp_get_zero_bounds_var (void)
2000 if (!chkp_zero_bounds_var)
2002 tree id = get_identifier (CHKP_ZERO_BOUNDS_VAR_NAME);
2003 symtab_node *node = symtab_node::get_for_asmname (id);
2004 if (node)
2005 chkp_zero_bounds_var = node->decl;
2008 if (!chkp_zero_bounds_var)
2009 chkp_zero_bounds_var
2010 = chkp_make_static_const_bounds (0, -1,
2011 CHKP_ZERO_BOUNDS_VAR_NAME);
2012 return chkp_zero_bounds_var;
2015 /* Return var holding none bounds. */
2016 tree
2017 chkp_get_none_bounds_var (void)
2019 if (!chkp_none_bounds_var)
2021 tree id = get_identifier (CHKP_NONE_BOUNDS_VAR_NAME);
2022 symtab_node *node = symtab_node::get_for_asmname (id);
2023 if (node)
2024 chkp_none_bounds_var = node->decl;
2027 if (!chkp_none_bounds_var)
2028 chkp_none_bounds_var
2029 = chkp_make_static_const_bounds (-1, 0,
2030 CHKP_NONE_BOUNDS_VAR_NAME);
2031 return chkp_none_bounds_var;
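/* An informal note on the two special bounds values used above: zero
   bounds [0, -1] cover the whole address space and effectively disable
   checks for a pointer, while none bounds [-1, 0] describe an empty
   range, so any access through a pointer with none bounds fails its
   check. When the static const vars cannot be used, the dynamic paths
   below build the same values with bndmk; e.g. none bounds come from
   lower bound -1 and size 2, which wraps around to upper bound 0. */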
2034 /* Return SSA_NAME used to represent zero bounds. */
2035 static tree
2036 chkp_get_zero_bounds (void)
2038 if (zero_bounds)
2039 return zero_bounds;
2041 if (dump_file && (dump_flags & TDF_DETAILS))
2042 fprintf (dump_file, "Creating zero bounds...");
2044 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2045 || flag_chkp_use_static_const_bounds > 0)
2047 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2048 gimple stmt;
2050 zero_bounds = chkp_get_tmp_reg (gimple_build_nop ());
2051 stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
2052 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2054 else
2055 zero_bounds = chkp_make_bounds (integer_zero_node,
2056 integer_zero_node,
2057 NULL,
2058 false);
2060 return zero_bounds;
2063 /* Return SSA_NAME used to represent none bounds. */
2064 static tree
2065 chkp_get_none_bounds (void)
2067 if (none_bounds)
2068 return none_bounds;
2070 if (dump_file && (dump_flags & TDF_DETAILS))
2071 fprintf (dump_file, "Creating none bounds...");
2074 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2075 || flag_chkp_use_static_const_bounds > 0)
2077 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2078 gimple stmt;
2080 none_bounds = chkp_get_tmp_reg (gimple_build_nop ());
2081 stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
2082 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2084 else
2085 none_bounds = chkp_make_bounds (integer_minus_one_node,
2086 build_int_cst (size_type_node, 2),
2087 NULL,
2088 false);
2090 return none_bounds;
2093 /* Return bounds to be used as the result of an operation which
2094 should not create a pointer (e.g. MULT_EXPR). */
2095 static tree
2096 chkp_get_invalid_op_bounds (void)
2098 return chkp_get_zero_bounds ();
2101 /* Return bounds to be used for loads of non-pointer values. */
2102 static tree
2103 chkp_get_nonpointer_load_bounds (void)
2105 return chkp_get_zero_bounds ();
2108 /* Return 1 if we may use a bndret call to get bounds for the pointer
2109 returned by CALL. */
2110 static bool
2111 chkp_call_returns_bounds_p (gcall *call)
2113 if (gimple_call_internal_p (call))
2114 return false;
2116 if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
2117 || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
2118 return true;
2120 if (gimple_call_with_bounds_p (call))
2121 return true;
2123 tree fndecl = gimple_call_fndecl (call);
2125 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
2126 return false;
2128 if (fndecl && !chkp_instrumentable_p (fndecl))
2129 return false;
2131 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2133 if (chkp_instrument_normal_builtin (fndecl))
2134 return true;
2136 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
2137 return false;
2139 struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
2140 return (clone && gimple_has_body_p (clone->decl));
2143 return true;
2146 /* Build bounds returned by CALL. */
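/* For example (a sketch; identifiers are invented): for an instrumented
   call

     p_1 = foo (x_2);

   the general case below emits, right after the call,

     __bound_tmp.N_3 = __builtin___chkp_bndret (p_1);

   while alloca, the chkp builtins and calls returning one of their own
   arguments are handled by the special cases first. */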
2147 static tree
2148 chkp_build_returned_bound (gcall *call)
2150 gimple_stmt_iterator gsi;
2151 tree bounds;
2152 gimple stmt;
2153 tree fndecl = gimple_call_fndecl (call);
2155 /* To avoid fixing alloca expansion in targets we handle
2156 it separately. */
2157 if (fndecl
2158 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2159 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
2160 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2162 tree size = gimple_call_arg (call, 0);
2163 tree lb = gimple_call_lhs (call);
2164 gimple_stmt_iterator iter = gsi_for_stmt (call);
2165 bounds = chkp_make_bounds (lb, size, &iter, true);
2167 /* We know bounds returned by set_bounds builtin call. */
2168 else if (fndecl
2169 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2170 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
2172 tree lb = gimple_call_arg (call, 0);
2173 tree size = gimple_call_arg (call, 1);
2174 gimple_stmt_iterator iter = gsi_for_stmt (call);
2175 bounds = chkp_make_bounds (lb, size, &iter, true);
2177 /* Detect bounds initialization calls. */
2178 else if (fndecl
2179 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2180 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
2181 bounds = chkp_get_zero_bounds ();
2182 /* Detect bounds nullification calls. */
2183 else if (fndecl
2184 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2185 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
2186 bounds = chkp_get_none_bounds ();
2187 /* Detect bounds copy calls. */
2188 else if (fndecl
2189 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2190 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
2192 gimple_stmt_iterator iter = gsi_for_stmt (call);
2193 bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
2195 /* Do not use retbnd when the returned bounds are equal to some
2196 of the passed bounds. */
2197 else if (gimple_call_return_flags (call) & ERF_RETURNS_ARG)
2199 gimple_stmt_iterator iter = gsi_for_stmt (call);
2200 unsigned int retarg = 0, argno;
2201 if (gimple_call_return_flags (call) & ERF_RETURNS_ARG)
2202 retarg = gimple_call_return_flags (call) & ERF_RETURN_ARG_MASK;
2203 if (gimple_call_with_bounds_p (call))
2205 for (argno = 0; argno < gimple_call_num_args (call); argno++)
2206 if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
2208 if (retarg)
2209 retarg--;
2210 else
2211 break;
2214 else
2215 argno = retarg;
2217 bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
2219 else if (chkp_call_returns_bounds_p (call))
2221 gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);
2223 /* In the general case build a checker builtin call to
2224 obtain the returned bounds. */
2225 stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
2226 gimple_call_lhs (call));
2227 chkp_mark_stmt (stmt);
2229 gsi = gsi_for_stmt (call);
2230 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
2232 bounds = chkp_get_tmp_reg (stmt);
2233 gimple_call_set_lhs (stmt, bounds);
2235 update_stmt (stmt);
2237 else
2238 bounds = chkp_get_zero_bounds ();
2240 if (dump_file && (dump_flags & TDF_DETAILS))
2242 fprintf (dump_file, "Built returned bounds (");
2243 print_generic_expr (dump_file, bounds, 0);
2244 fprintf (dump_file, ") for call: ");
2245 print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
2248 bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);
2250 return bounds;
2253 /* Return the retbnd call which obtains bounds for SSA name VAL
2254 returned by a call. Return NULL if there is no such call. */
2255 gcall *
2256 chkp_retbnd_call_by_val (tree val)
2258 if (TREE_CODE (val) != SSA_NAME)
2259 return NULL;
2261 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2263 imm_use_iterator use_iter;
2264 use_operand_p use_p;
2265 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2266 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2267 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2268 return as_a <gcall *> (USE_STMT (use_p));
2270 return NULL;
2273 /* Check that the parameter next to the given PARM is a bounds parameter
2274 and return its default SSA_NAME (create it if required). */
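/* For illustration (a sketch of an instrumented signature; the type and
   parameter names here are illustrative):

     foo.chkp (int *p, __bounds_type bounds_of_p, int n)

   the bounds parameter immediately follows its pointer in the
   DECL_ARGUMENTS chain, which is why TREE_CHAIN (parm) is expected to
   be a POINTER_BOUNDS_P decl here. */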
2275 static tree
2276 chkp_get_next_bounds_parm (tree parm)
2278 tree bounds = TREE_CHAIN (parm);
2279 gcc_assert (POINTER_BOUNDS_P (bounds));
2280 bounds = ssa_default_def (cfun, bounds);
2281 if (!bounds)
2283 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2284 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2286 return bounds;
2289 /* Return bounds to be used for input argument PARM. */
2290 static tree
2291 chkp_get_bound_for_parm (tree parm)
2293 tree decl = SSA_NAME_VAR (parm);
2294 tree bounds;
2296 gcc_assert (TREE_CODE (decl) == PARM_DECL);
2298 bounds = chkp_get_registered_bounds (parm);
2300 if (!bounds)
2301 bounds = chkp_get_registered_bounds (decl);
2303 if (!bounds)
2305 tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;
2307 /* For static chain param we return zero bounds
2308 because currently we do not check dereferences
2309 of this pointer. */
2310 if (cfun->static_chain_decl == decl)
2311 bounds = chkp_get_zero_bounds ();
2312 /* If a non-instrumented runtime is used then it may be useful
2313 to use zero bounds for input arguments of the main
2314 function. */
2315 else if (flag_chkp_zero_input_bounds_for_main
2316 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
2317 "main") == 0)
2318 bounds = chkp_get_zero_bounds ();
2319 else if (BOUNDED_P (parm))
2321 bounds = chkp_get_next_bounds_parm (decl);
2322 bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);
2324 if (dump_file && (dump_flags & TDF_DETAILS))
2326 fprintf (dump_file, "Built arg bounds (");
2327 print_generic_expr (dump_file, bounds, 0);
2328 fprintf (dump_file, ") for arg: ");
2329 print_node (dump_file, "", decl, 0);
2332 else
2333 bounds = chkp_get_zero_bounds ();
2336 if (!chkp_get_registered_bounds (parm))
2337 bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);
2339 if (dump_file && (dump_flags & TDF_DETAILS))
2341 fprintf (dump_file, "Using bounds ");
2342 print_generic_expr (dump_file, bounds, 0);
2343 fprintf (dump_file, " for parm ");
2344 print_generic_expr (dump_file, parm, 0);
2345 fprintf (dump_file, " of type ");
2346 print_generic_expr (dump_file, TREE_TYPE (parm), 0);
2347 fprintf (dump_file, ".\n");
2350 return bounds;
2353 /* Build and return CALL_EXPR for bndldx builtin with specified
2354 arguments. */
2355 tree
2356 chkp_build_bndldx_call (tree addr, tree ptr)
2358 tree fn = build1 (ADDR_EXPR,
2359 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2360 chkp_bndldx_fndecl);
2361 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2362 fn, 2, addr, ptr);
2363 CALL_WITH_BOUNDS_P (call) = true;
2364 return call;
2367 /* Insert code to load bounds for PTR located by ADDR.
2368 Code is inserted after position pointed by GSI.
2369 Loaded bounds are returned. */
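/* For example (a sketch; names are invented): when a pointer is loaded
   from memory,

     p_1 = q;                  (q is a pointer stored in memory)

   its bounds are re-loaded from the bounds table by

     __bound_tmp.N_2 = __builtin___chkp_bndldx (&q, p_1);

   inserted after the load. */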
2370 static tree
2371 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2373 gimple_seq seq;
2374 gimple stmt;
2375 tree bounds;
2377 seq = NULL;
2379 addr = chkp_force_gimple_call_op (addr, &seq);
2380 ptr = chkp_force_gimple_call_op (ptr, &seq);
2382 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2383 chkp_mark_stmt (stmt);
2384 bounds = chkp_get_tmp_reg (stmt);
2385 gimple_call_set_lhs (stmt, bounds);
2387 gimple_seq_add_stmt (&seq, stmt);
2389 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2391 if (dump_file && (dump_flags & TDF_DETAILS))
2393 fprintf (dump_file, "Generated bndldx for pointer ");
2394 print_generic_expr (dump_file, ptr, 0);
2395 fprintf (dump_file, ": ");
2396 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2399 return bounds;
2402 /* Build and return CALL_EXPR for bndstx builtin with specified
2403 arguments. */
2404 tree
2405 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2407 tree fn = build1 (ADDR_EXPR,
2408 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2409 chkp_bndstx_fndecl);
2410 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2411 fn, 3, ptr, bounds, addr);
2412 CALL_WITH_BOUNDS_P (call) = true;
2413 return call;
2416 /* Insert code to store BOUNDS for PTR stored at ADDR.
2417 New statements are inserted after the position pointed
2418 to by GSI. */
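/* For example (a sketch; names are invented): a pointer store

     q = p_1;

   is followed by

     __builtin___chkp_bndstx (p_1, __bound_tmp.N_2, &q);

   (stored pointer, its bounds, address of the slot), so that a later
   load of q can recover the bounds via bndldx. */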
2419 void
2420 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2421 gimple_stmt_iterator *gsi)
2423 gimple_seq seq;
2424 gimple stmt;
2426 seq = NULL;
2428 addr = chkp_force_gimple_call_op (addr, &seq);
2429 ptr = chkp_force_gimple_call_op (ptr, &seq);
2431 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2432 chkp_mark_stmt (stmt);
2433 gimple_call_set_with_bounds (stmt, true);
2435 gimple_seq_add_stmt (&seq, stmt);
2437 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2439 if (dump_file && (dump_flags & TDF_DETAILS))
2441 fprintf (dump_file, "Generated bndstx for pointer store ");
2442 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2443 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2447 /* Compute bounds for pointer NODE which was assigned in
2448 assignment statement ASSIGN. Return computed bounds. */
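/* A few informal examples of the cases handled below (names invented):

     p_2 = q_1 + 16;           POINTER_PLUS_EXPR: bounds of q_1 are reused;
     p_2 = *r_3;               MEM_REF: bounds are loaded from the bounds table;
     i_2 = i_1 * 4;            MULT_EXPR: invalid-op (zero) bounds are used;
     p_2 = c_1 ? a_4 : b_5;    COND_EXPR: a COND_EXPR over both bounds is built. */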
2449 static tree
2450 chkp_compute_bounds_for_assignment (tree node, gimple assign)
2452 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
2453 tree rhs1 = gimple_assign_rhs1 (assign);
2454 tree bounds = NULL_TREE;
2455 gimple_stmt_iterator iter = gsi_for_stmt (assign);
2457 if (dump_file && (dump_flags & TDF_DETAILS))
2459 fprintf (dump_file, "Computing bounds for assignment: ");
2460 print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
2463 switch (rhs_code)
2465 case MEM_REF:
2466 case TARGET_MEM_REF:
2467 case COMPONENT_REF:
2468 case ARRAY_REF:
2469 /* We need to load bounds from the bounds table. */
2470 bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
2471 break;
2473 case VAR_DECL:
2474 case SSA_NAME:
2475 case ADDR_EXPR:
2476 case POINTER_PLUS_EXPR:
2477 case NOP_EXPR:
2478 case CONVERT_EXPR:
2479 case INTEGER_CST:
2480 /* Bounds are just propagated from RHS. */
2481 bounds = chkp_find_bounds (rhs1, &iter);
2482 break;
2484 case VIEW_CONVERT_EXPR:
2485 /* Bounds are just propagated from RHS. */
2486 bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
2487 break;
2489 case PARM_DECL:
2490 if (BOUNDED_P (rhs1))
2492 /* We need to load bounds from the bounds table. */
2493 bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
2494 node, &iter);
2495 TREE_ADDRESSABLE (rhs1) = 1;
2497 else
2498 bounds = chkp_get_nonpointer_load_bounds ();
2499 break;
2501 case MINUS_EXPR:
2502 case PLUS_EXPR:
2503 case BIT_AND_EXPR:
2504 case BIT_IOR_EXPR:
2505 case BIT_XOR_EXPR:
2507 tree rhs2 = gimple_assign_rhs2 (assign);
2508 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2509 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2511 /* First we try to check the types of the operands. If that
2512 does not help then look at the bound values.
2514 If some bounds are incomplete and others are
2515 not proven to be valid (i.e. also incomplete
2516 or invalid because the value is not a pointer) then
2517 the resulting value is incomplete and will be
2518 recomputed later in chkp_finish_incomplete_bounds. */
2519 if (BOUNDED_P (rhs1)
2520 && !BOUNDED_P (rhs2))
2521 bounds = bnd1;
2522 else if (BOUNDED_P (rhs2)
2523 && !BOUNDED_P (rhs1)
2524 && rhs_code != MINUS_EXPR)
2525 bounds = bnd2;
2526 else if (chkp_incomplete_bounds (bnd1))
2527 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
2528 && !chkp_incomplete_bounds (bnd2))
2529 bounds = bnd2;
2530 else
2531 bounds = incomplete_bounds;
2532 else if (chkp_incomplete_bounds (bnd2))
2533 if (chkp_valid_bounds (bnd1)
2534 && !chkp_incomplete_bounds (bnd1))
2535 bounds = bnd1;
2536 else
2537 bounds = incomplete_bounds;
2538 else if (!chkp_valid_bounds (bnd1))
2539 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
2540 bounds = bnd2;
2541 else if (bnd2 == chkp_get_zero_bounds ())
2542 bounds = bnd2;
2543 else
2544 bounds = bnd1;
2545 else if (!chkp_valid_bounds (bnd2))
2546 bounds = bnd1;
2547 else
2548 /* It seems both operands may have valid bounds
2549 (e.g. pointer minus pointer). In such a case
2550 use the default invalid-op bounds. */
2551 bounds = chkp_get_invalid_op_bounds ();
2553 break;
2555 case BIT_NOT_EXPR:
2556 case NEGATE_EXPR:
2557 case LSHIFT_EXPR:
2558 case RSHIFT_EXPR:
2559 case LROTATE_EXPR:
2560 case RROTATE_EXPR:
2561 case EQ_EXPR:
2562 case NE_EXPR:
2563 case LT_EXPR:
2564 case LE_EXPR:
2565 case GT_EXPR:
2566 case GE_EXPR:
2567 case MULT_EXPR:
2568 case RDIV_EXPR:
2569 case TRUNC_DIV_EXPR:
2570 case FLOOR_DIV_EXPR:
2571 case CEIL_DIV_EXPR:
2572 case ROUND_DIV_EXPR:
2573 case TRUNC_MOD_EXPR:
2574 case FLOOR_MOD_EXPR:
2575 case CEIL_MOD_EXPR:
2576 case ROUND_MOD_EXPR:
2577 case EXACT_DIV_EXPR:
2578 case FIX_TRUNC_EXPR:
2579 case FLOAT_EXPR:
2580 case REALPART_EXPR:
2581 case IMAGPART_EXPR:
2582 /* No valid bounds may be produced by these exprs. */
2583 bounds = chkp_get_invalid_op_bounds ();
2584 break;
2586 case COND_EXPR:
2588 tree val1 = gimple_assign_rhs2 (assign);
2589 tree val2 = gimple_assign_rhs3 (assign);
2590 tree bnd1 = chkp_find_bounds (val1, &iter);
2591 tree bnd2 = chkp_find_bounds (val2, &iter);
2592 gimple stmt;
2594 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2595 bounds = incomplete_bounds;
2596 else if (bnd1 == bnd2)
2597 bounds = bnd1;
2598 else
2600 rhs1 = unshare_expr (rhs1);
2602 bounds = chkp_get_tmp_reg (assign);
2603 stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
2604 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2606 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2607 chkp_mark_invalid_bounds (bounds);
2610 break;
2612 case MAX_EXPR:
2613 case MIN_EXPR:
2615 tree rhs2 = gimple_assign_rhs2 (assign);
2616 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2617 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2619 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2620 bounds = incomplete_bounds;
2621 else if (bnd1 == bnd2)
2622 bounds = bnd1;
2623 else
2625 gimple stmt;
2626 tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
2627 boolean_type_node, rhs1, rhs2);
2628 bounds = chkp_get_tmp_reg (assign);
2629 stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);
2631 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2633 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2634 chkp_mark_invalid_bounds (bounds);
2637 break;
2639 default:
2640 bounds = chkp_get_zero_bounds ();
2641 warning (0, "pointer bounds were lost due to unexpected expression %s",
2642 get_tree_code_name (rhs_code));
2645 gcc_assert (bounds);
2647 if (node)
2648 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2650 return bounds;
2653 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2655 There are just a few statement codes allowed: NOP (for default SSA names),
2656 ASSIGN, CALL, PHI, ASM.
2658 Return computed bounds. */
2659 static tree
2660 chkp_get_bounds_by_definition (tree node, gimple def_stmt,
2661 gphi_iterator *iter)
2663 tree var, bounds;
2664 enum gimple_code code = gimple_code (def_stmt);
2665 gphi *stmt;
2667 if (dump_file && (dump_flags & TDF_DETAILS))
2669 fprintf (dump_file, "Searching for bounds for node: ");
2670 print_generic_expr (dump_file, node, 0);
2672 fprintf (dump_file, " using its definition: ");
2673 print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2676 switch (code)
2678 case GIMPLE_NOP:
2679 var = SSA_NAME_VAR (node);
2680 switch (TREE_CODE (var))
2682 case PARM_DECL:
2683 bounds = chkp_get_bound_for_parm (node);
2684 break;
2686 case VAR_DECL:
2687 /* For uninitialized pointers use none bounds. */
2688 bounds = chkp_get_none_bounds ();
2689 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2690 break;
2692 case RESULT_DECL:
2694 tree base_type;
2696 gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2698 base_type = TREE_TYPE (TREE_TYPE (node));
2700 gcc_assert (TYPE_SIZE (base_type)
2701 && TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2702 && tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2704 bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2705 NULL, false);
2706 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2708 break;
2710 default:
2711 if (dump_file && (dump_flags & TDF_DETAILS))
2713 fprintf (dump_file, "Unexpected var with no definition\n");
2714 print_generic_expr (dump_file, var, 0);
2716 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2717 get_tree_code_name (TREE_CODE (var)));
2719 break;
2721 case GIMPLE_ASSIGN:
2722 bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2723 break;
2725 case GIMPLE_CALL:
2726 bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
2727 break;
2729 case GIMPLE_PHI:
2730 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
2731 if (SSA_NAME_VAR (node))
2732 var = chkp_get_bounds_var (SSA_NAME_VAR (node));
2733 else
2734 var = make_temp_ssa_name (pointer_bounds_type_node,
2735 gimple_build_nop (),
2736 CHKP_BOUND_TMP_NAME);
2737 else
2738 var = chkp_get_tmp_var ();
2739 stmt = create_phi_node (var, gimple_bb (def_stmt));
2740 bounds = gimple_phi_result (stmt);
2741 *iter = gsi_for_phi (stmt);
2743 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2745 /* The created bounds do not have all phi args computed yet and
2746 therefore we do not know if there is a valid source
2747 of bounds for that node. So we mark the bounds
2748 as incomplete and recompute them when all phi
2749 args are computed. */
2750 chkp_register_incomplete_bounds (bounds, node);
2751 break;
2753 case GIMPLE_ASM:
2754 bounds = chkp_get_zero_bounds ();
2755 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2756 break;
2758 default:
2759 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2760 gimple_code_name[code]);
2763 return bounds;
2766 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2767 tree
2768 chkp_build_make_bounds_call (tree lower_bound, tree size)
2770 tree call = build1 (ADDR_EXPR,
2771 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2772 chkp_bndmk_fndecl);
2773 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2774 call, 2, lower_bound, size);
2777 /* Create static bounds var for the specified OBJ which
2778 is either a VAR_DECL or a string constant. */
2779 static tree
2780 chkp_make_static_bounds (tree obj)
2782 static int string_id = 1;
2783 static int var_id = 1;
2784 tree *slot;
2785 const char *var_name;
2786 char *bnd_var_name;
2787 tree bnd_var;
2789 /* First check if we already have required var. */
2790 if (chkp_static_var_bounds)
2792 /* For vars we use the assembler name as a key in
2793 the chkp_static_var_bounds map. It allows us to
2794 avoid duplicating bound vars for decls
2795 sharing an assembler name. */
2796 if (TREE_CODE (obj) == VAR_DECL)
2798 tree name = DECL_ASSEMBLER_NAME (obj);
2799 slot = chkp_static_var_bounds->get (name);
2800 if (slot)
2801 return *slot;
2803 else
2805 slot = chkp_static_var_bounds->get (obj);
2806 if (slot)
2807 return *slot;
2811 /* Build decl for bounds var. */
2812 if (TREE_CODE (obj) == VAR_DECL)
2814 if (DECL_IGNORED_P (obj))
2816 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2817 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2819 else
2821 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2823 /* For hidden symbols we want to skip the first '*' char. */
2824 if (*var_name == '*')
2825 var_name++;
2827 bnd_var_name = (char *) xmalloc (strlen (var_name)
2828 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2829 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2830 strcat (bnd_var_name, var_name);
2833 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2834 get_identifier (bnd_var_name),
2835 pointer_bounds_type_node);
2837 /* Address of the obj will be used as lower bound. */
2838 TREE_ADDRESSABLE (obj) = 1;
2840 else
2842 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2843 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2845 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2846 get_identifier (bnd_var_name),
2847 pointer_bounds_type_node);
2850 TREE_PUBLIC (bnd_var) = 0;
2851 TREE_USED (bnd_var) = 1;
2852 TREE_READONLY (bnd_var) = 0;
2853 TREE_STATIC (bnd_var) = 1;
2854 TREE_ADDRESSABLE (bnd_var) = 0;
2855 DECL_ARTIFICIAL (bnd_var) = 1;
2856 DECL_COMMON (bnd_var) = 1;
2857 DECL_COMDAT (bnd_var) = 1;
2858 DECL_READ_P (bnd_var) = 1;
2859 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2860 /* Force output similar to constant bounds.
2861 See chkp_make_static_const_bounds. */
2862 varpool_node::get_create (bnd_var)->force_output = 1;
2863 /* Mark symbol as requiring bounds initialization. */
2864 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2865 varpool_node::finalize_decl (bnd_var);
2867 /* Add created var to the map to use it for other references
2868 to obj. */
2869 if (!chkp_static_var_bounds)
2870 chkp_static_var_bounds = new hash_map<tree, tree>;
2872 if (TREE_CODE (obj) == VAR_DECL)
2874 tree name = DECL_ASSEMBLER_NAME (obj);
2875 chkp_static_var_bounds->put (name, bnd_var);
2877 else
2878 chkp_static_var_bounds->put (obj, bnd_var);
2880 return bnd_var;
2883 /* When a var has incomplete type we cannot get its size to
2884 compute bounds. In such cases we use a checker
2885 builtin call which determines the object size at runtime. */
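/* For example (a sketch of the emitted code; names are invented): for an
   extern variable of unknown size the entry block gets roughly

     __size_tmp.N_1 = __builtin___chkp_sizeof (arr);
     __bound_tmp.N_2 = __builtin___chkp_bndmk (&arr, __size_tmp.N_1);

   where the size is supplied by a relocation resolved at link time.
   With -fchkp-zero-dynamic-size-as-infinite an unresolved (zero) size
   is replaced by the maximum possible size instead. */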
2886 static tree
2887 chkp_generate_extern_var_bounds (tree var)
2889 tree bounds, size_reloc, lb, size, max_size, cond;
2890 gimple_stmt_iterator gsi;
2891 gimple_seq seq = NULL;
2892 gimple stmt;
2894 /* If instrumentation is not enabled for vars having
2895 incomplete type then just return zero bounds to avoid
2896 checks for this var. */
2897 if (!flag_chkp_incomplete_type)
2898 return chkp_get_zero_bounds ();
2900 if (dump_file && (dump_flags & TDF_DETAILS))
2902 fprintf (dump_file, "Generating bounds for extern symbol '");
2903 print_generic_expr (dump_file, var, 0);
2904 fprintf (dump_file, "'\n");
2907 stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
2909 size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
2910 gimple_call_set_lhs (stmt, size_reloc);
2912 gimple_seq_add_stmt (&seq, stmt);
2914 lb = chkp_build_addr_expr (var);
2915 size = make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());
2917 if (flag_chkp_zero_dynamic_size_as_infinite)
2919 /* We should check that size relocation was resolved.
2920 If it was not then use maximum possible size for the var. */
2921 max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
2922 fold_convert (chkp_uintptr_type, lb));
2923 max_size = chkp_force_gimple_call_op (max_size, &seq);
2925 cond = build2 (NE_EXPR, boolean_type_node,
2926 size_reloc, integer_zero_node);
2927 stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
2928 gimple_seq_add_stmt (&seq, stmt);
2930 else
2932 stmt = gimple_build_assign (size, size_reloc);
2933 gimple_seq_add_stmt (&seq, stmt);
2936 gsi = gsi_start_bb (chkp_get_entry_block ());
2937 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2939 bounds = chkp_make_bounds (lb, size, &gsi, true);
2941 return bounds;
2944 /* Return 1 if TYPE has fields with zero size or fields
2945 marked with the bnd_variable_size attribute. */
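/* For instance (informal): a structure such as

     struct str { int len; char data[0]; };

   or one with a field carrying the bnd_variable_size attribute is
   considered to have variable size, and narrowing to such fields is
   disabled by the callers of this predicate. */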
2946 bool
2947 chkp_variable_size_type (tree type)
2949 bool res = false;
2950 tree field;
2952 if (RECORD_OR_UNION_TYPE_P (type))
2953 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2955 if (TREE_CODE (field) == FIELD_DECL)
2956 res = res
2957 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
2958 || chkp_variable_size_type (TREE_TYPE (field));
2960 else
2961 res = !TYPE_SIZE (type)
2962 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
2963 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
2965 return res;
2968 /* Compute and return bounds for address of DECL which is
2969 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
2970 static tree
2971 chkp_get_bounds_for_decl_addr (tree decl)
2973 tree bounds;
2975 gcc_assert (TREE_CODE (decl) == VAR_DECL
2976 || TREE_CODE (decl) == PARM_DECL
2977 || TREE_CODE (decl) == RESULT_DECL);
2979 bounds = chkp_get_registered_addr_bounds (decl);
2981 if (bounds)
2982 return bounds;
2984 if (dump_file && (dump_flags & TDF_DETAILS))
2986 fprintf (dump_file, "Building bounds for address of decl ");
2987 print_generic_expr (dump_file, decl, 0);
2988 fprintf (dump_file, "\n");
2991 /* Use zero bounds if size is unknown and checks for
2992 unknown sizes are restricted. */
2993 if ((!DECL_SIZE (decl)
2994 || (chkp_variable_size_type (TREE_TYPE (decl))
2995 && (TREE_STATIC (decl)
2996 || DECL_EXTERNAL (decl)
2997 || TREE_PUBLIC (decl))))
2998 && !flag_chkp_incomplete_type)
2999 return chkp_get_zero_bounds ();
3001 if (flag_chkp_use_static_bounds
3002 && TREE_CODE (decl) == VAR_DECL
3003 && (TREE_STATIC (decl)
3004 || DECL_EXTERNAL (decl)
3005 || TREE_PUBLIC (decl))
3006 && !DECL_THREAD_LOCAL_P (decl))
3008 tree bnd_var = chkp_make_static_bounds (decl);
3009 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3010 gimple stmt;
3012 bounds = chkp_get_tmp_reg (gimple_build_nop ());
3013 stmt = gimple_build_assign (bounds, bnd_var);
3014 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3016 else if (!DECL_SIZE (decl)
3017 || (chkp_variable_size_type (TREE_TYPE (decl))
3018 && (TREE_STATIC (decl)
3019 || DECL_EXTERNAL (decl)
3020 || TREE_PUBLIC (decl))))
3022 gcc_assert (TREE_CODE (decl) == VAR_DECL);
3023 bounds = chkp_generate_extern_var_bounds (decl);
3025 else
3027 tree lb = chkp_build_addr_expr (decl);
3028 bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3031 return bounds;
3034 /* Compute and return bounds for constant string. */
3035 static tree
3036 chkp_get_bounds_for_string_cst (tree cst)
3038 tree bounds;
3039 tree lb;
3040 tree size;
3042 gcc_assert (TREE_CODE (cst) == STRING_CST);
3044 bounds = chkp_get_registered_bounds (cst);
3046 if (bounds)
3047 return bounds;
3049 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3050 || flag_chkp_use_static_const_bounds > 0)
3052 tree bnd_var = chkp_make_static_bounds (cst);
3053 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3054 gimple stmt;
3056 bounds = chkp_get_tmp_reg (gimple_build_nop ());
3057 stmt = gimple_build_assign (bounds, bnd_var);
3058 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3060 else
3062 lb = chkp_build_addr_expr (cst);
3063 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3064 bounds = chkp_make_bounds (lb, size, NULL, false);
3067 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3069 return bounds;
3072 /* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
3073 return the result. If ITER is not NULL then code is inserted
3074 before the position pointed to by ITER. Otherwise code is added to
3075 the entry block. */
3076 static tree
3077 chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
3079 if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
3080 return bounds2 ? bounds2 : bounds1;
3081 else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
3082 return bounds1;
3083 else
3085 gimple_seq seq;
3086 gimple stmt;
3087 tree bounds;
3089 seq = NULL;
3091 stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
3092 chkp_mark_stmt (stmt);
3094 bounds = chkp_get_tmp_reg (stmt);
3095 gimple_call_set_lhs (stmt, bounds);
3097 gimple_seq_add_stmt (&seq, stmt);
3099 /* We are probably doing narrowing for a constant expression.
3100 In such a case ITER may be undefined. */
3101 if (!iter)
3103 gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
3104 iter = &gsi;
3105 gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
3107 else
3108 gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
3110 if (dump_file && (dump_flags & TDF_DETAILS))
3112 fprintf (dump_file, "Bounds intersection: ");
3113 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
3114 fprintf (dump_file, " inserted before statement: ");
3115 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
3116 TDF_VOPS|TDF_MEMSYMS);
3119 return bounds;
3123 /* Return 1 if we are allowed to narrow bounds for the addressed FIELD
3124 and 0 otherwise. */
3125 static bool
3126 chkp_may_narrow_to_field (tree field)
3128 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3129 && tree_to_uhwi (DECL_SIZE (field)) != 0
3130 && (!DECL_FIELD_OFFSET (field)
3131 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3132 && (!DECL_FIELD_BIT_OFFSET (field)
3133 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3134 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3135 && !chkp_variable_size_type (TREE_TYPE (field));
3138 /* Return 1 if bounds for FIELD should be narrowed to
3139 field's own size. */
3140 static bool
3141 chkp_narrow_bounds_for_field (tree field)
3143 HOST_WIDE_INT offs;
3144 HOST_WIDE_INT bit_offs;
3146 if (!chkp_may_narrow_to_field (field))
3147 return false;
3149 /* Accesses to compiler-generated fields should not cause
3150 bounds narrowing. */
3151 if (DECL_ARTIFICIAL (field))
3152 return false;
3154 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3155 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3157 return (flag_chkp_narrow_bounds
3158 && (flag_chkp_first_field_has_own_bounds
3159 || offs
3160 || bit_offs));
3163 /* Perform narrowing for BOUNDS using bounds computed for field
3164 access COMPONENT. ITER meaning is the same as for
3165 chkp_intersect_bounds. */
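/* For example (a sketch; names are invented): for an access to s.buf,
   where buf is a fixed-size member, the code below makes field bounds

     __bound_tmp.N_1 = __builtin___chkp_bndmk (&s.buf, sizeof (s.buf));

   and then intersects them with the bounds of s (see
   chkp_intersect_bounds above), so the narrowed bounds never exceed the
   bounds of the enclosing object. */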
3166 static tree
3167 chkp_narrow_bounds_to_field (tree bounds, tree component,
3168 gimple_stmt_iterator *iter)
3170 tree field = TREE_OPERAND (component, 1);
3171 tree size = DECL_SIZE_UNIT (field);
3172 tree field_ptr = chkp_build_addr_expr (component);
3173 tree field_bounds;
3175 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3177 return chkp_intersect_bounds (field_bounds, bounds, iter);
3180 /* Parse field or array access NODE.
3182 PTR output parameter holds a pointer to the outermost
3183 object.
3185 BITFIELD output parameter is set to 1 if a bitfield is
3186 accessed and to 0 otherwise. If it is 1 then ELT holds
3187 the outer component of the accessed bit field.
3189 SAFE output parameter is set to 1 if the access is safe and
3190 checks are not required.
3192 BOUNDS output parameter holds bounds to be used to check
3193 the access (may be NULL).
3195 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3196 innermost accessed component. */
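/* For example (informal; names are invented): for the access

     x_1->arr[i_2].field

   PTR is set to x_1, the outermost bounds are those computed for x_1,
   SAFE is false because of the indirection, and depending on the
   narrowing flags the bounds may subsequently be narrowed to arr[i_2]
   or to field. */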
3197 static void
3198 chkp_parse_array_and_component_ref (tree node, tree *ptr,
3199 tree *elt, bool *safe,
3200 bool *bitfield,
3201 tree *bounds,
3202 gimple_stmt_iterator *iter,
3203 bool innermost_bounds)
3205 tree comp_to_narrow = NULL_TREE;
3206 tree last_comp = NULL_TREE;
3207 bool array_ref_found = false;
3208 tree *nodes;
3209 tree var;
3210 int len;
3211 int i;
3213 /* Compute tree height for expression. */
3214 var = node;
3215 len = 1;
3216 while (TREE_CODE (var) == COMPONENT_REF
3217 || TREE_CODE (var) == ARRAY_REF
3218 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
3220 var = TREE_OPERAND (var, 0);
3221 len++;
3224 gcc_assert (len > 1);
3226 /* It is more convenient for us to scan left-to-right,
3227 so walk the tree again and put all nodes into the NODES vector
3228 in reverse order. */
3229 nodes = XALLOCAVEC (tree, len);
3230 nodes[len - 1] = node;
3231 for (i = len - 2; i >= 0; i--)
3232 nodes[i] = TREE_OPERAND (nodes[i + 1], 0);
3234 if (bounds)
3235 *bounds = NULL;
3236 *safe = true;
3237 *bitfield = (TREE_CODE (node) == COMPONENT_REF
3238 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
3239 /* To get the bitfield address we will need the outer element. */
3240 if (*bitfield)
3241 *elt = nodes[len - 2];
3242 else
3243 *elt = NULL_TREE;
3245 /* If we have an indirection in the expression then compute
3246 the outermost structure bounds. Computed bounds may be
3247 narrowed later. */
3248 if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
3250 *safe = false;
3251 *ptr = TREE_OPERAND (nodes[0], 0);
3252 if (bounds)
3253 *bounds = chkp_find_bounds (*ptr, iter);
3255 else
3257 gcc_assert (TREE_CODE (var) == VAR_DECL
3258 || TREE_CODE (var) == PARM_DECL
3259 || TREE_CODE (var) == RESULT_DECL
3260 || TREE_CODE (var) == STRING_CST
3261 || TREE_CODE (var) == SSA_NAME);
3263 *ptr = chkp_build_addr_expr (var);
3266 /* In this loop we are trying to find a field access
3267 requiring narrowing. There are two simple rules
3268 for the search:
3269 1. The leftmost array_ref is chosen if any.
3270 2. The rightmost suitable component_ref is chosen if innermost
3271 bounds are required and no array_ref exists. */
3272 for (i = 1; i < len; i++)
3274 var = nodes[i];
3276 if (TREE_CODE (var) == ARRAY_REF)
3278 *safe = false;
3279 array_ref_found = true;
3280 if (flag_chkp_narrow_bounds
3281 && !flag_chkp_narrow_to_innermost_arrray
3282 && (!last_comp
3283 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
3285 comp_to_narrow = last_comp;
3286 break;
3289 else if (TREE_CODE (var) == COMPONENT_REF)
3291 tree field = TREE_OPERAND (var, 1);
3293 if (innermost_bounds
3294 && !array_ref_found
3295 && chkp_narrow_bounds_for_field (field))
3296 comp_to_narrow = var;
3297 last_comp = var;
3299 if (flag_chkp_narrow_bounds
3300 && flag_chkp_narrow_to_innermost_arrray
3301 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
3303 if (bounds)
3304 *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
3305 comp_to_narrow = NULL;
3308 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
3309 /* Nothing to do for it. */
3311 else
3312 gcc_unreachable ();
3315 if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
3316 *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);
3318 if (innermost_bounds && bounds && !*bounds)
3319 *bounds = chkp_find_bounds (*ptr, iter);
3322 /* Compute and return bounds for address of OBJ. */
3323 static tree
3324 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
3326 tree bounds = chkp_get_registered_addr_bounds (obj);
3328 if (bounds)
3329 return bounds;
3331 switch (TREE_CODE (obj))
3333 case VAR_DECL:
3334 case PARM_DECL:
3335 case RESULT_DECL:
3336 bounds = chkp_get_bounds_for_decl_addr (obj);
3337 break;
3339 case STRING_CST:
3340 bounds = chkp_get_bounds_for_string_cst (obj);
3341 break;
3343 case ARRAY_REF:
3344 case COMPONENT_REF:
3346 tree elt;
3347 tree ptr;
3348 bool safe;
3349 bool bitfield;
3351 chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
3352 &bitfield, &bounds, iter, true);
3354 gcc_assert (bounds);
3356 break;
3358 case FUNCTION_DECL:
3359 case LABEL_DECL:
3360 bounds = chkp_get_zero_bounds ();
3361 break;
3363 case MEM_REF:
3364 bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
3365 break;
3367 case REALPART_EXPR:
3368 case IMAGPART_EXPR:
3369 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
3370 break;
3372 default:
3373 if (dump_file && (dump_flags & TDF_DETAILS))
3375 fprintf (dump_file, "chkp_make_addressed_object_bounds: "
3376 "unexpected object of type %s\n",
3377 get_tree_code_name (TREE_CODE (obj)));
3378 print_node (dump_file, "", obj, 0);
3380 internal_error ("chkp_make_addressed_object_bounds: "
3381 "Unexpected tree code %s",
3382 get_tree_code_name (TREE_CODE (obj)));
3385 chkp_register_addr_bounds (obj, bounds);
3387 return bounds;
3390 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3391 to compute bounds if required. Computed bounds should be available at
3392 position pointed by ITER.
3394 If PTR_SRC is NULL_TREE then the pointer definition is identified.
3396 If PTR_SRC is not NULL_TREE then ITER points to the statement which loads
3397 PTR. If PTR is any memory reference then ITER points to a statement
3398 after which bndldx will be inserted. In both cases ITER will be updated
3399 to point to the inserted bndldx statement. */
3401 static tree
3402 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3404 tree addr = NULL_TREE;
3405 tree bounds = NULL_TREE;
3407 if (!ptr_src)
3408 ptr_src = ptr;
3410 bounds = chkp_get_registered_bounds (ptr_src);
3412 if (bounds)
3413 return bounds;
3415 switch (TREE_CODE (ptr_src))
3417 case MEM_REF:
3418 case VAR_DECL:
3419 if (BOUNDED_P (ptr_src))
3420 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3421 bounds = chkp_get_zero_bounds ();
3422 else
3424 addr = chkp_build_addr_expr (ptr_src);
3425 bounds = chkp_build_bndldx (addr, ptr, iter);
3427 else
3428 bounds = chkp_get_nonpointer_load_bounds ();
3429 break;
3431 case ARRAY_REF:
3432 case COMPONENT_REF:
3433 addr = get_base_address (ptr_src);
3434 if (DECL_P (addr)
3435 || TREE_CODE (addr) == MEM_REF
3436 || TREE_CODE (addr) == TARGET_MEM_REF)
3438 if (BOUNDED_P (ptr_src))
3439 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3440 bounds = chkp_get_zero_bounds ();
3441 else
3443 addr = chkp_build_addr_expr (ptr_src);
3444 bounds = chkp_build_bndldx (addr, ptr, iter);
3446 else
3447 bounds = chkp_get_nonpointer_load_bounds ();
3449 else
3451 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3452 bounds = chkp_find_bounds (addr, iter);
3454 break;
3456 case PARM_DECL:
3457 gcc_unreachable ();
3458 bounds = chkp_get_bound_for_parm (ptr_src);
3459 break;
3461 case TARGET_MEM_REF:
3462 addr = chkp_build_addr_expr (ptr_src);
3463 bounds = chkp_build_bndldx (addr, ptr, iter);
3464 break;
3466 case SSA_NAME:
3467 bounds = chkp_get_registered_bounds (ptr_src);
3468 if (!bounds)
3470 gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3471 gphi_iterator phi_iter;
3473 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3475 gcc_assert (bounds);
3477 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3479 unsigned i;
3481 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3483 tree arg = gimple_phi_arg_def (def_phi, i);
3484 tree arg_bnd;
3485 gphi *phi_bnd;
3487 arg_bnd = chkp_find_bounds (arg, NULL);
3489 /* chkp_get_bounds_by_definition created a new phi
3490 statement and phi_iter points to it.
3492 The previous call to chkp_find_bounds could create
3493 a new basic block and therefore change the phi statement
3494 phi_iter points to. */
3495 phi_bnd = phi_iter.phi ();
3497 add_phi_arg (phi_bnd, arg_bnd,
3498 gimple_phi_arg_edge (def_phi, i),
3499 UNKNOWN_LOCATION);
3502 /* If all bound phi nodes have their args computed
3503 then we may finish their computation. See
3504 chkp_finish_incomplete_bounds for more details. */
3505 if (chkp_may_finish_incomplete_bounds ())
3506 chkp_finish_incomplete_bounds ();
3509 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3510 || chkp_incomplete_bounds (bounds));
3512 break;
3514 case ADDR_EXPR:
3515 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3516 break;
3518 case INTEGER_CST:
3519 if (integer_zerop (ptr_src))
3520 bounds = chkp_get_none_bounds ();
3521 else
3522 bounds = chkp_get_invalid_op_bounds ();
3523 break;
3525 default:
3526 if (dump_file && (dump_flags & TDF_DETAILS))
3528 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3529 get_tree_code_name (TREE_CODE (ptr_src)));
3530 print_node (dump_file, "", ptr_src, 0);
3532 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3533 get_tree_code_name (TREE_CODE (ptr_src)));
3536 if (!bounds)
3538 if (dump_file && (dump_flags & TDF_DETAILS))
3540 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3541 print_node (dump_file, "", ptr_src, 0);
3543 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3546 return bounds;
3549 /* Normal case for bounds search without forced narrowing. */
3550 static tree
3551 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3553 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3556 /* Search bounds for pointer PTR loaded from PTR_SRC
3557 by statement *ITER points to. */
3558 static tree
3559 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3561 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3564 /* Helper function which checks the type of RHS and finds all pointers in
3565 it. For each found pointer we build its accesses in the LHS and RHS
3566 objects and then call HANDLER for them. The function is used to copy
3567 or initialize bounds for the copied object. */
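/* For instance (informal): for an initializer like

     struct S { int *p; int n; } s = { &x, 1 };

   the walk descends into the constructor and invokes HANDLER only for
   the pointer member, i.e. for the pair (s.p, &x). */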
3568 static void
3569 chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
3570 assign_handler handler)
3572 tree type = TREE_TYPE (lhs);
3574 /* We have nothing to do with clobbers. */
3575 if (TREE_CLOBBER_P (rhs))
3576 return;
3578 if (BOUNDED_TYPE_P (type))
3579 handler (lhs, rhs, arg);
3580 else if (RECORD_OR_UNION_TYPE_P (type))
3582 tree field;
3584 if (TREE_CODE (rhs) == CONSTRUCTOR)
3586 unsigned HOST_WIDE_INT cnt;
3587 tree val;
3589 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
3591 if (chkp_type_has_pointer (TREE_TYPE (field)))
3593 tree lhs_field = chkp_build_component_ref (lhs, field);
3594 chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
3598 else
3599 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3600 if (TREE_CODE (field) == FIELD_DECL
3601 && chkp_type_has_pointer (TREE_TYPE (field)))
3603 tree rhs_field = chkp_build_component_ref (rhs, field);
3604 tree lhs_field = chkp_build_component_ref (lhs, field);
3605 chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
3608 else if (TREE_CODE (type) == ARRAY_TYPE)
3610 unsigned HOST_WIDE_INT cur = 0;
3611 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3612 tree etype = TREE_TYPE (type);
3613 tree esize = TYPE_SIZE (etype);
3615 if (TREE_CODE (rhs) == CONSTRUCTOR)
3617 unsigned HOST_WIDE_INT cnt;
3618 tree purp, val, lhs_elem;
3620 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
3622 if (purp && TREE_CODE (purp) == RANGE_EXPR)
3624 tree lo_index = TREE_OPERAND (purp, 0);
3625 tree hi_index = TREE_OPERAND (purp, 1);
3627 for (cur = (unsigned)tree_to_uhwi (lo_index);
3628 cur <= (unsigned)tree_to_uhwi (hi_index);
3629 cur++)
3631 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3632 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3635 else
3637 if (purp)
3639 gcc_assert (TREE_CODE (purp) == INTEGER_CST);
3640 cur = tree_to_uhwi (purp);
3643 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
3645 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3649 /* Copy array only when size is known. */
3650 else if (maxval && !integer_minus_onep (maxval))
3651 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
3653 tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3654 tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
3655 chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
3658 else
3659 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3660 get_tree_code_name (TREE_CODE (type)));
3663 /* Add code to copy bounds for the assignment of RHS to LHS.
3664 ARG is an iterator pointing to the position where new code is inserted. */
3665 static void
3666 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3668 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3669 tree bounds = chkp_find_bounds (rhs, iter);
3670 tree addr = chkp_build_addr_expr(lhs);
3672 chkp_build_bndstx (addr, rhs, bounds, iter);
3675 /* Emit static bounds initializers and size vars. */
3676 void
3677 chkp_finish_file (void)
3679 struct varpool_node *node;
3680 struct chkp_ctor_stmt_list stmts;
3682 if (seen_error ())
3683 return;
3685 /* Iterate through varpool and generate bounds initialization
3686 constructors for all statically initialized pointers. */
3687 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3688 stmts.stmts = NULL;
3689 FOR_EACH_VARIABLE (node)
3690 /* Check that the var is actually emitted and that we need and may
3691 initialize its bounds. */
3692 if (node->need_bounds_init
3693 && !POINTER_BOUNDS_P (node->decl)
3694 && DECL_RTL (node->decl)
3695 && MEM_P (DECL_RTL (node->decl))
3696 && TREE_ASM_WRITTEN (node->decl))
3698 chkp_walk_pointer_assignments (node->decl,
3699 DECL_INITIAL (node->decl),
3700 &stmts,
3701 chkp_add_modification_to_stmt_list);
3703 if (stmts.avail <= 0)
3705 cgraph_build_static_cdtor ('P', stmts.stmts,
3706 MAX_RESERVED_INIT_PRIORITY + 3);
3707 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3708 stmts.stmts = NULL;
3712 if (stmts.stmts)
3713 cgraph_build_static_cdtor ('P', stmts.stmts,
3714 MAX_RESERVED_INIT_PRIORITY + 3);
3716 /* Iterate through varpool and generate bounds initialization
3717 constructors for all static bounds vars. */
3718 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3719 stmts.stmts = NULL;
3720 FOR_EACH_VARIABLE (node)
3721 if (node->need_bounds_init
3722 && POINTER_BOUNDS_P (node->decl)
3723 && TREE_ASM_WRITTEN (node->decl))
3725 tree bnd = node->decl;
3726 tree var;
3728 gcc_assert (DECL_INITIAL (bnd)
3729 && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);
3731 var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
3732 chkp_output_static_bounds (bnd, var, &stmts);
3735 if (stmts.stmts)
3736 cgraph_build_static_cdtor ('B', stmts.stmts,
3737 MAX_RESERVED_INIT_PRIORITY + 2);
3739 delete chkp_static_var_bounds;
3740 delete chkp_bounds_map;
3743 /* An instrumentation function which is called for each statement
3744 having memory access we want to instrument. It inserts check
3745 code and bounds copy code.
3747 ITER points to statement to instrument.
3749 NODE holds memory access in statement to check.
3751 LOC holds the location information for statement.
3753 DIRFLAG determines whether the access is a read or a write.
3755 ACCESS_OFFS should be added to address used in NODE
3756 before check.
3758 ACCESS_SIZE holds size of checked access.
3760 SAFE indicates if NODE access is safe and should not be
3761 checked. */
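/* For example (informal): for a 4-byte access the checked range is

     addr_first = <address of the accessed object>;
     addr_last  = addr_first + 4 - 1;

   and, unless the access is known to be safe, a lower-bound check on
   addr_first and an upper-bound check on addr_last are emitted against
   the bounds of the dereferenced pointer. */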
3762 static void
3763 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3764 location_t loc, tree dirflag,
3765 tree access_offs, tree access_size,
3766 bool safe)
3768 tree node_type = TREE_TYPE (node);
3769 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3770 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3771 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3772 tree ptr = NULL_TREE; /* a pointer used for dereference */
3773 tree bounds = NULL_TREE;
3775 /* We do not need instrumentation for clobbers. */
3776 if (dirflag == integer_one_node
3777 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3778 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3779 return;
3781 switch (TREE_CODE (node))
3783 case ARRAY_REF:
3784 case COMPONENT_REF:
3786 bool bitfield;
3787 tree elt;
3789 if (safe)
3791 /* We are not going to generate any checks, so do not
3792 generate bounds either. */
3793 addr_first = chkp_build_addr_expr (node);
3794 break;
3797 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3798 &bitfield, &bounds, iter, false);
3800 /* Break if there is no dereference and operation is safe. */
3802 if (bitfield)
3804 tree field = TREE_OPERAND (node, 1);
3806 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3807 size = DECL_SIZE_UNIT (field);
3809 if (elt)
3810 elt = chkp_build_addr_expr (elt);
3811 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3812 addr_first = fold_build_pointer_plus_loc (loc,
3813 addr_first,
3814 byte_position (field));
3816 else
3817 addr_first = chkp_build_addr_expr (node);
3819 break;
3821 case INDIRECT_REF:
3822 ptr = TREE_OPERAND (node, 0);
3823 addr_first = ptr;
3824 break;
3826 case MEM_REF:
3827 ptr = TREE_OPERAND (node, 0);
3828 addr_first = chkp_build_addr_expr (node);
3829 break;
3831 case TARGET_MEM_REF:
3832 ptr = TMR_BASE (node);
3833 addr_first = chkp_build_addr_expr (node);
3834 break;
3836 case ARRAY_RANGE_REF:
3837 printf("ARRAY_RANGE_REF\n");
3838 debug_gimple_stmt(gsi_stmt(*iter));
3839 debug_tree(node);
3840 gcc_unreachable ();
3841 break;
3843 case BIT_FIELD_REF:
3845 tree offs, rem, bpu;
3847 gcc_assert (!access_offs);
3848 gcc_assert (!access_size);
3850 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3851 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3852 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3853 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3855 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3856 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3857 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3858 size = fold_convert (size_type_node, size);
3860 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3861 dirflag, offs, size, safe);
3862 return;
3864 break;
3866 case VAR_DECL:
3867 case RESULT_DECL:
3868 case PARM_DECL:
3869 if (dirflag != integer_one_node
3870 || DECL_REGISTER (node))
3871 return;
3873 safe = true;
3874 addr_first = chkp_build_addr_expr (node);
3875 break;
3877 default:
3878 return;
3881 /* If addr_last was not computed then use (addr_first + size - 1)
3882 expression to compute it. */
3883 if (!addr_last)
3885 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3886 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3889 /* Shift both addr_first and addr_last by access_offs if specified. */
3890 if (access_offs)
3892 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3893 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3896 /* Generate bndcl/bndcu checks if memory access is not safe. */
3897 if (!safe)
3899 gimple_stmt_iterator stmt_iter = *iter;
3901 if (!bounds)
3902 bounds = chkp_find_bounds (ptr, iter);
3904 chkp_check_mem_access (addr_first, addr_last, bounds,
3905 stmt_iter, loc, dirflag);
3908 /* We need to store bounds in case pointer is stored. */
3909 if (dirflag == integer_one_node
3910 && chkp_type_has_pointer (node_type)
3911 && flag_chkp_store_bounds)
3913 gimple stmt = gsi_stmt (*iter);
3914 tree rhs1 = gimple_assign_rhs1 (stmt);
3915 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3917 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3918 chkp_walk_pointer_assignments (node, rhs1, iter,
3919 chkp_copy_bounds_for_elem);
3920 else
3922 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3923 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
3928 /* Add code to copy bounds for all pointers copied
3929 in ASSIGN created during inlining of EDGE. */
3930 void
3931 chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
3933 tree lhs = gimple_assign_lhs (assign);
3934 tree rhs = gimple_assign_rhs1 (assign);
3935 gimple_stmt_iterator iter = gsi_for_stmt (assign);
3937 if (!flag_chkp_store_bounds)
3938 return;
3940 chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);
3942 /* We should create edges for all created calls to bndldx and bndstx. */
3943 while (gsi_stmt (iter) != assign)
3945 gimple stmt = gsi_stmt (iter);
3946 if (gimple_code (stmt) == GIMPLE_CALL)
3948 tree fndecl = gimple_call_fndecl (stmt);
3949 struct cgraph_node *callee = cgraph_node::get_create (fndecl);
3950 struct cgraph_edge *new_edge;
3952 gcc_assert (fndecl == chkp_bndstx_fndecl
3953 || fndecl == chkp_bndldx_fndecl
3954 || fndecl == chkp_ret_bnd_fndecl);
3956 new_edge = edge->caller->create_edge (callee,
3957 as_a <gcall *> (stmt),
3958 edge->count,
3959 edge->frequency);
3960 new_edge->frequency = compute_call_stmt_bb_frequency
3961 (edge->caller->decl, gimple_bb (stmt));
3963 gsi_prev (&iter);
3967 /* Some code transformations made during the instrumentation pass
3968 may put the code into an inconsistent state. Here we find and fix
3969 such flaws. */
3970 void
3971 chkp_fix_cfg ()
3973 basic_block bb;
3974 gimple_stmt_iterator i;
3976 /* We could have inserted some code right after a stmt which ends a bb.
3977 We wanted to put this code on the fallthru edge but did not
3978 add new edges from the beginning because it may cause new
3979 phi node creation which may be incorrect due to incomplete
3980 bound phi nodes. */
3981 FOR_ALL_BB_FN (bb, cfun)
3982 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
3984 gimple stmt = gsi_stmt (i);
3985 gimple_stmt_iterator next = i;
3987 gsi_next (&next);
3989 if (stmt_ends_bb_p (stmt)
3990 && !gsi_end_p (next))
3992 edge fall = find_fallthru_edge (bb->succs);
3993 basic_block dest = NULL;
3994 int flags = 0;
3996 gcc_assert (fall);
3998 /* We cannot split an abnormal edge. Therefore we
3999 store its params, make it regular, and then
4000 rebuild the abnormal edge after the split. */
4001 if (fall->flags & EDGE_ABNORMAL)
4002 {
4003 flags = fall->flags & ~EDGE_FALLTHRU;
4004 dest = fall->dest;
4005
4006 fall->flags &= ~EDGE_COMPLEX;
4007 }
4008
4009 while (!gsi_end_p (next))
4010 {
4011 gimple next_stmt = gsi_stmt (next);
4012 gsi_remove (&next, false);
4013 gsi_insert_on_edge (fall, next_stmt);
4014 }
4015
4016 gsi_commit_edge_inserts ();
4018 /* Re-create abnormal edge. */
4019 if (dest)
4020 make_edge (bb, dest, flags);
4021 }
4022 }
4023 }
4024
4025 /* Walker callback for chkp_replace_function_pointers.  Replaces
4026 a function pointer in the specified operand with a pointer to the
4027 instrumented function version.  */
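/* Illustrative sketch (the function name is invented, and the ".chkp"
   suffix is assumed to be the usual naming of instrumented clones): an
   operand such as

     fnptr = &foo;

   is rewritten to reference the instrumented clone

     fnptr = &foo.chkp;

   provided foo is not bnd_legacy and a clone exists or can be created.  */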
4028 static tree
4029 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4030 void *data ATTRIBUTE_UNUSED)
4032 if (TREE_CODE (*op) == FUNCTION_DECL
4033 && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op))
4034 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4035 /* For builtins we replace pointers only for selected
4036 functions and for functions having definitions.  */
4037 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4038 && (chkp_instrument_normal_builtin (*op)
4039 || gimple_has_body_p (*op)))))
4041 struct cgraph_node *node = cgraph_node::get_create (*op);
4042 struct cgraph_node *clone = NULL;
4044 if (!node->instrumentation_clone)
4045 clone = chkp_maybe_create_clone (*op);
4047 if (clone)
4048 *op = clone->decl;
4049 *walk_subtrees = 0;
4050 }
4051
4052 return NULL;
4053 }
4054
4055 /* This function searches for function pointers in the statement
4056 pointed to by GSI and replaces them with pointers to the
4057 instrumented function versions.  */
4058 static void
4059 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4061 gimple stmt = gsi_stmt (*gsi);
4062 /* For calls we want to walk call args only. */
4063 if (gimple_code (stmt) == GIMPLE_CALL)
4065 unsigned i;
4066 for (i = 0; i < gimple_call_num_args (stmt); i++)
4067 walk_tree (gimple_call_arg_ptr (stmt, i),
4068 chkp_replace_function_pointer, NULL, NULL);
4069 }
4070 else
4071 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4072 }
4073
4074 /* This function instruments all statements working with memory,
4075 calls and returns.
4076
4077 It also removes excess statements from static initializers.  */
4078 static void
4079 chkp_instrument_function (void)
4080 {
4081 basic_block bb, next;
4082 gimple_stmt_iterator i;
4083 enum gimple_rhs_class grhs_class;
4084 bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4085
4086 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4087 do
4088 {
4089 next = bb->next_bb;
4090 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4091 {
4092 gimple s = gsi_stmt (i);
4094 /* Skip statements marked as not to be instrumented.  */
4095 if (chkp_marked_stmt_p (s))
4096 {
4097 gsi_next (&i);
4098 continue;
4099 }
4100
4101 chkp_replace_function_pointers (&i);
4102
4103 switch (gimple_code (s))
4105 case GIMPLE_ASSIGN:
4106 chkp_process_stmt (&i, gimple_assign_lhs (s),
4107 gimple_location (s), integer_one_node,
4108 NULL_TREE, NULL_TREE, safe);
4109 chkp_process_stmt (&i, gimple_assign_rhs1 (s),
4110 gimple_location (s), integer_zero_node,
4111 NULL_TREE, NULL_TREE, safe);
4112 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
4113 if (grhs_class == GIMPLE_BINARY_RHS)
4114 chkp_process_stmt (&i, gimple_assign_rhs2 (s),
4115 gimple_location (s), integer_zero_node,
4116 NULL_TREE, NULL_TREE, safe);
4117 break;
4119 case GIMPLE_RETURN:
4121 greturn *r = as_a <greturn *> (s);
4122 if (gimple_return_retval (r) != NULL_TREE)
4124 chkp_process_stmt (&i, gimple_return_retval (r),
4125 gimple_location (r),
4126 integer_zero_node,
4127 NULL_TREE, NULL_TREE, safe);
4129 /* Additionally we need to add bounds
4130 to the return statement.  */
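/* Illustrative sketch (SSA names invented): a statement such as

     return p_1;

   gets the bounds of p_1 attached as an extra return-bounds operand,
   so instrumented callers can pick them up via the ret_bnd builtin.  */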
4131 chkp_add_bounds_to_ret_stmt (&i);
4132 }
4133 }
4134 break;
4135
4136 case GIMPLE_CALL:
4137 chkp_add_bounds_to_call_stmt (&i);
4138 break;
4140 default:
4141 break;
4142 }
4143
4144 gsi_next (&i);
4146 /* We do not need any actual pointer stores in the checker
4147 static initializer.  */
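/* Illustrative sketch (variable names invented): in a "chkp ctor"
   generated to initialize static bounds, an assignment such as

     global_ptr = &buf;

   is only needed to drive the bounds walk; the bndstx calls it produced
   are kept, while the pointer store itself is deleted below because the
   regular static initializer already performs it.  */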
4148 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4149 && gimple_code (s) == GIMPLE_ASSIGN
4150 && gimple_store_p (s))
4151 {
4152 gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4153 gsi_remove (&del_iter, true);
4154 unlink_stmt_vdef (s);
4155 release_defs (s);
4156 }
4157 }
4158 bb = next;
4159 }
4160 while (bb);
4161
4162 /* Some input parameters may have bounds and have their address taken.
4163 In this case we should store the incoming bounds into the bounds table.  */
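/* Illustrative sketch (names invented): for

     void foo (int *p)   // p has its address taken in the body

   the loop below emits, in the entry block, a bndstx that stores the
   bounds passed for p into the bounds table slot for &p, so later
   bndldx calls that load bounds for *(&p) can find them.  */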
4164 tree arg;
4165 if (flag_chkp_store_bounds)
4166 for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4167 if (TREE_ADDRESSABLE (arg))
4169 if (BOUNDED_P (arg))
4171 tree bounds = chkp_get_next_bounds_parm (arg);
4172 tree def_ptr = ssa_default_def (cfun, arg);
4173 gimple_stmt_iterator iter
4174 = gsi_start_bb (chkp_get_entry_block ());
4175 chkp_build_bndstx (chkp_build_addr_expr (arg),
4176 def_ptr ? def_ptr : arg,
4177 bounds, &iter);
4179 /* Skip bounds arg. */
4180 arg = TREE_CHAIN (arg);
4182 else if (chkp_type_has_pointer (TREE_TYPE (arg)))
4184 tree orig_arg = arg;
4185 bitmap slots = BITMAP_ALLOC (NULL);
4186 gimple_stmt_iterator iter
4187 = gsi_start_bb (chkp_get_entry_block ());
4188 bitmap_iterator bi;
4189 unsigned bnd_no;
4191 chkp_find_bound_slots (TREE_TYPE (arg), slots);
4193 EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4195 tree bounds = chkp_get_next_bounds_parm (arg);
4196 HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4197 tree addr = chkp_build_addr_expr (orig_arg);
4198 tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4199 build_int_cst (ptr_type_node, offs));
4200 chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4201 bounds, &iter);
4202 }
4203 arg = DECL_CHAIN (arg);
4204
4205 BITMAP_FREE (slots);
4206 }
4207 }
4208 }
4209
4210 /* Find init/null/copy/set_ptr_bounds calls and replace them
4211 with assignments.  This should allow better code
4212 optimization.  */
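/* Illustrative sketch (SSA names invented, builtin shown schematically):
   each of these builtins returns its first argument, so a call like

     p_2 = <init_ptr_bounds builtin> (p_1);

   is turned into the plain copy

     p_2 = p_1;

   which later passes can propagate and optimize freely.  */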
4214 static void
4215 chkp_remove_useless_builtins ()
4216 {
4217 basic_block bb;
4218 gimple_stmt_iterator gsi;
4219
4220 FOR_EACH_BB_FN (bb, cfun)
4221 {
4222 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4223 {
4224 gimple stmt = gsi_stmt (gsi);
4225 tree fndecl;
4226 enum built_in_function fcode;
4228 /* Find builtins returning their first argument and replace
4229 them with assignments.  */
4230 if (gimple_code (stmt) == GIMPLE_CALL
4231 && (fndecl = gimple_call_fndecl (stmt))
4232 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4233 && (fcode = DECL_FUNCTION_CODE (fndecl))
4234 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4235 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4236 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4237 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4239 tree res = gimple_call_arg (stmt, 0);
4240 update_call_from_tree (&gsi, res);
4241 stmt = gsi_stmt (gsi);
4242 update_stmt (stmt);
4243 }
4244 }
4245 }
4246 }
4247
4248 /* Initialize pass. */
4249 static void
4250 chkp_init (void)
4251 {
4252 basic_block bb;
4253 gimple_stmt_iterator i;
4254
4255 in_chkp_pass = true;
4256
4257 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4258 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4259 chkp_unmark_stmt (gsi_stmt (i));
4261 chkp_invalid_bounds = new hash_set<tree>;
4262 chkp_completed_bounds_set = new hash_set<tree>;
4263 delete chkp_reg_bounds;
4264 chkp_reg_bounds = new hash_map<tree, tree>;
4265 delete chkp_bound_vars;
4266 chkp_bound_vars = new hash_map<tree, tree>;
4267 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4268 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4269 delete chkp_bounds_map;
4270 chkp_bounds_map = new hash_map<tree, tree>;
4271 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
4273 entry_block = NULL;
4274 zero_bounds = NULL_TREE;
4275 none_bounds = NULL_TREE;
4276 incomplete_bounds = integer_zero_node;
4277 tmp_var = NULL_TREE;
4278 size_tmp_var = NULL_TREE;
4280 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4282 /* We create these constant bounds once for each object file.
4283 These symbols go to a comdat section and result in a single copy
4284 of each one in the final binary.  */
4285 chkp_get_zero_bounds_var ();
4286 chkp_get_none_bounds_var ();
4288 calculate_dominance_info (CDI_DOMINATORS);
4289 calculate_dominance_info (CDI_POST_DOMINATORS);
4291 bitmap_obstack_initialize (NULL);
4292 }
4293
4294 /* Finalize instrumentation pass. */
4295 static void
4296 chkp_fini (void)
4297 {
4298 in_chkp_pass = false;
4299
4300 delete chkp_invalid_bounds;
4301 delete chkp_completed_bounds_set;
4302 delete chkp_reg_addr_bounds;
4303 delete chkp_incomplete_bounds_map;
4304
4305 free_dominance_info (CDI_DOMINATORS);
4306 free_dominance_info (CDI_POST_DOMINATORS);
4307
4308 bitmap_obstack_release (NULL);
4309 }
4310
4311 /* Main instrumentation pass function. */
4312 static unsigned int
4313 chkp_execute (void)
4314 {
4315 chkp_init ();
4316
4317 chkp_instrument_function ();
4318
4319 chkp_remove_useless_builtins ();
4320
4321 chkp_function_mark_instrumented (cfun->decl);
4322
4323 chkp_fix_cfg ();
4324
4325 chkp_fini ();
4326
4327 return 0;
4328 }
4329
4330 /* Instrumentation pass gate. */
4331 static bool
4332 chkp_gate (void)
4333 {
4334 return cgraph_node::get (cfun->decl)->instrumentation_clone
4335 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4336 }
4337
4338 namespace {
4340 const pass_data pass_data_chkp =
4341 {
4342 GIMPLE_PASS, /* type */
4343 "chkp", /* name */
4344 OPTGROUP_NONE, /* optinfo_flags */
4345 TV_NONE, /* tv_id */
4346 PROP_ssa | PROP_cfg, /* properties_required */
4347 0, /* properties_provided */
4348 0, /* properties_destroyed */
4349 0, /* todo_flags_start */
4350 TODO_verify_il
4351 | TODO_update_ssa /* todo_flags_finish */
4352 };
4353
4354 class pass_chkp : public gimple_opt_pass
4355 {
4356 public:
4357 pass_chkp (gcc::context *ctxt)
4358 : gimple_opt_pass (pass_data_chkp, ctxt)
4359 {}
4360
4361 /* opt_pass methods: */
4362 virtual opt_pass * clone ()
4363 {
4364 return new pass_chkp (m_ctxt);
4365 }
4366
4367 virtual bool gate (function *)
4368 {
4369 return chkp_gate ();
4370 }
4371
4372 virtual unsigned int execute (function *)
4373 {
4374 return chkp_execute ();
4375 }
4376
4377 }; // class pass_chkp
4379 } // anon namespace
4381 gimple_opt_pass *
4382 make_pass_chkp (gcc::context *ctxt)
4383 {
4384 return new pass_chkp (ctxt);
4385 }
4386
4387 #include "gt-tree-chkp.h"