Daily bump.
[official-gcc.git] / gcc / tree-chkp.c
blob03f75b35da84d209929d8f89112ab1e48e10abe6
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-set.h"
25 #include "machmode.h"
26 #include "vec.h"
27 #include "double-int.h"
28 #include "input.h"
29 #include "alias.h"
30 #include "symtab.h"
31 #include "options.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stor-layout.h"
37 #include "varasm.h"
38 #include "target.h"
39 #include "tree-iterator.h"
40 #include "tree-cfg.h"
41 #include "langhooks.h"
42 #include "tree-pass.h"
43 #include "diagnostic.h"
44 #include "ggc.h"
45 #include "is-a.h"
46 #include "cfgloop.h"
47 #include "stringpool.h"
48 #include "tree-ssa-alias.h"
49 #include "tree-ssanames.h"
50 #include "tree-ssa-operands.h"
51 #include "tree-ssa-address.h"
52 #include "tree-ssa.h"
53 #include "predict.h"
54 #include "dominance.h"
55 #include "cfg.h"
56 #include "basic-block.h"
57 #include "tree-ssa-loop-niter.h"
58 #include "gimple-expr.h"
59 #include "gimple.h"
60 #include "tree-phinodes.h"
61 #include "gimple-ssa.h"
62 #include "ssa-iterators.h"
63 #include "gimple-pretty-print.h"
64 #include "gimple-iterator.h"
65 #include "gimplify.h"
66 #include "gimplify-me.h"
67 #include "print-tree.h"
68 #include "hashtab.h"
69 #include "tm.h"
70 #include "hard-reg-set.h"
71 #include "function.h"
72 #include "rtl.h"
73 #include "flags.h"
74 #include "statistics.h"
75 #include "real.h"
76 #include "fixed-value.h"
77 #include "insn-config.h"
78 #include "expmed.h"
79 #include "dojump.h"
80 #include "explow.h"
81 #include "calls.h"
82 #include "emit-rtl.h"
83 #include "stmt.h"
84 #include "expr.h"
85 #include "tree-ssa-propagate.h"
86 #include "gimple-fold.h"
87 #include "tree-chkp.h"
88 #include "gimple-walk.h"
89 #include "rtl.h" /* For MEM_P, assign_temp. */
90 #include "tree-dfa.h"
91 #include "ipa-ref.h"
92 #include "lto-streamer.h"
93 #include "cgraph.h"
94 #include "ipa-chkp.h"
95 #include "params.h"
97 /* Pointer Bounds Checker instruments code with memory checks to find
98 out-of-bounds memory accesses. Checks are performed by computing
99 bounds for each pointer and then comparing address of accessed
100 memory before pointer dereferencing.
102 1. Function clones.
104 See ipa-chkp.c.
106 2. Instrumentation.
108 There are few things to instrument:
110 a) Memory accesses - add checker calls to check address of accessed memory
111 against bounds of dereferenced pointer. Obviously safe memory
112 accesses like static variable access does not have to be instrumented
113 with checks.
115 Example:
117 val_2 = *p_1;
119 with 4 bytes access is transformed into:
121 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
122 D.1_4 = p_1 + 3;
123 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
124 val_2 = *p_1;
126 where __bound_tmp.1_3 are bounds computed for pointer p_1,
127 __builtin___chkp_bndcl is a lower bound check and
128 __builtin___chkp_bndcu is an upper bound check.
130 b) Pointer stores.
132 When pointer is stored in memory we need to store its bounds. To
133 achieve compatibility of instrumented code with regular codes
134 we have to keep data layout and store bounds in special bound tables
135 via special checker call. Implementation of bounds table may vary for
136 different platforms. It has to associate pointer value and its
137 location (it is required because we may have two equal pointers
138 with different bounds stored in different places) with bounds.
139 Another checker builtin allows to get bounds for specified pointer
140 loaded from specified location.
142 Example:
144 buf1[i_1] = &buf2;
146 is transformed into:
148 buf1[i_1] = &buf2;
149 D.1_2 = &buf1[i_1];
150 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
152 where __bound_tmp.1_2 are bounds of &buf2.
154 c) Static initialization.
156 The special case of pointer store is static pointer initialization.
157 Bounds initialization is performed in a few steps:
158 - register all static initializations in front-end using
159 chkp_register_var_initializer
160 - when file compilation finishes we create functions with special
161 attribute 'chkp ctor' and put explicit initialization code
162 (assignments) for all statically initialized pointers.
163 - when checker constructor is compiled checker pass adds required
164 bounds initialization for all statically initialized pointers
165 - since we do not actually need excess pointers initialization
166 in checker constructor we remove such assignments from them
168 d) Calls.
170 For each call in the code we add additional arguments to pass
171 bounds for pointer arguments. We determine type of call arguments
172 using arguments list from function declaration; if function
173 declaration is not available we use function type; otherwise
174 (e.g. for unnamed arguments) we use type of passed value. Function
175 declaration/type is replaced with the instrumented one.
177 Example:
179 val_1 = foo (&buf1, &buf2, &buf1, 0);
181 is translated into:
183 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
184 &buf1, __bound_tmp.1_2, 0);
186 e) Returns.
188 If function returns a pointer value we have to return bounds also.
189 A new operand was added for return statement to hold returned bounds.
191 Example:
193 return &_buf1;
195 is transformed into
197 return &_buf1, __bound_tmp.1_1;
199 3. Bounds computation.
201 Compiler is fully responsible for computing bounds to be used for each
202 memory access. The first step for bounds computation is to find the
203 origin of pointer dereferenced for memory access. Basing on pointer
204 origin we define a way to compute its bounds. There are just few
205 possible cases:
207 a) Pointer is returned by call.
209 In this case we use corresponding checker builtin method to obtain returned
210 bounds.
212 Example:
214 buf_1 = malloc (size_2);
215 foo (buf_1);
217 is translated into:
219 buf_1 = malloc (size_2);
220 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
221 foo (buf_1, __bound_tmp.1_3);
223 b) Pointer is an address of an object.
225 In this case compiler tries to compute objects size and create corresponding
226 bounds. If object has incomplete type then special checker builtin is used to
227 obtain its size at runtime.
229 Example:
231 foo ()
233 <unnamed type> __bound_tmp.3;
234 static int buf[100];
236 <bb 3>:
237 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
239 <bb 2>:
240 return &buf, __bound_tmp.3_2;
243 Example:
245 Address of an object 'extern int buf[]' with incomplete type is
246 returned.
248 foo ()
250 <unnamed type> __bound_tmp.4;
251 long unsigned int __size_tmp.3;
253 <bb 3>:
254 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
255 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
257 <bb 2>:
258 return &buf, __bound_tmp.4_3;
261 c) Pointer is the result of object narrowing.
263 It happens when we use pointer to an object to compute pointer to a part
264 of an object. E.g. we take pointer to a field of a structure. In this
265 case we perform bounds intersection using bounds of original object and
266 bounds of object's part (which are computed basing on its type).
268 There may be some debatable questions about when narrowing should occur
269 and when it should not. To avoid false bound violations in correct
270 programs we do not perform narrowing when address of an array element is
271 obtained (it has address of the whole array) and when address of the first
272 structure field is obtained (because it is guaranteed to be equal to
273 address of the whole structure and it is legal to cast it back to structure).
275 Default narrowing behavior may be changed using compiler flags.
277 Example:
279 In this example address of the second structure field is returned.
281 foo (struct A * p, __bounds_type __bounds_of_p)
283 <unnamed type> __bound_tmp.3;
284 int * _2;
285 int * _5;
287 <bb 2>:
288 _5 = &p_1(D)->second_field;
289 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
290 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
291 __bounds_of_p_3(D));
292 _2 = &p_1(D)->second_field;
293 return _2, __bound_tmp.3_8;
296 Example:
298 In this example address of the first field of array element is returned.
300 foo (struct A * p, __bounds_type __bounds_of_p, int i)
302 long unsigned int _3;
303 long unsigned int _4;
304 struct A * _6;
305 int * _7;
307 <bb 2>:
308 _3 = (long unsigned int) i_1(D);
309 _4 = _3 * 8;
310 _6 = p_5(D) + _4;
311 _7 = &_6->first_field;
312 return _7, __bounds_of_p_2(D);
316 d) Pointer is the result of pointer arithmetic or type cast.
318 In this case bounds of the base pointer are used. In case of binary
319 operation producing a pointer we are analyzing data flow further
320 looking for operand's bounds. One operand is considered as a base
321 if it has some valid bounds. If we fall into a case when none of
322 operands (or both of them) has valid bounds, a default bounds value
323 is used.
325 Trying to find out bounds for binary operations we may fall into
326 cyclic dependencies for pointers. To avoid infinite recursion all
327 walked phi nodes instantly obtain corresponding bounds but created
328 bounds are marked as incomplete. It helps us to stop DF walk during
329 bounds search.
331 When we reach pointer source, some args of incomplete bounds phi obtain
332 valid bounds and those values are propagated further through phi nodes.
333 If no valid bounds were found for phi node then we mark its result as
334 invalid bounds. Process stops when all incomplete bounds become either
335 valid or invalid and we are able to choose a pointer base.
337 e) Pointer is loaded from the memory.
339 In this case we just need to load bounds from the bounds table.
341 Example:
343 foo ()
345 <unnamed type> __bound_tmp.3;
346 static int * buf;
347 int * _2;
349 <bb 2>:
350 _2 = buf;
351 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
352 return _2, __bound_tmp.3_4;
357 typedef void (*assign_handler)(tree, tree, void *);
359 static tree chkp_get_zero_bounds ();
360 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
361 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
362 gimple_stmt_iterator *iter);
363 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
364 tree *elt, bool *safe,
365 bool *bitfield,
366 tree *bounds,
367 gimple_stmt_iterator *iter,
368 bool innermost_bounds);
370 #define chkp_bndldx_fndecl \
371 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
372 #define chkp_bndstx_fndecl \
373 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
374 #define chkp_checkl_fndecl \
375 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
376 #define chkp_checku_fndecl \
377 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
378 #define chkp_bndmk_fndecl \
379 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
380 #define chkp_ret_bnd_fndecl \
381 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
382 #define chkp_intersect_fndecl \
383 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
384 #define chkp_narrow_bounds_fndecl \
385 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
386 #define chkp_sizeof_fndecl \
387 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
388 #define chkp_extract_lower_fndecl \
389 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
390 #define chkp_extract_upper_fndecl \
391 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
393 static GTY (()) tree chkp_uintptr_type;
395 static GTY (()) tree chkp_zero_bounds_var;
396 static GTY (()) tree chkp_none_bounds_var;
398 static GTY (()) basic_block entry_block;
399 static GTY (()) tree zero_bounds;
400 static GTY (()) tree none_bounds;
401 static GTY (()) tree incomplete_bounds;
402 static GTY (()) tree tmp_var;
403 static GTY (()) tree size_tmp_var;
404 static GTY (()) bitmap chkp_abnormal_copies;
406 struct hash_set<tree> *chkp_invalid_bounds;
407 struct hash_set<tree> *chkp_completed_bounds_set;
408 struct hash_map<tree, tree> *chkp_reg_bounds;
409 struct hash_map<tree, tree> *chkp_bound_vars;
410 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
411 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
412 struct hash_map<tree, tree> *chkp_bounds_map;
413 struct hash_map<tree, tree> *chkp_static_var_bounds;
415 static bool in_chkp_pass;
417 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
418 #define CHKP_SIZE_TMP_NAME "__size_tmp"
419 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
420 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
421 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
422 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
423 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
425 /* Static checker constructors may become very large and their
426 compilation with optimization may take too much time.
427 Therefore we put a limit to number of statements in one
428 constructor. Tests with 100 000 statically initialized
429 pointers showed following compilation times on Sandy Bridge
430 server (used -O2):
431 limit 100 => ~18 sec.
432 limit 300 => ~22 sec.
433 limit 1000 => ~30 sec.
434 limit 3000 => ~49 sec.
435 limit 5000 => ~55 sec.
436 limit 10000 => ~76 sec.
437 limit 100000 => ~532 sec. */
438 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
440 struct chkp_ctor_stmt_list
442 tree stmts;
443 int avail;
446 /* Return 1 if function FNDECL is instrumented by Pointer
447 Bounds Checker. */
448 bool
449 chkp_function_instrumented_p (tree fndecl)
451 return fndecl
452 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
455 /* Mark function FNDECL as instrumented. */
456 void
457 chkp_function_mark_instrumented (tree fndecl)
459 if (chkp_function_instrumented_p (fndecl))
460 return;
462 DECL_ATTRIBUTES (fndecl)
463 = tree_cons (get_identifier ("chkp instrumented"), NULL,
464 DECL_ATTRIBUTES (fndecl));
467 /* Return true when STMT is builtin call to instrumentation function
468 corresponding to CODE. */
470 bool
471 chkp_gimple_call_builtin_p (gimple call,
472 enum built_in_function code)
474 tree fndecl;
475 if (is_gimple_call (call)
476 && (fndecl = targetm.builtin_chkp_function (code))
477 && gimple_call_fndecl (call) == fndecl)
478 return true;
479 return false;
482 /* Emit code to store zero bounds for PTR located at MEM. */
483 void
484 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
486 tree zero_bnd, bnd, addr, bndstx;
488 if (flag_chkp_use_static_const_bounds)
489 zero_bnd = chkp_get_zero_bounds_var ();
490 else
491 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
492 integer_zero_node);
493 bnd = make_tree (pointer_bounds_type_node,
494 assign_temp (pointer_bounds_type_node, 0, 1));
495 addr = build1 (ADDR_EXPR,
496 build_pointer_type (TREE_TYPE (mem)), mem);
497 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
499 expand_assignment (bnd, zero_bnd, false);
500 expand_normal (bndstx);
503 /* Mark statement S to not be instrumented. */
504 static void
505 chkp_mark_stmt (gimple s)
507 gimple_set_plf (s, GF_PLF_1, true);
510 /* Mark statement S to be instrumented. */
511 static void
512 chkp_unmark_stmt (gimple s)
514 gimple_set_plf (s, GF_PLF_1, false);
517 /* Return 1 if statement S should not be instrumented. */
518 static bool
519 chkp_marked_stmt_p (gimple s)
521 return gimple_plf (s, GF_PLF_1);
524 /* Get var to be used for bound temps. */
525 static tree
526 chkp_get_tmp_var (void)
528 if (!tmp_var)
529 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
531 return tmp_var;
534 /* Get SSA_NAME to be used as temp. */
535 static tree
536 chkp_get_tmp_reg (gimple stmt)
538 if (in_chkp_pass)
539 return make_ssa_name (chkp_get_tmp_var (), stmt);
541 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
542 CHKP_BOUND_TMP_NAME);
545 /* Get var to be used for size temps. */
546 static tree
547 chkp_get_size_tmp_var (void)
549 if (!size_tmp_var)
550 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
552 return size_tmp_var;
555 /* Register bounds BND for address of OBJ. */
556 static void
557 chkp_register_addr_bounds (tree obj, tree bnd)
559 if (bnd == incomplete_bounds)
560 return;
562 chkp_reg_addr_bounds->put (obj, bnd);
564 if (dump_file && (dump_flags & TDF_DETAILS))
566 fprintf (dump_file, "Regsitered bound ");
567 print_generic_expr (dump_file, bnd, 0);
568 fprintf (dump_file, " for address of ");
569 print_generic_expr (dump_file, obj, 0);
570 fprintf (dump_file, "\n");
574 /* Return bounds registered for address of OBJ. */
575 static tree
576 chkp_get_registered_addr_bounds (tree obj)
578 tree *slot = chkp_reg_addr_bounds->get (obj);
579 return slot ? *slot : NULL_TREE;
582 /* Mark BOUNDS as completed. */
583 static void
584 chkp_mark_completed_bounds (tree bounds)
586 chkp_completed_bounds_set->add (bounds);
588 if (dump_file && (dump_flags & TDF_DETAILS))
590 fprintf (dump_file, "Marked bounds ");
591 print_generic_expr (dump_file, bounds, 0);
592 fprintf (dump_file, " as completed\n");
596 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
597 static bool
598 chkp_completed_bounds (tree bounds)
600 return chkp_completed_bounds_set->contains (bounds);
603 /* Clear comleted bound marks. */
604 static void
605 chkp_erase_completed_bounds (void)
607 delete chkp_completed_bounds_set;
608 chkp_completed_bounds_set = new hash_set<tree>;
611 /* Mark BOUNDS associated with PTR as incomplete. */
612 static void
613 chkp_register_incomplete_bounds (tree bounds, tree ptr)
615 chkp_incomplete_bounds_map->put (bounds, ptr);
617 if (dump_file && (dump_flags & TDF_DETAILS))
619 fprintf (dump_file, "Regsitered incomplete bounds ");
620 print_generic_expr (dump_file, bounds, 0);
621 fprintf (dump_file, " for ");
622 print_generic_expr (dump_file, ptr, 0);
623 fprintf (dump_file, "\n");
627 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
628 static bool
629 chkp_incomplete_bounds (tree bounds)
631 if (bounds == incomplete_bounds)
632 return true;
634 if (chkp_completed_bounds (bounds))
635 return false;
637 return chkp_incomplete_bounds_map->get (bounds) != NULL;
640 /* Clear incomleted bound marks. */
641 static void
642 chkp_erase_incomplete_bounds (void)
644 delete chkp_incomplete_bounds_map;
645 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
648 /* Build and return bndmk call which creates bounds for structure
649 pointed by PTR. Structure should have complete type. */
650 tree
651 chkp_make_bounds_for_struct_addr (tree ptr)
653 tree type = TREE_TYPE (ptr);
654 tree size;
656 gcc_assert (POINTER_TYPE_P (type));
658 size = TYPE_SIZE (TREE_TYPE (type));
660 gcc_assert (size);
662 return build_call_nary (pointer_bounds_type_node,
663 build_fold_addr_expr (chkp_bndmk_fndecl),
664 2, ptr, size);
667 /* Traversal function for chkp_may_finish_incomplete_bounds.
668 Set RES to 0 if at least one argument of phi statement
669 defining bounds (passed in KEY arg) is unknown.
670 Traversal stops when first unknown phi argument is found. */
671 bool
672 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
673 bool *res)
675 gimple phi;
676 unsigned i;
678 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
680 phi = SSA_NAME_DEF_STMT (bounds);
682 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
684 for (i = 0; i < gimple_phi_num_args (phi); i++)
686 tree phi_arg = gimple_phi_arg_def (phi, i);
687 if (!phi_arg)
689 *res = false;
690 /* Do not need to traverse further. */
691 return false;
695 return true;
698 /* Return 1 if all phi nodes created for bounds have their
699 arguments computed. */
700 static bool
701 chkp_may_finish_incomplete_bounds (void)
703 bool res = true;
705 chkp_incomplete_bounds_map
706 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
708 return res;
711 /* Helper function for chkp_finish_incomplete_bounds.
712 Recompute args for bounds phi node. */
713 bool
714 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
715 void *res ATTRIBUTE_UNUSED)
717 tree ptr = *slot;
718 gphi *bounds_phi;
719 gphi *ptr_phi;
720 unsigned i;
722 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
723 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
725 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
726 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
728 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
730 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
731 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
733 add_phi_arg (bounds_phi, bound_arg,
734 gimple_phi_arg_edge (ptr_phi, i),
735 UNKNOWN_LOCATION);
738 return true;
741 /* Mark BOUNDS as invalid. */
742 static void
743 chkp_mark_invalid_bounds (tree bounds)
745 chkp_invalid_bounds->add (bounds);
747 if (dump_file && (dump_flags & TDF_DETAILS))
749 fprintf (dump_file, "Marked bounds ");
750 print_generic_expr (dump_file, bounds, 0);
751 fprintf (dump_file, " as invalid\n");
755 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
756 static bool
757 chkp_valid_bounds (tree bounds)
759 if (bounds == zero_bounds || bounds == none_bounds)
760 return false;
762 return !chkp_invalid_bounds->contains (bounds);
765 /* Helper function for chkp_finish_incomplete_bounds.
766 Check all arguments of phi nodes trying to find
767 valid completed bounds. If there is at least one
768 such arg then bounds produced by phi node are marked
769 as valid completed bounds and all phi args are
770 recomputed. */
771 bool
772 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
774 gimple phi;
775 unsigned i;
777 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
779 if (chkp_completed_bounds (bounds))
780 return true;
782 phi = SSA_NAME_DEF_STMT (bounds);
784 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
786 for (i = 0; i < gimple_phi_num_args (phi); i++)
788 tree phi_arg = gimple_phi_arg_def (phi, i);
790 gcc_assert (phi_arg);
792 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
794 *res = true;
795 chkp_mark_completed_bounds (bounds);
796 chkp_recompute_phi_bounds (bounds, slot, NULL);
797 return true;
801 return true;
804 /* Helper function for chkp_finish_incomplete_bounds.
805 Marks all incompleted bounds as invalid. */
806 bool
807 chkp_mark_invalid_bounds_walker (tree const &bounds,
808 tree *slot ATTRIBUTE_UNUSED,
809 void *res ATTRIBUTE_UNUSED)
811 if (!chkp_completed_bounds (bounds))
813 chkp_mark_invalid_bounds (bounds);
814 chkp_mark_completed_bounds (bounds);
816 return true;
819 /* When all bound phi nodes have all their args computed
820 we have enough info to find valid bounds. We iterate
821 through all incompleted bounds searching for valid
822 bounds. Found valid bounds are marked as completed
823 and all remaining incompleted bounds are recomputed.
824 Process continues until no new valid bounds may be
825 found. All remained incompleted bounds are marked as
826 invalid (i.e. have no valid source of bounds). */
827 static void
828 chkp_finish_incomplete_bounds (void)
830 bool found_valid;
832 while (found_valid)
834 found_valid = false;
836 chkp_incomplete_bounds_map->
837 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
839 if (found_valid)
840 chkp_incomplete_bounds_map->
841 traverse<void *, chkp_recompute_phi_bounds> (NULL);
844 chkp_incomplete_bounds_map->
845 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
846 chkp_incomplete_bounds_map->
847 traverse<void *, chkp_recompute_phi_bounds> (NULL);
849 chkp_erase_completed_bounds ();
850 chkp_erase_incomplete_bounds ();
853 /* Return 1 if type TYPE is a pointer type or a
854 structure having a pointer type as one of its fields.
855 Otherwise return 0. */
856 bool
857 chkp_type_has_pointer (const_tree type)
859 bool res = false;
861 if (BOUNDED_TYPE_P (type))
862 res = true;
863 else if (RECORD_OR_UNION_TYPE_P (type))
865 tree field;
867 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
868 if (TREE_CODE (field) == FIELD_DECL)
869 res = res || chkp_type_has_pointer (TREE_TYPE (field));
871 else if (TREE_CODE (type) == ARRAY_TYPE)
872 res = chkp_type_has_pointer (TREE_TYPE (type));
874 return res;
877 unsigned
878 chkp_type_bounds_count (const_tree type)
880 unsigned res = 0;
882 if (!type)
883 res = 0;
884 else if (BOUNDED_TYPE_P (type))
885 res = 1;
886 else if (RECORD_OR_UNION_TYPE_P (type))
888 bitmap have_bound;
890 bitmap_obstack_initialize (NULL);
891 have_bound = BITMAP_ALLOC (NULL);
892 chkp_find_bound_slots (type, have_bound);
893 res = bitmap_count_bits (have_bound);
894 BITMAP_FREE (have_bound);
895 bitmap_obstack_release (NULL);
898 return res;
901 /* Get bounds associated with NODE via
902 chkp_set_bounds call. */
903 tree
904 chkp_get_bounds (tree node)
906 tree *slot;
908 if (!chkp_bounds_map)
909 return NULL_TREE;
911 slot = chkp_bounds_map->get (node);
912 return slot ? *slot : NULL_TREE;
915 /* Associate bounds VAL with NODE. */
916 void
917 chkp_set_bounds (tree node, tree val)
919 if (!chkp_bounds_map)
920 chkp_bounds_map = new hash_map<tree, tree>;
922 chkp_bounds_map->put (node, val);
925 /* Check if statically initialized variable VAR require
926 static bounds initialization. If VAR is added into
927 bounds initlization list then 1 is returned. Otherwise
928 return 0. */
929 extern bool
930 chkp_register_var_initializer (tree var)
932 if (!flag_check_pointer_bounds
933 || DECL_INITIAL (var) == error_mark_node)
934 return false;
936 gcc_assert (TREE_CODE (var) == VAR_DECL);
937 gcc_assert (DECL_INITIAL (var));
939 if (TREE_STATIC (var)
940 && chkp_type_has_pointer (TREE_TYPE (var)))
942 varpool_node::get_create (var)->need_bounds_init = 1;
943 return true;
946 return false;
949 /* Helper function for chkp_finish_file.
951 Add new modification statement (RHS is assigned to LHS)
952 into list of static initializer statementes (passed in ARG).
953 If statements list becomes too big, emit checker constructor
954 and start the new one. */
955 static void
956 chkp_add_modification_to_stmt_list (tree lhs,
957 tree rhs,
958 void *arg)
960 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
961 tree modify;
963 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
964 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
966 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
967 append_to_statement_list (modify, &stmts->stmts);
969 stmts->avail--;
972 /* Build and return ADDR_EXPR for specified object OBJ. */
973 static tree
974 chkp_build_addr_expr (tree obj)
976 return TREE_CODE (obj) == TARGET_MEM_REF
977 ? tree_mem_ref_addr (ptr_type_node, obj)
978 : build_fold_addr_expr (obj);
981 /* Helper function for chkp_finish_file.
982 Initialize bound variable BND_VAR with bounds of variable
983 VAR to statements list STMTS. If statements list becomes
984 too big, emit checker constructor and start the new one. */
985 static void
986 chkp_output_static_bounds (tree bnd_var, tree var,
987 struct chkp_ctor_stmt_list *stmts)
989 tree lb, ub, size;
991 if (TREE_CODE (var) == STRING_CST)
993 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
994 size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
996 else if (DECL_SIZE (var)
997 && !chkp_variable_size_type (TREE_TYPE (var)))
999 /* Compute bounds using statically known size. */
1000 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1001 size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
1003 else
1005 /* Compute bounds using dynamic size. */
1006 tree call;
1008 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1009 call = build1 (ADDR_EXPR,
1010 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
1011 chkp_sizeof_fndecl);
1012 size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
1013 call, 1, var);
1015 if (flag_chkp_zero_dynamic_size_as_infinite)
1017 tree max_size, cond;
1019 max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
1020 cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
1021 size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
1024 size = size_binop (MINUS_EXPR, size, size_one_node);
1027 ub = size_binop (PLUS_EXPR, lb, size);
1028 stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
1029 &stmts->stmts);
1030 if (stmts->avail <= 0)
1032 cgraph_build_static_cdtor ('B', stmts->stmts,
1033 MAX_RESERVED_INIT_PRIORITY + 2);
1034 stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
1035 stmts->stmts = NULL;
1039 /* Return entry block to be used for checker initilization code.
1040 Create new block if required. */
1041 static basic_block
1042 chkp_get_entry_block (void)
1044 if (!entry_block)
1045 entry_block = split_block (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL)->dest;
1047 return entry_block;
1050 /* Return a bounds var to be used for pointer var PTR_VAR. */
1051 static tree
1052 chkp_get_bounds_var (tree ptr_var)
1054 tree bnd_var;
1055 tree *slot;
1057 slot = chkp_bound_vars->get (ptr_var);
1058 if (slot)
1059 bnd_var = *slot;
1060 else
1062 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1063 CHKP_BOUND_TMP_NAME);
1064 chkp_bound_vars->put (ptr_var, bnd_var);
1067 return bnd_var;
1072 /* Register bounds BND for object PTR in global bounds table.
1073 A copy of bounds may be created for abnormal ssa names.
1074 Returns bounds to use for PTR. */
1075 static tree
1076 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1078 bool abnormal_ptr;
1080 if (!chkp_reg_bounds)
1081 return bnd;
1083 /* Do nothing if bounds are incomplete_bounds
1084 because it means bounds will be recomputed. */
1085 if (bnd == incomplete_bounds)
1086 return bnd;
1088 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1089 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1090 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1092 /* A single bounds value may be reused multiple times for
1093 different pointer values. It may cause coalescing issues
1094 for abnormal SSA names. To avoid it we create a bounds
1095 copy in case it is computed for abnormal SSA name.
1097 We also cannot reuse such created copies for other pointers */
1098 if (abnormal_ptr
1099 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1101 tree bnd_var = NULL_TREE;
1103 if (abnormal_ptr)
1105 if (SSA_NAME_VAR (ptr))
1106 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1108 else
1109 bnd_var = chkp_get_tmp_var ();
1111 /* For abnormal copies we may just find original
1112 bounds and use them. */
1113 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1115 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1116 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1117 bnd = gimple_assign_rhs1 (bnd_def);
1119 /* For undefined values we usually use none bounds
1120 value but in case of abnormal edge it may cause
1121 coalescing failures. Use default definition of
1122 bounds variable instead to avoid it. */
1123 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1124 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1126 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1128 if (dump_file && (dump_flags & TDF_DETAILS))
1130 fprintf (dump_file, "Using default def bounds ");
1131 print_generic_expr (dump_file, bnd, 0);
1132 fprintf (dump_file, " for abnormal default def SSA name ");
1133 print_generic_expr (dump_file, ptr, 0);
1134 fprintf (dump_file, "\n");
1137 else
1139 tree copy;
1140 gimple def = SSA_NAME_DEF_STMT (ptr);
1141 gimple assign;
1142 gimple_stmt_iterator gsi;
1144 if (bnd_var)
1145 copy = make_ssa_name (bnd_var, gimple_build_nop ());
1146 else
1147 copy = make_temp_ssa_name (pointer_bounds_type_node,
1148 gimple_build_nop (),
1149 CHKP_BOUND_TMP_NAME);
1150 assign = gimple_build_assign (copy, bnd);
1152 if (dump_file && (dump_flags & TDF_DETAILS))
1154 fprintf (dump_file, "Creating a copy of bounds ");
1155 print_generic_expr (dump_file, bnd, 0);
1156 fprintf (dump_file, " for abnormal SSA name ");
1157 print_generic_expr (dump_file, ptr, 0);
1158 fprintf (dump_file, "\n");
1161 if (gimple_code (def) == GIMPLE_NOP)
1163 gsi = gsi_last_bb (chkp_get_entry_block ());
1164 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1165 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1166 else
1167 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1169 else
1171 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1172 /* Sometimes (e.g. when we load a pointer from a
1173 memory) bounds are produced later than a pointer.
1174 We need to insert bounds copy appropriately. */
1175 if (gimple_code (bnd_def) != GIMPLE_NOP
1176 && stmt_dominates_stmt_p (def, bnd_def))
1177 gsi = gsi_for_stmt (bnd_def);
1178 else
1179 gsi = gsi_for_stmt (def);
1180 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1183 bnd = copy;
1186 if (abnormal_ptr)
1187 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1190 chkp_reg_bounds->put (ptr, bnd);
1192 if (dump_file && (dump_flags & TDF_DETAILS))
1194 fprintf (dump_file, "Regsitered bound ");
1195 print_generic_expr (dump_file, bnd, 0);
1196 fprintf (dump_file, " for pointer ");
1197 print_generic_expr (dump_file, ptr, 0);
1198 fprintf (dump_file, "\n");
1201 return bnd;
1204 /* Get bounds registered for object PTR in global bounds table. */
1205 static tree
1206 chkp_get_registered_bounds (tree ptr)
1208 tree *slot;
1210 if (!chkp_reg_bounds)
1211 return NULL_TREE;
1213 slot = chkp_reg_bounds->get (ptr);
1214 return slot ? *slot : NULL_TREE;
1217 /* Add bound retvals to return statement pointed by GSI. */
1219 static void
1220 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1222 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1223 tree retval = gimple_return_retval (ret);
1224 tree ret_decl = DECL_RESULT (cfun->decl);
1225 tree bounds;
1227 if (!retval)
1228 return;
1230 if (BOUNDED_P (ret_decl))
1232 bounds = chkp_find_bounds (retval, gsi);
1233 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1234 gimple_return_set_retbnd (ret, bounds);
1237 update_stmt (ret);
1240 /* Force OP to be suitable for using as an argument for call.
1241 New statements (if any) go to SEQ. */
1242 static tree
1243 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1245 gimple_seq stmts;
1246 gimple_stmt_iterator si;
1248 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1250 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1251 chkp_mark_stmt (gsi_stmt (si));
1253 gimple_seq_add_seq (seq, stmts);
1255 return op;
1258 /* Generate lower bound check for memory access by ADDR.
1259 Check is inserted before the position pointed by ITER.
1260 DIRFLAG indicates whether memory access is load or store. */
1261 static void
1262 chkp_check_lower (tree addr, tree bounds,
1263 gimple_stmt_iterator iter,
1264 location_t location,
1265 tree dirflag)
1267 gimple_seq seq;
1268 gimple check;
1269 tree node;
1271 if (!chkp_function_instrumented_p (current_function_decl)
1272 && bounds == chkp_get_zero_bounds ())
1273 return;
1275 if (dirflag == integer_zero_node
1276 && !flag_chkp_check_read)
1277 return;
1279 if (dirflag == integer_one_node
1280 && !flag_chkp_check_write)
1281 return;
1283 seq = NULL;
1285 node = chkp_force_gimple_call_op (addr, &seq);
1287 check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1288 chkp_mark_stmt (check);
1289 gimple_call_set_with_bounds (check, true);
1290 gimple_set_location (check, location);
1291 gimple_seq_add_stmt (&seq, check);
1293 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1295 if (dump_file && (dump_flags & TDF_DETAILS))
1297 gimple before = gsi_stmt (iter);
1298 fprintf (dump_file, "Generated lower bound check for statement ");
1299 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1300 fprintf (dump_file, " ");
1301 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1305 /* Generate upper bound check for memory access by ADDR.
1306 Check is inserted before the position pointed by ITER.
1307 DIRFLAG indicates whether memory access is load or store. */
1308 static void
1309 chkp_check_upper (tree addr, tree bounds,
1310 gimple_stmt_iterator iter,
1311 location_t location,
1312 tree dirflag)
1314 gimple_seq seq;
1315 gimple check;
1316 tree node;
1318 if (!chkp_function_instrumented_p (current_function_decl)
1319 && bounds == chkp_get_zero_bounds ())
1320 return;
1322 if (dirflag == integer_zero_node
1323 && !flag_chkp_check_read)
1324 return;
1326 if (dirflag == integer_one_node
1327 && !flag_chkp_check_write)
1328 return;
1330 seq = NULL;
1332 node = chkp_force_gimple_call_op (addr, &seq);
1334 check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1335 chkp_mark_stmt (check);
1336 gimple_call_set_with_bounds (check, true);
1337 gimple_set_location (check, location);
1338 gimple_seq_add_stmt (&seq, check);
1340 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1342 if (dump_file && (dump_flags & TDF_DETAILS))
1344 gimple before = gsi_stmt (iter);
1345 fprintf (dump_file, "Generated upper bound check for statement ");
1346 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1347 fprintf (dump_file, " ");
1348 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1352 /* Generate lower and upper bound checks for memory access
1353 to memory slot [FIRST, LAST] againsr BOUNDS. Checks
1354 are inserted before the position pointed by ITER.
1355 DIRFLAG indicates whether memory access is load or store. */
1356 void
1357 chkp_check_mem_access (tree first, tree last, tree bounds,
1358 gimple_stmt_iterator iter,
1359 location_t location,
1360 tree dirflag)
1362 chkp_check_lower (first, bounds, iter, location, dirflag);
1363 chkp_check_upper (last, bounds, iter, location, dirflag);
1366 /* Replace call to _bnd_chk_* pointed by GSI with
1367 bndcu and bndcl calls. DIRFLAG determines whether
1368 check is for read or write. */
1370 void
1371 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1372 tree dirflag)
1374 gimple_stmt_iterator call_iter = *gsi;
1375 gimple call = gsi_stmt (*gsi);
1376 tree fndecl = gimple_call_fndecl (call);
1377 tree addr = gimple_call_arg (call, 0);
1378 tree bounds = chkp_find_bounds (addr, gsi);
1380 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1381 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1382 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1384 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1385 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1387 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1389 tree size = gimple_call_arg (call, 1);
1390 addr = fold_build_pointer_plus (addr, size);
1391 addr = fold_build_pointer_plus_hwi (addr, -1);
1392 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1395 gsi_remove (&call_iter, true);
1398 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1399 corresponding bounds extract call. */
1401 void
1402 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1404 gimple call = gsi_stmt (*gsi);
1405 tree fndecl = gimple_call_fndecl (call);
1406 tree addr = gimple_call_arg (call, 0);
1407 tree bounds = chkp_find_bounds (addr, gsi);
1408 gimple extract;
1410 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1411 fndecl = chkp_extract_lower_fndecl;
1412 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1413 fndecl = chkp_extract_upper_fndecl;
1414 else
1415 gcc_unreachable ();
1417 extract = gimple_build_call (fndecl, 1, bounds);
1418 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1419 chkp_mark_stmt (extract);
1421 gsi_replace (gsi, extract, false);
1424 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1425 static tree
1426 chkp_build_component_ref (tree obj, tree field)
1428 tree res;
1430 /* If object is TMR then we do not use component_ref but
1431 add offset instead. We need it to be able to get addr
1432 of the reasult later. */
1433 if (TREE_CODE (obj) == TARGET_MEM_REF)
1435 tree offs = TMR_OFFSET (obj);
1436 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1437 offs, DECL_FIELD_OFFSET (field));
1439 gcc_assert (offs);
1441 res = copy_node (obj);
1442 TREE_TYPE (res) = TREE_TYPE (field);
1443 TMR_OFFSET (res) = offs;
1445 else
1446 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1448 return res;
1451 /* Return ARRAY_REF for array ARR and index IDX with
1452 specified element type ETYPE and element size ESIZE. */
1453 static tree
1454 chkp_build_array_ref (tree arr, tree etype, tree esize,
1455 unsigned HOST_WIDE_INT idx)
1457 tree index = build_int_cst (size_type_node, idx);
1458 tree res;
1460 /* If object is TMR then we do not use array_ref but
1461 add offset instead. We need it to be able to get addr
1462 of the reasult later. */
1463 if (TREE_CODE (arr) == TARGET_MEM_REF)
1465 tree offs = TMR_OFFSET (arr);
1467 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1468 esize, index);
1469 gcc_assert(esize);
1471 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1472 offs, esize);
1473 gcc_assert (offs);
1475 res = copy_node (arr);
1476 TREE_TYPE (res) = etype;
1477 TMR_OFFSET (res) = offs;
1479 else
1480 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1482 return res;
1485 /* Helper function for chkp_add_bounds_to_call_stmt.
1486 Fill ALL_BOUNDS output array with created bounds.
1488 OFFS is used for recursive calls and holds basic
1489 offset of TYPE in outer structure in bits.
1491 ITER points a position where bounds are searched.
1493 ALL_BOUNDS[i] is filled with elem bounds if there
1494 is a field in TYPE which has pointer type and offset
1495 equal to i * POINTER_SIZE in bits. */
1496 static void
1497 chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
1498 HOST_WIDE_INT offs,
1499 gimple_stmt_iterator *iter)
1501 tree type = TREE_TYPE (elem);
1503 if (BOUNDED_TYPE_P (type))
1505 if (!all_bounds[offs / POINTER_SIZE])
1507 tree temp = make_temp_ssa_name (type, gimple_build_nop (), "");
1508 gimple assign = gimple_build_assign (temp, elem);
1509 gimple_stmt_iterator gsi;
1511 gsi_insert_before (iter, assign, GSI_SAME_STMT);
1512 gsi = gsi_for_stmt (assign);
1514 all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
1517 else if (RECORD_OR_UNION_TYPE_P (type))
1519 tree field;
1521 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1522 if (TREE_CODE (field) == FIELD_DECL)
1524 tree base = unshare_expr (elem);
1525 tree field_ref = chkp_build_component_ref (base, field);
1526 HOST_WIDE_INT field_offs
1527 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1528 if (DECL_FIELD_OFFSET (field))
1529 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1531 chkp_find_bounds_for_elem (field_ref, all_bounds,
1532 offs + field_offs, iter);
1535 else if (TREE_CODE (type) == ARRAY_TYPE)
1537 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1538 tree etype = TREE_TYPE (type);
1539 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1540 unsigned HOST_WIDE_INT cur;
1542 if (!maxval || integer_minus_onep (maxval))
1543 return;
1545 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1547 tree base = unshare_expr (elem);
1548 tree arr_elem = chkp_build_array_ref (base, etype,
1549 TYPE_SIZE (etype),
1550 cur);
1551 chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
1552 iter);
1557 /* Fill HAVE_BOUND output bitmap with information about
1558 bounds requred for object of type TYPE.
1560 OFFS is used for recursive calls and holds basic
1561 offset of TYPE in outer structure in bits.
1563 HAVE_BOUND[i] is set to 1 if there is a field
1564 in TYPE which has pointer type and offset
1565 equal to i * POINTER_SIZE - OFFS in bits. */
1566 void
1567 chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
1568 HOST_WIDE_INT offs)
1570 if (BOUNDED_TYPE_P (type))
1571 bitmap_set_bit (have_bound, offs / POINTER_SIZE);
1572 else if (RECORD_OR_UNION_TYPE_P (type))
1574 tree field;
1576 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1577 if (TREE_CODE (field) == FIELD_DECL)
1579 HOST_WIDE_INT field_offs
1580 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1581 if (DECL_FIELD_OFFSET (field))
1582 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1583 chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
1584 offs + field_offs);
1587 else if (TREE_CODE (type) == ARRAY_TYPE)
1589 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1590 tree etype = TREE_TYPE (type);
1591 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1592 unsigned HOST_WIDE_INT cur;
1594 if (!maxval
1595 || TREE_CODE (maxval) != INTEGER_CST
1596 || integer_minus_onep (maxval))
1597 return;
1599 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1600 chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
1604 /* Fill bitmap RES with information about bounds for
1605 type TYPE. See chkp_find_bound_slots_1 for more
1606 details. */
1607 void
1608 chkp_find_bound_slots (const_tree type, bitmap res)
1610 bitmap_clear (res);
1611 chkp_find_bound_slots_1 (type, res, 0);
1614 /* Return 1 if call to FNDECL should be instrumented
1615 and 0 otherwise. */
1617 static bool
1618 chkp_instrument_normal_builtin (tree fndecl)
1620 switch (DECL_FUNCTION_CODE (fndecl))
1622 case BUILT_IN_STRLEN:
1623 case BUILT_IN_STRCPY:
1624 case BUILT_IN_STRNCPY:
1625 case BUILT_IN_STPCPY:
1626 case BUILT_IN_STPNCPY:
1627 case BUILT_IN_STRCAT:
1628 case BUILT_IN_STRNCAT:
1629 case BUILT_IN_MEMCPY:
1630 case BUILT_IN_MEMPCPY:
1631 case BUILT_IN_MEMSET:
1632 case BUILT_IN_MEMMOVE:
1633 case BUILT_IN_BZERO:
1634 case BUILT_IN_STRCMP:
1635 case BUILT_IN_STRNCMP:
1636 case BUILT_IN_BCMP:
1637 case BUILT_IN_MEMCMP:
1638 case BUILT_IN_MEMCPY_CHK:
1639 case BUILT_IN_MEMPCPY_CHK:
1640 case BUILT_IN_MEMMOVE_CHK:
1641 case BUILT_IN_MEMSET_CHK:
1642 case BUILT_IN_STRCPY_CHK:
1643 case BUILT_IN_STRNCPY_CHK:
1644 case BUILT_IN_STPCPY_CHK:
1645 case BUILT_IN_STPNCPY_CHK:
1646 case BUILT_IN_STRCAT_CHK:
1647 case BUILT_IN_STRNCAT_CHK:
1648 case BUILT_IN_MALLOC:
1649 case BUILT_IN_CALLOC:
1650 case BUILT_IN_REALLOC:
1651 return 1;
1653 default:
1654 return 0;
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */
static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  Each pointer arg is followed by
     its bounds; aggregates with pointer members get one bounds arg
     per contained pointer slot.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    /* Past the end of a (...) prototype: fall back to the
	       actual argument type.  */
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  /* Aggregate passed by value: collect bounds for every
	     pointer slot it contains.  */
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds were added, reuse the original statement.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA defs of the old statement at the new one
	 before replacing it.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
1876 /* Return constant static bounds var with specified LB and UB
1877 if such var exists in varpool. Return NULL otherwise. */
1878 static tree
1879 chkp_find_const_bounds_var (HOST_WIDE_INT lb,
1880 HOST_WIDE_INT ub)
1882 tree val = targetm.chkp_make_bounds_constant (lb, ub);
1883 struct varpool_node *node;
1885 /* We expect bounds constant is represented as a complex value
1886 of two pointer sized integers. */
1887 gcc_assert (TREE_CODE (val) == COMPLEX_CST);
1889 FOR_EACH_VARIABLE (node)
1890 if (POINTER_BOUNDS_P (node->decl)
1891 && TREE_READONLY (node->decl)
1892 && DECL_INITIAL (node->decl)
1893 && TREE_CODE (DECL_INITIAL (node->decl)) == COMPLEX_CST
1894 && tree_int_cst_equal (TREE_REALPART (DECL_INITIAL (node->decl)),
1895 TREE_REALPART (val))
1896 && tree_int_cst_equal (TREE_IMAGPART (DECL_INITIAL (node->decl)),
1897 TREE_IMAGPART (val)))
1898 return node->decl;
1900 return NULL;
1903 /* Return constant static bounds var with specified bounds LB and UB.
1904 If such var does not exists then new var is created with specified NAME. */
1905 static tree
1906 chkp_make_static_const_bounds (HOST_WIDE_INT lb,
1907 HOST_WIDE_INT ub,
1908 const char *name)
1910 tree var;
1912 /* With LTO we may have constant bounds already in varpool.
1913 Try to find it. */
1914 var = chkp_find_const_bounds_var (lb, ub);
1916 if (var)
1917 return var;
1919 var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
1920 get_identifier (name), pointer_bounds_type_node);
1922 TREE_PUBLIC (var) = 1;
1923 TREE_USED (var) = 1;
1924 TREE_READONLY (var) = 1;
1925 TREE_STATIC (var) = 1;
1926 TREE_ADDRESSABLE (var) = 0;
1927 DECL_ARTIFICIAL (var) = 1;
1928 DECL_READ_P (var) = 1;
1929 /* We may use this symbol during ctors generation in chkp_finish_file
1930 when all symbols are emitted. Force output to avoid undefined
1931 symbols in ctors. */
1932 if (!in_lto_p)
1934 DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
1935 DECL_COMDAT (var) = 1;
1936 varpool_node::get_create (var)->set_comdat_group (DECL_ASSEMBLER_NAME (var));
1937 varpool_node::get_create (var)->force_output = 1;
1939 else
1940 DECL_EXTERNAL (var) = 1;
1941 varpool_node::finalize_decl (var);
1943 return var;
/* Generate code to make bounds with specified lower bound LB and SIZE.
   if AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* LB and SIZE may be arbitrary trees; gimplify them into valid
     call operands, collecting helper statements into SEQ.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  /* Build the __chkp_bndmk (lb, size) call producing the bounds.  */
  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER only applies when a real insertion point was given;
     the entry-block fallback always inserts before.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
1999 /* Return var holding zero bounds. */
2000 tree
2001 chkp_get_zero_bounds_var (void)
2003 if (!chkp_zero_bounds_var)
2005 tree id = get_identifier (CHKP_ZERO_BOUNDS_VAR_NAME);
2006 symtab_node *node = symtab_node::get_for_asmname (id);
2007 if (node)
2008 chkp_zero_bounds_var = node->decl;
2011 if (!chkp_zero_bounds_var)
2012 chkp_zero_bounds_var
2013 = chkp_make_static_const_bounds (0, -1,
2014 CHKP_ZERO_BOUNDS_VAR_NAME);
2015 return chkp_zero_bounds_var;
2018 /* Return var holding none bounds. */
2019 tree
2020 chkp_get_none_bounds_var (void)
2022 if (!chkp_none_bounds_var)
2024 tree id = get_identifier (CHKP_NONE_BOUNDS_VAR_NAME);
2025 symtab_node *node = symtab_node::get_for_asmname (id);
2026 if (node)
2027 chkp_none_bounds_var = node->decl;
2030 if (!chkp_none_bounds_var)
2031 chkp_none_bounds_var
2032 = chkp_make_static_const_bounds (-1, 0,
2033 CHKP_NONE_BOUNDS_VAR_NAME);
2034 return chkp_none_bounds_var;
2037 /* Return SSA_NAME used to represent zero bounds. */
2038 static tree
2039 chkp_get_zero_bounds (void)
2041 if (zero_bounds)
2042 return zero_bounds;
2044 if (dump_file && (dump_flags & TDF_DETAILS))
2045 fprintf (dump_file, "Creating zero bounds...");
2047 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2048 || flag_chkp_use_static_const_bounds > 0)
2050 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2051 gimple stmt;
2053 zero_bounds = chkp_get_tmp_reg (gimple_build_nop ());
2054 stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
2055 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2057 else
2058 zero_bounds = chkp_make_bounds (integer_zero_node,
2059 integer_zero_node,
2060 NULL,
2061 false);
2063 return zero_bounds;
2066 /* Return SSA_NAME used to represent none bounds. */
2067 static tree
2068 chkp_get_none_bounds (void)
2070 if (none_bounds)
2071 return none_bounds;
2073 if (dump_file && (dump_flags & TDF_DETAILS))
2074 fprintf (dump_file, "Creating none bounds...");
2077 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2078 || flag_chkp_use_static_const_bounds > 0)
2080 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2081 gimple stmt;
2083 none_bounds = chkp_get_tmp_reg (gimple_build_nop ());
2084 stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
2085 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2087 else
2088 none_bounds = chkp_make_bounds (integer_minus_one_node,
2089 build_int_cst (size_type_node, 2),
2090 NULL,
2091 false);
2093 return none_bounds;
2096 /* Return bounds to be used as a result of operation which
2097 should not create poiunter (e.g. MULT_EXPR). */
2098 static tree
2099 chkp_get_invalid_op_bounds (void)
2101 return chkp_get_zero_bounds ();
2104 /* Return bounds to be used for loads of non-pointer values. */
2105 static tree
2106 chkp_get_nonpointer_load_bounds (void)
2108 return chkp_get_zero_bounds ();
2111 /* Return 1 if may use bndret call to get bounds for pointer
2112 returned by CALL. */
2113 static bool
2114 chkp_call_returns_bounds_p (gcall *call)
2116 if (gimple_call_internal_p (call))
2117 return false;
2119 if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
2120 || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
2121 return true;
2123 if (gimple_call_with_bounds_p (call))
2124 return true;
2126 tree fndecl = gimple_call_fndecl (call);
2128 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
2129 return false;
2131 if (fndecl && !chkp_instrumentable_p (fndecl))
2132 return false;
2134 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2136 if (chkp_instrument_normal_builtin (fndecl))
2137 return true;
2139 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
2140 return false;
2142 struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
2143 return (clone && gimple_has_body_p (clone->decl));
2146 return true;
/* Build bounds returned by CALL.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      /* alloca returns a block of its size argument starting at
	 its own return value, so bounds are [lhs, lhs + size).  */
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* RETARG counts original (non-bounds) arguments; skip the
	     interleaved bounds args to find the actual index.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call))
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2256 /* Return bounds used as returned by call
2257 which produced SSA name VAL. */
2258 gcall *
2259 chkp_retbnd_call_by_val (tree val)
2261 if (TREE_CODE (val) != SSA_NAME)
2262 return NULL;
2264 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2266 imm_use_iterator use_iter;
2267 use_operand_p use_p;
2268 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2269 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2270 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2271 return as_a <gcall *> (USE_STMT (use_p));
2273 return NULL;
2276 /* Check the next parameter for the given PARM is bounds
2277 and return it's default SSA_NAME (create if required). */
2278 static tree
2279 chkp_get_next_bounds_parm (tree parm)
2281 tree bounds = TREE_CHAIN (parm);
2282 gcc_assert (POINTER_BOUNDS_P (bounds));
2283 bounds = ssa_default_def (cfun, bounds);
2284 if (!bounds)
2286 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2287 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2289 return bounds;
2292 /* Return bounds to be used for input argument PARM. */
2293 static tree
2294 chkp_get_bound_for_parm (tree parm)
2296 tree decl = SSA_NAME_VAR (parm);
2297 tree bounds;
2299 gcc_assert (TREE_CODE (decl) == PARM_DECL);
2301 bounds = chkp_get_registered_bounds (parm);
2303 if (!bounds)
2304 bounds = chkp_get_registered_bounds (decl);
2306 if (!bounds)
2308 tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;
2310 /* For static chain param we return zero bounds
2311 because currently we do not check dereferences
2312 of this pointer. */
2313 if (cfun->static_chain_decl == decl)
2314 bounds = chkp_get_zero_bounds ();
2315 /* If non instrumented runtime is used then it may be useful
2316 to use zero bounds for input arguments of main
2317 function. */
2318 else if (flag_chkp_zero_input_bounds_for_main
2319 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
2320 "main") == 0)
2321 bounds = chkp_get_zero_bounds ();
2322 else if (BOUNDED_P (parm))
2324 bounds = chkp_get_next_bounds_parm (decl);
2325 bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);
2327 if (dump_file && (dump_flags & TDF_DETAILS))
2329 fprintf (dump_file, "Built arg bounds (");
2330 print_generic_expr (dump_file, bounds, 0);
2331 fprintf (dump_file, ") for arg: ");
2332 print_node (dump_file, "", decl, 0);
2335 else
2336 bounds = chkp_get_zero_bounds ();
2339 if (!chkp_get_registered_bounds (parm))
2340 bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);
2342 if (dump_file && (dump_flags & TDF_DETAILS))
2344 fprintf (dump_file, "Using bounds ");
2345 print_generic_expr (dump_file, bounds, 0);
2346 fprintf (dump_file, " for parm ");
2347 print_generic_expr (dump_file, parm, 0);
2348 fprintf (dump_file, " of type ");
2349 print_generic_expr (dump_file, TREE_TYPE (parm), 0);
2350 fprintf (dump_file, ".\n");
2353 return bounds;
2356 /* Build and return CALL_EXPR for bndstx builtin with specified
2357 arguments. */
2358 tree
2359 chkp_build_bndldx_call (tree addr, tree ptr)
2361 tree fn = build1 (ADDR_EXPR,
2362 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2363 chkp_bndldx_fndecl);
2364 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2365 fn, 2, addr, ptr);
2366 CALL_WITH_BOUNDS_P (call) = true;
2367 return call;
2370 /* Insert code to load bounds for PTR located by ADDR.
2371 Code is inserted after position pointed by GSI.
2372 Loaded bounds are returned. */
2373 static tree
2374 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2376 gimple_seq seq;
2377 gimple stmt;
2378 tree bounds;
2380 seq = NULL;
2382 addr = chkp_force_gimple_call_op (addr, &seq);
2383 ptr = chkp_force_gimple_call_op (ptr, &seq);
2385 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2386 chkp_mark_stmt (stmt);
2387 bounds = chkp_get_tmp_reg (stmt);
2388 gimple_call_set_lhs (stmt, bounds);
2390 gimple_seq_add_stmt (&seq, stmt);
2392 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2394 if (dump_file && (dump_flags & TDF_DETAILS))
2396 fprintf (dump_file, "Generated bndldx for pointer ");
2397 print_generic_expr (dump_file, ptr, 0);
2398 fprintf (dump_file, ": ");
2399 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2402 return bounds;
2405 /* Build and return CALL_EXPR for bndstx builtin with specified
2406 arguments. */
2407 tree
2408 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2410 tree fn = build1 (ADDR_EXPR,
2411 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2412 chkp_bndstx_fndecl);
2413 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2414 fn, 3, ptr, bounds, addr);
2415 CALL_WITH_BOUNDS_P (call) = true;
2416 return call;
2419 /* Insert code to store BOUNDS for PTR stored by ADDR.
2420 New statements are inserted after position pointed
2421 by GSI. */
2422 void
2423 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2424 gimple_stmt_iterator *gsi)
2426 gimple_seq seq;
2427 gimple stmt;
2429 seq = NULL;
2431 addr = chkp_force_gimple_call_op (addr, &seq);
2432 ptr = chkp_force_gimple_call_op (ptr, &seq);
2434 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2435 chkp_mark_stmt (stmt);
2436 gimple_call_set_with_bounds (stmt, true);
2438 gimple_seq_add_stmt (&seq, stmt);
2440 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2442 if (dump_file && (dump_flags & TDF_DETAILS))
2444 fprintf (dump_file, "Generated bndstx for pointer store ");
2445 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2446 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2450 /* Compute bounds for pointer NODE which was assigned in
2451 assignment statement ASSIGN. Return computed bounds. */
2452 static tree
2453 chkp_compute_bounds_for_assignment (tree node, gimple assign)
2455 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
2456 tree rhs1 = gimple_assign_rhs1 (assign);
2457 tree bounds = NULL_TREE;
2458 gimple_stmt_iterator iter = gsi_for_stmt (assign);
2460 if (dump_file && (dump_flags & TDF_DETAILS))
2462 fprintf (dump_file, "Computing bounds for assignment: ");
2463 print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
2466 switch (rhs_code)
2468 case MEM_REF:
2469 case TARGET_MEM_REF:
2470 case COMPONENT_REF:
2471 case ARRAY_REF:
2472 /* We need to load bounds from the bounds table. */
2473 bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
2474 break;
2476 case VAR_DECL:
2477 case SSA_NAME:
2478 case ADDR_EXPR:
2479 case POINTER_PLUS_EXPR:
2480 case NOP_EXPR:
2481 case CONVERT_EXPR:
2482 case INTEGER_CST:
2483 /* Bounds are just propagated from RHS. */
2484 bounds = chkp_find_bounds (rhs1, &iter);
2485 break;
2487 case VIEW_CONVERT_EXPR:
2488 /* Bounds are just propagated from RHS. */
2489 bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
2490 break;
2492 case PARM_DECL:
2493 if (BOUNDED_P (rhs1))
2495 /* We need to load bounds from the bounds table. */
2496 bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
2497 node, &iter);
2498 TREE_ADDRESSABLE (rhs1) = 1;
2500 else
2501 bounds = chkp_get_nonpointer_load_bounds ();
2502 break;
2504 case MINUS_EXPR:
2505 case PLUS_EXPR:
2506 case BIT_AND_EXPR:
2507 case BIT_IOR_EXPR:
2508 case BIT_XOR_EXPR:
2510 tree rhs2 = gimple_assign_rhs2 (assign);
2511 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2512 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2514 /* First we try to check types of operands. If it
2515 does not help then look at bound values.
2517 If some bounds are incomplete and other are
2518 not proven to be valid (i.e. also incomplete
2519 or invalid because value is not pointer) then
2520 resulting value is incomplete and will be
2521 recomputed later in chkp_finish_incomplete_bounds. */
2522 if (BOUNDED_P (rhs1)
2523 && !BOUNDED_P (rhs2))
2524 bounds = bnd1;
2525 else if (BOUNDED_P (rhs2)
2526 && !BOUNDED_P (rhs1)
2527 && rhs_code != MINUS_EXPR)
2528 bounds = bnd2;
2529 else if (chkp_incomplete_bounds (bnd1))
2530 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
2531 && !chkp_incomplete_bounds (bnd2))
2532 bounds = bnd2;
2533 else
2534 bounds = incomplete_bounds;
2535 else if (chkp_incomplete_bounds (bnd2))
2536 if (chkp_valid_bounds (bnd1)
2537 && !chkp_incomplete_bounds (bnd1))
2538 bounds = bnd1;
2539 else
2540 bounds = incomplete_bounds;
2541 else if (!chkp_valid_bounds (bnd1))
2542 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
2543 bounds = bnd2;
2544 else if (bnd2 == chkp_get_zero_bounds ())
2545 bounds = bnd2;
2546 else
2547 bounds = bnd1;
2548 else if (!chkp_valid_bounds (bnd2))
2549 bounds = bnd1;
2550 else
2551 /* Seems both operands may have valid bounds
2552 (e.g. pointer minus pointer). In such case
2553 use default invalid op bounds. */
2554 bounds = chkp_get_invalid_op_bounds ();
2556 break;
2558 case BIT_NOT_EXPR:
2559 case NEGATE_EXPR:
2560 case LSHIFT_EXPR:
2561 case RSHIFT_EXPR:
2562 case LROTATE_EXPR:
2563 case RROTATE_EXPR:
2564 case EQ_EXPR:
2565 case NE_EXPR:
2566 case LT_EXPR:
2567 case LE_EXPR:
2568 case GT_EXPR:
2569 case GE_EXPR:
2570 case MULT_EXPR:
2571 case RDIV_EXPR:
2572 case TRUNC_DIV_EXPR:
2573 case FLOOR_DIV_EXPR:
2574 case CEIL_DIV_EXPR:
2575 case ROUND_DIV_EXPR:
2576 case TRUNC_MOD_EXPR:
2577 case FLOOR_MOD_EXPR:
2578 case CEIL_MOD_EXPR:
2579 case ROUND_MOD_EXPR:
2580 case EXACT_DIV_EXPR:
2581 case FIX_TRUNC_EXPR:
2582 case FLOAT_EXPR:
2583 case REALPART_EXPR:
2584 case IMAGPART_EXPR:
2585 /* No valid bounds may be produced by these exprs. */
2586 bounds = chkp_get_invalid_op_bounds ();
2587 break;
2589 case COND_EXPR:
2591 tree val1 = gimple_assign_rhs2 (assign);
2592 tree val2 = gimple_assign_rhs3 (assign);
2593 tree bnd1 = chkp_find_bounds (val1, &iter);
2594 tree bnd2 = chkp_find_bounds (val2, &iter);
2595 gimple stmt;
2597 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2598 bounds = incomplete_bounds;
2599 else if (bnd1 == bnd2)
2600 bounds = bnd1;
2601 else
2603 rhs1 = unshare_expr (rhs1);
2605 bounds = chkp_get_tmp_reg (assign);
2606 stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
2607 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2609 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2610 chkp_mark_invalid_bounds (bounds);
2613 break;
2615 case MAX_EXPR:
2616 case MIN_EXPR:
2618 tree rhs2 = gimple_assign_rhs2 (assign);
2619 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2620 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2622 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2623 bounds = incomplete_bounds;
2624 else if (bnd1 == bnd2)
2625 bounds = bnd1;
2626 else
2628 gimple stmt;
2629 tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
2630 boolean_type_node, rhs1, rhs2);
2631 bounds = chkp_get_tmp_reg (assign);
2632 stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);
2634 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2636 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2637 chkp_mark_invalid_bounds (bounds);
2640 break;
2642 default:
2643 bounds = chkp_get_zero_bounds ();
2644 warning (0, "pointer bounds were lost due to unexpected expression %s",
2645 get_tree_code_name (rhs_code));
2648 gcc_assert (bounds);
2650 if (node)
2651 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2653 return bounds;
2656 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2658 There are just few statement codes allowed: NOP (for default ssa names),
2659 ASSIGN, CALL, PHI, ASM.
2661 Return computed bounds. */
2662 static tree
2663 chkp_get_bounds_by_definition (tree node, gimple def_stmt,
2664 gphi_iterator *iter)
2666 tree var, bounds;
2667 enum gimple_code code = gimple_code (def_stmt);
2668 gphi *stmt;
2670 if (dump_file && (dump_flags & TDF_DETAILS))
2672 fprintf (dump_file, "Searching for bounds for node: ");
2673 print_generic_expr (dump_file, node, 0);
2675 fprintf (dump_file, " using its definition: ");
2676 print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2679 switch (code)
2681 case GIMPLE_NOP:
2682 var = SSA_NAME_VAR (node);
2683 switch (TREE_CODE (var))
2685 case PARM_DECL:
2686 bounds = chkp_get_bound_for_parm (node);
2687 break;
2689 case VAR_DECL:
2690 /* For uninitialized pointers use none bounds. */
2691 bounds = chkp_get_none_bounds ();
2692 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2693 break;
2695 case RESULT_DECL:
2697 tree base_type;
2699 gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2701 base_type = TREE_TYPE (TREE_TYPE (node));
2703 gcc_assert (TYPE_SIZE (base_type)
2704 && TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2705 && tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2707 bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2708 NULL, false);
2709 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2711 break;
2713 default:
2714 if (dump_file && (dump_flags & TDF_DETAILS))
2716 fprintf (dump_file, "Unexpected var with no definition\n");
2717 print_generic_expr (dump_file, var, 0);
2719 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2720 get_tree_code_name (TREE_CODE (var)));
2722 break;
2724 case GIMPLE_ASSIGN:
2725 bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2726 break;
2728 case GIMPLE_CALL:
2729 bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
2730 break;
2732 case GIMPLE_PHI:
2733 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
2734 if (SSA_NAME_VAR (node))
2735 var = chkp_get_bounds_var (SSA_NAME_VAR (node));
2736 else
2737 var = make_temp_ssa_name (pointer_bounds_type_node,
2738 gimple_build_nop (),
2739 CHKP_BOUND_TMP_NAME);
2740 else
2741 var = chkp_get_tmp_var ();
2742 stmt = create_phi_node (var, gimple_bb (def_stmt));
2743 bounds = gimple_phi_result (stmt);
2744 *iter = gsi_for_phi (stmt);
2746 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2748 /* Created bounds do not have all phi args computed and
2749 therefore we do not know if there is a valid source
2750 of bounds for that node. Therefore we mark bounds
2751 as incomplete and then recompute them when all phi
2752 args are computed. */
2753 chkp_register_incomplete_bounds (bounds, node);
2754 break;
2756 case GIMPLE_ASM:
2757 bounds = chkp_get_zero_bounds ();
2758 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2759 break;
2761 default:
2762 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2763 gimple_code_name[code]);
2766 return bounds;
2769 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2770 tree
2771 chkp_build_make_bounds_call (tree lower_bound, tree size)
2773 tree call = build1 (ADDR_EXPR,
2774 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2775 chkp_bndmk_fndecl);
2776 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2777 call, 2, lower_bound, size);
2780 /* Create static bounds var of specfified OBJ which is
2781 is either VAR_DECL or string constant. */
2782 static tree
2783 chkp_make_static_bounds (tree obj)
2785 static int string_id = 1;
2786 static int var_id = 1;
2787 tree *slot;
2788 const char *var_name;
2789 char *bnd_var_name;
2790 tree bnd_var;
2792 /* First check if we already have required var. */
2793 if (chkp_static_var_bounds)
2795 /* For vars we use assembler name as a key in
2796 chkp_static_var_bounds map. It allows to
2797 avoid duplicating bound vars for decls
2798 sharing assembler name. */
2799 if (TREE_CODE (obj) == VAR_DECL)
2801 tree name = DECL_ASSEMBLER_NAME (obj);
2802 slot = chkp_static_var_bounds->get (name);
2803 if (slot)
2804 return *slot;
2806 else
2808 slot = chkp_static_var_bounds->get (obj);
2809 if (slot)
2810 return *slot;
2814 /* Build decl for bounds var. */
2815 if (TREE_CODE (obj) == VAR_DECL)
2817 if (DECL_IGNORED_P (obj))
2819 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2820 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2822 else
2824 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2826 /* For hidden symbols we want to skip first '*' char. */
2827 if (*var_name == '*')
2828 var_name++;
2830 bnd_var_name = (char *) xmalloc (strlen (var_name)
2831 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2832 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2833 strcat (bnd_var_name, var_name);
2836 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2837 get_identifier (bnd_var_name),
2838 pointer_bounds_type_node);
2840 /* Address of the obj will be used as lower bound. */
2841 TREE_ADDRESSABLE (obj) = 1;
2843 else
2845 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2846 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2848 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2849 get_identifier (bnd_var_name),
2850 pointer_bounds_type_node);
2853 TREE_PUBLIC (bnd_var) = 0;
2854 TREE_USED (bnd_var) = 1;
2855 TREE_READONLY (bnd_var) = 0;
2856 TREE_STATIC (bnd_var) = 1;
2857 TREE_ADDRESSABLE (bnd_var) = 0;
2858 DECL_ARTIFICIAL (bnd_var) = 1;
2859 DECL_COMMON (bnd_var) = 1;
2860 DECL_COMDAT (bnd_var) = 1;
2861 DECL_READ_P (bnd_var) = 1;
2862 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2863 /* Force output similar to constant bounds.
2864 See chkp_make_static_const_bounds. */
2865 varpool_node::get_create (bnd_var)->force_output = 1;
2866 /* Mark symbol as requiring bounds initialization. */
2867 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2868 varpool_node::finalize_decl (bnd_var);
2870 /* Add created var to the map to use it for other references
2871 to obj. */
2872 if (!chkp_static_var_bounds)
2873 chkp_static_var_bounds = new hash_map<tree, tree>;
2875 if (TREE_CODE (obj) == VAR_DECL)
2877 tree name = DECL_ASSEMBLER_NAME (obj);
2878 chkp_static_var_bounds->put (name, bnd_var);
2880 else
2881 chkp_static_var_bounds->put (obj, bnd_var);
2883 return bnd_var;
2886 /* When var has incomplete type we cannot get size to
2887 compute its bounds. In such cases we use checker
2888 builtin call which determines object size at runtime. */
2889 static tree
2890 chkp_generate_extern_var_bounds (tree var)
2892 tree bounds, size_reloc, lb, size, max_size, cond;
2893 gimple_stmt_iterator gsi;
2894 gimple_seq seq = NULL;
2895 gimple stmt;
2897 /* If instrumentation is not enabled for vars having
2898 incomplete type then just return zero bounds to avoid
2899 checks for this var. */
2900 if (!flag_chkp_incomplete_type)
2901 return chkp_get_zero_bounds ();
2903 if (dump_file && (dump_flags & TDF_DETAILS))
2905 fprintf (dump_file, "Generating bounds for extern symbol '");
2906 print_generic_expr (dump_file, var, 0);
2907 fprintf (dump_file, "'\n");
2910 stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
2912 size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
2913 gimple_call_set_lhs (stmt, size_reloc);
2915 gimple_seq_add_stmt (&seq, stmt);
2917 lb = chkp_build_addr_expr (var);
2918 size = make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());
2920 if (flag_chkp_zero_dynamic_size_as_infinite)
2922 /* We should check that size relocation was resolved.
2923 If it was not then use maximum possible size for the var. */
2924 max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
2925 fold_convert (chkp_uintptr_type, lb));
2926 max_size = chkp_force_gimple_call_op (max_size, &seq);
2928 cond = build2 (NE_EXPR, boolean_type_node,
2929 size_reloc, integer_zero_node);
2930 stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
2931 gimple_seq_add_stmt (&seq, stmt);
2933 else
2935 stmt = gimple_build_assign (size, size_reloc);
2936 gimple_seq_add_stmt (&seq, stmt);
2939 gsi = gsi_start_bb (chkp_get_entry_block ());
2940 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2942 bounds = chkp_make_bounds (lb, size, &gsi, true);
2944 return bounds;
2947 /* Return 1 if TYPE has fields with zero size or fields
2948 marked with chkp_variable_size attribute. */
2949 bool
2950 chkp_variable_size_type (tree type)
2952 bool res = false;
2953 tree field;
2955 if (RECORD_OR_UNION_TYPE_P (type))
2956 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2958 if (TREE_CODE (field) == FIELD_DECL)
2959 res = res
2960 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
2961 || chkp_variable_size_type (TREE_TYPE (field));
2963 else
2964 res = !TYPE_SIZE (type)
2965 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
2966 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
2968 return res;
2971 /* Compute and return bounds for address of DECL which is
2972 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
2973 static tree
2974 chkp_get_bounds_for_decl_addr (tree decl)
2976 tree bounds;
2978 gcc_assert (TREE_CODE (decl) == VAR_DECL
2979 || TREE_CODE (decl) == PARM_DECL
2980 || TREE_CODE (decl) == RESULT_DECL);
2982 bounds = chkp_get_registered_addr_bounds (decl);
2984 if (bounds)
2985 return bounds;
2987 if (dump_file && (dump_flags & TDF_DETAILS))
2989 fprintf (dump_file, "Building bounds for address of decl ");
2990 print_generic_expr (dump_file, decl, 0);
2991 fprintf (dump_file, "\n");
2994 /* Use zero bounds if size is unknown and checks for
2995 unknown sizes are restricted. */
2996 if ((!DECL_SIZE (decl)
2997 || (chkp_variable_size_type (TREE_TYPE (decl))
2998 && (TREE_STATIC (decl)
2999 || DECL_EXTERNAL (decl)
3000 || TREE_PUBLIC (decl))))
3001 && !flag_chkp_incomplete_type)
3002 return chkp_get_zero_bounds ();
3004 if (flag_chkp_use_static_bounds
3005 && TREE_CODE (decl) == VAR_DECL
3006 && (TREE_STATIC (decl)
3007 || DECL_EXTERNAL (decl)
3008 || TREE_PUBLIC (decl))
3009 && !DECL_THREAD_LOCAL_P (decl))
3011 tree bnd_var = chkp_make_static_bounds (decl);
3012 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3013 gimple stmt;
3015 bounds = chkp_get_tmp_reg (gimple_build_nop ());
3016 stmt = gimple_build_assign (bounds, bnd_var);
3017 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3019 else if (!DECL_SIZE (decl)
3020 || (chkp_variable_size_type (TREE_TYPE (decl))
3021 && (TREE_STATIC (decl)
3022 || DECL_EXTERNAL (decl)
3023 || TREE_PUBLIC (decl))))
3025 gcc_assert (TREE_CODE (decl) == VAR_DECL);
3026 bounds = chkp_generate_extern_var_bounds (decl);
3028 else
3030 tree lb = chkp_build_addr_expr (decl);
3031 bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3034 return bounds;
3037 /* Compute and return bounds for constant string. */
3038 static tree
3039 chkp_get_bounds_for_string_cst (tree cst)
3041 tree bounds;
3042 tree lb;
3043 tree size;
3045 gcc_assert (TREE_CODE (cst) == STRING_CST);
3047 bounds = chkp_get_registered_bounds (cst);
3049 if (bounds)
3050 return bounds;
3052 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3053 || flag_chkp_use_static_const_bounds > 0)
3055 tree bnd_var = chkp_make_static_bounds (cst);
3056 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3057 gimple stmt;
3059 bounds = chkp_get_tmp_reg (gimple_build_nop ());
3060 stmt = gimple_build_assign (bounds, bnd_var);
3061 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3063 else
3065 lb = chkp_build_addr_expr (cst);
3066 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3067 bounds = chkp_make_bounds (lb, size, NULL, false);
3070 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3072 return bounds;
3075 /* Generate code to instersect bounds BOUNDS1 and BOUNDS2 and
3076 return the result. if ITER is not NULL then Code is inserted
3077 before position pointed by ITER. Otherwise code is added to
3078 entry block. */
3079 static tree
3080 chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
3082 if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
3083 return bounds2 ? bounds2 : bounds1;
3084 else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
3085 return bounds1;
3086 else
3088 gimple_seq seq;
3089 gimple stmt;
3090 tree bounds;
3092 seq = NULL;
3094 stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
3095 chkp_mark_stmt (stmt);
3097 bounds = chkp_get_tmp_reg (stmt);
3098 gimple_call_set_lhs (stmt, bounds);
3100 gimple_seq_add_stmt (&seq, stmt);
3102 /* We are probably doing narrowing for constant expression.
3103 In such case iter may be undefined. */
3104 if (!iter)
3106 gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
3107 iter = &gsi;
3108 gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
3110 else
3111 gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
3113 if (dump_file && (dump_flags & TDF_DETAILS))
3115 fprintf (dump_file, "Bounds intersection: ");
3116 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
3117 fprintf (dump_file, " inserted before statement: ");
3118 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
3119 TDF_VOPS|TDF_MEMSYMS);
3122 return bounds;
3126 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3127 and 0 othersize. */
3128 static bool
3129 chkp_may_narrow_to_field (tree field)
3131 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3132 && tree_to_uhwi (DECL_SIZE (field)) != 0
3133 && (!DECL_FIELD_OFFSET (field)
3134 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3135 && (!DECL_FIELD_BIT_OFFSET (field)
3136 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3137 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3138 && !chkp_variable_size_type (TREE_TYPE (field));
3141 /* Return 1 if bounds for FIELD should be narrowed to
3142 field's own size. */
3143 static bool
3144 chkp_narrow_bounds_for_field (tree field)
3146 HOST_WIDE_INT offs;
3147 HOST_WIDE_INT bit_offs;
3149 if (!chkp_may_narrow_to_field (field))
3150 return false;
3152 /* Accesse to compiler generated fields should not cause
3153 bounds narrowing. */
3154 if (DECL_ARTIFICIAL (field))
3155 return false;
3157 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3158 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3160 return (flag_chkp_narrow_bounds
3161 && (flag_chkp_first_field_has_own_bounds
3162 || offs
3163 || bit_offs));
3166 /* Perform narrowing for BOUNDS using bounds computed for field
3167 access COMPONENT. ITER meaning is the same as for
3168 chkp_intersect_bounds. */
3169 static tree
3170 chkp_narrow_bounds_to_field (tree bounds, tree component,
3171 gimple_stmt_iterator *iter)
3173 tree field = TREE_OPERAND (component, 1);
3174 tree size = DECL_SIZE_UNIT (field);
3175 tree field_ptr = chkp_build_addr_expr (component);
3176 tree field_bounds;
3178 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3180 return chkp_intersect_bounds (field_bounds, bounds, iter);
3183 /* Parse field or array access NODE.
3185 PTR ouput parameter holds a pointer to the outermost
3186 object.
3188 BITFIELD output parameter is set to 1 if bitfield is
3189 accessed and to 0 otherwise. If it is 1 then ELT holds
3190 outer component for accessed bit field.
3192 SAFE outer parameter is set to 1 if access is safe and
3193 checks are not required.
3195 BOUNDS outer parameter holds bounds to be used to check
3196 access (may be NULL).
3198 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3199 innermost accessed component. */
3200 static void
3201 chkp_parse_array_and_component_ref (tree node, tree *ptr,
3202 tree *elt, bool *safe,
3203 bool *bitfield,
3204 tree *bounds,
3205 gimple_stmt_iterator *iter,
3206 bool innermost_bounds)
3208 tree comp_to_narrow = NULL_TREE;
3209 tree last_comp = NULL_TREE;
3210 bool array_ref_found = false;
3211 tree *nodes;
3212 tree var;
3213 int len;
3214 int i;
3216 /* Compute tree height for expression. */
3217 var = node;
3218 len = 1;
3219 while (TREE_CODE (var) == COMPONENT_REF
3220 || TREE_CODE (var) == ARRAY_REF
3221 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
3223 var = TREE_OPERAND (var, 0);
3224 len++;
3227 gcc_assert (len > 1);
3229 /* It is more convenient for us to scan left-to-right,
3230 so walk tree again and put all node to nodes vector
3231 in reversed order. */
3232 nodes = XALLOCAVEC (tree, len);
3233 nodes[len - 1] = node;
3234 for (i = len - 2; i >= 0; i--)
3235 nodes[i] = TREE_OPERAND (nodes[i + 1], 0);
3237 if (bounds)
3238 *bounds = NULL;
3239 *safe = true;
3240 *bitfield = (TREE_CODE (node) == COMPONENT_REF
3241 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
3242 /* To get bitfield address we will need outer elemnt. */
3243 if (*bitfield)
3244 *elt = nodes[len - 2];
3245 else
3246 *elt = NULL_TREE;
3248 /* If we have indirection in expression then compute
3249 outermost structure bounds. Computed bounds may be
3250 narrowed later. */
3251 if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
3253 *safe = false;
3254 *ptr = TREE_OPERAND (nodes[0], 0);
3255 if (bounds)
3256 *bounds = chkp_find_bounds (*ptr, iter);
3258 else
3260 gcc_assert (TREE_CODE (var) == VAR_DECL
3261 || TREE_CODE (var) == PARM_DECL
3262 || TREE_CODE (var) == RESULT_DECL
3263 || TREE_CODE (var) == STRING_CST
3264 || TREE_CODE (var) == SSA_NAME);
3266 *ptr = chkp_build_addr_expr (var);
3269 /* In this loop we are trying to find a field access
3270 requiring narrowing. There are two simple rules
3271 for search:
3272 1. Leftmost array_ref is chosen if any.
3273 2. Rightmost suitable component_ref is chosen if innermost
3274 bounds are required and no array_ref exists. */
3275 for (i = 1; i < len; i++)
3277 var = nodes[i];
3279 if (TREE_CODE (var) == ARRAY_REF)
3281 *safe = false;
3282 array_ref_found = true;
3283 if (flag_chkp_narrow_bounds
3284 && !flag_chkp_narrow_to_innermost_arrray
3285 && (!last_comp
3286 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
3288 comp_to_narrow = last_comp;
3289 break;
3292 else if (TREE_CODE (var) == COMPONENT_REF)
3294 tree field = TREE_OPERAND (var, 1);
3296 if (innermost_bounds
3297 && !array_ref_found
3298 && chkp_narrow_bounds_for_field (field))
3299 comp_to_narrow = var;
3300 last_comp = var;
3302 if (flag_chkp_narrow_bounds
3303 && flag_chkp_narrow_to_innermost_arrray
3304 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
3306 if (bounds)
3307 *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
3308 comp_to_narrow = NULL;
3311 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
3312 /* Nothing to do for it. */
3314 else
3315 gcc_unreachable ();
3318 if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
3319 *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);
3321 if (innermost_bounds && bounds && !*bounds)
3322 *bounds = chkp_find_bounds (*ptr, iter);
/* Compute and return bounds for address of OBJ.

   Result is memoized via the registered-address-bounds map: a hit
   returns the cached bounds, and any freshly computed bounds are
   registered before returning so repeated queries for the same
   object are cheap.  New statements, if needed, are inserted at
   position ITER.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  /* Cached result from a previous call for the same object.  */
  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
        tree elt;
        tree ptr;
        bool safe;
        bool bitfield;

        /* Parse the reference chain; the last argument (true) requests
           innermost bounds, so BOUNDS is guaranteed to be set.  */
        chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
                                            &bitfield, &bounds, iter, true);

        gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Code addresses get zero bounds — they are never valid targets
         for data dereference checks.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* Address of *p has the same bounds as p itself.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Bounds of a complex component are the bounds of the whole
         complex object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "chkp_make_addressed_object_bounds: "
                   "unexpected object of type %s\n",
                   get_tree_code_name (TREE_CODE (obj)));
          print_node (dump_file, "", obj, 0);
        }
      internal_error ("chkp_make_addressed_object_bounds: "
                      "Unexpected tree code %s",
                      get_tree_code_name (TREE_CODE (obj)));
    }

  /* Memoize for subsequent queries.  */
  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3393 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3394 to compute bounds if required. Computed bounds should be available at
3395 position pointed by ITER.
3397 If PTR_SRC is NULL_TREE then pointer definition is identified.
3399 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3400 PTR. If PTR is a any memory reference then ITER points to a statement
3401 after which bndldx will be inserterd. In both cases ITER will be updated
3402 to point to the inserted bndldx statement. */
3404 static tree
3405 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3407 tree addr = NULL_TREE;
3408 tree bounds = NULL_TREE;
3410 if (!ptr_src)
3411 ptr_src = ptr;
3413 bounds = chkp_get_registered_bounds (ptr_src);
3415 if (bounds)
3416 return bounds;
3418 switch (TREE_CODE (ptr_src))
3420 case MEM_REF:
3421 case VAR_DECL:
3422 if (BOUNDED_P (ptr_src))
3423 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3424 bounds = chkp_get_zero_bounds ();
3425 else
3427 addr = chkp_build_addr_expr (ptr_src);
3428 bounds = chkp_build_bndldx (addr, ptr, iter);
3430 else
3431 bounds = chkp_get_nonpointer_load_bounds ();
3432 break;
3434 case ARRAY_REF:
3435 case COMPONENT_REF:
3436 addr = get_base_address (ptr_src);
3437 if (DECL_P (addr)
3438 || TREE_CODE (addr) == MEM_REF
3439 || TREE_CODE (addr) == TARGET_MEM_REF)
3441 if (BOUNDED_P (ptr_src))
3442 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3443 bounds = chkp_get_zero_bounds ();
3444 else
3446 addr = chkp_build_addr_expr (ptr_src);
3447 bounds = chkp_build_bndldx (addr, ptr, iter);
3449 else
3450 bounds = chkp_get_nonpointer_load_bounds ();
3452 else
3454 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3455 bounds = chkp_find_bounds (addr, iter);
3457 break;
3459 case PARM_DECL:
3460 gcc_unreachable ();
3461 bounds = chkp_get_bound_for_parm (ptr_src);
3462 break;
3464 case TARGET_MEM_REF:
3465 addr = chkp_build_addr_expr (ptr_src);
3466 bounds = chkp_build_bndldx (addr, ptr, iter);
3467 break;
3469 case SSA_NAME:
3470 bounds = chkp_get_registered_bounds (ptr_src);
3471 if (!bounds)
3473 gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3474 gphi_iterator phi_iter;
3476 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3478 gcc_assert (bounds);
3480 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3482 unsigned i;
3484 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3486 tree arg = gimple_phi_arg_def (def_phi, i);
3487 tree arg_bnd;
3488 gphi *phi_bnd;
3490 arg_bnd = chkp_find_bounds (arg, NULL);
3492 /* chkp_get_bounds_by_definition created new phi
3493 statement and phi_iter points to it.
3495 Previous call to chkp_find_bounds could create
3496 new basic block and therefore change phi statement
3497 phi_iter points to. */
3498 phi_bnd = phi_iter.phi ();
3500 add_phi_arg (phi_bnd, arg_bnd,
3501 gimple_phi_arg_edge (def_phi, i),
3502 UNKNOWN_LOCATION);
3505 /* If all bound phi nodes have their arg computed
3506 then we may finish its computation. See
3507 chkp_finish_incomplete_bounds for more details. */
3508 if (chkp_may_finish_incomplete_bounds ())
3509 chkp_finish_incomplete_bounds ();
3512 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3513 || chkp_incomplete_bounds (bounds));
3515 break;
3517 case ADDR_EXPR:
3518 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3519 break;
3521 case INTEGER_CST:
3522 if (integer_zerop (ptr_src))
3523 bounds = chkp_get_none_bounds ();
3524 else
3525 bounds = chkp_get_invalid_op_bounds ();
3526 break;
3528 default:
3529 if (dump_file && (dump_flags & TDF_DETAILS))
3531 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3532 get_tree_code_name (TREE_CODE (ptr_src)));
3533 print_node (dump_file, "", ptr_src, 0);
3535 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3536 get_tree_code_name (TREE_CODE (ptr_src)));
3539 if (!bounds)
3541 if (dump_file && (dump_flags & TDF_DETAILS))
3543 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3544 print_node (dump_file, "", ptr_src, 0);
3546 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3549 return bounds;
/* Normal case for bounds search without forced narrowing.
   Convenience wrapper over chkp_find_bounds_1 with no separate
   load source (PTR_SRC = NULL_TREE, so PTR's own definition is
   used to derive bounds).  */
static tree
chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
}
/* Search bounds for pointer PTR loaded from PTR_SRC
   by statement *ITER points to.
   Thin wrapper over chkp_find_bounds_1 that forwards the explicit
   load source.  */
static tree
chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, ptr_src, iter);
}
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build it's accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.

   Recurses structurally: bounded (pointer) types invoke HANDLER
   directly; records/unions recurse into pointer-holding fields;
   arrays recurse into elements.  ARG is passed through to HANDLER
   unchanged.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
                               assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    /* LHS is itself a pointer — hand the pair to the handler.  */
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
        {
          /* RHS is an aggregate initializer: walk only the fields it
             explicitly initializes.  */
          unsigned HOST_WIDE_INT cnt;
          tree val;

          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
            {
              if (chkp_type_has_pointer (TREE_TYPE (field)))
                {
                  tree lhs_field = chkp_build_component_ref (lhs, field);
                  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
                }
            }
        }
      else
        /* Struct-to-struct copy: pair up corresponding fields that can
           contain pointers.  */
        for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
          if (TREE_CODE (field) == FIELD_DECL
              && chkp_type_has_pointer (TREE_TYPE (field)))
            {
              tree rhs_field = chkp_build_component_ref (rhs, field);
              tree lhs_field = chkp_build_component_ref (lhs, field);
              chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
            }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
        {
          unsigned HOST_WIDE_INT cnt;
          tree purp, val, lhs_elem;

          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
            {
              if (purp && TREE_CODE (purp) == RANGE_EXPR)
                {
                  /* [lo ... hi] = val initializer: apply VAL to every
                     index in the range.  */
                  tree lo_index = TREE_OPERAND (purp, 0);
                  tree hi_index = TREE_OPERAND (purp, 1);

                  for (cur = (unsigned)tree_to_uhwi (lo_index);
                       cur <= (unsigned)tree_to_uhwi (hi_index);
                       cur++)
                    {
                      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
                      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
                    }
                }
              else
                {
                  /* An explicit index restarts the running position;
                     otherwise elements fill consecutively.  */
                  if (purp)
                    {
                      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
                      cur = tree_to_uhwi (purp);
                    }

                  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

                  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
                }
            }
        }
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
        for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
          {
            tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
            tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
            chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
          }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
                   get_tree_code_name (TREE_CODE (type)));
}
/* Add code to copy bounds for assignment of RHS to LHS.
   ARG is an iterator pointing to the code position where the
   bndldx/bndstx statements are inserted.  Used as an assign_handler
   callback for chkp_walk_pointer_assignments.  */
static void
chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
{
  gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
  /* Look up (or generate) bounds of the copied pointer, then store
     them for the destination address.  */
  tree bounds = chkp_find_bounds (rhs, iter);
  tree addr = chkp_build_addr_expr(lhs);

  chkp_build_bndstx (addr, rhs, bounds, iter);
}
/* Emit static bound initializers and size vars.

   Called at end of compilation.  Builds 'P' constructors that store
   bounds for statically initialized pointers and 'B' constructors
   that initialize static bounds variables, batching at most
   MAX_STMTS_IN_STATIC_CHKP_CTOR statements per constructor.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  /* Do not emit anything for erroneous compilations.  */
  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
        && !POINTER_BOUNDS_P (node->decl)
        && DECL_RTL (node->decl)
        && MEM_P (DECL_RTL (node->decl))
        && TREE_ASM_WRITTEN (node->decl))
      {
        chkp_walk_pointer_assignments (node->decl,
                                       DECL_INITIAL (node->decl),
                                       &stmts,
                                       chkp_add_modification_to_stmt_list);

        /* Flush a full batch into its own constructor and start
           a new one.  */
        if (stmts.avail <= 0)
          {
            cgraph_build_static_cdtor ('P', stmts.stmts,
                                       MAX_RESERVED_INIT_PRIORITY + 3);
            stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
            stmts.stmts = NULL;
          }
      }

  /* Emit the remaining (partial) batch, if any.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
                               MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
        && POINTER_BOUNDS_P (node->decl)
        && TREE_ASM_WRITTEN (node->decl))
      {
        tree bnd = node->decl;
        tree var;

        /* A static bounds var is always initialized with the address
           of the var it holds bounds for.  */
        gcc_assert (DECL_INITIAL (bnd)
                    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

        var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
        chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
                               MAX_RESERVED_INIT_PRIORITY + 2);

  /* Release per-compilation maps.  */
  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
3746 /* An instrumentation function which is called for each statement
3747 having memory access we want to instrument. It inserts check
3748 code and bounds copy code.
3750 ITER points to statement to instrument.
3752 NODE holds memory access in statement to check.
3754 LOC holds the location information for statement.
3756 DIRFLAGS determines whether access is read or write.
3758 ACCESS_OFFS should be added to address used in NODE
3759 before check.
3761 ACCESS_SIZE holds size of checked access.
3763 SAFE indicates if NODE access is safe and should not be
3764 checked. */
3765 static void
3766 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3767 location_t loc, tree dirflag,
3768 tree access_offs, tree access_size,
3769 bool safe)
3771 tree node_type = TREE_TYPE (node);
3772 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3773 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3774 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3775 tree ptr = NULL_TREE; /* a pointer used for dereference */
3776 tree bounds = NULL_TREE;
3778 /* We do not need instrumentation for clobbers. */
3779 if (dirflag == integer_one_node
3780 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3781 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3782 return;
3784 switch (TREE_CODE (node))
3786 case ARRAY_REF:
3787 case COMPONENT_REF:
3789 bool bitfield;
3790 tree elt;
3792 if (safe)
3794 /* We are not going to generate any checks, so do not
3795 generate bounds as well. */
3796 addr_first = chkp_build_addr_expr (node);
3797 break;
3800 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3801 &bitfield, &bounds, iter, false);
3803 /* Break if there is no dereference and operation is safe. */
3805 if (bitfield)
3807 tree field = TREE_OPERAND (node, 1);
3809 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3810 size = DECL_SIZE_UNIT (field);
3812 if (elt)
3813 elt = chkp_build_addr_expr (elt);
3814 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3815 addr_first = fold_build_pointer_plus_loc (loc,
3816 addr_first,
3817 byte_position (field));
3819 else
3820 addr_first = chkp_build_addr_expr (node);
3822 break;
3824 case INDIRECT_REF:
3825 ptr = TREE_OPERAND (node, 0);
3826 addr_first = ptr;
3827 break;
3829 case MEM_REF:
3830 ptr = TREE_OPERAND (node, 0);
3831 addr_first = chkp_build_addr_expr (node);
3832 break;
3834 case TARGET_MEM_REF:
3835 ptr = TMR_BASE (node);
3836 addr_first = chkp_build_addr_expr (node);
3837 break;
3839 case ARRAY_RANGE_REF:
3840 printf("ARRAY_RANGE_REF\n");
3841 debug_gimple_stmt(gsi_stmt(*iter));
3842 debug_tree(node);
3843 gcc_unreachable ();
3844 break;
3846 case BIT_FIELD_REF:
3848 tree offs, rem, bpu;
3850 gcc_assert (!access_offs);
3851 gcc_assert (!access_size);
3853 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3854 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3855 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3856 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3858 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3859 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3860 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3861 size = fold_convert (size_type_node, size);
3863 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3864 dirflag, offs, size, safe);
3865 return;
3867 break;
3869 case VAR_DECL:
3870 case RESULT_DECL:
3871 case PARM_DECL:
3872 if (dirflag != integer_one_node
3873 || DECL_REGISTER (node))
3874 return;
3876 safe = true;
3877 addr_first = chkp_build_addr_expr (node);
3878 break;
3880 default:
3881 return;
3884 /* If addr_last was not computed then use (addr_first + size - 1)
3885 expression to compute it. */
3886 if (!addr_last)
3888 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3889 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3892 /* Shift both first_addr and last_addr by access_offs if specified. */
3893 if (access_offs)
3895 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3896 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3899 /* Generate bndcl/bndcu checks if memory access is not safe. */
3900 if (!safe)
3902 gimple_stmt_iterator stmt_iter = *iter;
3904 if (!bounds)
3905 bounds = chkp_find_bounds (ptr, iter);
3907 chkp_check_mem_access (addr_first, addr_last, bounds,
3908 stmt_iter, loc, dirflag);
3911 /* We need to store bounds in case pointer is stored. */
3912 if (dirflag == integer_one_node
3913 && chkp_type_has_pointer (node_type)
3914 && flag_chkp_store_bounds)
3916 gimple stmt = gsi_stmt (*iter);
3917 tree rhs1 = gimple_assign_rhs1 (stmt);
3918 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3920 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3921 chkp_walk_pointer_assignments (node, rhs1, iter,
3922 chkp_copy_bounds_for_elem);
3923 else
3925 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3926 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.

   After inserting the bndldx/bndstx calls, walks backwards from
   ASSIGN and creates call-graph edges for every new call so the
   cgraph stays consistent.  */
void
chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
        {
          tree fndecl = gimple_call_fndecl (stmt);
          struct cgraph_node *callee = cgraph_node::get_create (fndecl);
          struct cgraph_edge *new_edge;

          /* Only the checker runtime calls may have been inserted here.  */
          gcc_assert (fndecl == chkp_bndstx_fndecl
                      || fndecl == chkp_bndldx_fndecl
                      || fndecl == chkp_ret_bnd_fndecl);

          /* Inherit count/frequency from the inlined edge, then refine
             the frequency from the statement's actual basic block.  */
          new_edge = edge->caller->create_edge (callee,
                                                as_a <gcall *> (stmt),
                                                edge->count,
                                                edge->frequency);
          new_edge->frequency = compute_call_stmt_bb_frequency
            (edge->caller->decl, gimple_bb (stmt));
        }
      gsi_prev (&iter);
    }
}
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws.

   Specifically: statements inserted after a block-ending statement
   are moved onto the fallthru edge.  Abnormal fallthru edges are
   temporarily made regular for the split and re-created afterwards.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
        gimple stmt = gsi_stmt (i);
        gimple_stmt_iterator next = i;

        gsi_next (&next);

        /* Trailing statements after a BB-ending statement are the
           inconsistency we are looking for.  */
        if (stmt_ends_bb_p (stmt)
            && !gsi_end_p (next))
          {
            edge fall = find_fallthru_edge (bb->succs);
            basic_block dest = NULL;
            int flags = 0;

            gcc_assert (fall);

            /* We cannot split abnormal edge.  Therefore we
               store its params, make it regular and then
               rebuild abnormal edge after split.  */
            if (fall->flags & EDGE_ABNORMAL)
              {
                flags = fall->flags & ~EDGE_FALLTHRU;
                dest = fall->dest;

                fall->flags &= ~EDGE_COMPLEX;
              }

            /* Move every trailing statement onto the fallthru edge.  */
            while (!gsi_end_p (next))
              {
                gimple next_stmt = gsi_stmt (next);
                gsi_remove (&next, false);
                gsi_insert_on_edge (fall, next_stmt);
              }

            gsi_commit_edge_inserts ();

            /* Re-create abnormal edge.  */
            if (dest)
              make_edge (bb, dest, flags);
          }
      }
}
/* Walker callback for chkp_replace_function_pointers.  Replaces
   function pointer in the specified operand with pointer to the
   instrumented function version.

   OP is the operand being walked; WALK_SUBTREES is cleared once a
   FUNCTION_DECL is handled so its children are not revisited.
   Always returns NULL (continue walking).  */
static tree
chkp_replace_function_pointer (tree *op, int *walk_subtrees,
                               void *data ATTRIBUTE_UNUSED)
{
  /* Skip bnd_legacy functions entirely; they are never instrumented.  */
  if (TREE_CODE (*op) == FUNCTION_DECL
      && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op))
      && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
          /* For builtins we replace pointers only for selected
             function and functions having definitions.  */
          || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
              && (chkp_instrument_normal_builtin (*op)
                  || gimple_has_body_p (*op)))))
    {
      struct cgraph_node *node = cgraph_node::get_create (*op);
      struct cgraph_node *clone = NULL;

      /* Already-instrumented clones keep their own decl.  */
      if (!node->instrumentation_clone)
        clone = chkp_maybe_create_clone (*op);

      if (clone)
        *op = clone->decl;
      *walk_subtrees = 0;
    }

  return NULL;
}
/* This function searches for function pointers in statement
   pointed by GSI and replaces them with pointers to instrumented
   function versions.  */
static void
chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  /* For calls we want to walk call args only.  */
  if (gimple_code (stmt) == GIMPLE_CALL)
    {
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); i++)
        walk_tree (gimple_call_arg_ptr (stmt, i),
                   chkp_replace_function_pointer, NULL, NULL);
    }
  else
    /* For other statements walk every operand.  */
    walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
}
/* This function instruments all statements working with memory,
   calls and rets.

   It also removes excess statements from static initializers.

   Iterates blocks with an explicit next-bb snapshot because
   instrumentation may split blocks while we walk them.  */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* In checker constructors every access is known-safe.  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      /* Snapshot the successor before instrumenting: BB may be split.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple s = gsi_stmt (i);

          /* Skip statement marked to not be instrumented.  */
          if (chkp_marked_stmt_p (s))
            {
              gsi_next (&i);
              continue;
            }

          chkp_replace_function_pointers (&i);

          switch (gimple_code (s))
            {
            case GIMPLE_ASSIGN:
              /* LHS is the (potential) store, RHS operands are loads.  */
              chkp_process_stmt (&i, gimple_assign_lhs (s),
                                 gimple_location (s), integer_one_node,
                                 NULL_TREE, NULL_TREE, safe);
              chkp_process_stmt (&i, gimple_assign_rhs1 (s),
                                 gimple_location (s), integer_zero_node,
                                 NULL_TREE, NULL_TREE, safe);
              grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
              if (grhs_class == GIMPLE_BINARY_RHS)
                chkp_process_stmt (&i, gimple_assign_rhs2 (s),
                                   gimple_location (s), integer_zero_node,
                                   NULL_TREE, NULL_TREE, safe);
              break;

            case GIMPLE_RETURN:
              {
                greturn *r = as_a <greturn *> (s);
                if (gimple_return_retval (r) != NULL_TREE)
                  {
                    chkp_process_stmt (&i, gimple_return_retval (r),
                                       gimple_location (r),
                                       integer_zero_node,
                                       NULL_TREE, NULL_TREE, safe);

                    /* Additionally we need to add bounds
                       to return statement.  */
                    chkp_add_bounds_to_ret_stmt (&i);
                  }
              }
              break;

            case GIMPLE_CALL:
              chkp_add_bounds_to_call_stmt (&i);
              break;

            default:
              ;
            }

          gsi_next (&i);

          /* We do not need any actual pointer stores in checker
             static initializer.  */
          if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
              && gimple_code (s) == GIMPLE_ASSIGN
              && gimple_store_p (s))
            {
              gimple_stmt_iterator del_iter = gsi_for_stmt (s);
              gsi_remove (&del_iter, true);
              unlink_stmt_vdef (s);
              release_defs(s);
            }
        }
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
        {
          if (BOUNDED_P (arg))
            {
              tree bounds = chkp_get_next_bounds_parm (arg);
              tree def_ptr = ssa_default_def (cfun, arg);
              gimple_stmt_iterator iter
                = gsi_start_bb (chkp_get_entry_block ());
              chkp_build_bndstx (chkp_build_addr_expr (arg),
                                 def_ptr ? def_ptr : arg,
                                 bounds, &iter);

              /* Skip bounds arg.  */
              arg = TREE_CHAIN (arg);
            }
          else if (chkp_type_has_pointer (TREE_TYPE (arg)))
            {
              /* Aggregate param: store bounds for every pointer slot it
                 contains, consuming one bounds parameter per slot.  */
              tree orig_arg = arg;
              bitmap slots = BITMAP_ALLOC (NULL);
              gimple_stmt_iterator iter
                = gsi_start_bb (chkp_get_entry_block ());
              bitmap_iterator bi;
              unsigned bnd_no;

              chkp_find_bound_slots (TREE_TYPE (arg), slots);

              EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
                {
                  tree bounds = chkp_get_next_bounds_parm (arg);
                  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
                  tree addr = chkp_build_addr_expr (orig_arg);
                  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
                                     build_int_cst (ptr_type_node, offs));
                  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
                                     bounds, &iter);

                  arg = DECL_CHAIN (arg);
                }
              BITMAP_FREE (slots);
            }
        }
}
/* Find init/null/copy_ptr_bounds calls and replace them
   with assignments.  It should allow better code
   optimization.  */
static void
chkp_remove_useless_builtins ()
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree fndecl;
          enum built_in_function fcode;

          /* Find builtins returning first arg and replace
             them with assignments.  */
          if (gimple_code (stmt) == GIMPLE_CALL
              && (fndecl = gimple_call_fndecl (stmt))
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && (fcode = DECL_FUNCTION_CODE (fndecl))
              && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
                  || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
                  || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
                  || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
            {
              /* Replace the call "lhs = builtin (arg0, ...)" with the
                 plain copy "lhs = arg0".  */
              tree res = gimple_call_arg (stmt, 0);
              update_call_from_tree (&gsi, res);
              stmt = gsi_stmt (gsi);
              update_stmt (stmt);
            }
        }
    }
}
/* Initialize pass.

   Resets all per-function state of the checker: unmarks statements,
   (re)allocates the bounds maps, clears cached bounds values and
   computes dominance info used by the pass.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear "do not instrument" marks possibly left from a previous
     function.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
/* Finalize instrumentation pass.

   Releases the per-function data structures allocated in chkp_init
   and frees dominance info.  */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);

  /* Drop cached values so the next function starts clean.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
}
/* Main instrumentation pass function.
   Runs the full pipeline: init, instrument, clean up redundant
   builtins, mark the function as instrumented, repair the CFG
   and release pass state.  Always returns 0 (no extra TODOs).  */
static unsigned int
chkp_execute (void)
{
  chkp_init ();

  chkp_instrument_function ();

  chkp_remove_useless_builtins ();

  chkp_function_mark_instrumented (cfun->decl);

  chkp_fix_cfg ();

  chkp_fini ();

  return 0;
}
/* Instrumentation pass gate.
   Run only for instrumentation clones and for checker-generated
   static constructors ("chkp ctor" attribute).  */
static bool
chkp_gate (void)
{
  return cgraph_node::get (cfun->decl)->instrumentation_clone
    || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
}
namespace {

/* Pass descriptor for the pointer bounds checker GIMPLE pass.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* Pass wrapper delegating gate/execute to the chkp_* helpers above.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }
  virtual bool gate (function *)
    {
      return chkp_gate ();
    }
  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }
}; // class pass_chkp

} // anon namespace
/* Factory for the chkp pass, called from the pass manager.  */
gimple_opt_pass *
make_pass_chkp (gcc::context *ctxt)
{
  return new pass_chkp (ctxt);
}
4394 #include "gt-tree-chkp.h"