2015-03-02 Hristian Kirtchev <kirtchev@adacore.com>
[official-gcc.git] / gcc / tree-chkp.c
blobd2df4bad0fb934999c4cfcc2bed1b5f271c620f2
/* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-set.h"
25 #include "machmode.h"
26 #include "vec.h"
27 #include "double-int.h"
28 #include "input.h"
29 #include "alias.h"
30 #include "symtab.h"
31 #include "options.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stor-layout.h"
37 #include "varasm.h"
38 #include "target.h"
39 #include "tree-iterator.h"
40 #include "tree-cfg.h"
41 #include "langhooks.h"
42 #include "tree-pass.h"
43 #include "diagnostic.h"
44 #include "ggc.h"
45 #include "is-a.h"
46 #include "cfgloop.h"
47 #include "stringpool.h"
48 #include "tree-ssa-alias.h"
49 #include "tree-ssanames.h"
50 #include "tree-ssa-operands.h"
51 #include "tree-ssa-address.h"
52 #include "tree-ssa.h"
53 #include "predict.h"
54 #include "dominance.h"
55 #include "cfg.h"
56 #include "basic-block.h"
57 #include "tree-ssa-loop-niter.h"
58 #include "gimple-expr.h"
59 #include "gimple.h"
60 #include "tree-phinodes.h"
61 #include "gimple-ssa.h"
62 #include "ssa-iterators.h"
63 #include "gimple-pretty-print.h"
64 #include "gimple-iterator.h"
65 #include "gimplify.h"
66 #include "gimplify-me.h"
67 #include "print-tree.h"
68 #include "hashtab.h"
69 #include "tm.h"
70 #include "hard-reg-set.h"
71 #include "function.h"
72 #include "rtl.h"
73 #include "flags.h"
74 #include "statistics.h"
75 #include "real.h"
76 #include "fixed-value.h"
77 #include "insn-config.h"
78 #include "expmed.h"
79 #include "dojump.h"
80 #include "explow.h"
81 #include "calls.h"
82 #include "emit-rtl.h"
83 #include "stmt.h"
84 #include "expr.h"
85 #include "tree-ssa-propagate.h"
86 #include "gimple-fold.h"
87 #include "tree-chkp.h"
88 #include "gimple-walk.h"
89 #include "rtl.h" /* For MEM_P, assign_temp. */
90 #include "tree-dfa.h"
91 #include "ipa-ref.h"
92 #include "lto-streamer.h"
93 #include "cgraph.h"
94 #include "ipa-chkp.h"
95 #include "params.h"
97 /* Pointer Bounds Checker instruments code with memory checks to find
98 out-of-bounds memory accesses. Checks are performed by computing
99 bounds for each pointer and then comparing address of accessed
100 memory before pointer dereferencing.
102 1. Function clones.
104 See ipa-chkp.c.
106 2. Instrumentation.
108 There are few things to instrument:
110 a) Memory accesses - add checker calls to check address of accessed memory
111 against bounds of dereferenced pointer. Obviously safe memory
112 accesses like static variable access does not have to be instrumented
113 with checks.
115 Example:
117 val_2 = *p_1;
119 with 4 bytes access is transformed into:
121 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
122 D.1_4 = p_1 + 3;
123 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
124 val_2 = *p_1;
126 where __bound_tmp.1_3 are bounds computed for pointer p_1,
127 __builtin___chkp_bndcl is a lower bound check and
128 __builtin___chkp_bndcu is an upper bound check.
130 b) Pointer stores.
132 When pointer is stored in memory we need to store its bounds. To
133 achieve compatibility of instrumented code with regular codes
134 we have to keep data layout and store bounds in special bound tables
135 via special checker call. Implementation of bounds table may vary for
136 different platforms. It has to associate pointer value and its
137 location (it is required because we may have two equal pointers
138 with different bounds stored in different places) with bounds.
139 Another checker builtin allows to get bounds for specified pointer
140 loaded from specified location.
142 Example:
144 buf1[i_1] = &buf2;
146 is transformed into:
148 buf1[i_1] = &buf2;
149 D.1_2 = &buf1[i_1];
150 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
152 where __bound_tmp.1_2 are bounds of &buf2.
154 c) Static initialization.
156 The special case of pointer store is static pointer initialization.
157 Bounds initialization is performed in a few steps:
158 - register all static initializations in front-end using
159 chkp_register_var_initializer
160 - when file compilation finishes we create functions with special
161 attribute 'chkp ctor' and put explicit initialization code
162 (assignments) for all statically initialized pointers.
163 - when checker constructor is compiled checker pass adds required
164 bounds initialization for all statically initialized pointers
165 - since we do not actually need excess pointers initialization
166 in checker constructor we remove such assignments from them
168 d) Calls.
170 For each call in the code we add additional arguments to pass
171 bounds for pointer arguments. We determine type of call arguments
172 using arguments list from function declaration; if function
173 declaration is not available we use function type; otherwise
174 (e.g. for unnamed arguments) we use type of passed value. Function
175 declaration/type is replaced with the instrumented one.
177 Example:
179 val_1 = foo (&buf1, &buf2, &buf1, 0);
181 is translated into:
183 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
184 &buf1, __bound_tmp.1_2, 0);
186 e) Returns.
188 If function returns a pointer value we have to return bounds also.
189 A new operand was added for return statement to hold returned bounds.
191 Example:
193 return &_buf1;
195 is transformed into
197 return &_buf1, __bound_tmp.1_1;
199 3. Bounds computation.
201 Compiler is fully responsible for computing bounds to be used for each
202 memory access. The first step for bounds computation is to find the
203 origin of pointer dereferenced for memory access. Basing on pointer
204 origin we define a way to compute its bounds. There are just few
205 possible cases:
207 a) Pointer is returned by call.
209 In this case we use corresponding checker builtin method to obtain returned
210 bounds.
212 Example:
214 buf_1 = malloc (size_2);
215 foo (buf_1);
217 is translated into:
219 buf_1 = malloc (size_2);
220 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
221 foo (buf_1, __bound_tmp.1_3);
223 b) Pointer is an address of an object.
225 In this case compiler tries to compute objects size and create corresponding
226 bounds. If object has incomplete type then special checker builtin is used to
227 obtain its size at runtime.
229 Example:
231 foo ()
233 <unnamed type> __bound_tmp.3;
234 static int buf[100];
236 <bb 3>:
237 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
239 <bb 2>:
240 return &buf, __bound_tmp.3_2;
243 Example:
245 Address of an object 'extern int buf[]' with incomplete type is
246 returned.
248 foo ()
250 <unnamed type> __bound_tmp.4;
251 long unsigned int __size_tmp.3;
253 <bb 3>:
254 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
255 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
257 <bb 2>:
258 return &buf, __bound_tmp.4_3;
261 c) Pointer is the result of object narrowing.
263 It happens when we use pointer to an object to compute pointer to a part
264 of an object. E.g. we take pointer to a field of a structure. In this
265 case we perform bounds intersection using bounds of original object and
266 bounds of object's part (which are computed basing on its type).
268 There may be some debatable questions about when narrowing should occur
269 and when it should not. To avoid false bound violations in correct
270 programs we do not perform narrowing when address of an array element is
271 obtained (it has address of the whole array) and when address of the first
272 structure field is obtained (because it is guaranteed to be equal to
273 address of the whole structure and it is legal to cast it back to structure).
275 Default narrowing behavior may be changed using compiler flags.
277 Example:
279 In this example address of the second structure field is returned.
281 foo (struct A * p, __bounds_type __bounds_of_p)
283 <unnamed type> __bound_tmp.3;
284 int * _2;
285 int * _5;
287 <bb 2>:
288 _5 = &p_1(D)->second_field;
289 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
290 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
291 __bounds_of_p_3(D));
292 _2 = &p_1(D)->second_field;
293 return _2, __bound_tmp.3_8;
296 Example:
298 In this example address of the first field of array element is returned.
300 foo (struct A * p, __bounds_type __bounds_of_p, int i)
302 long unsigned int _3;
303 long unsigned int _4;
304 struct A * _6;
305 int * _7;
307 <bb 2>:
308 _3 = (long unsigned int) i_1(D);
309 _4 = _3 * 8;
310 _6 = p_5(D) + _4;
311 _7 = &_6->first_field;
312 return _7, __bounds_of_p_2(D);
316 d) Pointer is the result of pointer arithmetic or type cast.
318 In this case bounds of the base pointer are used. In case of binary
319 operation producing a pointer we are analyzing data flow further
320 looking for operand's bounds. One operand is considered as a base
321 if it has some valid bounds. If we fall into a case when none of
322 operands (or both of them) has valid bounds, a default bounds value
323 is used.
325 Trying to find out bounds for binary operations we may fall into
326 cyclic dependencies for pointers. To avoid infinite recursion all
327 walked phi nodes instantly obtain corresponding bounds but created
328 bounds are marked as incomplete. It helps us to stop DF walk during
329 bounds search.
331 When we reach pointer source, some args of incomplete bounds phi obtain
332 valid bounds and those values are propagated further through phi nodes.
333 If no valid bounds were found for phi node then we mark its result as
334 invalid bounds. Process stops when all incomplete bounds become either
335 valid or invalid and we are able to choose a pointer base.
337 e) Pointer is loaded from the memory.
339 In this case we just need to load bounds from the bounds table.
341 Example:
343 foo ()
345 <unnamed type> __bound_tmp.3;
346 static int * buf;
347 int * _2;
349 <bb 2>:
350 _2 = buf;
351 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
352 return _2, __bound_tmp.3_4;
357 typedef void (*assign_handler)(tree, tree, void *);
359 static tree chkp_get_zero_bounds ();
360 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
361 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
362 gimple_stmt_iterator *iter);
363 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
364 tree *elt, bool *safe,
365 bool *bitfield,
366 tree *bounds,
367 gimple_stmt_iterator *iter,
368 bool innermost_bounds);
370 #define chkp_bndldx_fndecl \
371 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
372 #define chkp_bndstx_fndecl \
373 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
374 #define chkp_checkl_fndecl \
375 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
376 #define chkp_checku_fndecl \
377 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
378 #define chkp_bndmk_fndecl \
379 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
380 #define chkp_ret_bnd_fndecl \
381 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
382 #define chkp_intersect_fndecl \
383 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
384 #define chkp_narrow_bounds_fndecl \
385 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
386 #define chkp_sizeof_fndecl \
387 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
388 #define chkp_extract_lower_fndecl \
389 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
390 #define chkp_extract_upper_fndecl \
391 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
393 static GTY (()) tree chkp_uintptr_type;
395 static GTY (()) tree chkp_zero_bounds_var;
396 static GTY (()) tree chkp_none_bounds_var;
398 static GTY (()) basic_block entry_block;
399 static GTY (()) tree zero_bounds;
400 static GTY (()) tree none_bounds;
401 static GTY (()) tree incomplete_bounds;
402 static GTY (()) tree tmp_var;
403 static GTY (()) tree size_tmp_var;
404 static GTY (()) bitmap chkp_abnormal_copies;
406 struct hash_set<tree> *chkp_invalid_bounds;
407 struct hash_set<tree> *chkp_completed_bounds_set;
408 struct hash_map<tree, tree> *chkp_reg_bounds;
409 struct hash_map<tree, tree> *chkp_bound_vars;
410 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
411 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
412 struct hash_map<tree, tree> *chkp_bounds_map;
413 struct hash_map<tree, tree> *chkp_static_var_bounds;
415 static bool in_chkp_pass;
417 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
418 #define CHKP_SIZE_TMP_NAME "__size_tmp"
419 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
420 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
421 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
422 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
423 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
425 /* Static checker constructors may become very large and their
426 compilation with optimization may take too much time.
427 Therefore we put a limit to number of statements in one
428 constructor. Tests with 100 000 statically initialized
429 pointers showed following compilation times on Sandy Bridge
430 server (used -O2):
431 limit 100 => ~18 sec.
432 limit 300 => ~22 sec.
433 limit 1000 => ~30 sec.
434 limit 3000 => ~49 sec.
435 limit 5000 => ~55 sec.
436 limit 10000 => ~76 sec.
437 limit 100000 => ~532 sec. */
438 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
440 struct chkp_ctor_stmt_list
442 tree stmts;
443 int avail;
446 /* Return 1 if function FNDECL is instrumented by Pointer
447 Bounds Checker. */
448 bool
449 chkp_function_instrumented_p (tree fndecl)
451 return fndecl
452 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
455 /* Mark function FNDECL as instrumented. */
456 void
457 chkp_function_mark_instrumented (tree fndecl)
459 if (chkp_function_instrumented_p (fndecl))
460 return;
462 DECL_ATTRIBUTES (fndecl)
463 = tree_cons (get_identifier ("chkp instrumented"), NULL,
464 DECL_ATTRIBUTES (fndecl));
467 /* Return true when STMT is builtin call to instrumentation function
468 corresponding to CODE. */
470 bool
471 chkp_gimple_call_builtin_p (gimple call,
472 enum built_in_function code)
474 tree fndecl;
475 if (is_gimple_call (call)
476 && (fndecl = targetm.builtin_chkp_function (code))
477 && gimple_call_fndecl (call) == fndecl)
478 return true;
479 return false;
482 /* Emit code to store zero bounds for PTR located at MEM. */
483 void
484 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
486 tree zero_bnd, bnd, addr, bndstx;
488 if (flag_chkp_use_static_const_bounds)
489 zero_bnd = chkp_get_zero_bounds_var ();
490 else
491 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
492 integer_zero_node);
493 bnd = make_tree (pointer_bounds_type_node,
494 assign_temp (pointer_bounds_type_node, 0, 1));
495 addr = build1 (ADDR_EXPR,
496 build_pointer_type (TREE_TYPE (mem)), mem);
497 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
499 expand_assignment (bnd, zero_bnd, false);
500 expand_normal (bndstx);
503 /* Mark statement S to not be instrumented. */
504 static void
505 chkp_mark_stmt (gimple s)
507 gimple_set_plf (s, GF_PLF_1, true);
510 /* Mark statement S to be instrumented. */
511 static void
512 chkp_unmark_stmt (gimple s)
514 gimple_set_plf (s, GF_PLF_1, false);
517 /* Return 1 if statement S should not be instrumented. */
518 static bool
519 chkp_marked_stmt_p (gimple s)
521 return gimple_plf (s, GF_PLF_1);
524 /* Get var to be used for bound temps. */
525 static tree
526 chkp_get_tmp_var (void)
528 if (!tmp_var)
529 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
531 return tmp_var;
534 /* Get SSA_NAME to be used as temp. */
535 static tree
536 chkp_get_tmp_reg (gimple stmt)
538 if (in_chkp_pass)
539 return make_ssa_name (chkp_get_tmp_var (), stmt);
541 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
542 CHKP_BOUND_TMP_NAME);
545 /* Get var to be used for size temps. */
546 static tree
547 chkp_get_size_tmp_var (void)
549 if (!size_tmp_var)
550 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
552 return size_tmp_var;
555 /* Register bounds BND for address of OBJ. */
556 static void
557 chkp_register_addr_bounds (tree obj, tree bnd)
559 if (bnd == incomplete_bounds)
560 return;
562 chkp_reg_addr_bounds->put (obj, bnd);
564 if (dump_file && (dump_flags & TDF_DETAILS))
566 fprintf (dump_file, "Regsitered bound ");
567 print_generic_expr (dump_file, bnd, 0);
568 fprintf (dump_file, " for address of ");
569 print_generic_expr (dump_file, obj, 0);
570 fprintf (dump_file, "\n");
574 /* Return bounds registered for address of OBJ. */
575 static tree
576 chkp_get_registered_addr_bounds (tree obj)
578 tree *slot = chkp_reg_addr_bounds->get (obj);
579 return slot ? *slot : NULL_TREE;
582 /* Mark BOUNDS as completed. */
583 static void
584 chkp_mark_completed_bounds (tree bounds)
586 chkp_completed_bounds_set->add (bounds);
588 if (dump_file && (dump_flags & TDF_DETAILS))
590 fprintf (dump_file, "Marked bounds ");
591 print_generic_expr (dump_file, bounds, 0);
592 fprintf (dump_file, " as completed\n");
596 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
597 static bool
598 chkp_completed_bounds (tree bounds)
600 return chkp_completed_bounds_set->contains (bounds);
603 /* Clear comleted bound marks. */
604 static void
605 chkp_erase_completed_bounds (void)
607 delete chkp_completed_bounds_set;
608 chkp_completed_bounds_set = new hash_set<tree>;
611 /* Mark BOUNDS associated with PTR as incomplete. */
612 static void
613 chkp_register_incomplete_bounds (tree bounds, tree ptr)
615 chkp_incomplete_bounds_map->put (bounds, ptr);
617 if (dump_file && (dump_flags & TDF_DETAILS))
619 fprintf (dump_file, "Regsitered incomplete bounds ");
620 print_generic_expr (dump_file, bounds, 0);
621 fprintf (dump_file, " for ");
622 print_generic_expr (dump_file, ptr, 0);
623 fprintf (dump_file, "\n");
627 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
628 static bool
629 chkp_incomplete_bounds (tree bounds)
631 if (bounds == incomplete_bounds)
632 return true;
634 if (chkp_completed_bounds (bounds))
635 return false;
637 return chkp_incomplete_bounds_map->get (bounds) != NULL;
640 /* Clear incomleted bound marks. */
641 static void
642 chkp_erase_incomplete_bounds (void)
644 delete chkp_incomplete_bounds_map;
645 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
648 /* Build and return bndmk call which creates bounds for structure
649 pointed by PTR. Structure should have complete type. */
650 tree
651 chkp_make_bounds_for_struct_addr (tree ptr)
653 tree type = TREE_TYPE (ptr);
654 tree size;
656 gcc_assert (POINTER_TYPE_P (type));
658 size = TYPE_SIZE (TREE_TYPE (type));
660 gcc_assert (size);
662 return build_call_nary (pointer_bounds_type_node,
663 build_fold_addr_expr (chkp_bndmk_fndecl),
664 2, ptr, size);
667 /* Traversal function for chkp_may_finish_incomplete_bounds.
668 Set RES to 0 if at least one argument of phi statement
669 defining bounds (passed in KEY arg) is unknown.
670 Traversal stops when first unknown phi argument is found. */
671 bool
672 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
673 bool *res)
675 gimple phi;
676 unsigned i;
678 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
680 phi = SSA_NAME_DEF_STMT (bounds);
682 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
684 for (i = 0; i < gimple_phi_num_args (phi); i++)
686 tree phi_arg = gimple_phi_arg_def (phi, i);
687 if (!phi_arg)
689 *res = false;
690 /* Do not need to traverse further. */
691 return false;
695 return true;
698 /* Return 1 if all phi nodes created for bounds have their
699 arguments computed. */
700 static bool
701 chkp_may_finish_incomplete_bounds (void)
703 bool res = true;
705 chkp_incomplete_bounds_map
706 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
708 return res;
711 /* Helper function for chkp_finish_incomplete_bounds.
712 Recompute args for bounds phi node. */
713 bool
714 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
715 void *res ATTRIBUTE_UNUSED)
717 tree ptr = *slot;
718 gphi *bounds_phi;
719 gphi *ptr_phi;
720 unsigned i;
722 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
723 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
725 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
726 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
728 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
730 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
731 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
733 add_phi_arg (bounds_phi, bound_arg,
734 gimple_phi_arg_edge (ptr_phi, i),
735 UNKNOWN_LOCATION);
738 return true;
741 /* Mark BOUNDS as invalid. */
742 static void
743 chkp_mark_invalid_bounds (tree bounds)
745 chkp_invalid_bounds->add (bounds);
747 if (dump_file && (dump_flags & TDF_DETAILS))
749 fprintf (dump_file, "Marked bounds ");
750 print_generic_expr (dump_file, bounds, 0);
751 fprintf (dump_file, " as invalid\n");
755 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
756 static bool
757 chkp_valid_bounds (tree bounds)
759 if (bounds == zero_bounds || bounds == none_bounds)
760 return false;
762 return !chkp_invalid_bounds->contains (bounds);
765 /* Helper function for chkp_finish_incomplete_bounds.
766 Check all arguments of phi nodes trying to find
767 valid completed bounds. If there is at least one
768 such arg then bounds produced by phi node are marked
769 as valid completed bounds and all phi args are
770 recomputed. */
771 bool
772 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
774 gimple phi;
775 unsigned i;
777 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
779 if (chkp_completed_bounds (bounds))
780 return true;
782 phi = SSA_NAME_DEF_STMT (bounds);
784 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
786 for (i = 0; i < gimple_phi_num_args (phi); i++)
788 tree phi_arg = gimple_phi_arg_def (phi, i);
790 gcc_assert (phi_arg);
792 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
794 *res = true;
795 chkp_mark_completed_bounds (bounds);
796 chkp_recompute_phi_bounds (bounds, slot, NULL);
797 return true;
801 return true;
804 /* Helper function for chkp_finish_incomplete_bounds.
805 Marks all incompleted bounds as invalid. */
806 bool
807 chkp_mark_invalid_bounds_walker (tree const &bounds,
808 tree *slot ATTRIBUTE_UNUSED,
809 void *res ATTRIBUTE_UNUSED)
811 if (!chkp_completed_bounds (bounds))
813 chkp_mark_invalid_bounds (bounds);
814 chkp_mark_completed_bounds (bounds);
816 return true;
819 /* When all bound phi nodes have all their args computed
820 we have enough info to find valid bounds. We iterate
821 through all incompleted bounds searching for valid
822 bounds. Found valid bounds are marked as completed
823 and all remaining incompleted bounds are recomputed.
824 Process continues until no new valid bounds may be
825 found. All remained incompleted bounds are marked as
826 invalid (i.e. have no valid source of bounds). */
827 static void
828 chkp_finish_incomplete_bounds (void)
830 bool found_valid;
832 while (found_valid)
834 found_valid = false;
836 chkp_incomplete_bounds_map->
837 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
839 if (found_valid)
840 chkp_incomplete_bounds_map->
841 traverse<void *, chkp_recompute_phi_bounds> (NULL);
844 chkp_incomplete_bounds_map->
845 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
846 chkp_incomplete_bounds_map->
847 traverse<void *, chkp_recompute_phi_bounds> (NULL);
849 chkp_erase_completed_bounds ();
850 chkp_erase_incomplete_bounds ();
853 /* Return 1 if type TYPE is a pointer type or a
854 structure having a pointer type as one of its fields.
855 Otherwise return 0. */
856 bool
857 chkp_type_has_pointer (const_tree type)
859 bool res = false;
861 if (BOUNDED_TYPE_P (type))
862 res = true;
863 else if (RECORD_OR_UNION_TYPE_P (type))
865 tree field;
867 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
868 if (TREE_CODE (field) == FIELD_DECL)
869 res = res || chkp_type_has_pointer (TREE_TYPE (field));
871 else if (TREE_CODE (type) == ARRAY_TYPE)
872 res = chkp_type_has_pointer (TREE_TYPE (type));
874 return res;
877 unsigned
878 chkp_type_bounds_count (const_tree type)
880 unsigned res = 0;
882 if (!type)
883 res = 0;
884 else if (BOUNDED_TYPE_P (type))
885 res = 1;
886 else if (RECORD_OR_UNION_TYPE_P (type))
888 bitmap have_bound;
890 bitmap_obstack_initialize (NULL);
891 have_bound = BITMAP_ALLOC (NULL);
892 chkp_find_bound_slots (type, have_bound);
893 res = bitmap_count_bits (have_bound);
894 BITMAP_FREE (have_bound);
895 bitmap_obstack_release (NULL);
898 return res;
901 /* Get bounds associated with NODE via
902 chkp_set_bounds call. */
903 tree
904 chkp_get_bounds (tree node)
906 tree *slot;
908 if (!chkp_bounds_map)
909 return NULL_TREE;
911 slot = chkp_bounds_map->get (node);
912 return slot ? *slot : NULL_TREE;
915 /* Associate bounds VAL with NODE. */
916 void
917 chkp_set_bounds (tree node, tree val)
919 if (!chkp_bounds_map)
920 chkp_bounds_map = new hash_map<tree, tree>;
922 chkp_bounds_map->put (node, val);
925 /* Check if statically initialized variable VAR require
926 static bounds initialization. If VAR is added into
927 bounds initlization list then 1 is returned. Otherwise
928 return 0. */
929 extern bool
930 chkp_register_var_initializer (tree var)
932 if (!flag_check_pointer_bounds
933 || DECL_INITIAL (var) == error_mark_node)
934 return false;
936 gcc_assert (TREE_CODE (var) == VAR_DECL);
937 gcc_assert (DECL_INITIAL (var));
939 if (TREE_STATIC (var)
940 && chkp_type_has_pointer (TREE_TYPE (var)))
942 varpool_node::get_create (var)->need_bounds_init = 1;
943 return true;
946 return false;
949 /* Helper function for chkp_finish_file.
951 Add new modification statement (RHS is assigned to LHS)
952 into list of static initializer statementes (passed in ARG).
953 If statements list becomes too big, emit checker constructor
954 and start the new one. */
955 static void
956 chkp_add_modification_to_stmt_list (tree lhs,
957 tree rhs,
958 void *arg)
960 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
961 tree modify;
963 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
964 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
966 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
967 append_to_statement_list (modify, &stmts->stmts);
969 stmts->avail--;
972 /* Build and return ADDR_EXPR for specified object OBJ. */
973 static tree
974 chkp_build_addr_expr (tree obj)
976 return TREE_CODE (obj) == TARGET_MEM_REF
977 ? tree_mem_ref_addr (ptr_type_node, obj)
978 : build_fold_addr_expr (obj);
981 /* Helper function for chkp_finish_file.
982 Initialize bound variable BND_VAR with bounds of variable
983 VAR to statements list STMTS. If statements list becomes
984 too big, emit checker constructor and start the new one. */
985 static void
986 chkp_output_static_bounds (tree bnd_var, tree var,
987 struct chkp_ctor_stmt_list *stmts)
989 tree lb, ub, size;
991 if (TREE_CODE (var) == STRING_CST)
993 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
994 size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
996 else if (DECL_SIZE (var)
997 && !chkp_variable_size_type (TREE_TYPE (var)))
999 /* Compute bounds using statically known size. */
1000 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1001 size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
1003 else
1005 /* Compute bounds using dynamic size. */
1006 tree call;
1008 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1009 call = build1 (ADDR_EXPR,
1010 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
1011 chkp_sizeof_fndecl);
1012 size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
1013 call, 1, var);
1015 if (flag_chkp_zero_dynamic_size_as_infinite)
1017 tree max_size, cond;
1019 max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
1020 cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
1021 size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
1024 size = size_binop (MINUS_EXPR, size, size_one_node);
1027 ub = size_binop (PLUS_EXPR, lb, size);
1028 stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
1029 &stmts->stmts);
1030 if (stmts->avail <= 0)
1032 cgraph_build_static_cdtor ('B', stmts->stmts,
1033 MAX_RESERVED_INIT_PRIORITY + 2);
1034 stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
1035 stmts->stmts = NULL;
1039 /* Return entry block to be used for checker initilization code.
1040 Create new block if required. */
1041 static basic_block
1042 chkp_get_entry_block (void)
1044 if (!entry_block)
1045 entry_block = split_block (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL)->dest;
1047 return entry_block;
1050 /* Return a bounds var to be used for pointer var PTR_VAR. */
1051 static tree
1052 chkp_get_bounds_var (tree ptr_var)
1054 tree bnd_var;
1055 tree *slot;
1057 slot = chkp_bound_vars->get (ptr_var);
1058 if (slot)
1059 bnd_var = *slot;
1060 else
1062 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1063 CHKP_BOUND_TMP_NAME);
1064 chkp_bound_vars->put (ptr_var, bnd_var);
1067 return bnd_var;
1072 /* Register bounds BND for object PTR in global bounds table.
1073 A copy of bounds may be created for abnormal ssa names.
1074 Returns bounds to use for PTR. */
1075 static tree
1076 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1078 bool abnormal_ptr;
1080 if (!chkp_reg_bounds)
1081 return bnd;
1083 /* Do nothing if bounds are incomplete_bounds
1084 because it means bounds will be recomputed. */
1085 if (bnd == incomplete_bounds)
1086 return bnd;
1088 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1089 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1090 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1092 /* A single bounds value may be reused multiple times for
1093 different pointer values. It may cause coalescing issues
1094 for abnormal SSA names. To avoid it we create a bounds
1095 copy in case it is computed for abnormal SSA name.
1097 We also cannot reuse such created copies for other pointers */
1098 if (abnormal_ptr
1099 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1101 tree bnd_var = NULL_TREE;
1103 if (abnormal_ptr)
1105 if (SSA_NAME_VAR (ptr))
1106 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1108 else
1109 bnd_var = chkp_get_tmp_var ();
1111 /* For abnormal copies we may just find original
1112 bounds and use them. */
1113 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1115 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1116 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1117 bnd = gimple_assign_rhs1 (bnd_def);
1119 /* For undefined values we usually use none bounds
1120 value but in case of abnormal edge it may cause
1121 coalescing failures. Use default definition of
1122 bounds variable instead to avoid it. */
1123 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1124 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1126 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1128 if (dump_file && (dump_flags & TDF_DETAILS))
1130 fprintf (dump_file, "Using default def bounds ");
1131 print_generic_expr (dump_file, bnd, 0);
1132 fprintf (dump_file, " for abnormal default def SSA name ");
1133 print_generic_expr (dump_file, ptr, 0);
1134 fprintf (dump_file, "\n");
1137 else
1139 tree copy;
1140 gimple def = SSA_NAME_DEF_STMT (ptr);
1141 gimple assign;
1142 gimple_stmt_iterator gsi;
1144 if (bnd_var)
1145 copy = make_ssa_name (bnd_var, gimple_build_nop ());
1146 else
1147 copy = make_temp_ssa_name (pointer_bounds_type_node,
1148 gimple_build_nop (),
1149 CHKP_BOUND_TMP_NAME);
1150 assign = gimple_build_assign (copy, bnd);
1152 if (dump_file && (dump_flags & TDF_DETAILS))
1154 fprintf (dump_file, "Creating a copy of bounds ");
1155 print_generic_expr (dump_file, bnd, 0);
1156 fprintf (dump_file, " for abnormal SSA name ");
1157 print_generic_expr (dump_file, ptr, 0);
1158 fprintf (dump_file, "\n");
1161 if (gimple_code (def) == GIMPLE_NOP)
1163 gsi = gsi_last_bb (chkp_get_entry_block ());
1164 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1165 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1166 else
1167 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1169 else
1171 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1172 /* Sometimes (e.g. when we load a pointer from a
1173 memory) bounds are produced later than a pointer.
1174 We need to insert bounds copy appropriately. */
1175 if (gimple_code (bnd_def) != GIMPLE_NOP
1176 && stmt_dominates_stmt_p (def, bnd_def))
1177 gsi = gsi_for_stmt (bnd_def);
1178 else
1179 gsi = gsi_for_stmt (def);
1180 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1183 bnd = copy;
1186 if (abnormal_ptr)
1187 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1190 chkp_reg_bounds->put (ptr, bnd);
1192 if (dump_file && (dump_flags & TDF_DETAILS))
1194 fprintf (dump_file, "Regsitered bound ");
1195 print_generic_expr (dump_file, bnd, 0);
1196 fprintf (dump_file, " for pointer ");
1197 print_generic_expr (dump_file, ptr, 0);
1198 fprintf (dump_file, "\n");
1201 return bnd;
1204 /* Get bounds registered for object PTR in global bounds table. */
1205 static tree
1206 chkp_get_registered_bounds (tree ptr)
1208 tree *slot;
1210 if (!chkp_reg_bounds)
1211 return NULL_TREE;
1213 slot = chkp_reg_bounds->get (ptr);
1214 return slot ? *slot : NULL_TREE;
/* Add bound retvals to return statement pointed by GSI.

   If the function's DECL_RESULT has pointer (bounded) type, compute
   bounds for the returned value, register them for the result decl
   and attach them to the GIMPLE_RETURN via its retbnd operand.  */

static void
chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
{
  greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
  tree retval = gimple_return_retval (ret);
  tree ret_decl = DECL_RESULT (cfun->decl);
  tree bounds;

  /* Nothing to do for a bare "return;".  */
  if (!retval)
    return;

  if (BOUNDED_P (ret_decl))
    {
      bounds = chkp_find_bounds (retval, gsi);
      bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
      gimple_return_set_retbnd (ret, bounds);
    }

  update_stmt (ret);
}
1240 /* Force OP to be suitable for using as an argument for call.
1241 New statements (if any) go to SEQ. */
1242 static tree
1243 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1245 gimple_seq stmts;
1246 gimple_stmt_iterator si;
1248 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1250 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1251 chkp_mark_stmt (gsi_stmt (si));
1253 gimple_seq_add_seq (seq, stmts);
1255 return op;
1258 /* Generate lower bound check for memory access by ADDR.
1259 Check is inserted before the position pointed by ITER.
1260 DIRFLAG indicates whether memory access is load or store. */
1261 static void
1262 chkp_check_lower (tree addr, tree bounds,
1263 gimple_stmt_iterator iter,
1264 location_t location,
1265 tree dirflag)
1267 gimple_seq seq;
1268 gimple check;
1269 tree node;
1271 if (!chkp_function_instrumented_p (current_function_decl)
1272 && bounds == chkp_get_zero_bounds ())
1273 return;
1275 if (dirflag == integer_zero_node
1276 && !flag_chkp_check_read)
1277 return;
1279 if (dirflag == integer_one_node
1280 && !flag_chkp_check_write)
1281 return;
1283 seq = NULL;
1285 node = chkp_force_gimple_call_op (addr, &seq);
1287 check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1288 chkp_mark_stmt (check);
1289 gimple_call_set_with_bounds (check, true);
1290 gimple_set_location (check, location);
1291 gimple_seq_add_stmt (&seq, check);
1293 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1295 if (dump_file && (dump_flags & TDF_DETAILS))
1297 gimple before = gsi_stmt (iter);
1298 fprintf (dump_file, "Generated lower bound check for statement ");
1299 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1300 fprintf (dump_file, " ");
1301 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1305 /* Generate upper bound check for memory access by ADDR.
1306 Check is inserted before the position pointed by ITER.
1307 DIRFLAG indicates whether memory access is load or store. */
1308 static void
1309 chkp_check_upper (tree addr, tree bounds,
1310 gimple_stmt_iterator iter,
1311 location_t location,
1312 tree dirflag)
1314 gimple_seq seq;
1315 gimple check;
1316 tree node;
1318 if (!chkp_function_instrumented_p (current_function_decl)
1319 && bounds == chkp_get_zero_bounds ())
1320 return;
1322 if (dirflag == integer_zero_node
1323 && !flag_chkp_check_read)
1324 return;
1326 if (dirflag == integer_one_node
1327 && !flag_chkp_check_write)
1328 return;
1330 seq = NULL;
1332 node = chkp_force_gimple_call_op (addr, &seq);
1334 check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1335 chkp_mark_stmt (check);
1336 gimple_call_set_with_bounds (check, true);
1337 gimple_set_location (check, location);
1338 gimple_seq_add_stmt (&seq, check);
1340 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1342 if (dump_file && (dump_flags & TDF_DETAILS))
1344 gimple before = gsi_stmt (iter);
1345 fprintf (dump_file, "Generated upper bound check for statement ");
1346 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1347 fprintf (dump_file, " ");
1348 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
/* Replace call to _bnd_chk_* pointed by GSI with
   bndcu and bndcl calls.  DIRFLAG determines whether
   check is for read or write.

   CHECK_PTR_LBOUNDS produces a lower bound check only,
   CHECK_PTR_UBOUNDS an upper bound check only, and
   CHECK_PTR_BOUNDS checks the whole accessed range
   [addr, addr + size - 1].  The original builtin call is
   removed afterwards.  */

void
chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
				    tree dirflag)
{
  gimple_stmt_iterator call_iter = *gsi;
  gimple call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
    chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    {
      /* Check the last accessed byte: addr + size - 1.  */
      tree size = gimple_call_arg (call, 1);
      addr = fold_build_pointer_plus (addr, size);
      addr = fold_build_pointer_plus_hwi (addr, -1);
      chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
    }

  gsi_remove (&call_iter, true);
}
/* Replace call to _bnd_get_ptr_* pointed by GSI with
   corresponding bounds extract call.

   The user builtin takes a pointer; the replacement takes the
   bounds computed for that pointer and extracts its lower or
   upper bound, keeping the original call's LHS.  */

void
chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
{
  gimple call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);
  gimple extract;

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
    fndecl = chkp_extract_lower_fndecl;
  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
    fndecl = chkp_extract_upper_fndecl;
  else
    gcc_unreachable ();

  extract = gimple_build_call (fndecl, 1, bounds);
  gimple_call_set_lhs (extract, gimple_call_lhs (call));
  chkp_mark_stmt (extract);

  gsi_replace (gsi, extract, false);
}
/* Return COMPONENT_REF accessing FIELD in OBJ.  */
static tree
chkp_build_component_ref (tree obj, tree field)
{
  tree res;

  /* If object is TMR then we do not use component_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (obj) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (obj);
      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, DECL_FIELD_OFFSET (field));
      /* Both operands are constants here, so folding must succeed.  */
      gcc_assert (offs);

      res = copy_node (obj);
      TREE_TYPE (res) = TREE_TYPE (field);
      TMR_OFFSET (res) = offs;
    }
  else
    res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);

  return res;
}
/* Return ARRAY_REF for array ARR and index IDX with
   specified element type ETYPE and element size ESIZE.  */
static tree
chkp_build_array_ref (tree arr, tree etype, tree esize,
		      unsigned HOST_WIDE_INT idx)
{
  tree index = build_int_cst (size_type_node, idx);
  tree res;

  /* If object is TMR then we do not use array_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (arr) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (arr);

      /* Offset of the element is idx * esize added to the TMR's
	 existing offset.  */
      esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
				       esize, index);
      gcc_assert(esize);

      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, esize);
      gcc_assert (offs);

      res = copy_node (arr);
      TREE_TYPE (res) = etype;
      TMR_OFFSET (res) = offs;
    }
  else
    res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);

  return res;
}
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   ITER points a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      /* Pointer slot.  Compute its bounds unless another field at
	 the same slot was already processed.  */
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  /* Load the element into a temporary SSA name so bounds can
	     be looked up for an SSA value rather than a memory ref.  */
	  tree temp = make_temp_ssa_name (type, gimple_build_nop (), "");
	  gimple assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      /* Recurse into each field, accumulating its bit offset.  */
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip empty (max index == -1) or unbounded arrays.  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      /* Recurse into each field, accumulating its bit offset.  */
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip empty, unbounded or variable-length arrays.  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
/* Fill bitmap RES with information about bounds for
   type TYPE.  See chkp_find_bound_slots_1 for more
   details.  RES is cleared first.  */
void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}
1614 /* Return 1 if call to FNDECL should be instrumented
1615 and 0 otherwise. */
1617 static bool
1618 chkp_instrument_normal_builtin (tree fndecl)
1620 switch (DECL_FUNCTION_CODE (fndecl))
1622 case BUILT_IN_STRLEN:
1623 case BUILT_IN_STRCPY:
1624 case BUILT_IN_STRNCPY:
1625 case BUILT_IN_STPCPY:
1626 case BUILT_IN_STPNCPY:
1627 case BUILT_IN_STRCAT:
1628 case BUILT_IN_STRNCAT:
1629 case BUILT_IN_MEMCPY:
1630 case BUILT_IN_MEMPCPY:
1631 case BUILT_IN_MEMSET:
1632 case BUILT_IN_MEMMOVE:
1633 case BUILT_IN_BZERO:
1634 case BUILT_IN_STRCMP:
1635 case BUILT_IN_STRNCMP:
1636 case BUILT_IN_BCMP:
1637 case BUILT_IN_MEMCMP:
1638 case BUILT_IN_MEMCPY_CHK:
1639 case BUILT_IN_MEMPCPY_CHK:
1640 case BUILT_IN_MEMMOVE_CHK:
1641 case BUILT_IN_MEMSET_CHK:
1642 case BUILT_IN_STRCPY_CHK:
1643 case BUILT_IN_STRNCPY_CHK:
1644 case BUILT_IN_STPCPY_CHK:
1645 case BUILT_IN_STPNCPY_CHK:
1646 case BUILT_IN_STRCAT_CHK:
1647 case BUILT_IN_STRNCAT_CHK:
1648 case BUILT_IN_MALLOC:
1649 case BUILT_IN_CALLOC:
1650 case BUILT_IN_REALLOC:
1651 return 1;
1653 default:
1654 return 0;
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.

   The function first filters out calls that must not be touched
   (internal fns, back-end builtins, not instrumentable functions),
   then rewrites the chkp user builtins in place, and finally, for
   ordinary calls, builds a new call with a bounds argument appended
   after every pointer (or by-reference) argument.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  For fntype lists ARG is a
	 TREE_LIST node; for DECL_ARGUMENTS it is a PARM_DECL
	 chain.  The trailing void_type_node of a fntype list
	 marks the start of varargs.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  /* Aggregate passed by value with embedded pointers: collect
	     bounds for every pointer slot it contains.  */
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA defs of the old call at the replacement before
	 swapping the statements.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
/* Return constant static bounds var with specified LB and UB
   if such var exists in varpool.  Return NULL otherwise.  */
static tree
chkp_find_const_bounds_var (HOST_WIDE_INT lb,
			    HOST_WIDE_INT ub)
{
  tree val = targetm.chkp_make_bounds_constant (lb, ub);
  struct varpool_node *node;

  /* We expect bounds constant is represented as a complex value
     of two pointer sized integers.  */
  gcc_assert (TREE_CODE (val) == COMPLEX_CST);

  /* Linear scan over all variables; compare initializers
     componentwise against the target-built constant.  */
  FOR_EACH_VARIABLE (node)
    if (POINTER_BOUNDS_P (node->decl)
	&& TREE_READONLY (node->decl)
	&& DECL_INITIAL (node->decl)
	&& TREE_CODE (DECL_INITIAL (node->decl)) == COMPLEX_CST
	&& tree_int_cst_equal (TREE_REALPART (DECL_INITIAL (node->decl)),
			       TREE_REALPART (val))
	&& tree_int_cst_equal (TREE_IMAGPART (DECL_INITIAL (node->decl)),
			       TREE_IMAGPART (val)))
      return node->decl;

  return NULL;
}
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree var;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  var = chkp_find_const_bounds_var (lb, ub);

  if (var)
    return var;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		    get_identifier (name), pointer_bounds_type_node);

  TREE_PUBLIC (var) = 1;
  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_STATIC (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  if (!in_lto_p)
    {
      DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
      DECL_COMDAT (var) = 1;
      varpool_node::get_create (var)->set_comdat_group (DECL_ASSEMBLER_NAME (var));
      varpool_node::get_create (var)->force_output = 1;
    }
  else
    /* In LTO mode the definition is expected to come from
       another unit.  */
    DECL_EXTERNAL (var) = 1;
  varpool_node::finalize_decl (var);

  return var;
}
/* Generate code to make bounds with specified lower bound LB and SIZE.
   if AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.
   Returns the SSA name holding the created bounds.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* LB and SIZE may be arbitrary expressions; gimplify them
     into valid call operands first.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
/* Return var holding zero bounds.  The var is looked up by its
   assembler name first (it may already exist, e.g. with LTO) and
   created on demand otherwise.  Result is cached in
   chkp_zero_bounds_var.  */
tree
chkp_get_zero_bounds_var (void)
{
  if (!chkp_zero_bounds_var)
    {
      tree id = get_identifier (CHKP_ZERO_BOUNDS_VAR_NAME);
      symtab_node *node = symtab_node::get_for_asmname (id);
      if (node)
	chkp_zero_bounds_var = node->decl;
    }

  if (!chkp_zero_bounds_var)
    chkp_zero_bounds_var
      = chkp_make_static_const_bounds (0, -1,
				       CHKP_ZERO_BOUNDS_VAR_NAME);
  return chkp_zero_bounds_var;
}
/* Return var holding none bounds.  The var is looked up by its
   assembler name first (it may already exist, e.g. with LTO) and
   created on demand otherwise.  Result is cached in
   chkp_none_bounds_var.  */
tree
chkp_get_none_bounds_var (void)
{
  if (!chkp_none_bounds_var)
    {
      tree id = get_identifier (CHKP_NONE_BOUNDS_VAR_NAME);
      symtab_node *node = symtab_node::get_for_asmname (id);
      if (node)
	chkp_none_bounds_var = node->decl;
    }

  if (!chkp_none_bounds_var)
    chkp_none_bounds_var
      = chkp_make_static_const_bounds (-1, 0,
				       CHKP_NONE_BOUNDS_VAR_NAME);
  return chkp_none_bounds_var;
}
/* Return SSA_NAME used to represent zero bounds.  The value is
   created once per function (cached in zero_bounds): either loaded
   from the static zero bounds var or built with bndmk in the
   entry block.  */
static tree
chkp_get_zero_bounds (void)
{
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      zero_bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
/* Return SSA_NAME used to represent none bounds.  The value is
   created once per function (cached in none_bounds): either loaded
   from the static none bounds var or built with bndmk in the
   entry block.  */
static tree
chkp_get_none_bounds (void)
{
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      none_bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* [-1, 0] expressed as lower bound -1 and size 2.  */
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
/* Return bounds to be used as a result of operation which
   should not create pointer (e.g. MULT_EXPR).  */
static tree
chkp_get_invalid_op_bounds (void)
{
  return chkp_get_zero_bounds ();
}
/* Return bounds to be used for loads of non-pointer values.  */
static tree
chkp_get_nonpointer_load_bounds (void)
{
  return chkp_get_zero_bounds ();
}
/* Return 1 if may use bndret call to get bounds for pointer
   returned by CALL.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  if (gimple_call_internal_p (call))
    return false;

  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  /* Back-end builtins and not instrumentable functions never
     return bounds.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      /* An uninstrumented builtin can still return bounds if it is
	 always_inline and has an instrumented clone with a body.  */
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  return true;
}
/* Build bounds returned by CALL.

   Special cases (alloca, the chkp user builtins, calls that return
   one of their arguments) are handled explicitly; for a generic
   instrumented call a __chkp_bndret call is inserted after CALL.
   The resulting bounds are registered for the call's LHS.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple stmt;
  tree fndecl = gimple_call_fndecl (call);

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (gimple_call_return_flags (call) & ERF_RETURNS_ARG)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = 0, argno;
      if (gimple_call_return_flags (call) & ERF_RETURNS_ARG)
	retarg = gimple_call_return_flags (call) & ERF_RETURN_ARG_MASK;
      if (gimple_call_with_bounds_p (call))
	{
	  /* RETARG counts original (non-bounds) arguments; skip
	     bounds arguments while locating the returned one.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call))
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2255 /* Return bounds used as returned by call
2256 which produced SSA name VAL. */
2257 gcall *
2258 chkp_retbnd_call_by_val (tree val)
2260 if (TREE_CODE (val) != SSA_NAME)
2261 return NULL;
2263 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2265 imm_use_iterator use_iter;
2266 use_operand_p use_p;
2267 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2268 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2269 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2270 return as_a <gcall *> (USE_STMT (use_p));
2272 return NULL;
2275 /* Check the next parameter for the given PARM is bounds
2276 and return it's default SSA_NAME (create if required). */
2277 static tree
2278 chkp_get_next_bounds_parm (tree parm)
2280 tree bounds = TREE_CHAIN (parm);
2281 gcc_assert (POINTER_BOUNDS_P (bounds));
2282 bounds = ssa_default_def (cfun, bounds);
2283 if (!bounds)
2285 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2286 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2288 return bounds;
/* Return bounds to be used for input argument PARM.
   PARM is the default-def SSA name of a PARM_DECL.  Bounds
   already registered for either the SSA name or the decl are
   reused; otherwise they are derived from the kind of parameter
   and registered for PARM before returning.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  /* Prefer bounds registered for the SSA name, then for the decl.  */
  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      /* orig_decl is used because the current decl's assembler
	 name may have been mangled by instrumentation.  */
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
			  "main") == 0)
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* Instrumented pointer parameter: bounds arrive in the
	     following (artificial) bounds parameter.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds, 0);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	/* Non-pointer parameter — nothing to check.  */
	bounds = chkp_get_zero_bounds ();
    }

  /* Make sure the result is registered for PARM so later queries
     hit the cache.  */
  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm), 0);
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
/* Build and return CALL_EXPR for bndldx builtin with specified
   arguments: ADDR is the location the pointer was loaded from
   and PTR is the loaded pointer value whose bounds are looked
   up in the bounds table.  (The original comment said "bndstx";
   this function builds a bndldx call.)  */
tree
chkp_build_bndldx_call (tree addr, tree ptr)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
		    chkp_bndldx_fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
			       fn, 2, addr, ptr);
  /* Mark the call so later instrumentation does not process it again.  */
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
2369 /* Insert code to load bounds for PTR located by ADDR.
2370 Code is inserted after position pointed by GSI.
2371 Loaded bounds are returned. */
2372 static tree
2373 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2375 gimple_seq seq;
2376 gimple stmt;
2377 tree bounds;
2379 seq = NULL;
2381 addr = chkp_force_gimple_call_op (addr, &seq);
2382 ptr = chkp_force_gimple_call_op (ptr, &seq);
2384 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2385 chkp_mark_stmt (stmt);
2386 bounds = chkp_get_tmp_reg (stmt);
2387 gimple_call_set_lhs (stmt, bounds);
2389 gimple_seq_add_stmt (&seq, stmt);
2391 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2393 if (dump_file && (dump_flags & TDF_DETAILS))
2395 fprintf (dump_file, "Generated bndldx for pointer ");
2396 print_generic_expr (dump_file, ptr, 0);
2397 fprintf (dump_file, ": ");
2398 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2401 return bounds;
2404 /* Build and return CALL_EXPR for bndstx builtin with specified
2405 arguments. */
2406 tree
2407 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2409 tree fn = build1 (ADDR_EXPR,
2410 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2411 chkp_bndstx_fndecl);
2412 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2413 fn, 3, ptr, bounds, addr);
2414 CALL_WITH_BOUNDS_P (call) = true;
2415 return call;
2418 /* Insert code to store BOUNDS for PTR stored by ADDR.
2419 New statements are inserted after position pointed
2420 by GSI. */
2421 void
2422 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2423 gimple_stmt_iterator *gsi)
2425 gimple_seq seq;
2426 gimple stmt;
2428 seq = NULL;
2430 addr = chkp_force_gimple_call_op (addr, &seq);
2431 ptr = chkp_force_gimple_call_op (ptr, &seq);
2433 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2434 chkp_mark_stmt (stmt);
2435 gimple_call_set_with_bounds (stmt, true);
2437 gimple_seq_add_stmt (&seq, stmt);
2439 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2441 if (dump_file && (dump_flags & TDF_DETAILS))
2443 fprintf (dump_file, "Generated bndstx for pointer store ");
2444 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2445 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.
   The result is also registered for NODE (when NODE is
   non-NULL) so subsequent lookups reuse it.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  /* bndldx takes the parameter's address, so it must be
	     addressable.  */
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.

	   Note: for MINUS_EXPR the second operand's bounds are
	   never chosen — subtracting a pointer does not yield a
	   pointer into the second operand's object.  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds at runtime with the same condition:
	       bounds = cond ? bnd1 : bnd2.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Pick bounds of whichever operand wins the min/max
	       comparison, decided at runtime.  */
	    gimple stmt;
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.

   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.

   For a PHI definition, *ITER is updated to point at the newly created
   bounds PHI so the caller can fill its arguments later.

   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node, 0);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition — classify by the underlying decl.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced type must have a usable constant size.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var, 0);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      /* Abnormal PHI results need a matching bounds variable rather
	 than an anonymous temporary (coalescing constraints).  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    gimple_build_nop (),
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      /* asm outputs carry no bounds information.  */
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
2768 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2769 tree
2770 chkp_build_make_bounds_call (tree lower_bound, tree size)
2772 tree call = build1 (ADDR_EXPR,
2773 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2774 chkp_bndmk_fndecl);
2775 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2776 call, 2, lower_bound, size);
2779 /* Create static bounds var of specfified OBJ which is
2780 is either VAR_DECL or string constant. */
2781 static tree
2782 chkp_make_static_bounds (tree obj)
2784 static int string_id = 1;
2785 static int var_id = 1;
2786 tree *slot;
2787 const char *var_name;
2788 char *bnd_var_name;
2789 tree bnd_var;
2791 /* First check if we already have required var. */
2792 if (chkp_static_var_bounds)
2794 /* For vars we use assembler name as a key in
2795 chkp_static_var_bounds map. It allows to
2796 avoid duplicating bound vars for decls
2797 sharing assembler name. */
2798 if (TREE_CODE (obj) == VAR_DECL)
2800 tree name = DECL_ASSEMBLER_NAME (obj);
2801 slot = chkp_static_var_bounds->get (name);
2802 if (slot)
2803 return *slot;
2805 else
2807 slot = chkp_static_var_bounds->get (obj);
2808 if (slot)
2809 return *slot;
2813 /* Build decl for bounds var. */
2814 if (TREE_CODE (obj) == VAR_DECL)
2816 if (DECL_IGNORED_P (obj))
2818 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2819 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2821 else
2823 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2825 /* For hidden symbols we want to skip first '*' char. */
2826 if (*var_name == '*')
2827 var_name++;
2829 bnd_var_name = (char *) xmalloc (strlen (var_name)
2830 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2831 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2832 strcat (bnd_var_name, var_name);
2835 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2836 get_identifier (bnd_var_name),
2837 pointer_bounds_type_node);
2839 /* Address of the obj will be used as lower bound. */
2840 TREE_ADDRESSABLE (obj) = 1;
2842 else
2844 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2845 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2847 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2848 get_identifier (bnd_var_name),
2849 pointer_bounds_type_node);
2852 TREE_PUBLIC (bnd_var) = 0;
2853 TREE_USED (bnd_var) = 1;
2854 TREE_READONLY (bnd_var) = 0;
2855 TREE_STATIC (bnd_var) = 1;
2856 TREE_ADDRESSABLE (bnd_var) = 0;
2857 DECL_ARTIFICIAL (bnd_var) = 1;
2858 DECL_COMMON (bnd_var) = 1;
2859 DECL_COMDAT (bnd_var) = 1;
2860 DECL_READ_P (bnd_var) = 1;
2861 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2862 /* Force output similar to constant bounds.
2863 See chkp_make_static_const_bounds. */
2864 varpool_node::get_create (bnd_var)->force_output = 1;
2865 /* Mark symbol as requiring bounds initialization. */
2866 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2867 varpool_node::finalize_decl (bnd_var);
2869 /* Add created var to the map to use it for other references
2870 to obj. */
2871 if (!chkp_static_var_bounds)
2872 chkp_static_var_bounds = new hash_map<tree, tree>;
2874 if (TREE_CODE (obj) == VAR_DECL)
2876 tree name = DECL_ASSEMBLER_NAME (obj);
2877 chkp_static_var_bounds->put (name, bnd_var);
2879 else
2880 chkp_static_var_bounds->put (obj, bnd_var);
2882 return bnd_var;
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.
   The generated code is placed in the function's entry block;
   returns the computed bounds (or zero bounds when incomplete
   type instrumentation is disabled).  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "'\n");
    }

  /* __chkp_sizeof resolves the object size via relocation at
     link/run time.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.
	 0 - lb spans from the object start to the top of the
	 address space.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  /* Emit the whole sequence once, at the entry block.  */
  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
2946 /* Return 1 if TYPE has fields with zero size or fields
2947 marked with chkp_variable_size attribute. */
2948 bool
2949 chkp_variable_size_type (tree type)
2951 bool res = false;
2952 tree field;
2954 if (RECORD_OR_UNION_TYPE_P (type))
2955 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2957 if (TREE_CODE (field) == FIELD_DECL)
2958 res = res
2959 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
2960 || chkp_variable_size_type (TREE_TYPE (field));
2962 else
2963 res = !TYPE_SIZE (type)
2964 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
2965 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
2967 return res;
/* Compute and return bounds for address of DECL which is
   one of VAR_DECL, PARM_DECL, RESULT_DECL.  */
static tree
chkp_get_bounds_for_decl_addr (tree decl)
{
  tree bounds;

  gcc_assert (TREE_CODE (decl) == VAR_DECL
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  bounds = chkp_get_registered_addr_bounds (decl);

  if (bounds)
    return bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Building bounds for address of decl ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, "\n");
    }

  /* Use zero bounds if size is unknown and checks for
     unknown sizes are restricted.  */
  if ((!DECL_SIZE (decl)
       || (chkp_variable_size_type (TREE_TYPE (decl))
	   && (TREE_STATIC (decl)
	       || DECL_EXTERNAL (decl)
	       || TREE_PUBLIC (decl))))
      && !flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (flag_chkp_use_static_bounds
      && TREE_CODE (decl) == VAR_DECL
      && (TREE_STATIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_PUBLIC (decl))
      && !DECL_THREAD_LOCAL_P (decl))
    {
      /* Statically-allocated decls can use a precomputed static
	 bounds var; just load it at function entry.  */
      tree bnd_var = chkp_make_static_bounds (decl);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else if (!DECL_SIZE (decl)
	   || (chkp_variable_size_type (TREE_TYPE (decl))
	       && (TREE_STATIC (decl)
		   || DECL_EXTERNAL (decl)
		   || TREE_PUBLIC (decl))))
    {
      /* Same size-unknown condition as the early return above, but
	 reached only when flag_chkp_incomplete_type is set — fall
	 back to runtime size computation.  */
      gcc_assert (TREE_CODE (decl) == VAR_DECL);
      bounds = chkp_generate_extern_var_bounds (decl);
    }
  else
    {
      /* Size is known at compile time — build bounds directly.  */
      tree lb = chkp_build_addr_expr (decl);
      bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
    }

  return bounds;
}
/* Compute and return bounds for constant string CST.
   The result is registered for CST so it is computed only once.  */
static tree
chkp_get_bounds_for_string_cst (tree cst)
{
  tree bounds;
  tree lb;
  tree size;

  gcc_assert (TREE_CODE (cst) == STRING_CST);

  bounds = chkp_get_registered_bounds (cst);

  if (bounds)
    return bounds;

  /* NOTE(review): flag_chkp_use_static_const_bounds appears to be a
     tristate here (> 0 means explicitly enabled) — confirm against
     the option's default value.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load bounds from a static bounds variable.  */
      tree bnd_var = chkp_make_static_bounds (cst);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    {
      /* Build bounds directly from the string's address and length.  */
      lb = chkp_build_addr_expr (cst);
      size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
      bounds = chkp_make_bounds (lb, size, NULL, false);
    }

  bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);

  return bounds;
}
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   entry block.  Zero (or missing) bounds on either side make the
   intersection trivially equal to the other side.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple stmt;
      tree bounds;

      seq = NULL;

      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
/* Return 1 if we are allowed to narrow bounds for addressed FIELD
   and 0 otherwise.  Narrowing requires the field to have a known,
   non-zero constant size and constant offsets, and not be marked
   (or contain anything marked) bnd_variable_size.  */
static bool
chkp_may_narrow_to_field (tree field)
{
  return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
    && tree_to_uhwi (DECL_SIZE (field)) != 0
    && (!DECL_FIELD_OFFSET (field)
	|| TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
    && (!DECL_FIELD_BIT_OFFSET (field)
	|| TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
    && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
    && !chkp_variable_size_type (TREE_TYPE (field));
}
/* Return 1 if bounds for FIELD should be narrowed to
   field's own size.  The first field (offset 0) is narrowed only
   when flag_chkp_first_field_has_own_bounds is set, so that a
   pointer to the first field may still address the whole object.  */
static bool
chkp_narrow_bounds_for_field (tree field)
{
  /* NOTE(review): tree_to_uhwi returns an unsigned value stored into
     signed HOST_WIDE_INT here; offsets were checked to be INTEGER_CST
     by chkp_may_narrow_to_field, so this is presumably in range.  */
  HOST_WIDE_INT offs;
  HOST_WIDE_INT bit_offs;

  if (!chkp_may_narrow_to_field (field))
    return false;

  /* Accesses to compiler generated fields should not cause
     bounds narrowing.  */
  if (DECL_ARTIFICIAL (field))
    return false;

  offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
  bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));

  return (flag_chkp_narrow_bounds
	  && (flag_chkp_first_field_has_own_bounds
	      || offs
	      || bit_offs));
}
3165 /* Perform narrowing for BOUNDS using bounds computed for field
3166 access COMPONENT. ITER meaning is the same as for
3167 chkp_intersect_bounds. */
3168 static tree
3169 chkp_narrow_bounds_to_field (tree bounds, tree component,
3170 gimple_stmt_iterator *iter)
3172 tree field = TREE_OPERAND (component, 1);
3173 tree size = DECL_SIZE_UNIT (field);
3174 tree field_ptr = chkp_build_addr_expr (component);
3175 tree field_bounds;
3177 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3179 return chkp_intersect_bounds (field_bounds, bounds, iter);
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE output parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS output parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all node to nodes vector
     in reversed order.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = (TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      /* No indirection: the base must be an addressable entity.  */
      gcc_assert (TREE_CODE (var) == VAR_DECL
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
	 bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  /* Array indexing is never provably in-bounds here.  */
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
	    {
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (field))
	    comp_to_narrow = var;
	  last_comp = var;

	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      /* Narrow to each array-typed field as we descend.  */
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
/* Compute and return bounds for address of OBJ.
   The result is registered via chkp_register_addr_bounds so it
   is computed only once per object.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Let the access parser compute (possibly narrowed) bounds.  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Code addresses are not checked — use zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* &*p has the bounds of p itself.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Bounds of a complex component are those of the whole object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3392 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3393 to compute bounds if required. Computed bounds should be available at
3394 position pointed by ITER.
3396 If PTR_SRC is NULL_TREE then pointer definition is identified.
3398 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3399 PTR. If PTR is a any memory reference then ITER points to a statement
3400 after which bndldx will be inserterd. In both cases ITER will be updated
3401 to point to the inserted bndldx statement. */
3403 static tree
3404 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3406 tree addr = NULL_TREE;
3407 tree bounds = NULL_TREE;
3409 if (!ptr_src)
3410 ptr_src = ptr;
3412 bounds = chkp_get_registered_bounds (ptr_src);
3414 if (bounds)
3415 return bounds;
3417 switch (TREE_CODE (ptr_src))
3419 case MEM_REF:
3420 case VAR_DECL:
3421 if (BOUNDED_P (ptr_src))
3422 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3423 bounds = chkp_get_zero_bounds ();
3424 else
3426 addr = chkp_build_addr_expr (ptr_src);
3427 bounds = chkp_build_bndldx (addr, ptr, iter);
3429 else
3430 bounds = chkp_get_nonpointer_load_bounds ();
3431 break;
3433 case ARRAY_REF:
3434 case COMPONENT_REF:
3435 addr = get_base_address (ptr_src);
3436 if (DECL_P (addr)
3437 || TREE_CODE (addr) == MEM_REF
3438 || TREE_CODE (addr) == TARGET_MEM_REF)
3440 if (BOUNDED_P (ptr_src))
3441 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3442 bounds = chkp_get_zero_bounds ();
3443 else
3445 addr = chkp_build_addr_expr (ptr_src);
3446 bounds = chkp_build_bndldx (addr, ptr, iter);
3448 else
3449 bounds = chkp_get_nonpointer_load_bounds ();
3451 else
3453 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3454 bounds = chkp_find_bounds (addr, iter);
3456 break;
3458 case PARM_DECL:
3459 gcc_unreachable ();
3460 bounds = chkp_get_bound_for_parm (ptr_src);
3461 break;
3463 case TARGET_MEM_REF:
3464 addr = chkp_build_addr_expr (ptr_src);
3465 bounds = chkp_build_bndldx (addr, ptr, iter);
3466 break;
3468 case SSA_NAME:
3469 bounds = chkp_get_registered_bounds (ptr_src);
3470 if (!bounds)
3472 gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3473 gphi_iterator phi_iter;
3475 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3477 gcc_assert (bounds);
3479 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3481 unsigned i;
3483 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3485 tree arg = gimple_phi_arg_def (def_phi, i);
3486 tree arg_bnd;
3487 gphi *phi_bnd;
3489 arg_bnd = chkp_find_bounds (arg, NULL);
3491 /* chkp_get_bounds_by_definition created new phi
3492 statement and phi_iter points to it.
3494 Previous call to chkp_find_bounds could create
3495 new basic block and therefore change phi statement
3496 phi_iter points to. */
3497 phi_bnd = phi_iter.phi ();
3499 add_phi_arg (phi_bnd, arg_bnd,
3500 gimple_phi_arg_edge (def_phi, i),
3501 UNKNOWN_LOCATION);
3504 /* If all bound phi nodes have their arg computed
3505 then we may finish its computation. See
3506 chkp_finish_incomplete_bounds for more details. */
3507 if (chkp_may_finish_incomplete_bounds ())
3508 chkp_finish_incomplete_bounds ();
3511 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3512 || chkp_incomplete_bounds (bounds));
3514 break;
3516 case ADDR_EXPR:
3517 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3518 break;
3520 case INTEGER_CST:
3521 if (integer_zerop (ptr_src))
3522 bounds = chkp_get_none_bounds ();
3523 else
3524 bounds = chkp_get_invalid_op_bounds ();
3525 break;
3527 default:
3528 if (dump_file && (dump_flags & TDF_DETAILS))
3530 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3531 get_tree_code_name (TREE_CODE (ptr_src)));
3532 print_node (dump_file, "", ptr_src, 0);
3534 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3535 get_tree_code_name (TREE_CODE (ptr_src)));
3538 if (!bounds)
3540 if (dump_file && (dump_flags & TDF_DETAILS))
3542 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3543 print_node (dump_file, "", ptr_src, 0);
3545 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3548 return bounds;
3551 /* Normal case for bounds search without forced narrowing. */
3552 static tree
3553 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3555 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3558 /* Search bounds for pointer PTR loaded from PTR_SRC
3559 by statement *ITER points to. */
3560 static tree
3561 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3563 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3566 /* Helper function which checks type of RHS and finds all pointers in
3567 it. For each found pointer we build it's accesses in LHS and RHS
3568 objects and then call HANDLER for them. Function is used to copy
3569 or initilize bounds for copied object. */
3570 static void
3571 chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
3572 assign_handler handler)
3574 tree type = TREE_TYPE (lhs);
3576 /* We have nothing to do with clobbers. */
3577 if (TREE_CLOBBER_P (rhs))
3578 return;
3580 if (BOUNDED_TYPE_P (type))
3581 handler (lhs, rhs, arg);
3582 else if (RECORD_OR_UNION_TYPE_P (type))
3584 tree field;
3586 if (TREE_CODE (rhs) == CONSTRUCTOR)
3588 unsigned HOST_WIDE_INT cnt;
3589 tree val;
3591 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
3593 if (chkp_type_has_pointer (TREE_TYPE (field)))
3595 tree lhs_field = chkp_build_component_ref (lhs, field);
3596 chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
3600 else
3601 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3602 if (TREE_CODE (field) == FIELD_DECL
3603 && chkp_type_has_pointer (TREE_TYPE (field)))
3605 tree rhs_field = chkp_build_component_ref (rhs, field);
3606 tree lhs_field = chkp_build_component_ref (lhs, field);
3607 chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
3610 else if (TREE_CODE (type) == ARRAY_TYPE)
3612 unsigned HOST_WIDE_INT cur = 0;
3613 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3614 tree etype = TREE_TYPE (type);
3615 tree esize = TYPE_SIZE (etype);
3617 if (TREE_CODE (rhs) == CONSTRUCTOR)
3619 unsigned HOST_WIDE_INT cnt;
3620 tree purp, val, lhs_elem;
3622 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
3624 if (purp && TREE_CODE (purp) == RANGE_EXPR)
3626 tree lo_index = TREE_OPERAND (purp, 0);
3627 tree hi_index = TREE_OPERAND (purp, 1);
3629 for (cur = (unsigned)tree_to_uhwi (lo_index);
3630 cur <= (unsigned)tree_to_uhwi (hi_index);
3631 cur++)
3633 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3634 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3637 else
3639 if (purp)
3641 gcc_assert (TREE_CODE (purp) == INTEGER_CST);
3642 cur = tree_to_uhwi (purp);
3645 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
3647 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3651 /* Copy array only when size is known. */
3652 else if (maxval && !integer_minus_onep (maxval))
3653 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
3655 tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3656 tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
3657 chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
3660 else
3661 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3662 get_tree_code_name (TREE_CODE (type)));
3665 /* Add code to copy bounds for assignment of RHS to LHS.
3666 ARG is an iterator pointing ne code position. */
3667 static void
3668 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3670 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3671 tree bounds = chkp_find_bounds (rhs, iter);
3672 tree addr = chkp_build_addr_expr(lhs);
3674 chkp_build_bndstx (addr, rhs, bounds, iter);
3677 /* Emit static bound initilizers and size vars. */
3678 void
3679 chkp_finish_file (void)
3681 struct varpool_node *node;
3682 struct chkp_ctor_stmt_list stmts;
3684 if (seen_error ())
3685 return;
3687 /* Iterate through varpool and generate bounds initialization
3688 constructors for all statically initialized pointers. */
3689 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3690 stmts.stmts = NULL;
3691 FOR_EACH_VARIABLE (node)
3692 /* Check that var is actually emitted and we need and may initialize
3693 its bounds. */
3694 if (node->need_bounds_init
3695 && !POINTER_BOUNDS_P (node->decl)
3696 && DECL_RTL (node->decl)
3697 && MEM_P (DECL_RTL (node->decl))
3698 && TREE_ASM_WRITTEN (node->decl))
3700 chkp_walk_pointer_assignments (node->decl,
3701 DECL_INITIAL (node->decl),
3702 &stmts,
3703 chkp_add_modification_to_stmt_list);
3705 if (stmts.avail <= 0)
3707 cgraph_build_static_cdtor ('P', stmts.stmts,
3708 MAX_RESERVED_INIT_PRIORITY + 3);
3709 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3710 stmts.stmts = NULL;
3714 if (stmts.stmts)
3715 cgraph_build_static_cdtor ('P', stmts.stmts,
3716 MAX_RESERVED_INIT_PRIORITY + 3);
3718 /* Iterate through varpool and generate bounds initialization
3719 constructors for all static bounds vars. */
3720 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3721 stmts.stmts = NULL;
3722 FOR_EACH_VARIABLE (node)
3723 if (node->need_bounds_init
3724 && POINTER_BOUNDS_P (node->decl)
3725 && TREE_ASM_WRITTEN (node->decl))
3727 tree bnd = node->decl;
3728 tree var;
3730 gcc_assert (DECL_INITIAL (bnd)
3731 && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);
3733 var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
3734 chkp_output_static_bounds (bnd, var, &stmts);
3737 if (stmts.stmts)
3738 cgraph_build_static_cdtor ('B', stmts.stmts,
3739 MAX_RESERVED_INIT_PRIORITY + 2);
3741 delete chkp_static_var_bounds;
3742 delete chkp_bounds_map;
3745 /* An instrumentation function which is called for each statement
3746 having memory access we want to instrument. It inserts check
3747 code and bounds copy code.
3749 ITER points to statement to instrument.
3751 NODE holds memory access in statement to check.
3753 LOC holds the location information for statement.
3755 DIRFLAGS determines whether access is read or write.
3757 ACCESS_OFFS should be added to address used in NODE
3758 before check.
3760 ACCESS_SIZE holds size of checked access.
3762 SAFE indicates if NODE access is safe and should not be
3763 checked. */
3764 static void
3765 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3766 location_t loc, tree dirflag,
3767 tree access_offs, tree access_size,
3768 bool safe)
3770 tree node_type = TREE_TYPE (node);
3771 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3772 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3773 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3774 tree ptr = NULL_TREE; /* a pointer used for dereference */
3775 tree bounds = NULL_TREE;
3777 /* We do not need instrumentation for clobbers. */
3778 if (dirflag == integer_one_node
3779 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3780 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3781 return;
3783 switch (TREE_CODE (node))
3785 case ARRAY_REF:
3786 case COMPONENT_REF:
3788 bool bitfield;
3789 tree elt;
3791 if (safe)
3793 /* We are not going to generate any checks, so do not
3794 generate bounds as well. */
3795 addr_first = chkp_build_addr_expr (node);
3796 break;
3799 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3800 &bitfield, &bounds, iter, false);
3802 /* Break if there is no dereference and operation is safe. */
3804 if (bitfield)
3806 tree field = TREE_OPERAND (node, 1);
3808 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3809 size = DECL_SIZE_UNIT (field);
3811 if (elt)
3812 elt = chkp_build_addr_expr (elt);
3813 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3814 addr_first = fold_build_pointer_plus_loc (loc,
3815 addr_first,
3816 byte_position (field));
3818 else
3819 addr_first = chkp_build_addr_expr (node);
3821 break;
3823 case INDIRECT_REF:
3824 ptr = TREE_OPERAND (node, 0);
3825 addr_first = ptr;
3826 break;
3828 case MEM_REF:
3829 ptr = TREE_OPERAND (node, 0);
3830 addr_first = chkp_build_addr_expr (node);
3831 break;
3833 case TARGET_MEM_REF:
3834 ptr = TMR_BASE (node);
3835 addr_first = chkp_build_addr_expr (node);
3836 break;
3838 case ARRAY_RANGE_REF:
3839 printf("ARRAY_RANGE_REF\n");
3840 debug_gimple_stmt(gsi_stmt(*iter));
3841 debug_tree(node);
3842 gcc_unreachable ();
3843 break;
3845 case BIT_FIELD_REF:
3847 tree offs, rem, bpu;
3849 gcc_assert (!access_offs);
3850 gcc_assert (!access_size);
3852 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3853 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3854 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3855 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3857 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3858 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3859 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3860 size = fold_convert (size_type_node, size);
3862 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3863 dirflag, offs, size, safe);
3864 return;
3866 break;
3868 case VAR_DECL:
3869 case RESULT_DECL:
3870 case PARM_DECL:
3871 if (dirflag != integer_one_node
3872 || DECL_REGISTER (node))
3873 return;
3875 safe = true;
3876 addr_first = chkp_build_addr_expr (node);
3877 break;
3879 default:
3880 return;
3883 /* If addr_last was not computed then use (addr_first + size - 1)
3884 expression to compute it. */
3885 if (!addr_last)
3887 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3888 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3891 /* Shift both first_addr and last_addr by access_offs if specified. */
3892 if (access_offs)
3894 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3895 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3898 /* Generate bndcl/bndcu checks if memory access is not safe. */
3899 if (!safe)
3901 gimple_stmt_iterator stmt_iter = *iter;
3903 if (!bounds)
3904 bounds = chkp_find_bounds (ptr, iter);
3906 chkp_check_mem_access (addr_first, addr_last, bounds,
3907 stmt_iter, loc, dirflag);
3910 /* We need to store bounds in case pointer is stored. */
3911 if (dirflag == integer_one_node
3912 && chkp_type_has_pointer (node_type)
3913 && flag_chkp_store_bounds)
3915 gimple stmt = gsi_stmt (*iter);
3916 tree rhs1 = gimple_assign_rhs1 (stmt);
3917 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3919 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3920 chkp_walk_pointer_assignments (node, rhs1, iter,
3921 chkp_copy_bounds_for_elem);
3922 else
3924 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3925 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
3930 /* Add code to copy bounds for all pointers copied
3931 in ASSIGN created during inline of EDGE. */
3932 void
3933 chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
3935 tree lhs = gimple_assign_lhs (assign);
3936 tree rhs = gimple_assign_rhs1 (assign);
3937 gimple_stmt_iterator iter = gsi_for_stmt (assign);
3939 if (!flag_chkp_store_bounds)
3940 return;
3942 chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);
3944 /* We should create edges for all created calls to bndldx and bndstx. */
3945 while (gsi_stmt (iter) != assign)
3947 gimple stmt = gsi_stmt (iter);
3948 if (gimple_code (stmt) == GIMPLE_CALL)
3950 tree fndecl = gimple_call_fndecl (stmt);
3951 struct cgraph_node *callee = cgraph_node::get_create (fndecl);
3952 struct cgraph_edge *new_edge;
3954 gcc_assert (fndecl == chkp_bndstx_fndecl
3955 || fndecl == chkp_bndldx_fndecl
3956 || fndecl == chkp_ret_bnd_fndecl);
3958 new_edge = edge->caller->create_edge (callee,
3959 as_a <gcall *> (stmt),
3960 edge->count,
3961 edge->frequency);
3962 new_edge->frequency = compute_call_stmt_bb_frequency
3963 (edge->caller->decl, gimple_bb (stmt));
3965 gsi_prev (&iter);
3969 /* Some code transformation made during instrumentation pass
3970 may put code into inconsistent state. Here we find and fix
3971 such flaws. */
3972 void
3973 chkp_fix_cfg ()
3975 basic_block bb;
3976 gimple_stmt_iterator i;
3978 /* We could insert some code right after stmt which ends bb.
3979 We wanted to put this code on fallthru edge but did not
3980 add new edges from the beginning because it may cause new
3981 phi node creation which may be incorrect due to incomplete
3982 bound phi nodes. */
3983 FOR_ALL_BB_FN (bb, cfun)
3984 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
3986 gimple stmt = gsi_stmt (i);
3987 gimple_stmt_iterator next = i;
3989 gsi_next (&next);
3991 if (stmt_ends_bb_p (stmt)
3992 && !gsi_end_p (next))
3994 edge fall = find_fallthru_edge (bb->succs);
3995 basic_block dest = NULL;
3996 int flags = 0;
3998 gcc_assert (fall);
4000 /* We cannot split abnormal edge. Therefore we
4001 store its params, make it regular and then
4002 rebuild abnormal edge after split. */
4003 if (fall->flags & EDGE_ABNORMAL)
4005 flags = fall->flags & ~EDGE_FALLTHRU;
4006 dest = fall->dest;
4008 fall->flags &= ~EDGE_COMPLEX;
4011 while (!gsi_end_p (next))
4013 gimple next_stmt = gsi_stmt (next);
4014 gsi_remove (&next, false);
4015 gsi_insert_on_edge (fall, next_stmt);
4018 gsi_commit_edge_inserts ();
4020 /* Re-create abnormal edge. */
4021 if (dest)
4022 make_edge (bb, dest, flags);
4027 /* Walker callback for chkp_replace_function_pointers. Replaces
4028 function pointer in the specified operand with pointer to the
4029 instrumented function version. */
4030 static tree
4031 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4032 void *data ATTRIBUTE_UNUSED)
4034 if (TREE_CODE (*op) == FUNCTION_DECL
4035 && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op))
4036 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4037 /* For builtins we replace pointers only for selected
4038 function and functions having definitions. */
4039 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4040 && (chkp_instrument_normal_builtin (*op)
4041 || gimple_has_body_p (*op)))))
4043 struct cgraph_node *node = cgraph_node::get_create (*op);
4044 struct cgraph_node *clone = NULL;
4046 if (!node->instrumentation_clone)
4047 clone = chkp_maybe_create_clone (*op);
4049 if (clone)
4050 *op = clone->decl;
4051 *walk_subtrees = 0;
4054 return NULL;
4057 /* This function searches for function pointers in statement
4058 pointed by GSI and replaces them with pointers to instrumented
4059 function versions. */
4060 static void
4061 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4063 gimple stmt = gsi_stmt (*gsi);
4064 /* For calls we want to walk call args only. */
4065 if (gimple_code (stmt) == GIMPLE_CALL)
4067 unsigned i;
4068 for (i = 0; i < gimple_call_num_args (stmt); i++)
4069 walk_tree (gimple_call_arg_ptr (stmt, i),
4070 chkp_replace_function_pointer, NULL, NULL);
4072 else
4073 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4076 /* This function instruments all statements working with memory,
4077 calls and rets.
4079 It also removes excess statements from static initializers. */
4080 static void
4081 chkp_instrument_function (void)
4083 basic_block bb, next;
4084 gimple_stmt_iterator i;
4085 enum gimple_rhs_class grhs_class;
4086 bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4088 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4091 next = bb->next_bb;
4092 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4094 gimple s = gsi_stmt (i);
4096 /* Skip statement marked to not be instrumented. */
4097 if (chkp_marked_stmt_p (s))
4099 gsi_next (&i);
4100 continue;
4103 chkp_replace_function_pointers (&i);
4105 switch (gimple_code (s))
4107 case GIMPLE_ASSIGN:
4108 chkp_process_stmt (&i, gimple_assign_lhs (s),
4109 gimple_location (s), integer_one_node,
4110 NULL_TREE, NULL_TREE, safe);
4111 chkp_process_stmt (&i, gimple_assign_rhs1 (s),
4112 gimple_location (s), integer_zero_node,
4113 NULL_TREE, NULL_TREE, safe);
4114 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
4115 if (grhs_class == GIMPLE_BINARY_RHS)
4116 chkp_process_stmt (&i, gimple_assign_rhs2 (s),
4117 gimple_location (s), integer_zero_node,
4118 NULL_TREE, NULL_TREE, safe);
4119 break;
4121 case GIMPLE_RETURN:
4123 greturn *r = as_a <greturn *> (s);
4124 if (gimple_return_retval (r) != NULL_TREE)
4126 chkp_process_stmt (&i, gimple_return_retval (r),
4127 gimple_location (r),
4128 integer_zero_node,
4129 NULL_TREE, NULL_TREE, safe);
4131 /* Additionally we need to add bounds
4132 to return statement. */
4133 chkp_add_bounds_to_ret_stmt (&i);
4136 break;
4138 case GIMPLE_CALL:
4139 chkp_add_bounds_to_call_stmt (&i);
4140 break;
4142 default:
4146 gsi_next (&i);
4148 /* We do not need any actual pointer stores in checker
4149 static initializer. */
4150 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4151 && gimple_code (s) == GIMPLE_ASSIGN
4152 && gimple_store_p (s))
4154 gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4155 gsi_remove (&del_iter, true);
4156 unlink_stmt_vdef (s);
4157 release_defs(s);
4160 bb = next;
4162 while (bb);
4164 /* Some input params may have bounds and be address taken. In this case
4165 we should store incoming bounds into bounds table. */
4166 tree arg;
4167 if (flag_chkp_store_bounds)
4168 for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4169 if (TREE_ADDRESSABLE (arg))
4171 if (BOUNDED_P (arg))
4173 tree bounds = chkp_get_next_bounds_parm (arg);
4174 tree def_ptr = ssa_default_def (cfun, arg);
4175 gimple_stmt_iterator iter
4176 = gsi_start_bb (chkp_get_entry_block ());
4177 chkp_build_bndstx (chkp_build_addr_expr (arg),
4178 def_ptr ? def_ptr : arg,
4179 bounds, &iter);
4181 /* Skip bounds arg. */
4182 arg = TREE_CHAIN (arg);
4184 else if (chkp_type_has_pointer (TREE_TYPE (arg)))
4186 tree orig_arg = arg;
4187 bitmap slots = BITMAP_ALLOC (NULL);
4188 gimple_stmt_iterator iter
4189 = gsi_start_bb (chkp_get_entry_block ());
4190 bitmap_iterator bi;
4191 unsigned bnd_no;
4193 chkp_find_bound_slots (TREE_TYPE (arg), slots);
4195 EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4197 tree bounds = chkp_get_next_bounds_parm (arg);
4198 HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4199 tree addr = chkp_build_addr_expr (orig_arg);
4200 tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4201 build_int_cst (ptr_type_node, offs));
4202 chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4203 bounds, &iter);
4205 arg = DECL_CHAIN (arg);
4207 BITMAP_FREE (slots);
4212 /* Find init/null/copy_ptr_bounds calls and replace them
4213 with assignments. It should allow better code
4214 optimization. */
4216 static void
4217 chkp_remove_useless_builtins ()
4219 basic_block bb;
4220 gimple_stmt_iterator gsi;
4222 FOR_EACH_BB_FN (bb, cfun)
4224 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4226 gimple stmt = gsi_stmt (gsi);
4227 tree fndecl;
4228 enum built_in_function fcode;
4230 /* Find builtins returning first arg and replace
4231 them with assignments. */
4232 if (gimple_code (stmt) == GIMPLE_CALL
4233 && (fndecl = gimple_call_fndecl (stmt))
4234 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4235 && (fcode = DECL_FUNCTION_CODE (fndecl))
4236 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4237 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4238 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4239 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4241 tree res = gimple_call_arg (stmt, 0);
4242 update_call_from_tree (&gsi, res);
4243 stmt = gsi_stmt (gsi);
4244 update_stmt (stmt);
4250 /* Initialize pass. */
4251 static void
4252 chkp_init (void)
4254 basic_block bb;
4255 gimple_stmt_iterator i;
4257 in_chkp_pass = true;
4259 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4260 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4261 chkp_unmark_stmt (gsi_stmt (i));
4263 chkp_invalid_bounds = new hash_set<tree>;
4264 chkp_completed_bounds_set = new hash_set<tree>;
4265 delete chkp_reg_bounds;
4266 chkp_reg_bounds = new hash_map<tree, tree>;
4267 delete chkp_bound_vars;
4268 chkp_bound_vars = new hash_map<tree, tree>;
4269 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4270 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4271 delete chkp_bounds_map;
4272 chkp_bounds_map = new hash_map<tree, tree>;
4273 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
4275 entry_block = NULL;
4276 zero_bounds = NULL_TREE;
4277 none_bounds = NULL_TREE;
4278 incomplete_bounds = integer_zero_node;
4279 tmp_var = NULL_TREE;
4280 size_tmp_var = NULL_TREE;
4282 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4284 /* We create these constant bounds once for each object file.
4285 These symbols go to comdat section and result in single copy
4286 of each one in the final binary. */
4287 chkp_get_zero_bounds_var ();
4288 chkp_get_none_bounds_var ();
4290 calculate_dominance_info (CDI_DOMINATORS);
4291 calculate_dominance_info (CDI_POST_DOMINATORS);
4293 bitmap_obstack_initialize (NULL);
4296 /* Finalize instrumentation pass. */
4297 static void
4298 chkp_fini (void)
4300 in_chkp_pass = false;
4302 delete chkp_invalid_bounds;
4303 delete chkp_completed_bounds_set;
4304 delete chkp_reg_addr_bounds;
4305 delete chkp_incomplete_bounds_map;
4307 free_dominance_info (CDI_DOMINATORS);
4308 free_dominance_info (CDI_POST_DOMINATORS);
4310 bitmap_obstack_release (NULL);
4312 entry_block = NULL;
4313 zero_bounds = NULL_TREE;
4314 none_bounds = NULL_TREE;
4317 /* Main instrumentation pass function. */
4318 static unsigned int
4319 chkp_execute (void)
4321 chkp_init ();
4323 chkp_instrument_function ();
4325 chkp_remove_useless_builtins ();
4327 chkp_function_mark_instrumented (cfun->decl);
4329 chkp_fix_cfg ();
4331 chkp_fini ();
4333 return 0;
4336 /* Instrumentation pass gate. */
4337 static bool
4338 chkp_gate (void)
4340 return cgraph_node::get (cfun->decl)->instrumentation_clone
4341 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4344 namespace {
4346 const pass_data pass_data_chkp =
4348 GIMPLE_PASS, /* type */
4349 "chkp", /* name */
4350 OPTGROUP_NONE, /* optinfo_flags */
4351 TV_NONE, /* tv_id */
4352 PROP_ssa | PROP_cfg, /* properties_required */
4353 0, /* properties_provided */
4354 0, /* properties_destroyed */
4355 0, /* todo_flags_start */
4356 TODO_verify_il
4357 | TODO_update_ssa /* todo_flags_finish */
4360 class pass_chkp : public gimple_opt_pass
4362 public:
4363 pass_chkp (gcc::context *ctxt)
4364 : gimple_opt_pass (pass_data_chkp, ctxt)
4367 /* opt_pass methods: */
4368 virtual opt_pass * clone ()
4370 return new pass_chkp (m_ctxt);
4373 virtual bool gate (function *)
4375 return chkp_gate ();
4378 virtual unsigned int execute (function *)
4380 return chkp_execute ();
4383 }; // class pass_chkp
4385 } // anon namespace
4387 gimple_opt_pass *
4388 make_pass_chkp (gcc::context *ctxt)
4390 return new pass_chkp (ctxt);
4393 #include "gt-tree-chkp.h"