Daily bump.
[official-gcc.git] / gcc / tree-chkp.c
blob288470bbe576d8be4780babf76af58f949efa560
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-set.h"
25 #include "machmode.h"
26 #include "vec.h"
27 #include "double-int.h"
28 #include "input.h"
29 #include "alias.h"
30 #include "symtab.h"
31 #include "options.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stor-layout.h"
37 #include "varasm.h"
38 #include "target.h"
39 #include "tree-iterator.h"
40 #include "tree-cfg.h"
41 #include "langhooks.h"
42 #include "tree-pass.h"
43 #include "diagnostic.h"
44 #include "ggc.h"
45 #include "is-a.h"
46 #include "cfgloop.h"
47 #include "stringpool.h"
48 #include "tree-ssa-alias.h"
49 #include "tree-ssanames.h"
50 #include "tree-ssa-operands.h"
51 #include "tree-ssa-address.h"
52 #include "tree-ssa.h"
53 #include "predict.h"
54 #include "dominance.h"
55 #include "cfg.h"
56 #include "basic-block.h"
57 #include "tree-ssa-loop-niter.h"
58 #include "gimple-expr.h"
59 #include "gimple.h"
60 #include "tree-phinodes.h"
61 #include "gimple-ssa.h"
62 #include "ssa-iterators.h"
63 #include "gimple-pretty-print.h"
64 #include "gimple-iterator.h"
65 #include "gimplify.h"
66 #include "gimplify-me.h"
67 #include "print-tree.h"
68 #include "hashtab.h"
69 #include "tm.h"
70 #include "hard-reg-set.h"
71 #include "function.h"
72 #include "rtl.h"
73 #include "flags.h"
74 #include "statistics.h"
75 #include "real.h"
76 #include "fixed-value.h"
77 #include "insn-config.h"
78 #include "expmed.h"
79 #include "dojump.h"
80 #include "explow.h"
81 #include "calls.h"
82 #include "emit-rtl.h"
83 #include "stmt.h"
84 #include "expr.h"
85 #include "tree-ssa-propagate.h"
86 #include "gimple-fold.h"
87 #include "tree-chkp.h"
88 #include "gimple-walk.h"
89 #include "rtl.h" /* For MEM_P, assign_temp. */
90 #include "tree-dfa.h"
91 #include "ipa-ref.h"
92 #include "lto-streamer.h"
93 #include "cgraph.h"
94 #include "ipa-chkp.h"
95 #include "params.h"
97 /* Pointer Bounds Checker instruments code with memory checks to find
98 out-of-bounds memory accesses. Checks are performed by computing
99 bounds for each pointer and then comparing address of accessed
100 memory before pointer dereferencing.
102 1. Function clones.
104 See ipa-chkp.c.
106 2. Instrumentation.
108 There are few things to instrument:
110 a) Memory accesses - add checker calls to check address of accessed memory
111 against bounds of dereferenced pointer. Obviously safe memory
112 accesses like static variable access does not have to be instrumented
113 with checks.
115 Example:
117 val_2 = *p_1;
119 with 4 bytes access is transformed into:
121 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
122 D.1_4 = p_1 + 3;
123 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
124 val_2 = *p_1;
126 where __bound_tmp.1_3 are bounds computed for pointer p_1,
127 __builtin___chkp_bndcl is a lower bound check and
128 __builtin___chkp_bndcu is an upper bound check.
130 b) Pointer stores.
132 When pointer is stored in memory we need to store its bounds. To
133 achieve compatibility of instrumented code with regular codes
134 we have to keep data layout and store bounds in special bound tables
135 via special checker call. Implementation of bounds table may vary for
136 different platforms. It has to associate pointer value and its
137 location (it is required because we may have two equal pointers
138 with different bounds stored in different places) with bounds.
139 Another checker builtin allows to get bounds for specified pointer
140 loaded from specified location.
142 Example:
144 buf1[i_1] = &buf2;
146 is transformed into:
148 buf1[i_1] = &buf2;
149 D.1_2 = &buf1[i_1];
150 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
152 where __bound_tmp.1_2 are bounds of &buf2.
154 c) Static initialization.
156 The special case of pointer store is static pointer initialization.
157 Bounds initialization is performed in a few steps:
158 - register all static initializations in front-end using
159 chkp_register_var_initializer
160 - when file compilation finishes we create functions with special
161 attribute 'chkp ctor' and put explicit initialization code
162 (assignments) for all statically initialized pointers.
163 - when checker constructor is compiled checker pass adds required
164 bounds initialization for all statically initialized pointers
165 - since we do not actually need excess pointers initialization
166 in checker constructor we remove such assignments from them
168 d) Calls.
170 For each call in the code we add additional arguments to pass
171 bounds for pointer arguments. We determine type of call arguments
172 using arguments list from function declaration; if function
173 declaration is not available we use function type; otherwise
174 (e.g. for unnamed arguments) we use type of passed value. Function
175 declaration/type is replaced with the instrumented one.
177 Example:
179 val_1 = foo (&buf1, &buf2, &buf1, 0);
181 is translated into:
183 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
184 &buf1, __bound_tmp.1_2, 0);
186 e) Returns.
188 If function returns a pointer value we have to return bounds also.
189 A new operand was added for return statement to hold returned bounds.
191 Example:
193 return &_buf1;
195 is transformed into
197 return &_buf1, __bound_tmp.1_1;
199 3. Bounds computation.
201 Compiler is fully responsible for computing bounds to be used for each
202 memory access. The first step for bounds computation is to find the
203 origin of pointer dereferenced for memory access. Basing on pointer
204 origin we define a way to compute its bounds. There are just few
205 possible cases:
207 a) Pointer is returned by call.
209 In this case we use corresponding checker builtin method to obtain returned
210 bounds.
212 Example:
214 buf_1 = malloc (size_2);
215 foo (buf_1);
217 is translated into:
219 buf_1 = malloc (size_2);
220 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
221 foo (buf_1, __bound_tmp.1_3);
223 b) Pointer is an address of an object.
225 In this case compiler tries to compute objects size and create corresponding
226 bounds. If object has incomplete type then special checker builtin is used to
227 obtain its size at runtime.
229 Example:
231 foo ()
233 <unnamed type> __bound_tmp.3;
234 static int buf[100];
236 <bb 3>:
237 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
239 <bb 2>:
240 return &buf, __bound_tmp.3_2;
243 Example:
245 Address of an object 'extern int buf[]' with incomplete type is
246 returned.
248 foo ()
250 <unnamed type> __bound_tmp.4;
251 long unsigned int __size_tmp.3;
253 <bb 3>:
254 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
255 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
257 <bb 2>:
258 return &buf, __bound_tmp.4_3;
261 c) Pointer is the result of object narrowing.
263 It happens when we use pointer to an object to compute pointer to a part
264 of an object. E.g. we take pointer to a field of a structure. In this
265 case we perform bounds intersection using bounds of original object and
266 bounds of object's part (which are computed basing on its type).
268 There may be some debatable questions about when narrowing should occur
269 and when it should not. To avoid false bound violations in correct
270 programs we do not perform narrowing when address of an array element is
271 obtained (it has address of the whole array) and when address of the first
272 structure field is obtained (because it is guaranteed to be equal to
273 address of the whole structure and it is legal to cast it back to structure).
275 Default narrowing behavior may be changed using compiler flags.
277 Example:
279 In this example address of the second structure field is returned.
281 foo (struct A * p, __bounds_type __bounds_of_p)
283 <unnamed type> __bound_tmp.3;
284 int * _2;
285 int * _5;
287 <bb 2>:
288 _5 = &p_1(D)->second_field;
289 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
290 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
291 __bounds_of_p_3(D));
292 _2 = &p_1(D)->second_field;
293 return _2, __bound_tmp.3_8;
296 Example:
298 In this example address of the first field of array element is returned.
300 foo (struct A * p, __bounds_type __bounds_of_p, int i)
302 long unsigned int _3;
303 long unsigned int _4;
304 struct A * _6;
305 int * _7;
307 <bb 2>:
308 _3 = (long unsigned int) i_1(D);
309 _4 = _3 * 8;
310 _6 = p_5(D) + _4;
311 _7 = &_6->first_field;
312 return _7, __bounds_of_p_2(D);
316 d) Pointer is the result of pointer arithmetic or type cast.
318 In this case bounds of the base pointer are used. In case of binary
319 operation producing a pointer we are analyzing data flow further
320 looking for operand's bounds. One operand is considered as a base
321 if it has some valid bounds. If we fall into a case when none of
322 operands (or both of them) has valid bounds, a default bounds value
323 is used.
325 Trying to find out bounds for binary operations we may fall into
326 cyclic dependencies for pointers. To avoid infinite recursion all
327 walked phi nodes instantly obtain corresponding bounds but created
328 bounds are marked as incomplete. It helps us to stop DF walk during
329 bounds search.
331 When we reach pointer source, some args of incomplete bounds phi obtain
332 valid bounds and those values are propagated further through phi nodes.
333 If no valid bounds were found for phi node then we mark its result as
334 invalid bounds. Process stops when all incomplete bounds become either
335 valid or invalid and we are able to choose a pointer base.
337 e) Pointer is loaded from the memory.
339 In this case we just need to load bounds from the bounds table.
341 Example:
343 foo ()
345 <unnamed type> __bound_tmp.3;
346 static int * buf;
347 int * _2;
349 <bb 2>:
350 _2 = buf;
351 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
352 return _2, __bound_tmp.3_4;
357 typedef void (*assign_handler)(tree, tree, void *);
359 static tree chkp_get_zero_bounds ();
360 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
361 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
362 gimple_stmt_iterator *iter);
363 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
364 tree *elt, bool *safe,
365 bool *bitfield,
366 tree *bounds,
367 gimple_stmt_iterator *iter,
368 bool innermost_bounds);
370 #define chkp_bndldx_fndecl \
371 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
372 #define chkp_bndstx_fndecl \
373 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
374 #define chkp_checkl_fndecl \
375 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
376 #define chkp_checku_fndecl \
377 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
378 #define chkp_bndmk_fndecl \
379 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
380 #define chkp_ret_bnd_fndecl \
381 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
382 #define chkp_intersect_fndecl \
383 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
384 #define chkp_narrow_bounds_fndecl \
385 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
386 #define chkp_sizeof_fndecl \
387 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
388 #define chkp_extract_lower_fndecl \
389 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
390 #define chkp_extract_upper_fndecl \
391 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
393 static GTY (()) tree chkp_uintptr_type;
395 static GTY (()) tree chkp_zero_bounds_var;
396 static GTY (()) tree chkp_none_bounds_var;
398 static GTY (()) basic_block entry_block;
399 static GTY (()) tree zero_bounds;
400 static GTY (()) tree none_bounds;
401 static GTY (()) tree incomplete_bounds;
402 static GTY (()) tree tmp_var;
403 static GTY (()) tree size_tmp_var;
404 static GTY (()) bitmap chkp_abnormal_copies;
406 struct hash_set<tree> *chkp_invalid_bounds;
407 struct hash_set<tree> *chkp_completed_bounds_set;
408 struct hash_map<tree, tree> *chkp_reg_bounds;
409 struct hash_map<tree, tree> *chkp_bound_vars;
410 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
411 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
412 struct hash_map<tree, tree> *chkp_bounds_map;
413 struct hash_map<tree, tree> *chkp_static_var_bounds;
415 static bool in_chkp_pass;
417 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
418 #define CHKP_SIZE_TMP_NAME "__size_tmp"
419 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
420 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
421 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
422 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
423 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
425 /* Static checker constructors may become very large and their
426 compilation with optimization may take too much time.
427 Therefore we put a limit to number of statements in one
428 constructor. Tests with 100 000 statically initialized
429 pointers showed following compilation times on Sandy Bridge
430 server (used -O2):
431 limit 100 => ~18 sec.
432 limit 300 => ~22 sec.
433 limit 1000 => ~30 sec.
434 limit 3000 => ~49 sec.
435 limit 5000 => ~55 sec.
436 limit 10000 => ~76 sec.
437 limit 100000 => ~532 sec. */
438 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
440 struct chkp_ctor_stmt_list
442 tree stmts;
443 int avail;
446 /* Return 1 if function FNDECL is instrumented by Pointer
447 Bounds Checker. */
448 bool
449 chkp_function_instrumented_p (tree fndecl)
451 return fndecl
452 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
455 /* Mark function FNDECL as instrumented. */
456 void
457 chkp_function_mark_instrumented (tree fndecl)
459 if (chkp_function_instrumented_p (fndecl))
460 return;
462 DECL_ATTRIBUTES (fndecl)
463 = tree_cons (get_identifier ("chkp instrumented"), NULL,
464 DECL_ATTRIBUTES (fndecl));
467 /* Return true when STMT is builtin call to instrumentation function
468 corresponding to CODE. */
470 bool
471 chkp_gimple_call_builtin_p (gimple call,
472 enum built_in_function code)
474 tree fndecl;
475 if (is_gimple_call (call)
476 && (fndecl = targetm.builtin_chkp_function (code))
477 && gimple_call_fndecl (call) == fndecl)
478 return true;
479 return false;
482 /* Emit code to store zero bounds for PTR located at MEM. */
483 void
484 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
486 tree zero_bnd, bnd, addr, bndstx;
488 if (flag_chkp_use_static_const_bounds)
489 zero_bnd = chkp_get_zero_bounds_var ();
490 else
491 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
492 integer_zero_node);
493 bnd = make_tree (pointer_bounds_type_node,
494 assign_temp (pointer_bounds_type_node, 0, 1));
495 addr = build1 (ADDR_EXPR,
496 build_pointer_type (TREE_TYPE (mem)), mem);
497 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
499 expand_assignment (bnd, zero_bnd, false);
500 expand_normal (bndstx);
503 /* Build retbnd call for returned value RETVAL.
505 If BNDVAL is not NULL then result is stored
506 in it. Otherwise a temporary is created to
507 hold returned value.
509 GSI points to a position for a retbnd call
510 and is set to created stmt.
512 Cgraph edge is created for a new call if
513 UPDATE_EDGE is 1.
515 Obtained bounds are returned. */
516 tree
517 chkp_insert_retbnd_call (tree bndval, tree retval,
518 gimple_stmt_iterator *gsi)
520 gimple call;
522 if (!bndval)
523 bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
525 call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
526 gimple_call_set_lhs (call, bndval);
527 gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
529 return bndval;
532 /* Mark statement S to not be instrumented. */
533 static void
534 chkp_mark_stmt (gimple s)
536 gimple_set_plf (s, GF_PLF_1, true);
539 /* Mark statement S to be instrumented. */
540 static void
541 chkp_unmark_stmt (gimple s)
543 gimple_set_plf (s, GF_PLF_1, false);
546 /* Return 1 if statement S should not be instrumented. */
547 static bool
548 chkp_marked_stmt_p (gimple s)
550 return gimple_plf (s, GF_PLF_1);
553 /* Get var to be used for bound temps. */
554 static tree
555 chkp_get_tmp_var (void)
557 if (!tmp_var)
558 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
560 return tmp_var;
563 /* Get SSA_NAME to be used as temp. */
564 static tree
565 chkp_get_tmp_reg (gimple stmt)
567 if (in_chkp_pass)
568 return make_ssa_name (chkp_get_tmp_var (), stmt);
570 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
571 CHKP_BOUND_TMP_NAME);
574 /* Get var to be used for size temps. */
575 static tree
576 chkp_get_size_tmp_var (void)
578 if (!size_tmp_var)
579 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
581 return size_tmp_var;
584 /* Register bounds BND for address of OBJ. */
585 static void
586 chkp_register_addr_bounds (tree obj, tree bnd)
588 if (bnd == incomplete_bounds)
589 return;
591 chkp_reg_addr_bounds->put (obj, bnd);
593 if (dump_file && (dump_flags & TDF_DETAILS))
595 fprintf (dump_file, "Regsitered bound ");
596 print_generic_expr (dump_file, bnd, 0);
597 fprintf (dump_file, " for address of ");
598 print_generic_expr (dump_file, obj, 0);
599 fprintf (dump_file, "\n");
603 /* Return bounds registered for address of OBJ. */
604 static tree
605 chkp_get_registered_addr_bounds (tree obj)
607 tree *slot = chkp_reg_addr_bounds->get (obj);
608 return slot ? *slot : NULL_TREE;
611 /* Mark BOUNDS as completed. */
612 static void
613 chkp_mark_completed_bounds (tree bounds)
615 chkp_completed_bounds_set->add (bounds);
617 if (dump_file && (dump_flags & TDF_DETAILS))
619 fprintf (dump_file, "Marked bounds ");
620 print_generic_expr (dump_file, bounds, 0);
621 fprintf (dump_file, " as completed\n");
625 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
626 static bool
627 chkp_completed_bounds (tree bounds)
629 return chkp_completed_bounds_set->contains (bounds);
632 /* Clear comleted bound marks. */
633 static void
634 chkp_erase_completed_bounds (void)
636 delete chkp_completed_bounds_set;
637 chkp_completed_bounds_set = new hash_set<tree>;
640 /* Mark BOUNDS associated with PTR as incomplete. */
641 static void
642 chkp_register_incomplete_bounds (tree bounds, tree ptr)
644 chkp_incomplete_bounds_map->put (bounds, ptr);
646 if (dump_file && (dump_flags & TDF_DETAILS))
648 fprintf (dump_file, "Regsitered incomplete bounds ");
649 print_generic_expr (dump_file, bounds, 0);
650 fprintf (dump_file, " for ");
651 print_generic_expr (dump_file, ptr, 0);
652 fprintf (dump_file, "\n");
656 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
657 static bool
658 chkp_incomplete_bounds (tree bounds)
660 if (bounds == incomplete_bounds)
661 return true;
663 if (chkp_completed_bounds (bounds))
664 return false;
666 return chkp_incomplete_bounds_map->get (bounds) != NULL;
669 /* Clear incomleted bound marks. */
670 static void
671 chkp_erase_incomplete_bounds (void)
673 delete chkp_incomplete_bounds_map;
674 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
677 /* Build and return bndmk call which creates bounds for structure
678 pointed by PTR. Structure should have complete type. */
679 tree
680 chkp_make_bounds_for_struct_addr (tree ptr)
682 tree type = TREE_TYPE (ptr);
683 tree size;
685 gcc_assert (POINTER_TYPE_P (type));
687 size = TYPE_SIZE (TREE_TYPE (type));
689 gcc_assert (size);
691 return build_call_nary (pointer_bounds_type_node,
692 build_fold_addr_expr (chkp_bndmk_fndecl),
693 2, ptr, size);
696 /* Traversal function for chkp_may_finish_incomplete_bounds.
697 Set RES to 0 if at least one argument of phi statement
698 defining bounds (passed in KEY arg) is unknown.
699 Traversal stops when first unknown phi argument is found. */
700 bool
701 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
702 bool *res)
704 gimple phi;
705 unsigned i;
707 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
709 phi = SSA_NAME_DEF_STMT (bounds);
711 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
713 for (i = 0; i < gimple_phi_num_args (phi); i++)
715 tree phi_arg = gimple_phi_arg_def (phi, i);
716 if (!phi_arg)
718 *res = false;
719 /* Do not need to traverse further. */
720 return false;
724 return true;
727 /* Return 1 if all phi nodes created for bounds have their
728 arguments computed. */
729 static bool
730 chkp_may_finish_incomplete_bounds (void)
732 bool res = true;
734 chkp_incomplete_bounds_map
735 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
737 return res;
740 /* Helper function for chkp_finish_incomplete_bounds.
741 Recompute args for bounds phi node. */
742 bool
743 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
744 void *res ATTRIBUTE_UNUSED)
746 tree ptr = *slot;
747 gphi *bounds_phi;
748 gphi *ptr_phi;
749 unsigned i;
751 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
752 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
754 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
755 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
757 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
759 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
760 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
762 add_phi_arg (bounds_phi, bound_arg,
763 gimple_phi_arg_edge (ptr_phi, i),
764 UNKNOWN_LOCATION);
767 return true;
770 /* Mark BOUNDS as invalid. */
771 static void
772 chkp_mark_invalid_bounds (tree bounds)
774 chkp_invalid_bounds->add (bounds);
776 if (dump_file && (dump_flags & TDF_DETAILS))
778 fprintf (dump_file, "Marked bounds ");
779 print_generic_expr (dump_file, bounds, 0);
780 fprintf (dump_file, " as invalid\n");
784 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
785 static bool
786 chkp_valid_bounds (tree bounds)
788 if (bounds == zero_bounds || bounds == none_bounds)
789 return false;
791 return !chkp_invalid_bounds->contains (bounds);
794 /* Helper function for chkp_finish_incomplete_bounds.
795 Check all arguments of phi nodes trying to find
796 valid completed bounds. If there is at least one
797 such arg then bounds produced by phi node are marked
798 as valid completed bounds and all phi args are
799 recomputed. */
800 bool
801 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
803 gimple phi;
804 unsigned i;
806 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
808 if (chkp_completed_bounds (bounds))
809 return true;
811 phi = SSA_NAME_DEF_STMT (bounds);
813 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
815 for (i = 0; i < gimple_phi_num_args (phi); i++)
817 tree phi_arg = gimple_phi_arg_def (phi, i);
819 gcc_assert (phi_arg);
821 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
823 *res = true;
824 chkp_mark_completed_bounds (bounds);
825 chkp_recompute_phi_bounds (bounds, slot, NULL);
826 return true;
830 return true;
833 /* Helper function for chkp_finish_incomplete_bounds.
834 Marks all incompleted bounds as invalid. */
835 bool
836 chkp_mark_invalid_bounds_walker (tree const &bounds,
837 tree *slot ATTRIBUTE_UNUSED,
838 void *res ATTRIBUTE_UNUSED)
840 if (!chkp_completed_bounds (bounds))
842 chkp_mark_invalid_bounds (bounds);
843 chkp_mark_completed_bounds (bounds);
845 return true;
848 /* When all bound phi nodes have all their args computed
849 we have enough info to find valid bounds. We iterate
850 through all incompleted bounds searching for valid
851 bounds. Found valid bounds are marked as completed
852 and all remaining incompleted bounds are recomputed.
853 Process continues until no new valid bounds may be
854 found. All remained incompleted bounds are marked as
855 invalid (i.e. have no valid source of bounds). */
856 static void
857 chkp_finish_incomplete_bounds (void)
859 bool found_valid;
861 while (found_valid)
863 found_valid = false;
865 chkp_incomplete_bounds_map->
866 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
868 if (found_valid)
869 chkp_incomplete_bounds_map->
870 traverse<void *, chkp_recompute_phi_bounds> (NULL);
873 chkp_incomplete_bounds_map->
874 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
875 chkp_incomplete_bounds_map->
876 traverse<void *, chkp_recompute_phi_bounds> (NULL);
878 chkp_erase_completed_bounds ();
879 chkp_erase_incomplete_bounds ();
882 /* Return 1 if type TYPE is a pointer type or a
883 structure having a pointer type as one of its fields.
884 Otherwise return 0. */
885 bool
886 chkp_type_has_pointer (const_tree type)
888 bool res = false;
890 if (BOUNDED_TYPE_P (type))
891 res = true;
892 else if (RECORD_OR_UNION_TYPE_P (type))
894 tree field;
896 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
897 if (TREE_CODE (field) == FIELD_DECL)
898 res = res || chkp_type_has_pointer (TREE_TYPE (field));
900 else if (TREE_CODE (type) == ARRAY_TYPE)
901 res = chkp_type_has_pointer (TREE_TYPE (type));
903 return res;
906 unsigned
907 chkp_type_bounds_count (const_tree type)
909 unsigned res = 0;
911 if (!type)
912 res = 0;
913 else if (BOUNDED_TYPE_P (type))
914 res = 1;
915 else if (RECORD_OR_UNION_TYPE_P (type))
917 bitmap have_bound;
919 bitmap_obstack_initialize (NULL);
920 have_bound = BITMAP_ALLOC (NULL);
921 chkp_find_bound_slots (type, have_bound);
922 res = bitmap_count_bits (have_bound);
923 BITMAP_FREE (have_bound);
924 bitmap_obstack_release (NULL);
927 return res;
930 /* Get bounds associated with NODE via
931 chkp_set_bounds call. */
932 tree
933 chkp_get_bounds (tree node)
935 tree *slot;
937 if (!chkp_bounds_map)
938 return NULL_TREE;
940 slot = chkp_bounds_map->get (node);
941 return slot ? *slot : NULL_TREE;
944 /* Associate bounds VAL with NODE. */
945 void
946 chkp_set_bounds (tree node, tree val)
948 if (!chkp_bounds_map)
949 chkp_bounds_map = new hash_map<tree, tree>;
951 chkp_bounds_map->put (node, val);
954 /* Check if statically initialized variable VAR require
955 static bounds initialization. If VAR is added into
956 bounds initlization list then 1 is returned. Otherwise
957 return 0. */
958 extern bool
959 chkp_register_var_initializer (tree var)
961 if (!flag_check_pointer_bounds
962 || DECL_INITIAL (var) == error_mark_node)
963 return false;
965 gcc_assert (TREE_CODE (var) == VAR_DECL);
966 gcc_assert (DECL_INITIAL (var));
968 if (TREE_STATIC (var)
969 && chkp_type_has_pointer (TREE_TYPE (var)))
971 varpool_node::get_create (var)->need_bounds_init = 1;
972 return true;
975 return false;
978 /* Helper function for chkp_finish_file.
980 Add new modification statement (RHS is assigned to LHS)
981 into list of static initializer statementes (passed in ARG).
982 If statements list becomes too big, emit checker constructor
983 and start the new one. */
984 static void
985 chkp_add_modification_to_stmt_list (tree lhs,
986 tree rhs,
987 void *arg)
989 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
990 tree modify;
992 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
993 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
995 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
996 append_to_statement_list (modify, &stmts->stmts);
998 stmts->avail--;
1001 /* Build and return ADDR_EXPR for specified object OBJ. */
1002 static tree
1003 chkp_build_addr_expr (tree obj)
1005 return TREE_CODE (obj) == TARGET_MEM_REF
1006 ? tree_mem_ref_addr (ptr_type_node, obj)
1007 : build_fold_addr_expr (obj);
1010 /* Helper function for chkp_finish_file.
1011 Initialize bound variable BND_VAR with bounds of variable
1012 VAR to statements list STMTS. If statements list becomes
1013 too big, emit checker constructor and start the new one. */
1014 static void
1015 chkp_output_static_bounds (tree bnd_var, tree var,
1016 struct chkp_ctor_stmt_list *stmts)
1018 tree lb, ub, size;
1020 if (TREE_CODE (var) == STRING_CST)
1022 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1023 size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
1025 else if (DECL_SIZE (var)
1026 && !chkp_variable_size_type (TREE_TYPE (var)))
1028 /* Compute bounds using statically known size. */
1029 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1030 size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
1032 else
1034 /* Compute bounds using dynamic size. */
1035 tree call;
1037 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1038 call = build1 (ADDR_EXPR,
1039 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
1040 chkp_sizeof_fndecl);
1041 size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
1042 call, 1, var);
1044 if (flag_chkp_zero_dynamic_size_as_infinite)
1046 tree max_size, cond;
1048 max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
1049 cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
1050 size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
1053 size = size_binop (MINUS_EXPR, size, size_one_node);
1056 ub = size_binop (PLUS_EXPR, lb, size);
1057 stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
1058 &stmts->stmts);
1059 if (stmts->avail <= 0)
1061 cgraph_build_static_cdtor ('B', stmts->stmts,
1062 MAX_RESERVED_INIT_PRIORITY + 2);
1063 stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
1064 stmts->stmts = NULL;
1068 /* Return entry block to be used for checker initilization code.
1069 Create new block if required. */
1070 static basic_block
1071 chkp_get_entry_block (void)
1073 if (!entry_block)
1074 entry_block
1075 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1077 return entry_block;
1080 /* Return a bounds var to be used for pointer var PTR_VAR. */
1081 static tree
1082 chkp_get_bounds_var (tree ptr_var)
1084 tree bnd_var;
1085 tree *slot;
1087 slot = chkp_bound_vars->get (ptr_var);
1088 if (slot)
1089 bnd_var = *slot;
1090 else
1092 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1093 CHKP_BOUND_TMP_NAME);
1094 chkp_bound_vars->put (ptr_var, bnd_var);
1097 return bnd_var;
1102 /* Register bounds BND for object PTR in global bounds table.
1103 A copy of bounds may be created for abnormal ssa names.
1104 Returns bounds to use for PTR. */
1105 static tree
1106 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1108 bool abnormal_ptr;
1110 if (!chkp_reg_bounds)
1111 return bnd;
1113 /* Do nothing if bounds are incomplete_bounds
1114 because it means bounds will be recomputed. */
1115 if (bnd == incomplete_bounds)
1116 return bnd;
1118 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1119 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1120 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1122 /* A single bounds value may be reused multiple times for
1123 different pointer values. It may cause coalescing issues
1124 for abnormal SSA names. To avoid it we create a bounds
1125 copy in case it is computed for abnormal SSA name.
1127 We also cannot reuse such created copies for other pointers */
1128 if (abnormal_ptr
1129 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1131 tree bnd_var = NULL_TREE;
1133 if (abnormal_ptr)
1135 if (SSA_NAME_VAR (ptr))
1136 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1138 else
1139 bnd_var = chkp_get_tmp_var ();
1141 /* For abnormal copies we may just find original
1142 bounds and use them. */
1143 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1145 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1146 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1147 bnd = gimple_assign_rhs1 (bnd_def);
1149 /* For undefined values we usually use none bounds
1150 value but in case of abnormal edge it may cause
1151 coalescing failures. Use default definition of
1152 bounds variable instead to avoid it. */
1153 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1154 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1156 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1158 if (dump_file && (dump_flags & TDF_DETAILS))
1160 fprintf (dump_file, "Using default def bounds ");
1161 print_generic_expr (dump_file, bnd, 0);
1162 fprintf (dump_file, " for abnormal default def SSA name ");
1163 print_generic_expr (dump_file, ptr, 0);
1164 fprintf (dump_file, "\n");
1167 else
1169 tree copy;
1170 gimple def = SSA_NAME_DEF_STMT (ptr);
1171 gimple assign;
1172 gimple_stmt_iterator gsi;
1174 if (bnd_var)
1175 copy = make_ssa_name (bnd_var, gimple_build_nop ());
1176 else
1177 copy = make_temp_ssa_name (pointer_bounds_type_node,
1178 gimple_build_nop (),
1179 CHKP_BOUND_TMP_NAME);
1180 assign = gimple_build_assign (copy, bnd);
1182 if (dump_file && (dump_flags & TDF_DETAILS))
1184 fprintf (dump_file, "Creating a copy of bounds ");
1185 print_generic_expr (dump_file, bnd, 0);
1186 fprintf (dump_file, " for abnormal SSA name ");
1187 print_generic_expr (dump_file, ptr, 0);
1188 fprintf (dump_file, "\n");
1191 if (gimple_code (def) == GIMPLE_NOP)
1193 gsi = gsi_last_bb (chkp_get_entry_block ());
1194 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1195 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1196 else
1197 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1199 else
1201 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1202 /* Sometimes (e.g. when we load a pointer from a
1203 memory) bounds are produced later than a pointer.
1204 We need to insert bounds copy appropriately. */
1205 if (gimple_code (bnd_def) != GIMPLE_NOP
1206 && stmt_dominates_stmt_p (def, bnd_def))
1207 gsi = gsi_for_stmt (bnd_def);
1208 else
1209 gsi = gsi_for_stmt (def);
1210 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1213 bnd = copy;
1216 if (abnormal_ptr)
1217 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1220 chkp_reg_bounds->put (ptr, bnd);
1222 if (dump_file && (dump_flags & TDF_DETAILS))
1224 fprintf (dump_file, "Regsitered bound ");
1225 print_generic_expr (dump_file, bnd, 0);
1226 fprintf (dump_file, " for pointer ");
1227 print_generic_expr (dump_file, ptr, 0);
1228 fprintf (dump_file, "\n");
1231 return bnd;
1234 /* Get bounds registered for object PTR in global bounds table. */
1235 static tree
1236 chkp_get_registered_bounds (tree ptr)
1238 tree *slot;
1240 if (!chkp_reg_bounds)
1241 return NULL_TREE;
1243 slot = chkp_reg_bounds->get (ptr);
1244 return slot ? *slot : NULL_TREE;
1247 /* Add bound retvals to return statement pointed by GSI. */
1249 static void
1250 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1252 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1253 tree retval = gimple_return_retval (ret);
1254 tree ret_decl = DECL_RESULT (cfun->decl);
1255 tree bounds;
1257 if (!retval)
1258 return;
1260 if (BOUNDED_P (ret_decl))
1262 bounds = chkp_find_bounds (retval, gsi);
1263 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1264 gimple_return_set_retbnd (ret, bounds);
1267 update_stmt (ret);
1270 /* Force OP to be suitable for using as an argument for call.
1271 New statements (if any) go to SEQ. */
1272 static tree
1273 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1275 gimple_seq stmts;
1276 gimple_stmt_iterator si;
1278 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1280 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1281 chkp_mark_stmt (gsi_stmt (si));
1283 gimple_seq_add_seq (seq, stmts);
1285 return op;
1288 /* Generate lower bound check for memory access by ADDR.
1289 Check is inserted before the position pointed by ITER.
1290 DIRFLAG indicates whether memory access is load or store. */
1291 static void
1292 chkp_check_lower (tree addr, tree bounds,
1293 gimple_stmt_iterator iter,
1294 location_t location,
1295 tree dirflag)
1297 gimple_seq seq;
1298 gimple check;
1299 tree node;
1301 if (!chkp_function_instrumented_p (current_function_decl)
1302 && bounds == chkp_get_zero_bounds ())
1303 return;
1305 if (dirflag == integer_zero_node
1306 && !flag_chkp_check_read)
1307 return;
1309 if (dirflag == integer_one_node
1310 && !flag_chkp_check_write)
1311 return;
1313 seq = NULL;
1315 node = chkp_force_gimple_call_op (addr, &seq);
1317 check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1318 chkp_mark_stmt (check);
1319 gimple_call_set_with_bounds (check, true);
1320 gimple_set_location (check, location);
1321 gimple_seq_add_stmt (&seq, check);
1323 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1325 if (dump_file && (dump_flags & TDF_DETAILS))
1327 gimple before = gsi_stmt (iter);
1328 fprintf (dump_file, "Generated lower bound check for statement ");
1329 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1330 fprintf (dump_file, " ");
1331 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1335 /* Generate upper bound check for memory access by ADDR.
1336 Check is inserted before the position pointed by ITER.
1337 DIRFLAG indicates whether memory access is load or store. */
1338 static void
1339 chkp_check_upper (tree addr, tree bounds,
1340 gimple_stmt_iterator iter,
1341 location_t location,
1342 tree dirflag)
1344 gimple_seq seq;
1345 gimple check;
1346 tree node;
1348 if (!chkp_function_instrumented_p (current_function_decl)
1349 && bounds == chkp_get_zero_bounds ())
1350 return;
1352 if (dirflag == integer_zero_node
1353 && !flag_chkp_check_read)
1354 return;
1356 if (dirflag == integer_one_node
1357 && !flag_chkp_check_write)
1358 return;
1360 seq = NULL;
1362 node = chkp_force_gimple_call_op (addr, &seq);
1364 check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1365 chkp_mark_stmt (check);
1366 gimple_call_set_with_bounds (check, true);
1367 gimple_set_location (check, location);
1368 gimple_seq_add_stmt (&seq, check);
1370 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1372 if (dump_file && (dump_flags & TDF_DETAILS))
1374 gimple before = gsi_stmt (iter);
1375 fprintf (dump_file, "Generated upper bound check for statement ");
1376 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1377 fprintf (dump_file, " ");
1378 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
1396 /* Replace call to _bnd_chk_* pointed by GSI with
1397 bndcu and bndcl calls. DIRFLAG determines whether
1398 check is for read or write. */
1400 void
1401 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1402 tree dirflag)
1404 gimple_stmt_iterator call_iter = *gsi;
1405 gimple call = gsi_stmt (*gsi);
1406 tree fndecl = gimple_call_fndecl (call);
1407 tree addr = gimple_call_arg (call, 0);
1408 tree bounds = chkp_find_bounds (addr, gsi);
1410 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1411 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1412 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1414 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1415 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1417 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1419 tree size = gimple_call_arg (call, 1);
1420 addr = fold_build_pointer_plus (addr, size);
1421 addr = fold_build_pointer_plus_hwi (addr, -1);
1422 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1425 gsi_remove (&call_iter, true);
1428 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1429 corresponding bounds extract call. */
1431 void
1432 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1434 gimple call = gsi_stmt (*gsi);
1435 tree fndecl = gimple_call_fndecl (call);
1436 tree addr = gimple_call_arg (call, 0);
1437 tree bounds = chkp_find_bounds (addr, gsi);
1438 gimple extract;
1440 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1441 fndecl = chkp_extract_lower_fndecl;
1442 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1443 fndecl = chkp_extract_upper_fndecl;
1444 else
1445 gcc_unreachable ();
1447 extract = gimple_build_call (fndecl, 1, bounds);
1448 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1449 chkp_mark_stmt (extract);
1451 gsi_replace (gsi, extract, false);
1454 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1455 static tree
1456 chkp_build_component_ref (tree obj, tree field)
1458 tree res;
1460 /* If object is TMR then we do not use component_ref but
1461 add offset instead. We need it to be able to get addr
1462 of the reasult later. */
1463 if (TREE_CODE (obj) == TARGET_MEM_REF)
1465 tree offs = TMR_OFFSET (obj);
1466 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1467 offs, DECL_FIELD_OFFSET (field));
1469 gcc_assert (offs);
1471 res = copy_node (obj);
1472 TREE_TYPE (res) = TREE_TYPE (field);
1473 TMR_OFFSET (res) = offs;
1475 else
1476 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1478 return res;
1481 /* Return ARRAY_REF for array ARR and index IDX with
1482 specified element type ETYPE and element size ESIZE. */
1483 static tree
1484 chkp_build_array_ref (tree arr, tree etype, tree esize,
1485 unsigned HOST_WIDE_INT idx)
1487 tree index = build_int_cst (size_type_node, idx);
1488 tree res;
1490 /* If object is TMR then we do not use array_ref but
1491 add offset instead. We need it to be able to get addr
1492 of the reasult later. */
1493 if (TREE_CODE (arr) == TARGET_MEM_REF)
1495 tree offs = TMR_OFFSET (arr);
1497 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1498 esize, index);
1499 gcc_assert(esize);
1501 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1502 offs, esize);
1503 gcc_assert (offs);
1505 res = copy_node (arr);
1506 TREE_TYPE (res) = etype;
1507 TMR_OFFSET (res) = offs;
1509 else
1510 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1512 return res;
1515 /* Helper function for chkp_add_bounds_to_call_stmt.
1516 Fill ALL_BOUNDS output array with created bounds.
1518 OFFS is used for recursive calls and holds basic
1519 offset of TYPE in outer structure in bits.
1521 ITER points a position where bounds are searched.
1523 ALL_BOUNDS[i] is filled with elem bounds if there
1524 is a field in TYPE which has pointer type and offset
1525 equal to i * POINTER_SIZE in bits. */
1526 static void
1527 chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
1528 HOST_WIDE_INT offs,
1529 gimple_stmt_iterator *iter)
1531 tree type = TREE_TYPE (elem);
1533 if (BOUNDED_TYPE_P (type))
1535 if (!all_bounds[offs / POINTER_SIZE])
1537 tree temp = make_temp_ssa_name (type, gimple_build_nop (), "");
1538 gimple assign = gimple_build_assign (temp, elem);
1539 gimple_stmt_iterator gsi;
1541 gsi_insert_before (iter, assign, GSI_SAME_STMT);
1542 gsi = gsi_for_stmt (assign);
1544 all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
1547 else if (RECORD_OR_UNION_TYPE_P (type))
1549 tree field;
1551 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1552 if (TREE_CODE (field) == FIELD_DECL)
1554 tree base = unshare_expr (elem);
1555 tree field_ref = chkp_build_component_ref (base, field);
1556 HOST_WIDE_INT field_offs
1557 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1558 if (DECL_FIELD_OFFSET (field))
1559 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1561 chkp_find_bounds_for_elem (field_ref, all_bounds,
1562 offs + field_offs, iter);
1565 else if (TREE_CODE (type) == ARRAY_TYPE)
1567 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1568 tree etype = TREE_TYPE (type);
1569 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1570 unsigned HOST_WIDE_INT cur;
1572 if (!maxval || integer_minus_onep (maxval))
1573 return;
1575 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1577 tree base = unshare_expr (elem);
1578 tree arr_elem = chkp_build_array_ref (base, etype,
1579 TYPE_SIZE (etype),
1580 cur);
1581 chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
1582 iter);
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    /* A pointer occupies exactly one bound slot.  */
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    /* Field position = bit offset within its aligned unit plus
	       the byte offset of that unit (converted to bits).  */
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays with unknown, variable or empty domain.  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
/* Fill bitmap RES with information about bounds for
   type TYPE.  See chkp_find_bound_slots_1 for more
   details.  */
void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  /* Start from a clean bitmap; the helper only sets bits.  */
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}
1644 /* Return 1 if call to FNDECL should be instrumented
1645 and 0 otherwise. */
1647 static bool
1648 chkp_instrument_normal_builtin (tree fndecl)
1650 switch (DECL_FUNCTION_CODE (fndecl))
1652 case BUILT_IN_STRLEN:
1653 case BUILT_IN_STRCPY:
1654 case BUILT_IN_STRNCPY:
1655 case BUILT_IN_STPCPY:
1656 case BUILT_IN_STPNCPY:
1657 case BUILT_IN_STRCAT:
1658 case BUILT_IN_STRNCAT:
1659 case BUILT_IN_MEMCPY:
1660 case BUILT_IN_MEMPCPY:
1661 case BUILT_IN_MEMSET:
1662 case BUILT_IN_MEMMOVE:
1663 case BUILT_IN_BZERO:
1664 case BUILT_IN_STRCMP:
1665 case BUILT_IN_STRNCMP:
1666 case BUILT_IN_BCMP:
1667 case BUILT_IN_MEMCMP:
1668 case BUILT_IN_MEMCPY_CHK:
1669 case BUILT_IN_MEMPCPY_CHK:
1670 case BUILT_IN_MEMMOVE_CHK:
1671 case BUILT_IN_MEMSET_CHK:
1672 case BUILT_IN_STRCPY_CHK:
1673 case BUILT_IN_STRNCPY_CHK:
1674 case BUILT_IN_STPCPY_CHK:
1675 case BUILT_IN_STPNCPY_CHK:
1676 case BUILT_IN_STRCAT_CHK:
1677 case BUILT_IN_STRNCAT_CHK:
1678 case BUILT_IN_MALLOC:
1679 case BUILT_IN_CALLOC:
1680 case BUILT_IN_REALLOC:
1681 return 1;
1683 default:
1684 return 0;
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      /* integer_minus_one_node: check applies to both reads and
	 writes (neither -fchkp-check-read nor -write suppresses it).  */
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  */
      if (arg)
	{
	  if (use_fntype)
	    {
	      if (TREE_VALUE (arg) != void_type_node)
		{
		  type = TREE_VALUE (arg);
		  arg = TREE_CHAIN (arg);
		}
	      else
		/* Reached the terminating void; remaining actual
		   args are varargs — use their own types.  */
		type = TREE_TYPE (call_arg);
	    }
	  else
	    {
	      type = TREE_TYPE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	}
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	/* A pointer (or by-reference) argument gets one bounds arg
	   right after it.  */
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  /* Aggregate passed by value: push bounds for each pointer
	     slot it contains.  */
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds args were added, reuse the original statement.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA defs of the old statement to the new one before
	 replacing it.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires '%s' "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      /* Reuse the already-emitted bounds symbol; VAR is discarded.  */
      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* The target provides the actual constant representation of
     [LB, UB] for its bounds type.  */
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
/* Generate code to make bounds with specified lower bound LB and SIZE.
   if AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* Gimplify both operands first; their statements go into SEQ
     ahead of the bndmk call.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER is only honored when an explicit position was given.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
2008 /* Return var holding zero bounds. */
2009 tree
2010 chkp_get_zero_bounds_var (void)
2012 if (!chkp_zero_bounds_var)
2013 chkp_zero_bounds_var
2014 = chkp_make_static_const_bounds (0, -1,
2015 CHKP_ZERO_BOUNDS_VAR_NAME);
2016 return chkp_zero_bounds_var;
2019 /* Return var holding none bounds. */
2020 tree
2021 chkp_get_none_bounds_var (void)
2023 if (!chkp_none_bounds_var)
2024 chkp_none_bounds_var
2025 = chkp_make_static_const_bounds (-1, 0,
2026 CHKP_NONE_BOUNDS_VAR_NAME);
2027 return chkp_none_bounds_var;
2030 /* Return SSA_NAME used to represent zero bounds. */
2031 static tree
2032 chkp_get_zero_bounds (void)
2034 if (zero_bounds)
2035 return zero_bounds;
2037 if (dump_file && (dump_flags & TDF_DETAILS))
2038 fprintf (dump_file, "Creating zero bounds...");
2040 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2041 || flag_chkp_use_static_const_bounds > 0)
2043 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2044 gimple stmt;
2046 zero_bounds = chkp_get_tmp_reg (gimple_build_nop ());
2047 stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
2048 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2050 else
2051 zero_bounds = chkp_make_bounds (integer_zero_node,
2052 integer_zero_node,
2053 NULL,
2054 false);
2056 return zero_bounds;
2059 /* Return SSA_NAME used to represent none bounds. */
2060 static tree
2061 chkp_get_none_bounds (void)
2063 if (none_bounds)
2064 return none_bounds;
2066 if (dump_file && (dump_flags & TDF_DETAILS))
2067 fprintf (dump_file, "Creating none bounds...");
2070 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2071 || flag_chkp_use_static_const_bounds > 0)
2073 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2074 gimple stmt;
2076 none_bounds = chkp_get_tmp_reg (gimple_build_nop ());
2077 stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
2078 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2080 else
2081 none_bounds = chkp_make_bounds (integer_minus_one_node,
2082 build_int_cst (size_type_node, 2),
2083 NULL,
2084 false);
2086 return none_bounds;
/* Return bounds to be used as a result of operation which
   should not create pointer (e.g. MULT_EXPR).  */
static tree
chkp_get_invalid_op_bounds (void)
{
  /* Zero bounds make any later check of such a value fail.  */
  return chkp_get_zero_bounds ();
}
/* Return bounds to be used for loads of non-pointer values.  */
static tree
chkp_get_nonpointer_load_bounds (void)
{
  return chkp_get_zero_bounds ();
}
/* Return true if we may use a bndret call to get bounds for the
   pointer returned by CALL, false otherwise.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  /* Internal functions never return bounds.  */
  if (gimple_call_internal_p (call))
    return false;

  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  /* A call already instrumented with bounds passes them back too.  */
  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      /* Non-instrumented builtins only return bounds when an
	 always_inline instrumented clone with a body exists.  */
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  return true;
}
/* Build bounds returned by CALL.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      /* Bounds of an alloca'ed block are [result, result + size).  */
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* RETARG counts original (non-bounds) args; skip bounds
	     args to map it onto the instrumented argument list.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call))
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2249 /* Return bounds used as returned by call
2250 which produced SSA name VAL. */
2251 gcall *
2252 chkp_retbnd_call_by_val (tree val)
2254 if (TREE_CODE (val) != SSA_NAME)
2255 return NULL;
2257 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2259 imm_use_iterator use_iter;
2260 use_operand_p use_p;
2261 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2262 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2263 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2264 return as_a <gcall *> (USE_STMT (use_p));
2266 return NULL;
2269 /* Check the next parameter for the given PARM is bounds
2270 and return it's default SSA_NAME (create if required). */
2271 static tree
2272 chkp_get_next_bounds_parm (tree parm)
2274 tree bounds = TREE_CHAIN (parm);
2275 gcc_assert (POINTER_BOUNDS_P (bounds));
2276 bounds = ssa_default_def (cfun, bounds);
2277 if (!bounds)
2279 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2280 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2282 return bounds;
/* Return bounds to be used for input argument PARM, which is an
   SSA_NAME whose underlying variable is a PARM_DECL.  Registers
   the computed bounds for PARM so later queries are cached.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  /* Prefer bounds already registered for the SSA name, then
     for the underlying PARM_DECL.  */
  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
			  "main") == 0)
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* A bounded parameter receives its bounds through the
	     following bounds parameter in the instrumented
	     signature (see chkp_get_next_bounds_parm).  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds, 0);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	bounds = chkp_get_zero_bounds ();
    }

  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm), 0);
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
/* Build and return CALL_EXPR for bndldx builtin with specified
   arguments.  ADDR is the memory location the pointer was loaded
   from; PTR is the loaded pointer value.  */
tree
chkp_build_bndldx_call (tree addr, tree ptr)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
		    chkp_bndldx_fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
			       fn, 2, addr, ptr);
  /* Mark the call so later instrumentation knows it already
     carries bounds information.  */
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
/* Insert code to load bounds for PTR located by ADDR.
   Code is inserted after position pointed by GSI.
   Loaded bounds are returned.  */
static tree
chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;
  tree bounds;

  seq = NULL;

  /* Both operands must be valid gimple call operands; this may
     emit helper statements into SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
  chkp_mark_stmt (stmt);
  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndldx for pointer ");
      print_generic_expr (dump_file, ptr, 0);
      fprintf (dump_file, ": ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return bounds;
}
2398 /* Build and return CALL_EXPR for bndstx builtin with specified
2399 arguments. */
2400 tree
2401 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2403 tree fn = build1 (ADDR_EXPR,
2404 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2405 chkp_bndstx_fndecl);
2406 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2407 fn, 3, ptr, bounds, addr);
2408 CALL_WITH_BOUNDS_P (call) = true;
2409 return call;
/* Insert code to store BOUNDS for PTR stored by ADDR.
   New statements are inserted after position pointed
   by GSI.  */
void
chkp_build_bndstx (tree addr, tree ptr, tree bounds,
		   gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;

  seq = NULL;

  /* Both operands must be valid gimple call operands; this may
     emit helper statements into SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
  chkp_mark_stmt (stmt);
  gimple_call_set_with_bounds (stmt, true);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndstx for pointer store ");
      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
      print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
    }
}
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.
   NODE may be NULL_TREE; when non-NULL the computed bounds are
   also registered for it.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  /* bndldx takes the parameter's address, so the decl must
	     be addressable.  */
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	/* MINUS_EXPR never propagates the second operand's bounds:
	   subtracting a pointer cannot yield a pointer into its
	   object.  */
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds at runtime with the same condition that
	       selects the value.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Pick the bounds of whichever operand wins the
	       min/max comparison at runtime.  */
	    gimple stmt;
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.

   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.

   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node, 0);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default SSA name: bounds depend on what kind of decl the
	 name is based on.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced object must have a known non-zero size
	       for bounds to make sense.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var, 0);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      /* For abnormal PHIs a bounds variable coupled with NODE's
	 variable is needed to satisfy RTL coalescing rules;
	 otherwise a plain temporary suffices.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    gimple_build_nop (),
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
2762 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2763 tree
2764 chkp_build_make_bounds_call (tree lower_bound, tree size)
2766 tree call = build1 (ADDR_EXPR,
2767 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2768 chkp_bndmk_fndecl);
2769 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2770 call, 2, lower_bound, size);
2773 /* Create static bounds var of specfified OBJ which is
2774 is either VAR_DECL or string constant. */
2775 static tree
2776 chkp_make_static_bounds (tree obj)
2778 static int string_id = 1;
2779 static int var_id = 1;
2780 tree *slot;
2781 const char *var_name;
2782 char *bnd_var_name;
2783 tree bnd_var;
2785 /* First check if we already have required var. */
2786 if (chkp_static_var_bounds)
2788 /* For vars we use assembler name as a key in
2789 chkp_static_var_bounds map. It allows to
2790 avoid duplicating bound vars for decls
2791 sharing assembler name. */
2792 if (TREE_CODE (obj) == VAR_DECL)
2794 tree name = DECL_ASSEMBLER_NAME (obj);
2795 slot = chkp_static_var_bounds->get (name);
2796 if (slot)
2797 return *slot;
2799 else
2801 slot = chkp_static_var_bounds->get (obj);
2802 if (slot)
2803 return *slot;
2807 /* Build decl for bounds var. */
2808 if (TREE_CODE (obj) == VAR_DECL)
2810 if (DECL_IGNORED_P (obj))
2812 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2813 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2815 else
2817 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2819 /* For hidden symbols we want to skip first '*' char. */
2820 if (*var_name == '*')
2821 var_name++;
2823 bnd_var_name = (char *) xmalloc (strlen (var_name)
2824 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2825 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2826 strcat (bnd_var_name, var_name);
2829 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2830 get_identifier (bnd_var_name),
2831 pointer_bounds_type_node);
2833 /* Address of the obj will be used as lower bound. */
2834 TREE_ADDRESSABLE (obj) = 1;
2836 else
2838 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2839 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2841 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2842 get_identifier (bnd_var_name),
2843 pointer_bounds_type_node);
2846 TREE_PUBLIC (bnd_var) = 0;
2847 TREE_USED (bnd_var) = 1;
2848 TREE_READONLY (bnd_var) = 0;
2849 TREE_STATIC (bnd_var) = 1;
2850 TREE_ADDRESSABLE (bnd_var) = 0;
2851 DECL_ARTIFICIAL (bnd_var) = 1;
2852 DECL_COMMON (bnd_var) = 1;
2853 DECL_COMDAT (bnd_var) = 1;
2854 DECL_READ_P (bnd_var) = 1;
2855 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2856 /* Force output similar to constant bounds.
2857 See chkp_make_static_const_bounds. */
2858 varpool_node::get_create (bnd_var)->force_output = 1;
2859 /* Mark symbol as requiring bounds initialization. */
2860 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2861 varpool_node::finalize_decl (bnd_var);
2863 /* Add created var to the map to use it for other references
2864 to obj. */
2865 if (!chkp_static_var_bounds)
2866 chkp_static_var_bounds = new hash_map<tree, tree>;
2868 if (TREE_CODE (obj) == VAR_DECL)
2870 tree name = DECL_ASSEMBLER_NAME (obj);
2871 chkp_static_var_bounds->put (name, bnd_var);
2873 else
2874 chkp_static_var_bounds->put (obj, bnd_var);
2876 return bnd_var;
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "'\n");
    }

  /* Size is obtained via the __chkp_sizeof builtin, resolved to a
     relocation at link time.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  /* Size computation is emitted once, in the function's entry
     block.  */
  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
2940 /* Return 1 if TYPE has fields with zero size or fields
2941 marked with chkp_variable_size attribute. */
2942 bool
2943 chkp_variable_size_type (tree type)
2945 bool res = false;
2946 tree field;
2948 if (RECORD_OR_UNION_TYPE_P (type))
2949 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2951 if (TREE_CODE (field) == FIELD_DECL)
2952 res = res
2953 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
2954 || chkp_variable_size_type (TREE_TYPE (field));
2956 else
2957 res = !TYPE_SIZE (type)
2958 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
2959 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
2961 return res;
/* Compute and return bounds for address of DECL which is
   one of VAR_DECL, PARM_DECL, RESULT_DECL.  */
static tree
chkp_get_bounds_for_decl_addr (tree decl)
{
  tree bounds;

  gcc_assert (TREE_CODE (decl) == VAR_DECL
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  bounds = chkp_get_registered_addr_bounds (decl);

  if (bounds)
    return bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Building bounds for address of decl ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, "\n");
    }

  /* Use zero bounds if size is unknown and checks for
     unknown sizes are restricted.  */
  if ((!DECL_SIZE (decl)
       || (chkp_variable_size_type (TREE_TYPE (decl))
	   && (TREE_STATIC (decl)
	       || DECL_EXTERNAL (decl)
	       || TREE_PUBLIC (decl))))
      && !flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (flag_chkp_use_static_bounds
      && TREE_CODE (decl) == VAR_DECL
      && (TREE_STATIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_PUBLIC (decl))
      && !DECL_THREAD_LOCAL_P (decl))
    {
      /* Static/global vars get a static bounds var initialized by
	 the runtime; here we just load it in the entry block.  */
      tree bnd_var = chkp_make_static_bounds (decl);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else if (!DECL_SIZE (decl)
	   || (chkp_variable_size_type (TREE_TYPE (decl))
	       && (TREE_STATIC (decl)
		   || DECL_EXTERNAL (decl)
		   || TREE_PUBLIC (decl))))
    {
      /* Size is unknown at compile time — compute it at runtime.  */
      gcc_assert (TREE_CODE (decl) == VAR_DECL);
      bounds = chkp_generate_extern_var_bounds (decl);
    }
  else
    {
      tree lb = chkp_build_addr_expr (decl);
      bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
    }

  return bounds;
}
/* Compute and return bounds for constant string CST.  */
static tree
chkp_get_bounds_for_string_cst (tree cst)
{
  tree bounds;
  tree lb;
  tree size;

  gcc_assert (TREE_CODE (cst) == STRING_CST);

  bounds = chkp_get_registered_bounds (cst);

  if (bounds)
    return bounds;

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Use a static bounds var initialized by the runtime and load
	 it in the entry block.  */
      tree bnd_var = chkp_make_static_bounds (cst);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    {
      lb = chkp_build_addr_expr (cst);
      size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
      bounds = chkp_make_bounds (lb, size, NULL, false);
    }

  bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);

  return bounds;
}
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   entry block.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  /* Zero bounds act as the identity for intersection, so skip the
     runtime call when either side is missing or zero.  */
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple stmt;
      tree bounds;

      seq = NULL;

      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
3119 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3120 and 0 othersize. */
3121 static bool
3122 chkp_may_narrow_to_field (tree field)
3124 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3125 && tree_to_uhwi (DECL_SIZE (field)) != 0
3126 && (!DECL_FIELD_OFFSET (field)
3127 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3128 && (!DECL_FIELD_BIT_OFFSET (field)
3129 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3130 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3131 && !chkp_variable_size_type (TREE_TYPE (field));
3134 /* Return 1 if bounds for FIELD should be narrowed to
3135 field's own size. */
3136 static bool
3137 chkp_narrow_bounds_for_field (tree field)
3139 HOST_WIDE_INT offs;
3140 HOST_WIDE_INT bit_offs;
3142 if (!chkp_may_narrow_to_field (field))
3143 return false;
3145 /* Accesse to compiler generated fields should not cause
3146 bounds narrowing. */
3147 if (DECL_ARTIFICIAL (field))
3148 return false;
3150 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3151 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3153 return (flag_chkp_narrow_bounds
3154 && (flag_chkp_first_field_has_own_bounds
3155 || offs
3156 || bit_offs));
3159 /* Perform narrowing for BOUNDS using bounds computed for field
3160 access COMPONENT. ITER meaning is the same as for
3161 chkp_intersect_bounds. */
3162 static tree
3163 chkp_narrow_bounds_to_field (tree bounds, tree component,
3164 gimple_stmt_iterator *iter)
3166 tree field = TREE_OPERAND (component, 1);
3167 tree size = DECL_SIZE_UNIT (field);
3168 tree field_ptr = chkp_build_addr_expr (component);
3169 tree field_bounds;
3171 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3173 return chkp_intersect_bounds (field_bounds, bounds, iter);
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE outer parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS outer parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all node to nodes vector
     in reversed order.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = (TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      gcc_assert (TREE_CODE (var) == VAR_DECL
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
     bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  /* Array indexing may go out of the element, so the access
	     is not statically safe.  */
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
	    {
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (field))
	    comp_to_narrow = var;
	  last_comp = var;

	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      /* Narrow to the array field immediately and forget any
		 previously chosen component.  */
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
/* Compute and return bounds for address of OBJ.
   Computed bounds are cached via chkp_register_addr_bounds.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Code addresses are never dereferenced as data — zero bounds
	 disable checking.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Use the bounds of the underlying complex object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3386 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3387 to compute bounds if required. Computed bounds should be available at
3388 position pointed by ITER.
3390 If PTR_SRC is NULL_TREE then pointer definition is identified.
3392 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3393 PTR. If PTR is a any memory reference then ITER points to a statement
3394 after which bndldx will be inserterd. In both cases ITER will be updated
3395 to point to the inserted bndldx statement. */
3397 static tree
3398 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3400 tree addr = NULL_TREE;
3401 tree bounds = NULL_TREE;
3403 if (!ptr_src)
3404 ptr_src = ptr;
3406 bounds = chkp_get_registered_bounds (ptr_src);
3408 if (bounds)
3409 return bounds;
3411 switch (TREE_CODE (ptr_src))
3413 case MEM_REF:
3414 case VAR_DECL:
3415 if (BOUNDED_P (ptr_src))
3416 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3417 bounds = chkp_get_zero_bounds ();
3418 else
3420 addr = chkp_build_addr_expr (ptr_src);
3421 bounds = chkp_build_bndldx (addr, ptr, iter);
3423 else
3424 bounds = chkp_get_nonpointer_load_bounds ();
3425 break;
3427 case ARRAY_REF:
3428 case COMPONENT_REF:
3429 addr = get_base_address (ptr_src);
3430 if (DECL_P (addr)
3431 || TREE_CODE (addr) == MEM_REF
3432 || TREE_CODE (addr) == TARGET_MEM_REF)
3434 if (BOUNDED_P (ptr_src))
3435 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3436 bounds = chkp_get_zero_bounds ();
3437 else
3439 addr = chkp_build_addr_expr (ptr_src);
3440 bounds = chkp_build_bndldx (addr, ptr, iter);
3442 else
3443 bounds = chkp_get_nonpointer_load_bounds ();
3445 else
3447 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3448 bounds = chkp_find_bounds (addr, iter);
3450 break;
3452 case PARM_DECL:
3453 gcc_unreachable ();
3454 bounds = chkp_get_bound_for_parm (ptr_src);
3455 break;
3457 case TARGET_MEM_REF:
3458 addr = chkp_build_addr_expr (ptr_src);
3459 bounds = chkp_build_bndldx (addr, ptr, iter);
3460 break;
3462 case SSA_NAME:
3463 bounds = chkp_get_registered_bounds (ptr_src);
3464 if (!bounds)
3466 gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3467 gphi_iterator phi_iter;
3469 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3471 gcc_assert (bounds);
3473 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3475 unsigned i;
3477 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3479 tree arg = gimple_phi_arg_def (def_phi, i);
3480 tree arg_bnd;
3481 gphi *phi_bnd;
3483 arg_bnd = chkp_find_bounds (arg, NULL);
3485 /* chkp_get_bounds_by_definition created new phi
3486 statement and phi_iter points to it.
3488 Previous call to chkp_find_bounds could create
3489 new basic block and therefore change phi statement
3490 phi_iter points to. */
3491 phi_bnd = phi_iter.phi ();
3493 add_phi_arg (phi_bnd, arg_bnd,
3494 gimple_phi_arg_edge (def_phi, i),
3495 UNKNOWN_LOCATION);
3498 /* If all bound phi nodes have their arg computed
3499 then we may finish its computation. See
3500 chkp_finish_incomplete_bounds for more details. */
3501 if (chkp_may_finish_incomplete_bounds ())
3502 chkp_finish_incomplete_bounds ();
3505 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3506 || chkp_incomplete_bounds (bounds));
3508 break;
3510 case ADDR_EXPR:
3511 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3512 break;
3514 case INTEGER_CST:
3515 if (integer_zerop (ptr_src))
3516 bounds = chkp_get_none_bounds ();
3517 else
3518 bounds = chkp_get_invalid_op_bounds ();
3519 break;
3521 default:
3522 if (dump_file && (dump_flags & TDF_DETAILS))
3524 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3525 get_tree_code_name (TREE_CODE (ptr_src)));
3526 print_node (dump_file, "", ptr_src, 0);
3528 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3529 get_tree_code_name (TREE_CODE (ptr_src)));
3532 if (!bounds)
3534 if (dump_file && (dump_flags & TDF_DETAILS))
3536 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3537 print_node (dump_file, "", ptr_src, 0);
3539 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3542 return bounds;
3545 /* Normal case for bounds search without forced narrowing. */
3546 static tree
3547 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3549 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3552 /* Search bounds for pointer PTR loaded from PTR_SRC
3553 by statement *ITER points to. */
3554 static tree
3555 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3557 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3560 /* Helper function which checks type of RHS and finds all pointers in
3561 it.  For each found pointer we build its accesses in LHS and RHS
3562 objects and then call HANDLER for them.  Function is used to copy
3563 or initialize bounds for a copied object.
Recurses structurally: bounded (pointer) types invoke HANDLER directly;
records/unions and arrays are decomposed field-by-field or
element-by-element, handling CONSTRUCTOR right-hand sides specially. */
3564 static void
3565 chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
3566 assign_handler handler)
3568 tree type = TREE_TYPE (lhs);
3570 /* We have nothing to do with clobbers. */
3571 if (TREE_CLOBBER_P (rhs))
3572 return;
3574 if (BOUNDED_TYPE_P (type))
3575 handler (lhs, rhs, arg);
3576 else if (RECORD_OR_UNION_TYPE_P (type))
3578 tree field;
3580 if (TREE_CODE (rhs) == CONSTRUCTOR)
3582 unsigned HOST_WIDE_INT cnt;
3583 tree val;
/* Constructor initializer: recurse only into fields that actually
   contain pointers, pairing each value with the matching LHS field. */
3585 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
3587 if (chkp_type_has_pointer (TREE_TYPE (field)))
3589 tree lhs_field = chkp_build_component_ref (lhs, field);
3590 chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
3594 else
/* Plain struct copy: walk every FIELD_DECL that contains pointers. */
3595 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3596 if (TREE_CODE (field) == FIELD_DECL
3597 && chkp_type_has_pointer (TREE_TYPE (field)))
3599 tree rhs_field = chkp_build_component_ref (rhs, field);
3600 tree lhs_field = chkp_build_component_ref (lhs, field);
3601 chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
3604 else if (TREE_CODE (type) == ARRAY_TYPE)
3606 unsigned HOST_WIDE_INT cur = 0;
3607 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3608 tree etype = TREE_TYPE (type);
3609 tree esize = TYPE_SIZE (etype);
3611 if (TREE_CODE (rhs) == CONSTRUCTOR)
3613 unsigned HOST_WIDE_INT cnt;
3614 tree purp, val, lhs_elem;
3616 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
/* A RANGE_EXPR index applies VAL to every element in [lo, hi]. */
3618 if (purp && TREE_CODE (purp) == RANGE_EXPR)
3620 tree lo_index = TREE_OPERAND (purp, 0);
3621 tree hi_index = TREE_OPERAND (purp, 1);
3623 for (cur = (unsigned)tree_to_uhwi (lo_index);
3624 cur <= (unsigned)tree_to_uhwi (hi_index);
3625 cur++)
3627 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3628 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3631 else
3633 if (purp)
3635 gcc_assert (TREE_CODE (purp) == INTEGER_CST);
3636 cur = tree_to_uhwi (purp);
/* No index means "next element"; cur advances implicitly. */
3639 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
3641 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3645 /* Copy array only when size is known. */
3646 else if (maxval && !integer_minus_onep (maxval))
3647 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
3649 tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3650 tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
3651 chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
3654 else
3655 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3656 get_tree_code_name (TREE_CODE (type)));
3659 /* Add code to copy bounds for assignment of RHS to LHS.
3660 ARG is an iterator pointing ne code position. */
3661 static void
3662 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3664 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3665 tree bounds = chkp_find_bounds (rhs, iter);
3666 tree addr = chkp_build_addr_expr(lhs);
3668 chkp_build_bndstx (addr, rhs, bounds, iter);
3671 /* Emit static bound initializers and size vars.
Builds up to two static constructor kinds: 'P' ctors storing bounds for
statically initialized pointers, and 'B' ctors initializing static bounds
vars.  Constructors are flushed in batches of
MAX_STMTS_IN_STATIC_CHKP_CTOR statements. */
3672 void
3673 chkp_finish_file (void)
3675 struct varpool_node *node;
3676 struct chkp_ctor_stmt_list stmts;
3678 if (seen_error ())
3679 return;
3681 /* Iterate through varpool and generate bounds initialization
3682 constructors for all statically initialized pointers. */
3683 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3684 stmts.stmts = NULL;
3685 FOR_EACH_VARIABLE (node)
3686 /* Check that var is actually emitted and we need and may initialize
3687 its bounds. */
3688 if (node->need_bounds_init
3689 && !POINTER_BOUNDS_P (node->decl)
3690 && DECL_RTL (node->decl)
3691 && MEM_P (DECL_RTL (node->decl))
3692 && TREE_ASM_WRITTEN (node->decl))
3694 chkp_walk_pointer_assignments (node->decl,
3695 DECL_INITIAL (node->decl),
3696 &stmts,
3697 chkp_add_modification_to_stmt_list);
/* Flush a full batch into its own static constructor. */
3699 if (stmts.avail <= 0)
3701 cgraph_build_static_cdtor ('P', stmts.stmts,
3702 MAX_RESERVED_INIT_PRIORITY + 3);
3703 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3704 stmts.stmts = NULL;
/* Emit the final partial batch, if any. */
3708 if (stmts.stmts)
3709 cgraph_build_static_cdtor ('P', stmts.stmts,
3710 MAX_RESERVED_INIT_PRIORITY + 3);
3712 /* Iterate through varpool and generate bounds initialization
3713 constructors for all static bounds vars. */
3714 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3715 stmts.stmts = NULL;
3716 FOR_EACH_VARIABLE (node)
3717 if (node->need_bounds_init
3718 && POINTER_BOUNDS_P (node->decl)
3719 && TREE_ASM_WRITTEN (node->decl))
3721 tree bnd = node->decl;
3722 tree var;
/* A static bounds var's initializer must be the address of the
   variable whose bounds it describes. */
3724 gcc_assert (DECL_INITIAL (bnd)
3725 && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);
3727 var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
3728 chkp_output_static_bounds (bnd, var, &stmts);
3731 if (stmts.stmts)
3732 cgraph_build_static_cdtor ('B', stmts.stmts,
3733 MAX_RESERVED_INIT_PRIORITY + 2);
/* Release file-scope maps; they are not needed past this point. */
3735 delete chkp_static_var_bounds;
3736 delete chkp_bounds_map;
3739 /* An instrumentation function which is called for each statement
3740 having memory access we want to instrument. It inserts check
3741 code and bounds copy code.
3743 ITER points to statement to instrument.
3745 NODE holds memory access in statement to check.
3747 LOC holds the location information for statement.
3749 DIRFLAGS determines whether access is read or write.
3751 ACCESS_OFFS should be added to address used in NODE
3752 before check.
3754 ACCESS_SIZE holds size of checked access.
3756 SAFE indicates if NODE access is safe and should not be
3757 checked. */
3758 static void
3759 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3760 location_t loc, tree dirflag,
3761 tree access_offs, tree access_size,
3762 bool safe)
3764 tree node_type = TREE_TYPE (node);
3765 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3766 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3767 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3768 tree ptr = NULL_TREE; /* a pointer used for dereference */
3769 tree bounds = NULL_TREE;
3771 /* We do not need instrumentation for clobbers. */
3772 if (dirflag == integer_one_node
3773 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3774 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3775 return;
3777 switch (TREE_CODE (node))
3779 case ARRAY_REF:
3780 case COMPONENT_REF:
3782 bool bitfield;
3783 tree elt;
3785 if (safe)
3787 /* We are not going to generate any checks, so do not
3788 generate bounds as well. */
3789 addr_first = chkp_build_addr_expr (node);
3790 break;
3793 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3794 &bitfield, &bounds, iter, false);
3796 /* Break if there is no dereference and operation is safe. */
3798 if (bitfield)
3800 tree field = TREE_OPERAND (node, 1);
3802 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3803 size = DECL_SIZE_UNIT (field);
3805 if (elt)
3806 elt = chkp_build_addr_expr (elt);
3807 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3808 addr_first = fold_build_pointer_plus_loc (loc,
3809 addr_first,
3810 byte_position (field));
3812 else
3813 addr_first = chkp_build_addr_expr (node);
3815 break;
3817 case INDIRECT_REF:
3818 ptr = TREE_OPERAND (node, 0);
3819 addr_first = ptr;
3820 break;
3822 case MEM_REF:
3823 ptr = TREE_OPERAND (node, 0);
3824 addr_first = chkp_build_addr_expr (node);
3825 break;
3827 case TARGET_MEM_REF:
3828 ptr = TMR_BASE (node);
3829 addr_first = chkp_build_addr_expr (node);
3830 break;
3832 case ARRAY_RANGE_REF:
3833 printf("ARRAY_RANGE_REF\n");
3834 debug_gimple_stmt(gsi_stmt(*iter));
3835 debug_tree(node);
3836 gcc_unreachable ();
3837 break;
3839 case BIT_FIELD_REF:
3841 tree offs, rem, bpu;
3843 gcc_assert (!access_offs);
3844 gcc_assert (!access_size);
3846 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3847 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3848 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3849 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3851 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3852 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3853 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3854 size = fold_convert (size_type_node, size);
3856 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3857 dirflag, offs, size, safe);
3858 return;
3860 break;
3862 case VAR_DECL:
3863 case RESULT_DECL:
3864 case PARM_DECL:
3865 if (dirflag != integer_one_node
3866 || DECL_REGISTER (node))
3867 return;
3869 safe = true;
3870 addr_first = chkp_build_addr_expr (node);
3871 break;
3873 default:
3874 return;
3877 /* If addr_last was not computed then use (addr_first + size - 1)
3878 expression to compute it. */
3879 if (!addr_last)
3881 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3882 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3885 /* Shift both first_addr and last_addr by access_offs if specified. */
3886 if (access_offs)
3888 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3889 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3892 /* Generate bndcl/bndcu checks if memory access is not safe. */
3893 if (!safe)
3895 gimple_stmt_iterator stmt_iter = *iter;
3897 if (!bounds)
3898 bounds = chkp_find_bounds (ptr, iter);
3900 chkp_check_mem_access (addr_first, addr_last, bounds,
3901 stmt_iter, loc, dirflag);
3904 /* We need to store bounds in case pointer is stored. */
3905 if (dirflag == integer_one_node
3906 && chkp_type_has_pointer (node_type)
3907 && flag_chkp_store_bounds)
3909 gimple stmt = gsi_stmt (*iter);
3910 tree rhs1 = gimple_assign_rhs1 (stmt);
3911 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3913 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3914 chkp_walk_pointer_assignments (node, rhs1, iter,
3915 chkp_copy_bounds_for_elem);
3916 else
3918 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3919 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
3924 /* Add code to copy bounds for all pointers copied
3925 in ASSIGN created during inline of EDGE.
Also creates call-graph edges for every bndldx/bndstx call inserted
before ASSIGN by the walk below. */
3926 void
3927 chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
3929 tree lhs = gimple_assign_lhs (assign);
3930 tree rhs = gimple_assign_rhs1 (assign);
3931 gimple_stmt_iterator iter = gsi_for_stmt (assign);
3933 if (!flag_chkp_store_bounds)
3934 return;
3936 chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);
3938 /* We should create edges for all created calls to bndldx and bndstx. */
/* The walk left ITER on the last inserted statement; scan backwards
   until we reach ASSIGN itself. */
3939 while (gsi_stmt (iter) != assign)
3941 gimple stmt = gsi_stmt (iter);
3942 if (gimple_code (stmt) == GIMPLE_CALL)
3944 tree fndecl = gimple_call_fndecl (stmt);
3945 struct cgraph_node *callee = cgraph_node::get_create (fndecl);
3946 struct cgraph_edge *new_edge;
3948 gcc_assert (fndecl == chkp_bndstx_fndecl
3949 || fndecl == chkp_bndldx_fndecl
3950 || fndecl == chkp_ret_bnd_fndecl);
/* New edges inherit count/frequency from the inlined edge. */
3952 new_edge = edge->caller->create_edge (callee,
3953 as_a <gcall *> (stmt),
3954 edge->count,
3955 edge->frequency);
3956 new_edge->frequency = compute_call_stmt_bb_frequency
3957 (edge->caller->decl, gimple_bb (stmt));
3959 gsi_prev (&iter);
3963 /* Some code transformation made during instrumentation pass
3964 may put code into inconsistent state.  Here we find and fix
3965 such flaws.
Specifically: statements inserted after a block-ending statement are
moved onto the fallthru edge, temporarily downgrading an abnormal edge
so it can be split. */
3966 void
3967 chkp_fix_cfg ()
3969 basic_block bb;
3970 gimple_stmt_iterator i;
3972 /* We could insert some code right after stmt which ends bb.
3973 We wanted to put this code on fallthru edge but did not
3974 add new edges from the beginning because it may cause new
3975 phi node creation which may be incorrect due to incomplete
3976 bound phi nodes. */
3977 FOR_ALL_BB_FN (bb, cfun)
3978 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
3980 gimple stmt = gsi_stmt (i);
3981 gimple_stmt_iterator next = i;
3983 gsi_next (&next);
/* A block-ending statement followed by more statements is the
   inconsistency we are repairing. */
3985 if (stmt_ends_bb_p (stmt)
3986 && !gsi_end_p (next))
3988 edge fall = find_fallthru_edge (bb->succs);
3989 basic_block dest = NULL;
3990 int flags = 0;
3992 gcc_assert (fall);
3994 /* We cannot split abnormal edge.  Therefore we
3995 store its params, make it regular and then
3996 rebuild abnormal edge after split. */
3997 if (fall->flags & EDGE_ABNORMAL)
3999 flags = fall->flags & ~EDGE_FALLTHRU;
4000 dest = fall->dest;
4002 fall->flags &= ~EDGE_COMPLEX;
/* Move every trailing statement onto the fallthru edge. */
4005 while (!gsi_end_p (next))
4007 gimple next_stmt = gsi_stmt (next);
4008 gsi_remove (&next, false);
4009 gsi_insert_on_edge (fall, next_stmt);
4012 gsi_commit_edge_inserts ();
4014 /* Re-create abnormal edge. */
4015 if (dest)
4016 make_edge (bb, dest, flags);
4021 /* Walker callback for chkp_replace_function_pointers. Replaces
4022 function pointer in the specified operand with pointer to the
4023 instrumented function version. */
4024 static tree
4025 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4026 void *data ATTRIBUTE_UNUSED)
4028 if (TREE_CODE (*op) == FUNCTION_DECL
4029 && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op))
4030 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4031 /* For builtins we replace pointers only for selected
4032 function and functions having definitions. */
4033 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4034 && (chkp_instrument_normal_builtin (*op)
4035 || gimple_has_body_p (*op)))))
4037 struct cgraph_node *node = cgraph_node::get_create (*op);
4038 struct cgraph_node *clone = NULL;
4040 if (!node->instrumentation_clone)
4041 clone = chkp_maybe_create_clone (*op);
4043 if (clone)
4044 *op = clone->decl;
4045 *walk_subtrees = 0;
4048 return NULL;
4051 /* This function searches for function pointers in statement
4052 pointed by GSI and replaces them with pointers to instrumented
4053 function versions. */
4054 static void
4055 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4057 gimple stmt = gsi_stmt (*gsi);
4058 /* For calls we want to walk call args only. */
4059 if (gimple_code (stmt) == GIMPLE_CALL)
4061 unsigned i;
4062 for (i = 0; i < gimple_call_num_args (stmt); i++)
4063 walk_tree (gimple_call_arg_ptr (stmt, i),
4064 chkp_replace_function_pointer, NULL, NULL);
4066 else
4067 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4070 /* This function instruments all statements working with memory,
4071 calls and rets.
4073 It also removes excess statements from static initializers.
Finally, for addressable parameters, incoming bounds are stored into
the bounds table at the entry block. */
4074 static void
4075 chkp_instrument_function (void)
4077 basic_block bb, next;
4078 gimple_stmt_iterator i;
4079 enum gimple_rhs_class grhs_class;
/* In a checker-generated "chkp ctor" all accesses are known safe. */
4080 bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
/* Walk blocks via a saved next pointer since instrumentation may
   insert new blocks. */
4082 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4085 next = bb->next_bb;
4086 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4088 gimple s = gsi_stmt (i);
4090 /* Skip statement marked to not be instrumented. */
4091 if (chkp_marked_stmt_p (s))
4093 gsi_next (&i);
4094 continue;
4097 chkp_replace_function_pointers (&i);
4099 switch (gimple_code (s))
4101 case GIMPLE_ASSIGN:
/* Check the store (lhs) and the loads (rhs operands). */
4102 chkp_process_stmt (&i, gimple_assign_lhs (s),
4103 gimple_location (s), integer_one_node,
4104 NULL_TREE, NULL_TREE, safe);
4105 chkp_process_stmt (&i, gimple_assign_rhs1 (s),
4106 gimple_location (s), integer_zero_node,
4107 NULL_TREE, NULL_TREE, safe);
4108 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
4109 if (grhs_class == GIMPLE_BINARY_RHS)
4110 chkp_process_stmt (&i, gimple_assign_rhs2 (s),
4111 gimple_location (s), integer_zero_node,
4112 NULL_TREE, NULL_TREE, safe);
4113 break;
4115 case GIMPLE_RETURN:
4117 greturn *r = as_a <greturn *> (s);
4118 if (gimple_return_retval (r) != NULL_TREE)
4120 chkp_process_stmt (&i, gimple_return_retval (r),
4121 gimple_location (r),
4122 integer_zero_node,
4123 NULL_TREE, NULL_TREE, safe);
4125 /* Additionally we need to add bounds
4126 to return statement. */
4127 chkp_add_bounds_to_ret_stmt (&i);
4130 break;
4132 case GIMPLE_CALL:
4133 chkp_add_bounds_to_call_stmt (&i);
4134 break;
4136 default:
4140 gsi_next (&i);
4142 /* We do not need any actual pointer stores in checker
4143 static initializer. */
4144 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4145 && gimple_code (s) == GIMPLE_ASSIGN
4146 && gimple_store_p (s))
4148 gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4149 gsi_remove (&del_iter, true);
4150 unlink_stmt_vdef (s);
4151 release_defs(s);
4154 bb = next;
4156 while (bb);
4158 /* Some input params may have bounds and be address taken.  In this case
4159 we should store incoming bounds into bounds table. */
4160 tree arg;
4161 if (flag_chkp_store_bounds)
4162 for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4163 if (TREE_ADDRESSABLE (arg))
4165 if (BOUNDED_P (arg))
4167 tree bounds = chkp_get_next_bounds_parm (arg);
4168 tree def_ptr = ssa_default_def (cfun, arg);
4169 gimple_stmt_iterator iter
4170 = gsi_start_bb (chkp_get_entry_block ());
4171 chkp_build_bndstx (chkp_build_addr_expr (arg),
4172 def_ptr ? def_ptr : arg,
4173 bounds, &iter);
4175 /* Skip bounds arg. */
4176 arg = TREE_CHAIN (arg);
4178 else if (chkp_type_has_pointer (TREE_TYPE (arg)))
4180 tree orig_arg = arg;
4181 bitmap slots = BITMAP_ALLOC (NULL);
4182 gimple_stmt_iterator iter
4183 = gsi_start_bb (chkp_get_entry_block ());
4184 bitmap_iterator bi;
4185 unsigned bnd_no;
/* One bndstx per pointer slot found inside the aggregate arg. */
4187 chkp_find_bound_slots (TREE_TYPE (arg), slots);
4189 EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4191 tree bounds = chkp_get_next_bounds_parm (arg);
4192 HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4193 tree addr = chkp_build_addr_expr (orig_arg);
4194 tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4195 build_int_cst (ptr_type_node, offs));
4196 chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4197 bounds, &iter);
4199 arg = DECL_CHAIN (arg);
4201 BITMAP_FREE (slots);
4206 /* Find init/null/copy_ptr_bounds calls and replace them
4207 with assignments.  It should allow better code
4208 optimization. */
4210 static void
4211 chkp_remove_useless_builtins ()
4213 basic_block bb;
4214 gimple_stmt_iterator gsi;
4216 FOR_EACH_BB_FN (bb, cfun)
4218 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4220 gimple stmt = gsi_stmt (gsi);
4221 tree fndecl;
4222 enum built_in_function fcode;
4224 /* Find builtins returning first arg and replace
4225 them with assignments.
Note: fndecl and fcode are intentionally assigned inside the
condition; the fcode test also relies on these builtins never
having code BUILT_IN_NONE (0). */
4226 if (gimple_code (stmt) == GIMPLE_CALL
4227 && (fndecl = gimple_call_fndecl (stmt))
4228 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4229 && (fcode = DECL_FUNCTION_CODE (fndecl))
4230 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4231 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4232 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4233 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
/* These builtins return their first argument unchanged, so the
   call degenerates to a plain copy. */
4235 tree res = gimple_call_arg (stmt, 0);
4236 update_call_from_tree (&gsi, res);
4237 stmt = gsi_stmt (gsi);
4238 update_stmt (stmt);
4244 /* Initialize pass.
Resets per-function pass state: clears stmt marks, (re)allocates the
bounds maps, resets cached bounds vars, emits the comdat zero/none
bounds constants and computes dominance info. */
4245 static void
4246 chkp_init (void)
4248 basic_block bb;
4249 gimple_stmt_iterator i;
4251 in_chkp_pass = true;
4253 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4254 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4255 chkp_unmark_stmt (gsi_stmt (i));
4257 chkp_invalid_bounds = new hash_set<tree>;
4258 chkp_completed_bounds_set = new hash_set<tree>;
/* Maps that may survive from a previous function are deleted first. */
4259 delete chkp_reg_bounds;
4260 chkp_reg_bounds = new hash_map<tree, tree>;
4261 delete chkp_bound_vars;
4262 chkp_bound_vars = new hash_map<tree, tree>;
4263 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4264 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4265 delete chkp_bounds_map;
4266 chkp_bounds_map = new hash_map<tree, tree>;
4267 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
4269 entry_block = NULL;
4270 zero_bounds = NULL_TREE;
4271 none_bounds = NULL_TREE;
4272 incomplete_bounds = integer_zero_node;
4273 tmp_var = NULL_TREE;
4274 size_tmp_var = NULL_TREE;
4276 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4278 /* We create these constant bounds once for each object file.
4279 These symbols go to comdat section and result in single copy
4280 of each one in the final binary. */
4281 chkp_get_zero_bounds_var ();
4282 chkp_get_none_bounds_var ();
4284 calculate_dominance_info (CDI_DOMINATORS);
4285 calculate_dominance_info (CDI_POST_DOMINATORS);
4287 bitmap_obstack_initialize (NULL);
4290 /* Finalize instrumentation pass.
Mirrors chkp_init: releases the per-function sets/maps allocated there,
frees dominance info and resets cached bounds state.  Note that
chkp_reg_bounds/chkp_bound_vars/chkp_bounds_map are deliberately kept
(they are deleted in chkp_init or chkp_finish_file). */
4291 static void
4292 chkp_fini (void)
4294 in_chkp_pass = false;
4296 delete chkp_invalid_bounds;
4297 delete chkp_completed_bounds_set;
4298 delete chkp_reg_addr_bounds;
4299 delete chkp_incomplete_bounds_map;
4301 free_dominance_info (CDI_DOMINATORS);
4302 free_dominance_info (CDI_POST_DOMINATORS);
4304 bitmap_obstack_release (NULL);
4306 entry_block = NULL;
4307 zero_bounds = NULL_TREE;
4308 none_bounds = NULL_TREE;
4311 /* Main instrumentation pass function.
Fixed pipeline: init state, instrument the body, simplify trivial
chkp builtins, mark the function instrumented, repair the CFG, then
tear state down.  Always returns 0 (no extra TODO flags). */
4312 static unsigned int
4313 chkp_execute (void)
4315 chkp_init ();
4317 chkp_instrument_function ();
4319 chkp_remove_useless_builtins ();
4321 chkp_function_mark_instrumented (cfun->decl);
4323 chkp_fix_cfg ();
4325 chkp_fini ();
4327 return 0;
4330 /* Instrumentation pass gate. */
4331 static bool
4332 chkp_gate (void)
4334 cgraph_node *node = cgraph_node::get (cfun->decl);
4335 return ((node != NULL
4336 && node->instrumentation_clone)
4337 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
/* Pass registration boilerplate: pass descriptor, opt-pass wrapper
   delegating to chkp_gate/chkp_execute, and the factory function. */
4340 namespace {
4342 const pass_data pass_data_chkp =
4344 GIMPLE_PASS, /* type */
4345 "chkp", /* name */
4346 OPTGROUP_NONE, /* optinfo_flags */
4347 TV_NONE, /* tv_id */
4348 PROP_ssa | PROP_cfg, /* properties_required */
4349 0, /* properties_provided */
4350 0, /* properties_destroyed */
4351 0, /* todo_flags_start */
4352 TODO_verify_il
4353 | TODO_update_ssa /* todo_flags_finish */
4356 class pass_chkp : public gimple_opt_pass
4358 public:
4359 pass_chkp (gcc::context *ctxt)
4360 : gimple_opt_pass (pass_data_chkp, ctxt)
4363 /* opt_pass methods: */
/* clone is required because the pass may be inserted more than once
   into the pass pipeline. */
4364 virtual opt_pass * clone ()
4366 return new pass_chkp (m_ctxt);
4369 virtual bool gate (function *)
4371 return chkp_gate ();
4374 virtual unsigned int execute (function *)
4376 return chkp_execute ();
4379 }; // class pass_chkp
4381 } // anon namespace
4383 gimple_opt_pass *
4384 make_pass_chkp (gcc::context *ctxt)
4386 return new pass_chkp (ctxt);
4389 #include "gt-tree-chkp.h"