Daily bump.
[official-gcc.git] / gcc / tree-chkp.c
blob8c5a628a9ad5bf6369ddab20720e1691ce055982
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-set.h"
25 #include "machmode.h"
26 #include "vec.h"
27 #include "double-int.h"
28 #include "input.h"
29 #include "alias.h"
30 #include "symtab.h"
31 #include "options.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stor-layout.h"
37 #include "varasm.h"
38 #include "target.h"
39 #include "tree-iterator.h"
40 #include "tree-cfg.h"
41 #include "langhooks.h"
42 #include "tree-pass.h"
43 #include "diagnostic.h"
44 #include "ggc.h"
45 #include "is-a.h"
46 #include "cfgloop.h"
47 #include "stringpool.h"
48 #include "tree-ssa-alias.h"
49 #include "tree-ssanames.h"
50 #include "tree-ssa-operands.h"
51 #include "tree-ssa-address.h"
52 #include "tree-ssa.h"
53 #include "predict.h"
54 #include "dominance.h"
55 #include "cfg.h"
56 #include "basic-block.h"
57 #include "tree-ssa-loop-niter.h"
58 #include "gimple-expr.h"
59 #include "gimple.h"
60 #include "tree-phinodes.h"
61 #include "gimple-ssa.h"
62 #include "ssa-iterators.h"
63 #include "gimple-pretty-print.h"
64 #include "gimple-iterator.h"
65 #include "gimplify.h"
66 #include "gimplify-me.h"
67 #include "print-tree.h"
68 #include "hashtab.h"
69 #include "tm.h"
70 #include "hard-reg-set.h"
71 #include "function.h"
72 #include "rtl.h"
73 #include "flags.h"
74 #include "statistics.h"
75 #include "real.h"
76 #include "fixed-value.h"
77 #include "insn-config.h"
78 #include "expmed.h"
79 #include "dojump.h"
80 #include "explow.h"
81 #include "calls.h"
82 #include "emit-rtl.h"
83 #include "stmt.h"
84 #include "expr.h"
85 #include "tree-ssa-propagate.h"
86 #include "gimple-fold.h"
87 #include "tree-chkp.h"
88 #include "gimple-walk.h"
89 #include "rtl.h" /* For MEM_P, assign_temp. */
90 #include "tree-dfa.h"
91 #include "ipa-ref.h"
92 #include "lto-streamer.h"
93 #include "cgraph.h"
94 #include "ipa-chkp.h"
95 #include "params.h"
97 /* Pointer Bounds Checker instruments code with memory checks to find
98 out-of-bounds memory accesses. Checks are performed by computing
99 bounds for each pointer and then comparing address of accessed
100 memory before pointer dereferencing.
102 1. Function clones.
104 See ipa-chkp.c.
106 2. Instrumentation.
108 There are few things to instrument:
110 a) Memory accesses - add checker calls to check address of accessed memory
111 against bounds of dereferenced pointer. Obviously safe memory
112 accesses like static variable access does not have to be instrumented
113 with checks.
115 Example:
117 val_2 = *p_1;
119 with 4 bytes access is transformed into:
121 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
122 D.1_4 = p_1 + 3;
123 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
124 val_2 = *p_1;
126 where __bound_tmp.1_3 are bounds computed for pointer p_1,
127 __builtin___chkp_bndcl is a lower bound check and
128 __builtin___chkp_bndcu is an upper bound check.
130 b) Pointer stores.
132 When pointer is stored in memory we need to store its bounds. To
133 achieve compatibility of instrumented code with regular codes
134 we have to keep data layout and store bounds in special bound tables
135 via special checker call. Implementation of bounds table may vary for
136 different platforms. It has to associate pointer value and its
137 location (it is required because we may have two equal pointers
138 with different bounds stored in different places) with bounds.
139 Another checker builtin allows to get bounds for specified pointer
140 loaded from specified location.
142 Example:
144 buf1[i_1] = &buf2;
146 is transformed into:
148 buf1[i_1] = &buf2;
149 D.1_2 = &buf1[i_1];
150 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
152 where __bound_tmp.1_2 are bounds of &buf2.
154 c) Static initialization.
156 The special case of pointer store is static pointer initialization.
157 Bounds initialization is performed in a few steps:
158 - register all static initializations in front-end using
159 chkp_register_var_initializer
160 - when file compilation finishes we create functions with special
161 attribute 'chkp ctor' and put explicit initialization code
162 (assignments) for all statically initialized pointers.
163 - when checker constructor is compiled checker pass adds required
164 bounds initialization for all statically initialized pointers
165 - since we do not actually need excess pointers initialization
166 in checker constructor we remove such assignments from them
168 d) Calls.
170 For each call in the code we add additional arguments to pass
171 bounds for pointer arguments. We determine type of call arguments
172 using arguments list from function declaration; if function
173 declaration is not available we use function type; otherwise
174 (e.g. for unnamed arguments) we use type of passed value. Function
175 declaration/type is replaced with the instrumented one.
177 Example:
179 val_1 = foo (&buf1, &buf2, &buf1, 0);
181 is translated into:
183 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
184 &buf1, __bound_tmp.1_2, 0);
186 e) Returns.
188 If function returns a pointer value we have to return bounds also.
189 A new operand was added for return statement to hold returned bounds.
191 Example:
193 return &_buf1;
195 is transformed into
197 return &_buf1, __bound_tmp.1_1;
199 3. Bounds computation.
201 Compiler is fully responsible for computing bounds to be used for each
202 memory access. The first step for bounds computation is to find the
203 origin of pointer dereferenced for memory access. Basing on pointer
204 origin we define a way to compute its bounds. There are just few
205 possible cases:
207 a) Pointer is returned by call.
209 In this case we use corresponding checker builtin method to obtain returned
210 bounds.
212 Example:
214 buf_1 = malloc (size_2);
215 foo (buf_1);
217 is translated into:
219 buf_1 = malloc (size_2);
220 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
221 foo (buf_1, __bound_tmp.1_3);
223 b) Pointer is an address of an object.
225 In this case compiler tries to compute objects size and create corresponding
226 bounds. If object has incomplete type then special checker builtin is used to
227 obtain its size at runtime.
229 Example:
231 foo ()
233 <unnamed type> __bound_tmp.3;
234 static int buf[100];
236 <bb 3>:
237 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
239 <bb 2>:
240 return &buf, __bound_tmp.3_2;
243 Example:
245 Address of an object 'extern int buf[]' with incomplete type is
246 returned.
248 foo ()
250 <unnamed type> __bound_tmp.4;
251 long unsigned int __size_tmp.3;
253 <bb 3>:
254 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
255 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
257 <bb 2>:
258 return &buf, __bound_tmp.4_3;
261 c) Pointer is the result of object narrowing.
263 It happens when we use pointer to an object to compute pointer to a part
264 of an object. E.g. we take pointer to a field of a structure. In this
265 case we perform bounds intersection using bounds of original object and
266 bounds of object's part (which are computed basing on its type).
268 There may be some debatable questions about when narrowing should occur
269 and when it should not. To avoid false bound violations in correct
270 programs we do not perform narrowing when address of an array element is
271 obtained (it has address of the whole array) and when address of the first
272 structure field is obtained (because it is guaranteed to be equal to
273 address of the whole structure and it is legal to cast it back to structure).
275 Default narrowing behavior may be changed using compiler flags.
277 Example:
279 In this example address of the second structure field is returned.
281 foo (struct A * p, __bounds_type __bounds_of_p)
283 <unnamed type> __bound_tmp.3;
284 int * _2;
285 int * _5;
287 <bb 2>:
288 _5 = &p_1(D)->second_field;
289 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
290 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
291 __bounds_of_p_3(D));
292 _2 = &p_1(D)->second_field;
293 return _2, __bound_tmp.3_8;
296 Example:
298 In this example address of the first field of array element is returned.
300 foo (struct A * p, __bounds_type __bounds_of_p, int i)
302 long unsigned int _3;
303 long unsigned int _4;
304 struct A * _6;
305 int * _7;
307 <bb 2>:
308 _3 = (long unsigned int) i_1(D);
309 _4 = _3 * 8;
310 _6 = p_5(D) + _4;
311 _7 = &_6->first_field;
312 return _7, __bounds_of_p_2(D);
316 d) Pointer is the result of pointer arithmetic or type cast.
318 In this case bounds of the base pointer are used. In case of binary
319 operation producing a pointer we are analyzing data flow further
320 looking for operand's bounds. One operand is considered as a base
321 if it has some valid bounds. If we fall into a case when none of
322 operands (or both of them) has valid bounds, a default bounds value
323 is used.
325 Trying to find out bounds for binary operations we may fall into
326 cyclic dependencies for pointers. To avoid infinite recursion all
327 walked phi nodes instantly obtain corresponding bounds but created
328 bounds are marked as incomplete. It helps us to stop DF walk during
329 bounds search.
331 When we reach pointer source, some args of incomplete bounds phi obtain
332 valid bounds and those values are propagated further through phi nodes.
333 If no valid bounds were found for phi node then we mark its result as
334 invalid bounds. Process stops when all incomplete bounds become either
335 valid or invalid and we are able to choose a pointer base.
337 e) Pointer is loaded from the memory.
339 In this case we just need to load bounds from the bounds table.
341 Example:
343 foo ()
345 <unnamed type> __bound_tmp.3;
346 static int * buf;
347 int * _2;
349 <bb 2>:
350 _2 = buf;
351 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
352 return _2, __bound_tmp.3_4;
357 typedef void (*assign_handler)(tree, tree, void *);
359 static tree chkp_get_zero_bounds ();
360 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
361 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
362 gimple_stmt_iterator *iter);
363 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
364 tree *elt, bool *safe,
365 bool *bitfield,
366 tree *bounds,
367 gimple_stmt_iterator *iter,
368 bool innermost_bounds);
370 #define chkp_bndldx_fndecl \
371 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
372 #define chkp_bndstx_fndecl \
373 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
374 #define chkp_checkl_fndecl \
375 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
376 #define chkp_checku_fndecl \
377 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
378 #define chkp_bndmk_fndecl \
379 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
380 #define chkp_ret_bnd_fndecl \
381 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
382 #define chkp_intersect_fndecl \
383 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
384 #define chkp_narrow_bounds_fndecl \
385 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
386 #define chkp_sizeof_fndecl \
387 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
388 #define chkp_extract_lower_fndecl \
389 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
390 #define chkp_extract_upper_fndecl \
391 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
393 static GTY (()) tree chkp_uintptr_type;
395 static GTY (()) tree chkp_zero_bounds_var;
396 static GTY (()) tree chkp_none_bounds_var;
398 static GTY (()) basic_block entry_block;
399 static GTY (()) tree zero_bounds;
400 static GTY (()) tree none_bounds;
401 static GTY (()) tree incomplete_bounds;
402 static GTY (()) tree tmp_var;
403 static GTY (()) tree size_tmp_var;
404 static GTY (()) bitmap chkp_abnormal_copies;
406 struct hash_set<tree> *chkp_invalid_bounds;
407 struct hash_set<tree> *chkp_completed_bounds_set;
408 struct hash_map<tree, tree> *chkp_reg_bounds;
409 struct hash_map<tree, tree> *chkp_bound_vars;
410 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
411 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
412 struct hash_map<tree, tree> *chkp_bounds_map;
413 struct hash_map<tree, tree> *chkp_static_var_bounds;
415 static bool in_chkp_pass;
417 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
418 #define CHKP_SIZE_TMP_NAME "__size_tmp"
419 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
420 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
421 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
422 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
423 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
425 /* Static checker constructors may become very large and their
426 compilation with optimization may take too much time.
427 Therefore we put a limit to number of statements in one
428 constructor. Tests with 100 000 statically initialized
429 pointers showed following compilation times on Sandy Bridge
430 server (used -O2):
431 limit 100 => ~18 sec.
432 limit 300 => ~22 sec.
433 limit 1000 => ~30 sec.
434 limit 3000 => ~49 sec.
435 limit 5000 => ~55 sec.
436 limit 10000 => ~76 sec.
437 limit 100000 => ~532 sec. */
438 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
440 struct chkp_ctor_stmt_list
442 tree stmts;
443 int avail;
446 /* Return 1 if function FNDECL is instrumented by Pointer
447 Bounds Checker. */
448 bool
449 chkp_function_instrumented_p (tree fndecl)
451 return fndecl
452 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
455 /* Mark function FNDECL as instrumented. */
456 void
457 chkp_function_mark_instrumented (tree fndecl)
459 if (chkp_function_instrumented_p (fndecl))
460 return;
462 DECL_ATTRIBUTES (fndecl)
463 = tree_cons (get_identifier ("chkp instrumented"), NULL,
464 DECL_ATTRIBUTES (fndecl));
467 /* Return true when STMT is builtin call to instrumentation function
468 corresponding to CODE. */
470 bool
471 chkp_gimple_call_builtin_p (gimple call,
472 enum built_in_function code)
474 tree fndecl;
475 if (is_gimple_call (call)
476 && (fndecl = targetm.builtin_chkp_function (code))
477 && gimple_call_fndecl (call) == fndecl)
478 return true;
479 return false;
482 /* Emit code to store zero bounds for PTR located at MEM. */
483 void
484 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
486 tree zero_bnd, bnd, addr, bndstx;
488 if (flag_chkp_use_static_const_bounds)
489 zero_bnd = chkp_get_zero_bounds_var ();
490 else
491 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
492 integer_zero_node);
493 bnd = make_tree (pointer_bounds_type_node,
494 assign_temp (pointer_bounds_type_node, 0, 1));
495 addr = build1 (ADDR_EXPR,
496 build_pointer_type (TREE_TYPE (mem)), mem);
497 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
499 expand_assignment (bnd, zero_bnd, false);
500 expand_normal (bndstx);
503 /* Build retbnd call for returned value RETVAL.
505 If BNDVAL is not NULL then result is stored
506 in it. Otherwise a temporary is created to
507 hold returned value.
509 GSI points to a position for a retbnd call
510 and is set to created stmt.
512 NOTE(review): earlier text here described an UPDATE_EDGE parameter,
513 but no such parameter exists in this signature - stale comment removed.
515 Obtained bounds are returned. */
516 tree
517 chkp_insert_retbnd_call (tree bndval, tree retval,
518 gimple_stmt_iterator *gsi)
520 gimple call;
522 if (!bndval)
523 bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
525 call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
526 gimple_call_set_lhs (call, bndval);
527 gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
529 return bndval;
532 /* Mark statement S to not be instrumented. */
533 static void
534 chkp_mark_stmt (gimple s)
536 gimple_set_plf (s, GF_PLF_1, true);
539 /* Mark statement S to be instrumented. */
540 static void
541 chkp_unmark_stmt (gimple s)
543 gimple_set_plf (s, GF_PLF_1, false);
546 /* Return 1 if statement S should not be instrumented. */
547 static bool
548 chkp_marked_stmt_p (gimple s)
550 return gimple_plf (s, GF_PLF_1);
553 /* Get var to be used for bound temps. */
554 static tree
555 chkp_get_tmp_var (void)
557 if (!tmp_var)
558 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
560 return tmp_var;
563 /* Get SSA_NAME to be used as temp. */
564 static tree
565 chkp_get_tmp_reg (gimple stmt)
567 if (in_chkp_pass)
568 return make_ssa_name (chkp_get_tmp_var (), stmt);
570 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
571 CHKP_BOUND_TMP_NAME);
574 /* Get var to be used for size temps. */
575 static tree
576 chkp_get_size_tmp_var (void)
578 if (!size_tmp_var)
579 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
581 return size_tmp_var;
584 /* Register bounds BND for address of OBJ. */
585 static void
586 chkp_register_addr_bounds (tree obj, tree bnd)
588 if (bnd == incomplete_bounds)
589 return;
591 chkp_reg_addr_bounds->put (obj, bnd);
593 if (dump_file && (dump_flags & TDF_DETAILS))
595 fprintf (dump_file, "Regsitered bound ");
596 print_generic_expr (dump_file, bnd, 0);
597 fprintf (dump_file, " for address of ");
598 print_generic_expr (dump_file, obj, 0);
599 fprintf (dump_file, "\n");
603 /* Return bounds registered for address of OBJ. */
604 static tree
605 chkp_get_registered_addr_bounds (tree obj)
607 tree *slot = chkp_reg_addr_bounds->get (obj);
608 return slot ? *slot : NULL_TREE;
611 /* Mark BOUNDS as completed. */
612 static void
613 chkp_mark_completed_bounds (tree bounds)
615 chkp_completed_bounds_set->add (bounds);
617 if (dump_file && (dump_flags & TDF_DETAILS))
619 fprintf (dump_file, "Marked bounds ");
620 print_generic_expr (dump_file, bounds, 0);
621 fprintf (dump_file, " as completed\n");
625 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
626 static bool
627 chkp_completed_bounds (tree bounds)
629 return chkp_completed_bounds_set->contains (bounds);
632 /* Clear completed bound marks. */
633 static void
634 chkp_erase_completed_bounds (void)
636 delete chkp_completed_bounds_set;
637 chkp_completed_bounds_set = new hash_set<tree>;
640 /* Mark BOUNDS associated with PTR as incomplete. */
641 static void
642 chkp_register_incomplete_bounds (tree bounds, tree ptr)
644 chkp_incomplete_bounds_map->put (bounds, ptr);
646 if (dump_file && (dump_flags & TDF_DETAILS))
648 fprintf (dump_file, "Regsitered incomplete bounds ");
649 print_generic_expr (dump_file, bounds, 0);
650 fprintf (dump_file, " for ");
651 print_generic_expr (dump_file, ptr, 0);
652 fprintf (dump_file, "\n");
656 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
657 static bool
658 chkp_incomplete_bounds (tree bounds)
660 if (bounds == incomplete_bounds)
661 return true;
663 if (chkp_completed_bounds (bounds))
664 return false;
666 return chkp_incomplete_bounds_map->get (bounds) != NULL;
669 /* Clear incomplete bound marks. */
670 static void
671 chkp_erase_incomplete_bounds (void)
673 delete chkp_incomplete_bounds_map;
674 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
677 /* Build and return bndmk call which creates bounds for structure
678 pointed by PTR. Structure should have complete type. */
679 tree
680 chkp_make_bounds_for_struct_addr (tree ptr)
682 tree type = TREE_TYPE (ptr);
683 tree size;
685 gcc_assert (POINTER_TYPE_P (type));
687 size = TYPE_SIZE (TREE_TYPE (type));
689 gcc_assert (size);
691 return build_call_nary (pointer_bounds_type_node,
692 build_fold_addr_expr (chkp_bndmk_fndecl),
693 2, ptr, size);
696 /* Traversal function for chkp_may_finish_incomplete_bounds.
697 Set RES to 0 if at least one argument of phi statement
698 defining bounds (passed in KEY arg) is unknown.
699 Traversal stops when first unknown phi argument is found. */
700 bool
701 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
702 bool *res)
704 gimple phi;
705 unsigned i;
707 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
709 phi = SSA_NAME_DEF_STMT (bounds);
711 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
713 for (i = 0; i < gimple_phi_num_args (phi); i++)
715 tree phi_arg = gimple_phi_arg_def (phi, i);
716 if (!phi_arg)
718 *res = false;
719 /* Do not need to traverse further. */
720 return false;
724 return true;
727 /* Return 1 if all phi nodes created for bounds have their
728 arguments computed. */
729 static bool
730 chkp_may_finish_incomplete_bounds (void)
732 bool res = true;
734 chkp_incomplete_bounds_map
735 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
737 return res;
740 /* Helper function for chkp_finish_incomplete_bounds.
741 Recompute args for bounds phi node. */
742 bool
743 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
744 void *res ATTRIBUTE_UNUSED)
746 tree ptr = *slot;
747 gphi *bounds_phi;
748 gphi *ptr_phi;
749 unsigned i;
751 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
752 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
754 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
755 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
757 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
759 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
760 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
762 add_phi_arg (bounds_phi, bound_arg,
763 gimple_phi_arg_edge (ptr_phi, i),
764 UNKNOWN_LOCATION);
767 return true;
770 /* Mark BOUNDS as invalid. */
771 static void
772 chkp_mark_invalid_bounds (tree bounds)
774 chkp_invalid_bounds->add (bounds);
776 if (dump_file && (dump_flags & TDF_DETAILS))
778 fprintf (dump_file, "Marked bounds ");
779 print_generic_expr (dump_file, bounds, 0);
780 fprintf (dump_file, " as invalid\n");
784 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
785 static bool
786 chkp_valid_bounds (tree bounds)
788 if (bounds == zero_bounds || bounds == none_bounds)
789 return false;
791 return !chkp_invalid_bounds->contains (bounds);
794 /* Helper function for chkp_finish_incomplete_bounds.
795 Check all arguments of phi nodes trying to find
796 valid completed bounds. If there is at least one
797 such arg then bounds produced by phi node are marked
798 as valid completed bounds and all phi args are
799 recomputed. */
800 bool
801 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
803 gimple phi;
804 unsigned i;
806 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
808 if (chkp_completed_bounds (bounds))
809 return true;
811 phi = SSA_NAME_DEF_STMT (bounds);
813 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
815 for (i = 0; i < gimple_phi_num_args (phi); i++)
817 tree phi_arg = gimple_phi_arg_def (phi, i);
819 gcc_assert (phi_arg);
821 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
823 *res = true;
824 chkp_mark_completed_bounds (bounds);
825 chkp_recompute_phi_bounds (bounds, slot, NULL);
826 return true;
830 return true;
833 /* Helper function for chkp_finish_incomplete_bounds.
834 Marks all incomplete bounds as invalid. */
835 bool
836 chkp_mark_invalid_bounds_walker (tree const &bounds,
837 tree *slot ATTRIBUTE_UNUSED,
838 void *res ATTRIBUTE_UNUSED)
840 if (!chkp_completed_bounds (bounds))
842 chkp_mark_invalid_bounds (bounds);
843 chkp_mark_completed_bounds (bounds);
845 return true;
848 /* When all bound phi nodes have all their args computed
849 we have enough info to find valid bounds. We iterate
850 through all incompleted bounds searching for valid
851 bounds. Found valid bounds are marked as completed
852 and all remaining incompleted bounds are recomputed.
853 Process continues until no new valid bounds may be
854 found. All remained incompleted bounds are marked as
855 invalid (i.e. have no valid source of bounds). */
856 static void
857 chkp_finish_incomplete_bounds (void)
859 bool found_valid;
861 while (found_valid)
863 found_valid = false;
865 chkp_incomplete_bounds_map->
866 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
868 if (found_valid)
869 chkp_incomplete_bounds_map->
870 traverse<void *, chkp_recompute_phi_bounds> (NULL);
873 chkp_incomplete_bounds_map->
874 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
875 chkp_incomplete_bounds_map->
876 traverse<void *, chkp_recompute_phi_bounds> (NULL);
878 chkp_erase_completed_bounds ();
879 chkp_erase_incomplete_bounds ();
882 /* Return 1 if type TYPE is a pointer type or a
883 structure having a pointer type as one of its fields.
884 Otherwise return 0. */
885 bool
886 chkp_type_has_pointer (const_tree type)
888 bool res = false;
890 if (BOUNDED_TYPE_P (type))
891 res = true;
892 else if (RECORD_OR_UNION_TYPE_P (type))
894 tree field;
896 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
897 if (TREE_CODE (field) == FIELD_DECL)
898 res = res || chkp_type_has_pointer (TREE_TYPE (field));
900 else if (TREE_CODE (type) == ARRAY_TYPE)
901 res = chkp_type_has_pointer (TREE_TYPE (type));
903 return res;
906 unsigned
907 chkp_type_bounds_count (const_tree type)
909 unsigned res = 0;
911 if (!type)
912 res = 0;
913 else if (BOUNDED_TYPE_P (type))
914 res = 1;
915 else if (RECORD_OR_UNION_TYPE_P (type))
917 bitmap have_bound;
919 bitmap_obstack_initialize (NULL);
920 have_bound = BITMAP_ALLOC (NULL);
921 chkp_find_bound_slots (type, have_bound);
922 res = bitmap_count_bits (have_bound);
923 BITMAP_FREE (have_bound);
924 bitmap_obstack_release (NULL);
927 return res;
930 /* Get bounds associated with NODE via
931 chkp_set_bounds call. */
932 tree
933 chkp_get_bounds (tree node)
935 tree *slot;
937 if (!chkp_bounds_map)
938 return NULL_TREE;
940 slot = chkp_bounds_map->get (node);
941 return slot ? *slot : NULL_TREE;
944 /* Associate bounds VAL with NODE. */
945 void
946 chkp_set_bounds (tree node, tree val)
948 if (!chkp_bounds_map)
949 chkp_bounds_map = new hash_map<tree, tree>;
951 chkp_bounds_map->put (node, val);
954 /* Check if statically initialized variable VAR requires
955 static bounds initialization. If VAR is added into
956 bounds initialization list then 1 is returned. Otherwise
957 return 0. */
958 extern bool
959 chkp_register_var_initializer (tree var)
961 if (!flag_check_pointer_bounds
962 || DECL_INITIAL (var) == error_mark_node)
963 return false;
965 gcc_assert (TREE_CODE (var) == VAR_DECL);
966 gcc_assert (DECL_INITIAL (var));
968 if (TREE_STATIC (var)
969 && chkp_type_has_pointer (TREE_TYPE (var)))
971 varpool_node::get_create (var)->need_bounds_init = 1;
972 return true;
975 return false;
978 /* Helper function for chkp_finish_file.
980 Add new modification statement (RHS is assigned to LHS)
981 into list of static initializer statementes (passed in ARG).
982 If statements list becomes too big, emit checker constructor
983 and start the new one. */
984 static void
985 chkp_add_modification_to_stmt_list (tree lhs,
986 tree rhs,
987 void *arg)
989 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
990 tree modify;
992 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
993 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
995 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
996 append_to_statement_list (modify, &stmts->stmts);
998 stmts->avail--;
1001 /* Build and return ADDR_EXPR for specified object OBJ. */
1002 static tree
1003 chkp_build_addr_expr (tree obj)
1005 return TREE_CODE (obj) == TARGET_MEM_REF
1006 ? tree_mem_ref_addr (ptr_type_node, obj)
1007 : build_fold_addr_expr (obj);
1010 /* Helper function for chkp_finish_file.
1011 Initialize bound variable BND_VAR with bounds of variable
1012 VAR to statements list STMTS. If statements list becomes
1013 too big, emit checker constructor and start the new one. */
1014 static void
1015 chkp_output_static_bounds (tree bnd_var, tree var,
1016 struct chkp_ctor_stmt_list *stmts)
1018 tree lb, ub, size;
1020 if (TREE_CODE (var) == STRING_CST)
1022 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1023 size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
1025 else if (DECL_SIZE (var)
1026 && !chkp_variable_size_type (TREE_TYPE (var)))
1028 /* Compute bounds using statically known size. */
1029 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1030 size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
1032 else
1034 /* Compute bounds using dynamic size. */
1035 tree call;
1037 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1038 call = build1 (ADDR_EXPR,
1039 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
1040 chkp_sizeof_fndecl);
1041 size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
1042 call, 1, var);
1044 if (flag_chkp_zero_dynamic_size_as_infinite)
1046 tree max_size, cond;
1048 max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
1049 cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
1050 size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
1053 size = size_binop (MINUS_EXPR, size, size_one_node);
1056 ub = size_binop (PLUS_EXPR, lb, size);
1057 stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
1058 &stmts->stmts);
1059 if (stmts->avail <= 0)
1061 cgraph_build_static_cdtor ('B', stmts->stmts,
1062 MAX_RESERVED_INIT_PRIORITY + 2);
1063 stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
1064 stmts->stmts = NULL;
1068 /* Return entry block to be used for checker initialization code.
1069 Create new block if required. */
1070 static basic_block
1071 chkp_get_entry_block (void)
1073 if (!entry_block)
1074 entry_block = split_block (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL)->dest;
1076 return entry_block;
1079 /* Return a bounds var to be used for pointer var PTR_VAR. */
1080 static tree
1081 chkp_get_bounds_var (tree ptr_var)
1083 tree bnd_var;
1084 tree *slot;
1086 slot = chkp_bound_vars->get (ptr_var);
1087 if (slot)
1088 bnd_var = *slot;
1089 else
1091 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1092 CHKP_BOUND_TMP_NAME);
1093 chkp_bound_vars->put (ptr_var, bnd_var);
1096 return bnd_var;
1101 /* Register bounds BND for object PTR in global bounds table.
1102 A copy of bounds may be created for abnormal ssa names.
1103 Returns bounds to use for PTR. */
1104 static tree
1105 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1107 bool abnormal_ptr;
1109 if (!chkp_reg_bounds)
1110 return bnd;
1112 /* Do nothing if bounds are incomplete_bounds
1113 because it means bounds will be recomputed. */
1114 if (bnd == incomplete_bounds)
1115 return bnd;
1117 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1118 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1119 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1121 /* A single bounds value may be reused multiple times for
1122 different pointer values. It may cause coalescing issues
1123 for abnormal SSA names. To avoid it we create a bounds
1124 copy in case it is computed for abnormal SSA name.
1126 We also cannot reuse such created copies for other pointers */
1127 if (abnormal_ptr
1128 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1130 tree bnd_var = NULL_TREE;
1132 if (abnormal_ptr)
1134 if (SSA_NAME_VAR (ptr))
1135 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1137 else
1138 bnd_var = chkp_get_tmp_var ();
1140 /* For abnormal copies we may just find original
1141 bounds and use them. */
1142 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1144 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1145 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1146 bnd = gimple_assign_rhs1 (bnd_def);
1148 /* For undefined values we usually use none bounds
1149 value but in case of abnormal edge it may cause
1150 coalescing failures. Use default definition of
1151 bounds variable instead to avoid it. */
1152 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1153 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1155 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1157 if (dump_file && (dump_flags & TDF_DETAILS))
1159 fprintf (dump_file, "Using default def bounds ");
1160 print_generic_expr (dump_file, bnd, 0);
1161 fprintf (dump_file, " for abnormal default def SSA name ");
1162 print_generic_expr (dump_file, ptr, 0);
1163 fprintf (dump_file, "\n");
1166 else
1168 tree copy;
1169 gimple def = SSA_NAME_DEF_STMT (ptr);
1170 gimple assign;
1171 gimple_stmt_iterator gsi;
1173 if (bnd_var)
1174 copy = make_ssa_name (bnd_var, gimple_build_nop ());
1175 else
1176 copy = make_temp_ssa_name (pointer_bounds_type_node,
1177 gimple_build_nop (),
1178 CHKP_BOUND_TMP_NAME);
1179 assign = gimple_build_assign (copy, bnd);
1181 if (dump_file && (dump_flags & TDF_DETAILS))
1183 fprintf (dump_file, "Creating a copy of bounds ");
1184 print_generic_expr (dump_file, bnd, 0);
1185 fprintf (dump_file, " for abnormal SSA name ");
1186 print_generic_expr (dump_file, ptr, 0);
1187 fprintf (dump_file, "\n");
1190 if (gimple_code (def) == GIMPLE_NOP)
1192 gsi = gsi_last_bb (chkp_get_entry_block ());
1193 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1194 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1195 else
1196 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1198 else
1200 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1201 /* Sometimes (e.g. when we load a pointer from a
1202 memory) bounds are produced later than a pointer.
1203 We need to insert bounds copy appropriately. */
1204 if (gimple_code (bnd_def) != GIMPLE_NOP
1205 && stmt_dominates_stmt_p (def, bnd_def))
1206 gsi = gsi_for_stmt (bnd_def);
1207 else
1208 gsi = gsi_for_stmt (def);
1209 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1212 bnd = copy;
1215 if (abnormal_ptr)
1216 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1219 chkp_reg_bounds->put (ptr, bnd);
1221 if (dump_file && (dump_flags & TDF_DETAILS))
1223 fprintf (dump_file, "Regsitered bound ");
1224 print_generic_expr (dump_file, bnd, 0);
1225 fprintf (dump_file, " for pointer ");
1226 print_generic_expr (dump_file, ptr, 0);
1227 fprintf (dump_file, "\n");
1230 return bnd;
1233 /* Get bounds registered for object PTR in global bounds table. */
1234 static tree
1235 chkp_get_registered_bounds (tree ptr)
1237 tree *slot;
1239 if (!chkp_reg_bounds)
1240 return NULL_TREE;
1242 slot = chkp_reg_bounds->get (ptr);
1243 return slot ? *slot : NULL_TREE;
/* Add bound retvals to return statement pointed by GSI.
   When the function's DECL_RESULT is a bounded (pointer) value,
   find bounds for the returned value, register them for the
   result decl and attach them to the return statement.  */

static void
chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
{
  greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
  tree retval = gimple_return_retval (ret);
  tree ret_decl = DECL_RESULT (cfun->decl);
  tree bounds;

  /* Nothing to do for a bare "return;".  */
  if (!retval)
    return;

  if (BOUNDED_P (ret_decl))
    {
      bounds = chkp_find_bounds (retval, gsi);
      /* Registration may create an abnormal-safe copy; use the
	 value it returns, not the original bounds.  */
      bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
      gimple_return_set_retbnd (ret, bounds);
    }

  update_stmt (ret);
}
1269 /* Force OP to be suitable for using as an argument for call.
1270 New statements (if any) go to SEQ. */
1271 static tree
1272 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1274 gimple_seq stmts;
1275 gimple_stmt_iterator si;
1277 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1279 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1280 chkp_mark_stmt (gsi_stmt (si));
1282 gimple_seq_add_seq (seq, stmts);
1284 return op;
/* Generate lower bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.
   BOUNDS is the bounds value to check ADDR against;
   LOCATION is used for the generated call's source location.  */
static void
chkp_check_lower (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple check;
  tree node;

  /* In a not-instrumented function a check against zero bounds
     can never fail, so skip it.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* Honor the -fchkp-check-read / -fchkp-check-write flags:
     DIRFLAG zero means a load, one means a store.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* ADDR may be an arbitrary expression; gimplify it first.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, " ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
/* Generate upper bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.
   Mirrors chkp_check_lower but emits a call to the upper-bound
   check builtin (chkp_checku_fndecl).  */
static void
chkp_check_upper (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple check;
  tree node;

  /* In a not-instrumented function a check against zero bounds
     can never fail, so skip it.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* Honor the -fchkp-check-read / -fchkp-check-write flags:
     DIRFLAG zero means a load, one means a store.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* ADDR may be an arbitrary expression; gimplify it first.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, " ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  /* The lower check uses the first accessed byte, the upper
     check the last one.  */
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
1395 /* Replace call to _bnd_chk_* pointed by GSI with
1396 bndcu and bndcl calls. DIRFLAG determines whether
1397 check is for read or write. */
1399 void
1400 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1401 tree dirflag)
1403 gimple_stmt_iterator call_iter = *gsi;
1404 gimple call = gsi_stmt (*gsi);
1405 tree fndecl = gimple_call_fndecl (call);
1406 tree addr = gimple_call_arg (call, 0);
1407 tree bounds = chkp_find_bounds (addr, gsi);
1409 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1410 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1411 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1413 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1414 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1416 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1418 tree size = gimple_call_arg (call, 1);
1419 addr = fold_build_pointer_plus (addr, size);
1420 addr = fold_build_pointer_plus_hwi (addr, -1);
1421 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1424 gsi_remove (&call_iter, true);
1427 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1428 corresponding bounds extract call. */
1430 void
1431 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1433 gimple call = gsi_stmt (*gsi);
1434 tree fndecl = gimple_call_fndecl (call);
1435 tree addr = gimple_call_arg (call, 0);
1436 tree bounds = chkp_find_bounds (addr, gsi);
1437 gimple extract;
1439 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1440 fndecl = chkp_extract_lower_fndecl;
1441 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1442 fndecl = chkp_extract_upper_fndecl;
1443 else
1444 gcc_unreachable ();
1446 extract = gimple_build_call (fndecl, 1, bounds);
1447 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1448 chkp_mark_stmt (extract);
1450 gsi_replace (gsi, extract, false);
/* Return COMPONENT_REF accessing FIELD in OBJ.  */
static tree
chkp_build_component_ref (tree obj, tree field)
{
  tree res;

  /* If object is TMR then we do not use component_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (obj) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (obj);
      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, DECL_FIELD_OFFSET (field));
      /* Folding two constants must always succeed here.  */
      gcc_assert (offs);

      res = copy_node (obj);
      TREE_TYPE (res) = TREE_TYPE (field);
      TMR_OFFSET (res) = offs;
    }
  else
    res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);

  return res;
}
/* Return ARRAY_REF for array ARR and index IDX with
   specified element type ETYPE and element size ESIZE.  */
static tree
chkp_build_array_ref (tree arr, tree etype, tree esize,
		      unsigned HOST_WIDE_INT idx)
{
  tree index = build_int_cst (size_type_node, idx);
  tree res;

  /* If object is TMR then we do not use array_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (arr) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (arr);

      /* Byte offset of the element is ESIZE * IDX.  */
      esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
				       esize, index);
      gcc_assert(esize);

      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, esize);
      gcc_assert (offs);

      res = copy_node (arr);
      TREE_TYPE (res) = etype;
      TMR_OFFSET (res) = offs;
    }
  else
    res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);

  return res;
}
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   ITER points a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      /* Only fill each slot once; an earlier field at the same
	 offset wins.  */
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  /* Load the pointer value into a temporary SSA name so
	     chkp_find_bounds has a concrete value to work with.  */
	  tree temp = make_temp_ssa_name (type, gimple_build_nop (), "");
	  gimple assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into every field, accumulating its bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;

	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays with no known or empty domain.  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into every field, accumulating its bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays with unknown, non-constant or empty domain.  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
/* Fill bitmap RES with information about bounds for
   type TYPE.  See chkp_find_bound_slots_1 for more
   details.  */
void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  /* Start from a clean bitmap; the helper only sets bits.  */
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}
1643 /* Return 1 if call to FNDECL should be instrumented
1644 and 0 otherwise. */
1646 static bool
1647 chkp_instrument_normal_builtin (tree fndecl)
1649 switch (DECL_FUNCTION_CODE (fndecl))
1651 case BUILT_IN_STRLEN:
1652 case BUILT_IN_STRCPY:
1653 case BUILT_IN_STRNCPY:
1654 case BUILT_IN_STPCPY:
1655 case BUILT_IN_STPNCPY:
1656 case BUILT_IN_STRCAT:
1657 case BUILT_IN_STRNCAT:
1658 case BUILT_IN_MEMCPY:
1659 case BUILT_IN_MEMPCPY:
1660 case BUILT_IN_MEMSET:
1661 case BUILT_IN_MEMMOVE:
1662 case BUILT_IN_BZERO:
1663 case BUILT_IN_STRCMP:
1664 case BUILT_IN_STRNCMP:
1665 case BUILT_IN_BCMP:
1666 case BUILT_IN_MEMCMP:
1667 case BUILT_IN_MEMCPY_CHK:
1668 case BUILT_IN_MEMPCPY_CHK:
1669 case BUILT_IN_MEMMOVE_CHK:
1670 case BUILT_IN_MEMSET_CHK:
1671 case BUILT_IN_STRCPY_CHK:
1672 case BUILT_IN_STRNCPY_CHK:
1673 case BUILT_IN_STPCPY_CHK:
1674 case BUILT_IN_STPNCPY_CHK:
1675 case BUILT_IN_STRCAT_CHK:
1676 case BUILT_IN_STRNCAT_CHK:
1677 case BUILT_IN_MALLOC:
1678 case BUILT_IN_CALLOC:
1679 case BUILT_IN_REALLOC:
1680 return 1;
1682 default:
1683 return 0;
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.

   For each pointer-like actual argument a bounds argument is
   appended right after it; aggregates containing pointers get one
   bounds argument per contained pointer.  Direct calls are
   redirected to the instrumented clone of the callee.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    /* Past the end of the TYPE_ARG_TYPES list (varargs):
	       fall back to the actual argument's type.  */
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	/* A pointer (or pass-by-reference) argument gets a single
	   bounds argument right after it.  */
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  /* An aggregate with pointers inside gets one bounds
	     argument per contained pointer slot.  */
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds were added we can keep the original statement.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA defs of the old statement at the new one
	 before replacing it.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.
   LB and UB are the raw lower/upper bound values handed to the target
   hook that builds the constant initializer.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires '%s' "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  /* One-only so the same constant emitted from several units is
     merged by the linker.  */
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
/* Generate code to make bounds with specified lower bound LB and SIZE.
   if AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.
   Returns the SSA name holding the created bounds value.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* LB and SIZE may be arbitrary expressions; gimplify them into
     valid call operands first.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* "after" insertion only makes sense relative to an explicit
     iterator; for the entry block we always insert at the start.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, " inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, " at function entry\n");
    }

  return bounds;
}
2007 /* Return var holding zero bounds. */
2008 tree
2009 chkp_get_zero_bounds_var (void)
2011 if (!chkp_zero_bounds_var)
2012 chkp_zero_bounds_var
2013 = chkp_make_static_const_bounds (0, -1,
2014 CHKP_ZERO_BOUNDS_VAR_NAME);
2015 return chkp_zero_bounds_var;
2018 /* Return var holding none bounds. */
2019 tree
2020 chkp_get_none_bounds_var (void)
2022 if (!chkp_none_bounds_var)
2023 chkp_none_bounds_var
2024 = chkp_make_static_const_bounds (-1, 0,
2025 CHKP_NONE_BOUNDS_VAR_NAME);
2026 return chkp_none_bounds_var;
/* Return SSA_NAME used to represent zero bounds.
   Created once per function and cached in ZERO_BOUNDS.  */
static tree
chkp_get_zero_bounds (void)
{
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  /* Either load the static constant bounds var or build the
     bounds with a bndmk call in the entry block.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      zero_bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
/* Return SSA_NAME used to represent none bounds.
   Created once per function and cached in NONE_BOUNDS.  */
static tree
chkp_get_none_bounds (void)
{
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");

  /* Either load the static constant bounds var or build the
     bounds with a bndmk call in the entry block.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      none_bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
/* Return bounds to be used as a result of operation which
   should not create pointer (e.g. MULT_EXPR).  */
static tree
chkp_get_invalid_op_bounds (void)
{
  /* Zero bounds allow any access, so they are safe here.  */
  return chkp_get_zero_bounds ();
}
/* Return bounds to be used for loads of non-pointer values.  */
static tree
chkp_get_nonpointer_load_bounds (void)
{
  /* Zero bounds allow any access, so they are safe here.  */
  return chkp_get_zero_bounds ();
}
/* Return 1 if may use bndret call to get bounds for pointer
   returned by CALL.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  if (gimple_call_internal_p (call))
    return false;

  /* Narrowing builtins pass bounds through.  */
  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  /* Already instrumented calls return bounds.  */
  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      /* Builtins outside the instrumented subset return bounds only
	 when an always_inline instrumented clone with a body exists.  */
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  return true;
}
/* Build bounds returned by CALL.  Dispatches on the callee:
   allocation builtins get freshly made bounds, checker builtins
   are handled specially, returns-arg calls reuse the argument's
   bounds, and the general case emits a bndret call.  The result
   is registered for the call's LHS.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      /* alloca's LHS is the lower bound; its argument is the size.  */
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* For an instrumented call the flag's argument index counts
	     only non-bounds arguments; translate it to the actual
	     argument position by skipping bounds arguments.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call))
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2248 /* Return bounds used as returned by call
2249 which produced SSA name VAL. */
2250 gcall *
2251 chkp_retbnd_call_by_val (tree val)
2253 if (TREE_CODE (val) != SSA_NAME)
2254 return NULL;
2256 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2258 imm_use_iterator use_iter;
2259 use_operand_p use_p;
2260 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2261 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2262 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2263 return as_a <gcall *> (USE_STMT (use_p));
2265 return NULL;
2268 /* Check the next parameter for the given PARM is bounds
2269 and return it's default SSA_NAME (create if required). */
2270 static tree
2271 chkp_get_next_bounds_parm (tree parm)
2273 tree bounds = TREE_CHAIN (parm);
2274 gcc_assert (POINTER_BOUNDS_P (bounds));
2275 bounds = ssa_default_def (cfun, bounds);
2276 if (!bounds)
2278 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2279 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2281 return bounds;
2284 /* Return bounds to be used for input argument PARM. */
2285 static tree
2286 chkp_get_bound_for_parm (tree parm)
2288 tree decl = SSA_NAME_VAR (parm);
2289 tree bounds;
2291 gcc_assert (TREE_CODE (decl) == PARM_DECL);
2293 bounds = chkp_get_registered_bounds (parm);
2295 if (!bounds)
2296 bounds = chkp_get_registered_bounds (decl);
2298 if (!bounds)
2300 tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;
2302 /* For static chain param we return zero bounds
2303 because currently we do not check dereferences
2304 of this pointer. */
2305 if (cfun->static_chain_decl == decl)
2306 bounds = chkp_get_zero_bounds ();
2307 /* If non instrumented runtime is used then it may be useful
2308 to use zero bounds for input arguments of main
2309 function. */
2310 else if (flag_chkp_zero_input_bounds_for_main
2311 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
2312 "main") == 0)
2313 bounds = chkp_get_zero_bounds ();
2314 else if (BOUNDED_P (parm))
2316 bounds = chkp_get_next_bounds_parm (decl);
2317 bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);
2319 if (dump_file && (dump_flags & TDF_DETAILS))
2321 fprintf (dump_file, "Built arg bounds (");
2322 print_generic_expr (dump_file, bounds, 0);
2323 fprintf (dump_file, ") for arg: ");
2324 print_node (dump_file, "", decl, 0);
2327 else
2328 bounds = chkp_get_zero_bounds ();
2331 if (!chkp_get_registered_bounds (parm))
2332 bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);
2334 if (dump_file && (dump_flags & TDF_DETAILS))
2336 fprintf (dump_file, "Using bounds ");
2337 print_generic_expr (dump_file, bounds, 0);
2338 fprintf (dump_file, " for parm ");
2339 print_generic_expr (dump_file, parm, 0);
2340 fprintf (dump_file, " of type ");
2341 print_generic_expr (dump_file, TREE_TYPE (parm), 0);
2342 fprintf (dump_file, ".\n");
2345 return bounds;
2348 /* Build and return CALL_EXPR for bndstx builtin with specified
2349 arguments. */
2350 tree
2351 chkp_build_bndldx_call (tree addr, tree ptr)
2353 tree fn = build1 (ADDR_EXPR,
2354 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2355 chkp_bndldx_fndecl);
2356 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2357 fn, 2, addr, ptr);
2358 CALL_WITH_BOUNDS_P (call) = true;
2359 return call;
2362 /* Insert code to load bounds for PTR located by ADDR.
2363 Code is inserted after position pointed by GSI.
2364 Loaded bounds are returned. */
2365 static tree
2366 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2368 gimple_seq seq;
2369 gimple stmt;
2370 tree bounds;
2372 seq = NULL;
2374 addr = chkp_force_gimple_call_op (addr, &seq);
2375 ptr = chkp_force_gimple_call_op (ptr, &seq);
2377 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2378 chkp_mark_stmt (stmt);
2379 bounds = chkp_get_tmp_reg (stmt);
2380 gimple_call_set_lhs (stmt, bounds);
2382 gimple_seq_add_stmt (&seq, stmt);
2384 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2386 if (dump_file && (dump_flags & TDF_DETAILS))
2388 fprintf (dump_file, "Generated bndldx for pointer ");
2389 print_generic_expr (dump_file, ptr, 0);
2390 fprintf (dump_file, ": ");
2391 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2394 return bounds;
2397 /* Build and return CALL_EXPR for bndstx builtin with specified
2398 arguments. */
2399 tree
2400 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2402 tree fn = build1 (ADDR_EXPR,
2403 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2404 chkp_bndstx_fndecl);
2405 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2406 fn, 3, ptr, bounds, addr);
2407 CALL_WITH_BOUNDS_P (call) = true;
2408 return call;
2411 /* Insert code to store BOUNDS for PTR stored by ADDR.
2412 New statements are inserted after position pointed
2413 by GSI. */
2414 void
2415 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2416 gimple_stmt_iterator *gsi)
2418 gimple_seq seq;
2419 gimple stmt;
2421 seq = NULL;
2423 addr = chkp_force_gimple_call_op (addr, &seq);
2424 ptr = chkp_force_gimple_call_op (ptr, &seq);
2426 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2427 chkp_mark_stmt (stmt);
2428 gimple_call_set_with_bounds (stmt, true);
2430 gimple_seq_add_stmt (&seq, stmt);
2432 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2434 if (dump_file && (dump_flags & TDF_DETAILS))
2436 fprintf (dump_file, "Generated bndstx for pointer store ");
2437 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2438 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2442 /* Compute bounds for pointer NODE which was assigned in
2443 assignment statement ASSIGN. Return computed bounds. */
2444 static tree
2445 chkp_compute_bounds_for_assignment (tree node, gimple assign)
/* NODE may be NULL_TREE; bounds are registered for it only when
   it is non-null (see the end of this function).  */
2447 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
2448 tree rhs1 = gimple_assign_rhs1 (assign);
2449 tree bounds = NULL_TREE;
2450 gimple_stmt_iterator iter = gsi_for_stmt (assign);
2452 if (dump_file && (dump_flags & TDF_DETAILS))
2454 fprintf (dump_file, "Computing bounds for assignment: ");
2455 print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
/* Bounds depend on the class of the RHS: memory references load
   bounds from the bounds table, copies and pointer-preserving
   conversions propagate operand bounds, arithmetic picks bounds
   from operands heuristically, and non-pointer operations
   produce invalid bounds.  */
2458 switch (rhs_code)
2460 case MEM_REF:
2461 case TARGET_MEM_REF:
2462 case COMPONENT_REF:
2463 case ARRAY_REF:
2464 /* We need to load bounds from the bounds table. */
2465 bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
2466 break;
2468 case VAR_DECL:
2469 case SSA_NAME:
2470 case ADDR_EXPR:
2471 case POINTER_PLUS_EXPR:
2472 case NOP_EXPR:
2473 case CONVERT_EXPR:
2474 case INTEGER_CST:
2475 /* Bounds are just propagated from RHS. */
2476 bounds = chkp_find_bounds (rhs1, &iter);
2477 break;
2479 case VIEW_CONVERT_EXPR:
2480 /* Bounds are just propagated from RHS. */
2481 bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
2482 break;
2484 case PARM_DECL:
2485 if (BOUNDED_P (rhs1))
2487 /* We need to load bounds from the bounds table. */
2488 bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
2489 node, &iter);
2490 TREE_ADDRESSABLE (rhs1) = 1;
2492 else
2493 bounds = chkp_get_nonpointer_load_bounds ();
2494 break;
2496 case MINUS_EXPR:
2497 case PLUS_EXPR:
2498 case BIT_AND_EXPR:
2499 case BIT_IOR_EXPR:
2500 case BIT_XOR_EXPR:
2502 tree rhs2 = gimple_assign_rhs2 (assign);
2503 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2504 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2506 /* First we try to check types of operands. If it
2507 does not help then look at bound values.
2509 If some bounds are incomplete and other are
2510 not proven to be valid (i.e. also incomplete
2511 or invalid because value is not pointer) then
2512 resulting value is incomplete and will be
2513 recomputed later in chkp_finish_incomplete_bounds. */
/* NOTE(review): for MINUS_EXPR the second operand's bounds are
   never chosen — only bnd1 or invalid/incomplete bounds can
   result from a subtraction (see the rhs_code != MINUS_EXPR
   guards below).  */
2514 if (BOUNDED_P (rhs1)
2515 && !BOUNDED_P (rhs2))
2516 bounds = bnd1;
2517 else if (BOUNDED_P (rhs2)
2518 && !BOUNDED_P (rhs1)
2519 && rhs_code != MINUS_EXPR)
2520 bounds = bnd2;
2521 else if (chkp_incomplete_bounds (bnd1))
2522 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
2523 && !chkp_incomplete_bounds (bnd2))
2524 bounds = bnd2;
2525 else
2526 bounds = incomplete_bounds;
2527 else if (chkp_incomplete_bounds (bnd2))
2528 if (chkp_valid_bounds (bnd1)
2529 && !chkp_incomplete_bounds (bnd1))
2530 bounds = bnd1;
2531 else
2532 bounds = incomplete_bounds;
2533 else if (!chkp_valid_bounds (bnd1))
2534 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
2535 bounds = bnd2;
2536 else if (bnd2 == chkp_get_zero_bounds ())
2537 bounds = bnd2;
2538 else
2539 bounds = bnd1;
2540 else if (!chkp_valid_bounds (bnd2))
2541 bounds = bnd1;
2542 else
2543 /* Seems both operands may have valid bounds
2544 (e.g. pointer minus pointer). In such case
2545 use default invalid op bounds. */
2546 bounds = chkp_get_invalid_op_bounds ();
2548 break;
2550 case BIT_NOT_EXPR:
2551 case NEGATE_EXPR:
2552 case LSHIFT_EXPR:
2553 case RSHIFT_EXPR:
2554 case LROTATE_EXPR:
2555 case RROTATE_EXPR:
2556 case EQ_EXPR:
2557 case NE_EXPR:
2558 case LT_EXPR:
2559 case LE_EXPR:
2560 case GT_EXPR:
2561 case GE_EXPR:
2562 case MULT_EXPR:
2563 case RDIV_EXPR:
2564 case TRUNC_DIV_EXPR:
2565 case FLOOR_DIV_EXPR:
2566 case CEIL_DIV_EXPR:
2567 case ROUND_DIV_EXPR:
2568 case TRUNC_MOD_EXPR:
2569 case FLOOR_MOD_EXPR:
2570 case CEIL_MOD_EXPR:
2571 case ROUND_MOD_EXPR:
2572 case EXACT_DIV_EXPR:
2573 case FIX_TRUNC_EXPR:
2574 case FLOAT_EXPR:
2575 case REALPART_EXPR:
2576 case IMAGPART_EXPR:
2577 /* No valid bounds may be produced by these exprs. */
2578 bounds = chkp_get_invalid_op_bounds ();
2579 break;
2581 case COND_EXPR:
2583 tree val1 = gimple_assign_rhs2 (assign);
2584 tree val2 = gimple_assign_rhs3 (assign);
2585 tree bnd1 = chkp_find_bounds (val1, &iter);
2586 tree bnd2 = chkp_find_bounds (val2, &iter);
2587 gimple stmt;
2589 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2590 bounds = incomplete_bounds;
2591 else if (bnd1 == bnd2)
2592 bounds = bnd1;
2593 else
/* Materialize a COND_EXPR selecting between the two bounds at
   runtime, mirroring the pointer selection.  */
2595 rhs1 = unshare_expr (rhs1);
2597 bounds = chkp_get_tmp_reg (assign);
2598 stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
2599 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2601 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2602 chkp_mark_invalid_bounds (bounds);
2605 break;
2607 case MAX_EXPR:
2608 case MIN_EXPR:
2610 tree rhs2 = gimple_assign_rhs2 (assign);
2611 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2612 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2614 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2615 bounds = incomplete_bounds;
2616 else if (bnd1 == bnd2)
2617 bounds = bnd1;
2618 else
/* Select bounds of the winning operand via an explicit
   comparison turned into a COND_EXPR.  */
2620 gimple stmt;
2621 tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
2622 boolean_type_node, rhs1, rhs2);
2623 bounds = chkp_get_tmp_reg (assign);
2624 stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);
2626 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2628 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2629 chkp_mark_invalid_bounds (bounds);
2632 break;
2634 default:
/* Unknown RHS codes degrade to zero bounds with a user-visible
   warning instead of an ICE.  */
2635 bounds = chkp_get_zero_bounds ();
2636 warning (0, "pointer bounds were lost due to unexpected expression %s",
2637 get_tree_code_name (rhs_code));
2640 gcc_assert (bounds);
2642 if (node)
2643 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2645 return bounds;
2648 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2650 There are just few statement codes allowed: NOP (for default ssa names),
2651 ASSIGN, CALL, PHI, ASM.
2653 Return computed bounds. */
2654 static tree
2655 chkp_get_bounds_by_definition (tree node, gimple def_stmt,
2656 gphi_iterator *iter)
2658 tree var, bounds;
2659 enum gimple_code code = gimple_code (def_stmt);
2660 gphi *stmt;
2662 if (dump_file && (dump_flags & TDF_DETAILS))
2664 fprintf (dump_file, "Searching for bounds for node: ");
2665 print_generic_expr (dump_file, node, 0);
2667 fprintf (dump_file, " using its definition: ");
2668 print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2671 switch (code)
/* Default definitions: what the SSA name is based on decides
   where bounds come from.  */
2673 case GIMPLE_NOP:
2674 var = SSA_NAME_VAR (node);
2675 switch (TREE_CODE (var))
2677 case PARM_DECL:
2678 bounds = chkp_get_bound_for_parm (node);
2679 break;
2681 case VAR_DECL:
2682 /* For uninitialized pointers use none bounds. */
2683 bounds = chkp_get_none_bounds ();
2684 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2685 break;
2687 case RESULT_DECL:
2689 tree base_type;
2691 gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2693 base_type = TREE_TYPE (TREE_TYPE (node));
/* The referenced type must have a non-zero constant size so that
   bounds can be derived from it directly.  */
2695 gcc_assert (TYPE_SIZE (base_type)
2696 && TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2697 && tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2699 bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2700 NULL, false);
2701 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2703 break;
2705 default:
2706 if (dump_file && (dump_flags & TDF_DETAILS))
2708 fprintf (dump_file, "Unexpected var with no definition\n");
2709 print_generic_expr (dump_file, var, 0);
2711 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2712 get_tree_code_name (TREE_CODE (var)));
2714 break;
2716 case GIMPLE_ASSIGN:
2717 bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2718 break;
2720 case GIMPLE_CALL:
2721 bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
2722 break;
/* For PHIs create a parallel bounds PHI in the same block; ITER
   is updated to point at it so the caller can fill in its
   arguments later.  */
2724 case GIMPLE_PHI:
2725 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
2726 if (SSA_NAME_VAR (node))
2727 var = chkp_get_bounds_var (SSA_NAME_VAR (node));
2728 else
2729 var = make_temp_ssa_name (pointer_bounds_type_node,
2730 gimple_build_nop (),
2731 CHKP_BOUND_TMP_NAME);
2732 else
2733 var = chkp_get_tmp_var ();
2734 stmt = create_phi_node (var, gimple_bb (def_stmt));
2735 bounds = gimple_phi_result (stmt);
2736 *iter = gsi_for_phi (stmt);
2738 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2740 /* Created bounds do not have all phi args computed and
2741 therefore we do not know if there is a valid source
2742 of bounds for that node. Therefore we mark bounds
2743 as incomplete and then recompute them when all phi
2744 args are computed. */
2745 chkp_register_incomplete_bounds (bounds, node);
2746 break;
2748 case GIMPLE_ASM:
2749 bounds = chkp_get_zero_bounds ();
2750 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2751 break;
2753 default:
2754 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2755 gimple_code_name[code]);
2758 return bounds;
2761 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2762 tree
2763 chkp_build_make_bounds_call (tree lower_bound, tree size)
2765 tree call = build1 (ADDR_EXPR,
2766 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2767 chkp_bndmk_fndecl);
2768 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2769 call, 2, lower_bound, size);
2772 /* Create static bounds var of specfified OBJ which is
2773 is either VAR_DECL or string constant. */
2774 static tree
2775 chkp_make_static_bounds (tree obj)
2777 static int string_id = 1;
2778 static int var_id = 1;
2779 tree *slot;
2780 const char *var_name;
2781 char *bnd_var_name;
2782 tree bnd_var;
2784 /* First check if we already have required var. */
2785 if (chkp_static_var_bounds)
2787 /* For vars we use assembler name as a key in
2788 chkp_static_var_bounds map. It allows to
2789 avoid duplicating bound vars for decls
2790 sharing assembler name. */
2791 if (TREE_CODE (obj) == VAR_DECL)
2793 tree name = DECL_ASSEMBLER_NAME (obj);
2794 slot = chkp_static_var_bounds->get (name);
2795 if (slot)
2796 return *slot;
2798 else
2800 slot = chkp_static_var_bounds->get (obj);
2801 if (slot)
2802 return *slot;
2806 /* Build decl for bounds var. */
2807 if (TREE_CODE (obj) == VAR_DECL)
2809 if (DECL_IGNORED_P (obj))
2811 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2812 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2814 else
2816 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2818 /* For hidden symbols we want to skip first '*' char. */
2819 if (*var_name == '*')
2820 var_name++;
2822 bnd_var_name = (char *) xmalloc (strlen (var_name)
2823 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2824 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2825 strcat (bnd_var_name, var_name);
2828 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2829 get_identifier (bnd_var_name),
2830 pointer_bounds_type_node);
2832 /* Address of the obj will be used as lower bound. */
2833 TREE_ADDRESSABLE (obj) = 1;
2835 else
2837 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2838 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2840 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2841 get_identifier (bnd_var_name),
2842 pointer_bounds_type_node);
2845 TREE_PUBLIC (bnd_var) = 0;
2846 TREE_USED (bnd_var) = 1;
2847 TREE_READONLY (bnd_var) = 0;
2848 TREE_STATIC (bnd_var) = 1;
2849 TREE_ADDRESSABLE (bnd_var) = 0;
2850 DECL_ARTIFICIAL (bnd_var) = 1;
2851 DECL_COMMON (bnd_var) = 1;
2852 DECL_COMDAT (bnd_var) = 1;
2853 DECL_READ_P (bnd_var) = 1;
2854 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2855 /* Force output similar to constant bounds.
2856 See chkp_make_static_const_bounds. */
2857 varpool_node::get_create (bnd_var)->force_output = 1;
2858 /* Mark symbol as requiring bounds initialization. */
2859 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2860 varpool_node::finalize_decl (bnd_var);
2862 /* Add created var to the map to use it for other references
2863 to obj. */
2864 if (!chkp_static_var_bounds)
2865 chkp_static_var_bounds = new hash_map<tree, tree>;
2867 if (TREE_CODE (obj) == VAR_DECL)
2869 tree name = DECL_ASSEMBLER_NAME (obj);
2870 chkp_static_var_bounds->put (name, bnd_var);
2872 else
2873 chkp_static_var_bounds->put (obj, bnd_var);
2875 return bnd_var;
2878 /* When var has incomplete type we cannot get size to
2879 compute its bounds. In such cases we use checker
2880 builtin call which determines object size at runtime. */
2881 static tree
2882 chkp_generate_extern_var_bounds (tree var)
2884 tree bounds, size_reloc, lb, size, max_size, cond;
2885 gimple_stmt_iterator gsi;
2886 gimple_seq seq = NULL;
2887 gimple stmt;
2889 /* If instrumentation is not enabled for vars having
2890 incomplete type then just return zero bounds to avoid
2891 checks for this var. */
2892 if (!flag_chkp_incomplete_type)
2893 return chkp_get_zero_bounds ();
2895 if (dump_file && (dump_flags & TDF_DETAILS))
2897 fprintf (dump_file, "Generating bounds for extern symbol '");
2898 print_generic_expr (dump_file, var, 0);
2899 fprintf (dump_file, "'\n");
2902 stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
2904 size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
2905 gimple_call_set_lhs (stmt, size_reloc);
2907 gimple_seq_add_stmt (&seq, stmt);
2909 lb = chkp_build_addr_expr (var);
2910 size = make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());
2912 if (flag_chkp_zero_dynamic_size_as_infinite)
2914 /* We should check that size relocation was resolved.
2915 If it was not then use maximum possible size for the var. */
2916 max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
2917 fold_convert (chkp_uintptr_type, lb));
2918 max_size = chkp_force_gimple_call_op (max_size, &seq);
2920 cond = build2 (NE_EXPR, boolean_type_node,
2921 size_reloc, integer_zero_node);
2922 stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
2923 gimple_seq_add_stmt (&seq, stmt);
2925 else
2927 stmt = gimple_build_assign (size, size_reloc);
2928 gimple_seq_add_stmt (&seq, stmt);
2931 gsi = gsi_start_bb (chkp_get_entry_block ());
2932 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2934 bounds = chkp_make_bounds (lb, size, &gsi, true);
2936 return bounds;
2939 /* Return 1 if TYPE has fields with zero size or fields
2940 marked with chkp_variable_size attribute. */
2941 bool
2942 chkp_variable_size_type (tree type)
2944 bool res = false;
2945 tree field;
2947 if (RECORD_OR_UNION_TYPE_P (type))
2948 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2950 if (TREE_CODE (field) == FIELD_DECL)
2951 res = res
2952 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
2953 || chkp_variable_size_type (TREE_TYPE (field));
2955 else
2956 res = !TYPE_SIZE (type)
2957 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
2958 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
2960 return res;
2963 /* Compute and return bounds for address of DECL which is
2964 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
2965 static tree
2966 chkp_get_bounds_for_decl_addr (tree decl)
2968 tree bounds;
2970 gcc_assert (TREE_CODE (decl) == VAR_DECL
2971 || TREE_CODE (decl) == PARM_DECL
2972 || TREE_CODE (decl) == RESULT_DECL);
2974 bounds = chkp_get_registered_addr_bounds (decl);
2976 if (bounds)
2977 return bounds;
2979 if (dump_file && (dump_flags & TDF_DETAILS))
2981 fprintf (dump_file, "Building bounds for address of decl ");
2982 print_generic_expr (dump_file, decl, 0);
2983 fprintf (dump_file, "\n");
2986 /* Use zero bounds if size is unknown and checks for
2987 unknown sizes are restricted. */
2988 if ((!DECL_SIZE (decl)
2989 || (chkp_variable_size_type (TREE_TYPE (decl))
2990 && (TREE_STATIC (decl)
2991 || DECL_EXTERNAL (decl)
2992 || TREE_PUBLIC (decl))))
2993 && !flag_chkp_incomplete_type)
2994 return chkp_get_zero_bounds ();
2996 if (flag_chkp_use_static_bounds
2997 && TREE_CODE (decl) == VAR_DECL
2998 && (TREE_STATIC (decl)
2999 || DECL_EXTERNAL (decl)
3000 || TREE_PUBLIC (decl))
3001 && !DECL_THREAD_LOCAL_P (decl))
3003 tree bnd_var = chkp_make_static_bounds (decl);
3004 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3005 gimple stmt;
3007 bounds = chkp_get_tmp_reg (gimple_build_nop ());
3008 stmt = gimple_build_assign (bounds, bnd_var);
3009 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3011 else if (!DECL_SIZE (decl)
3012 || (chkp_variable_size_type (TREE_TYPE (decl))
3013 && (TREE_STATIC (decl)
3014 || DECL_EXTERNAL (decl)
3015 || TREE_PUBLIC (decl))))
3017 gcc_assert (TREE_CODE (decl) == VAR_DECL);
3018 bounds = chkp_generate_extern_var_bounds (decl);
3020 else
3022 tree lb = chkp_build_addr_expr (decl);
3023 bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3026 return bounds;
3029 /* Compute and return bounds for constant string. */
3030 static tree
3031 chkp_get_bounds_for_string_cst (tree cst)
3033 tree bounds;
3034 tree lb;
3035 tree size;
3037 gcc_assert (TREE_CODE (cst) == STRING_CST);
3039 bounds = chkp_get_registered_bounds (cst);
3041 if (bounds)
3042 return bounds;
3044 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3045 || flag_chkp_use_static_const_bounds > 0)
3047 tree bnd_var = chkp_make_static_bounds (cst);
3048 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3049 gimple stmt;
3051 bounds = chkp_get_tmp_reg (gimple_build_nop ());
3052 stmt = gimple_build_assign (bounds, bnd_var);
3053 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3055 else
3057 lb = chkp_build_addr_expr (cst);
3058 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3059 bounds = chkp_make_bounds (lb, size, NULL, false);
3062 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3064 return bounds;
3067 /* Generate code to instersect bounds BOUNDS1 and BOUNDS2 and
3068 return the result. if ITER is not NULL then Code is inserted
3069 before position pointed by ITER. Otherwise code is added to
3070 entry block. */
3071 static tree
3072 chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
3074 if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
3075 return bounds2 ? bounds2 : bounds1;
3076 else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
3077 return bounds1;
3078 else
3080 gimple_seq seq;
3081 gimple stmt;
3082 tree bounds;
3084 seq = NULL;
3086 stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
3087 chkp_mark_stmt (stmt);
3089 bounds = chkp_get_tmp_reg (stmt);
3090 gimple_call_set_lhs (stmt, bounds);
3092 gimple_seq_add_stmt (&seq, stmt);
3094 /* We are probably doing narrowing for constant expression.
3095 In such case iter may be undefined. */
3096 if (!iter)
3098 gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
3099 iter = &gsi;
3100 gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
3102 else
3103 gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
3105 if (dump_file && (dump_flags & TDF_DETAILS))
3107 fprintf (dump_file, "Bounds intersection: ");
3108 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
3109 fprintf (dump_file, " inserted before statement: ");
3110 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
3111 TDF_VOPS|TDF_MEMSYMS);
3114 return bounds;
3118 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3119 and 0 othersize. */
3120 static bool
3121 chkp_may_narrow_to_field (tree field)
3123 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3124 && tree_to_uhwi (DECL_SIZE (field)) != 0
3125 && (!DECL_FIELD_OFFSET (field)
3126 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3127 && (!DECL_FIELD_BIT_OFFSET (field)
3128 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3129 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3130 && !chkp_variable_size_type (TREE_TYPE (field));
3133 /* Return 1 if bounds for FIELD should be narrowed to
3134 field's own size. */
3135 static bool
3136 chkp_narrow_bounds_for_field (tree field)
3138 HOST_WIDE_INT offs;
3139 HOST_WIDE_INT bit_offs;
3141 if (!chkp_may_narrow_to_field (field))
3142 return false;
3144 /* Accesse to compiler generated fields should not cause
3145 bounds narrowing. */
3146 if (DECL_ARTIFICIAL (field))
3147 return false;
3149 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3150 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3152 return (flag_chkp_narrow_bounds
3153 && (flag_chkp_first_field_has_own_bounds
3154 || offs
3155 || bit_offs));
3158 /* Perform narrowing for BOUNDS using bounds computed for field
3159 access COMPONENT. ITER meaning is the same as for
3160 chkp_intersect_bounds. */
3161 static tree
3162 chkp_narrow_bounds_to_field (tree bounds, tree component,
3163 gimple_stmt_iterator *iter)
3165 tree field = TREE_OPERAND (component, 1);
3166 tree size = DECL_SIZE_UNIT (field);
3167 tree field_ptr = chkp_build_addr_expr (component);
3168 tree field_bounds;
3170 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3172 return chkp_intersect_bounds (field_bounds, bounds, iter);
3175 /* Parse field or array access NODE.
3177 PTR ouput parameter holds a pointer to the outermost
3178 object.
3180 BITFIELD output parameter is set to 1 if bitfield is
3181 accessed and to 0 otherwise. If it is 1 then ELT holds
3182 outer component for accessed bit field.
3184 SAFE outer parameter is set to 1 if access is safe and
3185 checks are not required.
3187 BOUNDS outer parameter holds bounds to be used to check
3188 access (may be NULL).
3190 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3191 innermost accessed component. */
3192 static void
3193 chkp_parse_array_and_component_ref (tree node, tree *ptr,
3194 tree *elt, bool *safe,
3195 bool *bitfield,
3196 tree *bounds,
3197 gimple_stmt_iterator *iter,
3198 bool innermost_bounds)
3200 tree comp_to_narrow = NULL_TREE;
3201 tree last_comp = NULL_TREE;
3202 bool array_ref_found = false;
3203 tree *nodes;
3204 tree var;
3205 int len;
3206 int i;
3208 /* Compute tree height for expression. */
3209 var = node;
3210 len = 1;
3211 while (TREE_CODE (var) == COMPONENT_REF
3212 || TREE_CODE (var) == ARRAY_REF
3213 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
3215 var = TREE_OPERAND (var, 0);
3216 len++;
3219 gcc_assert (len > 1);
3221 /* It is more convenient for us to scan left-to-right,
3222 so walk tree again and put all node to nodes vector
3223 in reversed order. */
/* nodes[0] is the base object, nodes[len - 1] is NODE itself.  */
3224 nodes = XALLOCAVEC (tree, len);
3225 nodes[len - 1] = node;
3226 for (i = len - 2; i >= 0; i--)
3227 nodes[i] = TREE_OPERAND (nodes[i + 1], 0);
3229 if (bounds)
3230 *bounds = NULL;
3231 *safe = true;
3232 *bitfield = (TREE_CODE (node) == COMPONENT_REF
3233 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
3234 /* To get bitfield address we will need outer elemnt. */
3235 if (*bitfield)
3236 *elt = nodes[len - 2];
3237 else
3238 *elt = NULL_TREE;
3240 /* If we have indirection in expression then compute
3241 outermost structure bounds. Computed bounds may be
3242 narrowed later. */
3243 if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
3245 *safe = false;
3246 *ptr = TREE_OPERAND (nodes[0], 0);
3247 if (bounds)
3248 *bounds = chkp_find_bounds (*ptr, iter);
3250 else
/* Direct access to a decl/string/SSA name: take its address as
   the outermost pointer; bounds may still be computed later.  */
3252 gcc_assert (TREE_CODE (var) == VAR_DECL
3253 || TREE_CODE (var) == PARM_DECL
3254 || TREE_CODE (var) == RESULT_DECL
3255 || TREE_CODE (var) == STRING_CST
3256 || TREE_CODE (var) == SSA_NAME);
3258 *ptr = chkp_build_addr_expr (var);
3261 /* In this loop we are trying to find a field access
3262 requiring narrowing. There are two simple rules
3263 for search:
3264 1. Leftmost array_ref is chosen if any.
3265 2. Rightmost suitable component_ref is chosen if innermost
3266 bounds are required and no array_ref exists. */
3267 for (i = 1; i < len; i++)
3269 var = nodes[i];
3271 if (TREE_CODE (var) == ARRAY_REF)
3273 *safe = false;
3274 array_ref_found = true;
3275 if (flag_chkp_narrow_bounds
3276 && !flag_chkp_narrow_to_innermost_arrray
3277 && (!last_comp
3278 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
3280 comp_to_narrow = last_comp;
3281 break;
3284 else if (TREE_CODE (var) == COMPONENT_REF)
3286 tree field = TREE_OPERAND (var, 1);
3288 if (innermost_bounds
3289 && !array_ref_found
3290 && chkp_narrow_bounds_for_field (field))
3291 comp_to_narrow = var;
3292 last_comp = var;
3294 if (flag_chkp_narrow_bounds
3295 && flag_chkp_narrow_to_innermost_arrray
3296 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
/* Narrow eagerly to each array-typed field on the way when
   narrowing to the innermost array is requested.  */
3298 if (bounds)
3299 *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter)
3300 comp_to_narrow = NULL;
3303 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
3304 /* Nothing to do for it. */
3306 else
3307 gcc_unreachable ();
3310 if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
3311 *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);
3313 if (innermost_bounds && bounds && !*bounds)
3314 *bounds = chkp_find_bounds (*ptr, iter);
3317 /* Compute and return bounds for address of OBJ. */
3318 static tree
3319 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
3321 tree bounds = chkp_get_registered_addr_bounds (obj);
3323 if (bounds)
3324 return bounds;
3326 switch (TREE_CODE (obj))
3328 case VAR_DECL:
3329 case PARM_DECL:
3330 case RESULT_DECL:
3331 bounds = chkp_get_bounds_for_decl_addr (obj);
3332 break;
3334 case STRING_CST:
3335 bounds = chkp_get_bounds_for_string_cst (obj);
3336 break;
3338 case ARRAY_REF:
3339 case COMPONENT_REF:
3341 tree elt;
3342 tree ptr;
3343 bool safe;
3344 bool bitfield;
3346 chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
3347 &bitfield, &bounds, iter, true);
3349 gcc_assert (bounds);
3351 break;
3353 case FUNCTION_DECL:
3354 case LABEL_DECL:
3355 bounds = chkp_get_zero_bounds ();
3356 break;
3358 case MEM_REF:
3359 bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
3360 break;
3362 case REALPART_EXPR:
3363 case IMAGPART_EXPR:
3364 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
3365 break;
3367 default:
3368 if (dump_file && (dump_flags & TDF_DETAILS))
3370 fprintf (dump_file, "chkp_make_addressed_object_bounds: "
3371 "unexpected object of type %s\n",
3372 get_tree_code_name (TREE_CODE (obj)));
3373 print_node (dump_file, "", obj, 0);
3375 internal_error ("chkp_make_addressed_object_bounds: "
3376 "Unexpected tree code %s",
3377 get_tree_code_name (TREE_CODE (obj)));
3380 chkp_register_addr_bounds (obj, bounds);
3382 return bounds;
3385 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3386 to compute bounds if required. Computed bounds should be available at
3387 position pointed by ITER.
3389 If PTR_SRC is NULL_TREE then pointer definition is identified.
3391 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3392 PTR. If PTR is a any memory reference then ITER points to a statement
3393 after which bndldx will be inserterd. In both cases ITER will be updated
3394 to point to the inserted bndldx statement. */
3396 static tree
3397 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3399 tree addr = NULL_TREE;
3400 tree bounds = NULL_TREE;
3402 if (!ptr_src)
3403 ptr_src = ptr;
3405 bounds = chkp_get_registered_bounds (ptr_src);
3407 if (bounds)
3408 return bounds;
3410 switch (TREE_CODE (ptr_src))
3412 case MEM_REF:
3413 case VAR_DECL:
3414 if (BOUNDED_P (ptr_src))
3415 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3416 bounds = chkp_get_zero_bounds ();
3417 else
3419 addr = chkp_build_addr_expr (ptr_src);
3420 bounds = chkp_build_bndldx (addr, ptr, iter);
3422 else
3423 bounds = chkp_get_nonpointer_load_bounds ();
3424 break;
3426 case ARRAY_REF:
3427 case COMPONENT_REF:
3428 addr = get_base_address (ptr_src);
3429 if (DECL_P (addr)
3430 || TREE_CODE (addr) == MEM_REF
3431 || TREE_CODE (addr) == TARGET_MEM_REF)
3433 if (BOUNDED_P (ptr_src))
3434 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3435 bounds = chkp_get_zero_bounds ();
3436 else
3438 addr = chkp_build_addr_expr (ptr_src);
3439 bounds = chkp_build_bndldx (addr, ptr, iter);
3441 else
3442 bounds = chkp_get_nonpointer_load_bounds ();
3444 else
3446 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3447 bounds = chkp_find_bounds (addr, iter);
3449 break;
3451 case PARM_DECL:
3452 gcc_unreachable ();
3453 bounds = chkp_get_bound_for_parm (ptr_src);
3454 break;
3456 case TARGET_MEM_REF:
3457 addr = chkp_build_addr_expr (ptr_src);
3458 bounds = chkp_build_bndldx (addr, ptr, iter);
3459 break;
3461 case SSA_NAME:
3462 bounds = chkp_get_registered_bounds (ptr_src);
3463 if (!bounds)
3465 gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3466 gphi_iterator phi_iter;
3468 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3470 gcc_assert (bounds);
3472 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3474 unsigned i;
3476 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3478 tree arg = gimple_phi_arg_def (def_phi, i);
3479 tree arg_bnd;
3480 gphi *phi_bnd;
3482 arg_bnd = chkp_find_bounds (arg, NULL);
3484 /* chkp_get_bounds_by_definition created new phi
3485 statement and phi_iter points to it.
3487 Previous call to chkp_find_bounds could create
3488 new basic block and therefore change phi statement
3489 phi_iter points to. */
3490 phi_bnd = phi_iter.phi ();
3492 add_phi_arg (phi_bnd, arg_bnd,
3493 gimple_phi_arg_edge (def_phi, i),
3494 UNKNOWN_LOCATION);
3497 /* If all bound phi nodes have their arg computed
3498 then we may finish its computation. See
3499 chkp_finish_incomplete_bounds for more details. */
3500 if (chkp_may_finish_incomplete_bounds ())
3501 chkp_finish_incomplete_bounds ();
3504 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3505 || chkp_incomplete_bounds (bounds));
3507 break;
3509 case ADDR_EXPR:
3510 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3511 break;
3513 case INTEGER_CST:
3514 if (integer_zerop (ptr_src))
3515 bounds = chkp_get_none_bounds ();
3516 else
3517 bounds = chkp_get_invalid_op_bounds ();
3518 break;
3520 default:
3521 if (dump_file && (dump_flags & TDF_DETAILS))
3523 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3524 get_tree_code_name (TREE_CODE (ptr_src)));
3525 print_node (dump_file, "", ptr_src, 0);
3527 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3528 get_tree_code_name (TREE_CODE (ptr_src)));
3531 if (!bounds)
3533 if (dump_file && (dump_flags & TDF_DETAILS))
3535 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3536 print_node (dump_file, "", ptr_src, 0);
3538 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3541 return bounds;
3544 /* Normal case for bounds search without forced narrowing. */
3545 static tree
3546 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3548 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3551 /* Search bounds for pointer PTR loaded from PTR_SRC
3552 by statement *ITER points to. */
3553 static tree
3554 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3556 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3559 /* Helper function which checks type of RHS and finds all pointers in
3560 it. For each found pointer we build it's accesses in LHS and RHS
3561 objects and then call HANDLER for them. Function is used to copy
3562 or initilize bounds for copied object. */
3563 static void
3564 chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
3565 assign_handler handler)
3567 tree type = TREE_TYPE (lhs);
3569 /* We have nothing to do with clobbers. */
3570 if (TREE_CLOBBER_P (rhs))
3571 return;
3573 if (BOUNDED_TYPE_P (type))
3574 handler (lhs, rhs, arg);
3575 else if (RECORD_OR_UNION_TYPE_P (type))
3577 tree field;
3579 if (TREE_CODE (rhs) == CONSTRUCTOR)
3581 unsigned HOST_WIDE_INT cnt;
3582 tree val;
3584 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
3586 if (chkp_type_has_pointer (TREE_TYPE (field)))
3588 tree lhs_field = chkp_build_component_ref (lhs, field);
3589 chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
3593 else
3594 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3595 if (TREE_CODE (field) == FIELD_DECL
3596 && chkp_type_has_pointer (TREE_TYPE (field)))
3598 tree rhs_field = chkp_build_component_ref (rhs, field);
3599 tree lhs_field = chkp_build_component_ref (lhs, field);
3600 chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
3603 else if (TREE_CODE (type) == ARRAY_TYPE)
3605 unsigned HOST_WIDE_INT cur = 0;
3606 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3607 tree etype = TREE_TYPE (type);
3608 tree esize = TYPE_SIZE (etype);
3610 if (TREE_CODE (rhs) == CONSTRUCTOR)
3612 unsigned HOST_WIDE_INT cnt;
3613 tree purp, val, lhs_elem;
3615 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
3617 if (purp && TREE_CODE (purp) == RANGE_EXPR)
3619 tree lo_index = TREE_OPERAND (purp, 0);
3620 tree hi_index = TREE_OPERAND (purp, 1);
3622 for (cur = (unsigned)tree_to_uhwi (lo_index);
3623 cur <= (unsigned)tree_to_uhwi (hi_index);
3624 cur++)
3626 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3627 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3630 else
3632 if (purp)
3634 gcc_assert (TREE_CODE (purp) == INTEGER_CST);
3635 cur = tree_to_uhwi (purp);
3638 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
3640 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3644 /* Copy array only when size is known. */
3645 else if (maxval && !integer_minus_onep (maxval))
3646 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
3648 tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3649 tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
3650 chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
3653 else
3654 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3655 get_tree_code_name (TREE_CODE (type)));
3658 /* Add code to copy bounds for assignment of RHS to LHS.
3659 ARG is an iterator pointing ne code position. */
3660 static void
3661 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3663 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3664 tree bounds = chkp_find_bounds (rhs, iter);
3665 tree addr = chkp_build_addr_expr(lhs);
3667 chkp_build_bndstx (addr, rhs, bounds, iter);
3670 /* Emit static bound initilizers and size vars. */
3671 void
3672 chkp_finish_file (void)
3674 struct varpool_node *node;
3675 struct chkp_ctor_stmt_list stmts;
3677 if (seen_error ())
3678 return;
3680 /* Iterate through varpool and generate bounds initialization
3681 constructors for all statically initialized pointers. */
3682 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3683 stmts.stmts = NULL;
3684 FOR_EACH_VARIABLE (node)
3685 /* Check that var is actually emitted and we need and may initialize
3686 its bounds. */
3687 if (node->need_bounds_init
3688 && !POINTER_BOUNDS_P (node->decl)
3689 && DECL_RTL (node->decl)
3690 && MEM_P (DECL_RTL (node->decl))
3691 && TREE_ASM_WRITTEN (node->decl))
3693 chkp_walk_pointer_assignments (node->decl,
3694 DECL_INITIAL (node->decl),
3695 &stmts,
3696 chkp_add_modification_to_stmt_list);
3698 if (stmts.avail <= 0)
3700 cgraph_build_static_cdtor ('P', stmts.stmts,
3701 MAX_RESERVED_INIT_PRIORITY + 3);
3702 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3703 stmts.stmts = NULL;
3707 if (stmts.stmts)
3708 cgraph_build_static_cdtor ('P', stmts.stmts,
3709 MAX_RESERVED_INIT_PRIORITY + 3);
3711 /* Iterate through varpool and generate bounds initialization
3712 constructors for all static bounds vars. */
3713 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3714 stmts.stmts = NULL;
3715 FOR_EACH_VARIABLE (node)
3716 if (node->need_bounds_init
3717 && POINTER_BOUNDS_P (node->decl)
3718 && TREE_ASM_WRITTEN (node->decl))
3720 tree bnd = node->decl;
3721 tree var;
3723 gcc_assert (DECL_INITIAL (bnd)
3724 && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);
3726 var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
3727 chkp_output_static_bounds (bnd, var, &stmts);
3730 if (stmts.stmts)
3731 cgraph_build_static_cdtor ('B', stmts.stmts,
3732 MAX_RESERVED_INIT_PRIORITY + 2);
3734 delete chkp_static_var_bounds;
3735 delete chkp_bounds_map;
3738 /* An instrumentation function which is called for each statement
3739 having memory access we want to instrument. It inserts check
3740 code and bounds copy code.
3742 ITER points to statement to instrument.
3744 NODE holds memory access in statement to check.
3746 LOC holds the location information for statement.
3748 DIRFLAGS determines whether access is read or write.
3750 ACCESS_OFFS should be added to address used in NODE
3751 before check.
3753 ACCESS_SIZE holds size of checked access.
3755 SAFE indicates if NODE access is safe and should not be
3756 checked. */
3757 static void
3758 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3759 location_t loc, tree dirflag,
3760 tree access_offs, tree access_size,
3761 bool safe)
3763 tree node_type = TREE_TYPE (node);
3764 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3765 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3766 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3767 tree ptr = NULL_TREE; /* a pointer used for dereference */
3768 tree bounds = NULL_TREE;
3770 /* We do not need instrumentation for clobbers. */
3771 if (dirflag == integer_one_node
3772 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3773 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3774 return;
3776 switch (TREE_CODE (node))
3778 case ARRAY_REF:
3779 case COMPONENT_REF:
3781 bool bitfield;
3782 tree elt;
3784 if (safe)
3786 /* We are not going to generate any checks, so do not
3787 generate bounds as well. */
3788 addr_first = chkp_build_addr_expr (node);
3789 break;
3792 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3793 &bitfield, &bounds, iter, false);
3795 /* Break if there is no dereference and operation is safe. */
3797 if (bitfield)
3799 tree field = TREE_OPERAND (node, 1);
3801 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3802 size = DECL_SIZE_UNIT (field);
3804 if (elt)
3805 elt = chkp_build_addr_expr (elt);
3806 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3807 addr_first = fold_build_pointer_plus_loc (loc,
3808 addr_first,
3809 byte_position (field));
3811 else
3812 addr_first = chkp_build_addr_expr (node);
3814 break;
3816 case INDIRECT_REF:
3817 ptr = TREE_OPERAND (node, 0);
3818 addr_first = ptr;
3819 break;
3821 case MEM_REF:
3822 ptr = TREE_OPERAND (node, 0);
3823 addr_first = chkp_build_addr_expr (node);
3824 break;
3826 case TARGET_MEM_REF:
3827 ptr = TMR_BASE (node);
3828 addr_first = chkp_build_addr_expr (node);
3829 break;
3831 case ARRAY_RANGE_REF:
3832 printf("ARRAY_RANGE_REF\n");
3833 debug_gimple_stmt(gsi_stmt(*iter));
3834 debug_tree(node);
3835 gcc_unreachable ();
3836 break;
3838 case BIT_FIELD_REF:
3840 tree offs, rem, bpu;
3842 gcc_assert (!access_offs);
3843 gcc_assert (!access_size);
3845 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3846 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3847 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3848 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3850 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3851 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3852 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3853 size = fold_convert (size_type_node, size);
3855 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3856 dirflag, offs, size, safe);
3857 return;
3859 break;
3861 case VAR_DECL:
3862 case RESULT_DECL:
3863 case PARM_DECL:
3864 if (dirflag != integer_one_node
3865 || DECL_REGISTER (node))
3866 return;
3868 safe = true;
3869 addr_first = chkp_build_addr_expr (node);
3870 break;
3872 default:
3873 return;
3876 /* If addr_last was not computed then use (addr_first + size - 1)
3877 expression to compute it. */
3878 if (!addr_last)
3880 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3881 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3884 /* Shift both first_addr and last_addr by access_offs if specified. */
3885 if (access_offs)
3887 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3888 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3891 /* Generate bndcl/bndcu checks if memory access is not safe. */
3892 if (!safe)
3894 gimple_stmt_iterator stmt_iter = *iter;
3896 if (!bounds)
3897 bounds = chkp_find_bounds (ptr, iter);
3899 chkp_check_mem_access (addr_first, addr_last, bounds,
3900 stmt_iter, loc, dirflag);
3903 /* We need to store bounds in case pointer is stored. */
3904 if (dirflag == integer_one_node
3905 && chkp_type_has_pointer (node_type)
3906 && flag_chkp_store_bounds)
3908 gimple stmt = gsi_stmt (*iter);
3909 tree rhs1 = gimple_assign_rhs1 (stmt);
3910 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3912 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3913 chkp_walk_pointer_assignments (node, rhs1, iter,
3914 chkp_copy_bounds_for_elem);
3915 else
3917 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3918 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
3923 /* Add code to copy bounds for all pointers copied
3924 in ASSIGN created during inline of EDGE. */
3925 void
3926 chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
3928 tree lhs = gimple_assign_lhs (assign);
3929 tree rhs = gimple_assign_rhs1 (assign);
3930 gimple_stmt_iterator iter = gsi_for_stmt (assign);
3932 if (!flag_chkp_store_bounds)
3933 return;
3935 chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);
3937 /* We should create edges for all created calls to bndldx and bndstx. */
3938 while (gsi_stmt (iter) != assign)
3940 gimple stmt = gsi_stmt (iter);
3941 if (gimple_code (stmt) == GIMPLE_CALL)
3943 tree fndecl = gimple_call_fndecl (stmt);
3944 struct cgraph_node *callee = cgraph_node::get_create (fndecl);
3945 struct cgraph_edge *new_edge;
3947 gcc_assert (fndecl == chkp_bndstx_fndecl
3948 || fndecl == chkp_bndldx_fndecl
3949 || fndecl == chkp_ret_bnd_fndecl);
3951 new_edge = edge->caller->create_edge (callee,
3952 as_a <gcall *> (stmt),
3953 edge->count,
3954 edge->frequency);
3955 new_edge->frequency = compute_call_stmt_bb_frequency
3956 (edge->caller->decl, gimple_bb (stmt));
3958 gsi_prev (&iter);
3962 /* Some code transformation made during instrumentation pass
3963 may put code into inconsistent state. Here we find and fix
3964 such flaws. */
3965 void
3966 chkp_fix_cfg ()
3968 basic_block bb;
3969 gimple_stmt_iterator i;
3971 /* We could insert some code right after stmt which ends bb.
3972 We wanted to put this code on fallthru edge but did not
3973 add new edges from the beginning because it may cause new
3974 phi node creation which may be incorrect due to incomplete
3975 bound phi nodes. */
3976 FOR_ALL_BB_FN (bb, cfun)
3977 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
3979 gimple stmt = gsi_stmt (i);
3980 gimple_stmt_iterator next = i;
3982 gsi_next (&next);
3984 if (stmt_ends_bb_p (stmt)
3985 && !gsi_end_p (next))
3987 edge fall = find_fallthru_edge (bb->succs);
3988 basic_block dest = NULL;
3989 int flags = 0;
3991 gcc_assert (fall);
3993 /* We cannot split abnormal edge. Therefore we
3994 store its params, make it regular and then
3995 rebuild abnormal edge after split. */
3996 if (fall->flags & EDGE_ABNORMAL)
3998 flags = fall->flags & ~EDGE_FALLTHRU;
3999 dest = fall->dest;
4001 fall->flags &= ~EDGE_COMPLEX;
4004 while (!gsi_end_p (next))
4006 gimple next_stmt = gsi_stmt (next);
4007 gsi_remove (&next, false);
4008 gsi_insert_on_edge (fall, next_stmt);
4011 gsi_commit_edge_inserts ();
4013 /* Re-create abnormal edge. */
4014 if (dest)
4015 make_edge (bb, dest, flags);
4020 /* Walker callback for chkp_replace_function_pointers. Replaces
4021 function pointer in the specified operand with pointer to the
4022 instrumented function version. */
4023 static tree
4024 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4025 void *data ATTRIBUTE_UNUSED)
4027 if (TREE_CODE (*op) == FUNCTION_DECL
4028 && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op))
4029 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4030 /* For builtins we replace pointers only for selected
4031 function and functions having definitions. */
4032 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4033 && (chkp_instrument_normal_builtin (*op)
4034 || gimple_has_body_p (*op)))))
4036 struct cgraph_node *node = cgraph_node::get_create (*op);
4037 struct cgraph_node *clone = NULL;
4039 if (!node->instrumentation_clone)
4040 clone = chkp_maybe_create_clone (*op);
4042 if (clone)
4043 *op = clone->decl;
4044 *walk_subtrees = 0;
4047 return NULL;
4050 /* This function searches for function pointers in statement
4051 pointed by GSI and replaces them with pointers to instrumented
4052 function versions. */
4053 static void
4054 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4056 gimple stmt = gsi_stmt (*gsi);
4057 /* For calls we want to walk call args only. */
4058 if (gimple_code (stmt) == GIMPLE_CALL)
4060 unsigned i;
4061 for (i = 0; i < gimple_call_num_args (stmt); i++)
4062 walk_tree (gimple_call_arg_ptr (stmt, i),
4063 chkp_replace_function_pointer, NULL, NULL);
4065 else
4066 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4069 /* This function instruments all statements working with memory,
4070 calls and rets.
4072 It also removes excess statements from static initializers. */
4073 static void
4074 chkp_instrument_function (void)
4076 basic_block bb, next;
4077 gimple_stmt_iterator i;
4078 enum gimple_rhs_class grhs_class;
4079 bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4081 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4084 next = bb->next_bb;
4085 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4087 gimple s = gsi_stmt (i);
4089 /* Skip statement marked to not be instrumented. */
4090 if (chkp_marked_stmt_p (s))
4092 gsi_next (&i);
4093 continue;
4096 chkp_replace_function_pointers (&i);
4098 switch (gimple_code (s))
4100 case GIMPLE_ASSIGN:
4101 chkp_process_stmt (&i, gimple_assign_lhs (s),
4102 gimple_location (s), integer_one_node,
4103 NULL_TREE, NULL_TREE, safe);
4104 chkp_process_stmt (&i, gimple_assign_rhs1 (s),
4105 gimple_location (s), integer_zero_node,
4106 NULL_TREE, NULL_TREE, safe);
4107 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
4108 if (grhs_class == GIMPLE_BINARY_RHS)
4109 chkp_process_stmt (&i, gimple_assign_rhs2 (s),
4110 gimple_location (s), integer_zero_node,
4111 NULL_TREE, NULL_TREE, safe);
4112 break;
4114 case GIMPLE_RETURN:
4116 greturn *r = as_a <greturn *> (s);
4117 if (gimple_return_retval (r) != NULL_TREE)
4119 chkp_process_stmt (&i, gimple_return_retval (r),
4120 gimple_location (r),
4121 integer_zero_node,
4122 NULL_TREE, NULL_TREE, safe);
4124 /* Additionally we need to add bounds
4125 to return statement. */
4126 chkp_add_bounds_to_ret_stmt (&i);
4129 break;
4131 case GIMPLE_CALL:
4132 chkp_add_bounds_to_call_stmt (&i);
4133 break;
4135 default:
4139 gsi_next (&i);
4141 /* We do not need any actual pointer stores in checker
4142 static initializer. */
4143 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4144 && gimple_code (s) == GIMPLE_ASSIGN
4145 && gimple_store_p (s))
4147 gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4148 gsi_remove (&del_iter, true);
4149 unlink_stmt_vdef (s);
4150 release_defs(s);
4153 bb = next;
4155 while (bb);
4157 /* Some input params may have bounds and be address taken. In this case
4158 we should store incoming bounds into bounds table. */
4159 tree arg;
4160 if (flag_chkp_store_bounds)
4161 for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4162 if (TREE_ADDRESSABLE (arg))
4164 if (BOUNDED_P (arg))
4166 tree bounds = chkp_get_next_bounds_parm (arg);
4167 tree def_ptr = ssa_default_def (cfun, arg);
4168 gimple_stmt_iterator iter
4169 = gsi_start_bb (chkp_get_entry_block ());
4170 chkp_build_bndstx (chkp_build_addr_expr (arg),
4171 def_ptr ? def_ptr : arg,
4172 bounds, &iter);
4174 /* Skip bounds arg. */
4175 arg = TREE_CHAIN (arg);
4177 else if (chkp_type_has_pointer (TREE_TYPE (arg)))
4179 tree orig_arg = arg;
4180 bitmap slots = BITMAP_ALLOC (NULL);
4181 gimple_stmt_iterator iter
4182 = gsi_start_bb (chkp_get_entry_block ());
4183 bitmap_iterator bi;
4184 unsigned bnd_no;
4186 chkp_find_bound_slots (TREE_TYPE (arg), slots);
4188 EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4190 tree bounds = chkp_get_next_bounds_parm (arg);
4191 HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4192 tree addr = chkp_build_addr_expr (orig_arg);
4193 tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4194 build_int_cst (ptr_type_node, offs));
4195 chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4196 bounds, &iter);
4198 arg = DECL_CHAIN (arg);
4200 BITMAP_FREE (slots);
4205 /* Find init/null/copy_ptr_bounds calls and replace them
4206 with assignments. It should allow better code
4207 optimization. */
4209 static void
4210 chkp_remove_useless_builtins ()
4212 basic_block bb;
4213 gimple_stmt_iterator gsi;
4215 FOR_EACH_BB_FN (bb, cfun)
4217 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4219 gimple stmt = gsi_stmt (gsi);
4220 tree fndecl;
4221 enum built_in_function fcode;
4223 /* Find builtins returning first arg and replace
4224 them with assignments. */
4225 if (gimple_code (stmt) == GIMPLE_CALL
4226 && (fndecl = gimple_call_fndecl (stmt))
4227 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4228 && (fcode = DECL_FUNCTION_CODE (fndecl))
4229 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4230 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4231 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4232 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4234 tree res = gimple_call_arg (stmt, 0);
4235 update_call_from_tree (&gsi, res);
4236 stmt = gsi_stmt (gsi);
4237 update_stmt (stmt);
4243 /* Initialize pass. */
4244 static void
4245 chkp_init (void)
4247 basic_block bb;
4248 gimple_stmt_iterator i;
4250 in_chkp_pass = true;
4252 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4253 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4254 chkp_unmark_stmt (gsi_stmt (i));
4256 chkp_invalid_bounds = new hash_set<tree>;
4257 chkp_completed_bounds_set = new hash_set<tree>;
4258 delete chkp_reg_bounds;
4259 chkp_reg_bounds = new hash_map<tree, tree>;
4260 delete chkp_bound_vars;
4261 chkp_bound_vars = new hash_map<tree, tree>;
4262 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4263 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4264 delete chkp_bounds_map;
4265 chkp_bounds_map = new hash_map<tree, tree>;
4266 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
4268 entry_block = NULL;
4269 zero_bounds = NULL_TREE;
4270 none_bounds = NULL_TREE;
4271 incomplete_bounds = integer_zero_node;
4272 tmp_var = NULL_TREE;
4273 size_tmp_var = NULL_TREE;
4275 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4277 /* We create these constant bounds once for each object file.
4278 These symbols go to comdat section and result in single copy
4279 of each one in the final binary. */
4280 chkp_get_zero_bounds_var ();
4281 chkp_get_none_bounds_var ();
4283 calculate_dominance_info (CDI_DOMINATORS);
4284 calculate_dominance_info (CDI_POST_DOMINATORS);
4286 bitmap_obstack_initialize (NULL);
4289 /* Finalize instrumentation pass. */
4290 static void
4291 chkp_fini (void)
4293 in_chkp_pass = false;
4295 delete chkp_invalid_bounds;
4296 delete chkp_completed_bounds_set;
4297 delete chkp_reg_addr_bounds;
4298 delete chkp_incomplete_bounds_map;
4300 free_dominance_info (CDI_DOMINATORS);
4301 free_dominance_info (CDI_POST_DOMINATORS);
4303 bitmap_obstack_release (NULL);
4305 entry_block = NULL;
4306 zero_bounds = NULL_TREE;
4307 none_bounds = NULL_TREE;
4310 /* Main instrumentation pass function. */
4311 static unsigned int
4312 chkp_execute (void)
4314 chkp_init ();
4316 chkp_instrument_function ();
4318 chkp_remove_useless_builtins ();
4320 chkp_function_mark_instrumented (cfun->decl);
4322 chkp_fix_cfg ();
4324 chkp_fini ();
4326 return 0;
4329 /* Instrumentation pass gate. */
4330 static bool
4331 chkp_gate (void)
4333 return cgraph_node::get (cfun->decl)->instrumentation_clone
4334 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4337 namespace {
4339 const pass_data pass_data_chkp =
4341 GIMPLE_PASS, /* type */
4342 "chkp", /* name */
4343 OPTGROUP_NONE, /* optinfo_flags */
4344 TV_NONE, /* tv_id */
4345 PROP_ssa | PROP_cfg, /* properties_required */
4346 0, /* properties_provided */
4347 0, /* properties_destroyed */
4348 0, /* todo_flags_start */
4349 TODO_verify_il
4350 | TODO_update_ssa /* todo_flags_finish */
4353 class pass_chkp : public gimple_opt_pass
4355 public:
4356 pass_chkp (gcc::context *ctxt)
4357 : gimple_opt_pass (pass_data_chkp, ctxt)
4360 /* opt_pass methods: */
4361 virtual opt_pass * clone ()
4363 return new pass_chkp (m_ctxt);
4366 virtual bool gate (function *)
4368 return chkp_gate ();
4371 virtual unsigned int execute (function *)
4373 return chkp_execute ();
4376 }; // class pass_chkp
4378 } // anon namespace
4380 gimple_opt_pass *
4381 make_pass_chkp (gcc::context *ctxt)
4383 return new pass_chkp (ctxt);
4386 #include "gt-tree-chkp.h"