2015-05-22 Robert Dewar <dewar@adacore.com>
[official-gcc.git] / gcc / tree-chkp.c
blob88c1f459c182f8dab145d015a39ce38329f27c9a
/* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-set.h"
25 #include "machmode.h"
26 #include "vec.h"
27 #include "double-int.h"
28 #include "input.h"
29 #include "alias.h"
30 #include "symtab.h"
31 #include "options.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stor-layout.h"
37 #include "varasm.h"
38 #include "target.h"
39 #include "tree-iterator.h"
40 #include "tree-cfg.h"
41 #include "langhooks.h"
42 #include "tree-pass.h"
43 #include "diagnostic.h"
44 #include "ggc.h"
45 #include "is-a.h"
46 #include "cfgloop.h"
47 #include "stringpool.h"
48 #include "tree-ssa-alias.h"
49 #include "tree-ssanames.h"
50 #include "tree-ssa-operands.h"
51 #include "tree-ssa-address.h"
52 #include "tree-ssa.h"
53 #include "predict.h"
54 #include "dominance.h"
55 #include "cfg.h"
56 #include "basic-block.h"
57 #include "tree-ssa-loop-niter.h"
58 #include "gimple-expr.h"
59 #include "gimple.h"
60 #include "tree-phinodes.h"
61 #include "gimple-ssa.h"
62 #include "ssa-iterators.h"
63 #include "gimple-pretty-print.h"
64 #include "gimple-iterator.h"
65 #include "gimplify.h"
66 #include "gimplify-me.h"
67 #include "print-tree.h"
68 #include "hashtab.h"
69 #include "tm.h"
70 #include "hard-reg-set.h"
71 #include "function.h"
72 #include "rtl.h"
73 #include "flags.h"
74 #include "statistics.h"
75 #include "real.h"
76 #include "fixed-value.h"
77 #include "insn-config.h"
78 #include "expmed.h"
79 #include "dojump.h"
80 #include "explow.h"
81 #include "calls.h"
82 #include "emit-rtl.h"
83 #include "stmt.h"
84 #include "expr.h"
85 #include "tree-ssa-propagate.h"
86 #include "gimple-fold.h"
87 #include "tree-chkp.h"
88 #include "gimple-walk.h"
89 #include "rtl.h" /* For MEM_P, assign_temp. */
90 #include "tree-dfa.h"
91 #include "ipa-ref.h"
92 #include "lto-streamer.h"
93 #include "cgraph.h"
94 #include "ipa-chkp.h"
95 #include "params.h"
97 /* Pointer Bounds Checker instruments code with memory checks to find
98 out-of-bounds memory accesses. Checks are performed by computing
99 bounds for each pointer and then comparing address of accessed
100 memory before pointer dereferencing.
102 1. Function clones.
104 See ipa-chkp.c.
106 2. Instrumentation.
108 There are few things to instrument:
110 a) Memory accesses - add checker calls to check address of accessed memory
111 against bounds of dereferenced pointer. Obviously safe memory
112 accesses like static variable access does not have to be instrumented
113 with checks.
115 Example:
117 val_2 = *p_1;
119 with 4 bytes access is transformed into:
121 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
122 D.1_4 = p_1 + 3;
123 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
124 val_2 = *p_1;
126 where __bound_tmp.1_3 are bounds computed for pointer p_1,
127 __builtin___chkp_bndcl is a lower bound check and
128 __builtin___chkp_bndcu is an upper bound check.
130 b) Pointer stores.
132 When pointer is stored in memory we need to store its bounds. To
133 achieve compatibility of instrumented code with regular codes
134 we have to keep data layout and store bounds in special bound tables
135 via special checker call. Implementation of bounds table may vary for
136 different platforms. It has to associate pointer value and its
137 location (it is required because we may have two equal pointers
138 with different bounds stored in different places) with bounds.
139 Another checker builtin allows to get bounds for specified pointer
140 loaded from specified location.
142 Example:
144 buf1[i_1] = &buf2;
146 is transformed into:
148 buf1[i_1] = &buf2;
149 D.1_2 = &buf1[i_1];
150 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
152 where __bound_tmp.1_2 are bounds of &buf2.
154 c) Static initialization.
156 The special case of pointer store is static pointer initialization.
157 Bounds initialization is performed in a few steps:
158 - register all static initializations in front-end using
159 chkp_register_var_initializer
160 - when file compilation finishes we create functions with special
161 attribute 'chkp ctor' and put explicit initialization code
162 (assignments) for all statically initialized pointers.
163 - when checker constructor is compiled checker pass adds required
164 bounds initialization for all statically initialized pointers
165 - since we do not actually need excess pointers initialization
166 in checker constructor we remove such assignments from them
168 d) Calls.
170 For each call in the code we add additional arguments to pass
171 bounds for pointer arguments. We determine type of call arguments
172 using arguments list from function declaration; if function
173 declaration is not available we use function type; otherwise
174 (e.g. for unnamed arguments) we use type of passed value. Function
175 declaration/type is replaced with the instrumented one.
177 Example:
179 val_1 = foo (&buf1, &buf2, &buf1, 0);
181 is translated into:
183 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
184 &buf1, __bound_tmp.1_2, 0);
186 e) Returns.
188 If function returns a pointer value we have to return bounds also.
189 A new operand was added for return statement to hold returned bounds.
191 Example:
193 return &_buf1;
195 is transformed into
197 return &_buf1, __bound_tmp.1_1;
199 3. Bounds computation.
201 Compiler is fully responsible for computing bounds to be used for each
202 memory access. The first step for bounds computation is to find the
203 origin of pointer dereferenced for memory access. Basing on pointer
204 origin we define a way to compute its bounds. There are just few
205 possible cases:
207 a) Pointer is returned by call.
209 In this case we use corresponding checker builtin method to obtain returned
210 bounds.
212 Example:
214 buf_1 = malloc (size_2);
215 foo (buf_1);
217 is translated into:
219 buf_1 = malloc (size_2);
220 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
221 foo (buf_1, __bound_tmp.1_3);
223 b) Pointer is an address of an object.
225 In this case compiler tries to compute objects size and create corresponding
226 bounds. If object has incomplete type then special checker builtin is used to
227 obtain its size at runtime.
229 Example:
231 foo ()
233 <unnamed type> __bound_tmp.3;
234 static int buf[100];
236 <bb 3>:
237 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
239 <bb 2>:
240 return &buf, __bound_tmp.3_2;
243 Example:
245 Address of an object 'extern int buf[]' with incomplete type is
246 returned.
248 foo ()
250 <unnamed type> __bound_tmp.4;
251 long unsigned int __size_tmp.3;
253 <bb 3>:
254 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
255 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
257 <bb 2>:
258 return &buf, __bound_tmp.4_3;
261 c) Pointer is the result of object narrowing.
263 It happens when we use pointer to an object to compute pointer to a part
264 of an object. E.g. we take pointer to a field of a structure. In this
265 case we perform bounds intersection using bounds of original object and
266 bounds of object's part (which are computed basing on its type).
268 There may be some debatable questions about when narrowing should occur
269 and when it should not. To avoid false bound violations in correct
270 programs we do not perform narrowing when address of an array element is
271 obtained (it has address of the whole array) and when address of the first
272 structure field is obtained (because it is guaranteed to be equal to
273 address of the whole structure and it is legal to cast it back to structure).
275 Default narrowing behavior may be changed using compiler flags.
277 Example:
279 In this example address of the second structure field is returned.
281 foo (struct A * p, __bounds_type __bounds_of_p)
283 <unnamed type> __bound_tmp.3;
284 int * _2;
285 int * _5;
287 <bb 2>:
288 _5 = &p_1(D)->second_field;
289 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
290 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
291 __bounds_of_p_3(D));
292 _2 = &p_1(D)->second_field;
293 return _2, __bound_tmp.3_8;
296 Example:
298 In this example address of the first field of array element is returned.
300 foo (struct A * p, __bounds_type __bounds_of_p, int i)
302 long unsigned int _3;
303 long unsigned int _4;
304 struct A * _6;
305 int * _7;
307 <bb 2>:
308 _3 = (long unsigned int) i_1(D);
309 _4 = _3 * 8;
310 _6 = p_5(D) + _4;
311 _7 = &_6->first_field;
312 return _7, __bounds_of_p_2(D);
316 d) Pointer is the result of pointer arithmetic or type cast.
318 In this case bounds of the base pointer are used. In case of binary
319 operation producing a pointer we are analyzing data flow further
320 looking for operand's bounds. One operand is considered as a base
321 if it has some valid bounds. If we fall into a case when none of
322 operands (or both of them) has valid bounds, a default bounds value
323 is used.
325 Trying to find out bounds for binary operations we may fall into
326 cyclic dependencies for pointers. To avoid infinite recursion all
327 walked phi nodes instantly obtain corresponding bounds but created
328 bounds are marked as incomplete. It helps us to stop DF walk during
329 bounds search.
331 When we reach pointer source, some args of incomplete bounds phi obtain
332 valid bounds and those values are propagated further through phi nodes.
333 If no valid bounds were found for phi node then we mark its result as
334 invalid bounds. Process stops when all incomplete bounds become either
335 valid or invalid and we are able to choose a pointer base.
337 e) Pointer is loaded from the memory.
339 In this case we just need to load bounds from the bounds table.
341 Example:
343 foo ()
345 <unnamed type> __bound_tmp.3;
346 static int * buf;
347 int * _2;
349 <bb 2>:
350 _2 = buf;
351 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
352 return _2, __bound_tmp.3_4;
357 typedef void (*assign_handler)(tree, tree, void *);
359 static tree chkp_get_zero_bounds ();
360 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
361 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
362 gimple_stmt_iterator *iter);
363 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
364 tree *elt, bool *safe,
365 bool *bitfield,
366 tree *bounds,
367 gimple_stmt_iterator *iter,
368 bool innermost_bounds);
370 #define chkp_bndldx_fndecl \
371 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
372 #define chkp_bndstx_fndecl \
373 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
374 #define chkp_checkl_fndecl \
375 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
376 #define chkp_checku_fndecl \
377 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
378 #define chkp_bndmk_fndecl \
379 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
380 #define chkp_ret_bnd_fndecl \
381 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
382 #define chkp_intersect_fndecl \
383 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
384 #define chkp_narrow_bounds_fndecl \
385 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
386 #define chkp_sizeof_fndecl \
387 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
388 #define chkp_extract_lower_fndecl \
389 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
390 #define chkp_extract_upper_fndecl \
391 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
393 static GTY (()) tree chkp_uintptr_type;
395 static GTY (()) tree chkp_zero_bounds_var;
396 static GTY (()) tree chkp_none_bounds_var;
398 static GTY (()) basic_block entry_block;
399 static GTY (()) tree zero_bounds;
400 static GTY (()) tree none_bounds;
401 static GTY (()) tree incomplete_bounds;
402 static GTY (()) tree tmp_var;
403 static GTY (()) tree size_tmp_var;
404 static GTY (()) bitmap chkp_abnormal_copies;
406 struct hash_set<tree> *chkp_invalid_bounds;
407 struct hash_set<tree> *chkp_completed_bounds_set;
408 struct hash_map<tree, tree> *chkp_reg_bounds;
409 struct hash_map<tree, tree> *chkp_bound_vars;
410 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
411 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
412 struct hash_map<tree, tree> *chkp_bounds_map;
413 struct hash_map<tree, tree> *chkp_static_var_bounds;
415 static bool in_chkp_pass;
417 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
418 #define CHKP_SIZE_TMP_NAME "__size_tmp"
419 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
420 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
421 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
422 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
423 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
425 /* Static checker constructors may become very large and their
426 compilation with optimization may take too much time.
427 Therefore we put a limit to number of statements in one
428 constructor. Tests with 100 000 statically initialized
429 pointers showed following compilation times on Sandy Bridge
430 server (used -O2):
431 limit 100 => ~18 sec.
432 limit 300 => ~22 sec.
433 limit 1000 => ~30 sec.
434 limit 3000 => ~49 sec.
435 limit 5000 => ~55 sec.
436 limit 10000 => ~76 sec.
437 limit 100000 => ~532 sec. */
438 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
440 struct chkp_ctor_stmt_list
442 tree stmts;
443 int avail;
446 /* Return 1 if function FNDECL is instrumented by Pointer
447 Bounds Checker. */
448 bool
449 chkp_function_instrumented_p (tree fndecl)
451 return fndecl
452 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
455 /* Mark function FNDECL as instrumented. */
456 void
457 chkp_function_mark_instrumented (tree fndecl)
459 if (chkp_function_instrumented_p (fndecl))
460 return;
462 DECL_ATTRIBUTES (fndecl)
463 = tree_cons (get_identifier ("chkp instrumented"), NULL,
464 DECL_ATTRIBUTES (fndecl));
467 /* Return true when STMT is builtin call to instrumentation function
468 corresponding to CODE. */
470 bool
471 chkp_gimple_call_builtin_p (gimple call,
472 enum built_in_function code)
474 tree fndecl;
475 if (is_gimple_call (call)
476 && (fndecl = targetm.builtin_chkp_function (code))
477 && gimple_call_fndecl (call) == fndecl)
478 return true;
479 return false;
482 /* Emit code to store zero bounds for PTR located at MEM. */
483 void
484 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
486 tree zero_bnd, bnd, addr, bndstx;
488 if (flag_chkp_use_static_const_bounds)
489 zero_bnd = chkp_get_zero_bounds_var ();
490 else
491 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
492 integer_zero_node);
493 bnd = make_tree (pointer_bounds_type_node,
494 assign_temp (pointer_bounds_type_node, 0, 1));
495 addr = build1 (ADDR_EXPR,
496 build_pointer_type (TREE_TYPE (mem)), mem);
497 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
499 expand_assignment (bnd, zero_bnd, false);
500 expand_normal (bndstx);
503 /* Build retbnd call for returned value RETVAL.
505 If BNDVAL is not NULL then result is stored
506 in it. Otherwise a temporary is created to
507 hold returned value.
509 GSI points to a position for a retbnd call
510 and is set to created stmt.
512 Cgraph edge is created for a new call if
513 UPDATE_EDGE is 1.
515 Obtained bounds are returned. */
516 tree
517 chkp_insert_retbnd_call (tree bndval, tree retval,
518 gimple_stmt_iterator *gsi)
520 gimple call;
522 if (!bndval)
523 bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
525 call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
526 gimple_call_set_lhs (call, bndval);
527 gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
529 return bndval;
532 /* Mark statement S to not be instrumented. */
533 static void
534 chkp_mark_stmt (gimple s)
536 gimple_set_plf (s, GF_PLF_1, true);
539 /* Mark statement S to be instrumented. */
540 static void
541 chkp_unmark_stmt (gimple s)
543 gimple_set_plf (s, GF_PLF_1, false);
546 /* Return 1 if statement S should not be instrumented. */
547 static bool
548 chkp_marked_stmt_p (gimple s)
550 return gimple_plf (s, GF_PLF_1);
553 /* Get var to be used for bound temps. */
554 static tree
555 chkp_get_tmp_var (void)
557 if (!tmp_var)
558 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
560 return tmp_var;
563 /* Get SSA_NAME to be used as temp. */
564 static tree
565 chkp_get_tmp_reg (gimple stmt)
567 if (in_chkp_pass)
568 return make_ssa_name (chkp_get_tmp_var (), stmt);
570 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
571 CHKP_BOUND_TMP_NAME);
574 /* Get var to be used for size temps. */
575 static tree
576 chkp_get_size_tmp_var (void)
578 if (!size_tmp_var)
579 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
581 return size_tmp_var;
584 /* Register bounds BND for address of OBJ. */
585 static void
586 chkp_register_addr_bounds (tree obj, tree bnd)
588 if (bnd == incomplete_bounds)
589 return;
591 chkp_reg_addr_bounds->put (obj, bnd);
593 if (dump_file && (dump_flags & TDF_DETAILS))
595 fprintf (dump_file, "Regsitered bound ");
596 print_generic_expr (dump_file, bnd, 0);
597 fprintf (dump_file, " for address of ");
598 print_generic_expr (dump_file, obj, 0);
599 fprintf (dump_file, "\n");
603 /* Return bounds registered for address of OBJ. */
604 static tree
605 chkp_get_registered_addr_bounds (tree obj)
607 tree *slot = chkp_reg_addr_bounds->get (obj);
608 return slot ? *slot : NULL_TREE;
611 /* Mark BOUNDS as completed. */
612 static void
613 chkp_mark_completed_bounds (tree bounds)
615 chkp_completed_bounds_set->add (bounds);
617 if (dump_file && (dump_flags & TDF_DETAILS))
619 fprintf (dump_file, "Marked bounds ");
620 print_generic_expr (dump_file, bounds, 0);
621 fprintf (dump_file, " as completed\n");
625 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
626 static bool
627 chkp_completed_bounds (tree bounds)
629 return chkp_completed_bounds_set->contains (bounds);
632 /* Clear comleted bound marks. */
633 static void
634 chkp_erase_completed_bounds (void)
636 delete chkp_completed_bounds_set;
637 chkp_completed_bounds_set = new hash_set<tree>;
640 /* Mark BOUNDS associated with PTR as incomplete. */
641 static void
642 chkp_register_incomplete_bounds (tree bounds, tree ptr)
644 chkp_incomplete_bounds_map->put (bounds, ptr);
646 if (dump_file && (dump_flags & TDF_DETAILS))
648 fprintf (dump_file, "Regsitered incomplete bounds ");
649 print_generic_expr (dump_file, bounds, 0);
650 fprintf (dump_file, " for ");
651 print_generic_expr (dump_file, ptr, 0);
652 fprintf (dump_file, "\n");
656 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
657 static bool
658 chkp_incomplete_bounds (tree bounds)
660 if (bounds == incomplete_bounds)
661 return true;
663 if (chkp_completed_bounds (bounds))
664 return false;
666 return chkp_incomplete_bounds_map->get (bounds) != NULL;
669 /* Clear incomleted bound marks. */
670 static void
671 chkp_erase_incomplete_bounds (void)
673 delete chkp_incomplete_bounds_map;
674 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
677 /* Build and return bndmk call which creates bounds for structure
678 pointed by PTR. Structure should have complete type. */
679 tree
680 chkp_make_bounds_for_struct_addr (tree ptr)
682 tree type = TREE_TYPE (ptr);
683 tree size;
685 gcc_assert (POINTER_TYPE_P (type));
687 size = TYPE_SIZE (TREE_TYPE (type));
689 gcc_assert (size);
691 return build_call_nary (pointer_bounds_type_node,
692 build_fold_addr_expr (chkp_bndmk_fndecl),
693 2, ptr, size);
696 /* Traversal function for chkp_may_finish_incomplete_bounds.
697 Set RES to 0 if at least one argument of phi statement
698 defining bounds (passed in KEY arg) is unknown.
699 Traversal stops when first unknown phi argument is found. */
700 bool
701 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
702 bool *res)
704 gimple phi;
705 unsigned i;
707 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
709 phi = SSA_NAME_DEF_STMT (bounds);
711 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
713 for (i = 0; i < gimple_phi_num_args (phi); i++)
715 tree phi_arg = gimple_phi_arg_def (phi, i);
716 if (!phi_arg)
718 *res = false;
719 /* Do not need to traverse further. */
720 return false;
724 return true;
727 /* Return 1 if all phi nodes created for bounds have their
728 arguments computed. */
729 static bool
730 chkp_may_finish_incomplete_bounds (void)
732 bool res = true;
734 chkp_incomplete_bounds_map
735 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
737 return res;
740 /* Helper function for chkp_finish_incomplete_bounds.
741 Recompute args for bounds phi node. */
742 bool
743 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
744 void *res ATTRIBUTE_UNUSED)
746 tree ptr = *slot;
747 gphi *bounds_phi;
748 gphi *ptr_phi;
749 unsigned i;
751 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
752 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
754 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
755 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
757 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
759 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
760 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
762 add_phi_arg (bounds_phi, bound_arg,
763 gimple_phi_arg_edge (ptr_phi, i),
764 UNKNOWN_LOCATION);
767 return true;
770 /* Mark BOUNDS as invalid. */
771 static void
772 chkp_mark_invalid_bounds (tree bounds)
774 chkp_invalid_bounds->add (bounds);
776 if (dump_file && (dump_flags & TDF_DETAILS))
778 fprintf (dump_file, "Marked bounds ");
779 print_generic_expr (dump_file, bounds, 0);
780 fprintf (dump_file, " as invalid\n");
784 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
785 static bool
786 chkp_valid_bounds (tree bounds)
788 if (bounds == zero_bounds || bounds == none_bounds)
789 return false;
791 return !chkp_invalid_bounds->contains (bounds);
794 /* Helper function for chkp_finish_incomplete_bounds.
795 Check all arguments of phi nodes trying to find
796 valid completed bounds. If there is at least one
797 such arg then bounds produced by phi node are marked
798 as valid completed bounds and all phi args are
799 recomputed. */
800 bool
801 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
803 gimple phi;
804 unsigned i;
806 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
808 if (chkp_completed_bounds (bounds))
809 return true;
811 phi = SSA_NAME_DEF_STMT (bounds);
813 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
815 for (i = 0; i < gimple_phi_num_args (phi); i++)
817 tree phi_arg = gimple_phi_arg_def (phi, i);
819 gcc_assert (phi_arg);
821 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
823 *res = true;
824 chkp_mark_completed_bounds (bounds);
825 chkp_recompute_phi_bounds (bounds, slot, NULL);
826 return true;
830 return true;
833 /* Helper function for chkp_finish_incomplete_bounds.
834 Marks all incompleted bounds as invalid. */
835 bool
836 chkp_mark_invalid_bounds_walker (tree const &bounds,
837 tree *slot ATTRIBUTE_UNUSED,
838 void *res ATTRIBUTE_UNUSED)
840 if (!chkp_completed_bounds (bounds))
842 chkp_mark_invalid_bounds (bounds);
843 chkp_mark_completed_bounds (bounds);
845 return true;
848 /* When all bound phi nodes have all their args computed
849 we have enough info to find valid bounds. We iterate
850 through all incompleted bounds searching for valid
851 bounds. Found valid bounds are marked as completed
852 and all remaining incompleted bounds are recomputed.
853 Process continues until no new valid bounds may be
854 found. All remained incompleted bounds are marked as
855 invalid (i.e. have no valid source of bounds). */
856 static void
857 chkp_finish_incomplete_bounds (void)
859 bool found_valid;
861 while (found_valid)
863 found_valid = false;
865 chkp_incomplete_bounds_map->
866 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
868 if (found_valid)
869 chkp_incomplete_bounds_map->
870 traverse<void *, chkp_recompute_phi_bounds> (NULL);
873 chkp_incomplete_bounds_map->
874 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
875 chkp_incomplete_bounds_map->
876 traverse<void *, chkp_recompute_phi_bounds> (NULL);
878 chkp_erase_completed_bounds ();
879 chkp_erase_incomplete_bounds ();
882 /* Return 1 if type TYPE is a pointer type or a
883 structure having a pointer type as one of its fields.
884 Otherwise return 0. */
885 bool
886 chkp_type_has_pointer (const_tree type)
888 bool res = false;
890 if (BOUNDED_TYPE_P (type))
891 res = true;
892 else if (RECORD_OR_UNION_TYPE_P (type))
894 tree field;
896 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
897 if (TREE_CODE (field) == FIELD_DECL)
898 res = res || chkp_type_has_pointer (TREE_TYPE (field));
900 else if (TREE_CODE (type) == ARRAY_TYPE)
901 res = chkp_type_has_pointer (TREE_TYPE (type));
903 return res;
906 unsigned
907 chkp_type_bounds_count (const_tree type)
909 unsigned res = 0;
911 if (!type)
912 res = 0;
913 else if (BOUNDED_TYPE_P (type))
914 res = 1;
915 else if (RECORD_OR_UNION_TYPE_P (type))
917 bitmap have_bound;
919 bitmap_obstack_initialize (NULL);
920 have_bound = BITMAP_ALLOC (NULL);
921 chkp_find_bound_slots (type, have_bound);
922 res = bitmap_count_bits (have_bound);
923 BITMAP_FREE (have_bound);
924 bitmap_obstack_release (NULL);
927 return res;
930 /* Get bounds associated with NODE via
931 chkp_set_bounds call. */
932 tree
933 chkp_get_bounds (tree node)
935 tree *slot;
937 if (!chkp_bounds_map)
938 return NULL_TREE;
940 slot = chkp_bounds_map->get (node);
941 return slot ? *slot : NULL_TREE;
944 /* Associate bounds VAL with NODE. */
945 void
946 chkp_set_bounds (tree node, tree val)
948 if (!chkp_bounds_map)
949 chkp_bounds_map = new hash_map<tree, tree>;
951 chkp_bounds_map->put (node, val);
954 /* Check if statically initialized variable VAR require
955 static bounds initialization. If VAR is added into
956 bounds initlization list then 1 is returned. Otherwise
957 return 0. */
958 extern bool
959 chkp_register_var_initializer (tree var)
961 if (!flag_check_pointer_bounds
962 || DECL_INITIAL (var) == error_mark_node)
963 return false;
965 gcc_assert (TREE_CODE (var) == VAR_DECL);
966 gcc_assert (DECL_INITIAL (var));
968 if (TREE_STATIC (var)
969 && chkp_type_has_pointer (TREE_TYPE (var)))
971 varpool_node::get_create (var)->need_bounds_init = 1;
972 return true;
975 return false;
978 /* Helper function for chkp_finish_file.
980 Add new modification statement (RHS is assigned to LHS)
981 into list of static initializer statementes (passed in ARG).
982 If statements list becomes too big, emit checker constructor
983 and start the new one. */
984 static void
985 chkp_add_modification_to_stmt_list (tree lhs,
986 tree rhs,
987 void *arg)
989 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
990 tree modify;
992 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
993 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
995 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
996 append_to_statement_list (modify, &stmts->stmts);
998 stmts->avail--;
1001 /* Build and return ADDR_EXPR for specified object OBJ. */
1002 static tree
1003 chkp_build_addr_expr (tree obj)
1005 return TREE_CODE (obj) == TARGET_MEM_REF
1006 ? tree_mem_ref_addr (ptr_type_node, obj)
1007 : build_fold_addr_expr (obj);
1010 /* Helper function for chkp_finish_file.
1011 Initialize bound variable BND_VAR with bounds of variable
1012 VAR to statements list STMTS. If statements list becomes
1013 too big, emit checker constructor and start the new one. */
1014 static void
1015 chkp_output_static_bounds (tree bnd_var, tree var,
1016 struct chkp_ctor_stmt_list *stmts)
1018 tree lb, ub, size;
1020 if (TREE_CODE (var) == STRING_CST)
1022 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1023 size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
1025 else if (DECL_SIZE (var)
1026 && !chkp_variable_size_type (TREE_TYPE (var)))
1028 /* Compute bounds using statically known size. */
1029 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1030 size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
1032 else
1034 /* Compute bounds using dynamic size. */
1035 tree call;
1037 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1038 call = build1 (ADDR_EXPR,
1039 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
1040 chkp_sizeof_fndecl);
1041 size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
1042 call, 1, var);
1044 if (flag_chkp_zero_dynamic_size_as_infinite)
1046 tree max_size, cond;
1048 max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
1049 cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
1050 size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
1053 size = size_binop (MINUS_EXPR, size, size_one_node);
1056 ub = size_binop (PLUS_EXPR, lb, size);
1057 stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
1058 &stmts->stmts);
1059 if (stmts->avail <= 0)
1061 cgraph_build_static_cdtor ('B', stmts->stmts,
1062 MAX_RESERVED_INIT_PRIORITY + 2);
1063 stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
1064 stmts->stmts = NULL;
1068 /* Return entry block to be used for checker initilization code.
1069 Create new block if required. */
1070 static basic_block
1071 chkp_get_entry_block (void)
1073 if (!entry_block)
1074 entry_block
1075 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1077 return entry_block;
1080 /* Return a bounds var to be used for pointer var PTR_VAR. */
1081 static tree
1082 chkp_get_bounds_var (tree ptr_var)
1084 tree bnd_var;
1085 tree *slot;
1087 slot = chkp_bound_vars->get (ptr_var);
1088 if (slot)
1089 bnd_var = *slot;
1090 else
1092 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1093 CHKP_BOUND_TMP_NAME);
1094 chkp_bound_vars->put (ptr_var, bnd_var);
1097 return bnd_var;
1100 /* If BND is an abnormal bounds copy, return a copied value.
1101 Otherwise return BND. */
1102 static tree
1103 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
1105 if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1107 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1108 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1109 bnd = gimple_assign_rhs1 (bnd_def);
1112 return bnd;
1115 /* Register bounds BND for object PTR in global bounds table.
1116 A copy of bounds may be created for abnormal ssa names.
1117 Returns bounds to use for PTR. */
1118 static tree
1119 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1121 bool abnormal_ptr;
1123 if (!chkp_reg_bounds)
1124 return bnd;
1126 /* Do nothing if bounds are incomplete_bounds
1127 because it means bounds will be recomputed. */
1128 if (bnd == incomplete_bounds)
1129 return bnd;
1131 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1132 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1133 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1135 /* A single bounds value may be reused multiple times for
1136 different pointer values. It may cause coalescing issues
1137 for abnormal SSA names. To avoid it we create a bounds
1138 copy in case it is computed for abnormal SSA name.
1140 We also cannot reuse such created copies for other pointers */
1141 if (abnormal_ptr
1142 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1144 tree bnd_var = NULL_TREE;
1146 if (abnormal_ptr)
1148 if (SSA_NAME_VAR (ptr))
1149 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1151 else
1152 bnd_var = chkp_get_tmp_var ();
1154 /* For abnormal copies we may just find original
1155 bounds and use them. */
1156 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1157 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1158 /* For undefined values we usually use none bounds
1159 value but in case of abnormal edge it may cause
1160 coalescing failures. Use default definition of
1161 bounds variable instead to avoid it. */
1162 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1163 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1165 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1167 if (dump_file && (dump_flags & TDF_DETAILS))
1169 fprintf (dump_file, "Using default def bounds ");
1170 print_generic_expr (dump_file, bnd, 0);
1171 fprintf (dump_file, " for abnormal default def SSA name ");
1172 print_generic_expr (dump_file, ptr, 0);
1173 fprintf (dump_file, "\n");
1176 else
1178 tree copy;
1179 gimple def = SSA_NAME_DEF_STMT (ptr);
1180 gimple assign;
1181 gimple_stmt_iterator gsi;
1183 if (bnd_var)
1184 copy = make_ssa_name (bnd_var);
1185 else
1186 copy = make_temp_ssa_name (pointer_bounds_type_node,
1187 NULL,
1188 CHKP_BOUND_TMP_NAME);
1189 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1190 assign = gimple_build_assign (copy, bnd);
1192 if (dump_file && (dump_flags & TDF_DETAILS))
1194 fprintf (dump_file, "Creating a copy of bounds ");
1195 print_generic_expr (dump_file, bnd, 0);
1196 fprintf (dump_file, " for abnormal SSA name ");
1197 print_generic_expr (dump_file, ptr, 0);
1198 fprintf (dump_file, "\n");
1201 if (gimple_code (def) == GIMPLE_NOP)
1203 gsi = gsi_last_bb (chkp_get_entry_block ());
1204 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1205 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1206 else
1207 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1209 else
1211 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1212 /* Sometimes (e.g. when we load a pointer from a
1213 memory) bounds are produced later than a pointer.
1214 We need to insert bounds copy appropriately. */
1215 if (gimple_code (bnd_def) != GIMPLE_NOP
1216 && stmt_dominates_stmt_p (def, bnd_def))
1217 gsi = gsi_for_stmt (bnd_def);
1218 else
1219 gsi = gsi_for_stmt (def);
1220 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1223 bnd = copy;
1226 if (abnormal_ptr)
1227 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1230 chkp_reg_bounds->put (ptr, bnd);
1232 if (dump_file && (dump_flags & TDF_DETAILS))
1234 fprintf (dump_file, "Regsitered bound ");
1235 print_generic_expr (dump_file, bnd, 0);
1236 fprintf (dump_file, " for pointer ");
1237 print_generic_expr (dump_file, ptr, 0);
1238 fprintf (dump_file, "\n");
1241 return bnd;
1244 /* Get bounds registered for object PTR in global bounds table. */
1245 static tree
1246 chkp_get_registered_bounds (tree ptr)
1248 tree *slot;
1250 if (!chkp_reg_bounds)
1251 return NULL_TREE;
1253 slot = chkp_reg_bounds->get (ptr);
1254 return slot ? *slot : NULL_TREE;
1257 /* Add bound retvals to return statement pointed by GSI. */
1259 static void
1260 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1262 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1263 tree retval = gimple_return_retval (ret);
1264 tree ret_decl = DECL_RESULT (cfun->decl);
1265 tree bounds;
1267 if (!retval)
1268 return;
1270 if (BOUNDED_P (ret_decl))
1272 bounds = chkp_find_bounds (retval, gsi);
1273 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1274 gimple_return_set_retbnd (ret, bounds);
1277 update_stmt (ret);
1280 /* Force OP to be suitable for using as an argument for call.
1281 New statements (if any) go to SEQ. */
1282 static tree
1283 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1285 gimple_seq stmts;
1286 gimple_stmt_iterator si;
1288 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1290 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1291 chkp_mark_stmt (gsi_stmt (si));
1293 gimple_seq_add_seq (seq, stmts);
1295 return op;
1298 /* Generate lower bound check for memory access by ADDR.
1299 Check is inserted before the position pointed by ITER.
1300 DIRFLAG indicates whether memory access is load or store. */
1301 static void
1302 chkp_check_lower (tree addr, tree bounds,
1303 gimple_stmt_iterator iter,
1304 location_t location,
1305 tree dirflag)
1307 gimple_seq seq;
1308 gimple check;
1309 tree node;
1311 if (!chkp_function_instrumented_p (current_function_decl)
1312 && bounds == chkp_get_zero_bounds ())
1313 return;
1315 if (dirflag == integer_zero_node
1316 && !flag_chkp_check_read)
1317 return;
1319 if (dirflag == integer_one_node
1320 && !flag_chkp_check_write)
1321 return;
1323 seq = NULL;
1325 node = chkp_force_gimple_call_op (addr, &seq);
1327 check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1328 chkp_mark_stmt (check);
1329 gimple_call_set_with_bounds (check, true);
1330 gimple_set_location (check, location);
1331 gimple_seq_add_stmt (&seq, check);
1333 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1335 if (dump_file && (dump_flags & TDF_DETAILS))
1337 gimple before = gsi_stmt (iter);
1338 fprintf (dump_file, "Generated lower bound check for statement ");
1339 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1340 fprintf (dump_file, " ");
1341 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1345 /* Generate upper bound check for memory access by ADDR.
1346 Check is inserted before the position pointed by ITER.
1347 DIRFLAG indicates whether memory access is load or store. */
1348 static void
1349 chkp_check_upper (tree addr, tree bounds,
1350 gimple_stmt_iterator iter,
1351 location_t location,
1352 tree dirflag)
1354 gimple_seq seq;
1355 gimple check;
1356 tree node;
1358 if (!chkp_function_instrumented_p (current_function_decl)
1359 && bounds == chkp_get_zero_bounds ())
1360 return;
1362 if (dirflag == integer_zero_node
1363 && !flag_chkp_check_read)
1364 return;
1366 if (dirflag == integer_one_node
1367 && !flag_chkp_check_write)
1368 return;
1370 seq = NULL;
1372 node = chkp_force_gimple_call_op (addr, &seq);
1374 check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1375 chkp_mark_stmt (check);
1376 gimple_call_set_with_bounds (check, true);
1377 gimple_set_location (check, location);
1378 gimple_seq_add_stmt (&seq, check);
1380 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1382 if (dump_file && (dump_flags & TDF_DETAILS))
1384 gimple before = gsi_stmt (iter);
1385 fprintf (dump_file, "Generated upper bound check for statement ");
1386 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1387 fprintf (dump_file, " ");
1388 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1392 /* Generate lower and upper bound checks for memory access
1393 to memory slot [FIRST, LAST] againsr BOUNDS. Checks
1394 are inserted before the position pointed by ITER.
1395 DIRFLAG indicates whether memory access is load or store. */
1396 void
1397 chkp_check_mem_access (tree first, tree last, tree bounds,
1398 gimple_stmt_iterator iter,
1399 location_t location,
1400 tree dirflag)
1402 chkp_check_lower (first, bounds, iter, location, dirflag);
1403 chkp_check_upper (last, bounds, iter, location, dirflag);
1406 /* Replace call to _bnd_chk_* pointed by GSI with
1407 bndcu and bndcl calls. DIRFLAG determines whether
1408 check is for read or write. */
1410 void
1411 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1412 tree dirflag)
1414 gimple_stmt_iterator call_iter = *gsi;
1415 gimple call = gsi_stmt (*gsi);
1416 tree fndecl = gimple_call_fndecl (call);
1417 tree addr = gimple_call_arg (call, 0);
1418 tree bounds = chkp_find_bounds (addr, gsi);
1420 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1421 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1422 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1424 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1425 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1427 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1429 tree size = gimple_call_arg (call, 1);
1430 addr = fold_build_pointer_plus (addr, size);
1431 addr = fold_build_pointer_plus_hwi (addr, -1);
1432 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1435 gsi_remove (&call_iter, true);
1438 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1439 corresponding bounds extract call. */
1441 void
1442 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1444 gimple call = gsi_stmt (*gsi);
1445 tree fndecl = gimple_call_fndecl (call);
1446 tree addr = gimple_call_arg (call, 0);
1447 tree bounds = chkp_find_bounds (addr, gsi);
1448 gimple extract;
1450 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1451 fndecl = chkp_extract_lower_fndecl;
1452 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1453 fndecl = chkp_extract_upper_fndecl;
1454 else
1455 gcc_unreachable ();
1457 extract = gimple_build_call (fndecl, 1, bounds);
1458 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1459 chkp_mark_stmt (extract);
1461 gsi_replace (gsi, extract, false);
1464 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1465 static tree
1466 chkp_build_component_ref (tree obj, tree field)
1468 tree res;
1470 /* If object is TMR then we do not use component_ref but
1471 add offset instead. We need it to be able to get addr
1472 of the reasult later. */
1473 if (TREE_CODE (obj) == TARGET_MEM_REF)
1475 tree offs = TMR_OFFSET (obj);
1476 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1477 offs, DECL_FIELD_OFFSET (field));
1479 gcc_assert (offs);
1481 res = copy_node (obj);
1482 TREE_TYPE (res) = TREE_TYPE (field);
1483 TMR_OFFSET (res) = offs;
1485 else
1486 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1488 return res;
1491 /* Return ARRAY_REF for array ARR and index IDX with
1492 specified element type ETYPE and element size ESIZE. */
1493 static tree
1494 chkp_build_array_ref (tree arr, tree etype, tree esize,
1495 unsigned HOST_WIDE_INT idx)
1497 tree index = build_int_cst (size_type_node, idx);
1498 tree res;
1500 /* If object is TMR then we do not use array_ref but
1501 add offset instead. We need it to be able to get addr
1502 of the reasult later. */
1503 if (TREE_CODE (arr) == TARGET_MEM_REF)
1505 tree offs = TMR_OFFSET (arr);
1507 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1508 esize, index);
1509 gcc_assert(esize);
1511 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1512 offs, esize);
1513 gcc_assert (offs);
1515 res = copy_node (arr);
1516 TREE_TYPE (res) = etype;
1517 TMR_OFFSET (res) = offs;
1519 else
1520 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1522 return res;
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   ITER points a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      /* ELEM itself carries bounds: load it into a temporary and
	 compute its bounds once; a slot already filled (e.g. by a
	 union member processed earlier) is left untouched.  */
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  tree temp = make_temp_ssa_name (type, NULL, "");
	  gimple assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      /* Recurse into each field, accumulating its bit offset.  */
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    /* DECL_FIELD_OFFSET is in bytes, hence the * 8 to get
	       bits consistent with DECL_FIELD_BIT_OFFSET.  */
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;

	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays with unknown or empty domain
	 (max index of -1 means zero elements).  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      /* Recurse into every array element.  */
      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
/* Fill HAVE_BOUND output bitmap with information about
   bounds requred for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    /* TYPE itself carries bounds: mark its pointer-sized slot.  */
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      /* Recurse into each field, accumulating its bit offset.  */
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    /* DECL_FIELD_OFFSET is in bytes, hence the * 8 to get
	       bits consistent with DECL_FIELD_BIT_OFFSET.  */
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays with a non-constant, unknown, or empty domain
	 (max index of -1 means zero elements).  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      /* Recurse into every array element.  */
      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
1644 /* Fill bitmap RES with information about bounds for
1645 type TYPE. See chkp_find_bound_slots_1 for more
1646 details. */
1647 void
1648 chkp_find_bound_slots (const_tree type, bitmap res)
1650 bitmap_clear (res);
1651 chkp_find_bound_slots_1 (type, res, 0);
1654 /* Return 1 if call to FNDECL should be instrumented
1655 and 0 otherwise. */
1657 static bool
1658 chkp_instrument_normal_builtin (tree fndecl)
1660 switch (DECL_FUNCTION_CODE (fndecl))
1662 case BUILT_IN_STRLEN:
1663 case BUILT_IN_STRCPY:
1664 case BUILT_IN_STRNCPY:
1665 case BUILT_IN_STPCPY:
1666 case BUILT_IN_STPNCPY:
1667 case BUILT_IN_STRCAT:
1668 case BUILT_IN_STRNCAT:
1669 case BUILT_IN_MEMCPY:
1670 case BUILT_IN_MEMPCPY:
1671 case BUILT_IN_MEMSET:
1672 case BUILT_IN_MEMMOVE:
1673 case BUILT_IN_BZERO:
1674 case BUILT_IN_STRCMP:
1675 case BUILT_IN_STRNCMP:
1676 case BUILT_IN_BCMP:
1677 case BUILT_IN_MEMCMP:
1678 case BUILT_IN_MEMCPY_CHK:
1679 case BUILT_IN_MEMPCPY_CHK:
1680 case BUILT_IN_MEMMOVE_CHK:
1681 case BUILT_IN_MEMSET_CHK:
1682 case BUILT_IN_STRCPY_CHK:
1683 case BUILT_IN_STRNCPY_CHK:
1684 case BUILT_IN_STPCPY_CHK:
1685 case BUILT_IN_STPNCPY_CHK:
1686 case BUILT_IN_STRCAT_CHK:
1687 case BUILT_IN_STRNCAT_CHK:
1688 case BUILT_IN_MALLOC:
1689 case BUILT_IN_CALLOC:
1690 case BUILT_IN_REALLOC:
1691 return 1;
1693 default:
1694 return 0;
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */
static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      /* integer_minus_one_node: check both read and write directions.  */
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    /* Past the end of a varargs TYPE_ARG_TYPES list:
	       fall back to the actual argument's type.  */
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      /* A pointer (or pass-by-reference) argument is followed by its
	 bounds; an aggregate containing pointers is followed by
	 bounds for each contained pointer slot.  */
      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds were added we can keep the original statement.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA defs of the old call at the new statement
	 before replacing it in the IL.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.
   The variable is public and one-only so that identical bounds merge
   across translation units.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires '%s' "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      /* Reuse the existing decl instead of emitting a duplicate.  */
      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* The initializer is the target-specific constant bounds value.  */
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
/* Generate code to make bounds with specified lower bound LB and SIZE.
   if AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.
   Returns an SSA name holding the created bounds.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* LB and SIZE may be arbitrary expressions; gimplify them into
     valid call operands first.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  /* Build the bndmk call producing the bounds value.  */
  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER is honored only when an explicit insertion point was given.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
2018 /* Return var holding zero bounds. */
2019 tree
2020 chkp_get_zero_bounds_var (void)
2022 if (!chkp_zero_bounds_var)
2023 chkp_zero_bounds_var
2024 = chkp_make_static_const_bounds (0, -1,
2025 CHKP_ZERO_BOUNDS_VAR_NAME);
2026 return chkp_zero_bounds_var;
2029 /* Return var holding none bounds. */
2030 tree
2031 chkp_get_none_bounds_var (void)
2033 if (!chkp_none_bounds_var)
2034 chkp_none_bounds_var
2035 = chkp_make_static_const_bounds (-1, 0,
2036 CHKP_NONE_BOUNDS_VAR_NAME);
2037 return chkp_none_bounds_var;
/* Return SSA_NAME used to represent zero bounds.  The value is
   created once per function and cached in ZERO_BOUNDS.  */
static tree
chkp_get_zero_bounds (void)
{
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  /* Either load the static constant bounds variable in the entry
     block, or emit a bndmk call, depending on the -fchkp-use-static*
     flag settings.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      zero_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
/* Return SSA_NAME used to represent none bounds.  The value is
   created once per function and cached in NONE_BOUNDS.  */
static tree
chkp_get_none_bounds (void)
{
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");

  /* Either load the static constant bounds variable in the entry
     block, or emit a bndmk call, depending on the -fchkp-use-static*
     flag settings.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      none_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* [-1, +1] expressed as lb = -1, size = 2 — an always-failing
       range used for "no bounds available".  */
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
2099 /* Return bounds to be used as a result of operation which
2100 should not create poiunter (e.g. MULT_EXPR). */
2101 static tree
2102 chkp_get_invalid_op_bounds (void)
2104 return chkp_get_zero_bounds ();
2107 /* Return bounds to be used for loads of non-pointer values. */
2108 static tree
2109 chkp_get_nonpointer_load_bounds (void)
2111 return chkp_get_zero_bounds ();
/* Return 1 if may use bndret call to get bounds for pointer
   returned by CALL.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  /* Internal functions are never instrumented.  */
  if (gimple_call_internal_p (call))
    return false;

  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  /* A call already carrying bounds returns them too.  */
  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  /* Back-end (machine-dependent) builtins are not instrumented.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      /* Other normal builtins return bounds only when an
	 always_inline instrumented clone with a body exists
	 (mirrors the logic in chkp_add_bounds_to_call_stmt).  */
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  return true;
}
/* Build bounds returned by CALL.  Several special cases (alloca,
   chkp builtins, calls returning one of their arguments) are handled
   directly; in the general case a __chkp_bndret call is emitted
   after CALL.  The resulting bounds are registered for the call's
   LHS and returned.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      /* alloca's result is the lower bound; its argument is the size.  */
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* RETARG counts original (non-bounds) arguments; skip the
	     interleaved bounds arguments to find the real position.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call))
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    /* The call cannot supply bounds; fall back to zero bounds.  */
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2259 /* Return bounds used as returned by call
2260 which produced SSA name VAL. */
2261 gcall *
2262 chkp_retbnd_call_by_val (tree val)
2264 if (TREE_CODE (val) != SSA_NAME)
2265 return NULL;
2267 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2269 imm_use_iterator use_iter;
2270 use_operand_p use_p;
2271 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2272 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2273 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2274 return as_a <gcall *> (USE_STMT (use_p));
2276 return NULL;
2279 /* Check the next parameter for the given PARM is bounds
2280 and return it's default SSA_NAME (create if required). */
2281 static tree
2282 chkp_get_next_bounds_parm (tree parm)
2284 tree bounds = TREE_CHAIN (parm);
2285 gcc_assert (POINTER_BOUNDS_P (bounds));
2286 bounds = ssa_default_def (cfun, bounds);
2287 if (!bounds)
2289 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2290 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2292 return bounds;
2295 /* Return bounds to be used for input argument PARM. */
2296 static tree
2297 chkp_get_bound_for_parm (tree parm)
2299 tree decl = SSA_NAME_VAR (parm);
2300 tree bounds;
2302 gcc_assert (TREE_CODE (decl) == PARM_DECL);
2304 bounds = chkp_get_registered_bounds (parm);
2306 if (!bounds)
2307 bounds = chkp_get_registered_bounds (decl);
2309 if (!bounds)
2311 tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;
2313 /* For static chain param we return zero bounds
2314 because currently we do not check dereferences
2315 of this pointer. */
2316 if (cfun->static_chain_decl == decl)
2317 bounds = chkp_get_zero_bounds ();
2318 /* If non instrumented runtime is used then it may be useful
2319 to use zero bounds for input arguments of main
2320 function. */
2321 else if (flag_chkp_zero_input_bounds_for_main
2322 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
2323 "main") == 0)
2324 bounds = chkp_get_zero_bounds ();
2325 else if (BOUNDED_P (parm))
2327 bounds = chkp_get_next_bounds_parm (decl);
2328 bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);
2330 if (dump_file && (dump_flags & TDF_DETAILS))
2332 fprintf (dump_file, "Built arg bounds (");
2333 print_generic_expr (dump_file, bounds, 0);
2334 fprintf (dump_file, ") for arg: ");
2335 print_node (dump_file, "", decl, 0);
2338 else
2339 bounds = chkp_get_zero_bounds ();
2342 if (!chkp_get_registered_bounds (parm))
2343 bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);
2345 if (dump_file && (dump_flags & TDF_DETAILS))
2347 fprintf (dump_file, "Using bounds ");
2348 print_generic_expr (dump_file, bounds, 0);
2349 fprintf (dump_file, " for parm ");
2350 print_generic_expr (dump_file, parm, 0);
2351 fprintf (dump_file, " of type ");
2352 print_generic_expr (dump_file, TREE_TYPE (parm), 0);
2353 fprintf (dump_file, ".\n");
2356 return bounds;
2359 /* Build and return CALL_EXPR for bndstx builtin with specified
2360 arguments. */
2361 tree
2362 chkp_build_bndldx_call (tree addr, tree ptr)
2364 tree fn = build1 (ADDR_EXPR,
2365 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2366 chkp_bndldx_fndecl);
2367 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2368 fn, 2, addr, ptr);
2369 CALL_WITH_BOUNDS_P (call) = true;
2370 return call;
2373 /* Insert code to load bounds for PTR located by ADDR.
2374 Code is inserted after position pointed by GSI.
2375 Loaded bounds are returned. */
2376 static tree
2377 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2379 gimple_seq seq;
2380 gimple stmt;
2381 tree bounds;
2383 seq = NULL;
2385 addr = chkp_force_gimple_call_op (addr, &seq);
2386 ptr = chkp_force_gimple_call_op (ptr, &seq);
2388 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2389 chkp_mark_stmt (stmt);
2390 bounds = chkp_get_tmp_reg (stmt);
2391 gimple_call_set_lhs (stmt, bounds);
2393 gimple_seq_add_stmt (&seq, stmt);
2395 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2397 if (dump_file && (dump_flags & TDF_DETAILS))
2399 fprintf (dump_file, "Generated bndldx for pointer ");
2400 print_generic_expr (dump_file, ptr, 0);
2401 fprintf (dump_file, ": ");
2402 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2405 return bounds;
2408 /* Build and return CALL_EXPR for bndstx builtin with specified
2409 arguments. */
2410 tree
2411 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2413 tree fn = build1 (ADDR_EXPR,
2414 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2415 chkp_bndstx_fndecl);
2416 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2417 fn, 3, ptr, bounds, addr);
2418 CALL_WITH_BOUNDS_P (call) = true;
2419 return call;
2422 /* Insert code to store BOUNDS for PTR stored by ADDR.
2423 New statements are inserted after position pointed
2424 by GSI. */
2425 void
2426 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2427 gimple_stmt_iterator *gsi)
2429 gimple_seq seq;
2430 gimple stmt;
2432 seq = NULL;
2434 addr = chkp_force_gimple_call_op (addr, &seq);
2435 ptr = chkp_force_gimple_call_op (ptr, &seq);
2437 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2438 chkp_mark_stmt (stmt);
2439 gimple_call_set_with_bounds (stmt, true);
2441 gimple_seq_add_stmt (&seq, stmt);
2443 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2445 if (dump_file && (dump_flags & TDF_DETAILS))
2447 fprintf (dump_file, "Generated bndstx for pointer store ");
2448 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2449 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2453 /* Compute bounds for pointer NODE which was assigned in
2454 assignment statement ASSIGN. Return computed bounds. */
2455 static tree
2456 chkp_compute_bounds_for_assignment (tree node, gimple assign)
2458 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
2459 tree rhs1 = gimple_assign_rhs1 (assign);
2460 tree bounds = NULL_TREE;
2461 gimple_stmt_iterator iter = gsi_for_stmt (assign);
2463 if (dump_file && (dump_flags & TDF_DETAILS))
2465 fprintf (dump_file, "Computing bounds for assignment: ");
2466 print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
2469 switch (rhs_code)
2471 case MEM_REF:
2472 case TARGET_MEM_REF:
2473 case COMPONENT_REF:
2474 case ARRAY_REF:
2475 /* We need to load bounds from the bounds table. */
2476 bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
2477 break;
2479 case VAR_DECL:
2480 case SSA_NAME:
2481 case ADDR_EXPR:
2482 case POINTER_PLUS_EXPR:
2483 case NOP_EXPR:
2484 case CONVERT_EXPR:
2485 case INTEGER_CST:
2486 /* Bounds are just propagated from RHS. */
2487 bounds = chkp_find_bounds (rhs1, &iter);
2488 break;
2490 case VIEW_CONVERT_EXPR:
2491 /* Bounds are just propagated from RHS. */
2492 bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
2493 break;
2495 case PARM_DECL:
2496 if (BOUNDED_P (rhs1))
2498 /* We need to load bounds from the bounds table. */
2499 bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
2500 node, &iter);
2501 TREE_ADDRESSABLE (rhs1) = 1;
2503 else
2504 bounds = chkp_get_nonpointer_load_bounds ();
2505 break;
2507 case MINUS_EXPR:
2508 case PLUS_EXPR:
2509 case BIT_AND_EXPR:
2510 case BIT_IOR_EXPR:
2511 case BIT_XOR_EXPR:
2513 tree rhs2 = gimple_assign_rhs2 (assign);
2514 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2515 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2517 /* First we try to check types of operands. If it
2518 does not help then look at bound values.
2520 If some bounds are incomplete and other are
2521 not proven to be valid (i.e. also incomplete
2522 or invalid because value is not pointer) then
2523 resulting value is incomplete and will be
2524 recomputed later in chkp_finish_incomplete_bounds. */
2525 if (BOUNDED_P (rhs1)
2526 && !BOUNDED_P (rhs2))
2527 bounds = bnd1;
2528 else if (BOUNDED_P (rhs2)
2529 && !BOUNDED_P (rhs1)
2530 && rhs_code != MINUS_EXPR)
2531 bounds = bnd2;
2532 else if (chkp_incomplete_bounds (bnd1))
2533 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
2534 && !chkp_incomplete_bounds (bnd2))
2535 bounds = bnd2;
2536 else
2537 bounds = incomplete_bounds;
2538 else if (chkp_incomplete_bounds (bnd2))
2539 if (chkp_valid_bounds (bnd1)
2540 && !chkp_incomplete_bounds (bnd1))
2541 bounds = bnd1;
2542 else
2543 bounds = incomplete_bounds;
2544 else if (!chkp_valid_bounds (bnd1))
2545 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
2546 bounds = bnd2;
2547 else if (bnd2 == chkp_get_zero_bounds ())
2548 bounds = bnd2;
2549 else
2550 bounds = bnd1;
2551 else if (!chkp_valid_bounds (bnd2))
2552 bounds = bnd1;
2553 else
2554 /* Seems both operands may have valid bounds
2555 (e.g. pointer minus pointer). In such case
2556 use default invalid op bounds. */
2557 bounds = chkp_get_invalid_op_bounds ();
2559 break;
2561 case BIT_NOT_EXPR:
2562 case NEGATE_EXPR:
2563 case LSHIFT_EXPR:
2564 case RSHIFT_EXPR:
2565 case LROTATE_EXPR:
2566 case RROTATE_EXPR:
2567 case EQ_EXPR:
2568 case NE_EXPR:
2569 case LT_EXPR:
2570 case LE_EXPR:
2571 case GT_EXPR:
2572 case GE_EXPR:
2573 case MULT_EXPR:
2574 case RDIV_EXPR:
2575 case TRUNC_DIV_EXPR:
2576 case FLOOR_DIV_EXPR:
2577 case CEIL_DIV_EXPR:
2578 case ROUND_DIV_EXPR:
2579 case TRUNC_MOD_EXPR:
2580 case FLOOR_MOD_EXPR:
2581 case CEIL_MOD_EXPR:
2582 case ROUND_MOD_EXPR:
2583 case EXACT_DIV_EXPR:
2584 case FIX_TRUNC_EXPR:
2585 case FLOAT_EXPR:
2586 case REALPART_EXPR:
2587 case IMAGPART_EXPR:
2588 /* No valid bounds may be produced by these exprs. */
2589 bounds = chkp_get_invalid_op_bounds ();
2590 break;
2592 case COND_EXPR:
2594 tree val1 = gimple_assign_rhs2 (assign);
2595 tree val2 = gimple_assign_rhs3 (assign);
2596 tree bnd1 = chkp_find_bounds (val1, &iter);
2597 tree bnd2 = chkp_find_bounds (val2, &iter);
2598 gimple stmt;
2600 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2601 bounds = incomplete_bounds;
2602 else if (bnd1 == bnd2)
2603 bounds = bnd1;
2604 else
2606 rhs1 = unshare_expr (rhs1);
2608 bounds = chkp_get_tmp_reg (assign);
2609 stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
2610 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2612 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2613 chkp_mark_invalid_bounds (bounds);
2616 break;
2618 case MAX_EXPR:
2619 case MIN_EXPR:
2621 tree rhs2 = gimple_assign_rhs2 (assign);
2622 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2623 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2625 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2626 bounds = incomplete_bounds;
2627 else if (bnd1 == bnd2)
2628 bounds = bnd1;
2629 else
2631 gimple stmt;
2632 tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
2633 boolean_type_node, rhs1, rhs2);
2634 bounds = chkp_get_tmp_reg (assign);
2635 stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);
2637 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2639 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2640 chkp_mark_invalid_bounds (bounds);
2643 break;
2645 default:
2646 bounds = chkp_get_zero_bounds ();
2647 warning (0, "pointer bounds were lost due to unexpected expression %s",
2648 get_tree_code_name (rhs_code));
2651 gcc_assert (bounds);
2653 if (node)
2654 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2656 return bounds;
2659 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2661 There are just few statement codes allowed: NOP (for default ssa names),
2662 ASSIGN, CALL, PHI, ASM.
2664 Return computed bounds. */
2665 static tree
2666 chkp_get_bounds_by_definition (tree node, gimple def_stmt,
2667 gphi_iterator *iter)
2669 tree var, bounds;
2670 enum gimple_code code = gimple_code (def_stmt);
2671 gphi *stmt;
2673 if (dump_file && (dump_flags & TDF_DETAILS))
2675 fprintf (dump_file, "Searching for bounds for node: ");
2676 print_generic_expr (dump_file, node, 0);
2678 fprintf (dump_file, " using its definition: ");
2679 print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2682 switch (code)
2684 case GIMPLE_NOP:
2685 var = SSA_NAME_VAR (node);
2686 switch (TREE_CODE (var))
2688 case PARM_DECL:
2689 bounds = chkp_get_bound_for_parm (node);
2690 break;
2692 case VAR_DECL:
2693 /* For uninitialized pointers use none bounds. */
2694 bounds = chkp_get_none_bounds ();
2695 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2696 break;
2698 case RESULT_DECL:
2700 tree base_type;
2702 gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2704 base_type = TREE_TYPE (TREE_TYPE (node));
2706 gcc_assert (TYPE_SIZE (base_type)
2707 && TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2708 && tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2710 bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2711 NULL, false);
2712 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2714 break;
2716 default:
2717 if (dump_file && (dump_flags & TDF_DETAILS))
2719 fprintf (dump_file, "Unexpected var with no definition\n");
2720 print_generic_expr (dump_file, var, 0);
2722 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2723 get_tree_code_name (TREE_CODE (var)));
2725 break;
2727 case GIMPLE_ASSIGN:
2728 bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2729 break;
2731 case GIMPLE_CALL:
2732 bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
2733 break;
2735 case GIMPLE_PHI:
2736 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
2737 if (SSA_NAME_VAR (node))
2738 var = chkp_get_bounds_var (SSA_NAME_VAR (node));
2739 else
2740 var = make_temp_ssa_name (pointer_bounds_type_node,
2741 NULL,
2742 CHKP_BOUND_TMP_NAME);
2743 else
2744 var = chkp_get_tmp_var ();
2745 stmt = create_phi_node (var, gimple_bb (def_stmt));
2746 bounds = gimple_phi_result (stmt);
2747 *iter = gsi_for_phi (stmt);
2749 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2751 /* Created bounds do not have all phi args computed and
2752 therefore we do not know if there is a valid source
2753 of bounds for that node. Therefore we mark bounds
2754 as incomplete and then recompute them when all phi
2755 args are computed. */
2756 chkp_register_incomplete_bounds (bounds, node);
2757 break;
2759 case GIMPLE_ASM:
2760 bounds = chkp_get_zero_bounds ();
2761 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2762 break;
2764 default:
2765 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2766 gimple_code_name[code]);
2769 return bounds;
2772 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2773 tree
2774 chkp_build_make_bounds_call (tree lower_bound, tree size)
2776 tree call = build1 (ADDR_EXPR,
2777 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2778 chkp_bndmk_fndecl);
2779 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2780 call, 2, lower_bound, size);
2783 /* Create static bounds var of specfified OBJ which is
2784 is either VAR_DECL or string constant. */
2785 static tree
2786 chkp_make_static_bounds (tree obj)
2788 static int string_id = 1;
2789 static int var_id = 1;
2790 tree *slot;
2791 const char *var_name;
2792 char *bnd_var_name;
2793 tree bnd_var;
2795 /* First check if we already have required var. */
2796 if (chkp_static_var_bounds)
2798 /* For vars we use assembler name as a key in
2799 chkp_static_var_bounds map. It allows to
2800 avoid duplicating bound vars for decls
2801 sharing assembler name. */
2802 if (TREE_CODE (obj) == VAR_DECL)
2804 tree name = DECL_ASSEMBLER_NAME (obj);
2805 slot = chkp_static_var_bounds->get (name);
2806 if (slot)
2807 return *slot;
2809 else
2811 slot = chkp_static_var_bounds->get (obj);
2812 if (slot)
2813 return *slot;
2817 /* Build decl for bounds var. */
2818 if (TREE_CODE (obj) == VAR_DECL)
2820 if (DECL_IGNORED_P (obj))
2822 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2823 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2825 else
2827 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2829 /* For hidden symbols we want to skip first '*' char. */
2830 if (*var_name == '*')
2831 var_name++;
2833 bnd_var_name = (char *) xmalloc (strlen (var_name)
2834 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2835 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2836 strcat (bnd_var_name, var_name);
2839 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2840 get_identifier (bnd_var_name),
2841 pointer_bounds_type_node);
2843 /* Address of the obj will be used as lower bound. */
2844 TREE_ADDRESSABLE (obj) = 1;
2846 else
2848 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2849 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2851 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2852 get_identifier (bnd_var_name),
2853 pointer_bounds_type_node);
2856 TREE_PUBLIC (bnd_var) = 0;
2857 TREE_USED (bnd_var) = 1;
2858 TREE_READONLY (bnd_var) = 0;
2859 TREE_STATIC (bnd_var) = 1;
2860 TREE_ADDRESSABLE (bnd_var) = 0;
2861 DECL_ARTIFICIAL (bnd_var) = 1;
2862 DECL_COMMON (bnd_var) = 1;
2863 DECL_COMDAT (bnd_var) = 1;
2864 DECL_READ_P (bnd_var) = 1;
2865 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2866 /* Force output similar to constant bounds.
2867 See chkp_make_static_const_bounds. */
2868 varpool_node::get_create (bnd_var)->force_output = 1;
2869 /* Mark symbol as requiring bounds initialization. */
2870 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2871 varpool_node::finalize_decl (bnd_var);
2873 /* Add created var to the map to use it for other references
2874 to obj. */
2875 if (!chkp_static_var_bounds)
2876 chkp_static_var_bounds = new hash_map<tree, tree>;
2878 if (TREE_CODE (obj) == VAR_DECL)
2880 tree name = DECL_ASSEMBLER_NAME (obj);
2881 chkp_static_var_bounds->put (name, bnd_var);
2883 else
2884 chkp_static_var_bounds->put (obj, bnd_var);
2886 return bnd_var;
2889 /* When var has incomplete type we cannot get size to
2890 compute its bounds. In such cases we use checker
2891 builtin call which determines object size at runtime. */
2892 static tree
2893 chkp_generate_extern_var_bounds (tree var)
2895 tree bounds, size_reloc, lb, size, max_size, cond;
2896 gimple_stmt_iterator gsi;
2897 gimple_seq seq = NULL;
2898 gimple stmt;
2900 /* If instrumentation is not enabled for vars having
2901 incomplete type then just return zero bounds to avoid
2902 checks for this var. */
2903 if (!flag_chkp_incomplete_type)
2904 return chkp_get_zero_bounds ();
2906 if (dump_file && (dump_flags & TDF_DETAILS))
2908 fprintf (dump_file, "Generating bounds for extern symbol '");
2909 print_generic_expr (dump_file, var, 0);
2910 fprintf (dump_file, "'\n");
2913 stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
2915 size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
2916 gimple_call_set_lhs (stmt, size_reloc);
2918 gimple_seq_add_stmt (&seq, stmt);
2920 lb = chkp_build_addr_expr (var);
2921 size = make_ssa_name (chkp_get_size_tmp_var ());
2923 if (flag_chkp_zero_dynamic_size_as_infinite)
2925 /* We should check that size relocation was resolved.
2926 If it was not then use maximum possible size for the var. */
2927 max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
2928 fold_convert (chkp_uintptr_type, lb));
2929 max_size = chkp_force_gimple_call_op (max_size, &seq);
2931 cond = build2 (NE_EXPR, boolean_type_node,
2932 size_reloc, integer_zero_node);
2933 stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
2934 gimple_seq_add_stmt (&seq, stmt);
2936 else
2938 stmt = gimple_build_assign (size, size_reloc);
2939 gimple_seq_add_stmt (&seq, stmt);
2942 gsi = gsi_start_bb (chkp_get_entry_block ());
2943 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2945 bounds = chkp_make_bounds (lb, size, &gsi, true);
2947 return bounds;
2950 /* Return 1 if TYPE has fields with zero size or fields
2951 marked with chkp_variable_size attribute. */
2952 bool
2953 chkp_variable_size_type (tree type)
2955 bool res = false;
2956 tree field;
2958 if (RECORD_OR_UNION_TYPE_P (type))
2959 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2961 if (TREE_CODE (field) == FIELD_DECL)
2962 res = res
2963 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
2964 || chkp_variable_size_type (TREE_TYPE (field));
2966 else
2967 res = !TYPE_SIZE (type)
2968 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
2969 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
2971 return res;
2974 /* Compute and return bounds for address of DECL which is
2975 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
2976 static tree
2977 chkp_get_bounds_for_decl_addr (tree decl)
2979 tree bounds;
2981 gcc_assert (TREE_CODE (decl) == VAR_DECL
2982 || TREE_CODE (decl) == PARM_DECL
2983 || TREE_CODE (decl) == RESULT_DECL);
2985 bounds = chkp_get_registered_addr_bounds (decl);
2987 if (bounds)
2988 return bounds;
2990 if (dump_file && (dump_flags & TDF_DETAILS))
2992 fprintf (dump_file, "Building bounds for address of decl ");
2993 print_generic_expr (dump_file, decl, 0);
2994 fprintf (dump_file, "\n");
2997 /* Use zero bounds if size is unknown and checks for
2998 unknown sizes are restricted. */
2999 if ((!DECL_SIZE (decl)
3000 || (chkp_variable_size_type (TREE_TYPE (decl))
3001 && (TREE_STATIC (decl)
3002 || DECL_EXTERNAL (decl)
3003 || TREE_PUBLIC (decl))))
3004 && !flag_chkp_incomplete_type)
3005 return chkp_get_zero_bounds ();
3007 if (flag_chkp_use_static_bounds
3008 && TREE_CODE (decl) == VAR_DECL
3009 && (TREE_STATIC (decl)
3010 || DECL_EXTERNAL (decl)
3011 || TREE_PUBLIC (decl))
3012 && !DECL_THREAD_LOCAL_P (decl))
3014 tree bnd_var = chkp_make_static_bounds (decl);
3015 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3016 gimple stmt;
3018 bounds = chkp_get_tmp_reg (NULL);
3019 stmt = gimple_build_assign (bounds, bnd_var);
3020 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3022 else if (!DECL_SIZE (decl)
3023 || (chkp_variable_size_type (TREE_TYPE (decl))
3024 && (TREE_STATIC (decl)
3025 || DECL_EXTERNAL (decl)
3026 || TREE_PUBLIC (decl))))
3028 gcc_assert (TREE_CODE (decl) == VAR_DECL);
3029 bounds = chkp_generate_extern_var_bounds (decl);
3031 else
3033 tree lb = chkp_build_addr_expr (decl);
3034 bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3037 return bounds;
3040 /* Compute and return bounds for constant string. */
3041 static tree
3042 chkp_get_bounds_for_string_cst (tree cst)
3044 tree bounds;
3045 tree lb;
3046 tree size;
3048 gcc_assert (TREE_CODE (cst) == STRING_CST);
3050 bounds = chkp_get_registered_bounds (cst);
3052 if (bounds)
3053 return bounds;
3055 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3056 || flag_chkp_use_static_const_bounds > 0)
3058 tree bnd_var = chkp_make_static_bounds (cst);
3059 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3060 gimple stmt;
3062 bounds = chkp_get_tmp_reg (NULL);
3063 stmt = gimple_build_assign (bounds, bnd_var);
3064 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3066 else
3068 lb = chkp_build_addr_expr (cst);
3069 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3070 bounds = chkp_make_bounds (lb, size, NULL, false);
3073 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3075 return bounds;
3078 /* Generate code to instersect bounds BOUNDS1 and BOUNDS2 and
3079 return the result. if ITER is not NULL then Code is inserted
3080 before position pointed by ITER. Otherwise code is added to
3081 entry block. */
3082 static tree
3083 chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
3085 if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
3086 return bounds2 ? bounds2 : bounds1;
3087 else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
3088 return bounds1;
3089 else
3091 gimple_seq seq;
3092 gimple stmt;
3093 tree bounds;
3095 seq = NULL;
3097 stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
3098 chkp_mark_stmt (stmt);
3100 bounds = chkp_get_tmp_reg (stmt);
3101 gimple_call_set_lhs (stmt, bounds);
3103 gimple_seq_add_stmt (&seq, stmt);
3105 /* We are probably doing narrowing for constant expression.
3106 In such case iter may be undefined. */
3107 if (!iter)
3109 gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
3110 iter = &gsi;
3111 gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
3113 else
3114 gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
3116 if (dump_file && (dump_flags & TDF_DETAILS))
3118 fprintf (dump_file, "Bounds intersection: ");
3119 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
3120 fprintf (dump_file, " inserted before statement: ");
3121 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
3122 TDF_VOPS|TDF_MEMSYMS);
3125 return bounds;
3129 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3130 and 0 othersize. */
3131 static bool
3132 chkp_may_narrow_to_field (tree field)
3134 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3135 && tree_to_uhwi (DECL_SIZE (field)) != 0
3136 && (!DECL_FIELD_OFFSET (field)
3137 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3138 && (!DECL_FIELD_BIT_OFFSET (field)
3139 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3140 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3141 && !chkp_variable_size_type (TREE_TYPE (field));
3144 /* Return 1 if bounds for FIELD should be narrowed to
3145 field's own size. */
3146 static bool
3147 chkp_narrow_bounds_for_field (tree field)
3149 HOST_WIDE_INT offs;
3150 HOST_WIDE_INT bit_offs;
3152 if (!chkp_may_narrow_to_field (field))
3153 return false;
3155 /* Accesse to compiler generated fields should not cause
3156 bounds narrowing. */
3157 if (DECL_ARTIFICIAL (field))
3158 return false;
3160 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3161 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3163 return (flag_chkp_narrow_bounds
3164 && (flag_chkp_first_field_has_own_bounds
3165 || offs
3166 || bit_offs));
3169 /* Perform narrowing for BOUNDS using bounds computed for field
3170 access COMPONENT. ITER meaning is the same as for
3171 chkp_intersect_bounds. */
3172 static tree
3173 chkp_narrow_bounds_to_field (tree bounds, tree component,
3174 gimple_stmt_iterator *iter)
3176 tree field = TREE_OPERAND (component, 1);
3177 tree size = DECL_SIZE_UNIT (field);
3178 tree field_ptr = chkp_build_addr_expr (component);
3179 tree field_bounds;
3181 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3183 return chkp_intersect_bounds (field_bounds, bounds, iter);
3186 /* Parse field or array access NODE.
3188 PTR ouput parameter holds a pointer to the outermost
3189 object.
3191 BITFIELD output parameter is set to 1 if bitfield is
3192 accessed and to 0 otherwise. If it is 1 then ELT holds
3193 outer component for accessed bit field.
3195 SAFE outer parameter is set to 1 if access is safe and
3196 checks are not required.
3198 BOUNDS outer parameter holds bounds to be used to check
3199 access (may be NULL).
3201 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3202 innermost accessed component. */
3203 static void
3204 chkp_parse_array_and_component_ref (tree node, tree *ptr,
3205 tree *elt, bool *safe,
3206 bool *bitfield,
3207 tree *bounds,
3208 gimple_stmt_iterator *iter,
3209 bool innermost_bounds)
3211 tree comp_to_narrow = NULL_TREE;
3212 tree last_comp = NULL_TREE;
3213 bool array_ref_found = false;
3214 tree *nodes;
3215 tree var;
3216 int len;
3217 int i;
3219 /* Compute tree height for expression. */
3220 var = node;
3221 len = 1;
3222 while (TREE_CODE (var) == COMPONENT_REF
3223 || TREE_CODE (var) == ARRAY_REF
3224 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
3226 var = TREE_OPERAND (var, 0);
3227 len++;
3230 gcc_assert (len > 1);
3232 /* It is more convenient for us to scan left-to-right,
3233 so walk tree again and put all node to nodes vector
3234 in reversed order. */
3235 nodes = XALLOCAVEC (tree, len);
3236 nodes[len - 1] = node;
3237 for (i = len - 2; i >= 0; i--)
3238 nodes[i] = TREE_OPERAND (nodes[i + 1], 0);
3240 if (bounds)
3241 *bounds = NULL;
3242 *safe = true;
3243 *bitfield = (TREE_CODE (node) == COMPONENT_REF
3244 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
3245 /* To get bitfield address we will need outer elemnt. */
3246 if (*bitfield)
3247 *elt = nodes[len - 2];
3248 else
3249 *elt = NULL_TREE;
3251 /* If we have indirection in expression then compute
3252 outermost structure bounds. Computed bounds may be
3253 narrowed later. */
3254 if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
3256 *safe = false;
3257 *ptr = TREE_OPERAND (nodes[0], 0);
3258 if (bounds)
3259 *bounds = chkp_find_bounds (*ptr, iter);
3261 else
3263 gcc_assert (TREE_CODE (var) == VAR_DECL
3264 || TREE_CODE (var) == PARM_DECL
3265 || TREE_CODE (var) == RESULT_DECL
3266 || TREE_CODE (var) == STRING_CST
3267 || TREE_CODE (var) == SSA_NAME);
3269 *ptr = chkp_build_addr_expr (var);
3272 /* In this loop we are trying to find a field access
3273 requiring narrowing. There are two simple rules
3274 for search:
3275 1. Leftmost array_ref is chosen if any.
3276 2. Rightmost suitable component_ref is chosen if innermost
3277 bounds are required and no array_ref exists. */
3278 for (i = 1; i < len; i++)
3280 var = nodes[i];
3282 if (TREE_CODE (var) == ARRAY_REF)
3284 *safe = false;
3285 array_ref_found = true;
3286 if (flag_chkp_narrow_bounds
3287 && !flag_chkp_narrow_to_innermost_arrray
3288 && (!last_comp
3289 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
3291 comp_to_narrow = last_comp;
3292 break;
3295 else if (TREE_CODE (var) == COMPONENT_REF)
3297 tree field = TREE_OPERAND (var, 1);
3299 if (innermost_bounds
3300 && !array_ref_found
3301 && chkp_narrow_bounds_for_field (field))
3302 comp_to_narrow = var;
3303 last_comp = var;
3305 if (flag_chkp_narrow_bounds
3306 && flag_chkp_narrow_to_innermost_arrray
3307 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
3309 if (bounds)
3310 *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
3311 comp_to_narrow = NULL;
3314 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
3315 /* Nothing to do for it. */
3317 else
3318 gcc_unreachable ();
3321 if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
3322 *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);
3324 if (innermost_bounds && bounds && !*bounds)
3325 *bounds = chkp_find_bounds (*ptr, iter);
3328 /* Compute and return bounds for the address of OBJ.  Statements needed
     to materialize the bounds are inserted at position ITER.  The result
     is cached via chkp_register_addr_bounds before returning.  */
3329 static tree
3330 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
3332 tree bounds = chkp_get_registered_addr_bounds (obj);
     /* Reuse bounds previously registered for this address, if any.  */
3334 if (bounds)
3335 return bounds;
3337 switch (TREE_CODE (obj))
     /* Address of a declaration: use the bounds of the decl's storage.  */
3339 case VAR_DECL:
3340 case PARM_DECL:
3341 case RESULT_DECL:
3342 bounds = chkp_get_bounds_for_decl_addr (obj);
3343 break;
     /* Address of a string literal: bounds of the literal's storage.  */
3345 case STRING_CST:
3346 bounds = chkp_get_bounds_for_string_cst (obj);
3347 break;
3349 case ARRAY_REF:
3350 case COMPONENT_REF:
3352 tree elt;
3353 tree ptr;
3354 bool safe;
3355 bool bitfield;
     /* Parse the reference chain; the trailing TRUE argument requests
        innermost bounds, so BOUNDS is guaranteed to be produced.  */
3357 chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
3358 &bitfield, &bounds, iter, true);
3360 gcc_assert (bounds);
3362 break;
     /* Functions and labels get zero bounds.  */
3364 case FUNCTION_DECL:
3365 case LABEL_DECL:
3366 bounds = chkp_get_zero_bounds ();
3367 break;
     /* &*p: the bounds are those of the dereferenced pointer P.  */
3369 case MEM_REF:
3370 bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
3371 break;
     /* Real/imaginary part selection does not change the addressed
        object; recurse on the underlying complex value.  */
3373 case REALPART_EXPR:
3374 case IMAGPART_EXPR:
3375 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
3376 break;
     /* Any other tree code here is a pass invariant violation.  */
3378 default:
3379 if (dump_file && (dump_flags & TDF_DETAILS))
3381 fprintf (dump_file, "chkp_make_addressed_object_bounds: "
3382 "unexpected object of type %s\n",
3383 get_tree_code_name (TREE_CODE (obj)));
3384 print_node (dump_file, "", obj, 0);
3386 internal_error ("chkp_make_addressed_object_bounds: "
3387 "Unexpected tree code %s",
3388 get_tree_code_name (TREE_CODE (obj)));
     /* Cache the result so repeated queries for OBJ are cheap.  */
3391 chkp_register_addr_bounds (obj, bounds);
3393 return bounds;
3396 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3397 to compute bounds if required. Computed bounds should be available at
3398 position pointed by ITER.
3400 If PTR_SRC is NULL_TREE then pointer definition is identified.
3402 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3403 PTR. If PTR is a any memory reference then ITER points to a statement
3404 after which bndldx will be inserterd. In both cases ITER will be updated
3405 to point to the inserted bndldx statement. */
3407 static tree
3408 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3410 tree addr = NULL_TREE;
3411 tree bounds = NULL_TREE;
3413 if (!ptr_src)
3414 ptr_src = ptr;
3416 bounds = chkp_get_registered_bounds (ptr_src);
3418 if (bounds)
3419 return bounds;
3421 switch (TREE_CODE (ptr_src))
3423 case MEM_REF:
3424 case VAR_DECL:
3425 if (BOUNDED_P (ptr_src))
3426 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3427 bounds = chkp_get_zero_bounds ();
3428 else
3430 addr = chkp_build_addr_expr (ptr_src);
3431 bounds = chkp_build_bndldx (addr, ptr, iter);
3433 else
3434 bounds = chkp_get_nonpointer_load_bounds ();
3435 break;
3437 case ARRAY_REF:
3438 case COMPONENT_REF:
3439 addr = get_base_address (ptr_src);
3440 if (DECL_P (addr)
3441 || TREE_CODE (addr) == MEM_REF
3442 || TREE_CODE (addr) == TARGET_MEM_REF)
3444 if (BOUNDED_P (ptr_src))
3445 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3446 bounds = chkp_get_zero_bounds ();
3447 else
3449 addr = chkp_build_addr_expr (ptr_src);
3450 bounds = chkp_build_bndldx (addr, ptr, iter);
3452 else
3453 bounds = chkp_get_nonpointer_load_bounds ();
3455 else
3457 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3458 bounds = chkp_find_bounds (addr, iter);
3460 break;
3462 case PARM_DECL:
3463 gcc_unreachable ();
3464 bounds = chkp_get_bound_for_parm (ptr_src);
3465 break;
3467 case TARGET_MEM_REF:
3468 addr = chkp_build_addr_expr (ptr_src);
3469 bounds = chkp_build_bndldx (addr, ptr, iter);
3470 break;
3472 case SSA_NAME:
3473 bounds = chkp_get_registered_bounds (ptr_src);
3474 if (!bounds)
3476 gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3477 gphi_iterator phi_iter;
3479 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3481 gcc_assert (bounds);
3483 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3485 unsigned i;
3487 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3489 tree arg = gimple_phi_arg_def (def_phi, i);
3490 tree arg_bnd;
3491 gphi *phi_bnd;
3493 arg_bnd = chkp_find_bounds (arg, NULL);
3495 /* chkp_get_bounds_by_definition created new phi
3496 statement and phi_iter points to it.
3498 Previous call to chkp_find_bounds could create
3499 new basic block and therefore change phi statement
3500 phi_iter points to. */
3501 phi_bnd = phi_iter.phi ();
3503 add_phi_arg (phi_bnd, arg_bnd,
3504 gimple_phi_arg_edge (def_phi, i),
3505 UNKNOWN_LOCATION);
3508 /* If all bound phi nodes have their arg computed
3509 then we may finish its computation. See
3510 chkp_finish_incomplete_bounds for more details. */
3511 if (chkp_may_finish_incomplete_bounds ())
3512 chkp_finish_incomplete_bounds ();
3515 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3516 || chkp_incomplete_bounds (bounds));
3518 break;
3520 case ADDR_EXPR:
3521 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3522 break;
3524 case INTEGER_CST:
3525 if (integer_zerop (ptr_src))
3526 bounds = chkp_get_none_bounds ();
3527 else
3528 bounds = chkp_get_invalid_op_bounds ();
3529 break;
3531 default:
3532 if (dump_file && (dump_flags & TDF_DETAILS))
3534 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3535 get_tree_code_name (TREE_CODE (ptr_src)));
3536 print_node (dump_file, "", ptr_src, 0);
3538 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3539 get_tree_code_name (TREE_CODE (ptr_src)));
3542 if (!bounds)
3544 if (dump_file && (dump_flags & TDF_DETAILS))
3546 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3547 print_node (dump_file, "", ptr_src, 0);
3549 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3552 return bounds;
3555 /* Normal case for bounds search without forced narrowing. */
3556 static tree
3557 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3559 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3562 /* Search bounds for pointer PTR loaded from PTR_SRC
3563 by statement *ITER points to. */
3564 static tree
3565 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3567 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3570 /* Helper function which checks the type of RHS and finds all pointers in
3571 it. For each found pointer we build its accesses in LHS and RHS
3572 objects and then call HANDLER for them. Function is used to copy
3573 or initialize bounds for a copied object.  ARG is passed through
     to HANDLER unchanged.  */
3574 static void
3575 chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
3576 assign_handler handler)
3578 tree type = TREE_TYPE (lhs);
3580 /* We have nothing to do with clobbers. */
3581 if (TREE_CLOBBER_P (rhs))
3582 return;
     /* LHS is itself a pointer: hand the assignment to the handler.  */
3584 if (BOUNDED_TYPE_P (type))
3585 handler (lhs, rhs, arg);
3586 else if (RECORD_OR_UNION_TYPE_P (type))
3588 tree field;
     /* For a CONSTRUCTOR walk its elements; otherwise recurse into
        every pointer-holding field of the aggregate.  */
3590 if (TREE_CODE (rhs) == CONSTRUCTOR)
3592 unsigned HOST_WIDE_INT cnt;
3593 tree val;
3595 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
3597 if (chkp_type_has_pointer (TREE_TYPE (field)))
3599 tree lhs_field = chkp_build_component_ref (lhs, field);
3600 chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
3604 else
3605 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3606 if (TREE_CODE (field) == FIELD_DECL
3607 && chkp_type_has_pointer (TREE_TYPE (field)))
3609 tree rhs_field = chkp_build_component_ref (rhs, field);
3610 tree lhs_field = chkp_build_component_ref (lhs, field);
3611 chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
3614 else if (TREE_CODE (type) == ARRAY_TYPE)
3616 unsigned HOST_WIDE_INT cur = 0;
3617 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3618 tree etype = TREE_TYPE (type);
3619 tree esize = TYPE_SIZE (etype);
3621 if (TREE_CODE (rhs) == CONSTRUCTOR)
3623 unsigned HOST_WIDE_INT cnt;
3624 tree purp, val, lhs_elem;
3626 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
     /* A RANGE_EXPR purpose initializes a run of elements with the
        same value; recurse once per covered index.  */
3628 if (purp && TREE_CODE (purp) == RANGE_EXPR)
3630 tree lo_index = TREE_OPERAND (purp, 0);
3631 tree hi_index = TREE_OPERAND (purp, 1);
3633 for (cur = (unsigned)tree_to_uhwi (lo_index);
3634 cur <= (unsigned)tree_to_uhwi (hi_index);
3635 cur++)
3637 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3638 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3641 else
     /* An INTEGER_CST purpose repositions CUR; without a purpose the
        element follows the previous one.  */
3643 if (purp)
3645 gcc_assert (TREE_CODE (purp) == INTEGER_CST);
3646 cur = tree_to_uhwi (purp);
3649 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
3651 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3655 /* Copy array only when size is known. */
3656 else if (maxval && !integer_minus_onep (maxval))
3657 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
3659 tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3660 tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
3661 chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
3664 else
3665 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3666 get_tree_code_name (TREE_CODE (type)));
3669 /* Add code to copy bounds for assignment of RHS to LHS.
3670 ARG is an iterator pointing ne code position. */
3671 static void
3672 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3674 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3675 tree bounds = chkp_find_bounds (rhs, iter);
3676 tree addr = chkp_build_addr_expr(lhs);
3678 chkp_build_bndstx (addr, rhs, bounds, iter);
3681 /* Emit static bound initializers and size vars. */
3682 void
3683 chkp_finish_file (void)
3685 struct varpool_node *node;
3686 struct chkp_ctor_stmt_list stmts;
3688 if (seen_error ())
3689 return;
3691 /* Iterate through varpool and generate bounds initialization
3692 constructors for all statically initialized pointers. */
3693 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3694 stmts.stmts = NULL;
3695 FOR_EACH_VARIABLE (node)
3696 /* Check that var is actually emitted and we need and may initialize
3697 its bounds. */
3698 if (node->need_bounds_init
3699 && !POINTER_BOUNDS_P (node->decl)
3700 && DECL_RTL (node->decl)
3701 && MEM_P (DECL_RTL (node->decl))
3702 && TREE_ASM_WRITTEN (node->decl))
3704 chkp_walk_pointer_assignments (node->decl,
3705 DECL_INITIAL (node->decl),
3706 &stmts,
3707 chkp_add_modification_to_stmt_list)
     /* Flush a full batch into its own static constructor so each
        generated ctor stays below MAX_STMTS_IN_STATIC_CHKP_CTOR.  */
3709 if (stmts.avail <= 0)
3711 cgraph_build_static_cdtor ('P', stmts.stmts,
3712 MAX_RESERVED_INIT_PRIORITY + 3);
3713 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3714 stmts.stmts = NULL;
     /* Emit the final, partially filled batch.  */
3718 if (stmts.stmts)
3719 cgraph_build_static_cdtor ('P', stmts.stmts,
3720 MAX_RESERVED_INIT_PRIORITY + 3);
3722 /* Iterate through varpool and generate bounds initialization
3723 constructors for all static bounds vars. */
3724 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3725 stmts.stmts = NULL;
3726 FOR_EACH_VARIABLE (node)
3727 if (node->need_bounds_init
3728 && POINTER_BOUNDS_P (node->decl)
3729 && TREE_ASM_WRITTEN (node->decl))
3731 tree bnd = node->decl;
3732 tree var;
     /* A static bounds var is initialized with the address of the
        variable whose bounds it holds.  */
3734 gcc_assert (DECL_INITIAL (bnd)
3735 && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);
3737 var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
3738 chkp_output_static_bounds (bnd, var, &stmts);
3741 if (stmts.stmts)
3742 cgraph_build_static_cdtor ('B', stmts.stmts,
3743 MAX_RESERVED_INIT_PRIORITY + 2);
     /* These maps are no longer needed once initializers are emitted.  */
3745 delete chkp_static_var_bounds;
3746 delete chkp_bounds_map;
3749 /* An instrumentation function which is called for each statement
3750 having memory access we want to instrument. It inserts check
3751 code and bounds copy code.
3753 ITER points to statement to instrument.
3755 NODE holds memory access in statement to check.
3757 LOC holds the location information for statement.
3759 DIRFLAGS determines whether access is read or write.
3761 ACCESS_OFFS should be added to address used in NODE
3762 before check.
3764 ACCESS_SIZE holds size of checked access.
3766 SAFE indicates if NODE access is safe and should not be
3767 checked. */
3768 static void
3769 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3770 location_t loc, tree dirflag,
3771 tree access_offs, tree access_size,
3772 bool safe)
3774 tree node_type = TREE_TYPE (node);
3775 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3776 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3777 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3778 tree ptr = NULL_TREE; /* a pointer used for dereference */
3779 tree bounds = NULL_TREE;
3781 /* We do not need instrumentation for clobbers. */
3782 if (dirflag == integer_one_node
3783 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3784 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3785 return;
3787 switch (TREE_CODE (node))
3789 case ARRAY_REF:
3790 case COMPONENT_REF:
3792 bool bitfield;
3793 tree elt;
3795 if (safe)
3797 /* We are not going to generate any checks, so do not
3798 generate bounds as well. */
3799 addr_first = chkp_build_addr_expr (node);
3800 break;
3803 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3804 &bitfield, &bounds, iter, false);
3806 /* Break if there is no dereference and operation is safe. */
3808 if (bitfield)
3810 tree field = TREE_OPERAND (node, 1);
3812 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3813 size = DECL_SIZE_UNIT (field);
3815 if (elt)
3816 elt = chkp_build_addr_expr (elt);
3817 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3818 addr_first = fold_build_pointer_plus_loc (loc,
3819 addr_first,
3820 byte_position (field));
3822 else
3823 addr_first = chkp_build_addr_expr (node);
3825 break;
3827 case INDIRECT_REF:
3828 ptr = TREE_OPERAND (node, 0);
3829 addr_first = ptr;
3830 break;
3832 case MEM_REF:
3833 ptr = TREE_OPERAND (node, 0);
3834 addr_first = chkp_build_addr_expr (node);
3835 break;
3837 case TARGET_MEM_REF:
3838 ptr = TMR_BASE (node);
3839 addr_first = chkp_build_addr_expr (node);
3840 break;
3842 case ARRAY_RANGE_REF:
3843 printf("ARRAY_RANGE_REF\n");
3844 debug_gimple_stmt(gsi_stmt(*iter));
3845 debug_tree(node);
3846 gcc_unreachable ();
3847 break;
3849 case BIT_FIELD_REF:
3851 tree offs, rem, bpu;
3853 gcc_assert (!access_offs);
3854 gcc_assert (!access_size);
3856 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3857 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3858 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3859 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3861 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3862 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3863 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3864 size = fold_convert (size_type_node, size);
3866 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3867 dirflag, offs, size, safe);
3868 return;
3870 break;
3872 case VAR_DECL:
3873 case RESULT_DECL:
3874 case PARM_DECL:
3875 if (dirflag != integer_one_node
3876 || DECL_REGISTER (node))
3877 return;
3879 safe = true;
3880 addr_first = chkp_build_addr_expr (node);
3881 break;
3883 default:
3884 return;
3887 /* If addr_last was not computed then use (addr_first + size - 1)
3888 expression to compute it. */
3889 if (!addr_last)
3891 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3892 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3895 /* Shift both first_addr and last_addr by access_offs if specified. */
3896 if (access_offs)
3898 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3899 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3902 /* Generate bndcl/bndcu checks if memory access is not safe. */
3903 if (!safe)
3905 gimple_stmt_iterator stmt_iter = *iter;
3907 if (!bounds)
3908 bounds = chkp_find_bounds (ptr, iter);
3910 chkp_check_mem_access (addr_first, addr_last, bounds,
3911 stmt_iter, loc, dirflag);
3914 /* We need to store bounds in case pointer is stored. */
3915 if (dirflag == integer_one_node
3916 && chkp_type_has_pointer (node_type)
3917 && flag_chkp_store_bounds)
3919 gimple stmt = gsi_stmt (*iter);
3920 tree rhs1 = gimple_assign_rhs1 (stmt);
3921 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3923 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3924 chkp_walk_pointer_assignments (node, rhs1, iter,
3925 chkp_copy_bounds_for_elem);
3926 else
3928 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3929 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
3934 /* Add code to copy bounds for all pointers copied
3935 in ASSIGN created during inline of EDGE. */
3936 void
3937 chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
3939 tree lhs = gimple_assign_lhs (assign);
3940 tree rhs = gimple_assign_rhs1 (assign);
3941 gimple_stmt_iterator iter = gsi_for_stmt (assign);
3943 if (!flag_chkp_store_bounds)
3944 return;
3946 chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);
3948 /* We should create edges for all created calls to bndldx and bndstx. */
     /* Scan from the current iterator position back to ASSIGN so every
        statement inserted by the walk is visited exactly once.  */
3949 while (gsi_stmt (iter) != assign)
3951 gimple stmt = gsi_stmt (iter);
3952 if (gimple_code (stmt) == GIMPLE_CALL)
3954 tree fndecl = gimple_call_fndecl (stmt);
3955 struct cgraph_node *callee = cgraph_node::get_create (fndecl);
3956 struct cgraph_edge *new_edge;
     /* Only chkp runtime helpers may have been inserted here.  */
3958 gcc_assert (fndecl == chkp_bndstx_fndecl
3959 || fndecl == chkp_bndldx_fndecl
3960 || fndecl == chkp_ret_bnd_fndecl);
     /* Clone the inlined edge's profile data for the new call.  */
3962 new_edge = edge->caller->create_edge (callee,
3963 as_a <gcall *> (stmt),
3964 edge->count,
3965 edge->frequency);
3966 new_edge->frequency = compute_call_stmt_bb_frequency
3967 (edge->caller->decl, gimple_bb (stmt));
3969 gsi_prev (&iter);
3973 /* Some code transformation made during instrumentation pass
3974 may put code into inconsistent state. Here we find and fix
3975 such flaws. */
3976 void
3977 chkp_fix_cfg ()
3979 basic_block bb;
3980 gimple_stmt_iterator i;
3982 /* We could insert some code right after stmt which ends bb.
3983 We wanted to put this code on fallthru edge but did not
3984 add new edges from the beginning because it may cause new
3985 phi node creation which may be incorrect due to incomplete
3986 bound phi nodes. */
3987 FOR_ALL_BB_FN (bb, cfun)
3988 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
3990 gimple stmt = gsi_stmt (i);
3991 gimple_stmt_iterator next = i;
3993 gsi_next (&next);
     /* Statements after a block-ending statement are misplaced; move
        them onto the fallthru edge.  */
3995 if (stmt_ends_bb_p (stmt)
3996 && !gsi_end_p (next))
3998 edge fall = find_fallthru_edge (bb->succs);
3999 basic_block dest = NULL;
4000 int flags = 0;
4002 gcc_assert (fall);
4004 /* We cannot split abnormal edge. Therefore we
4005 store its params, make it regular and then
4006 rebuild abnormal edge after split. */
4007 if (fall->flags & EDGE_ABNORMAL)
4009 flags = fall->flags & ~EDGE_FALLTHRU;
4010 dest = fall->dest;
4012 fall->flags &= ~EDGE_COMPLEX;
     /* Move every trailing statement onto the (now regular) edge.  */
4015 while (!gsi_end_p (next))
4017 gimple next_stmt = gsi_stmt (next);
4018 gsi_remove (&next, false);
4019 gsi_insert_on_edge (fall, next_stmt);
4022 gsi_commit_edge_inserts ();
4024 /* Re-create abnormal edge. */
4025 if (dest)
4026 make_edge (bb, dest, flags);
4031 /* Walker callback for chkp_replace_function_pointers. Replaces
4032 function pointer in the specified operand with pointer to the
4033 instrumented function version.  Returns NULL_TREE so the walk
     always continues.  */
4034 static tree
4035 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4036 void *data ATTRIBUTE_UNUSED)
     /* Skip decls marked bnd_legacy; for builtins replace pointers only
        for selected functions and functions having definitions.  */
4038 if (TREE_CODE (*op) == FUNCTION_DECL
4039 && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op))
4040 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4041 /* For builtins we replace pointers only for selected
4042 function and functions having definitions. */
4043 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4044 && (chkp_instrument_normal_builtin (*op)
4045 || gimple_has_body_p (*op)))))
4047 struct cgraph_node *node = cgraph_node::get_create (*op);
4048 struct cgraph_node *clone = NULL;
     /* Do not clone a clone; only replace on uninstrumented decls.  */
4050 if (!node->instrumentation_clone)
4051 clone = chkp_maybe_create_clone (*op);
4053 if (clone)
4054 *op = clone->decl;
     /* The operand is fully handled; do not walk into it.  */
4055 *walk_subtrees = 0;
4058 return NULL;
4061 /* This function searches for function pointers in statement
4062 pointed by GSI and replaces them with pointers to instrumented
4063 function versions. */
4064 static void
4065 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4067 gimple stmt = gsi_stmt (*gsi);
4068 /* For calls we want to walk call args only. */
4069 if (gimple_code (stmt) == GIMPLE_CALL)
4071 unsigned i;
4072 for (i = 0; i < gimple_call_num_args (stmt); i++)
4073 walk_tree (gimple_call_arg_ptr (stmt, i),
4074 chkp_replace_function_pointer, NULL, NULL);
4076 else
4077 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4080 /* This function instruments all statements working with memory,
4081 calls and rets.
4083 It also removes excess statements from static initializers.
     Statements marked via chkp_marked_stmt_p are left untouched.  */
4084 static void
4085 chkp_instrument_function (void)
4087 basic_block bb, next;
4088 gimple_stmt_iterator i;
4089 enum gimple_rhs_class grhs_class;
     /* Inside a chkp constructor every access is known to be safe.  */
4090 bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4092 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
     /* NEXT is latched because instrumentation may split blocks.  */
4095 next = bb->next_bb;
4096 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4098 gimple s = gsi_stmt (i);
4100 /* Skip statement marked to not be instrumented. */
4101 if (chkp_marked_stmt_p (s))
4103 gsi_next (&i);
4104 continue;
4107 chkp_replace_function_pointers (&i);
4109 switch (gimple_code (s))
     /* Instrument the store side (dirflag one) and all loads
        (dirflag zero) of an assignment.  */
4111 case GIMPLE_ASSIGN:
4112 chkp_process_stmt (&i, gimple_assign_lhs (s),
4113 gimple_location (s), integer_one_node,
4114 NULL_TREE, NULL_TREE, safe);
4115 chkp_process_stmt (&i, gimple_assign_rhs1 (s),
4116 gimple_location (s), integer_zero_node,
4117 NULL_TREE, NULL_TREE, safe);
4118 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
4119 if (grhs_class == GIMPLE_BINARY_RHS)
4120 chkp_process_stmt (&i, gimple_assign_rhs2 (s),
4121 gimple_location (s), integer_zero_node,
4122 NULL_TREE, NULL_TREE, safe);
4123 break;
4125 case GIMPLE_RETURN:
4127 greturn *r = as_a <greturn *> (s);
4128 if (gimple_return_retval (r) != NULL_TREE)
4130 chkp_process_stmt (&i, gimple_return_retval (r),
4131 gimple_location (r),
4132 integer_zero_node,
4133 NULL_TREE, NULL_TREE, safe);
4135 /* Additionally we need to add bounds
4136 to return statement. */
4137 chkp_add_bounds_to_ret_stmt (&i);
4140 break;
4142 case GIMPLE_CALL:
4143 chkp_add_bounds_to_call_stmt (&i);
4144 break;
4146 default:
4150 gsi_next (&i);
4152 /* We do not need any actual pointer stores in checker
4153 static initializer. */
4154 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4155 && gimple_code (s) == GIMPLE_ASSIGN
4156 && gimple_store_p (s))
4158 gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4159 gsi_remove (&del_iter, true);
4160 unlink_stmt_vdef (s);
4161 release_defs(s);
4164 bb = next;
4166 while (bb);
4168 /* Some input params may have bounds and be address taken. In this case
4169 we should store incoming bounds into bounds table. */
4170 tree arg;
4171 if (flag_chkp_store_bounds)
4172 for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4173 if (TREE_ADDRESSABLE (arg))
4175 if (BOUNDED_P (arg))
4177 tree bounds = chkp_get_next_bounds_parm (arg);
4178 tree def_ptr = ssa_default_def (cfun, arg);
4179 gimple_stmt_iterator iter
4180 = gsi_start_bb (chkp_get_entry_block ());
4181 chkp_build_bndstx (chkp_build_addr_expr (arg),
4182 def_ptr ? def_ptr : arg,
4183 bounds, &iter);
4185 /* Skip bounds arg. */
4186 arg = TREE_CHAIN (arg);
4188 else if (chkp_type_has_pointer (TREE_TYPE (arg)))
4190 tree orig_arg = arg;
4191 bitmap slots = BITMAP_ALLOC (NULL);
4192 gimple_stmt_iterator iter
4193 = gsi_start_bb (chkp_get_entry_block ());
4194 bitmap_iterator bi;
4195 unsigned bnd_no;
     /* Store bounds for every pointer slot of the aggregate arg.  */
4197 chkp_find_bound_slots (TREE_TYPE (arg), slots);
4199 EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4201 tree bounds = chkp_get_next_bounds_parm (arg);
4202 HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4203 tree addr = chkp_build_addr_expr (orig_arg);
4204 tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4205 build_int_cst (ptr_type_node, offs));
4206 chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4207 bounds, &iter);
4209 arg = DECL_CHAIN (arg);
4211 BITMAP_FREE (slots);
4216 /* Find init/null/copy_ptr_bounds calls and replace them
4217 with assignments. It should allow better code
4218 optimization. */
4220 static void
4221 chkp_remove_useless_builtins ()
4223 basic_block bb;
4224 gimple_stmt_iterator gsi;
4226 FOR_EACH_BB_FN (bb, cfun)
4228 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4230 gimple stmt = gsi_stmt (gsi);
4231 tree fndecl;
4232 enum built_in_function fcode;
4234 /* Find builtins returning first arg and replace
4235 them with assignments. */
4236 if (gimple_code (stmt) == GIMPLE_CALL
4237 && (fndecl = gimple_call_fndecl (stmt))
4238 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4239 && (fcode = DECL_FUNCTION_CODE (fndecl))
4240 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4241 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4242 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4243 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
     /* Each of these builtins returns its first argument unchanged,
        so the call can become a plain copy.  */
4245 tree res = gimple_call_arg (stmt, 0);
4246 update_call_from_tree (&gsi, res);
4247 stmt = gsi_stmt (gsi);
4248 update_stmt (stmt);
4254 /* Initialize pass.  Resets per-function instrumentation state,
     allocates the bounds maps and emits the shared constant bounds
     variables.  */
4255 static void
4256 chkp_init (void)
4258 basic_block bb;
4259 gimple_stmt_iterator i;
4261 in_chkp_pass = true;
     /* Clear instrumentation marks left from a previous run.  */
4263 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4264 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4265 chkp_unmark_stmt (gsi_stmt (i));
4267 chkp_invalid_bounds = new hash_set<tree>;
4268 chkp_completed_bounds_set = new hash_set<tree>;
4269 delete chkp_reg_bounds;
4270 chkp_reg_bounds = new hash_map<tree, tree>;
4271 delete chkp_bound_vars;
4272 chkp_bound_vars = new hash_map<tree, tree>;
4273 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4274 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4275 delete chkp_bounds_map;
4276 chkp_bounds_map = new hash_map<tree, tree>;
4277 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
4279 entry_block = NULL;
4280 zero_bounds = NULL_TREE;
4281 none_bounds = NULL_TREE;
4282 incomplete_bounds = integer_zero_node;
4283 tmp_var = NULL_TREE;
4284 size_tmp_var = NULL_TREE;
4286 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4288 /* We create these constant bounds once for each object file.
4289 These symbols go to comdat section and result in single copy
4290 of each one in the final binary. */
4291 chkp_get_zero_bounds_var ();
4292 chkp_get_none_bounds_var ();
4294 calculate_dominance_info (CDI_DOMINATORS);
4295 calculate_dominance_info (CDI_POST_DOMINATORS);
4297 bitmap_obstack_initialize (NULL);
4301 /* Finalize instrumentation pass.  Releases the per-function state
     allocated in chkp_init.  */
4302 static void
4303 chkp_fini (void)
4304 in_chkp_pass = false;
4306 delete chkp_invalid_bounds;
4307 delete chkp_completed_bounds_set;
4308 delete chkp_reg_addr_bounds;
4309 delete chkp_incomplete_bounds_map;
4311 free_dominance_info (CDI_DOMINATORS);
4312 free_dominance_info (CDI_POST_DOMINATORS);
4314 bitmap_obstack_release (NULL);
     /* Reset cached per-function values.  */
4316 entry_block = NULL;
4317 zero_bounds = NULL_TREE;
4318 none_bounds = NULL_TREE;
4321 /* Main instrumentation pass function. */
4322 static unsigned int
4323 chkp_execute (void)
4325 chkp_init ();
4327 chkp_instrument_function ();
4329 chkp_remove_useless_builtins ();
4331 chkp_function_mark_instrumented (cfun->decl);
4333 chkp_fix_cfg ();
4335 chkp_fini ();
4337 return 0;
4340 /* Instrumentation pass gate. */
4341 static bool
4342 chkp_gate (void)
4344 cgraph_node *node = cgraph_node::get (cfun->decl);
4345 return ((node != NULL
4346 && node->instrumentation_clone)
4347 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
4350 namespace {
     /* Pass metadata for the chkp instrumentation pass.  */
4352 const pass_data pass_data_chkp =
4354 GIMPLE_PASS, /* type */
4355 "chkp", /* name */
4356 OPTGROUP_NONE, /* optinfo_flags */
4357 TV_NONE, /* tv_id */
4358 PROP_ssa | PROP_cfg, /* properties_required */
4359 0, /* properties_provided */
4360 0, /* properties_destroyed */
4361 0, /* todo_flags_start */
4362 TODO_verify_il
4363 | TODO_update_ssa /* todo_flags_finish */
     /* Pass object; gating and execution delegate to chkp_gate and
        chkp_execute above.  */
4366 class pass_chkp : public gimple_opt_pass
4368 public:
4369 pass_chkp (gcc::context *ctxt)
4370 : gimple_opt_pass (pass_data_chkp, ctxt)
4373 /* opt_pass methods: */
4374 virtual opt_pass * clone ()
4376 return new pass_chkp (m_ctxt);
4379 virtual bool gate (function *)
4381 return chkp_gate ();
4384 virtual unsigned int execute (function *)
4386 return chkp_execute ();
4389 }; // class pass_chkp
4391 } // anon namespace
4393 gimple_opt_pass *
4394 make_pass_chkp (gcc::context *ctxt)
4396 return new pass_chkp (ctxt);
4399 #include "gt-tree-chkp.h"