2015-09-29 Steven G. Kargl <kargl@gcc.gnu.org>
[official-gcc.git] / gcc / tree-chkp.c
blob190916d5ce87ba67e6ee139fa0d7965c4a124538
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "alias.h"
25 #include "backend.h"
26 #include "cfghooks.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "rtl.h"
30 #include "ssa.h"
31 #include "options.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "varasm.h"
35 #include "target.h"
36 #include "tree-iterator.h"
37 #include "tree-cfg.h"
38 #include "langhooks.h"
39 #include "tree-pass.h"
40 #include "diagnostic.h"
41 #include "cfgloop.h"
42 #include "tree-ssa-address.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-loop-niter.h"
45 #include "gimple-pretty-print.h"
46 #include "gimple-iterator.h"
47 #include "gimplify.h"
48 #include "gimplify-me.h"
49 #include "print-tree.h"
50 #include "flags.h"
51 #include "insn-config.h"
52 #include "expmed.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "calls.h"
56 #include "emit-rtl.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "tree-ssa-propagate.h"
60 #include "gimple-fold.h"
61 #include "tree-chkp.h"
62 #include "gimple-walk.h"
63 #include "tree-dfa.h"
64 #include "cgraph.h"
65 #include "ipa-chkp.h"
66 #include "params.h"
68 /* Pointer Bounds Checker instruments code with memory checks to find
69 out-of-bounds memory accesses. Checks are performed by computing
70 bounds for each pointer and then comparing address of accessed
71 memory before pointer dereferencing.
73 1. Function clones.
75 See ipa-chkp.c.
77 2. Instrumentation.
79 There are few things to instrument:
81 a) Memory accesses - add checker calls to check address of accessed memory
82 against bounds of dereferenced pointer. Obviously safe memory
83 accesses like static variable access does not have to be instrumented
84 with checks.
86 Example:
88 val_2 = *p_1;
90 with 4 bytes access is transformed into:
92 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
93 D.1_4 = p_1 + 3;
94 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
95 val_2 = *p_1;
97 where __bound_tmp.1_3 are bounds computed for pointer p_1,
98 __builtin___chkp_bndcl is a lower bound check and
99 __builtin___chkp_bndcu is an upper bound check.
101 b) Pointer stores.
103 When pointer is stored in memory we need to store its bounds. To
104 achieve compatibility of instrumented code with regular codes
105 we have to keep data layout and store bounds in special bound tables
106 via special checker call. Implementation of bounds table may vary for
107 different platforms. It has to associate pointer value and its
108 location (it is required because we may have two equal pointers
109 with different bounds stored in different places) with bounds.
110 Another checker builtin allows to get bounds for specified pointer
111 loaded from specified location.
113 Example:
115 buf1[i_1] = &buf2;
117 is transformed into:
119 buf1[i_1] = &buf2;
120 D.1_2 = &buf1[i_1];
121 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
123 where __bound_tmp.1_2 are bounds of &buf2.
125 c) Static initialization.
127 The special case of pointer store is static pointer initialization.
128 Bounds initialization is performed in a few steps:
129 - register all static initializations in front-end using
130 chkp_register_var_initializer
131 - when file compilation finishes we create functions with special
132 attribute 'chkp ctor' and put explicit initialization code
133 (assignments) for all statically initialized pointers.
134 - when checker constructor is compiled checker pass adds required
135 bounds initialization for all statically initialized pointers
136 - since we do not actually need excess pointers initialization
137 in checker constructor we remove such assignments from them
139 d) Calls.
141 For each call in the code we add additional arguments to pass
142 bounds for pointer arguments. We determine type of call arguments
143 using arguments list from function declaration; if function
144 declaration is not available we use function type; otherwise
145 (e.g. for unnamed arguments) we use type of passed value. Function
146 declaration/type is replaced with the instrumented one.
148 Example:
150 val_1 = foo (&buf1, &buf2, &buf1, 0);
152 is translated into:
154 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
155 &buf1, __bound_tmp.1_2, 0);
157 e) Returns.
159 If function returns a pointer value we have to return bounds also.
160 A new operand was added for return statement to hold returned bounds.
162 Example:
164 return &_buf1;
166 is transformed into
168 return &_buf1, __bound_tmp.1_1;
170 3. Bounds computation.
172 Compiler is fully responsible for computing bounds to be used for each
173 memory access. The first step for bounds computation is to find the
174 origin of pointer dereferenced for memory access. Basing on pointer
175 origin we define a way to compute its bounds. There are just few
176 possible cases:
178 a) Pointer is returned by call.
180 In this case we use corresponding checker builtin method to obtain returned
181 bounds.
183 Example:
185 buf_1 = malloc (size_2);
186 foo (buf_1);
188 is translated into:
190 buf_1 = malloc (size_2);
191 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
192 foo (buf_1, __bound_tmp.1_3);
194 b) Pointer is an address of an object.
196 In this case compiler tries to compute objects size and create corresponding
197 bounds. If object has incomplete type then special checker builtin is used to
198 obtain its size at runtime.
200 Example:
202 foo ()
204 <unnamed type> __bound_tmp.3;
205 static int buf[100];
207 <bb 3>:
208 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
210 <bb 2>:
211 return &buf, __bound_tmp.3_2;
214 Example:
216 Address of an object 'extern int buf[]' with incomplete type is
217 returned.
219 foo ()
221 <unnamed type> __bound_tmp.4;
222 long unsigned int __size_tmp.3;
224 <bb 3>:
225 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
226 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
228 <bb 2>:
229 return &buf, __bound_tmp.4_3;
232 c) Pointer is the result of object narrowing.
234 It happens when we use pointer to an object to compute pointer to a part
235 of an object. E.g. we take pointer to a field of a structure. In this
236 case we perform bounds intersection using bounds of original object and
237 bounds of object's part (which are computed basing on its type).
239 There may be some debatable questions about when narrowing should occur
240 and when it should not. To avoid false bound violations in correct
241 programs we do not perform narrowing when address of an array element is
242 obtained (it has address of the whole array) and when address of the first
243 structure field is obtained (because it is guaranteed to be equal to
244 address of the whole structure and it is legal to cast it back to structure).
246 Default narrowing behavior may be changed using compiler flags.
248 Example:
250 In this example address of the second structure field is returned.
252 foo (struct A * p, __bounds_type __bounds_of_p)
254 <unnamed type> __bound_tmp.3;
255 int * _2;
256 int * _5;
258 <bb 2>:
259 _5 = &p_1(D)->second_field;
260 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
261 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
262 __bounds_of_p_3(D));
263 _2 = &p_1(D)->second_field;
264 return _2, __bound_tmp.3_8;
267 Example:
269 In this example address of the first field of array element is returned.
271 foo (struct A * p, __bounds_type __bounds_of_p, int i)
273 long unsigned int _3;
274 long unsigned int _4;
275 struct A * _6;
276 int * _7;
278 <bb 2>:
279 _3 = (long unsigned int) i_1(D);
280 _4 = _3 * 8;
281 _6 = p_5(D) + _4;
282 _7 = &_6->first_field;
283 return _7, __bounds_of_p_2(D);
287 d) Pointer is the result of pointer arithmetic or type cast.
289 In this case bounds of the base pointer are used. In case of binary
290 operation producing a pointer we are analyzing data flow further
291 looking for operand's bounds. One operand is considered as a base
292 if it has some valid bounds. If we fall into a case when none of
293 operands (or both of them) has valid bounds, a default bounds value
294 is used.
296 Trying to find out bounds for binary operations we may fall into
297 cyclic dependencies for pointers. To avoid infinite recursion all
298 walked phi nodes instantly obtain corresponding bounds but created
299 bounds are marked as incomplete. It helps us to stop DF walk during
300 bounds search.
302 When we reach pointer source, some args of incomplete bounds phi obtain
303 valid bounds and those values are propagated further through phi nodes.
304 If no valid bounds were found for phi node then we mark its result as
305 invalid bounds. Process stops when all incomplete bounds become either
306 valid or invalid and we are able to choose a pointer base.
308 e) Pointer is loaded from the memory.
310 In this case we just need to load bounds from the bounds table.
312 Example:
314 foo ()
316 <unnamed type> __bound_tmp.3;
317 static int * buf;
318 int * _2;
320 <bb 2>:
321 _2 = buf;
322 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
323 return _2, __bound_tmp.3_4;
328 typedef void (*assign_handler)(tree, tree, void *);
330 static tree chkp_get_zero_bounds ();
331 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
332 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
333 gimple_stmt_iterator *iter);
334 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
335 tree *elt, bool *safe,
336 bool *bitfield,
337 tree *bounds,
338 gimple_stmt_iterator *iter,
339 bool innermost_bounds);
341 #define chkp_bndldx_fndecl \
342 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
343 #define chkp_bndstx_fndecl \
344 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
345 #define chkp_checkl_fndecl \
346 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
347 #define chkp_checku_fndecl \
348 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
349 #define chkp_bndmk_fndecl \
350 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
351 #define chkp_ret_bnd_fndecl \
352 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
353 #define chkp_intersect_fndecl \
354 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
355 #define chkp_narrow_bounds_fndecl \
356 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
357 #define chkp_sizeof_fndecl \
358 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
359 #define chkp_extract_lower_fndecl \
360 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
361 #define chkp_extract_upper_fndecl \
362 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
364 static GTY (()) tree chkp_uintptr_type;
366 static GTY (()) tree chkp_zero_bounds_var;
367 static GTY (()) tree chkp_none_bounds_var;
369 static GTY (()) basic_block entry_block;
370 static GTY (()) tree zero_bounds;
371 static GTY (()) tree none_bounds;
372 static GTY (()) tree incomplete_bounds;
373 static GTY (()) tree tmp_var;
374 static GTY (()) tree size_tmp_var;
375 static GTY (()) bitmap chkp_abnormal_copies;
377 struct hash_set<tree> *chkp_invalid_bounds;
378 struct hash_set<tree> *chkp_completed_bounds_set;
379 struct hash_map<tree, tree> *chkp_reg_bounds;
380 struct hash_map<tree, tree> *chkp_bound_vars;
381 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
382 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
383 struct hash_map<tree, tree> *chkp_bounds_map;
384 struct hash_map<tree, tree> *chkp_static_var_bounds;
386 static bool in_chkp_pass;
388 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
389 #define CHKP_SIZE_TMP_NAME "__size_tmp"
390 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
391 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
392 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
393 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
394 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
396 /* Static checker constructors may become very large and their
397 compilation with optimization may take too much time.
398 Therefore we put a limit to number of statements in one
399 constructor. Tests with 100 000 statically initialized
400 pointers showed following compilation times on Sandy Bridge
401 server (used -O2):
402 limit 100 => ~18 sec.
403 limit 300 => ~22 sec.
404 limit 1000 => ~30 sec.
405 limit 3000 => ~49 sec.
406 limit 5000 => ~55 sec.
407 limit 10000 => ~76 sec.
408 limit 100000 => ~532 sec. */
409 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
411 struct chkp_ctor_stmt_list
413 tree stmts;
414 int avail;
417 /* Return 1 if function FNDECL is instrumented by Pointer
418 Bounds Checker. */
419 bool
420 chkp_function_instrumented_p (tree fndecl)
422 return fndecl
423 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
426 /* Mark function FNDECL as instrumented. */
427 void
428 chkp_function_mark_instrumented (tree fndecl)
430 if (chkp_function_instrumented_p (fndecl))
431 return;
433 DECL_ATTRIBUTES (fndecl)
434 = tree_cons (get_identifier ("chkp instrumented"), NULL,
435 DECL_ATTRIBUTES (fndecl));
438 /* Return true when STMT is builtin call to instrumentation function
439 corresponding to CODE. */
441 bool
442 chkp_gimple_call_builtin_p (gimple *call,
443 enum built_in_function code)
445 tree fndecl;
446 if (is_gimple_call (call)
447 && (fndecl = targetm.builtin_chkp_function (code))
448 && gimple_call_fndecl (call) == fndecl)
449 return true;
450 return false;
453 /* Emit code to build zero bounds and return RTL holding
454 the result. */
456 chkp_expand_zero_bounds ()
458 tree zero_bnd;
460 if (flag_chkp_use_static_const_bounds)
461 zero_bnd = chkp_get_zero_bounds_var ();
462 else
463 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
464 integer_zero_node);
465 return expand_normal (zero_bnd);
468 /* Emit code to store zero bounds for PTR located at MEM. */
469 void
470 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
472 tree zero_bnd, bnd, addr, bndstx;
474 if (flag_chkp_use_static_const_bounds)
475 zero_bnd = chkp_get_zero_bounds_var ();
476 else
477 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
478 integer_zero_node);
479 bnd = make_tree (pointer_bounds_type_node,
480 assign_temp (pointer_bounds_type_node, 0, 1));
481 addr = build1 (ADDR_EXPR,
482 build_pointer_type (TREE_TYPE (mem)), mem);
483 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
485 expand_assignment (bnd, zero_bnd, false);
486 expand_normal (bndstx);
489 /* Build retbnd call for returned value RETVAL.
491 If BNDVAL is not NULL then result is stored
492 in it. Otherwise a temporary is created to
493 hold returned value.
495 GSI points to a position for a retbnd call
496 and is set to created stmt.
498 Cgraph edge is created for a new call if
499 UPDATE_EDGE is 1.
501 Obtained bounds are returned. */
502 tree
503 chkp_insert_retbnd_call (tree bndval, tree retval,
504 gimple_stmt_iterator *gsi)
506 gimple *call;
508 if (!bndval)
509 bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
511 call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
512 gimple_call_set_lhs (call, bndval);
513 gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
515 return bndval;
518 /* Build a GIMPLE_CALL identical to CALL but skipping bounds
519 arguments. */
521 gcall *
522 chkp_copy_call_skip_bounds (gcall *call)
524 bitmap bounds;
525 unsigned i;
527 bitmap_obstack_initialize (NULL);
528 bounds = BITMAP_ALLOC (NULL);
530 for (i = 0; i < gimple_call_num_args (call); i++)
531 if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
532 bitmap_set_bit (bounds, i);
534 if (!bitmap_empty_p (bounds))
535 call = gimple_call_copy_skip_args (call, bounds);
536 gimple_call_set_with_bounds (call, false);
538 BITMAP_FREE (bounds);
539 bitmap_obstack_release (NULL);
541 return call;
544 /* Redirect edge E to the correct node according to call_stmt.
545 Return 1 if bounds removal from call_stmt should be done
546 instead of redirection. */
548 bool
549 chkp_redirect_edge (cgraph_edge *e)
551 bool instrumented = false;
552 tree decl = e->callee->decl;
554 if (e->callee->instrumentation_clone
555 || chkp_function_instrumented_p (decl))
556 instrumented = true;
558 if (instrumented
559 && !gimple_call_with_bounds_p (e->call_stmt))
560 e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
561 else if (!instrumented
562 && gimple_call_with_bounds_p (e->call_stmt)
563 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
564 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
565 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
567 if (e->callee->instrumented_version)
568 e->redirect_callee (e->callee->instrumented_version);
569 else
571 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
572 /* Avoid bounds removal if all args will be removed. */
573 if (!args || TREE_VALUE (args) != void_type_node)
574 return true;
575 else
576 gimple_call_set_with_bounds (e->call_stmt, false);
580 return false;
583 /* Mark statement S to not be instrumented. */
584 static void
585 chkp_mark_stmt (gimple *s)
587 gimple_set_plf (s, GF_PLF_1, true);
590 /* Mark statement S to be instrumented. */
591 static void
592 chkp_unmark_stmt (gimple *s)
594 gimple_set_plf (s, GF_PLF_1, false);
597 /* Return 1 if statement S should not be instrumented. */
598 static bool
599 chkp_marked_stmt_p (gimple *s)
601 return gimple_plf (s, GF_PLF_1);
604 /* Get var to be used for bound temps. */
605 static tree
606 chkp_get_tmp_var (void)
608 if (!tmp_var)
609 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
611 return tmp_var;
614 /* Get SSA_NAME to be used as temp. */
615 static tree
616 chkp_get_tmp_reg (gimple *stmt)
618 if (in_chkp_pass)
619 return make_ssa_name (chkp_get_tmp_var (), stmt);
621 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
622 CHKP_BOUND_TMP_NAME);
625 /* Get var to be used for size temps. */
626 static tree
627 chkp_get_size_tmp_var (void)
629 if (!size_tmp_var)
630 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
632 return size_tmp_var;
635 /* Register bounds BND for address of OBJ. */
636 static void
637 chkp_register_addr_bounds (tree obj, tree bnd)
639 if (bnd == incomplete_bounds)
640 return;
642 chkp_reg_addr_bounds->put (obj, bnd);
644 if (dump_file && (dump_flags & TDF_DETAILS))
646 fprintf (dump_file, "Regsitered bound ");
647 print_generic_expr (dump_file, bnd, 0);
648 fprintf (dump_file, " for address of ");
649 print_generic_expr (dump_file, obj, 0);
650 fprintf (dump_file, "\n");
654 /* Return bounds registered for address of OBJ. */
655 static tree
656 chkp_get_registered_addr_bounds (tree obj)
658 tree *slot = chkp_reg_addr_bounds->get (obj);
659 return slot ? *slot : NULL_TREE;
662 /* Mark BOUNDS as completed. */
663 static void
664 chkp_mark_completed_bounds (tree bounds)
666 chkp_completed_bounds_set->add (bounds);
668 if (dump_file && (dump_flags & TDF_DETAILS))
670 fprintf (dump_file, "Marked bounds ");
671 print_generic_expr (dump_file, bounds, 0);
672 fprintf (dump_file, " as completed\n");
676 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
677 static bool
678 chkp_completed_bounds (tree bounds)
680 return chkp_completed_bounds_set->contains (bounds);
683 /* Clear comleted bound marks. */
684 static void
685 chkp_erase_completed_bounds (void)
687 delete chkp_completed_bounds_set;
688 chkp_completed_bounds_set = new hash_set<tree>;
691 /* Mark BOUNDS associated with PTR as incomplete. */
692 static void
693 chkp_register_incomplete_bounds (tree bounds, tree ptr)
695 chkp_incomplete_bounds_map->put (bounds, ptr);
697 if (dump_file && (dump_flags & TDF_DETAILS))
699 fprintf (dump_file, "Regsitered incomplete bounds ");
700 print_generic_expr (dump_file, bounds, 0);
701 fprintf (dump_file, " for ");
702 print_generic_expr (dump_file, ptr, 0);
703 fprintf (dump_file, "\n");
707 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
708 static bool
709 chkp_incomplete_bounds (tree bounds)
711 if (bounds == incomplete_bounds)
712 return true;
714 if (chkp_completed_bounds (bounds))
715 return false;
717 return chkp_incomplete_bounds_map->get (bounds) != NULL;
720 /* Clear incomleted bound marks. */
721 static void
722 chkp_erase_incomplete_bounds (void)
724 delete chkp_incomplete_bounds_map;
725 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
728 /* Build and return bndmk call which creates bounds for structure
729 pointed by PTR. Structure should have complete type. */
730 tree
731 chkp_make_bounds_for_struct_addr (tree ptr)
733 tree type = TREE_TYPE (ptr);
734 tree size;
736 gcc_assert (POINTER_TYPE_P (type));
738 size = TYPE_SIZE (TREE_TYPE (type));
740 gcc_assert (size);
742 return build_call_nary (pointer_bounds_type_node,
743 build_fold_addr_expr (chkp_bndmk_fndecl),
744 2, ptr, size);
747 /* Traversal function for chkp_may_finish_incomplete_bounds.
748 Set RES to 0 if at least one argument of phi statement
749 defining bounds (passed in KEY arg) is unknown.
750 Traversal stops when first unknown phi argument is found. */
751 bool
752 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
753 bool *res)
755 gimple *phi;
756 unsigned i;
758 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
760 phi = SSA_NAME_DEF_STMT (bounds);
762 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
764 for (i = 0; i < gimple_phi_num_args (phi); i++)
766 tree phi_arg = gimple_phi_arg_def (phi, i);
767 if (!phi_arg)
769 *res = false;
770 /* Do not need to traverse further. */
771 return false;
775 return true;
778 /* Return 1 if all phi nodes created for bounds have their
779 arguments computed. */
780 static bool
781 chkp_may_finish_incomplete_bounds (void)
783 bool res = true;
785 chkp_incomplete_bounds_map
786 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
788 return res;
791 /* Helper function for chkp_finish_incomplete_bounds.
792 Recompute args for bounds phi node. */
793 bool
794 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
795 void *res ATTRIBUTE_UNUSED)
797 tree ptr = *slot;
798 gphi *bounds_phi;
799 gphi *ptr_phi;
800 unsigned i;
802 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
803 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
805 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
806 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
808 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
810 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
811 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
813 add_phi_arg (bounds_phi, bound_arg,
814 gimple_phi_arg_edge (ptr_phi, i),
815 UNKNOWN_LOCATION);
818 return true;
821 /* Mark BOUNDS as invalid. */
822 static void
823 chkp_mark_invalid_bounds (tree bounds)
825 chkp_invalid_bounds->add (bounds);
827 if (dump_file && (dump_flags & TDF_DETAILS))
829 fprintf (dump_file, "Marked bounds ");
830 print_generic_expr (dump_file, bounds, 0);
831 fprintf (dump_file, " as invalid\n");
835 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
836 static bool
837 chkp_valid_bounds (tree bounds)
839 if (bounds == zero_bounds || bounds == none_bounds)
840 return false;
842 return !chkp_invalid_bounds->contains (bounds);
845 /* Helper function for chkp_finish_incomplete_bounds.
846 Check all arguments of phi nodes trying to find
847 valid completed bounds. If there is at least one
848 such arg then bounds produced by phi node are marked
849 as valid completed bounds and all phi args are
850 recomputed. */
851 bool
852 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
854 gimple *phi;
855 unsigned i;
857 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
859 if (chkp_completed_bounds (bounds))
860 return true;
862 phi = SSA_NAME_DEF_STMT (bounds);
864 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
866 for (i = 0; i < gimple_phi_num_args (phi); i++)
868 tree phi_arg = gimple_phi_arg_def (phi, i);
870 gcc_assert (phi_arg);
872 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
874 *res = true;
875 chkp_mark_completed_bounds (bounds);
876 chkp_recompute_phi_bounds (bounds, slot, NULL);
877 return true;
881 return true;
884 /* Helper function for chkp_finish_incomplete_bounds.
885 Marks all incompleted bounds as invalid. */
886 bool
887 chkp_mark_invalid_bounds_walker (tree const &bounds,
888 tree *slot ATTRIBUTE_UNUSED,
889 void *res ATTRIBUTE_UNUSED)
891 if (!chkp_completed_bounds (bounds))
893 chkp_mark_invalid_bounds (bounds);
894 chkp_mark_completed_bounds (bounds);
896 return true;
899 /* When all bound phi nodes have all their args computed
900 we have enough info to find valid bounds. We iterate
901 through all incompleted bounds searching for valid
902 bounds. Found valid bounds are marked as completed
903 and all remaining incompleted bounds are recomputed.
904 Process continues until no new valid bounds may be
905 found. All remained incompleted bounds are marked as
906 invalid (i.e. have no valid source of bounds). */
907 static void
908 chkp_finish_incomplete_bounds (void)
910 bool found_valid;
912 while (found_valid)
914 found_valid = false;
916 chkp_incomplete_bounds_map->
917 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
919 if (found_valid)
920 chkp_incomplete_bounds_map->
921 traverse<void *, chkp_recompute_phi_bounds> (NULL);
924 chkp_incomplete_bounds_map->
925 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
926 chkp_incomplete_bounds_map->
927 traverse<void *, chkp_recompute_phi_bounds> (NULL);
929 chkp_erase_completed_bounds ();
930 chkp_erase_incomplete_bounds ();
933 /* Return 1 if type TYPE is a pointer type or a
934 structure having a pointer type as one of its fields.
935 Otherwise return 0. */
936 bool
937 chkp_type_has_pointer (const_tree type)
939 bool res = false;
941 if (BOUNDED_TYPE_P (type))
942 res = true;
943 else if (RECORD_OR_UNION_TYPE_P (type))
945 tree field;
947 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
948 if (TREE_CODE (field) == FIELD_DECL)
949 res = res || chkp_type_has_pointer (TREE_TYPE (field));
951 else if (TREE_CODE (type) == ARRAY_TYPE)
952 res = chkp_type_has_pointer (TREE_TYPE (type));
954 return res;
957 unsigned
958 chkp_type_bounds_count (const_tree type)
960 unsigned res = 0;
962 if (!type)
963 res = 0;
964 else if (BOUNDED_TYPE_P (type))
965 res = 1;
966 else if (RECORD_OR_UNION_TYPE_P (type))
968 bitmap have_bound;
970 bitmap_obstack_initialize (NULL);
971 have_bound = BITMAP_ALLOC (NULL);
972 chkp_find_bound_slots (type, have_bound);
973 res = bitmap_count_bits (have_bound);
974 BITMAP_FREE (have_bound);
975 bitmap_obstack_release (NULL);
978 return res;
981 /* Get bounds associated with NODE via
982 chkp_set_bounds call. */
983 tree
984 chkp_get_bounds (tree node)
986 tree *slot;
988 if (!chkp_bounds_map)
989 return NULL_TREE;
991 slot = chkp_bounds_map->get (node);
992 return slot ? *slot : NULL_TREE;
995 /* Associate bounds VAL with NODE. */
996 void
997 chkp_set_bounds (tree node, tree val)
999 if (!chkp_bounds_map)
1000 chkp_bounds_map = new hash_map<tree, tree>;
1002 chkp_bounds_map->put (node, val);
1005 /* Check if statically initialized variable VAR require
1006 static bounds initialization. If VAR is added into
1007 bounds initlization list then 1 is returned. Otherwise
1008 return 0. */
1009 extern bool
1010 chkp_register_var_initializer (tree var)
1012 if (!flag_check_pointer_bounds
1013 || DECL_INITIAL (var) == error_mark_node)
1014 return false;
1016 gcc_assert (TREE_CODE (var) == VAR_DECL);
1017 gcc_assert (DECL_INITIAL (var));
1019 if (TREE_STATIC (var)
1020 && chkp_type_has_pointer (TREE_TYPE (var)))
1022 varpool_node::get_create (var)->need_bounds_init = 1;
1023 return true;
1026 return false;
1029 /* Helper function for chkp_finish_file.
1031 Add new modification statement (RHS is assigned to LHS)
1032 into list of static initializer statementes (passed in ARG).
1033 If statements list becomes too big, emit checker constructor
1034 and start the new one. */
1035 static void
1036 chkp_add_modification_to_stmt_list (tree lhs,
1037 tree rhs,
1038 void *arg)
1040 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
1041 tree modify;
1043 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1044 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1046 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1047 append_to_statement_list (modify, &stmts->stmts);
1049 stmts->avail--;
1052 /* Build and return ADDR_EXPR for specified object OBJ. */
1053 static tree
1054 chkp_build_addr_expr (tree obj)
1056 return TREE_CODE (obj) == TARGET_MEM_REF
1057 ? tree_mem_ref_addr (ptr_type_node, obj)
1058 : build_fold_addr_expr (obj);
1061 /* Helper function for chkp_finish_file.
1062 Initialize bound variable BND_VAR with bounds of variable
1063 VAR to statements list STMTS. If statements list becomes
1064 too big, emit checker constructor and start the new one. */
1065 static void
1066 chkp_output_static_bounds (tree bnd_var, tree var,
1067 struct chkp_ctor_stmt_list *stmts)
1069 tree lb, ub, size;
1071 if (TREE_CODE (var) == STRING_CST)
1073 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1074 size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
1076 else if (DECL_SIZE (var)
1077 && !chkp_variable_size_type (TREE_TYPE (var)))
1079 /* Compute bounds using statically known size. */
1080 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1081 size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
1083 else
1085 /* Compute bounds using dynamic size. */
1086 tree call;
1088 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1089 call = build1 (ADDR_EXPR,
1090 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
1091 chkp_sizeof_fndecl);
1092 size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
1093 call, 1, var);
1095 if (flag_chkp_zero_dynamic_size_as_infinite)
1097 tree max_size, cond;
1099 max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
1100 cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
1101 size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
1104 size = size_binop (MINUS_EXPR, size, size_one_node);
1107 ub = size_binop (PLUS_EXPR, lb, size);
1108 stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
1109 &stmts->stmts);
1110 if (stmts->avail <= 0)
1112 cgraph_build_static_cdtor ('B', stmts->stmts,
1113 MAX_RESERVED_INIT_PRIORITY + 2);
1114 stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
1115 stmts->stmts = NULL;
1119 /* Return entry block to be used for checker initilization code.
1120 Create new block if required. */
1121 static basic_block
1122 chkp_get_entry_block (void)
1124 if (!entry_block)
1125 entry_block
1126 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1128 return entry_block;
1131 /* Return a bounds var to be used for pointer var PTR_VAR. */
1132 static tree
1133 chkp_get_bounds_var (tree ptr_var)
1135 tree bnd_var;
1136 tree *slot;
1138 slot = chkp_bound_vars->get (ptr_var);
1139 if (slot)
1140 bnd_var = *slot;
1141 else
1143 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1144 CHKP_BOUND_TMP_NAME);
1145 chkp_bound_vars->put (ptr_var, bnd_var);
1148 return bnd_var;
1151 /* If BND is an abnormal bounds copy, return a copied value.
1152 Otherwise return BND. */
1153 static tree
1154 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
1156 if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1158 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1159 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1160 bnd = gimple_assign_rhs1 (bnd_def);
1163 return bnd;
1166 /* Register bounds BND for object PTR in global bounds table.
1167 A copy of bounds may be created for abnormal ssa names.
1168 Returns bounds to use for PTR. */
1169 static tree
1170 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1172 bool abnormal_ptr;
1174 if (!chkp_reg_bounds)
1175 return bnd;
1177 /* Do nothing if bounds are incomplete_bounds
1178 because it means bounds will be recomputed. */
1179 if (bnd == incomplete_bounds)
1180 return bnd;
1182 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1183 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1184 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1186 /* A single bounds value may be reused multiple times for
1187 different pointer values. It may cause coalescing issues
1188 for abnormal SSA names. To avoid it we create a bounds
1189 copy in case it is computed for abnormal SSA name.
1191 We also cannot reuse such created copies for other pointers */
1192 if (abnormal_ptr
1193 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1195 tree bnd_var = NULL_TREE;
1197 if (abnormal_ptr)
1199 if (SSA_NAME_VAR (ptr))
1200 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1202 else
1203 bnd_var = chkp_get_tmp_var ();
1205 /* For abnormal copies we may just find original
1206 bounds and use them. */
1207 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1208 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1209 /* For undefined values we usually use none bounds
1210 value but in case of abnormal edge it may cause
1211 coalescing failures. Use default definition of
1212 bounds variable instead to avoid it. */
1213 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1214 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1216 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1218 if (dump_file && (dump_flags & TDF_DETAILS))
1220 fprintf (dump_file, "Using default def bounds ");
1221 print_generic_expr (dump_file, bnd, 0);
1222 fprintf (dump_file, " for abnormal default def SSA name ");
1223 print_generic_expr (dump_file, ptr, 0);
1224 fprintf (dump_file, "\n");
1227 else
1229 tree copy;
1230 gimple *def = SSA_NAME_DEF_STMT (ptr);
1231 gimple *assign;
1232 gimple_stmt_iterator gsi;
1234 if (bnd_var)
1235 copy = make_ssa_name (bnd_var);
1236 else
1237 copy = make_temp_ssa_name (pointer_bounds_type_node,
1238 NULL,
1239 CHKP_BOUND_TMP_NAME);
1240 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1241 assign = gimple_build_assign (copy, bnd);
1243 if (dump_file && (dump_flags & TDF_DETAILS))
1245 fprintf (dump_file, "Creating a copy of bounds ");
1246 print_generic_expr (dump_file, bnd, 0);
1247 fprintf (dump_file, " for abnormal SSA name ");
1248 print_generic_expr (dump_file, ptr, 0);
1249 fprintf (dump_file, "\n");
1252 if (gimple_code (def) == GIMPLE_NOP)
1254 gsi = gsi_last_bb (chkp_get_entry_block ());
1255 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1256 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1257 else
1258 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1260 else
1262 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1263 /* Sometimes (e.g. when we load a pointer from a
1264 memory) bounds are produced later than a pointer.
1265 We need to insert bounds copy appropriately. */
1266 if (gimple_code (bnd_def) != GIMPLE_NOP
1267 && stmt_dominates_stmt_p (def, bnd_def))
1268 gsi = gsi_for_stmt (bnd_def);
1269 else
1270 gsi = gsi_for_stmt (def);
1271 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1274 bnd = copy;
1277 if (abnormal_ptr)
1278 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1281 chkp_reg_bounds->put (ptr, bnd);
1283 if (dump_file && (dump_flags & TDF_DETAILS))
1285 fprintf (dump_file, "Regsitered bound ");
1286 print_generic_expr (dump_file, bnd, 0);
1287 fprintf (dump_file, " for pointer ");
1288 print_generic_expr (dump_file, ptr, 0);
1289 fprintf (dump_file, "\n");
1292 return bnd;
1295 /* Get bounds registered for object PTR in global bounds table. */
1296 static tree
1297 chkp_get_registered_bounds (tree ptr)
1299 tree *slot;
1301 if (!chkp_reg_bounds)
1302 return NULL_TREE;
1304 slot = chkp_reg_bounds->get (ptr);
1305 return slot ? *slot : NULL_TREE;
1308 /* Add bound retvals to return statement pointed by GSI. */
1310 static void
1311 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1313 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1314 tree retval = gimple_return_retval (ret);
1315 tree ret_decl = DECL_RESULT (cfun->decl);
1316 tree bounds;
1318 if (!retval)
1319 return;
1321 if (BOUNDED_P (ret_decl))
1323 bounds = chkp_find_bounds (retval, gsi);
1324 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1325 gimple_return_set_retbnd (ret, bounds);
1328 update_stmt (ret);
1331 /* Force OP to be suitable for using as an argument for call.
1332 New statements (if any) go to SEQ. */
1333 static tree
1334 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1336 gimple_seq stmts;
1337 gimple_stmt_iterator si;
1339 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1341 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1342 chkp_mark_stmt (gsi_stmt (si));
1344 gimple_seq_add_seq (seq, stmts);
1346 return op;
1349 /* Generate lower bound check for memory access by ADDR.
1350 Check is inserted before the position pointed by ITER.
1351 DIRFLAG indicates whether memory access is load or store. */
1352 static void
1353 chkp_check_lower (tree addr, tree bounds,
1354 gimple_stmt_iterator iter,
1355 location_t location,
1356 tree dirflag)
1358 gimple_seq seq;
1359 gimple *check;
1360 tree node;
1362 if (!chkp_function_instrumented_p (current_function_decl)
1363 && bounds == chkp_get_zero_bounds ())
1364 return;
1366 if (dirflag == integer_zero_node
1367 && !flag_chkp_check_read)
1368 return;
1370 if (dirflag == integer_one_node
1371 && !flag_chkp_check_write)
1372 return;
1374 seq = NULL;
1376 node = chkp_force_gimple_call_op (addr, &seq);
1378 check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1379 chkp_mark_stmt (check);
1380 gimple_call_set_with_bounds (check, true);
1381 gimple_set_location (check, location);
1382 gimple_seq_add_stmt (&seq, check);
1384 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1386 if (dump_file && (dump_flags & TDF_DETAILS))
1388 gimple *before = gsi_stmt (iter);
1389 fprintf (dump_file, "Generated lower bound check for statement ");
1390 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1391 fprintf (dump_file, " ");
1392 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1396 /* Generate upper bound check for memory access by ADDR.
1397 Check is inserted before the position pointed by ITER.
1398 DIRFLAG indicates whether memory access is load or store. */
1399 static void
1400 chkp_check_upper (tree addr, tree bounds,
1401 gimple_stmt_iterator iter,
1402 location_t location,
1403 tree dirflag)
1405 gimple_seq seq;
1406 gimple *check;
1407 tree node;
1409 if (!chkp_function_instrumented_p (current_function_decl)
1410 && bounds == chkp_get_zero_bounds ())
1411 return;
1413 if (dirflag == integer_zero_node
1414 && !flag_chkp_check_read)
1415 return;
1417 if (dirflag == integer_one_node
1418 && !flag_chkp_check_write)
1419 return;
1421 seq = NULL;
1423 node = chkp_force_gimple_call_op (addr, &seq);
1425 check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1426 chkp_mark_stmt (check);
1427 gimple_call_set_with_bounds (check, true);
1428 gimple_set_location (check, location);
1429 gimple_seq_add_stmt (&seq, check);
1431 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1433 if (dump_file && (dump_flags & TDF_DETAILS))
1435 gimple *before = gsi_stmt (iter);
1436 fprintf (dump_file, "Generated upper bound check for statement ");
1437 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1438 fprintf (dump_file, " ");
1439 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1443 /* Generate lower and upper bound checks for memory access
1444 to memory slot [FIRST, LAST] againsr BOUNDS. Checks
1445 are inserted before the position pointed by ITER.
1446 DIRFLAG indicates whether memory access is load or store. */
1447 void
1448 chkp_check_mem_access (tree first, tree last, tree bounds,
1449 gimple_stmt_iterator iter,
1450 location_t location,
1451 tree dirflag)
1453 chkp_check_lower (first, bounds, iter, location, dirflag);
1454 chkp_check_upper (last, bounds, iter, location, dirflag);
1457 /* Replace call to _bnd_chk_* pointed by GSI with
1458 bndcu and bndcl calls. DIRFLAG determines whether
1459 check is for read or write. */
1461 void
1462 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1463 tree dirflag)
1465 gimple_stmt_iterator call_iter = *gsi;
1466 gimple *call = gsi_stmt (*gsi);
1467 tree fndecl = gimple_call_fndecl (call);
1468 tree addr = gimple_call_arg (call, 0);
1469 tree bounds = chkp_find_bounds (addr, gsi);
1471 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1472 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1473 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1475 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1476 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1478 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1480 tree size = gimple_call_arg (call, 1);
1481 addr = fold_build_pointer_plus (addr, size);
1482 addr = fold_build_pointer_plus_hwi (addr, -1);
1483 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1486 gsi_remove (&call_iter, true);
1489 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1490 corresponding bounds extract call. */
1492 void
1493 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1495 gimple *call = gsi_stmt (*gsi);
1496 tree fndecl = gimple_call_fndecl (call);
1497 tree addr = gimple_call_arg (call, 0);
1498 tree bounds = chkp_find_bounds (addr, gsi);
1499 gimple *extract;
1501 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1502 fndecl = chkp_extract_lower_fndecl;
1503 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1504 fndecl = chkp_extract_upper_fndecl;
1505 else
1506 gcc_unreachable ();
1508 extract = gimple_build_call (fndecl, 1, bounds);
1509 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1510 chkp_mark_stmt (extract);
1512 gsi_replace (gsi, extract, false);
1515 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1516 static tree
1517 chkp_build_component_ref (tree obj, tree field)
1519 tree res;
1521 /* If object is TMR then we do not use component_ref but
1522 add offset instead. We need it to be able to get addr
1523 of the reasult later. */
1524 if (TREE_CODE (obj) == TARGET_MEM_REF)
1526 tree offs = TMR_OFFSET (obj);
1527 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1528 offs, DECL_FIELD_OFFSET (field));
1530 gcc_assert (offs);
1532 res = copy_node (obj);
1533 TREE_TYPE (res) = TREE_TYPE (field);
1534 TMR_OFFSET (res) = offs;
1536 else
1537 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1539 return res;
1542 /* Return ARRAY_REF for array ARR and index IDX with
1543 specified element type ETYPE and element size ESIZE. */
1544 static tree
1545 chkp_build_array_ref (tree arr, tree etype, tree esize,
1546 unsigned HOST_WIDE_INT idx)
1548 tree index = build_int_cst (size_type_node, idx);
1549 tree res;
1551 /* If object is TMR then we do not use array_ref but
1552 add offset instead. We need it to be able to get addr
1553 of the reasult later. */
1554 if (TREE_CODE (arr) == TARGET_MEM_REF)
1556 tree offs = TMR_OFFSET (arr);
1558 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1559 esize, index);
1560 gcc_assert(esize);
1562 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1563 offs, esize);
1564 gcc_assert (offs);
1566 res = copy_node (arr);
1567 TREE_TYPE (res) = etype;
1568 TMR_OFFSET (res) = offs;
1570 else
1571 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1573 return res;
1576 /* Helper function for chkp_add_bounds_to_call_stmt.
1577 Fill ALL_BOUNDS output array with created bounds.
1579 OFFS is used for recursive calls and holds basic
1580 offset of TYPE in outer structure in bits.
1582 ITER points a position where bounds are searched.
1584 ALL_BOUNDS[i] is filled with elem bounds if there
1585 is a field in TYPE which has pointer type and offset
1586 equal to i * POINTER_SIZE in bits. */
1587 static void
1588 chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
1589 HOST_WIDE_INT offs,
1590 gimple_stmt_iterator *iter)
1592 tree type = TREE_TYPE (elem);
1594 if (BOUNDED_TYPE_P (type))
1596 if (!all_bounds[offs / POINTER_SIZE])
1598 tree temp = make_temp_ssa_name (type, NULL, "");
1599 gimple *assign = gimple_build_assign (temp, elem);
1600 gimple_stmt_iterator gsi;
1602 gsi_insert_before (iter, assign, GSI_SAME_STMT);
1603 gsi = gsi_for_stmt (assign);
1605 all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
1608 else if (RECORD_OR_UNION_TYPE_P (type))
1610 tree field;
1612 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1613 if (TREE_CODE (field) == FIELD_DECL)
1615 tree base = unshare_expr (elem);
1616 tree field_ref = chkp_build_component_ref (base, field);
1617 HOST_WIDE_INT field_offs
1618 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1619 if (DECL_FIELD_OFFSET (field))
1620 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1622 chkp_find_bounds_for_elem (field_ref, all_bounds,
1623 offs + field_offs, iter);
1626 else if (TREE_CODE (type) == ARRAY_TYPE)
1628 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1629 tree etype = TREE_TYPE (type);
1630 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1631 unsigned HOST_WIDE_INT cur;
1633 if (!maxval || integer_minus_onep (maxval))
1634 return;
1636 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1638 tree base = unshare_expr (elem);
1639 tree arr_elem = chkp_build_array_ref (base, etype,
1640 TYPE_SIZE (etype),
1641 cur);
1642 chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
1643 iter);
1648 /* Fill HAVE_BOUND output bitmap with information about
1649 bounds requred for object of type TYPE.
1651 OFFS is used for recursive calls and holds basic
1652 offset of TYPE in outer structure in bits.
1654 HAVE_BOUND[i] is set to 1 if there is a field
1655 in TYPE which has pointer type and offset
1656 equal to i * POINTER_SIZE - OFFS in bits. */
1657 void
1658 chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
1659 HOST_WIDE_INT offs)
1661 if (BOUNDED_TYPE_P (type))
1662 bitmap_set_bit (have_bound, offs / POINTER_SIZE);
1663 else if (RECORD_OR_UNION_TYPE_P (type))
1665 tree field;
1667 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1668 if (TREE_CODE (field) == FIELD_DECL)
1670 HOST_WIDE_INT field_offs = 0;
1671 if (DECL_FIELD_BIT_OFFSET (field))
1672 field_offs += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1673 if (DECL_FIELD_OFFSET (field))
1674 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1675 chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
1676 offs + field_offs);
1679 else if (TREE_CODE (type) == ARRAY_TYPE)
1681 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1682 tree etype = TREE_TYPE (type);
1683 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1684 unsigned HOST_WIDE_INT cur;
1686 if (!maxval
1687 || TREE_CODE (maxval) != INTEGER_CST
1688 || integer_minus_onep (maxval))
1689 return;
1691 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1692 chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
1696 /* Fill bitmap RES with information about bounds for
1697 type TYPE. See chkp_find_bound_slots_1 for more
1698 details. */
1699 void
1700 chkp_find_bound_slots (const_tree type, bitmap res)
1702 bitmap_clear (res);
1703 chkp_find_bound_slots_1 (type, res, 0);
1706 /* Return 1 if call to FNDECL should be instrumented
1707 and 0 otherwise. */
1709 static bool
1710 chkp_instrument_normal_builtin (tree fndecl)
1712 switch (DECL_FUNCTION_CODE (fndecl))
1714 case BUILT_IN_STRLEN:
1715 case BUILT_IN_STRCPY:
1716 case BUILT_IN_STRNCPY:
1717 case BUILT_IN_STPCPY:
1718 case BUILT_IN_STPNCPY:
1719 case BUILT_IN_STRCAT:
1720 case BUILT_IN_STRNCAT:
1721 case BUILT_IN_MEMCPY:
1722 case BUILT_IN_MEMPCPY:
1723 case BUILT_IN_MEMSET:
1724 case BUILT_IN_MEMMOVE:
1725 case BUILT_IN_BZERO:
1726 case BUILT_IN_STRCMP:
1727 case BUILT_IN_STRNCMP:
1728 case BUILT_IN_BCMP:
1729 case BUILT_IN_MEMCMP:
1730 case BUILT_IN_MEMCPY_CHK:
1731 case BUILT_IN_MEMPCPY_CHK:
1732 case BUILT_IN_MEMMOVE_CHK:
1733 case BUILT_IN_MEMSET_CHK:
1734 case BUILT_IN_STRCPY_CHK:
1735 case BUILT_IN_STRNCPY_CHK:
1736 case BUILT_IN_STPCPY_CHK:
1737 case BUILT_IN_STPNCPY_CHK:
1738 case BUILT_IN_STRCAT_CHK:
1739 case BUILT_IN_STRNCAT_CHK:
1740 case BUILT_IN_MALLOC:
1741 case BUILT_IN_CALLOC:
1742 case BUILT_IN_REALLOC:
1743 return 1;
1745 default:
1746 return 0;
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      /* A separate iterator is needed because chkp_build_bndstx may
	 move GSI; the original call is removed through it below.  */
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      /* A pointer (or pass-by-reference) argument gets a single bounds
	 argument; an aggregate containing pointers gets one bounds
	 argument per contained pointer.  */
      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds args were added, reuse the original statement.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA defs of the old call at the new statement before
	 the old one is dropped.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
1968 /* Return constant static bounds var with specified bounds LB and UB.
1969 If such var does not exists then new var is created with specified NAME. */
1970 static tree
1971 chkp_make_static_const_bounds (HOST_WIDE_INT lb,
1972 HOST_WIDE_INT ub,
1973 const char *name)
1975 tree id = get_identifier (name);
1976 tree var;
1977 varpool_node *node;
1978 symtab_node *snode;
1980 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
1981 pointer_bounds_type_node);
1982 TREE_STATIC (var) = 1;
1983 TREE_PUBLIC (var) = 1;
1985 /* With LTO we may have constant bounds already in varpool.
1986 Try to find it. */
1987 if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
1989 /* We don't allow this symbol usage for non bounds. */
1990 if (snode->type != SYMTAB_VARIABLE
1991 || !POINTER_BOUNDS_P (snode->decl))
1992 sorry ("-fcheck-pointer-bounds requires '%s' "
1993 "name for internal usage",
1994 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));
1996 return snode->decl;
1999 TREE_USED (var) = 1;
2000 TREE_READONLY (var) = 1;
2001 TREE_ADDRESSABLE (var) = 0;
2002 DECL_ARTIFICIAL (var) = 1;
2003 DECL_READ_P (var) = 1;
2004 DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
2005 make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
2006 /* We may use this symbol during ctors generation in chkp_finish_file
2007 when all symbols are emitted. Force output to avoid undefined
2008 symbols in ctors. */
2009 node = varpool_node::get_create (var);
2010 node->force_output = 1;
2012 varpool_node::finalize_decl (var);
2014 return var;
2017 /* Generate code to make bounds with specified lower bound LB and SIZE.
2018 if AFTER is 1 then code is inserted after position pointed by ITER
2019 otherwise code is inserted before position pointed by ITER.
2020 If ITER is NULL then code is added to entry block. */
2021 static tree
2022 chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
2024 gimple_seq seq;
2025 gimple_stmt_iterator gsi;
2026 gimple *stmt;
2027 tree bounds;
2029 if (iter)
2030 gsi = *iter;
2031 else
2032 gsi = gsi_start_bb (chkp_get_entry_block ());
2034 seq = NULL;
2036 lb = chkp_force_gimple_call_op (lb, &seq);
2037 size = chkp_force_gimple_call_op (size, &seq);
2039 stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
2040 chkp_mark_stmt (stmt);
2042 bounds = chkp_get_tmp_reg (stmt);
2043 gimple_call_set_lhs (stmt, bounds);
2045 gimple_seq_add_stmt (&seq, stmt);
2047 if (iter && after)
2048 gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
2049 else
2050 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
2052 if (dump_file && (dump_flags & TDF_DETAILS))
2054 fprintf (dump_file, "Made bounds: ");
2055 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2056 if (iter)
2058 fprintf (dump_file, " inserted before statement: ");
2059 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
2061 else
2062 fprintf (dump_file, " at function entry\n");
2065 /* update_stmt (stmt); */
2067 return bounds;
2070 /* Return var holding zero bounds. */
2071 tree
2072 chkp_get_zero_bounds_var (void)
2074 if (!chkp_zero_bounds_var)
2075 chkp_zero_bounds_var
2076 = chkp_make_static_const_bounds (0, -1,
2077 CHKP_ZERO_BOUNDS_VAR_NAME);
2078 return chkp_zero_bounds_var;
2081 /* Return var holding none bounds. */
2082 tree
2083 chkp_get_none_bounds_var (void)
2085 if (!chkp_none_bounds_var)
2086 chkp_none_bounds_var
2087 = chkp_make_static_const_bounds (-1, 0,
2088 CHKP_NONE_BOUNDS_VAR_NAME);
2089 return chkp_none_bounds_var;
2092 /* Return SSA_NAME used to represent zero bounds. */
2093 static tree
2094 chkp_get_zero_bounds (void)
2096 if (zero_bounds)
2097 return zero_bounds;
2099 if (dump_file && (dump_flags & TDF_DETAILS))
2100 fprintf (dump_file, "Creating zero bounds...");
2102 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2103 || flag_chkp_use_static_const_bounds > 0)
2105 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2106 gimple *stmt;
2108 zero_bounds = chkp_get_tmp_reg (NULL);
2109 stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
2110 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2112 else
2113 zero_bounds = chkp_make_bounds (integer_zero_node,
2114 integer_zero_node,
2115 NULL,
2116 false);
2118 return zero_bounds;
2121 /* Return SSA_NAME used to represent none bounds. */
2122 static tree
2123 chkp_get_none_bounds (void)
2125 if (none_bounds)
2126 return none_bounds;
2128 if (dump_file && (dump_flags & TDF_DETAILS))
2129 fprintf (dump_file, "Creating none bounds...");
2132 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2133 || flag_chkp_use_static_const_bounds > 0)
2135 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2136 gimple *stmt;
2138 none_bounds = chkp_get_tmp_reg (NULL);
2139 stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
2140 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2142 else
2143 none_bounds = chkp_make_bounds (integer_minus_one_node,
2144 build_int_cst (size_type_node, 2),
2145 NULL,
2146 false);
2148 return none_bounds;
2151 /* Return bounds to be used as a result of operation which
2152 should not create poiunter (e.g. MULT_EXPR). */
2153 static tree
2154 chkp_get_invalid_op_bounds (void)
2156 return chkp_get_zero_bounds ();
2159 /* Return bounds to be used for loads of non-pointer values. */
2160 static tree
2161 chkp_get_nonpointer_load_bounds (void)
2163 return chkp_get_zero_bounds ();
2166 /* Return 1 if may use bndret call to get bounds for pointer
2167 returned by CALL. */
2168 static bool
2169 chkp_call_returns_bounds_p (gcall *call)
2171 if (gimple_call_internal_p (call))
2172 return false;
2174 if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
2175 || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
2176 return true;
2178 if (gimple_call_with_bounds_p (call))
2179 return true;
2181 tree fndecl = gimple_call_fndecl (call);
2183 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
2184 return false;
2186 if (fndecl && !chkp_instrumentable_p (fndecl))
2187 return false;
2189 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2191 if (chkp_instrument_normal_builtin (fndecl))
2192 return true;
2194 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
2195 return false;
2197 struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
2198 return (clone && gimple_has_body_p (clone->decl));
2201 return true;
2204 /* Build bounds returned by CALL.  Known checker builtins and calls
returning one of their arguments get bounds computed directly; in the
general case a __chkp_ret_bnd call is inserted right after CALL.
Returns the (possibly registered) bounds value.  */
2205 static tree
2206 chkp_build_returned_bound (gcall *call)
2208 gimple_stmt_iterator gsi;
2209 tree bounds;
2210 gimple *stmt;
2211 tree fndecl = gimple_call_fndecl (call);
2212 unsigned int retflags;
2214 /* To avoid fixing alloca expands in targets we handle
2215 it separately.  Bounds are the returned pointer with the
requested allocation size.  */
2216 if (fndecl
2217 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2218 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
2219 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2221 tree size = gimple_call_arg (call, 0);
2222 tree lb = gimple_call_lhs (call);
2223 gimple_stmt_iterator iter = gsi_for_stmt (call);
2224 bounds = chkp_make_bounds (lb, size, &iter, true);
2226 /* We know bounds returned by set_bounds builtin call.  */
2227 else if (fndecl
2228 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2229 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
2231 tree lb = gimple_call_arg (call, 0)
2232 tree size = gimple_call_arg (call, 1);
2233 gimple_stmt_iterator iter = gsi_for_stmt (call);
2234 bounds = chkp_make_bounds (lb, size, &iter, true);
2236 /* Detect bounds initialization calls.  */
2237 else if (fndecl
2238 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2239 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
2240 bounds = chkp_get_zero_bounds ();
2241 /* Detect bounds nullification calls.  */
2242 else if (fndecl
2243 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2244 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
2245 bounds = chkp_get_none_bounds ();
2246 /* Detect bounds copy calls.  */
2247 else if (fndecl
2248 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2249 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
2251 gimple_stmt_iterator iter = gsi_for_stmt (call);
2252 bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
2254 /* Do not use retbnd when returned bounds are equal to some
2255 of passed bounds.  */
2256 else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
2257 && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
2259 gimple_stmt_iterator iter = gsi_for_stmt (call);
2260 unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
2261 if (gimple_call_with_bounds_p (call))
2263 for (argno = 0; argno < gimple_call_num_args (call); argno++)
2264 if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
2266 if (retarg)
2267 retarg--;
2268 else
2269 break;
2272 else
2273 argno = retarg;
2275 bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
2277 else if (chkp_call_returns_bounds_p (call))
2279 gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);
2281 /* In general case build checker builtin call to
2282 obtain returned bounds.  */
2283 stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
2284 gimple_call_lhs (call));
2285 chkp_mark_stmt (stmt);
2287 gsi = gsi_for_stmt (call);
2288 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
2290 bounds = chkp_get_tmp_reg (stmt);
2291 gimple_call_set_lhs (stmt, bounds);
2293 update_stmt (stmt);
2295 else
2296 bounds = chkp_get_zero_bounds ();
2298 if (dump_file && (dump_flags & TDF_DETAILS))
2300 fprintf (dump_file, "Built returned bounds (");
2301 print_generic_expr (dump_file, bounds, 0);
2302 fprintf (dump_file, ") for call: ");
2303 print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
2306 bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);
2308 return bounds;
2311 /* Return the __chkp_ret_bnd call loading bounds for SSA name VAL
2312 (which must be defined by a GIMPLE_CALL), or NULL if there is none.  */
2313 gcall *
2314 chkp_retbnd_call_by_val (tree val)
2316 if (TREE_CODE (val) != SSA_NAME)
2317 return NULL;
2319 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2321 imm_use_iterator use_iter;
2322 use_operand_p use_p;
2323 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2324 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2325 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2326 return as_a <gcall *> (USE_STMT (use_p));
2328 return NULL;
2331 /* Check the next parameter for the given PARM is bounds
2332 and return its default SSA_NAME (create if required).  */
2333 static tree
2334 chkp_get_next_bounds_parm (tree parm)
2336 tree bounds = TREE_CHAIN (parm);
2337 gcc_assert (POINTER_BOUNDS_P (bounds));
2338 bounds = ssa_default_def (cfun, bounds);
2339 if (!bounds)
2341 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2342 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2344 return bounds;
2347 /* Return bounds to be used for input argument PARM (an SSA name whose
var is a PARM_DECL).  Uses already-registered bounds when available;
otherwise zero bounds for the static chain or for "main" args (when
-fchkp-zero-input-bounds-for-main), the next bounds parm for bounded
params, and zero bounds for everything else.  */
2348 static tree
2349 chkp_get_bound_for_parm (tree parm)
2351 tree decl = SSA_NAME_VAR (parm);
2352 tree bounds;
2354 gcc_assert (TREE_CODE (decl) == PARM_DECL);
2356 bounds = chkp_get_registered_bounds (parm);
2358 if (!bounds)
2359 bounds = chkp_get_registered_bounds (decl);
2361 if (!bounds)
2363 tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;
2365 /* For static chain param we return zero bounds
2366 because currently we do not check dereferences
2367 of this pointer.  */
2368 if (cfun->static_chain_decl == decl)
2369 bounds = chkp_get_zero_bounds ();
2370 /* If non instrumented runtime is used then it may be useful
2371 to use zero bounds for input arguments of main
2372 function.  */
2373 else if (flag_chkp_zero_input_bounds_for_main
2374 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
2375 "main") == 0)
2376 bounds = chkp_get_zero_bounds ();
2377 else if (BOUNDED_P (parm))
2379 bounds = chkp_get_next_bounds_parm (decl);
2380 bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);
2382 if (dump_file && (dump_flags & TDF_DETAILS))
2384 fprintf (dump_file, "Built arg bounds (");
2385 print_generic_expr (dump_file, bounds, 0);
2386 fprintf (dump_file, ") for arg: ");
2387 print_node (dump_file, "", decl, 0);
2390 else
2391 bounds = chkp_get_zero_bounds ();
2394 if (!chkp_get_registered_bounds (parm))
2395 bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);
2397 if (dump_file && (dump_flags & TDF_DETAILS))
2399 fprintf (dump_file, "Using bounds ");
2400 print_generic_expr (dump_file, bounds, 0);
2401 fprintf (dump_file, " for parm ");
2402 print_generic_expr (dump_file, parm, 0);
2403 fprintf (dump_file, " of type ");
2404 print_generic_expr (dump_file, TREE_TYPE (parm), 0);
2405 fprintf (dump_file, ".\n");
2408 return bounds;
2411 /* Build and return CALL_EXPR for bndldx builtin with specified
2412 arguments.  (The comment previously said "bndstx" — this function
builds a load, not a store.)  */
2413 tree
2414 chkp_build_bndldx_call (tree addr, tree ptr)
2416 tree fn = build1 (ADDR_EXPR,
2417 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2418 chkp_bndldx_fndecl);
2419 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2420 fn, 2, addr, ptr);
2421 CALL_WITH_BOUNDS_P (call) = true;
2422 return call;
2425 /* Insert code to load bounds for PTR located by ADDR.
2426 Code is inserted after position pointed by GSI.
2427 Loaded bounds are returned.  */
2428 static tree
2429 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2431 gimple_seq seq;
2432 gimple *stmt;
2433 tree bounds;
2435 seq = NULL;
2437 addr = chkp_force_gimple_call_op (addr, &seq);
2438 ptr = chkp_force_gimple_call_op (ptr, &seq);
2440 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2441 chkp_mark_stmt (stmt);
2442 bounds = chkp_get_tmp_reg (stmt);
2443 gimple_call_set_lhs (stmt, bounds);
2445 gimple_seq_add_stmt (&seq, stmt);
2447 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2449 if (dump_file && (dump_flags & TDF_DETAILS))
2451 fprintf (dump_file, "Generated bndldx for pointer ");
2452 print_generic_expr (dump_file, ptr, 0);
2453 fprintf (dump_file, ": ");
2454 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2457 return bounds;
2460 /* Build and return CALL_EXPR for bndstx builtin with specified
2461 arguments.  Note the builtin argument order is (ptr, bounds, addr).  */
2462 tree
2463 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2465 tree fn = build1 (ADDR_EXPR,
2466 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2467 chkp_bndstx_fndecl);
2468 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2469 fn, 3, ptr, bounds, addr);
2470 CALL_WITH_BOUNDS_P (call) = true;
2471 return call;
2474 /* Insert code to store BOUNDS for PTR stored by ADDR.
2475 New statements are inserted after position pointed
2476 by GSI.  */
2477 void
2478 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2479 gimple_stmt_iterator *gsi)
2481 gimple_seq seq;
2482 gimple *stmt;
2484 seq = NULL;
2486 addr = chkp_force_gimple_call_op (addr, &seq);
2487 ptr = chkp_force_gimple_call_op (ptr, &seq);
2489 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2490 chkp_mark_stmt (stmt);
2491 gimple_call_set_with_bounds (stmt, true);
2493 gimple_seq_add_stmt (&seq, stmt);
2495 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2497 if (dump_file && (dump_flags & TDF_DETAILS))
2499 fprintf (dump_file, "Generated bndstx for pointer store ");
2500 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2501 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2505 /* Compute bounds for pointer NODE which was assigned in
2506 assignment statement ASSIGN.  Return computed bounds.
BASE tracks the operand whose bounds were reused so that a copy can
be forced for abnormal SSA names (see the end of the function).  */
2507 static tree
2508 chkp_compute_bounds_for_assignment (tree node, gimple *assign)
2510 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
2511 tree rhs1 = gimple_assign_rhs1 (assign);
2512 tree bounds = NULL_TREE;
2513 gimple_stmt_iterator iter = gsi_for_stmt (assign);
2514 tree base = NULL;
2516 if (dump_file && (dump_flags & TDF_DETAILS))
2518 fprintf (dump_file, "Computing bounds for assignment: ");
2519 print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
2522 switch (rhs_code)
2524 case MEM_REF:
2525 case TARGET_MEM_REF:
2526 case COMPONENT_REF:
2527 case ARRAY_REF:
2528 /* We need to load bounds from the bounds table.  */
2529 bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
2530 break;
2532 case VAR_DECL:
2533 case SSA_NAME:
2534 case ADDR_EXPR:
2535 case POINTER_PLUS_EXPR:
2536 case NOP_EXPR:
2537 case CONVERT_EXPR:
2538 case INTEGER_CST:
2539 /* Bounds are just propagated from RHS.  */
2540 bounds = chkp_find_bounds (rhs1, &iter);
2541 base = rhs1;
2542 break;
2544 case VIEW_CONVERT_EXPR:
2545 /* Bounds are just propagated from RHS.  */
2546 bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
2547 break;
2549 case PARM_DECL:
2550 if (BOUNDED_P (rhs1))
2552 /* We need to load bounds from the bounds table.  */
2553 bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
2554 node, &iter);
2555 TREE_ADDRESSABLE (rhs1) = 1;
2557 else
2558 bounds = chkp_get_nonpointer_load_bounds ();
2559 break;
2561 case MINUS_EXPR:
2562 case PLUS_EXPR:
2563 case BIT_AND_EXPR:
2564 case BIT_IOR_EXPR:
2565 case BIT_XOR_EXPR:
2567 tree rhs2 = gimple_assign_rhs2 (assign);
2568 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2569 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2571 /* First we try to check types of operands.  If it
2572 does not help then look at bound values.
2574 If some bounds are incomplete and other are
2575 not proven to be valid (i.e. also incomplete
2576 or invalid because value is not pointer) then
2577 resulting value is incomplete and will be
2578 recomputed later in chkp_finish_incomplete_bounds.  */
2579 if (BOUNDED_P (rhs1)
2580 && !BOUNDED_P (rhs2))
2581 bounds = bnd1;
2582 else if (BOUNDED_P (rhs2)
2583 && !BOUNDED_P (rhs1)
2584 && rhs_code != MINUS_EXPR)
2585 bounds = bnd2;
2586 else if (chkp_incomplete_bounds (bnd1))
2587 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
2588 && !chkp_incomplete_bounds (bnd2))
2589 bounds = bnd2;
2590 else
2591 bounds = incomplete_bounds;
2592 else if (chkp_incomplete_bounds (bnd2))
2593 if (chkp_valid_bounds (bnd1)
2594 && !chkp_incomplete_bounds (bnd1))
2595 bounds = bnd1;
2596 else
2597 bounds = incomplete_bounds;
2598 else if (!chkp_valid_bounds (bnd1))
2599 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
2600 bounds = bnd2;
2601 else if (bnd2 == chkp_get_zero_bounds ())
2602 bounds = bnd2;
2603 else
2604 bounds = bnd1;
2605 else if (!chkp_valid_bounds (bnd2))
2606 bounds = bnd1;
2607 else
2608 /* Seems both operands may have valid bounds
2609 (e.g. pointer minus pointer).  In such case
2610 use default invalid op bounds.  */
2611 bounds = chkp_get_invalid_op_bounds ();
2613 base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
2615 break;
2617 case BIT_NOT_EXPR:
2618 case NEGATE_EXPR:
2619 case LSHIFT_EXPR:
2620 case RSHIFT_EXPR:
2621 case LROTATE_EXPR:
2622 case RROTATE_EXPR:
2623 case EQ_EXPR:
2624 case NE_EXPR:
2625 case LT_EXPR:
2626 case LE_EXPR:
2627 case GT_EXPR:
2628 case GE_EXPR:
2629 case MULT_EXPR:
2630 case RDIV_EXPR:
2631 case TRUNC_DIV_EXPR:
2632 case FLOOR_DIV_EXPR:
2633 case CEIL_DIV_EXPR:
2634 case ROUND_DIV_EXPR:
2635 case TRUNC_MOD_EXPR:
2636 case FLOOR_MOD_EXPR:
2637 case CEIL_MOD_EXPR:
2638 case ROUND_MOD_EXPR:
2639 case EXACT_DIV_EXPR:
2640 case FIX_TRUNC_EXPR:
2641 case FLOAT_EXPR:
2642 case REALPART_EXPR:
2643 case IMAGPART_EXPR:
2644 /* No valid bounds may be produced by these exprs.  */
2645 bounds = chkp_get_invalid_op_bounds ();
2646 break;
2648 case COND_EXPR:
2650 tree val1 = gimple_assign_rhs2 (assign);
2651 tree val2 = gimple_assign_rhs3 (assign);
2652 tree bnd1 = chkp_find_bounds (val1, &iter);
2653 tree bnd2 = chkp_find_bounds (val2, &iter);
2654 gimple *stmt;
2656 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2657 bounds = incomplete_bounds;
2658 else if (bnd1 == bnd2)
2659 bounds = bnd1;
2660 else
2662 rhs1 = unshare_expr (rhs1);
2664 bounds = chkp_get_tmp_reg (assign);
2665 stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
2666 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2668 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2669 chkp_mark_invalid_bounds (bounds);
2672 break;
2674 case MAX_EXPR:
2675 case MIN_EXPR:
2677 tree rhs2 = gimple_assign_rhs2 (assign);
2678 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2679 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2681 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2682 bounds = incomplete_bounds;
2683 else if (bnd1 == bnd2)
2684 bounds = bnd1;
2685 else
2687 gimple *stmt;
2688 tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
2689 boolean_type_node, rhs1, rhs2);
2690 bounds = chkp_get_tmp_reg (assign);
2691 stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);
2693 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2695 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2696 chkp_mark_invalid_bounds (bounds);
2699 break;
2701 default:
2702 bounds = chkp_get_zero_bounds ();
2703 warning (0, "pointer bounds were lost due to unexpected expression %s",
2704 get_tree_code_name (rhs_code));
2707 gcc_assert (bounds);
2709 /* We may reuse bounds of other pointer we copy/modify.  But it is not
2710 allowed for abnormal ssa names.  If we produced a pointer using
2711 abnormal ssa name, we better make a bounds copy to avoid coalescing
2712 issues.  */
2713 if (base
2714 && TREE_CODE (base) == SSA_NAME
2715 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
2717 gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
2718 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2719 bounds = gimple_assign_lhs (stmt);
2722 if (node)
2723 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2725 return bounds;
2728 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2730 There are just a few statement codes allowed: NOP (for default ssa names),
2731 ASSIGN, CALL, PHI, ASM.
2733 Return computed bounds.  For PHIs, the returned bounds are registered
as incomplete and recomputed once all phi args are known.  */
2734 static tree
2735 chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
2736 gphi_iterator *iter)
2738 tree var, bounds;
2739 enum gimple_code code = gimple_code (def_stmt);
2740 gphi *stmt;
2742 if (dump_file && (dump_flags & TDF_DETAILS))
2744 fprintf (dump_file, "Searching for bounds for node: ");
2745 print_generic_expr (dump_file, node, 0);
2747 fprintf (dump_file, " using its definition: ");
2748 print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2751 switch (code)
2753 case GIMPLE_NOP:
2754 var = SSA_NAME_VAR (node);
2755 switch (TREE_CODE (var))
2757 case PARM_DECL:
2758 bounds = chkp_get_bound_for_parm (node);
2759 break;
2761 case VAR_DECL:
2762 /* For uninitialized pointers use none bounds.  */
2763 bounds = chkp_get_none_bounds ();
2764 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2765 break;
2767 case RESULT_DECL:
2769 tree base_type;
2771 gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2773 base_type = TREE_TYPE (TREE_TYPE (node));
2775 gcc_assert (TYPE_SIZE (base_type)
2776 && TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2777 && tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2779 bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2780 NULL, false);
2781 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2783 break;
2785 default:
2786 if (dump_file && (dump_flags & TDF_DETAILS))
2788 fprintf (dump_file, "Unexpected var with no definition\n");
2789 print_generic_expr (dump_file, var, 0);
2791 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2792 get_tree_code_name (TREE_CODE (var)));
2794 break;
2796 case GIMPLE_ASSIGN:
2797 bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2798 break;
2800 case GIMPLE_CALL:
2801 bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
2802 break;
2804 case GIMPLE_PHI:
2805 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
2806 if (SSA_NAME_VAR (node))
2807 var = chkp_get_bounds_var (SSA_NAME_VAR (node));
2808 else
2809 var = make_temp_ssa_name (pointer_bounds_type_node,
2810 NULL,
2811 CHKP_BOUND_TMP_NAME);
2812 else
2813 var = chkp_get_tmp_var ();
2814 stmt = create_phi_node (var, gimple_bb (def_stmt));
2815 bounds = gimple_phi_result (stmt);
2816 *iter = gsi_for_phi (stmt);
2818 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2820 /* Created bounds do not have all phi args computed and
2821 therefore we do not know if there is a valid source
2822 of bounds for that node.  Therefore we mark bounds
2823 as incomplete and then recompute them when all phi
2824 args are computed.  */
2825 chkp_register_incomplete_bounds (bounds, node);
2826 break;
2828 case GIMPLE_ASM:
2829 bounds = chkp_get_zero_bounds ();
2830 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2831 break;
2833 default:
2834 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2835 gimple_code_name[code]);
2838 return bounds;
2841 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE.  */
2842 tree
2843 chkp_build_make_bounds_call (tree lower_bound, tree size)
2845 tree call = build1 (ADDR_EXPR,
2846 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2847 chkp_bndmk_fndecl);
2848 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2849 call, 2, lower_bound, size);
2852 /* Create static bounds var of specified OBJ which is
2853 either VAR_DECL or string constant.  Results are cached in
chkp_static_var_bounds (keyed by assembler name for vars, by the
tree itself for string constants) so each object gets one var.  */
2854 static tree
2855 chkp_make_static_bounds (tree obj)
2857 static int string_id = 1;
2858 static int var_id = 1;
2859 tree *slot;
2860 const char *var_name;
2861 char *bnd_var_name;
2862 tree bnd_var;
2864 /* First check if we already have required var.  */
2865 if (chkp_static_var_bounds)
2867 /* For vars we use assembler name as a key in
2868 chkp_static_var_bounds map.  It allows to
2869 avoid duplicating bound vars for decls
2870 sharing assembler name.  */
2871 if (TREE_CODE (obj) == VAR_DECL)
2873 tree name = DECL_ASSEMBLER_NAME (obj);
2874 slot = chkp_static_var_bounds->get (name);
2875 if (slot)
2876 return *slot;
2878 else
2880 slot = chkp_static_var_bounds->get (obj);
2881 if (slot)
2882 return *slot;
2886 /* Build decl for bounds var.  */
2887 if (TREE_CODE (obj) == VAR_DECL)
2889 if (DECL_IGNORED_P (obj))
2891 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2892 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2894 else
2896 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2898 /* For hidden symbols we want to skip first '*' char.  */
2899 if (*var_name == '*')
2900 var_name++;
2902 bnd_var_name = (char *) xmalloc (strlen (var_name)
2903 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2904 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2905 strcat (bnd_var_name, var_name);
2908 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2909 get_identifier (bnd_var_name),
2910 pointer_bounds_type_node);
2912 /* Address of the obj will be used as lower bound.  */
2913 TREE_ADDRESSABLE (obj) = 1;
2915 else
2917 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2918 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2920 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2921 get_identifier (bnd_var_name),
2922 pointer_bounds_type_node);
/* NOTE(review): bnd_var_name was xmalloc'ed and is never freed —
get_identifier copies the string, so freeing it here would be safe;
confirm whether the leak is intentional.  */
2925 TREE_PUBLIC (bnd_var) = 0;
2926 TREE_USED (bnd_var) = 1;
2927 TREE_READONLY (bnd_var) = 0;
2928 TREE_STATIC (bnd_var) = 1;
2929 TREE_ADDRESSABLE (bnd_var) = 0;
2930 DECL_ARTIFICIAL (bnd_var) = 1;
2931 DECL_COMMON (bnd_var) = 1;
2932 DECL_COMDAT (bnd_var) = 1;
2933 DECL_READ_P (bnd_var) = 1;
2934 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2935 /* Force output similar to constant bounds.
2936 See chkp_make_static_const_bounds.  */
2937 varpool_node::get_create (bnd_var)->force_output = 1;
2938 /* Mark symbol as requiring bounds initialization.  */
2939 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2940 varpool_node::finalize_decl (bnd_var);
2942 /* Add created var to the map to use it for other references
2943 to obj.  */
2944 if (!chkp_static_var_bounds)
2945 chkp_static_var_bounds = new hash_map<tree, tree>;
2947 if (TREE_CODE (obj) == VAR_DECL)
2949 tree name = DECL_ASSEMBLER_NAME (obj);
2950 chkp_static_var_bounds->put (name, bnd_var);
2952 else
2953 chkp_static_var_bounds->put (obj, bnd_var);
2955 return bnd_var;
2958 /* When var has incomplete type we cannot get size to
2959 compute its bounds.  In such cases we use checker
2960 builtin call which determines object size at runtime.  */
2961 static tree
2962 chkp_generate_extern_var_bounds (tree var)
2964 tree bounds, size_reloc, lb, size, max_size, cond;
2965 gimple_stmt_iterator gsi;
2966 gimple_seq seq = NULL;
2967 gimple *stmt;
2969 /* If instrumentation is not enabled for vars having
2970 incomplete type then just return zero bounds to avoid
2971 checks for this var.  */
2972 if (!flag_chkp_incomplete_type)
2973 return chkp_get_zero_bounds ();
2975 if (dump_file && (dump_flags & TDF_DETAILS))
2977 fprintf (dump_file, "Generating bounds for extern symbol '");
2978 print_generic_expr (dump_file, var, 0);
2979 fprintf (dump_file, "'\n");
2982 stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
2984 size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
2985 gimple_call_set_lhs (stmt, size_reloc);
2987 gimple_seq_add_stmt (&seq, stmt);
2989 lb = chkp_build_addr_expr (var);
2990 size = make_ssa_name (chkp_get_size_tmp_var ());
2992 if (flag_chkp_zero_dynamic_size_as_infinite)
2994 /* We should check that size relocation was resolved.
2995 If it was not then use maximum possible size for the var.
Maximum size is computed as (0 - lb), i.e. up to the end of
the address space starting from the var's address.  */
2996 max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
2997 fold_convert (chkp_uintptr_type, lb));
2998 max_size = chkp_force_gimple_call_op (max_size, &seq);
3000 cond = build2 (NE_EXPR, boolean_type_node,
3001 size_reloc, integer_zero_node);
3002 stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
3003 gimple_seq_add_stmt (&seq, stmt);
3005 else
3007 stmt = gimple_build_assign (size, size_reloc);
3008 gimple_seq_add_stmt (&seq, stmt);
3011 gsi = gsi_start_bb (chkp_get_entry_block ());
3012 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3014 bounds = chkp_make_bounds (lb, size, &gsi, true);
3016 return bounds;
3019 /* Return 1 if TYPE has fields with zero size or fields
3020 marked with the bnd_variable_size attribute, recursing into
nested record/union field types.  For non-record types, return 1
when the type size is unknown, non-constant or zero.  */
3021 bool
3022 chkp_variable_size_type (tree type)
3024 bool res = false;
3025 tree field;
3027 if (RECORD_OR_UNION_TYPE_P (type))
3028 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3030 if (TREE_CODE (field) == FIELD_DECL)
3031 res = res
3032 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3033 || chkp_variable_size_type (TREE_TYPE (field));
3035 else
3036 res = !TYPE_SIZE (type)
3037 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3038 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3040 return res;
3043 /* Compute and return bounds for address of DECL which is
3044 one of VAR_DECL, PARM_DECL, RESULT_DECL.  */
3045 static tree
3046 chkp_get_bounds_for_decl_addr (tree decl)
3048 tree bounds;
3050 gcc_assert (TREE_CODE (decl) == VAR_DECL
3051 || TREE_CODE (decl) == PARM_DECL
3052 || TREE_CODE (decl) == RESULT_DECL);
3054 bounds = chkp_get_registered_addr_bounds (decl);
3056 if (bounds)
3057 return bounds;
3059 if (dump_file && (dump_flags & TDF_DETAILS))
3061 fprintf (dump_file, "Building bounds for address of decl ");
3062 print_generic_expr (dump_file, decl, 0);
3063 fprintf (dump_file, "\n");
3066 /* Use zero bounds if size is unknown and checks for
3067 unknown sizes are restricted.  */
3068 if ((!DECL_SIZE (decl)
3069 || (chkp_variable_size_type (TREE_TYPE (decl))
3070 && (TREE_STATIC (decl)
3071 || DECL_EXTERNAL (decl)
3072 || TREE_PUBLIC (decl))))
3073 && !flag_chkp_incomplete_type)
3074 return chkp_get_zero_bounds ();
3076 if (flag_chkp_use_static_bounds
3077 && TREE_CODE (decl) == VAR_DECL
3078 && (TREE_STATIC (decl)
3079 || DECL_EXTERNAL (decl)
3080 || TREE_PUBLIC (decl))
3081 && !DECL_THREAD_LOCAL_P (decl))
3083 tree bnd_var = chkp_make_static_bounds (decl);
3084 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3085 gimple *stmt;
3087 bounds = chkp_get_tmp_reg (NULL);
3088 stmt = gimple_build_assign (bounds, bnd_var);
3089 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3091 else if (!DECL_SIZE (decl)
3092 || (chkp_variable_size_type (TREE_TYPE (decl))
3093 && (TREE_STATIC (decl)
3094 || DECL_EXTERNAL (decl)
3095 || TREE_PUBLIC (decl))))
3097 gcc_assert (TREE_CODE (decl) == VAR_DECL);
3098 bounds = chkp_generate_extern_var_bounds (decl);
3100 else
3102 tree lb = chkp_build_addr_expr (decl);
3103 bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3106 return bounds;
3109 /* Compute and return bounds for constant string CST (a STRING_CST).
Uses a static bounds var when static bounds are enabled; otherwise
builds bounds from the string's address and length.  */
3110 static tree
3111 chkp_get_bounds_for_string_cst (tree cst)
3113 tree bounds;
3114 tree lb;
3115 tree size;
3117 gcc_assert (TREE_CODE (cst) == STRING_CST);
3119 bounds = chkp_get_registered_bounds (cst);
3121 if (bounds)
3122 return bounds;
3124 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3125 || flag_chkp_use_static_const_bounds > 0)
3127 tree bnd_var = chkp_make_static_bounds (cst);
3128 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3129 gimple *stmt;
3131 bounds = chkp_get_tmp_reg (NULL);
3132 stmt = gimple_build_assign (bounds, bnd_var);
3133 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3135 else
3137 lb = chkp_build_addr_expr (cst);
3138 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3139 bounds = chkp_make_bounds (lb, size, NULL, false);
3142 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3144 return bounds;
3147 /* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
3148 return the result.  If ITER is not NULL then code is inserted
3149 before position pointed by ITER.  Otherwise code is added to
3150 entry block.  NULL or zero bounds on either side short-circuit
to the other operand without emitting code.  */
3151 static tree
3152 chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
3154 if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
3155 return bounds2 ? bounds2 : bounds1;
3156 else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
3157 return bounds1;
3158 else
3160 gimple_seq seq;
3161 gimple *stmt;
3162 tree bounds;
3164 seq = NULL;
3166 stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
3167 chkp_mark_stmt (stmt);
3169 bounds = chkp_get_tmp_reg (stmt);
3170 gimple_call_set_lhs (stmt, bounds);
3172 gimple_seq_add_stmt (&seq, stmt);
3174 /* We are probably doing narrowing for constant expression.
3175 In such case iter may be undefined.  */
3176 if (!iter)
3178 gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
3179 iter = &gsi;
3180 gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
3182 else
3183 gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
3185 if (dump_file && (dump_flags & TDF_DETAILS))
3187 fprintf (dump_file, "Bounds intersection: ");
3188 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
3189 fprintf (dump_file, " inserted before statement: ");
3190 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
3191 TDF_VOPS|TDF_MEMSYMS);
3194 return bounds;
3198 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3199 and 0 otherwise.  Narrowing requires a nonzero constant field size,
constant offsets, and no variable-size marking on the field.  */
3200 static bool
3201 chkp_may_narrow_to_field (tree field)
3203 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3204 && tree_to_uhwi (DECL_SIZE (field)) != 0
3205 && (!DECL_FIELD_OFFSET (field)
3206 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3207 && (!DECL_FIELD_BIT_OFFSET (field)
3208 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3209 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3210 && !chkp_variable_size_type (TREE_TYPE (field));
3213 /* Return 1 if bounds for FIELD should be narrowed to
3214 field's own size.  */
3215 static bool
3216 chkp_narrow_bounds_for_field (tree field)
3218 HOST_WIDE_INT offs;
3219 HOST_WIDE_INT bit_offs;
3221 if (!chkp_may_narrow_to_field (field))
3222 return false;
3224 /* Accesses to compiler generated fields should not cause
3225 bounds narrowing.  */
3226 if (DECL_ARTIFICIAL (field))
3227 return false;
3229 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3230 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3232 return (flag_chkp_narrow_bounds
3233 && (flag_chkp_first_field_has_own_bounds
3234 || offs
3235 || bit_offs));
3238 /* Perform narrowing for BOUNDS using bounds computed for field
3239 access COMPONENT.  ITER meaning is the same as for
3240 chkp_intersect_bounds.  */
3241 static tree
3242 chkp_narrow_bounds_to_field (tree bounds, tree component,
3243 gimple_stmt_iterator *iter)
3245 tree field = TREE_OPERAND (component, 1);
3246 tree size = DECL_SIZE_UNIT (field);
3247 tree field_ptr = chkp_build_addr_expr (component);
3248 tree field_bounds;
3250 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3252 return chkp_intersect_bounds (field_bounds, bounds, iter);
3255 /* Parse field or array access NODE.
3257 PTR output parameter holds a pointer to the outermost
3258 object.
3260 BITFIELD output parameter is set to 1 if bitfield is
3261 accessed and to 0 otherwise.  If it is 1 then ELT holds
3262 outer component for accessed bit field.
3264 SAFE output parameter is set to 1 if access is safe and
3265 checks are not required.
3267 BOUNDS output parameter holds bounds to be used to check
3268 access (may be NULL).
3270 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3271 innermost accessed component.  */
3272 static void
3273 chkp_parse_array_and_component_ref (tree node, tree *ptr,
3274 tree *elt, bool *safe,
3275 bool *bitfield,
3276 tree *bounds,
3277 gimple_stmt_iterator *iter,
3278 bool innermost_bounds)
3280 tree comp_to_narrow = NULL_TREE;
3281 tree last_comp = NULL_TREE;
3282 bool array_ref_found = false;
3283 tree *nodes;
3284 tree var;
3285 int len;
3286 int i;
3288 /* Compute tree height for expression.  */
3289 var = node;
3290 len = 1;
3291 while (TREE_CODE (var) == COMPONENT_REF
3292 || TREE_CODE (var) == ARRAY_REF
3293 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
3295 var = TREE_OPERAND (var, 0);
3296 len++;
3299 gcc_assert (len > 1);
3301 /* It is more convenient for us to scan left-to-right,
3302 so walk tree again and put all nodes into the nodes vector
3303 in reversed order.  */
3304 nodes = XALLOCAVEC (tree, len);
3305 nodes[len - 1] = node;
3306 for (i = len - 2; i >= 0; i--)
3307 nodes[i] = TREE_OPERAND (nodes[i + 1], 0);
3309 if (bounds)
3310 *bounds = NULL;
3311 *safe = true;
3312 *bitfield = (TREE_CODE (node) == COMPONENT_REF
3313 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
3314 /* To get bitfield address we will need outer element.  */
3315 if (*bitfield)
3316 *elt = nodes[len - 2];
3317 else
3318 *elt = NULL_TREE;
3320 /* If we have indirection in expression then compute
3321 outermost structure bounds.  Computed bounds may be
3322 narrowed later.  */
3323 if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
3325 *safe = false;
3326 *ptr = TREE_OPERAND (nodes[0], 0);
3327 if (bounds)
3328 *bounds = chkp_find_bounds (*ptr, iter);
3330 else
3332 gcc_assert (TREE_CODE (var) == VAR_DECL
3333 || TREE_CODE (var) == PARM_DECL
3334 || TREE_CODE (var) == RESULT_DECL
3335 || TREE_CODE (var) == STRING_CST
3336 || TREE_CODE (var) == SSA_NAME);
3338 *ptr = chkp_build_addr_expr (var);
3341 /* In this loop we are trying to find a field access
3342 requiring narrowing.  There are two simple rules
3343 for search:
3344 1.  Leftmost array_ref is chosen if any.
3345 2.  Rightmost suitable component_ref is chosen if innermost
3346 bounds are required and no array_ref exists.  */
3347 for (i = 1; i < len; i++)
3349 var = nodes[i];
3351 if (TREE_CODE (var) == ARRAY_REF)
3353 *safe = false;
3354 array_ref_found = true;
3355 if (flag_chkp_narrow_bounds
3356 && !flag_chkp_narrow_to_innermost_arrray
3357 && (!last_comp
3358 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
3360 comp_to_narrow = last_comp;
3361 break;
3364 else if (TREE_CODE (var) == COMPONENT_REF)
3366 tree field = TREE_OPERAND (var, 1);
3368 if (innermost_bounds
3369 && !array_ref_found
3370 && chkp_narrow_bounds_for_field (field))
3371 comp_to_narrow = var;
3372 last_comp = var;
3374 if (flag_chkp_narrow_bounds
3375 && flag_chkp_narrow_to_innermost_arrray
3376 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
3378 if (bounds)
3379 *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
3380 comp_to_narrow = NULL;
3383 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
3384 /* Nothing to do for it.  */
3386 else
3387 gcc_unreachable ();
3390 if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
3391 *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);
3393 if (innermost_bounds && bounds && !*bounds)
3394 *bounds = chkp_find_bounds (*ptr, iter);
/* Compute and return bounds for address of OBJ.
   Generated statements are inserted at ITER.  Results are memoized in
   the address-bounds map, so repeated queries for the same OBJ are
   cheap.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  /* Already computed earlier — reuse.  */
  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Ask for innermost bounds (last arg true), so BOUNDS is
	   guaranteed to be set on return.  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Code addresses are never dereferenced as data; use zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Bounds of a complex part are the bounds of the whole complex.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  /* Memoize for future queries.  */
  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3465 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3466 to compute bounds if required. Computed bounds should be available at
3467 position pointed by ITER.
3469 If PTR_SRC is NULL_TREE then pointer definition is identified.
3471 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3472 PTR. If PTR is a any memory reference then ITER points to a statement
3473 after which bndldx will be inserterd. In both cases ITER will be updated
3474 to point to the inserted bndldx statement. */
3476 static tree
3477 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3479 tree addr = NULL_TREE;
3480 tree bounds = NULL_TREE;
3482 if (!ptr_src)
3483 ptr_src = ptr;
3485 bounds = chkp_get_registered_bounds (ptr_src);
3487 if (bounds)
3488 return bounds;
3490 switch (TREE_CODE (ptr_src))
3492 case MEM_REF:
3493 case VAR_DECL:
3494 if (BOUNDED_P (ptr_src))
3495 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3496 bounds = chkp_get_zero_bounds ();
3497 else
3499 addr = chkp_build_addr_expr (ptr_src);
3500 bounds = chkp_build_bndldx (addr, ptr, iter);
3502 else
3503 bounds = chkp_get_nonpointer_load_bounds ();
3504 break;
3506 case ARRAY_REF:
3507 case COMPONENT_REF:
3508 addr = get_base_address (ptr_src);
3509 if (DECL_P (addr)
3510 || TREE_CODE (addr) == MEM_REF
3511 || TREE_CODE (addr) == TARGET_MEM_REF)
3513 if (BOUNDED_P (ptr_src))
3514 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3515 bounds = chkp_get_zero_bounds ();
3516 else
3518 addr = chkp_build_addr_expr (ptr_src);
3519 bounds = chkp_build_bndldx (addr, ptr, iter);
3521 else
3522 bounds = chkp_get_nonpointer_load_bounds ();
3524 else
3526 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3527 bounds = chkp_find_bounds (addr, iter);
3529 break;
3531 case PARM_DECL:
3532 gcc_unreachable ();
3533 bounds = chkp_get_bound_for_parm (ptr_src);
3534 break;
3536 case TARGET_MEM_REF:
3537 addr = chkp_build_addr_expr (ptr_src);
3538 bounds = chkp_build_bndldx (addr, ptr, iter);
3539 break;
3541 case SSA_NAME:
3542 bounds = chkp_get_registered_bounds (ptr_src);
3543 if (!bounds)
3545 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3546 gphi_iterator phi_iter;
3548 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3550 gcc_assert (bounds);
3552 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3554 unsigned i;
3556 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3558 tree arg = gimple_phi_arg_def (def_phi, i);
3559 tree arg_bnd;
3560 gphi *phi_bnd;
3562 arg_bnd = chkp_find_bounds (arg, NULL);
3564 /* chkp_get_bounds_by_definition created new phi
3565 statement and phi_iter points to it.
3567 Previous call to chkp_find_bounds could create
3568 new basic block and therefore change phi statement
3569 phi_iter points to. */
3570 phi_bnd = phi_iter.phi ();
3572 add_phi_arg (phi_bnd, arg_bnd,
3573 gimple_phi_arg_edge (def_phi, i),
3574 UNKNOWN_LOCATION);
3577 /* If all bound phi nodes have their arg computed
3578 then we may finish its computation. See
3579 chkp_finish_incomplete_bounds for more details. */
3580 if (chkp_may_finish_incomplete_bounds ())
3581 chkp_finish_incomplete_bounds ();
3584 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3585 || chkp_incomplete_bounds (bounds));
3587 break;
3589 case ADDR_EXPR:
3590 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3591 break;
3593 case INTEGER_CST:
3594 if (integer_zerop (ptr_src))
3595 bounds = chkp_get_none_bounds ();
3596 else
3597 bounds = chkp_get_invalid_op_bounds ();
3598 break;
3600 default:
3601 if (dump_file && (dump_flags & TDF_DETAILS))
3603 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3604 get_tree_code_name (TREE_CODE (ptr_src)));
3605 print_node (dump_file, "", ptr_src, 0);
3607 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3608 get_tree_code_name (TREE_CODE (ptr_src)));
3611 if (!bounds)
3613 if (dump_file && (dump_flags & TDF_DETAILS))
3615 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3616 print_node (dump_file, "", ptr_src, 0);
3618 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3621 return bounds;
3624 /* Normal case for bounds search without forced narrowing. */
3625 static tree
3626 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3628 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3631 /* Search bounds for pointer PTR loaded from PTR_SRC
3632 by statement *ITER points to. */
3633 static tree
3634 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3636 return chkp_find_bounds_1 (ptr, ptr_src, iter);
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for a copied object.  ARG is passed through to
   HANDLER unchanged.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    /* Base case: a single pointer — hand it to the handler.  */
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  /* Recurse only into fields that actually contain pointers.  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	/* Plain struct copy: walk matching fields on both sides.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      /* CUR tracks the implicit constructor index: an element without
	 an explicit purpose follows the previous one.  */
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  /* [lo .. hi] = val: every element gets the same VAL.  */
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		   get_tree_code_name (TREE_CODE (type)));
}
3738 /* Add code to copy bounds for assignment of RHS to LHS.
3739 ARG is an iterator pointing ne code position. */
3740 static void
3741 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3743 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3744 tree bounds = chkp_find_bounds (rhs, iter);
3745 tree addr = chkp_build_addr_expr(lhs);
3747 chkp_build_bndstx (addr, rhs, bounds, iter);
/* Emit static bound initializers and size vars.  Called once per
   translation unit at the end of compilation: builds 'P' constructors
   initializing the bounds of statically initialized pointers and 'B'
   constructors for static bounds variables.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* Flush the constructor once the per-ctor statement budget is
	   exhausted, then start a fresh list.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  /* Emit whatever is left over.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	/* Static bounds var initializers are always &var.  */
	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
3818 /* An instrumentation function which is called for each statement
3819 having memory access we want to instrument. It inserts check
3820 code and bounds copy code.
3822 ITER points to statement to instrument.
3824 NODE holds memory access in statement to check.
3826 LOC holds the location information for statement.
3828 DIRFLAGS determines whether access is read or write.
3830 ACCESS_OFFS should be added to address used in NODE
3831 before check.
3833 ACCESS_SIZE holds size of checked access.
3835 SAFE indicates if NODE access is safe and should not be
3836 checked. */
3837 static void
3838 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3839 location_t loc, tree dirflag,
3840 tree access_offs, tree access_size,
3841 bool safe)
3843 tree node_type = TREE_TYPE (node);
3844 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3845 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3846 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3847 tree ptr = NULL_TREE; /* a pointer used for dereference */
3848 tree bounds = NULL_TREE;
3850 /* We do not need instrumentation for clobbers. */
3851 if (dirflag == integer_one_node
3852 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3853 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3854 return;
3856 switch (TREE_CODE (node))
3858 case ARRAY_REF:
3859 case COMPONENT_REF:
3861 bool bitfield;
3862 tree elt;
3864 if (safe)
3866 /* We are not going to generate any checks, so do not
3867 generate bounds as well. */
3868 addr_first = chkp_build_addr_expr (node);
3869 break;
3872 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3873 &bitfield, &bounds, iter, false);
3875 /* Break if there is no dereference and operation is safe. */
3877 if (bitfield)
3879 tree field = TREE_OPERAND (node, 1);
3881 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3882 size = DECL_SIZE_UNIT (field);
3884 if (elt)
3885 elt = chkp_build_addr_expr (elt);
3886 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3887 addr_first = fold_build_pointer_plus_loc (loc,
3888 addr_first,
3889 byte_position (field));
3891 else
3892 addr_first = chkp_build_addr_expr (node);
3894 break;
3896 case INDIRECT_REF:
3897 ptr = TREE_OPERAND (node, 0);
3898 addr_first = ptr;
3899 break;
3901 case MEM_REF:
3902 ptr = TREE_OPERAND (node, 0);
3903 addr_first = chkp_build_addr_expr (node);
3904 break;
3906 case TARGET_MEM_REF:
3907 ptr = TMR_BASE (node);
3908 addr_first = chkp_build_addr_expr (node);
3909 break;
3911 case ARRAY_RANGE_REF:
3912 printf("ARRAY_RANGE_REF\n");
3913 debug_gimple_stmt(gsi_stmt(*iter));
3914 debug_tree(node);
3915 gcc_unreachable ();
3916 break;
3918 case BIT_FIELD_REF:
3920 tree offs, rem, bpu;
3922 gcc_assert (!access_offs);
3923 gcc_assert (!access_size);
3925 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3926 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3927 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3928 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3930 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3931 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3932 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3933 size = fold_convert (size_type_node, size);
3935 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3936 dirflag, offs, size, safe);
3937 return;
3939 break;
3941 case VAR_DECL:
3942 case RESULT_DECL:
3943 case PARM_DECL:
3944 if (dirflag != integer_one_node
3945 || DECL_REGISTER (node))
3946 return;
3948 safe = true;
3949 addr_first = chkp_build_addr_expr (node);
3950 break;
3952 default:
3953 return;
3956 /* If addr_last was not computed then use (addr_first + size - 1)
3957 expression to compute it. */
3958 if (!addr_last)
3960 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3961 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3964 /* Shift both first_addr and last_addr by access_offs if specified. */
3965 if (access_offs)
3967 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3968 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3971 /* Generate bndcl/bndcu checks if memory access is not safe. */
3972 if (!safe)
3974 gimple_stmt_iterator stmt_iter = *iter;
3976 if (!bounds)
3977 bounds = chkp_find_bounds (ptr, iter);
3979 chkp_check_mem_access (addr_first, addr_last, bounds,
3980 stmt_iter, loc, dirflag);
3983 /* We need to store bounds in case pointer is stored. */
3984 if (dirflag == integer_one_node
3985 && chkp_type_has_pointer (node_type)
3986 && flag_chkp_store_bounds)
3988 gimple *stmt = gsi_stmt (*iter);
3989 tree rhs1 = gimple_assign_rhs1 (stmt);
3990 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3992 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3993 chkp_walk_pointer_assignments (node, rhs1, iter,
3994 chkp_copy_bounds_for_elem);
3995 else
3997 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3998 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.  Also creates cgraph edges
   for every bndldx/bndstx call generated in the process.  */
void
chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.  */
  /* The walk above left ITER past the inserted statements; scan
     backwards until we are back at ASSIGN.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple *stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  /* Only checker runtime calls can appear here.  */
	  gcc_assert (fndecl == chkp_bndstx_fndecl
		      || fndecl == chkp_bndldx_fndecl
		      || fndecl == chkp_ret_bnd_fndecl);

	  /* Inherit profile count/frequency from the inlined edge.  */
	  new_edge = edge->caller->create_edge (callee,
						as_a <gcall *> (stmt),
						edge->count,
						edge->frequency);
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      gsi_prev (&iter);
    }
}
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws: statements inserted after a block-ending statement are
   moved onto the fallthru edge.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Anything following a block-ending statement is misplaced.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* Move all trailing statements onto the fallthru edge.  */
	    while (!gsi_end_p (next))
	      {
		gimple *next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
4100 /* Walker callback for chkp_replace_function_pointers. Replaces
4101 function pointer in the specified operand with pointer to the
4102 instrumented function version. */
4103 static tree
4104 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4105 void *data ATTRIBUTE_UNUSED)
4107 if (TREE_CODE (*op) == FUNCTION_DECL
4108 && chkp_instrumentable_p (*op)
4109 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4110 /* For builtins we replace pointers only for selected
4111 function and functions having definitions. */
4112 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4113 && (chkp_instrument_normal_builtin (*op)
4114 || gimple_has_body_p (*op)))))
4116 struct cgraph_node *node = cgraph_node::get_create (*op);
4117 struct cgraph_node *clone = NULL;
4119 if (!node->instrumentation_clone)
4120 clone = chkp_maybe_create_clone (*op);
4122 if (clone)
4123 *op = clone->decl;
4124 *walk_subtrees = 0;
4127 return NULL;
4130 /* This function searches for function pointers in statement
4131 pointed by GSI and replaces them with pointers to instrumented
4132 function versions. */
4133 static void
4134 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4136 gimple *stmt = gsi_stmt (*gsi);
4137 /* For calls we want to walk call args only. */
4138 if (gimple_code (stmt) == GIMPLE_CALL)
4140 unsigned i;
4141 for (i = 0; i < gimple_call_num_args (stmt); i++)
4142 walk_tree (gimple_call_arg_ptr (stmt, i),
4143 chkp_replace_function_pointer, NULL, NULL);
4145 else
4146 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
/* This function instruments all statements working with memory,
   calls and rets.

   It also removes excess statements from static initializers.  */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* Inside a checker ctor every access is considered safe.  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      /* Remember the successor now: instrumentation may split BB.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple *s = gsi_stmt (i);

	  /* Skip statement marked to not be instrumented.  */
	  if (chkp_marked_stmt_p (s))
	    {
	      gsi_next (&i);
	      continue;
	    }

	  chkp_replace_function_pointers (&i);

	  switch (gimple_code (s))
	    {
	    case GIMPLE_ASSIGN:
	      /* Check the store (lhs) and the loads (rhs operands).  */
	      chkp_process_stmt (&i, gimple_assign_lhs (s),
				 gimple_location (s), integer_one_node,
				 NULL_TREE, NULL_TREE, safe);
	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
				 gimple_location (s), integer_zero_node,
				 NULL_TREE, NULL_TREE, safe);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
				   gimple_location (s), integer_zero_node,
				   NULL_TREE, NULL_TREE, safe);
	      break;

	    case GIMPLE_RETURN:
	      {
		greturn *r = as_a <greturn *> (s);
		if (gimple_return_retval (r) != NULL_TREE)
		  {
		    chkp_process_stmt (&i, gimple_return_retval (r),
				       gimple_location (r),
				       integer_zero_node,
				       NULL_TREE, NULL_TREE, safe);

		    /* Additionally we need to add bounds
		       to return statement.  */
		    chkp_add_bounds_to_ret_stmt (&i);
		  }
	      }
	      break;

	    case GIMPLE_CALL:
	      chkp_add_bounds_to_call_stmt (&i);
	      break;

	    default:
	      ;
	    }

	  gsi_next (&i);

	  /* We do not need any actual pointer stores in checker
	     static initializer.  */
	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
	      && gimple_code (s) == GIMPLE_ASSIGN
	      && gimple_store_p (s))
	    {
	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
	      gsi_remove (&del_iter, true);
	      unlink_stmt_vdef (s);
	      release_defs(s);
	    }
	}
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
	{
	  if (BOUNDED_P (arg))
	    {
	      tree bounds = chkp_get_next_bounds_parm (arg);
	      tree def_ptr = ssa_default_def (cfun, arg);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      chkp_build_bndstx (chkp_build_addr_expr (arg),
				 def_ptr ? def_ptr : arg,
				 bounds, &iter);

	      /* Skip bounds arg.  */
	      arg = TREE_CHAIN (arg);
	    }
	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
	    {
	      /* Aggregate param: store bounds for each contained
		 pointer slot.  */
	      tree orig_arg = arg;
	      bitmap slots = BITMAP_ALLOC (NULL);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      bitmap_iterator bi;
	      unsigned bnd_no;

	      chkp_find_bound_slots (TREE_TYPE (arg), slots);

	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
		{
		  tree bounds = chkp_get_next_bounds_parm (arg);
		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
		  tree addr = chkp_build_addr_expr (orig_arg);
		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
				     build_int_cst (ptr_type_node, offs));
		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
				     bounds, &iter);

		  /* Each slot consumes one extra bounds param.  */
		  arg = DECL_CHAIN (arg);
		}
	      BITMAP_FREE (slots);
	    }
	}
}
4285 /* Find init/null/copy_ptr_bounds calls and replace them
4286 with assignments. It should allow better code
4287 optimization. */
4289 static void
4290 chkp_remove_useless_builtins ()
4292 basic_block bb;
4293 gimple_stmt_iterator gsi;
4295 FOR_EACH_BB_FN (bb, cfun)
4297 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4299 gimple *stmt = gsi_stmt (gsi);
4300 tree fndecl;
4301 enum built_in_function fcode;
4303 /* Find builtins returning first arg and replace
4304 them with assignments. */
4305 if (gimple_code (stmt) == GIMPLE_CALL
4306 && (fndecl = gimple_call_fndecl (stmt))
4307 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4308 && (fcode = DECL_FUNCTION_CODE (fndecl))
4309 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4310 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4311 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4312 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4314 tree res = gimple_call_arg (stmt, 0);
4315 update_call_from_tree (&gsi, res);
4316 stmt = gsi_stmt (gsi);
4317 update_stmt (stmt);
/* Initialize pass.  Resets all per-function pass state: statement
   marks, bounds maps, cached bounds values, and dominance info.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear instrumentation marks possibly left by a previous run.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  /* Recreate per-function maps; delete precedes new where the map may
     survive from a previous function.  */
  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
/* Finalize instrumentation pass.  Releases the per-function state
   allocated by chkp_init (the maps recreated there are the ones
   deleted here).  */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);

  /* Drop cached values so the next function recomputes them.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
}
4390 /* Main instrumentation pass function. */
4391 static unsigned int
4392 chkp_execute (void)
4394 chkp_init ();
4396 chkp_instrument_function ();
4398 chkp_remove_useless_builtins ();
4400 chkp_function_mark_instrumented (cfun->decl);
4402 chkp_fix_cfg ();
4404 chkp_fini ();
4406 return 0;
4409 /* Instrumentation pass gate. */
4410 static bool
4411 chkp_gate (void)
4413 cgraph_node *node = cgraph_node::get (cfun->decl);
4414 return ((node != NULL
4415 && node->instrumentation_clone)
4416 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
namespace {

/* Pass descriptor for the pointer bounds checker instrumentation.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* Pass wrapper delegating to chkp_gate/chkp_execute above.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }
  virtual bool gate (function *)
    {
      return chkp_gate ();
    }
  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }
}; // class pass_chkp

} // anon namespace
4462 gimple_opt_pass *
4463 make_pass_chkp (gcc::context *ctxt)
4465 return new pass_chkp (ctxt);
4468 #include "gt-tree-chkp.h"