/cp
[official-gcc.git] / gcc / tree-chkp.c
blob2d5249579f9804365edc2af1e5ab4dc8b6953da9
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "alias.h"
25 #include "symtab.h"
26 #include "options.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "stor-layout.h"
30 #include "varasm.h"
31 #include "target.h"
32 #include "tree-iterator.h"
33 #include "tree-cfg.h"
34 #include "langhooks.h"
35 #include "tree-pass.h"
36 #include "diagnostic.h"
37 #include "cfgloop.h"
38 #include "stringpool.h"
39 #include "tree-ssa-alias.h"
40 #include "tree-ssanames.h"
41 #include "tree-ssa-operands.h"
42 #include "tree-ssa-address.h"
43 #include "tree-ssa.h"
44 #include "predict.h"
45 #include "dominance.h"
46 #include "cfg.h"
47 #include "basic-block.h"
48 #include "tree-ssa-loop-niter.h"
49 #include "gimple-expr.h"
50 #include "gimple.h"
51 #include "tree-phinodes.h"
52 #include "gimple-ssa.h"
53 #include "ssa-iterators.h"
54 #include "gimple-pretty-print.h"
55 #include "gimple-iterator.h"
56 #include "gimplify.h"
57 #include "gimplify-me.h"
58 #include "print-tree.h"
59 #include "tm.h"
60 #include "hard-reg-set.h"
61 #include "function.h"
62 #include "rtl.h"
63 #include "flags.h"
64 #include "insn-config.h"
65 #include "expmed.h"
66 #include "dojump.h"
67 #include "explow.h"
68 #include "calls.h"
69 #include "emit-rtl.h"
70 #include "stmt.h"
71 #include "expr.h"
72 #include "tree-ssa-propagate.h"
73 #include "gimple-fold.h"
74 #include "tree-chkp.h"
75 #include "gimple-walk.h"
76 #include "rtl.h" /* For MEM_P, assign_temp. */
77 #include "tree-dfa.h"
78 #include "lto-streamer.h"
79 #include "cgraph.h"
80 #include "ipa-chkp.h"
81 #include "params.h"
83 /* Pointer Bounds Checker instruments code with memory checks to find
84 out-of-bounds memory accesses. Checks are performed by computing
85 bounds for each pointer and then comparing address of accessed
86 memory before pointer dereferencing.
88 1. Function clones.
90 See ipa-chkp.c.
92 2. Instrumentation.
94 There are few things to instrument:
96 a) Memory accesses - add checker calls to check address of accessed memory
97 against bounds of dereferenced pointer. Obviously safe memory
98 accesses like static variable access does not have to be instrumented
99 with checks.
101 Example:
103 val_2 = *p_1;
105 with 4 bytes access is transformed into:
107 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
108 D.1_4 = p_1 + 3;
109 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
110 val_2 = *p_1;
112 where __bound_tmp.1_3 are bounds computed for pointer p_1,
113 __builtin___chkp_bndcl is a lower bound check and
114 __builtin___chkp_bndcu is an upper bound check.
116 b) Pointer stores.
118 When pointer is stored in memory we need to store its bounds. To
119 achieve compatibility of instrumented code with regular codes
120 we have to keep data layout and store bounds in special bound tables
121 via special checker call. Implementation of bounds table may vary for
122 different platforms. It has to associate pointer value and its
123 location (it is required because we may have two equal pointers
124 with different bounds stored in different places) with bounds.
125 Another checker builtin allows to get bounds for specified pointer
126 loaded from specified location.
128 Example:
130 buf1[i_1] = &buf2;
132 is transformed into:
134 buf1[i_1] = &buf2;
135 D.1_2 = &buf1[i_1];
136 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
138 where __bound_tmp.1_2 are bounds of &buf2.
140 c) Static initialization.
142 The special case of pointer store is static pointer initialization.
143 Bounds initialization is performed in a few steps:
144 - register all static initializations in front-end using
145 chkp_register_var_initializer
146 - when file compilation finishes we create functions with special
147 attribute 'chkp ctor' and put explicit initialization code
148 (assignments) for all statically initialized pointers.
149 - when checker constructor is compiled checker pass adds required
150 bounds initialization for all statically initialized pointers
151 - since we do not actually need excess pointers initialization
152 in checker constructor we remove such assignments from them
154 d) Calls.
156 For each call in the code we add additional arguments to pass
157 bounds for pointer arguments. We determine type of call arguments
158 using arguments list from function declaration; if function
159 declaration is not available we use function type; otherwise
160 (e.g. for unnamed arguments) we use type of passed value. Function
161 declaration/type is replaced with the instrumented one.
163 Example:
165 val_1 = foo (&buf1, &buf2, &buf1, 0);
167 is translated into:
169 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
170 &buf1, __bound_tmp.1_2, 0);
172 e) Returns.
174 If function returns a pointer value we have to return bounds also.
175 A new operand was added for return statement to hold returned bounds.
177 Example:
179 return &_buf1;
181 is transformed into
183 return &_buf1, __bound_tmp.1_1;
185 3. Bounds computation.
187 Compiler is fully responsible for computing bounds to be used for each
188 memory access. The first step for bounds computation is to find the
189    origin of pointer dereferenced for memory access.  Based on pointer
190 origin we define a way to compute its bounds. There are just few
191 possible cases:
193 a) Pointer is returned by call.
195 In this case we use corresponding checker builtin method to obtain returned
196 bounds.
198 Example:
200 buf_1 = malloc (size_2);
201 foo (buf_1);
203 is translated into:
205 buf_1 = malloc (size_2);
206 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
207 foo (buf_1, __bound_tmp.1_3);
209 b) Pointer is an address of an object.
211 In this case compiler tries to compute objects size and create corresponding
212 bounds. If object has incomplete type then special checker builtin is used to
213 obtain its size at runtime.
215 Example:
217 foo ()
219 <unnamed type> __bound_tmp.3;
220 static int buf[100];
222 <bb 3>:
223 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
225 <bb 2>:
226 return &buf, __bound_tmp.3_2;
229 Example:
231 Address of an object 'extern int buf[]' with incomplete type is
232 returned.
234 foo ()
236 <unnamed type> __bound_tmp.4;
237 long unsigned int __size_tmp.3;
239 <bb 3>:
240 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
241 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
243 <bb 2>:
244 return &buf, __bound_tmp.4_3;
247 c) Pointer is the result of object narrowing.
249 It happens when we use pointer to an object to compute pointer to a part
250 of an object. E.g. we take pointer to a field of a structure. In this
251 case we perform bounds intersection using bounds of original object and
252 bounds of object's part (which are computed basing on its type).
254 There may be some debatable questions about when narrowing should occur
255 and when it should not. To avoid false bound violations in correct
256 programs we do not perform narrowing when address of an array element is
257 obtained (it has address of the whole array) and when address of the first
258 structure field is obtained (because it is guaranteed to be equal to
259 address of the whole structure and it is legal to cast it back to structure).
261 Default narrowing behavior may be changed using compiler flags.
263 Example:
265 In this example address of the second structure field is returned.
267 foo (struct A * p, __bounds_type __bounds_of_p)
269 <unnamed type> __bound_tmp.3;
270 int * _2;
271 int * _5;
273 <bb 2>:
274 _5 = &p_1(D)->second_field;
275 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
276 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
277 __bounds_of_p_3(D));
278 _2 = &p_1(D)->second_field;
279 return _2, __bound_tmp.3_8;
282 Example:
284 In this example address of the first field of array element is returned.
286 foo (struct A * p, __bounds_type __bounds_of_p, int i)
288 long unsigned int _3;
289 long unsigned int _4;
290 struct A * _6;
291 int * _7;
293 <bb 2>:
294 _3 = (long unsigned int) i_1(D);
295 _4 = _3 * 8;
296 _6 = p_5(D) + _4;
297 _7 = &_6->first_field;
298 return _7, __bounds_of_p_2(D);
302 d) Pointer is the result of pointer arithmetic or type cast.
304 In this case bounds of the base pointer are used. In case of binary
305 operation producing a pointer we are analyzing data flow further
306 looking for operand's bounds. One operand is considered as a base
307 if it has some valid bounds. If we fall into a case when none of
308 operands (or both of them) has valid bounds, a default bounds value
309 is used.
311 Trying to find out bounds for binary operations we may fall into
312 cyclic dependencies for pointers. To avoid infinite recursion all
313 walked phi nodes instantly obtain corresponding bounds but created
314 bounds are marked as incomplete. It helps us to stop DF walk during
315 bounds search.
317 When we reach pointer source, some args of incomplete bounds phi obtain
318 valid bounds and those values are propagated further through phi nodes.
319 If no valid bounds were found for phi node then we mark its result as
320 invalid bounds. Process stops when all incomplete bounds become either
321 valid or invalid and we are able to choose a pointer base.
323 e) Pointer is loaded from the memory.
325 In this case we just need to load bounds from the bounds table.
327 Example:
329 foo ()
331 <unnamed type> __bound_tmp.3;
332 static int * buf;
333 int * _2;
335 <bb 2>:
336 _2 = buf;
337 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
338 return _2, __bound_tmp.3_4;
343 typedef void (*assign_handler)(tree, tree, void *);
345 static tree chkp_get_zero_bounds ();
346 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
347 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
348 gimple_stmt_iterator *iter);
349 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
350 tree *elt, bool *safe,
351 bool *bitfield,
352 tree *bounds,
353 gimple_stmt_iterator *iter,
354 bool innermost_bounds);
356 #define chkp_bndldx_fndecl \
357 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
358 #define chkp_bndstx_fndecl \
359 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
360 #define chkp_checkl_fndecl \
361 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
362 #define chkp_checku_fndecl \
363 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
364 #define chkp_bndmk_fndecl \
365 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
366 #define chkp_ret_bnd_fndecl \
367 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
368 #define chkp_intersect_fndecl \
369 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
370 #define chkp_narrow_bounds_fndecl \
371 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
372 #define chkp_sizeof_fndecl \
373 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
374 #define chkp_extract_lower_fndecl \
375 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
376 #define chkp_extract_upper_fndecl \
377 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
379 static GTY (()) tree chkp_uintptr_type;
381 static GTY (()) tree chkp_zero_bounds_var;
382 static GTY (()) tree chkp_none_bounds_var;
384 static GTY (()) basic_block entry_block;
385 static GTY (()) tree zero_bounds;
386 static GTY (()) tree none_bounds;
387 static GTY (()) tree incomplete_bounds;
388 static GTY (()) tree tmp_var;
389 static GTY (()) tree size_tmp_var;
390 static GTY (()) bitmap chkp_abnormal_copies;
392 struct hash_set<tree> *chkp_invalid_bounds;
393 struct hash_set<tree> *chkp_completed_bounds_set;
394 struct hash_map<tree, tree> *chkp_reg_bounds;
395 struct hash_map<tree, tree> *chkp_bound_vars;
396 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
397 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
398 struct hash_map<tree, tree> *chkp_bounds_map;
399 struct hash_map<tree, tree> *chkp_static_var_bounds;
401 static bool in_chkp_pass;
403 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
404 #define CHKP_SIZE_TMP_NAME "__size_tmp"
405 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
406 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
407 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
408 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
409 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
411 /* Static checker constructors may become very large and their
412 compilation with optimization may take too much time.
413 Therefore we put a limit to number of statements in one
414 constructor. Tests with 100 000 statically initialized
415 pointers showed following compilation times on Sandy Bridge
416 server (used -O2):
417 limit 100 => ~18 sec.
418 limit 300 => ~22 sec.
419 limit 1000 => ~30 sec.
420 limit 3000 => ~49 sec.
421 limit 5000 => ~55 sec.
422 limit 10000 => ~76 sec.
423 limit 100000 => ~532 sec. */
424 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
426 struct chkp_ctor_stmt_list
428 tree stmts;
429 int avail;
432 /* Return 1 if function FNDECL is instrumented by Pointer
433 Bounds Checker. */
434 bool
435 chkp_function_instrumented_p (tree fndecl)
437 return fndecl
438 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
441 /* Mark function FNDECL as instrumented. */
442 void
443 chkp_function_mark_instrumented (tree fndecl)
445 if (chkp_function_instrumented_p (fndecl))
446 return;
448 DECL_ATTRIBUTES (fndecl)
449 = tree_cons (get_identifier ("chkp instrumented"), NULL,
450 DECL_ATTRIBUTES (fndecl));
453 /* Return true when STMT is builtin call to instrumentation function
454 corresponding to CODE. */
456 bool
457 chkp_gimple_call_builtin_p (gimple call,
458 enum built_in_function code)
460 tree fndecl;
461 if (is_gimple_call (call)
462 && (fndecl = targetm.builtin_chkp_function (code))
463 && gimple_call_fndecl (call) == fndecl)
464 return true;
465 return false;
468 /* Emit code to build zero bounds and return RTL holding
469 the result. */
471 chkp_expand_zero_bounds ()
473 tree zero_bnd;
475 if (flag_chkp_use_static_const_bounds)
476 zero_bnd = chkp_get_zero_bounds_var ();
477 else
478 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
479 integer_zero_node);
480 return expand_normal (zero_bnd);
483 /* Emit code to store zero bounds for PTR located at MEM. */
484 void
485 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
487 tree zero_bnd, bnd, addr, bndstx;
489 if (flag_chkp_use_static_const_bounds)
490 zero_bnd = chkp_get_zero_bounds_var ();
491 else
492 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
493 integer_zero_node);
494 bnd = make_tree (pointer_bounds_type_node,
495 assign_temp (pointer_bounds_type_node, 0, 1));
496 addr = build1 (ADDR_EXPR,
497 build_pointer_type (TREE_TYPE (mem)), mem);
498 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
500 expand_assignment (bnd, zero_bnd, false);
501 expand_normal (bndstx);
504 /* Build retbnd call for returned value RETVAL.
506 If BNDVAL is not NULL then result is stored
507 in it. Otherwise a temporary is created to
508 hold returned value.
510 GSI points to a position for a retbnd call
511 and is set to created stmt.
513 Cgraph edge is created for a new call if
514 UPDATE_EDGE is 1.
516 Obtained bounds are returned. */
517 tree
518 chkp_insert_retbnd_call (tree bndval, tree retval,
519 gimple_stmt_iterator *gsi)
521 gimple call;
523 if (!bndval)
524 bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
526 call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
527 gimple_call_set_lhs (call, bndval);
528 gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
530 return bndval;
533 /* Build a GIMPLE_CALL identical to CALL but skipping bounds
534 arguments. */
536 gcall *
537 chkp_copy_call_skip_bounds (gcall *call)
539 bitmap bounds;
540 unsigned i;
542 bitmap_obstack_initialize (NULL);
543 bounds = BITMAP_ALLOC (NULL);
545 for (i = 0; i < gimple_call_num_args (call); i++)
546 if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
547 bitmap_set_bit (bounds, i);
549 if (!bitmap_empty_p (bounds))
550 call = gimple_call_copy_skip_args (call, bounds);
551 gimple_call_set_with_bounds (call, false);
553 BITMAP_FREE (bounds);
554 bitmap_obstack_release (NULL);
556 return call;
559 /* Redirect edge E to the correct node according to call_stmt.
560 Return 1 if bounds removal from call_stmt should be done
561 instead of redirection. */
563 bool
564 chkp_redirect_edge (cgraph_edge *e)
566 bool instrumented = false;
567 tree decl = e->callee->decl;
569 if (e->callee->instrumentation_clone
570 || chkp_function_instrumented_p (decl))
571 instrumented = true;
573 if (instrumented
574 && !gimple_call_with_bounds_p (e->call_stmt))
575 e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
576 else if (!instrumented
577 && gimple_call_with_bounds_p (e->call_stmt)
578 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
579 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
580 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
582 if (e->callee->instrumented_version)
583 e->redirect_callee (e->callee->instrumented_version);
584 else
586 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
587 /* Avoid bounds removal if all args will be removed. */
588 if (!args || TREE_VALUE (args) != void_type_node)
589 return true;
590 else
591 gimple_call_set_with_bounds (e->call_stmt, false);
595 return false;
598 /* Mark statement S to not be instrumented. */
599 static void
600 chkp_mark_stmt (gimple s)
602 gimple_set_plf (s, GF_PLF_1, true);
605 /* Mark statement S to be instrumented. */
606 static void
607 chkp_unmark_stmt (gimple s)
609 gimple_set_plf (s, GF_PLF_1, false);
612 /* Return 1 if statement S should not be instrumented. */
613 static bool
614 chkp_marked_stmt_p (gimple s)
616 return gimple_plf (s, GF_PLF_1);
619 /* Get var to be used for bound temps. */
620 static tree
621 chkp_get_tmp_var (void)
623 if (!tmp_var)
624 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
626 return tmp_var;
629 /* Get SSA_NAME to be used as temp. */
630 static tree
631 chkp_get_tmp_reg (gimple stmt)
633 if (in_chkp_pass)
634 return make_ssa_name (chkp_get_tmp_var (), stmt);
636 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
637 CHKP_BOUND_TMP_NAME);
640 /* Get var to be used for size temps. */
641 static tree
642 chkp_get_size_tmp_var (void)
644 if (!size_tmp_var)
645 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
647 return size_tmp_var;
650 /* Register bounds BND for address of OBJ. */
651 static void
652 chkp_register_addr_bounds (tree obj, tree bnd)
654 if (bnd == incomplete_bounds)
655 return;
657 chkp_reg_addr_bounds->put (obj, bnd);
659 if (dump_file && (dump_flags & TDF_DETAILS))
661 fprintf (dump_file, "Regsitered bound ");
662 print_generic_expr (dump_file, bnd, 0);
663 fprintf (dump_file, " for address of ");
664 print_generic_expr (dump_file, obj, 0);
665 fprintf (dump_file, "\n");
669 /* Return bounds registered for address of OBJ. */
670 static tree
671 chkp_get_registered_addr_bounds (tree obj)
673 tree *slot = chkp_reg_addr_bounds->get (obj);
674 return slot ? *slot : NULL_TREE;
677 /* Mark BOUNDS as completed. */
678 static void
679 chkp_mark_completed_bounds (tree bounds)
681 chkp_completed_bounds_set->add (bounds);
683 if (dump_file && (dump_flags & TDF_DETAILS))
685 fprintf (dump_file, "Marked bounds ");
686 print_generic_expr (dump_file, bounds, 0);
687 fprintf (dump_file, " as completed\n");
691 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
692 static bool
693 chkp_completed_bounds (tree bounds)
695 return chkp_completed_bounds_set->contains (bounds);
698 /* Clear comleted bound marks. */
699 static void
700 chkp_erase_completed_bounds (void)
702 delete chkp_completed_bounds_set;
703 chkp_completed_bounds_set = new hash_set<tree>;
706 /* Mark BOUNDS associated with PTR as incomplete. */
707 static void
708 chkp_register_incomplete_bounds (tree bounds, tree ptr)
710 chkp_incomplete_bounds_map->put (bounds, ptr);
712 if (dump_file && (dump_flags & TDF_DETAILS))
714 fprintf (dump_file, "Regsitered incomplete bounds ");
715 print_generic_expr (dump_file, bounds, 0);
716 fprintf (dump_file, " for ");
717 print_generic_expr (dump_file, ptr, 0);
718 fprintf (dump_file, "\n");
722 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
723 static bool
724 chkp_incomplete_bounds (tree bounds)
726 if (bounds == incomplete_bounds)
727 return true;
729 if (chkp_completed_bounds (bounds))
730 return false;
732 return chkp_incomplete_bounds_map->get (bounds) != NULL;
735 /* Clear incomleted bound marks. */
736 static void
737 chkp_erase_incomplete_bounds (void)
739 delete chkp_incomplete_bounds_map;
740 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
743 /* Build and return bndmk call which creates bounds for structure
744 pointed by PTR. Structure should have complete type. */
745 tree
746 chkp_make_bounds_for_struct_addr (tree ptr)
748 tree type = TREE_TYPE (ptr);
749 tree size;
751 gcc_assert (POINTER_TYPE_P (type));
753 size = TYPE_SIZE (TREE_TYPE (type));
755 gcc_assert (size);
757 return build_call_nary (pointer_bounds_type_node,
758 build_fold_addr_expr (chkp_bndmk_fndecl),
759 2, ptr, size);
762 /* Traversal function for chkp_may_finish_incomplete_bounds.
763 Set RES to 0 if at least one argument of phi statement
764 defining bounds (passed in KEY arg) is unknown.
765 Traversal stops when first unknown phi argument is found. */
766 bool
767 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
768 bool *res)
770 gimple phi;
771 unsigned i;
773 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
775 phi = SSA_NAME_DEF_STMT (bounds);
777 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
779 for (i = 0; i < gimple_phi_num_args (phi); i++)
781 tree phi_arg = gimple_phi_arg_def (phi, i);
782 if (!phi_arg)
784 *res = false;
785 /* Do not need to traverse further. */
786 return false;
790 return true;
793 /* Return 1 if all phi nodes created for bounds have their
794 arguments computed. */
795 static bool
796 chkp_may_finish_incomplete_bounds (void)
798 bool res = true;
800 chkp_incomplete_bounds_map
801 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
803 return res;
806 /* Helper function for chkp_finish_incomplete_bounds.
807 Recompute args for bounds phi node. */
808 bool
809 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
810 void *res ATTRIBUTE_UNUSED)
812 tree ptr = *slot;
813 gphi *bounds_phi;
814 gphi *ptr_phi;
815 unsigned i;
817 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
818 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
820 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
821 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
823 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
825 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
826 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
828 add_phi_arg (bounds_phi, bound_arg,
829 gimple_phi_arg_edge (ptr_phi, i),
830 UNKNOWN_LOCATION);
833 return true;
836 /* Mark BOUNDS as invalid. */
837 static void
838 chkp_mark_invalid_bounds (tree bounds)
840 chkp_invalid_bounds->add (bounds);
842 if (dump_file && (dump_flags & TDF_DETAILS))
844 fprintf (dump_file, "Marked bounds ");
845 print_generic_expr (dump_file, bounds, 0);
846 fprintf (dump_file, " as invalid\n");
850 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
851 static bool
852 chkp_valid_bounds (tree bounds)
854 if (bounds == zero_bounds || bounds == none_bounds)
855 return false;
857 return !chkp_invalid_bounds->contains (bounds);
860 /* Helper function for chkp_finish_incomplete_bounds.
861 Check all arguments of phi nodes trying to find
862 valid completed bounds. If there is at least one
863 such arg then bounds produced by phi node are marked
864 as valid completed bounds and all phi args are
865 recomputed. */
866 bool
867 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
869 gimple phi;
870 unsigned i;
872 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
874 if (chkp_completed_bounds (bounds))
875 return true;
877 phi = SSA_NAME_DEF_STMT (bounds);
879 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
881 for (i = 0; i < gimple_phi_num_args (phi); i++)
883 tree phi_arg = gimple_phi_arg_def (phi, i);
885 gcc_assert (phi_arg);
887 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
889 *res = true;
890 chkp_mark_completed_bounds (bounds);
891 chkp_recompute_phi_bounds (bounds, slot, NULL);
892 return true;
896 return true;
899 /* Helper function for chkp_finish_incomplete_bounds.
900 Marks all incompleted bounds as invalid. */
901 bool
902 chkp_mark_invalid_bounds_walker (tree const &bounds,
903 tree *slot ATTRIBUTE_UNUSED,
904 void *res ATTRIBUTE_UNUSED)
906 if (!chkp_completed_bounds (bounds))
908 chkp_mark_invalid_bounds (bounds);
909 chkp_mark_completed_bounds (bounds);
911 return true;
914 /* When all bound phi nodes have all their args computed
915 we have enough info to find valid bounds. We iterate
916 through all incompleted bounds searching for valid
917 bounds. Found valid bounds are marked as completed
918 and all remaining incompleted bounds are recomputed.
919 Process continues until no new valid bounds may be
920 found. All remained incompleted bounds are marked as
921 invalid (i.e. have no valid source of bounds). */
922 static void
923 chkp_finish_incomplete_bounds (void)
925 bool found_valid;
927 while (found_valid)
929 found_valid = false;
931 chkp_incomplete_bounds_map->
932 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
934 if (found_valid)
935 chkp_incomplete_bounds_map->
936 traverse<void *, chkp_recompute_phi_bounds> (NULL);
939 chkp_incomplete_bounds_map->
940 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
941 chkp_incomplete_bounds_map->
942 traverse<void *, chkp_recompute_phi_bounds> (NULL);
944 chkp_erase_completed_bounds ();
945 chkp_erase_incomplete_bounds ();
948 /* Return 1 if type TYPE is a pointer type or a
949 structure having a pointer type as one of its fields.
950 Otherwise return 0. */
951 bool
952 chkp_type_has_pointer (const_tree type)
954 bool res = false;
956 if (BOUNDED_TYPE_P (type))
957 res = true;
958 else if (RECORD_OR_UNION_TYPE_P (type))
960 tree field;
962 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
963 if (TREE_CODE (field) == FIELD_DECL)
964 res = res || chkp_type_has_pointer (TREE_TYPE (field));
966 else if (TREE_CODE (type) == ARRAY_TYPE)
967 res = chkp_type_has_pointer (TREE_TYPE (type));
969 return res;
972 unsigned
973 chkp_type_bounds_count (const_tree type)
975 unsigned res = 0;
977 if (!type)
978 res = 0;
979 else if (BOUNDED_TYPE_P (type))
980 res = 1;
981 else if (RECORD_OR_UNION_TYPE_P (type))
983 bitmap have_bound;
985 bitmap_obstack_initialize (NULL);
986 have_bound = BITMAP_ALLOC (NULL);
987 chkp_find_bound_slots (type, have_bound);
988 res = bitmap_count_bits (have_bound);
989 BITMAP_FREE (have_bound);
990 bitmap_obstack_release (NULL);
993 return res;
996 /* Get bounds associated with NODE via
997 chkp_set_bounds call. */
998 tree
999 chkp_get_bounds (tree node)
1001 tree *slot;
1003 if (!chkp_bounds_map)
1004 return NULL_TREE;
1006 slot = chkp_bounds_map->get (node);
1007 return slot ? *slot : NULL_TREE;
1010 /* Associate bounds VAL with NODE. */
1011 void
1012 chkp_set_bounds (tree node, tree val)
1014 if (!chkp_bounds_map)
1015 chkp_bounds_map = new hash_map<tree, tree>;
1017 chkp_bounds_map->put (node, val);
1020 /* Check if statically initialized variable VAR require
1021 static bounds initialization. If VAR is added into
1022 bounds initlization list then 1 is returned. Otherwise
1023 return 0. */
1024 extern bool
1025 chkp_register_var_initializer (tree var)
1027 if (!flag_check_pointer_bounds
1028 || DECL_INITIAL (var) == error_mark_node)
1029 return false;
1031 gcc_assert (TREE_CODE (var) == VAR_DECL);
1032 gcc_assert (DECL_INITIAL (var));
1034 if (TREE_STATIC (var)
1035 && chkp_type_has_pointer (TREE_TYPE (var)))
1037 varpool_node::get_create (var)->need_bounds_init = 1;
1038 return true;
1041 return false;
1044 /* Helper function for chkp_finish_file.
1046 Add new modification statement (RHS is assigned to LHS)
1047 into list of static initializer statementes (passed in ARG).
1048 If statements list becomes too big, emit checker constructor
1049 and start the new one. */
1050 static void
1051 chkp_add_modification_to_stmt_list (tree lhs,
1052 tree rhs,
1053 void *arg)
1055 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
1056 tree modify;
1058 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1059 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1061 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1062 append_to_statement_list (modify, &stmts->stmts);
1064 stmts->avail--;
1067 /* Build and return ADDR_EXPR for specified object OBJ. */
1068 static tree
1069 chkp_build_addr_expr (tree obj)
1071 return TREE_CODE (obj) == TARGET_MEM_REF
1072 ? tree_mem_ref_addr (ptr_type_node, obj)
1073 : build_fold_addr_expr (obj);
1076 /* Helper function for chkp_finish_file.
1077 Initialize bound variable BND_VAR with bounds of variable
1078 VAR to statements list STMTS. If statements list becomes
1079 too big, emit checker constructor and start the new one. */
1080 static void
1081 chkp_output_static_bounds (tree bnd_var, tree var,
1082 struct chkp_ctor_stmt_list *stmts)
1084 tree lb, ub, size;
1086 if (TREE_CODE (var) == STRING_CST)
1088 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1089 size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
1091 else if (DECL_SIZE (var)
1092 && !chkp_variable_size_type (TREE_TYPE (var)))
1094 /* Compute bounds using statically known size. */
1095 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1096 size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
1098 else
1100 /* Compute bounds using dynamic size. */
1101 tree call;
1103 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1104 call = build1 (ADDR_EXPR,
1105 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
1106 chkp_sizeof_fndecl);
1107 size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
1108 call, 1, var);
1110 if (flag_chkp_zero_dynamic_size_as_infinite)
1112 tree max_size, cond;
1114 max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
1115 cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
1116 size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
1119 size = size_binop (MINUS_EXPR, size, size_one_node);
1122 ub = size_binop (PLUS_EXPR, lb, size);
1123 stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
1124 &stmts->stmts);
1125 if (stmts->avail <= 0)
1127 cgraph_build_static_cdtor ('B', stmts->stmts,
1128 MAX_RESERVED_INIT_PRIORITY + 2);
1129 stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
1130 stmts->stmts = NULL;
1134 /* Return entry block to be used for checker initilization code.
1135 Create new block if required. */
1136 static basic_block
1137 chkp_get_entry_block (void)
1139 if (!entry_block)
1140 entry_block
1141 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1143 return entry_block;
1146 /* Return a bounds var to be used for pointer var PTR_VAR. */
1147 static tree
1148 chkp_get_bounds_var (tree ptr_var)
1150 tree bnd_var;
1151 tree *slot;
1153 slot = chkp_bound_vars->get (ptr_var);
1154 if (slot)
1155 bnd_var = *slot;
1156 else
1158 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1159 CHKP_BOUND_TMP_NAME);
1160 chkp_bound_vars->put (ptr_var, bnd_var);
1163 return bnd_var;
1166 /* If BND is an abnormal bounds copy, return a copied value.
1167 Otherwise return BND. */
1168 static tree
1169 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
1171 if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1173 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1174 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1175 bnd = gimple_assign_rhs1 (bnd_def);
1178 return bnd;
1181 /* Register bounds BND for object PTR in global bounds table.
1182 A copy of bounds may be created for abnormal ssa names.
1183 Returns bounds to use for PTR. */
1184 static tree
1185 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1187 bool abnormal_ptr;
1189 if (!chkp_reg_bounds)
1190 return bnd;
1192 /* Do nothing if bounds are incomplete_bounds
1193 because it means bounds will be recomputed. */
1194 if (bnd == incomplete_bounds)
1195 return bnd;
1197 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1198 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1199 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1201 /* A single bounds value may be reused multiple times for
1202 different pointer values. It may cause coalescing issues
1203 for abnormal SSA names. To avoid it we create a bounds
1204 copy in case it is computed for abnormal SSA name.
1206 We also cannot reuse such created copies for other pointers */
1207 if (abnormal_ptr
1208 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1210 tree bnd_var = NULL_TREE;
1212 if (abnormal_ptr)
1214 if (SSA_NAME_VAR (ptr))
1215 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1217 else
1218 bnd_var = chkp_get_tmp_var ();
1220 /* For abnormal copies we may just find original
1221 bounds and use them. */
1222 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1223 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1224 /* For undefined values we usually use none bounds
1225 value but in case of abnormal edge it may cause
1226 coalescing failures. Use default definition of
1227 bounds variable instead to avoid it. */
1228 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1229 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1231 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1233 if (dump_file && (dump_flags & TDF_DETAILS))
1235 fprintf (dump_file, "Using default def bounds ");
1236 print_generic_expr (dump_file, bnd, 0);
1237 fprintf (dump_file, " for abnormal default def SSA name ");
1238 print_generic_expr (dump_file, ptr, 0);
1239 fprintf (dump_file, "\n");
1242 else
1244 tree copy;
1245 gimple def = SSA_NAME_DEF_STMT (ptr);
1246 gimple assign;
1247 gimple_stmt_iterator gsi;
1249 if (bnd_var)
1250 copy = make_ssa_name (bnd_var);
1251 else
1252 copy = make_temp_ssa_name (pointer_bounds_type_node,
1253 NULL,
1254 CHKP_BOUND_TMP_NAME);
1255 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1256 assign = gimple_build_assign (copy, bnd);
1258 if (dump_file && (dump_flags & TDF_DETAILS))
1260 fprintf (dump_file, "Creating a copy of bounds ");
1261 print_generic_expr (dump_file, bnd, 0);
1262 fprintf (dump_file, " for abnormal SSA name ");
1263 print_generic_expr (dump_file, ptr, 0);
1264 fprintf (dump_file, "\n");
1267 if (gimple_code (def) == GIMPLE_NOP)
1269 gsi = gsi_last_bb (chkp_get_entry_block ());
1270 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1271 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1272 else
1273 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1275 else
1277 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1278 /* Sometimes (e.g. when we load a pointer from a
1279 memory) bounds are produced later than a pointer.
1280 We need to insert bounds copy appropriately. */
1281 if (gimple_code (bnd_def) != GIMPLE_NOP
1282 && stmt_dominates_stmt_p (def, bnd_def))
1283 gsi = gsi_for_stmt (bnd_def);
1284 else
1285 gsi = gsi_for_stmt (def);
1286 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1289 bnd = copy;
1292 if (abnormal_ptr)
1293 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1296 chkp_reg_bounds->put (ptr, bnd);
1298 if (dump_file && (dump_flags & TDF_DETAILS))
1300 fprintf (dump_file, "Regsitered bound ");
1301 print_generic_expr (dump_file, bnd, 0);
1302 fprintf (dump_file, " for pointer ");
1303 print_generic_expr (dump_file, ptr, 0);
1304 fprintf (dump_file, "\n");
1307 return bnd;
1310 /* Get bounds registered for object PTR in global bounds table. */
1311 static tree
1312 chkp_get_registered_bounds (tree ptr)
1314 tree *slot;
1316 if (!chkp_reg_bounds)
1317 return NULL_TREE;
1319 slot = chkp_reg_bounds->get (ptr);
1320 return slot ? *slot : NULL_TREE;
1323 /* Add bound retvals to return statement pointed by GSI. */
1325 static void
1326 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1328 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1329 tree retval = gimple_return_retval (ret);
1330 tree ret_decl = DECL_RESULT (cfun->decl);
1331 tree bounds;
1333 if (!retval)
1334 return;
1336 if (BOUNDED_P (ret_decl))
1338 bounds = chkp_find_bounds (retval, gsi);
1339 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1340 gimple_return_set_retbnd (ret, bounds);
1343 update_stmt (ret);
1346 /* Force OP to be suitable for using as an argument for call.
1347 New statements (if any) go to SEQ. */
1348 static tree
1349 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1351 gimple_seq stmts;
1352 gimple_stmt_iterator si;
1354 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1356 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1357 chkp_mark_stmt (gsi_stmt (si));
1359 gimple_seq_add_seq (seq, stmts);
1361 return op;
1364 /* Generate lower bound check for memory access by ADDR.
1365 Check is inserted before the position pointed by ITER.
1366 DIRFLAG indicates whether memory access is load or store. */
1367 static void
1368 chkp_check_lower (tree addr, tree bounds,
1369 gimple_stmt_iterator iter,
1370 location_t location,
1371 tree dirflag)
1373 gimple_seq seq;
1374 gimple check;
1375 tree node;
1377 if (!chkp_function_instrumented_p (current_function_decl)
1378 && bounds == chkp_get_zero_bounds ())
1379 return;
1381 if (dirflag == integer_zero_node
1382 && !flag_chkp_check_read)
1383 return;
1385 if (dirflag == integer_one_node
1386 && !flag_chkp_check_write)
1387 return;
1389 seq = NULL;
1391 node = chkp_force_gimple_call_op (addr, &seq);
1393 check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1394 chkp_mark_stmt (check);
1395 gimple_call_set_with_bounds (check, true);
1396 gimple_set_location (check, location);
1397 gimple_seq_add_stmt (&seq, check);
1399 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1401 if (dump_file && (dump_flags & TDF_DETAILS))
1403 gimple before = gsi_stmt (iter);
1404 fprintf (dump_file, "Generated lower bound check for statement ");
1405 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1406 fprintf (dump_file, " ");
1407 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1411 /* Generate upper bound check for memory access by ADDR.
1412 Check is inserted before the position pointed by ITER.
1413 DIRFLAG indicates whether memory access is load or store. */
1414 static void
1415 chkp_check_upper (tree addr, tree bounds,
1416 gimple_stmt_iterator iter,
1417 location_t location,
1418 tree dirflag)
1420 gimple_seq seq;
1421 gimple check;
1422 tree node;
1424 if (!chkp_function_instrumented_p (current_function_decl)
1425 && bounds == chkp_get_zero_bounds ())
1426 return;
1428 if (dirflag == integer_zero_node
1429 && !flag_chkp_check_read)
1430 return;
1432 if (dirflag == integer_one_node
1433 && !flag_chkp_check_write)
1434 return;
1436 seq = NULL;
1438 node = chkp_force_gimple_call_op (addr, &seq);
1440 check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1441 chkp_mark_stmt (check);
1442 gimple_call_set_with_bounds (check, true);
1443 gimple_set_location (check, location);
1444 gimple_seq_add_stmt (&seq, check);
1446 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1448 if (dump_file && (dump_flags & TDF_DETAILS))
1450 gimple before = gsi_stmt (iter);
1451 fprintf (dump_file, "Generated upper bound check for statement ");
1452 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1453 fprintf (dump_file, " ");
1454 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1458 /* Generate lower and upper bound checks for memory access
1459 to memory slot [FIRST, LAST] againsr BOUNDS. Checks
1460 are inserted before the position pointed by ITER.
1461 DIRFLAG indicates whether memory access is load or store. */
1462 void
1463 chkp_check_mem_access (tree first, tree last, tree bounds,
1464 gimple_stmt_iterator iter,
1465 location_t location,
1466 tree dirflag)
1468 chkp_check_lower (first, bounds, iter, location, dirflag);
1469 chkp_check_upper (last, bounds, iter, location, dirflag);
1472 /* Replace call to _bnd_chk_* pointed by GSI with
1473 bndcu and bndcl calls. DIRFLAG determines whether
1474 check is for read or write. */
1476 void
1477 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1478 tree dirflag)
1480 gimple_stmt_iterator call_iter = *gsi;
1481 gimple call = gsi_stmt (*gsi);
1482 tree fndecl = gimple_call_fndecl (call);
1483 tree addr = gimple_call_arg (call, 0);
1484 tree bounds = chkp_find_bounds (addr, gsi);
1486 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1487 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1488 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1490 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1491 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1493 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1495 tree size = gimple_call_arg (call, 1);
1496 addr = fold_build_pointer_plus (addr, size);
1497 addr = fold_build_pointer_plus_hwi (addr, -1);
1498 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1501 gsi_remove (&call_iter, true);
1504 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1505 corresponding bounds extract call. */
1507 void
1508 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1510 gimple call = gsi_stmt (*gsi);
1511 tree fndecl = gimple_call_fndecl (call);
1512 tree addr = gimple_call_arg (call, 0);
1513 tree bounds = chkp_find_bounds (addr, gsi);
1514 gimple extract;
1516 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1517 fndecl = chkp_extract_lower_fndecl;
1518 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1519 fndecl = chkp_extract_upper_fndecl;
1520 else
1521 gcc_unreachable ();
1523 extract = gimple_build_call (fndecl, 1, bounds);
1524 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1525 chkp_mark_stmt (extract);
1527 gsi_replace (gsi, extract, false);
1530 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1531 static tree
1532 chkp_build_component_ref (tree obj, tree field)
1534 tree res;
1536 /* If object is TMR then we do not use component_ref but
1537 add offset instead. We need it to be able to get addr
1538 of the reasult later. */
1539 if (TREE_CODE (obj) == TARGET_MEM_REF)
1541 tree offs = TMR_OFFSET (obj);
1542 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1543 offs, DECL_FIELD_OFFSET (field));
1545 gcc_assert (offs);
1547 res = copy_node (obj);
1548 TREE_TYPE (res) = TREE_TYPE (field);
1549 TMR_OFFSET (res) = offs;
1551 else
1552 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1554 return res;
1557 /* Return ARRAY_REF for array ARR and index IDX with
1558 specified element type ETYPE and element size ESIZE. */
1559 static tree
1560 chkp_build_array_ref (tree arr, tree etype, tree esize,
1561 unsigned HOST_WIDE_INT idx)
1563 tree index = build_int_cst (size_type_node, idx);
1564 tree res;
1566 /* If object is TMR then we do not use array_ref but
1567 add offset instead. We need it to be able to get addr
1568 of the reasult later. */
1569 if (TREE_CODE (arr) == TARGET_MEM_REF)
1571 tree offs = TMR_OFFSET (arr);
1573 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1574 esize, index);
1575 gcc_assert(esize);
1577 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1578 offs, esize);
1579 gcc_assert (offs);
1581 res = copy_node (arr);
1582 TREE_TYPE (res) = etype;
1583 TMR_OFFSET (res) = offs;
1585 else
1586 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1588 return res;
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   ITER points a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  /* Load the pointer into a temporary SSA name before ITER and
	     compute its bounds right after that load.  */
	  tree temp = make_temp_ssa_name (type, NULL, "");
	  gimple assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into every field, adjusting the bit offset by the
	 field's position within the record/union.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;

	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Nothing to do for arrays with no (or empty) domain.  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      /* Recurse into every element of the array.  */
      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    /* A pointer type occupies exactly one bounds slot.  */
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into every field, adjusting the bit offset by the
	 field's position within the record/union.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Only arrays with a known constant upper bound contribute.  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
1710 /* Fill bitmap RES with information about bounds for
1711 type TYPE. See chkp_find_bound_slots_1 for more
1712 details. */
1713 void
1714 chkp_find_bound_slots (const_tree type, bitmap res)
1716 bitmap_clear (res);
1717 chkp_find_bound_slots_1 (type, res, 0);
1720 /* Return 1 if call to FNDECL should be instrumented
1721 and 0 otherwise. */
1723 static bool
1724 chkp_instrument_normal_builtin (tree fndecl)
1726 switch (DECL_FUNCTION_CODE (fndecl))
1728 case BUILT_IN_STRLEN:
1729 case BUILT_IN_STRCPY:
1730 case BUILT_IN_STRNCPY:
1731 case BUILT_IN_STPCPY:
1732 case BUILT_IN_STPNCPY:
1733 case BUILT_IN_STRCAT:
1734 case BUILT_IN_STRNCAT:
1735 case BUILT_IN_MEMCPY:
1736 case BUILT_IN_MEMPCPY:
1737 case BUILT_IN_MEMSET:
1738 case BUILT_IN_MEMMOVE:
1739 case BUILT_IN_BZERO:
1740 case BUILT_IN_STRCMP:
1741 case BUILT_IN_STRNCMP:
1742 case BUILT_IN_BCMP:
1743 case BUILT_IN_MEMCMP:
1744 case BUILT_IN_MEMCPY_CHK:
1745 case BUILT_IN_MEMPCPY_CHK:
1746 case BUILT_IN_MEMMOVE_CHK:
1747 case BUILT_IN_MEMSET_CHK:
1748 case BUILT_IN_STRCPY_CHK:
1749 case BUILT_IN_STRNCPY_CHK:
1750 case BUILT_IN_STPCPY_CHK:
1751 case BUILT_IN_STPNCPY_CHK:
1752 case BUILT_IN_STRCAT_CHK:
1753 case BUILT_IN_STRNCAT_CHK:
1754 case BUILT_IN_MALLOC:
1755 case BUILT_IN_CALLOC:
1756 case BUILT_IN_REALLOC:
1757 return 1;
1759 default:
1760 return 0;
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */
static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      /* A pointer (or pass-by-reference) argument gets its bounds
	 pushed right after it; an aggregate containing pointers gets
	 one bounds argument per contained pointer slot.  */
      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds were added, keep the original call statement.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA definitions from the old statement to the new
	 one before swapping them in the IL.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires '%s' "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      /* Reuse the existing declaration from the symbol table.  */
      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* Let the target encode the [LB, UB] pair as the initializer.  */
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
2031 /* Generate code to make bounds with specified lower bound LB and SIZE.
2032 if AFTER is 1 then code is inserted after position pointed by ITER
2033 otherwise code is inserted before position pointed by ITER.
2034 If ITER is NULL then code is added to entry block. */
2035 static tree
2036 chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
2038 gimple_seq seq;
2039 gimple_stmt_iterator gsi;
2040 gimple stmt;
2041 tree bounds;
2043 if (iter)
2044 gsi = *iter;
2045 else
2046 gsi = gsi_start_bb (chkp_get_entry_block ());
2048 seq = NULL;
2050 lb = chkp_force_gimple_call_op (lb, &seq);
2051 size = chkp_force_gimple_call_op (size, &seq);
2053 stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
2054 chkp_mark_stmt (stmt);
2056 bounds = chkp_get_tmp_reg (stmt);
2057 gimple_call_set_lhs (stmt, bounds);
2059 gimple_seq_add_stmt (&seq, stmt);
2061 if (iter && after)
2062 gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
2063 else
2064 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
2066 if (dump_file && (dump_flags & TDF_DETAILS))
2068 fprintf (dump_file, "Made bounds: ");
2069 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2070 if (iter)
2072 fprintf (dump_file, " inserted before statement: ");
2073 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
2075 else
2076 fprintf (dump_file, " at function entry\n");
2079 /* update_stmt (stmt); */
2081 return bounds;
2084 /* Return var holding zero bounds. */
2085 tree
2086 chkp_get_zero_bounds_var (void)
2088 if (!chkp_zero_bounds_var)
2089 chkp_zero_bounds_var
2090 = chkp_make_static_const_bounds (0, -1,
2091 CHKP_ZERO_BOUNDS_VAR_NAME);
2092 return chkp_zero_bounds_var;
2095 /* Return var holding none bounds. */
2096 tree
2097 chkp_get_none_bounds_var (void)
2099 if (!chkp_none_bounds_var)
2100 chkp_none_bounds_var
2101 = chkp_make_static_const_bounds (-1, 0,
2102 CHKP_NONE_BOUNDS_VAR_NAME);
2103 return chkp_none_bounds_var;
2106 /* Return SSA_NAME used to represent zero bounds. */
2107 static tree
2108 chkp_get_zero_bounds (void)
2110 if (zero_bounds)
2111 return zero_bounds;
2113 if (dump_file && (dump_flags & TDF_DETAILS))
2114 fprintf (dump_file, "Creating zero bounds...");
2116 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2117 || flag_chkp_use_static_const_bounds > 0)
2119 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2120 gimple stmt;
2122 zero_bounds = chkp_get_tmp_reg (NULL);
2123 stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
2124 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2126 else
2127 zero_bounds = chkp_make_bounds (integer_zero_node,
2128 integer_zero_node,
2129 NULL,
2130 false);
2132 return zero_bounds;
2135 /* Return SSA_NAME used to represent none bounds. */
2136 static tree
2137 chkp_get_none_bounds (void)
2139 if (none_bounds)
2140 return none_bounds;
2142 if (dump_file && (dump_flags & TDF_DETAILS))
2143 fprintf (dump_file, "Creating none bounds...");
2146 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2147 || flag_chkp_use_static_const_bounds > 0)
2149 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2150 gimple stmt;
2152 none_bounds = chkp_get_tmp_reg (NULL);
2153 stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
2154 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2156 else
2157 none_bounds = chkp_make_bounds (integer_minus_one_node,
2158 build_int_cst (size_type_node, 2),
2159 NULL,
2160 false);
2162 return none_bounds;
2165 /* Return bounds to be used as a result of operation which
2166 should not create poiunter (e.g. MULT_EXPR). */
2167 static tree
2168 chkp_get_invalid_op_bounds (void)
2170 return chkp_get_zero_bounds ();
2173 /* Return bounds to be used for loads of non-pointer values. */
2174 static tree
2175 chkp_get_nonpointer_load_bounds (void)
2177 return chkp_get_zero_bounds ();
2180 /* Return 1 if may use bndret call to get bounds for pointer
2181 returned by CALL. */
2182 static bool
2183 chkp_call_returns_bounds_p (gcall *call)
2185 if (gimple_call_internal_p (call))
2186 return false;
2188 if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
2189 || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
2190 return true;
2192 if (gimple_call_with_bounds_p (call))
2193 return true;
2195 tree fndecl = gimple_call_fndecl (call);
2197 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
2198 return false;
2200 if (fndecl && !chkp_instrumentable_p (fndecl))
2201 return false;
2203 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2205 if (chkp_instrument_normal_builtin (fndecl))
2206 return true;
2208 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
2209 return false;
2211 struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
2212 return (clone && gimple_has_body_p (clone->decl));
2215 return true;
2218 /* Build and return bounds for the pointer returned by CALL.
     Special-cases several builtins whose returned bounds are known
     statically; otherwise emits a __chkp_ret_bnd call after CALL. */
2219 static tree
2220 chkp_build_returned_bound (gcall *call)
2222 gimple_stmt_iterator gsi;
2223 tree bounds;
2224 gimple stmt;
2225 tree fndecl = gimple_call_fndecl (call);
2226 unsigned int retflags;
2228 /* To avoid fixing alloca expands in targets we handle
2229 it separately. */
2230 if (fndecl
2231 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2232 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
2233 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
/* alloca (size): lower bound is the returned pointer itself.  */
2235 tree size = gimple_call_arg (call, 0);
2236 tree lb = gimple_call_lhs (call);
2237 gimple_stmt_iterator iter = gsi_for_stmt (call);
2238 bounds = chkp_make_bounds (lb, size, &iter, true);
2240 /* We know bounds returned by set_bounds builtin call. */
2241 else if (fndecl
2242 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2243 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
2245 tree lb = gimple_call_arg (call, 0);
2246 tree size = gimple_call_arg (call, 1);
2247 gimple_stmt_iterator iter = gsi_for_stmt (call);
2248 bounds = chkp_make_bounds (lb, size, &iter, true);
2250 /* Detect bounds initialization calls. */
2251 else if (fndecl
2252 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2253 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
2254 bounds = chkp_get_zero_bounds ();
2255 /* Detect bounds nullification calls. */
2256 else if (fndecl
2257 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2258 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
2259 bounds = chkp_get_none_bounds ();
2260 /* Detect bounds copy calls. */
2261 else if (fndecl
2262 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2263 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
2265 gimple_stmt_iterator iter = gsi_for_stmt (call);
2266 bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
2268 /* Do not use retbnd when returned bounds are equal to some
2269 of passed bounds. */
2270 else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
2271 && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
2273 gimple_stmt_iterator iter = gsi_for_stmt (call);
2274 unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
2275 if (gimple_call_with_bounds_p (call))
/* RETARG counts only the original (non-bounds) arguments, so skip
   interleaved bounds arguments while locating the actual one.  */
2277 for (argno = 0; argno < gimple_call_num_args (call); argno++)
2278 if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
2280 if (retarg)
2281 retarg--;
2282 else
2283 break;
2286 else
2287 argno = retarg;
2289 bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
2291 else if (chkp_call_returns_bounds_p (call))
2293 gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);
2295 /* In general case build checker builtin call to
2296 obtain returned bounds. */
2297 stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
2298 gimple_call_lhs (call));
2299 chkp_mark_stmt (stmt);
2301 gsi = gsi_for_stmt (call);
2302 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
2304 bounds = chkp_get_tmp_reg (stmt);
2305 gimple_call_set_lhs (stmt, bounds);
2307 update_stmt (stmt);
2309 else
2310 bounds = chkp_get_zero_bounds ();
2312 if (dump_file && (dump_flags & TDF_DETAILS))
2314 fprintf (dump_file, "Built returned bounds (");
2315 print_generic_expr (dump_file, bounds, 0);
2316 fprintf (dump_file, ") for call: ");
2317 print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
2320 bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);
2322 return bounds;
2325 /* Return the retbnd call obtaining bounds for the value returned by
2326 the call which produced SSA name VAL, or NULL if there is none. */
2327 gcall *
2328 chkp_retbnd_call_by_val (tree val)
2330 if (TREE_CODE (val) != SSA_NAME)
2331 return NULL;
/* VAL is expected to be defined by a call statement.  */
2333 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL)(
2335 imm_use_iterator use_iter;
2336 use_operand_p use_p;
/* Scan immediate uses of VAL for a call to chkp_ret_bnd_fndecl.  */
2337 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2338 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2339 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2340 return as_a <gcall *> (USE_STMT (use_p));
2342 return NULL;
2345 /* Check that the parameter following PARM is a bounds parameter
2346 and return its default SSA_NAME (created if required). */
2347 static tree
2348 chkp_get_next_bounds_parm (tree parm)
2350 tree bounds = TREE_CHAIN (parm);
2351 gcc_assert (POINTER_BOUNDS_P (bounds));
2352 bounds = ssa_default_def (cfun, bounds);
2353 if (!bounds)
/* No default definition yet - create one backed by a GIMPLE_NOP.  */
2355 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2356 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2358 return bounds;
2361 /* Return bounds to be used for input argument PARM (an SSA name
     whose SSA_NAME_VAR is a PARM_DECL). */
2362 static tree
2363 chkp_get_bound_for_parm (tree parm)
2365 tree decl = SSA_NAME_VAR (parm);
2366 tree bounds;
2368 gcc_assert (TREE_CODE (decl) == PARM_DECL);
/* Reuse bounds already registered for the SSA name or its decl.  */
2370 bounds = chkp_get_registered_bounds (parm);
2372 if (!bounds)
2373 bounds = chkp_get_registered_bounds (decl);
2375 if (!bounds)
2377 tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;
2379 /* For static chain param we return zero bounds
2380 because currently we do not check dereferences
2381 of this pointer. */
2382 if (cfun->static_chain_decl == decl)
2383 bounds = chkp_get_zero_bounds ();
2384 /* If non instrumented runtime is used then it may be useful
2385 to use zero bounds for input arguments of main
2386 function. */
2387 else if (flag_chkp_zero_input_bounds_for_main
2388 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
2389 "main") == 0)
2390 bounds = chkp_get_zero_bounds ();
2391 else if (BOUNDED_P (parm))
/* Bounds come in as the next (hidden) parameter after DECL.  */
2393 bounds = chkp_get_next_bounds_parm (decl);
2394 bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);
2396 if (dump_file && (dump_flags & TDF_DETAILS))
2398 fprintf (dump_file, "Built arg bounds (");
2399 print_generic_expr (dump_file, bounds, 0);
2400 fprintf (dump_file, ") for arg: ");
2401 print_node (dump_file, "", decl, 0);
2404 else
2405 bounds = chkp_get_zero_bounds ();
2408 if (!chkp_get_registered_bounds (parm))
2409 bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);
2411 if (dump_file && (dump_flags & TDF_DETAILS))
2413 fprintf (dump_file, "Using bounds ");
2414 print_generic_expr (dump_file, bounds, 0);
2415 fprintf (dump_file, " for parm ");
2416 print_generic_expr (dump_file, parm, 0);
2417 fprintf (dump_file, " of type ");
2418 print_generic_expr (dump_file, TREE_TYPE (parm), 0);
2419 fprintf (dump_file, ".\n");
2422 return bounds;
2425 /* Build and return CALL_EXPR for bndstx builtin with specified
2426 arguments. */
2427 tree
2428 chkp_build_bndldx_call (tree addr, tree ptr)
2430 tree fn = build1 (ADDR_EXPR,
2431 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2432 chkp_bndldx_fndecl);
2433 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2434 fn, 2, addr, ptr);
2435 CALL_WITH_BOUNDS_P (call) = true;
2436 return call;
2439 /* Insert code to load bounds for PTR located by ADDR.
2440 Code is inserted after position pointed by GSI.
2441 Loaded bounds are returned. */
2442 static tree
2443 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2445 gimple_seq seq;
2446 gimple stmt;
2447 tree bounds;
2449 seq = NULL;
/* Both operands must be valid gimple call operands; force them
   into such form, accumulating any fixup statements in SEQ.  */
2451 addr = chkp_force_gimple_call_op (addr, &seq);
2452 ptr = chkp_force_gimple_call_op (ptr, &seq);
2454 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2455 chkp_mark_stmt (stmt);
/* The loaded bounds land in a fresh temporary register.  */
2456 bounds = chkp_get_tmp_reg (stmt);
2457 gimple_call_set_lhs (stmt, bounds);
2459 gimple_seq_add_stmt (&seq, stmt);
2461 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2463 if (dump_file && (dump_flags & TDF_DETAILS))
2465 fprintf (dump_file, "Generated bndldx for pointer ");
2466 print_generic_expr (dump_file, ptr, 0);
2467 fprintf (dump_file, ": ");
2468 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2471 return bounds;
2474 /* Build and return CALL_EXPR for bndstx builtin with specified
2475 arguments. */
2476 tree
2477 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2479 tree fn = build1 (ADDR_EXPR,
2480 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2481 chkp_bndstx_fndecl);
2482 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2483 fn, 3, ptr, bounds, addr);
2484 CALL_WITH_BOUNDS_P (call) = true;
2485 return call;
2488 /* Insert code to store BOUNDS for PTR stored by ADDR.
2489 New statements are inserted after position pointed
2490 by GSI. */
2491 void
2492 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2493 gimple_stmt_iterator *gsi)
2495 gimple_seq seq;
2496 gimple stmt;
2498 seq = NULL;
/* Force operands into valid gimple call operand form; fixup
   statements accumulate in SEQ ahead of the bndstx call.  */
2500 addr = chkp_force_gimple_call_op (addr, &seq);
2501 ptr = chkp_force_gimple_call_op (ptr, &seq);
2503 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2504 chkp_mark_stmt (stmt);
/* Mark the call itself as bounds-aware so it is not re-instrumented.  */
2505 gimple_call_set_with_bounds (stmt, true);
2507 gimple_seq_add_stmt (&seq, stmt);
2509 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2511 if (dump_file && (dump_flags & TDF_DETAILS))
2513 fprintf (dump_file, "Generated bndstx for pointer store ");
2514 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2515 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2519 /* Compute bounds for pointer NODE which was assigned in
2520 assignment statement ASSIGN. Return computed bounds. */
2521 static tree
2522 chkp_compute_bounds_for_assignment (tree node, gimple assign)
2524 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
2525 tree rhs1 = gimple_assign_rhs1 (assign);
2526 tree bounds = NULL_TREE;
2527 gimple_stmt_iterator iter = gsi_for_stmt (assign);
/* BASE records the operand whose bounds we ended up reusing; used
   below for the abnormal-PHI copy workaround.  */
2528 tree base = NULL;
2530 if (dump_file && (dump_flags & TDF_DETAILS))
2532 fprintf (dump_file, "Computing bounds for assignment: ");
2533 print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
2536 switch (rhs_code)
2538 case MEM_REF:
2539 case TARGET_MEM_REF:
2540 case COMPONENT_REF:
2541 case ARRAY_REF:
2542 /* We need to load bounds from the bounds table. */
2543 bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
2544 break;
2546 case VAR_DECL:
2547 case SSA_NAME:
2548 case ADDR_EXPR:
2549 case POINTER_PLUS_EXPR:
2550 case NOP_EXPR:
2551 case CONVERT_EXPR:
2552 case INTEGER_CST:
2553 /* Bounds are just propagated from RHS. */
2554 bounds = chkp_find_bounds (rhs1, &iter);
2555 base = rhs1;
2556 break;
2558 case VIEW_CONVERT_EXPR:
2559 /* Bounds are just propagated from RHS. */
2560 bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
2561 break;
2563 case PARM_DECL:
2564 if (BOUNDED_P (rhs1))
2566 /* We need to load bounds from the bounds table. */
2567 bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
2568 node, &iter);
2569 TREE_ADDRESSABLE (rhs1) = 1;
2571 else
2572 bounds = chkp_get_nonpointer_load_bounds ();
2573 break;
2575 case MINUS_EXPR:
2576 case PLUS_EXPR:
2577 case BIT_AND_EXPR:
2578 case BIT_IOR_EXPR:
2579 case BIT_XOR_EXPR:
2581 tree rhs2 = gimple_assign_rhs2 (assign);
2582 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2583 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2585 /* First we try to check types of operands. If it
2586 does not help then look at bound values.
2588 If some bounds are incomplete and other are
2589 not proven to be valid (i.e. also incomplete
2590 or invalid because value is not pointer) then
2591 resulting value is incomplete and will be
2592 recomputed later in chkp_finish_incomplete_bounds. */
2593 if (BOUNDED_P (rhs1)
2594 && !BOUNDED_P (rhs2))
2595 bounds = bnd1;
2596 else if (BOUNDED_P (rhs2)
2597 && !BOUNDED_P (rhs1)
2598 && rhs_code != MINUS_EXPR)
2599 bounds = bnd2;
2600 else if (chkp_incomplete_bounds (bnd1))
2601 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
2602 && !chkp_incomplete_bounds (bnd2))
2603 bounds = bnd2;
2604 else
2605 bounds = incomplete_bounds;
2606 else if (chkp_incomplete_bounds (bnd2))
2607 if (chkp_valid_bounds (bnd1)
2608 && !chkp_incomplete_bounds (bnd1))
2609 bounds = bnd1;
2610 else
2611 bounds = incomplete_bounds;
2612 else if (!chkp_valid_bounds (bnd1))
2613 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
2614 bounds = bnd2;
2615 else if (bnd2 == chkp_get_zero_bounds ())
2616 bounds = bnd2;
2617 else
2618 bounds = bnd1;
2619 else if (!chkp_valid_bounds (bnd2))
2620 bounds = bnd1;
2621 else
2622 /* Seems both operands may have valid bounds
2623 (e.g. pointer minus pointer). In such case
2624 use default invalid op bounds. */
2625 bounds = chkp_get_invalid_op_bounds ();
2627 base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
2629 break;
2631 case BIT_NOT_EXPR:
2632 case NEGATE_EXPR:
2633 case LSHIFT_EXPR:
2634 case RSHIFT_EXPR:
2635 case LROTATE_EXPR:
2636 case RROTATE_EXPR:
2637 case EQ_EXPR:
2638 case NE_EXPR:
2639 case LT_EXPR:
2640 case LE_EXPR:
2641 case GT_EXPR:
2642 case GE_EXPR:
2643 case MULT_EXPR:
2644 case RDIV_EXPR:
2645 case TRUNC_DIV_EXPR:
2646 case FLOOR_DIV_EXPR:
2647 case CEIL_DIV_EXPR:
2648 case ROUND_DIV_EXPR:
2649 case TRUNC_MOD_EXPR:
2650 case FLOOR_MOD_EXPR:
2651 case CEIL_MOD_EXPR:
2652 case ROUND_MOD_EXPR:
2653 case EXACT_DIV_EXPR:
2654 case FIX_TRUNC_EXPR:
2655 case FLOAT_EXPR:
2656 case REALPART_EXPR:
2657 case IMAGPART_EXPR:
2658 /* No valid bounds may be produced by these exprs. */
2659 bounds = chkp_get_invalid_op_bounds ();
2660 break;
2662 case COND_EXPR:
2664 tree val1 = gimple_assign_rhs2 (assign);
2665 tree val2 = gimple_assign_rhs3 (assign);
2666 tree bnd1 = chkp_find_bounds (val1, &iter);
2667 tree bnd2 = chkp_find_bounds (val2, &iter);
2668 gimple stmt;
2670 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2671 bounds = incomplete_bounds;
2672 else if (bnd1 == bnd2)
2673 bounds = bnd1;
2674 else
/* Build a bounds COND_EXPR mirroring the pointer COND_EXPR.  */
2676 rhs1 = unshare_expr (rhs1);
2678 bounds = chkp_get_tmp_reg (assign);
2679 stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
2680 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2682 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2683 chkp_mark_invalid_bounds (bounds);
2686 break;
2688 case MAX_EXPR:
2689 case MIN_EXPR:
2691 tree rhs2 = gimple_assign_rhs2 (assign);
2692 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2693 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2695 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2696 bounds = incomplete_bounds;
2697 else if (bnd1 == bnd2)
2698 bounds = bnd1;
2699 else
/* Select bounds of the winning operand via an explicit compare.  */
2701 gimple stmt;
2702 tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
2703 boolean_type_node, rhs1, rhs2);
2704 bounds = chkp_get_tmp_reg (assign);
2705 stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);
2707 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2709 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2710 chkp_mark_invalid_bounds (bounds);
2713 break;
2715 default:
2716 bounds = chkp_get_zero_bounds ();
2717 warning (0, "pointer bounds were lost due to unexpected expression %s",
2718 get_tree_code_name (rhs_code));
2721 gcc_assert (bounds);
2723 /* We may reuse bounds of other pointer we copy/modify. But it is not
2724 allowed for abnormal ssa names. If we produced a pointer using
2725 abnormal ssa name, we better make a bounds copy to avoid coalescing
2726 issues. */
2727 if (base
2728 && TREE_CODE (base) == SSA_NAME
2729 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
2731 gimple stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
2732 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2733 bounds = gimple_assign_lhs (stmt);
2736 if (node)
2737 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2739 return bounds;
2742 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2744 There are just few statement codes allowed: NOP (for default ssa names),
2745 ASSIGN, CALL, PHI, ASM.
2747 Return computed bounds. */
2748 static tree
2749 chkp_get_bounds_by_definition (tree node, gimple def_stmt,
2750 gphi_iterator *iter)
2752 tree var, bounds;
2753 enum gimple_code code = gimple_code (def_stmt);
2754 gphi *stmt;
2756 if (dump_file && (dump_flags & TDF_DETAILS))
2758 fprintf (dump_file, "Searching for bounds for node: ");
2759 print_generic_expr (dump_file, node, 0);
2761 fprintf (dump_file, " using its definition: ");
2762 print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2765 switch (code)
2767 case GIMPLE_NOP:
/* Default definition - bounds depend on what kind of decl the
   SSA name is based on.  */
2768 var = SSA_NAME_VAR (node);
2769 switch (TREE_CODE (var))
2771 case PARM_DECL:
2772 bounds = chkp_get_bound_for_parm (node);
2773 break;
2775 case VAR_DECL:
2776 /* For uninitialized pointers use none bounds. */
2777 bounds = chkp_get_none_bounds ();
2778 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2779 break;
2781 case RESULT_DECL:
2783 tree base_type;
2785 gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2787 base_type = TREE_TYPE (TREE_TYPE (node));
/* The referenced type must have a known non-zero constant size
   to build bounds from it.  */
2789 gcc_assert (TYPE_SIZE (base_type)
2790 && TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2791 && tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2793 bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2794 NULL, false);
2795 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2797 break;
2799 default:
2800 if (dump_file && (dump_flags & TDF_DETAILS))
2802 fprintf (dump_file, "Unexpected var with no definition\n");
2803 print_generic_expr (dump_file, var, 0);
2805 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2806 get_tree_code_name (TREE_CODE (var)));
2808 break;
2810 case GIMPLE_ASSIGN:
2811 bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2812 break;
2814 case GIMPLE_CALL:
2815 bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
2816 break;
2818 case GIMPLE_PHI:
/* For abnormal PHIs the bounds variable must be tied to the
   pointer's variable to avoid coalescing problems.  */
2819 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
2820 if (SSA_NAME_VAR (node))
2821 var = chkp_get_bounds_var (SSA_NAME_VAR (node));
2822 else
2823 var = make_temp_ssa_name (pointer_bounds_type_node,
2824 NULL,
2825 CHKP_BOUND_TMP_NAME);
2826 else
2827 var = chkp_get_tmp_var ();
2828 stmt = create_phi_node (var, gimple_bb (def_stmt));
2829 bounds = gimple_phi_result (stmt);
2830 *iter = gsi_for_phi (stmt);
2832 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2834 /* Created bounds do not have all phi args computed and
2835 therefore we do not know if there is a valid source
2836 of bounds for that node. Therefore we mark bounds
2837 as incomplete and then recompute them when all phi
2838 args are computed. */
2839 chkp_register_incomplete_bounds (bounds, node);
2840 break;
2842 case GIMPLE_ASM:
2843 bounds = chkp_get_zero_bounds ();
2844 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2845 break;
2847 default:
2848 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2849 gimple_code_name[code]);
2852 return bounds;
2855 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2856 tree
2857 chkp_build_make_bounds_call (tree lower_bound, tree size)
2859 tree call = build1 (ADDR_EXPR,
2860 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2861 chkp_bndmk_fndecl);
2862 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2863 call, 2, lower_bound, size);
2866 /* Create a static bounds var for the specified OBJ, which is
2867 either a VAR_DECL or a string constant. */
2868 static tree
2869 chkp_make_static_bounds (tree obj)
2871 static int string_id = 1;
2872 static int var_id = 1;
2873 tree *slot;
2874 const char *var_name;
2875 char *bnd_var_name;
2876 tree bnd_var;
2878 /* First check if we already have required var. */
2879 if (chkp_static_var_bounds)
2881 /* For vars we use assembler name as a key in
2882 chkp_static_var_bounds map. It allows to
2883 avoid duplicating bound vars for decls
2884 sharing assembler name. */
2885 if (TREE_CODE (obj) == VAR_DECL)
2887 tree name = DECL_ASSEMBLER_NAME (obj);
2888 slot = chkp_static_var_bounds->get (name);
2889 if (slot)
2890 return *slot;
2892 else
2894 slot = chkp_static_var_bounds->get (obj);
2895 if (slot)
2896 return *slot;
2900 /* Build decl for bounds var. */
2901 if (TREE_CODE (obj) == VAR_DECL)
/* Ignored (compiler-internal) decls get a numbered generic name;
   others derive the name from the assembler name.  */
2903 if (DECL_IGNORED_P (obj))
2905 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2906 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2908 else
2910 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2912 /* For hidden symbols we want to skip first '*' char. */
2913 if (*var_name == '*')
2914 var_name++;
2916 bnd_var_name = (char *) xmalloc (strlen (var_name)
2917 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2918 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2919 strcat (bnd_var_name, var_name);
2922 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2923 get_identifier (bnd_var_name),
2924 pointer_bounds_type_node);
2926 /* Address of the obj will be used as lower bound. */
2927 TREE_ADDRESSABLE (obj) = 1;
2929 else
/* String constants get a numbered bounds var.  */
2931 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2932 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2934 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2935 get_identifier (bnd_var_name),
2936 pointer_bounds_type_node);
2939 TREE_PUBLIC (bnd_var) = 0;
2940 TREE_USED (bnd_var) = 1;
2941 TREE_READONLY (bnd_var) = 0;
2942 TREE_STATIC (bnd_var) = 1;
2943 TREE_ADDRESSABLE (bnd_var) = 0;
2944 DECL_ARTIFICIAL (bnd_var) = 1;
2945 DECL_COMMON (bnd_var) = 1;
2946 DECL_COMDAT (bnd_var) = 1;
2947 DECL_READ_P (bnd_var) = 1;
2948 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2949 /* Force output similar to constant bounds.
2950 See chkp_make_static_const_bounds. */
2951 varpool_node::get_create (bnd_var)->force_output = 1;
2952 /* Mark symbol as requiring bounds initialization. */
2953 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2954 varpool_node::finalize_decl (bnd_var);
2956 /* Add created var to the map to use it for other references
2957 to obj. */
2958 if (!chkp_static_var_bounds)
2959 chkp_static_var_bounds = new hash_map<tree, tree>;
2961 if (TREE_CODE (obj) == VAR_DECL)
2963 tree name = DECL_ASSEMBLER_NAME (obj);
2964 chkp_static_var_bounds->put (name, bnd_var);
2966 else
2967 chkp_static_var_bounds->put (obj, bnd_var);
2969 return bnd_var;
2972 /* When var has incomplete type we cannot get size to
2973 compute its bounds. In such cases we use checker
2974 builtin call which determines object size at runtime. */
2975 static tree
2976 chkp_generate_extern_var_bounds (tree var)
2978 tree bounds, size_reloc, lb, size, max_size, cond;
2979 gimple_stmt_iterator gsi;
2980 gimple_seq seq = NULL;
2981 gimple stmt;
2983 /* If instrumentation is not enabled for vars having
2984 incomplete type then just return zero bounds to avoid
2985 checks for this var. */
2986 if (!flag_chkp_incomplete_type)
2987 return chkp_get_zero_bounds ();
2989 if (dump_file && (dump_flags & TDF_DETAILS))
2991 fprintf (dump_file, "Generating bounds for extern symbol '");
2992 print_generic_expr (dump_file, var, 0);
2993 fprintf (dump_file, "'\n");
/* Emit __chkp_sizeof (var); the size is resolved via relocation.  */
2996 stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
2998 size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
2999 gimple_call_set_lhs (stmt, size_reloc);
3001 gimple_seq_add_stmt (&seq, stmt);
3003 lb = chkp_build_addr_expr (var);
3004 size = make_ssa_name (chkp_get_size_tmp_var ());
3006 if (flag_chkp_zero_dynamic_size_as_infinite)
3008 /* We should check that size relocation was resolved.
3009 If it was not then use maximum possible size for the var. */
3010 max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
3011 fold_convert (chkp_uintptr_type, lb))(
3012 max_size = chkp_force_gimple_call_op (max_size, &seq);
3014 cond = build2 (NE_EXPR, boolean_type_node,
3015 size_reloc, integer_zero_node);
3016 stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
3017 gimple_seq_add_stmt (&seq, stmt);
3019 else
3021 stmt = gimple_build_assign (size, size_reloc);
3022 gimple_seq_add_stmt (&seq, stmt);
/* All generated statements go to the start of the entry block.  */
3025 gsi = gsi_start_bb (chkp_get_entry_block ());
3026 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3028 bounds = chkp_make_bounds (lb, size, &gsi, true);
3030 return bounds;
3033 /* Return true if TYPE has fields with zero size or fields
3034 marked with the bnd_variable_size attribute (checked
     recursively through nested record/union fields). */
3035 bool
3036 chkp_variable_size_type (tree type)
3038 bool res = false;
3039 tree field;
3041 if (RECORD_OR_UNION_TYPE_P (type))
3042 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3044 if (TREE_CODE (field) == FIELD_DECL)
3045 res = res
3046 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3047 || chkp_variable_size_type (TREE_TYPE (field));
3049 else
/* Non-aggregate: variable-sized when the size is unknown,
   non-constant or zero.  */
3050 res = !TYPE_SIZE (type)
3051 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3052 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3054 return res;
3057 /* Compute and return bounds for address of DECL which is
3058 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
3059 static tree
3060 chkp_get_bounds_for_decl_addr (tree decl)
3062 tree bounds;
3064 gcc_assert (TREE_CODE (decl) == VAR_DECL
3065 || TREE_CODE (decl) == PARM_DECL
3066 || TREE_CODE (decl) == RESULT_DECL);
3068 bounds = chkp_get_registered_addr_bounds (decl);
3070 if (bounds)
3071 return bounds;
3073 if (dump_file && (dump_flags & TDF_DETAILS))
3075 fprintf (dump_file, "Building bounds for address of decl ");
3076 print_generic_expr (dump_file, decl, 0);
3077 fprintf (dump_file, "\n");
3080 /* Use zero bounds if size is unknown and checks for
3081 unknown sizes are restricted. */
3082 if ((!DECL_SIZE (decl)
3083 || (chkp_variable_size_type (TREE_TYPE (decl))
3084 && (TREE_STATIC (decl)
3085 || DECL_EXTERNAL (decl)
3086 || TREE_PUBLIC (decl))))
3087 && !flag_chkp_incomplete_type)
3088 return chkp_get_zero_bounds ();
/* Prefer a statically allocated bounds var for static/global
   non-TLS vars when enabled.  */
3090 if (flag_chkp_use_static_bounds
3091 && TREE_CODE (decl) == VAR_DECL
3092 && (TREE_STATIC (decl)
3093 || DECL_EXTERNAL (decl)
3094 || TREE_PUBLIC (decl))
3095 && !DECL_THREAD_LOCAL_P (decl))
3097 tree bnd_var = chkp_make_static_bounds (decl);
3098 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3099 gimple stmt;
3101 bounds = chkp_get_tmp_reg (NULL);
3102 stmt = gimple_build_assign (bounds, bnd_var);
3103 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3105 else if (!DECL_SIZE (decl)
3106 || (chkp_variable_size_type (TREE_TYPE (decl))
3107 && (TREE_STATIC (decl)
3108 || DECL_EXTERNAL (decl)
3109 || TREE_PUBLIC (decl))))
/* Size unknown at compile time - query it at runtime.  */
3111 gcc_assert (TREE_CODE (decl) == VAR_DECL);
3112 bounds = chkp_generate_extern_var_bounds (decl);
3114 else
3116 tree lb = chkp_build_addr_expr (decl);
3117 bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3120 return bounds;
3123 /* Compute and return bounds for constant string CST. */
3124 static tree
3125 chkp_get_bounds_for_string_cst (tree cst)
3127 tree bounds;
3128 tree lb;
3129 tree size;
3131 gcc_assert (TREE_CODE (cst) == STRING_CST);
3133 bounds = chkp_get_registered_bounds (cst);
3135 if (bounds)
3136 return bounds;
/* Static bounds vars are used either when both static-bounds flags
   are on, or when static const bounds are explicitly requested.  */
3138 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3139 || flag_chkp_use_static_const_bounds > 0)
3141 tree bnd_var = chkp_make_static_bounds (cst);
3142 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3143 gimple stmt;
3145 bounds = chkp_get_tmp_reg (NULL);
3146 stmt = gimple_build_assign (bounds, bnd_var);
3147 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3149 else
3151 lb = chkp_build_addr_expr (cst);
3152 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3153 bounds = chkp_make_bounds (lb, size, NULL, false);
3156 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3158 return bounds;
3161 /* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
3162 return the result. If ITER is not NULL then code is inserted
3163 before position pointed by ITER. Otherwise code is added to
3164 entry block. */
3165 static tree
3166 chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
/* Zero bounds act as the identity for intersection - short-circuit.  */
3168 if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
3169 return bounds2 ? bounds2 : bounds1;
3170 else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
3171 return bounds1;
3172 else
3174 gimple_seq seq;
3175 gimple stmt;
3176 tree bounds;
3178 seq = NULL;
3180 stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
3181 chkp_mark_stmt (stmt);
3183 bounds = chkp_get_tmp_reg (stmt);
3184 gimple_call_set_lhs (stmt, bounds);
3186 gimple_seq_add_stmt (&seq, stmt);
3188 /* We are probably doing narrowing for constant expression.
3189 In such case iter may be undefined. */
3190 if (!iter)
3192 gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
3193 iter = &gsi;
3194 gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
3196 else
3197 gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
3199 if (dump_file && (dump_flags & TDF_DETAILS))
3201 fprintf (dump_file, "Bounds intersection: ");
3202 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
3203 fprintf (dump_file, " inserted before statement: ");
3204 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
3205 TDF_VOPS|TDF_MEMSYMS);
3208 return bounds;
3212 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3213 and 0 othersize. */
3214 static bool
3215 chkp_may_narrow_to_field (tree field)
3217 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3218 && tree_to_uhwi (DECL_SIZE (field)) != 0
3219 && (!DECL_FIELD_OFFSET (field)
3220 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3221 && (!DECL_FIELD_BIT_OFFSET (field)
3222 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3223 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3224 && !chkp_variable_size_type (TREE_TYPE (field));
3227 /* Return true if bounds for FIELD should be narrowed to
3228 field's own size. */
3229 static bool
3230 chkp_narrow_bounds_for_field (tree field)
3232 HOST_WIDE_INT offs;
3233 HOST_WIDE_INT bit_offs;
3235 if (!chkp_may_narrow_to_field (field))
3236 return false;
3238 /* Accesses to compiler generated fields should not cause
3239 bounds narrowing. */
3240 if (DECL_ARTIFICIAL (field))
3241 return false;
3243 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3244 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
/* Narrow only if narrowing is enabled and FIELD is not the very
   first field (unless the first field gets its own bounds too).  */
3246 return (flag_chkp_narrow_bounds
3247 && (flag_chkp_first_field_has_own_bounds
3248 || offs
3249 || bit_offs));
3252 /* Perform narrowing for BOUNDS using bounds computed for field
3253 access COMPONENT. ITER meaning is the same as for
3254 chkp_intersect_bounds. */
3255 static tree
3256 chkp_narrow_bounds_to_field (tree bounds, tree component,
3257 gimple_stmt_iterator *iter)
3259 tree field = TREE_OPERAND (component, 1);
3260 tree size = DECL_SIZE_UNIT (field);
3261 tree field_ptr = chkp_build_addr_expr (component);
3262 tree field_bounds;
/* Build the field's own bounds, then intersect with BOUNDS.  */
3264 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3266 return chkp_intersect_bounds (field_bounds, bounds, iter);
3269 /* Parse field or array access NODE.
3271 PTR output parameter holds a pointer to the outermost
3272 object.
3274 BITFIELD output parameter is set to 1 if bitfield is
3275 accessed and to 0 otherwise. If it is 1 then ELT holds
3276 outer component for accessed bit field.
3278 SAFE output parameter is set to 1 if access is safe and
3279 checks are not required.
3281 BOUNDS output parameter holds bounds to be used to check
3282 access (may be NULL).
3284 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3285 innermost accessed component. */
3286 static void
3287 chkp_parse_array_and_component_ref (tree node, tree *ptr,
3288 tree *elt, bool *safe,
3289 bool *bitfield,
3290 tree *bounds,
3291 gimple_stmt_iterator *iter,
3292 bool innermost_bounds)
3294 tree comp_to_narrow = NULL_TREE;
3295 tree last_comp = NULL_TREE;
3296 bool array_ref_found = false;
3297 tree *nodes;
3298 tree var;
3299 int len;
3300 int i;
3302 /* Compute tree height for expression. */
3303 var = node;
3304 len = 1;
3305 while (TREE_CODE (var) == COMPONENT_REF
3306 || TREE_CODE (var) == ARRAY_REF
3307 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
3309 var = TREE_OPERAND (var, 0);
3310 len++;
3313 gcc_assert (len > 1);
3315 /* It is more convenient for us to scan left-to-right,
3316 so walk tree again and put all node to nodes vector
3317 in reversed order. */
3318 nodes = XALLOCAVEC (tree, len);
3319 nodes[len - 1] = node;
3320 for (i = len - 2; i >= 0; i--)
3321 nodes[i] = TREE_OPERAND (nodes[i + 1], 0);
3323 if (bounds)
3324 *bounds = NULL;
3325 *safe = true;
3326 *bitfield = (TREE_CODE (node) == COMPONENT_REF
3327 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
3328 /* To get bitfield address we will need outer element. */
3329 if (*bitfield)
3330 *elt = nodes[len - 2];
3331 else
3332 *elt = NULL_TREE;
3334 /* If we have indirection in expression then compute
3335 outermost structure bounds. Computed bounds may be
3336 narrowed later. */
3337 if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
3339 *safe = false;
3340 *ptr = TREE_OPERAND (nodes[0], 0);
3341 if (bounds)
3342 *bounds = chkp_find_bounds (*ptr, iter);
3344 else
/* No indirection: the base must be an addressable decl, string
   constant or SSA name.  */
3346 gcc_assert (TREE_CODE (var) == VAR_DECL
3347 || TREE_CODE (var) == PARM_DECL
3348 || TREE_CODE (var) == RESULT_DECL
3349 || TREE_CODE (var) == STRING_CST
3350 || TREE_CODE (var) == SSA_NAME);
3352 *ptr = chkp_build_addr_expr (var);
3355 /* In this loop we are trying to find a field access
3356 requiring narrowing. There are two simple rules
3357 for search:
3358 1. Leftmost array_ref is chosen if any.
3359 2. Rightmost suitable component_ref is chosen if innermost
3360 bounds are required and no array_ref exists. */
3361 for (i = 1; i < len; i++)
3363 var = nodes[i];
3365 if (TREE_CODE (var) == ARRAY_REF)
3367 *safe = false;
3368 array_ref_found = true;
3369 if (flag_chkp_narrow_bounds
3370 && !flag_chkp_narrow_to_innermost_arrray
3371 && (!last_comp
3372 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
3374 comp_to_narrow = last_comp;
3375 break;
3378 else if (TREE_CODE (var) == COMPONENT_REF)
3380 tree field = TREE_OPERAND (var, 1);
3382 if (innermost_bounds
3383 && !array_ref_found
3384 && chkp_narrow_bounds_for_field (field))
3385 comp_to_narrow = var;
3386 last_comp = var;
3388 if (flag_chkp_narrow_bounds
3389 && flag_chkp_narrow_to_innermost_arrray
3390 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
/* Narrow right away to the array field and forget any pending
   component narrowing.  */
3392 if (bounds)
3393 *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
3394 comp_to_narrow = NULL;
3397 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
3398 /* Nothing to do for it. */
3400 else
3401 gcc_unreachable ();
3404 if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
3405 *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);
3407 if (innermost_bounds && bounds && !*bounds)
3408 *bounds = chkp_find_bounds (*ptr, iter);
3411 /* Compute and return bounds for address of OBJ. */
3412 static tree
3413 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
3415 tree bounds = chkp_get_registered_addr_bounds (obj);
3417 if (bounds)
3418 return bounds;
3420 switch (TREE_CODE (obj))
3422 case VAR_DECL:
3423 case PARM_DECL:
3424 case RESULT_DECL:
3425 bounds = chkp_get_bounds_for_decl_addr (obj);
3426 break;
3428 case STRING_CST:
3429 bounds = chkp_get_bounds_for_string_cst (obj);
3430 break;
3432 case ARRAY_REF:
3433 case COMPONENT_REF:
3435 tree elt;
3436 tree ptr;
3437 bool safe;
3438 bool bitfield;
3440 chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
3441 &bitfield, &bounds, iter, true);
3443 gcc_assert (bounds);
3445 break;
3447 case FUNCTION_DECL:
3448 case LABEL_DECL:
3449 bounds = chkp_get_zero_bounds ();
3450 break;
3452 case MEM_REF:
3453 bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
3454 break;
3456 case REALPART_EXPR:
3457 case IMAGPART_EXPR:
3458 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
3459 break;
3461 default:
3462 if (dump_file && (dump_flags & TDF_DETAILS))
3464 fprintf (dump_file, "chkp_make_addressed_object_bounds: "
3465 "unexpected object of type %s\n",
3466 get_tree_code_name (TREE_CODE (obj)));
3467 print_node (dump_file, "", obj, 0);
3469 internal_error ("chkp_make_addressed_object_bounds: "
3470 "Unexpected tree code %s",
3471 get_tree_code_name (TREE_CODE (obj)));
3474 chkp_register_addr_bounds (obj, bounds);
3476 return bounds;
3479 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3480 to compute bounds if required. Computed bounds should be available at
3481 position pointed by ITER.
3483 If PTR_SRC is NULL_TREE then pointer definition is identified.
3485 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3486 PTR. If PTR is a any memory reference then ITER points to a statement
3487 after which bndldx will be inserterd. In both cases ITER will be updated
3488 to point to the inserted bndldx statement. */
3490 static tree
3491 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3493 tree addr = NULL_TREE;
3494 tree bounds = NULL_TREE;
3496 if (!ptr_src)
3497 ptr_src = ptr;
3499 bounds = chkp_get_registered_bounds (ptr_src);
3501 if (bounds)
3502 return bounds;
3504 switch (TREE_CODE (ptr_src))
3506 case MEM_REF:
3507 case VAR_DECL:
3508 if (BOUNDED_P (ptr_src))
3509 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3510 bounds = chkp_get_zero_bounds ();
3511 else
3513 addr = chkp_build_addr_expr (ptr_src);
3514 bounds = chkp_build_bndldx (addr, ptr, iter);
3516 else
3517 bounds = chkp_get_nonpointer_load_bounds ();
3518 break;
3520 case ARRAY_REF:
3521 case COMPONENT_REF:
3522 addr = get_base_address (ptr_src);
3523 if (DECL_P (addr)
3524 || TREE_CODE (addr) == MEM_REF
3525 || TREE_CODE (addr) == TARGET_MEM_REF)
3527 if (BOUNDED_P (ptr_src))
3528 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3529 bounds = chkp_get_zero_bounds ();
3530 else
3532 addr = chkp_build_addr_expr (ptr_src);
3533 bounds = chkp_build_bndldx (addr, ptr, iter);
3535 else
3536 bounds = chkp_get_nonpointer_load_bounds ();
3538 else
3540 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3541 bounds = chkp_find_bounds (addr, iter);
3543 break;
3545 case PARM_DECL:
3546 gcc_unreachable ();
3547 bounds = chkp_get_bound_for_parm (ptr_src);
3548 break;
3550 case TARGET_MEM_REF:
3551 addr = chkp_build_addr_expr (ptr_src);
3552 bounds = chkp_build_bndldx (addr, ptr, iter);
3553 break;
3555 case SSA_NAME:
3556 bounds = chkp_get_registered_bounds (ptr_src);
3557 if (!bounds)
3559 gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3560 gphi_iterator phi_iter;
3562 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3564 gcc_assert (bounds);
3566 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3568 unsigned i;
3570 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3572 tree arg = gimple_phi_arg_def (def_phi, i);
3573 tree arg_bnd;
3574 gphi *phi_bnd;
3576 arg_bnd = chkp_find_bounds (arg, NULL);
3578 /* chkp_get_bounds_by_definition created new phi
3579 statement and phi_iter points to it.
3581 Previous call to chkp_find_bounds could create
3582 new basic block and therefore change phi statement
3583 phi_iter points to. */
3584 phi_bnd = phi_iter.phi ();
3586 add_phi_arg (phi_bnd, arg_bnd,
3587 gimple_phi_arg_edge (def_phi, i),
3588 UNKNOWN_LOCATION);
3591 /* If all bound phi nodes have their arg computed
3592 then we may finish its computation. See
3593 chkp_finish_incomplete_bounds for more details. */
3594 if (chkp_may_finish_incomplete_bounds ())
3595 chkp_finish_incomplete_bounds ();
3598 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3599 || chkp_incomplete_bounds (bounds));
3601 break;
3603 case ADDR_EXPR:
3604 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3605 break;
3607 case INTEGER_CST:
3608 if (integer_zerop (ptr_src))
3609 bounds = chkp_get_none_bounds ();
3610 else
3611 bounds = chkp_get_invalid_op_bounds ();
3612 break;
3614 default:
3615 if (dump_file && (dump_flags & TDF_DETAILS))
3617 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3618 get_tree_code_name (TREE_CODE (ptr_src)));
3619 print_node (dump_file, "", ptr_src, 0);
3621 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3622 get_tree_code_name (TREE_CODE (ptr_src)));
3625 if (!bounds)
3627 if (dump_file && (dump_flags & TDF_DETAILS))
3629 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3630 print_node (dump_file, "", ptr_src, 0);
3632 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3635 return bounds;
3638 /* Normal case for bounds search without forced narrowing. */
3639 static tree
3640 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3642 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3645 /* Search bounds for pointer PTR loaded from PTR_SRC
3646 by statement *ITER points to. */
3647 static tree
3648 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3650 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3653 /* Helper function which checks type of RHS and finds all pointers in
3654 it. For each found pointer we build it's accesses in LHS and RHS
3655 objects and then call HANDLER for them. Function is used to copy
3656 or initilize bounds for copied object. */
3657 static void
3658 chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
3659 assign_handler handler)
3661 tree type = TREE_TYPE (lhs);
3663 /* We have nothing to do with clobbers. */
3664 if (TREE_CLOBBER_P (rhs))
3665 return;
3667 if (BOUNDED_TYPE_P (type))
3668 handler (lhs, rhs, arg);
3669 else if (RECORD_OR_UNION_TYPE_P (type))
3671 tree field;
3673 if (TREE_CODE (rhs) == CONSTRUCTOR)
3675 unsigned HOST_WIDE_INT cnt;
3676 tree val;
3678 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
3680 if (chkp_type_has_pointer (TREE_TYPE (field)))
3682 tree lhs_field = chkp_build_component_ref (lhs, field);
3683 chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
3687 else
3688 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3689 if (TREE_CODE (field) == FIELD_DECL
3690 && chkp_type_has_pointer (TREE_TYPE (field)))
3692 tree rhs_field = chkp_build_component_ref (rhs, field);
3693 tree lhs_field = chkp_build_component_ref (lhs, field);
3694 chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
3697 else if (TREE_CODE (type) == ARRAY_TYPE)
3699 unsigned HOST_WIDE_INT cur = 0;
3700 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3701 tree etype = TREE_TYPE (type);
3702 tree esize = TYPE_SIZE (etype);
3704 if (TREE_CODE (rhs) == CONSTRUCTOR)
3706 unsigned HOST_WIDE_INT cnt;
3707 tree purp, val, lhs_elem;
3709 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
3711 if (purp && TREE_CODE (purp) == RANGE_EXPR)
3713 tree lo_index = TREE_OPERAND (purp, 0);
3714 tree hi_index = TREE_OPERAND (purp, 1);
3716 for (cur = (unsigned)tree_to_uhwi (lo_index);
3717 cur <= (unsigned)tree_to_uhwi (hi_index);
3718 cur++)
3720 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3721 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3724 else
3726 if (purp)
3728 gcc_assert (TREE_CODE (purp) == INTEGER_CST);
3729 cur = tree_to_uhwi (purp);
3732 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
3734 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3738 /* Copy array only when size is known. */
3739 else if (maxval && !integer_minus_onep (maxval))
3740 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
3742 tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3743 tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
3744 chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
3747 else
3748 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3749 get_tree_code_name (TREE_CODE (type)));
3752 /* Add code to copy bounds for assignment of RHS to LHS.
3753 ARG is an iterator pointing ne code position. */
3754 static void
3755 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3757 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3758 tree bounds = chkp_find_bounds (rhs, iter);
3759 tree addr = chkp_build_addr_expr(lhs);
3761 chkp_build_bndstx (addr, rhs, bounds, iter);
3764 /* Emit static bound initilizers and size vars. */
3765 void
3766 chkp_finish_file (void)
3768 struct varpool_node *node;
3769 struct chkp_ctor_stmt_list stmts;
3771 if (seen_error ())
3772 return;
3774 /* Iterate through varpool and generate bounds initialization
3775 constructors for all statically initialized pointers. */
3776 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3777 stmts.stmts = NULL;
3778 FOR_EACH_VARIABLE (node)
3779 /* Check that var is actually emitted and we need and may initialize
3780 its bounds. */
3781 if (node->need_bounds_init
3782 && !POINTER_BOUNDS_P (node->decl)
3783 && DECL_RTL (node->decl)
3784 && MEM_P (DECL_RTL (node->decl))
3785 && TREE_ASM_WRITTEN (node->decl))
3787 chkp_walk_pointer_assignments (node->decl,
3788 DECL_INITIAL (node->decl),
3789 &stmts,
3790 chkp_add_modification_to_stmt_list);
3792 if (stmts.avail <= 0)
3794 cgraph_build_static_cdtor ('P', stmts.stmts,
3795 MAX_RESERVED_INIT_PRIORITY + 3);
3796 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3797 stmts.stmts = NULL;
3801 if (stmts.stmts)
3802 cgraph_build_static_cdtor ('P', stmts.stmts,
3803 MAX_RESERVED_INIT_PRIORITY + 3);
3805 /* Iterate through varpool and generate bounds initialization
3806 constructors for all static bounds vars. */
3807 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3808 stmts.stmts = NULL;
3809 FOR_EACH_VARIABLE (node)
3810 if (node->need_bounds_init
3811 && POINTER_BOUNDS_P (node->decl)
3812 && TREE_ASM_WRITTEN (node->decl))
3814 tree bnd = node->decl;
3815 tree var;
3817 gcc_assert (DECL_INITIAL (bnd)
3818 && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);
3820 var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
3821 chkp_output_static_bounds (bnd, var, &stmts);
3824 if (stmts.stmts)
3825 cgraph_build_static_cdtor ('B', stmts.stmts,
3826 MAX_RESERVED_INIT_PRIORITY + 2);
3828 delete chkp_static_var_bounds;
3829 delete chkp_bounds_map;
3832 /* An instrumentation function which is called for each statement
3833 having memory access we want to instrument. It inserts check
3834 code and bounds copy code.
3836 ITER points to statement to instrument.
3838 NODE holds memory access in statement to check.
3840 LOC holds the location information for statement.
3842 DIRFLAGS determines whether access is read or write.
3844 ACCESS_OFFS should be added to address used in NODE
3845 before check.
3847 ACCESS_SIZE holds size of checked access.
3849 SAFE indicates if NODE access is safe and should not be
3850 checked. */
3851 static void
3852 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3853 location_t loc, tree dirflag,
3854 tree access_offs, tree access_size,
3855 bool safe)
3857 tree node_type = TREE_TYPE (node);
3858 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3859 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3860 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3861 tree ptr = NULL_TREE; /* a pointer used for dereference */
3862 tree bounds = NULL_TREE;
3864 /* We do not need instrumentation for clobbers. */
3865 if (dirflag == integer_one_node
3866 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3867 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3868 return;
3870 switch (TREE_CODE (node))
3872 case ARRAY_REF:
3873 case COMPONENT_REF:
3875 bool bitfield;
3876 tree elt;
3878 if (safe)
3880 /* We are not going to generate any checks, so do not
3881 generate bounds as well. */
3882 addr_first = chkp_build_addr_expr (node);
3883 break;
3886 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3887 &bitfield, &bounds, iter, false);
3889 /* Break if there is no dereference and operation is safe. */
3891 if (bitfield)
3893 tree field = TREE_OPERAND (node, 1);
3895 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3896 size = DECL_SIZE_UNIT (field);
3898 if (elt)
3899 elt = chkp_build_addr_expr (elt);
3900 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3901 addr_first = fold_build_pointer_plus_loc (loc,
3902 addr_first,
3903 byte_position (field));
3905 else
3906 addr_first = chkp_build_addr_expr (node);
3908 break;
3910 case INDIRECT_REF:
3911 ptr = TREE_OPERAND (node, 0);
3912 addr_first = ptr;
3913 break;
3915 case MEM_REF:
3916 ptr = TREE_OPERAND (node, 0);
3917 addr_first = chkp_build_addr_expr (node);
3918 break;
3920 case TARGET_MEM_REF:
3921 ptr = TMR_BASE (node);
3922 addr_first = chkp_build_addr_expr (node);
3923 break;
3925 case ARRAY_RANGE_REF:
3926 printf("ARRAY_RANGE_REF\n");
3927 debug_gimple_stmt(gsi_stmt(*iter));
3928 debug_tree(node);
3929 gcc_unreachable ();
3930 break;
3932 case BIT_FIELD_REF:
3934 tree offs, rem, bpu;
3936 gcc_assert (!access_offs);
3937 gcc_assert (!access_size);
3939 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3940 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3941 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3942 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3944 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3945 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3946 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3947 size = fold_convert (size_type_node, size);
3949 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3950 dirflag, offs, size, safe);
3951 return;
3953 break;
3955 case VAR_DECL:
3956 case RESULT_DECL:
3957 case PARM_DECL:
3958 if (dirflag != integer_one_node
3959 || DECL_REGISTER (node))
3960 return;
3962 safe = true;
3963 addr_first = chkp_build_addr_expr (node);
3964 break;
3966 default:
3967 return;
3970 /* If addr_last was not computed then use (addr_first + size - 1)
3971 expression to compute it. */
3972 if (!addr_last)
3974 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3975 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3978 /* Shift both first_addr and last_addr by access_offs if specified. */
3979 if (access_offs)
3981 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3982 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3985 /* Generate bndcl/bndcu checks if memory access is not safe. */
3986 if (!safe)
3988 gimple_stmt_iterator stmt_iter = *iter;
3990 if (!bounds)
3991 bounds = chkp_find_bounds (ptr, iter);
3993 chkp_check_mem_access (addr_first, addr_last, bounds,
3994 stmt_iter, loc, dirflag);
3997 /* We need to store bounds in case pointer is stored. */
3998 if (dirflag == integer_one_node
3999 && chkp_type_has_pointer (node_type)
4000 && flag_chkp_store_bounds)
4002 gimple stmt = gsi_stmt (*iter);
4003 tree rhs1 = gimple_assign_rhs1 (stmt);
4004 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4006 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
4007 chkp_walk_pointer_assignments (node, rhs1, iter,
4008 chkp_copy_bounds_for_elem);
4009 else
4011 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
4012 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
4017 /* Add code to copy bounds for all pointers copied
4018 in ASSIGN created during inline of EDGE. */
4019 void
4020 chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
4022 tree lhs = gimple_assign_lhs (assign);
4023 tree rhs = gimple_assign_rhs1 (assign);
4024 gimple_stmt_iterator iter = gsi_for_stmt (assign);
4026 if (!flag_chkp_store_bounds)
4027 return;
4029 chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);
4031 /* We should create edges for all created calls to bndldx and bndstx. */
4032 while (gsi_stmt (iter) != assign)
4034 gimple stmt = gsi_stmt (iter);
4035 if (gimple_code (stmt) == GIMPLE_CALL)
4037 tree fndecl = gimple_call_fndecl (stmt);
4038 struct cgraph_node *callee = cgraph_node::get_create (fndecl);
4039 struct cgraph_edge *new_edge;
4041 gcc_assert (fndecl == chkp_bndstx_fndecl
4042 || fndecl == chkp_bndldx_fndecl
4043 || fndecl == chkp_ret_bnd_fndecl);
4045 new_edge = edge->caller->create_edge (callee,
4046 as_a <gcall *> (stmt),
4047 edge->count,
4048 edge->frequency);
4049 new_edge->frequency = compute_call_stmt_bb_frequency
4050 (edge->caller->decl, gimple_bb (stmt));
4052 gsi_prev (&iter);
4056 /* Some code transformation made during instrumentation pass
4057 may put code into inconsistent state. Here we find and fix
4058 such flaws. */
4059 void
4060 chkp_fix_cfg ()
4062 basic_block bb;
4063 gimple_stmt_iterator i;
4065 /* We could insert some code right after stmt which ends bb.
4066 We wanted to put this code on fallthru edge but did not
4067 add new edges from the beginning because it may cause new
4068 phi node creation which may be incorrect due to incomplete
4069 bound phi nodes. */
4070 FOR_ALL_BB_FN (bb, cfun)
4071 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4073 gimple stmt = gsi_stmt (i);
4074 gimple_stmt_iterator next = i;
4076 gsi_next (&next);
4078 if (stmt_ends_bb_p (stmt)
4079 && !gsi_end_p (next))
4081 edge fall = find_fallthru_edge (bb->succs);
4082 basic_block dest = NULL;
4083 int flags = 0;
4085 gcc_assert (fall);
4087 /* We cannot split abnormal edge. Therefore we
4088 store its params, make it regular and then
4089 rebuild abnormal edge after split. */
4090 if (fall->flags & EDGE_ABNORMAL)
4092 flags = fall->flags & ~EDGE_FALLTHRU;
4093 dest = fall->dest;
4095 fall->flags &= ~EDGE_COMPLEX;
4098 while (!gsi_end_p (next))
4100 gimple next_stmt = gsi_stmt (next);
4101 gsi_remove (&next, false);
4102 gsi_insert_on_edge (fall, next_stmt);
4105 gsi_commit_edge_inserts ();
4107 /* Re-create abnormal edge. */
4108 if (dest)
4109 make_edge (bb, dest, flags);
4114 /* Walker callback for chkp_replace_function_pointers. Replaces
4115 function pointer in the specified operand with pointer to the
4116 instrumented function version. */
4117 static tree
4118 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4119 void *data ATTRIBUTE_UNUSED)
4121 if (TREE_CODE (*op) == FUNCTION_DECL
4122 && chkp_instrumentable_p (*op)
4123 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4124 /* For builtins we replace pointers only for selected
4125 function and functions having definitions. */
4126 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4127 && (chkp_instrument_normal_builtin (*op)
4128 || gimple_has_body_p (*op)))))
4130 struct cgraph_node *node = cgraph_node::get_create (*op);
4131 struct cgraph_node *clone = NULL;
4133 if (!node->instrumentation_clone)
4134 clone = chkp_maybe_create_clone (*op);
4136 if (clone)
4137 *op = clone->decl;
4138 *walk_subtrees = 0;
4141 return NULL;
4144 /* This function searches for function pointers in statement
4145 pointed by GSI and replaces them with pointers to instrumented
4146 function versions. */
4147 static void
4148 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4150 gimple stmt = gsi_stmt (*gsi);
4151 /* For calls we want to walk call args only. */
4152 if (gimple_code (stmt) == GIMPLE_CALL)
4154 unsigned i;
4155 for (i = 0; i < gimple_call_num_args (stmt); i++)
4156 walk_tree (gimple_call_arg_ptr (stmt, i),
4157 chkp_replace_function_pointer, NULL, NULL);
4159 else
4160 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4163 /* This function instruments all statements working with memory,
4164 calls and rets.
4166 It also removes excess statements from static initializers. */
4167 static void
4168 chkp_instrument_function (void)
4170 basic_block bb, next;
4171 gimple_stmt_iterator i;
4172 enum gimple_rhs_class grhs_class;
4173 bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4175 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4178 next = bb->next_bb;
4179 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4181 gimple s = gsi_stmt (i);
4183 /* Skip statement marked to not be instrumented. */
4184 if (chkp_marked_stmt_p (s))
4186 gsi_next (&i);
4187 continue;
4190 chkp_replace_function_pointers (&i);
4192 switch (gimple_code (s))
4194 case GIMPLE_ASSIGN:
4195 chkp_process_stmt (&i, gimple_assign_lhs (s),
4196 gimple_location (s), integer_one_node,
4197 NULL_TREE, NULL_TREE, safe);
4198 chkp_process_stmt (&i, gimple_assign_rhs1 (s),
4199 gimple_location (s), integer_zero_node,
4200 NULL_TREE, NULL_TREE, safe);
4201 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
4202 if (grhs_class == GIMPLE_BINARY_RHS)
4203 chkp_process_stmt (&i, gimple_assign_rhs2 (s),
4204 gimple_location (s), integer_zero_node,
4205 NULL_TREE, NULL_TREE, safe);
4206 break;
4208 case GIMPLE_RETURN:
4210 greturn *r = as_a <greturn *> (s);
4211 if (gimple_return_retval (r) != NULL_TREE)
4213 chkp_process_stmt (&i, gimple_return_retval (r),
4214 gimple_location (r),
4215 integer_zero_node,
4216 NULL_TREE, NULL_TREE, safe);
4218 /* Additionally we need to add bounds
4219 to return statement. */
4220 chkp_add_bounds_to_ret_stmt (&i);
4223 break;
4225 case GIMPLE_CALL:
4226 chkp_add_bounds_to_call_stmt (&i);
4227 break;
4229 default:
4233 gsi_next (&i);
4235 /* We do not need any actual pointer stores in checker
4236 static initializer. */
4237 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4238 && gimple_code (s) == GIMPLE_ASSIGN
4239 && gimple_store_p (s))
4241 gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4242 gsi_remove (&del_iter, true);
4243 unlink_stmt_vdef (s);
4244 release_defs(s);
4247 bb = next;
4249 while (bb);
4251 /* Some input params may have bounds and be address taken. In this case
4252 we should store incoming bounds into bounds table. */
4253 tree arg;
4254 if (flag_chkp_store_bounds)
4255 for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4256 if (TREE_ADDRESSABLE (arg))
4258 if (BOUNDED_P (arg))
4260 tree bounds = chkp_get_next_bounds_parm (arg);
4261 tree def_ptr = ssa_default_def (cfun, arg);
4262 gimple_stmt_iterator iter
4263 = gsi_start_bb (chkp_get_entry_block ());
4264 chkp_build_bndstx (chkp_build_addr_expr (arg),
4265 def_ptr ? def_ptr : arg,
4266 bounds, &iter);
4268 /* Skip bounds arg. */
4269 arg = TREE_CHAIN (arg);
4271 else if (chkp_type_has_pointer (TREE_TYPE (arg)))
4273 tree orig_arg = arg;
4274 bitmap slots = BITMAP_ALLOC (NULL);
4275 gimple_stmt_iterator iter
4276 = gsi_start_bb (chkp_get_entry_block ());
4277 bitmap_iterator bi;
4278 unsigned bnd_no;
4280 chkp_find_bound_slots (TREE_TYPE (arg), slots);
4282 EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4284 tree bounds = chkp_get_next_bounds_parm (arg);
4285 HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4286 tree addr = chkp_build_addr_expr (orig_arg);
4287 tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4288 build_int_cst (ptr_type_node, offs));
4289 chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4290 bounds, &iter);
4292 arg = DECL_CHAIN (arg);
4294 BITMAP_FREE (slots);
4299 /* Find init/null/copy_ptr_bounds calls and replace them
4300 with assignments. It should allow better code
4301 optimization. */
4303 static void
4304 chkp_remove_useless_builtins ()
4306 basic_block bb;
4307 gimple_stmt_iterator gsi;
4309 FOR_EACH_BB_FN (bb, cfun)
4311 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4313 gimple stmt = gsi_stmt (gsi);
4314 tree fndecl;
4315 enum built_in_function fcode;
4317 /* Find builtins returning first arg and replace
4318 them with assignments. */
4319 if (gimple_code (stmt) == GIMPLE_CALL
4320 && (fndecl = gimple_call_fndecl (stmt))
4321 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4322 && (fcode = DECL_FUNCTION_CODE (fndecl))
4323 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4324 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4325 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4326 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4328 tree res = gimple_call_arg (stmt, 0);
4329 update_call_from_tree (&gsi, res);
4330 stmt = gsi_stmt (gsi);
4331 update_stmt (stmt);
4337 /* Initialize pass. */
4338 static void
4339 chkp_init (void)
4341 basic_block bb;
4342 gimple_stmt_iterator i;
4344 in_chkp_pass = true;
4346 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4347 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4348 chkp_unmark_stmt (gsi_stmt (i));
4350 chkp_invalid_bounds = new hash_set<tree>;
4351 chkp_completed_bounds_set = new hash_set<tree>;
4352 delete chkp_reg_bounds;
4353 chkp_reg_bounds = new hash_map<tree, tree>;
4354 delete chkp_bound_vars;
4355 chkp_bound_vars = new hash_map<tree, tree>;
4356 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4357 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4358 delete chkp_bounds_map;
4359 chkp_bounds_map = new hash_map<tree, tree>;
4360 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
4362 entry_block = NULL;
4363 zero_bounds = NULL_TREE;
4364 none_bounds = NULL_TREE;
4365 incomplete_bounds = integer_zero_node;
4366 tmp_var = NULL_TREE;
4367 size_tmp_var = NULL_TREE;
4369 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4371 /* We create these constant bounds once for each object file.
4372 These symbols go to comdat section and result in single copy
4373 of each one in the final binary. */
4374 chkp_get_zero_bounds_var ();
4375 chkp_get_none_bounds_var ();
4377 calculate_dominance_info (CDI_DOMINATORS);
4378 calculate_dominance_info (CDI_POST_DOMINATORS);
4380 bitmap_obstack_initialize (NULL);
4383 /* Finalize instrumentation pass. */
4384 static void
4385 chkp_fini (void)
4387 in_chkp_pass = false;
4389 delete chkp_invalid_bounds;
4390 delete chkp_completed_bounds_set;
4391 delete chkp_reg_addr_bounds;
4392 delete chkp_incomplete_bounds_map;
4394 free_dominance_info (CDI_DOMINATORS);
4395 free_dominance_info (CDI_POST_DOMINATORS);
4397 bitmap_obstack_release (NULL);
4399 entry_block = NULL;
4400 zero_bounds = NULL_TREE;
4401 none_bounds = NULL_TREE;
4404 /* Main instrumentation pass function. */
4405 static unsigned int
4406 chkp_execute (void)
4408 chkp_init ();
4410 chkp_instrument_function ();
4412 chkp_remove_useless_builtins ();
4414 chkp_function_mark_instrumented (cfun->decl);
4416 chkp_fix_cfg ();
4418 chkp_fini ();
4420 return 0;
4423 /* Instrumentation pass gate. */
4424 static bool
4425 chkp_gate (void)
4427 cgraph_node *node = cgraph_node::get (cfun->decl);
4428 return ((node != NULL
4429 && node->instrumentation_clone)
4430 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
4433 namespace {
4435 const pass_data pass_data_chkp =
4437 GIMPLE_PASS, /* type */
4438 "chkp", /* name */
4439 OPTGROUP_NONE, /* optinfo_flags */
4440 TV_NONE, /* tv_id */
4441 PROP_ssa | PROP_cfg, /* properties_required */
4442 0, /* properties_provided */
4443 0, /* properties_destroyed */
4444 0, /* todo_flags_start */
4445 TODO_verify_il
4446 | TODO_update_ssa /* todo_flags_finish */
4449 class pass_chkp : public gimple_opt_pass
4451 public:
4452 pass_chkp (gcc::context *ctxt)
4453 : gimple_opt_pass (pass_data_chkp, ctxt)
4456 /* opt_pass methods: */
4457 virtual opt_pass * clone ()
4459 return new pass_chkp (m_ctxt);
4462 virtual bool gate (function *)
4464 return chkp_gate ();
4467 virtual unsigned int execute (function *)
4469 return chkp_execute ();
4472 }; // class pass_chkp
4474 } // anon namespace
4476 gimple_opt_pass *
4477 make_pass_chkp (gcc::context *ctxt)
4479 return new pass_chkp (ctxt);
4482 #include "gt-tree-chkp.h"