2015-06-11 Paul Thomas <pault@gcc.gnu.org>
[official-gcc.git] / gcc / tree-chkp.c
blob8edef488a03df65b728b85e18f209ac7c8f13b68
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "input.h"
25 #include "alias.h"
26 #include "symtab.h"
27 #include "options.h"
28 #include "tree.h"
29 #include "fold-const.h"
30 #include "stor-layout.h"
31 #include "varasm.h"
32 #include "target.h"
33 #include "tree-iterator.h"
34 #include "tree-cfg.h"
35 #include "langhooks.h"
36 #include "tree-pass.h"
37 #include "diagnostic.h"
38 #include "is-a.h"
39 #include "cfgloop.h"
40 #include "stringpool.h"
41 #include "tree-ssa-alias.h"
42 #include "tree-ssanames.h"
43 #include "tree-ssa-operands.h"
44 #include "tree-ssa-address.h"
45 #include "tree-ssa.h"
46 #include "predict.h"
47 #include "dominance.h"
48 #include "cfg.h"
49 #include "basic-block.h"
50 #include "tree-ssa-loop-niter.h"
51 #include "gimple-expr.h"
52 #include "gimple.h"
53 #include "tree-phinodes.h"
54 #include "gimple-ssa.h"
55 #include "ssa-iterators.h"
56 #include "gimple-pretty-print.h"
57 #include "gimple-iterator.h"
58 #include "gimplify.h"
59 #include "gimplify-me.h"
60 #include "print-tree.h"
61 #include "tm.h"
62 #include "hard-reg-set.h"
63 #include "function.h"
64 #include "rtl.h"
65 #include "flags.h"
66 #include "insn-config.h"
67 #include "expmed.h"
68 #include "dojump.h"
69 #include "explow.h"
70 #include "calls.h"
71 #include "emit-rtl.h"
72 #include "stmt.h"
73 #include "expr.h"
74 #include "tree-ssa-propagate.h"
75 #include "gimple-fold.h"
76 #include "tree-chkp.h"
77 #include "gimple-walk.h"
78 #include "rtl.h" /* For MEM_P, assign_temp. */
79 #include "tree-dfa.h"
80 #include "ipa-ref.h"
81 #include "lto-streamer.h"
82 #include "cgraph.h"
83 #include "ipa-chkp.h"
84 #include "params.h"
86 /* Pointer Bounds Checker instruments code with memory checks to find
87 out-of-bounds memory accesses. Checks are performed by computing
88 bounds for each pointer and then comparing address of accessed
89 memory before pointer dereferencing.
91 1. Function clones.
93 See ipa-chkp.c.
95 2. Instrumentation.
97 There are few things to instrument:
99 a) Memory accesses - add checker calls to check address of accessed memory
100 against bounds of dereferenced pointer. Obviously safe memory
101 accesses like static variable access does not have to be instrumented
102 with checks.
104 Example:
106 val_2 = *p_1;
108 with 4 bytes access is transformed into:
110 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
111 D.1_4 = p_1 + 3;
112 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
113 val_2 = *p_1;
115 where __bound_tmp.1_3 are bounds computed for pointer p_1,
116 __builtin___chkp_bndcl is a lower bound check and
117 __builtin___chkp_bndcu is an upper bound check.
119 b) Pointer stores.
121 When pointer is stored in memory we need to store its bounds. To
122 achieve compatibility of instrumented code with regular codes
123 we have to keep data layout and store bounds in special bound tables
124 via special checker call. Implementation of bounds table may vary for
125 different platforms. It has to associate pointer value and its
126 location (it is required because we may have two equal pointers
127 with different bounds stored in different places) with bounds.
128 Another checker builtin allows to get bounds for specified pointer
129 loaded from specified location.
131 Example:
133 buf1[i_1] = &buf2;
135 is transformed into:
137 buf1[i_1] = &buf2;
138 D.1_2 = &buf1[i_1];
139 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
141 where __bound_tmp.1_2 are bounds of &buf2.
143 c) Static initialization.
145 The special case of pointer store is static pointer initialization.
146 Bounds initialization is performed in a few steps:
147 - register all static initializations in front-end using
148 chkp_register_var_initializer
149 - when file compilation finishes we create functions with special
150 attribute 'chkp ctor' and put explicit initialization code
151 (assignments) for all statically initialized pointers.
152 - when checker constructor is compiled checker pass adds required
153 bounds initialization for all statically initialized pointers
154 - since we do not actually need excess pointers initialization
155 in checker constructor we remove such assignments from them
157 d) Calls.
159 For each call in the code we add additional arguments to pass
160 bounds for pointer arguments. We determine type of call arguments
161 using arguments list from function declaration; if function
162 declaration is not available we use function type; otherwise
163 (e.g. for unnamed arguments) we use type of passed value. Function
164 declaration/type is replaced with the instrumented one.
166 Example:
168 val_1 = foo (&buf1, &buf2, &buf1, 0);
170 is translated into:
172 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
173 &buf1, __bound_tmp.1_2, 0);
175 e) Returns.
177 If function returns a pointer value we have to return bounds also.
178 A new operand was added for return statement to hold returned bounds.
180 Example:
182 return &_buf1;
184 is transformed into
186 return &_buf1, __bound_tmp.1_1;
188 3. Bounds computation.
190 Compiler is fully responsible for computing bounds to be used for each
191 memory access. The first step for bounds computation is to find the
192 origin of pointer dereferenced for memory access. Basing on pointer
193 origin we define a way to compute its bounds. There are just few
194 possible cases:
196 a) Pointer is returned by call.
198 In this case we use corresponding checker builtin method to obtain returned
199 bounds.
201 Example:
203 buf_1 = malloc (size_2);
204 foo (buf_1);
206 is translated into:
208 buf_1 = malloc (size_2);
209 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
210 foo (buf_1, __bound_tmp.1_3);
212 b) Pointer is an address of an object.
214 In this case compiler tries to compute objects size and create corresponding
215 bounds. If object has incomplete type then special checker builtin is used to
216 obtain its size at runtime.
218 Example:
220 foo ()
222 <unnamed type> __bound_tmp.3;
223 static int buf[100];
225 <bb 3>:
226 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
228 <bb 2>:
229 return &buf, __bound_tmp.3_2;
232 Example:
234 Address of an object 'extern int buf[]' with incomplete type is
235 returned.
237 foo ()
239 <unnamed type> __bound_tmp.4;
240 long unsigned int __size_tmp.3;
242 <bb 3>:
243 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
244 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
246 <bb 2>:
247 return &buf, __bound_tmp.4_3;
250 c) Pointer is the result of object narrowing.
252 It happens when we use pointer to an object to compute pointer to a part
253 of an object. E.g. we take pointer to a field of a structure. In this
254 case we perform bounds intersection using bounds of original object and
255 bounds of object's part (which are computed basing on its type).
257 There may be some debatable questions about when narrowing should occur
258 and when it should not. To avoid false bound violations in correct
259 programs we do not perform narrowing when address of an array element is
260 obtained (it has address of the whole array) and when address of the first
261 structure field is obtained (because it is guaranteed to be equal to
262 address of the whole structure and it is legal to cast it back to structure).
264 Default narrowing behavior may be changed using compiler flags.
266 Example:
268 In this example address of the second structure field is returned.
270 foo (struct A * p, __bounds_type __bounds_of_p)
272 <unnamed type> __bound_tmp.3;
273 int * _2;
274 int * _5;
276 <bb 2>:
277 _5 = &p_1(D)->second_field;
278 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
279 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
280 __bounds_of_p_3(D));
281 _2 = &p_1(D)->second_field;
282 return _2, __bound_tmp.3_8;
285 Example:
287 In this example address of the first field of array element is returned.
289 foo (struct A * p, __bounds_type __bounds_of_p, int i)
291 long unsigned int _3;
292 long unsigned int _4;
293 struct A * _6;
294 int * _7;
296 <bb 2>:
297 _3 = (long unsigned int) i_1(D);
298 _4 = _3 * 8;
299 _6 = p_5(D) + _4;
300 _7 = &_6->first_field;
301 return _7, __bounds_of_p_2(D);
305 d) Pointer is the result of pointer arithmetic or type cast.
307 In this case bounds of the base pointer are used. In case of binary
308 operation producing a pointer we are analyzing data flow further
309 looking for operand's bounds. One operand is considered as a base
310 if it has some valid bounds. If we fall into a case when none of
311 operands (or both of them) has valid bounds, a default bounds value
312 is used.
314 Trying to find out bounds for binary operations we may fall into
315 cyclic dependencies for pointers. To avoid infinite recursion all
316 walked phi nodes instantly obtain corresponding bounds but created
317 bounds are marked as incomplete. It helps us to stop DF walk during
318 bounds search.
320 When we reach pointer source, some args of incomplete bounds phi obtain
321 valid bounds and those values are propagated further through phi nodes.
322 If no valid bounds were found for phi node then we mark its result as
323 invalid bounds. Process stops when all incomplete bounds become either
324 valid or invalid and we are able to choose a pointer base.
326 e) Pointer is loaded from the memory.
328 In this case we just need to load bounds from the bounds table.
330 Example:
332 foo ()
334 <unnamed type> __bound_tmp.3;
335 static int * buf;
336 int * _2;
338 <bb 2>:
339 _2 = buf;
340 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
341 return _2, __bound_tmp.3_4;
346 typedef void (*assign_handler)(tree, tree, void *);
348 static tree chkp_get_zero_bounds ();
349 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
350 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
351 gimple_stmt_iterator *iter);
352 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
353 tree *elt, bool *safe,
354 bool *bitfield,
355 tree *bounds,
356 gimple_stmt_iterator *iter,
357 bool innermost_bounds);
359 #define chkp_bndldx_fndecl \
360 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
361 #define chkp_bndstx_fndecl \
362 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
363 #define chkp_checkl_fndecl \
364 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
365 #define chkp_checku_fndecl \
366 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
367 #define chkp_bndmk_fndecl \
368 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
369 #define chkp_ret_bnd_fndecl \
370 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
371 #define chkp_intersect_fndecl \
372 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
373 #define chkp_narrow_bounds_fndecl \
374 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
375 #define chkp_sizeof_fndecl \
376 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
377 #define chkp_extract_lower_fndecl \
378 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
379 #define chkp_extract_upper_fndecl \
380 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
382 static GTY (()) tree chkp_uintptr_type;
384 static GTY (()) tree chkp_zero_bounds_var;
385 static GTY (()) tree chkp_none_bounds_var;
387 static GTY (()) basic_block entry_block;
388 static GTY (()) tree zero_bounds;
389 static GTY (()) tree none_bounds;
390 static GTY (()) tree incomplete_bounds;
391 static GTY (()) tree tmp_var;
392 static GTY (()) tree size_tmp_var;
393 static GTY (()) bitmap chkp_abnormal_copies;
395 struct hash_set<tree> *chkp_invalid_bounds;
396 struct hash_set<tree> *chkp_completed_bounds_set;
397 struct hash_map<tree, tree> *chkp_reg_bounds;
398 struct hash_map<tree, tree> *chkp_bound_vars;
399 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
400 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
401 struct hash_map<tree, tree> *chkp_bounds_map;
402 struct hash_map<tree, tree> *chkp_static_var_bounds;
404 static bool in_chkp_pass;
406 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
407 #define CHKP_SIZE_TMP_NAME "__size_tmp"
408 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
409 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
410 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
411 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
412 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
414 /* Static checker constructors may become very large and their
415 compilation with optimization may take too much time.
416 Therefore we put a limit to number of statements in one
417 constructor. Tests with 100 000 statically initialized
418 pointers showed following compilation times on Sandy Bridge
419 server (used -O2):
420 limit 100 => ~18 sec.
421 limit 300 => ~22 sec.
422 limit 1000 => ~30 sec.
423 limit 3000 => ~49 sec.
424 limit 5000 => ~55 sec.
425 limit 10000 => ~76 sec.
426 limit 100000 => ~532 sec. */
427 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
429 struct chkp_ctor_stmt_list
431 tree stmts;
432 int avail;
435 /* Return 1 if function FNDECL is instrumented by Pointer
436 Bounds Checker. */
437 bool
438 chkp_function_instrumented_p (tree fndecl)
440 return fndecl
441 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
444 /* Mark function FNDECL as instrumented. */
445 void
446 chkp_function_mark_instrumented (tree fndecl)
448 if (chkp_function_instrumented_p (fndecl))
449 return;
451 DECL_ATTRIBUTES (fndecl)
452 = tree_cons (get_identifier ("chkp instrumented"), NULL,
453 DECL_ATTRIBUTES (fndecl));
456 /* Return true when STMT is builtin call to instrumentation function
457 corresponding to CODE. */
459 bool
460 chkp_gimple_call_builtin_p (gimple call,
461 enum built_in_function code)
463 tree fndecl;
464 if (is_gimple_call (call)
465 && (fndecl = targetm.builtin_chkp_function (code))
466 && gimple_call_fndecl (call) == fndecl)
467 return true;
468 return false;
471 /* Emit code to store zero bounds for PTR located at MEM. */
472 void
473 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
475 tree zero_bnd, bnd, addr, bndstx;
477 if (flag_chkp_use_static_const_bounds)
478 zero_bnd = chkp_get_zero_bounds_var ();
479 else
480 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
481 integer_zero_node);
482 bnd = make_tree (pointer_bounds_type_node,
483 assign_temp (pointer_bounds_type_node, 0, 1));
484 addr = build1 (ADDR_EXPR,
485 build_pointer_type (TREE_TYPE (mem)), mem);
486 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
488 expand_assignment (bnd, zero_bnd, false);
489 expand_normal (bndstx);
492 /* Build retbnd call for returned value RETVAL.
494 If BNDVAL is not NULL then result is stored
495 in it. Otherwise a temporary is created to
496 hold returned value.
498 GSI points to a position for a retbnd call
499 and is set to created stmt.
501 Cgraph edge is created for a new call if
502 UPDATE_EDGE is 1.
504 Obtained bounds are returned. */
505 tree
506 chkp_insert_retbnd_call (tree bndval, tree retval,
507 gimple_stmt_iterator *gsi)
509 gimple call;
511 if (!bndval)
512 bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
514 call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
515 gimple_call_set_lhs (call, bndval);
516 gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
518 return bndval;
521 /* Build a GIMPLE_CALL identical to CALL but skipping bounds
522 arguments. */
524 gcall *
525 chkp_copy_call_skip_bounds (gcall *call)
527 bitmap bounds;
528 unsigned i;
530 bitmap_obstack_initialize (NULL);
531 bounds = BITMAP_ALLOC (NULL);
533 for (i = 0; i < gimple_call_num_args (call); i++)
534 if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
535 bitmap_set_bit (bounds, i);
537 if (!bitmap_empty_p (bounds))
538 call = gimple_call_copy_skip_args (call, bounds);
539 gimple_call_set_with_bounds (call, false);
541 BITMAP_FREE (bounds);
542 bitmap_obstack_release (NULL);
544 return call;
547 /* Redirect edge E to the correct node according to call_stmt.
548 Return 1 if bounds removal from call_stmt should be done
549 instead of redirection. */
551 bool
552 chkp_redirect_edge (cgraph_edge *e)
554 bool instrumented = false;
555 tree decl = e->callee->decl;
557 if (e->callee->instrumentation_clone
558 || chkp_function_instrumented_p (decl))
559 instrumented = true;
561 if (instrumented
562 && !gimple_call_with_bounds_p (e->call_stmt))
563 e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
564 else if (!instrumented
565 && gimple_call_with_bounds_p (e->call_stmt)
566 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
567 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
568 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
570 if (e->callee->instrumented_version)
571 e->redirect_callee (e->callee->instrumented_version);
572 else
574 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
575 /* Avoid bounds removal if all args will be removed. */
576 if (!args || TREE_VALUE (args) != void_type_node)
577 return true;
578 else
579 gimple_call_set_with_bounds (e->call_stmt, false);
583 return false;
586 /* Mark statement S to not be instrumented. */
587 static void
588 chkp_mark_stmt (gimple s)
590 gimple_set_plf (s, GF_PLF_1, true);
593 /* Mark statement S to be instrumented. */
594 static void
595 chkp_unmark_stmt (gimple s)
597 gimple_set_plf (s, GF_PLF_1, false);
600 /* Return 1 if statement S should not be instrumented. */
601 static bool
602 chkp_marked_stmt_p (gimple s)
604 return gimple_plf (s, GF_PLF_1);
607 /* Get var to be used for bound temps. */
608 static tree
609 chkp_get_tmp_var (void)
611 if (!tmp_var)
612 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
614 return tmp_var;
617 /* Get SSA_NAME to be used as temp. */
618 static tree
619 chkp_get_tmp_reg (gimple stmt)
621 if (in_chkp_pass)
622 return make_ssa_name (chkp_get_tmp_var (), stmt);
624 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
625 CHKP_BOUND_TMP_NAME);
628 /* Get var to be used for size temps. */
629 static tree
630 chkp_get_size_tmp_var (void)
632 if (!size_tmp_var)
633 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
635 return size_tmp_var;
638 /* Register bounds BND for address of OBJ. */
639 static void
640 chkp_register_addr_bounds (tree obj, tree bnd)
642 if (bnd == incomplete_bounds)
643 return;
645 chkp_reg_addr_bounds->put (obj, bnd);
647 if (dump_file && (dump_flags & TDF_DETAILS))
649 fprintf (dump_file, "Regsitered bound ");
650 print_generic_expr (dump_file, bnd, 0);
651 fprintf (dump_file, " for address of ");
652 print_generic_expr (dump_file, obj, 0);
653 fprintf (dump_file, "\n");
657 /* Return bounds registered for address of OBJ. */
658 static tree
659 chkp_get_registered_addr_bounds (tree obj)
661 tree *slot = chkp_reg_addr_bounds->get (obj);
662 return slot ? *slot : NULL_TREE;
665 /* Mark BOUNDS as completed. */
666 static void
667 chkp_mark_completed_bounds (tree bounds)
669 chkp_completed_bounds_set->add (bounds);
671 if (dump_file && (dump_flags & TDF_DETAILS))
673 fprintf (dump_file, "Marked bounds ");
674 print_generic_expr (dump_file, bounds, 0);
675 fprintf (dump_file, " as completed\n");
679 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
680 static bool
681 chkp_completed_bounds (tree bounds)
683 return chkp_completed_bounds_set->contains (bounds);
686 /* Clear comleted bound marks. */
687 static void
688 chkp_erase_completed_bounds (void)
690 delete chkp_completed_bounds_set;
691 chkp_completed_bounds_set = new hash_set<tree>;
694 /* Mark BOUNDS associated with PTR as incomplete. */
695 static void
696 chkp_register_incomplete_bounds (tree bounds, tree ptr)
698 chkp_incomplete_bounds_map->put (bounds, ptr);
700 if (dump_file && (dump_flags & TDF_DETAILS))
702 fprintf (dump_file, "Regsitered incomplete bounds ");
703 print_generic_expr (dump_file, bounds, 0);
704 fprintf (dump_file, " for ");
705 print_generic_expr (dump_file, ptr, 0);
706 fprintf (dump_file, "\n");
710 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
711 static bool
712 chkp_incomplete_bounds (tree bounds)
714 if (bounds == incomplete_bounds)
715 return true;
717 if (chkp_completed_bounds (bounds))
718 return false;
720 return chkp_incomplete_bounds_map->get (bounds) != NULL;
723 /* Clear incomleted bound marks. */
724 static void
725 chkp_erase_incomplete_bounds (void)
727 delete chkp_incomplete_bounds_map;
728 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
731 /* Build and return bndmk call which creates bounds for structure
732 pointed by PTR. Structure should have complete type. */
733 tree
734 chkp_make_bounds_for_struct_addr (tree ptr)
736 tree type = TREE_TYPE (ptr);
737 tree size;
739 gcc_assert (POINTER_TYPE_P (type));
741 size = TYPE_SIZE (TREE_TYPE (type));
743 gcc_assert (size);
745 return build_call_nary (pointer_bounds_type_node,
746 build_fold_addr_expr (chkp_bndmk_fndecl),
747 2, ptr, size);
750 /* Traversal function for chkp_may_finish_incomplete_bounds.
751 Set RES to 0 if at least one argument of phi statement
752 defining bounds (passed in KEY arg) is unknown.
753 Traversal stops when first unknown phi argument is found. */
754 bool
755 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
756 bool *res)
758 gimple phi;
759 unsigned i;
761 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
763 phi = SSA_NAME_DEF_STMT (bounds);
765 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
767 for (i = 0; i < gimple_phi_num_args (phi); i++)
769 tree phi_arg = gimple_phi_arg_def (phi, i);
770 if (!phi_arg)
772 *res = false;
773 /* Do not need to traverse further. */
774 return false;
778 return true;
781 /* Return 1 if all phi nodes created for bounds have their
782 arguments computed. */
783 static bool
784 chkp_may_finish_incomplete_bounds (void)
786 bool res = true;
788 chkp_incomplete_bounds_map
789 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
791 return res;
794 /* Helper function for chkp_finish_incomplete_bounds.
795 Recompute args for bounds phi node. */
796 bool
797 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
798 void *res ATTRIBUTE_UNUSED)
800 tree ptr = *slot;
801 gphi *bounds_phi;
802 gphi *ptr_phi;
803 unsigned i;
805 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
806 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
808 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
809 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
811 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
813 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
814 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
816 add_phi_arg (bounds_phi, bound_arg,
817 gimple_phi_arg_edge (ptr_phi, i),
818 UNKNOWN_LOCATION);
821 return true;
824 /* Mark BOUNDS as invalid. */
825 static void
826 chkp_mark_invalid_bounds (tree bounds)
828 chkp_invalid_bounds->add (bounds);
830 if (dump_file && (dump_flags & TDF_DETAILS))
832 fprintf (dump_file, "Marked bounds ");
833 print_generic_expr (dump_file, bounds, 0);
834 fprintf (dump_file, " as invalid\n");
838 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
839 static bool
840 chkp_valid_bounds (tree bounds)
842 if (bounds == zero_bounds || bounds == none_bounds)
843 return false;
845 return !chkp_invalid_bounds->contains (bounds);
848 /* Helper function for chkp_finish_incomplete_bounds.
849 Check all arguments of phi nodes trying to find
850 valid completed bounds. If there is at least one
851 such arg then bounds produced by phi node are marked
852 as valid completed bounds and all phi args are
853 recomputed. */
854 bool
855 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
857 gimple phi;
858 unsigned i;
860 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
862 if (chkp_completed_bounds (bounds))
863 return true;
865 phi = SSA_NAME_DEF_STMT (bounds);
867 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
869 for (i = 0; i < gimple_phi_num_args (phi); i++)
871 tree phi_arg = gimple_phi_arg_def (phi, i);
873 gcc_assert (phi_arg);
875 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
877 *res = true;
878 chkp_mark_completed_bounds (bounds);
879 chkp_recompute_phi_bounds (bounds, slot, NULL);
880 return true;
884 return true;
887 /* Helper function for chkp_finish_incomplete_bounds.
888 Marks all incompleted bounds as invalid. */
889 bool
890 chkp_mark_invalid_bounds_walker (tree const &bounds,
891 tree *slot ATTRIBUTE_UNUSED,
892 void *res ATTRIBUTE_UNUSED)
894 if (!chkp_completed_bounds (bounds))
896 chkp_mark_invalid_bounds (bounds);
897 chkp_mark_completed_bounds (bounds);
899 return true;
902 /* When all bound phi nodes have all their args computed
903 we have enough info to find valid bounds. We iterate
904 through all incompleted bounds searching for valid
905 bounds. Found valid bounds are marked as completed
906 and all remaining incompleted bounds are recomputed.
907 Process continues until no new valid bounds may be
908 found. All remained incompleted bounds are marked as
909 invalid (i.e. have no valid source of bounds). */
910 static void
911 chkp_finish_incomplete_bounds (void)
913 bool found_valid;
915 while (found_valid)
917 found_valid = false;
919 chkp_incomplete_bounds_map->
920 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
922 if (found_valid)
923 chkp_incomplete_bounds_map->
924 traverse<void *, chkp_recompute_phi_bounds> (NULL);
927 chkp_incomplete_bounds_map->
928 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
929 chkp_incomplete_bounds_map->
930 traverse<void *, chkp_recompute_phi_bounds> (NULL);
932 chkp_erase_completed_bounds ();
933 chkp_erase_incomplete_bounds ();
936 /* Return 1 if type TYPE is a pointer type or a
937 structure having a pointer type as one of its fields.
938 Otherwise return 0. */
939 bool
940 chkp_type_has_pointer (const_tree type)
942 bool res = false;
944 if (BOUNDED_TYPE_P (type))
945 res = true;
946 else if (RECORD_OR_UNION_TYPE_P (type))
948 tree field;
950 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
951 if (TREE_CODE (field) == FIELD_DECL)
952 res = res || chkp_type_has_pointer (TREE_TYPE (field));
954 else if (TREE_CODE (type) == ARRAY_TYPE)
955 res = chkp_type_has_pointer (TREE_TYPE (type));
957 return res;
960 unsigned
961 chkp_type_bounds_count (const_tree type)
963 unsigned res = 0;
965 if (!type)
966 res = 0;
967 else if (BOUNDED_TYPE_P (type))
968 res = 1;
969 else if (RECORD_OR_UNION_TYPE_P (type))
971 bitmap have_bound;
973 bitmap_obstack_initialize (NULL);
974 have_bound = BITMAP_ALLOC (NULL);
975 chkp_find_bound_slots (type, have_bound);
976 res = bitmap_count_bits (have_bound);
977 BITMAP_FREE (have_bound);
978 bitmap_obstack_release (NULL);
981 return res;
984 /* Get bounds associated with NODE via
985 chkp_set_bounds call. */
986 tree
987 chkp_get_bounds (tree node)
989 tree *slot;
991 if (!chkp_bounds_map)
992 return NULL_TREE;
994 slot = chkp_bounds_map->get (node);
995 return slot ? *slot : NULL_TREE;
998 /* Associate bounds VAL with NODE. */
999 void
1000 chkp_set_bounds (tree node, tree val)
1002 if (!chkp_bounds_map)
1003 chkp_bounds_map = new hash_map<tree, tree>;
1005 chkp_bounds_map->put (node, val);
1008 /* Check if statically initialized variable VAR require
1009 static bounds initialization. If VAR is added into
1010 bounds initlization list then 1 is returned. Otherwise
1011 return 0. */
1012 extern bool
1013 chkp_register_var_initializer (tree var)
1015 if (!flag_check_pointer_bounds
1016 || DECL_INITIAL (var) == error_mark_node)
1017 return false;
1019 gcc_assert (TREE_CODE (var) == VAR_DECL);
1020 gcc_assert (DECL_INITIAL (var));
1022 if (TREE_STATIC (var)
1023 && chkp_type_has_pointer (TREE_TYPE (var)))
1025 varpool_node::get_create (var)->need_bounds_init = 1;
1026 return true;
1029 return false;
1032 /* Helper function for chkp_finish_file.
1034 Add new modification statement (RHS is assigned to LHS)
1035 into list of static initializer statementes (passed in ARG).
1036 If statements list becomes too big, emit checker constructor
1037 and start the new one. */
1038 static void
1039 chkp_add_modification_to_stmt_list (tree lhs,
1040 tree rhs,
1041 void *arg)
1043 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
1044 tree modify;
1046 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1047 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1049 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1050 append_to_statement_list (modify, &stmts->stmts);
1052 stmts->avail--;
1055 /* Build and return ADDR_EXPR for specified object OBJ. */
1056 static tree
1057 chkp_build_addr_expr (tree obj)
1059 return TREE_CODE (obj) == TARGET_MEM_REF
1060 ? tree_mem_ref_addr (ptr_type_node, obj)
1061 : build_fold_addr_expr (obj);
1064 /* Helper function for chkp_finish_file.
1065 Initialize bound variable BND_VAR with bounds of variable
1066 VAR to statements list STMTS. If statements list becomes
1067 too big, emit checker constructor and start the new one. */
1068 static void
1069 chkp_output_static_bounds (tree bnd_var, tree var,
1070 struct chkp_ctor_stmt_list *stmts)
1072 tree lb, ub, size;
1074 if (TREE_CODE (var) == STRING_CST)
1076 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1077 size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
1079 else if (DECL_SIZE (var)
1080 && !chkp_variable_size_type (TREE_TYPE (var)))
1082 /* Compute bounds using statically known size. */
1083 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1084 size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
1086 else
1088 /* Compute bounds using dynamic size. */
1089 tree call;
1091 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1092 call = build1 (ADDR_EXPR,
1093 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
1094 chkp_sizeof_fndecl);
1095 size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
1096 call, 1, var);
1098 if (flag_chkp_zero_dynamic_size_as_infinite)
1100 tree max_size, cond;
1102 max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
1103 cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
1104 size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
1107 size = size_binop (MINUS_EXPR, size, size_one_node);
1110 ub = size_binop (PLUS_EXPR, lb, size);
1111 stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
1112 &stmts->stmts);
1113 if (stmts->avail <= 0)
1115 cgraph_build_static_cdtor ('B', stmts->stmts,
1116 MAX_RESERVED_INIT_PRIORITY + 2);
1117 stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
1118 stmts->stmts = NULL;
1122 /* Return entry block to be used for checker initilization code.
1123 Create new block if required. */
1124 static basic_block
1125 chkp_get_entry_block (void)
1127 if (!entry_block)
1128 entry_block
1129 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1131 return entry_block;
1134 /* Return a bounds var to be used for pointer var PTR_VAR. */
1135 static tree
1136 chkp_get_bounds_var (tree ptr_var)
1138 tree bnd_var;
1139 tree *slot;
1141 slot = chkp_bound_vars->get (ptr_var);
1142 if (slot)
1143 bnd_var = *slot;
1144 else
1146 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1147 CHKP_BOUND_TMP_NAME);
1148 chkp_bound_vars->put (ptr_var, bnd_var);
1151 return bnd_var;
1154 /* If BND is an abnormal bounds copy, return a copied value.
1155 Otherwise return BND. */
1156 static tree
1157 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
1159 if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1161 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1162 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1163 bnd = gimple_assign_rhs1 (bnd_def);
1166 return bnd;
1169 /* Register bounds BND for object PTR in global bounds table.
1170 A copy of bounds may be created for abnormal ssa names.
1171 Returns bounds to use for PTR. */
1172 static tree
1173 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1175 bool abnormal_ptr;
1177 if (!chkp_reg_bounds)
1178 return bnd;
1180 /* Do nothing if bounds are incomplete_bounds
1181 because it means bounds will be recomputed. */
1182 if (bnd == incomplete_bounds)
1183 return bnd;
1185 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1186 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1187 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1189 /* A single bounds value may be reused multiple times for
1190 different pointer values. It may cause coalescing issues
1191 for abnormal SSA names. To avoid it we create a bounds
1192 copy in case it is computed for abnormal SSA name.
1194 We also cannot reuse such created copies for other pointers */
1195 if (abnormal_ptr
1196 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1198 tree bnd_var = NULL_TREE;
1200 if (abnormal_ptr)
1202 if (SSA_NAME_VAR (ptr))
1203 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1205 else
1206 bnd_var = chkp_get_tmp_var ();
1208 /* For abnormal copies we may just find original
1209 bounds and use them. */
1210 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1211 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1212 /* For undefined values we usually use none bounds
1213 value but in case of abnormal edge it may cause
1214 coalescing failures. Use default definition of
1215 bounds variable instead to avoid it. */
1216 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1217 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1219 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1221 if (dump_file && (dump_flags & TDF_DETAILS))
1223 fprintf (dump_file, "Using default def bounds ");
1224 print_generic_expr (dump_file, bnd, 0);
1225 fprintf (dump_file, " for abnormal default def SSA name ");
1226 print_generic_expr (dump_file, ptr, 0);
1227 fprintf (dump_file, "\n");
1230 else
1232 tree copy;
1233 gimple def = SSA_NAME_DEF_STMT (ptr);
1234 gimple assign;
1235 gimple_stmt_iterator gsi;
1237 if (bnd_var)
1238 copy = make_ssa_name (bnd_var);
1239 else
1240 copy = make_temp_ssa_name (pointer_bounds_type_node,
1241 NULL,
1242 CHKP_BOUND_TMP_NAME);
1243 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1244 assign = gimple_build_assign (copy, bnd);
1246 if (dump_file && (dump_flags & TDF_DETAILS))
1248 fprintf (dump_file, "Creating a copy of bounds ");
1249 print_generic_expr (dump_file, bnd, 0);
1250 fprintf (dump_file, " for abnormal SSA name ");
1251 print_generic_expr (dump_file, ptr, 0);
1252 fprintf (dump_file, "\n");
1255 if (gimple_code (def) == GIMPLE_NOP)
1257 gsi = gsi_last_bb (chkp_get_entry_block ());
1258 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1259 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1260 else
1261 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1263 else
1265 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1266 /* Sometimes (e.g. when we load a pointer from a
1267 memory) bounds are produced later than a pointer.
1268 We need to insert bounds copy appropriately. */
1269 if (gimple_code (bnd_def) != GIMPLE_NOP
1270 && stmt_dominates_stmt_p (def, bnd_def))
1271 gsi = gsi_for_stmt (bnd_def);
1272 else
1273 gsi = gsi_for_stmt (def);
1274 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1277 bnd = copy;
1280 if (abnormal_ptr)
1281 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1284 chkp_reg_bounds->put (ptr, bnd);
1286 if (dump_file && (dump_flags & TDF_DETAILS))
1288 fprintf (dump_file, "Regsitered bound ");
1289 print_generic_expr (dump_file, bnd, 0);
1290 fprintf (dump_file, " for pointer ");
1291 print_generic_expr (dump_file, ptr, 0);
1292 fprintf (dump_file, "\n");
1295 return bnd;
1298 /* Get bounds registered for object PTR in global bounds table. */
1299 static tree
1300 chkp_get_registered_bounds (tree ptr)
1302 tree *slot;
1304 if (!chkp_reg_bounds)
1305 return NULL_TREE;
1307 slot = chkp_reg_bounds->get (ptr);
1308 return slot ? *slot : NULL_TREE;
1311 /* Add bound retvals to return statement pointed by GSI. */
1313 static void
1314 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1316 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1317 tree retval = gimple_return_retval (ret);
1318 tree ret_decl = DECL_RESULT (cfun->decl);
1319 tree bounds;
1321 if (!retval)
1322 return;
1324 if (BOUNDED_P (ret_decl))
1326 bounds = chkp_find_bounds (retval, gsi);
1327 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1328 gimple_return_set_retbnd (ret, bounds);
1331 update_stmt (ret);
1334 /* Force OP to be suitable for using as an argument for call.
1335 New statements (if any) go to SEQ. */
1336 static tree
1337 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1339 gimple_seq stmts;
1340 gimple_stmt_iterator si;
1342 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1344 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1345 chkp_mark_stmt (gsi_stmt (si));
1347 gimple_seq_add_seq (seq, stmts);
1349 return op;
1352 /* Generate lower bound check for memory access by ADDR.
1353 Check is inserted before the position pointed by ITER.
1354 DIRFLAG indicates whether memory access is load or store. */
1355 static void
1356 chkp_check_lower (tree addr, tree bounds,
1357 gimple_stmt_iterator iter,
1358 location_t location,
1359 tree dirflag)
1361 gimple_seq seq;
1362 gimple check;
1363 tree node;
1365 if (!chkp_function_instrumented_p (current_function_decl)
1366 && bounds == chkp_get_zero_bounds ())
1367 return;
1369 if (dirflag == integer_zero_node
1370 && !flag_chkp_check_read)
1371 return;
1373 if (dirflag == integer_one_node
1374 && !flag_chkp_check_write)
1375 return;
1377 seq = NULL;
1379 node = chkp_force_gimple_call_op (addr, &seq);
1381 check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1382 chkp_mark_stmt (check);
1383 gimple_call_set_with_bounds (check, true);
1384 gimple_set_location (check, location);
1385 gimple_seq_add_stmt (&seq, check);
1387 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1389 if (dump_file && (dump_flags & TDF_DETAILS))
1391 gimple before = gsi_stmt (iter);
1392 fprintf (dump_file, "Generated lower bound check for statement ");
1393 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1394 fprintf (dump_file, " ");
1395 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1399 /* Generate upper bound check for memory access by ADDR.
1400 Check is inserted before the position pointed by ITER.
1401 DIRFLAG indicates whether memory access is load or store. */
1402 static void
1403 chkp_check_upper (tree addr, tree bounds,
1404 gimple_stmt_iterator iter,
1405 location_t location,
1406 tree dirflag)
1408 gimple_seq seq;
1409 gimple check;
1410 tree node;
1412 if (!chkp_function_instrumented_p (current_function_decl)
1413 && bounds == chkp_get_zero_bounds ())
1414 return;
1416 if (dirflag == integer_zero_node
1417 && !flag_chkp_check_read)
1418 return;
1420 if (dirflag == integer_one_node
1421 && !flag_chkp_check_write)
1422 return;
1424 seq = NULL;
1426 node = chkp_force_gimple_call_op (addr, &seq);
1428 check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1429 chkp_mark_stmt (check);
1430 gimple_call_set_with_bounds (check, true);
1431 gimple_set_location (check, location);
1432 gimple_seq_add_stmt (&seq, check);
1434 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1436 if (dump_file && (dump_flags & TDF_DETAILS))
1438 gimple before = gsi_stmt (iter);
1439 fprintf (dump_file, "Generated upper bound check for statement ");
1440 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1441 fprintf (dump_file, " ");
1442 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.
   LOCATION is attached to the generated check statements.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  /* FIRST is checked against the lower bound, LAST (the address of
     the slot's final byte) against the upper bound.  */
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
1460 /* Replace call to _bnd_chk_* pointed by GSI with
1461 bndcu and bndcl calls. DIRFLAG determines whether
1462 check is for read or write. */
1464 void
1465 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1466 tree dirflag)
1468 gimple_stmt_iterator call_iter = *gsi;
1469 gimple call = gsi_stmt (*gsi);
1470 tree fndecl = gimple_call_fndecl (call);
1471 tree addr = gimple_call_arg (call, 0);
1472 tree bounds = chkp_find_bounds (addr, gsi);
1474 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1475 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1476 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1478 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1479 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1481 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1483 tree size = gimple_call_arg (call, 1);
1484 addr = fold_build_pointer_plus (addr, size);
1485 addr = fold_build_pointer_plus_hwi (addr, -1);
1486 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1489 gsi_remove (&call_iter, true);
1492 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1493 corresponding bounds extract call. */
1495 void
1496 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1498 gimple call = gsi_stmt (*gsi);
1499 tree fndecl = gimple_call_fndecl (call);
1500 tree addr = gimple_call_arg (call, 0);
1501 tree bounds = chkp_find_bounds (addr, gsi);
1502 gimple extract;
1504 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1505 fndecl = chkp_extract_lower_fndecl;
1506 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1507 fndecl = chkp_extract_upper_fndecl;
1508 else
1509 gcc_unreachable ();
1511 extract = gimple_build_call (fndecl, 1, bounds);
1512 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1513 chkp_mark_stmt (extract);
1515 gsi_replace (gsi, extract, false);
1518 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1519 static tree
1520 chkp_build_component_ref (tree obj, tree field)
1522 tree res;
1524 /* If object is TMR then we do not use component_ref but
1525 add offset instead. We need it to be able to get addr
1526 of the reasult later. */
1527 if (TREE_CODE (obj) == TARGET_MEM_REF)
1529 tree offs = TMR_OFFSET (obj);
1530 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1531 offs, DECL_FIELD_OFFSET (field));
1533 gcc_assert (offs);
1535 res = copy_node (obj);
1536 TREE_TYPE (res) = TREE_TYPE (field);
1537 TMR_OFFSET (res) = offs;
1539 else
1540 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1542 return res;
1545 /* Return ARRAY_REF for array ARR and index IDX with
1546 specified element type ETYPE and element size ESIZE. */
1547 static tree
1548 chkp_build_array_ref (tree arr, tree etype, tree esize,
1549 unsigned HOST_WIDE_INT idx)
1551 tree index = build_int_cst (size_type_node, idx);
1552 tree res;
1554 /* If object is TMR then we do not use array_ref but
1555 add offset instead. We need it to be able to get addr
1556 of the reasult later. */
1557 if (TREE_CODE (arr) == TARGET_MEM_REF)
1559 tree offs = TMR_OFFSET (arr);
1561 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1562 esize, index);
1563 gcc_assert(esize);
1565 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1566 offs, esize);
1567 gcc_assert (offs);
1569 res = copy_node (arr);
1570 TREE_TYPE (res) = etype;
1571 TMR_OFFSET (res) = offs;
1573 else
1574 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1576 return res;
1579 /* Helper function for chkp_add_bounds_to_call_stmt.
1580 Fill ALL_BOUNDS output array with created bounds.
1582 OFFS is used for recursive calls and holds basic
1583 offset of TYPE in outer structure in bits.
1585 ITER points a position where bounds are searched.
1587 ALL_BOUNDS[i] is filled with elem bounds if there
1588 is a field in TYPE which has pointer type and offset
1589 equal to i * POINTER_SIZE in bits. */
1590 static void
1591 chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
1592 HOST_WIDE_INT offs,
1593 gimple_stmt_iterator *iter)
1595 tree type = TREE_TYPE (elem);
1597 if (BOUNDED_TYPE_P (type))
1599 if (!all_bounds[offs / POINTER_SIZE])
1601 tree temp = make_temp_ssa_name (type, NULL, "");
1602 gimple assign = gimple_build_assign (temp, elem);
1603 gimple_stmt_iterator gsi;
1605 gsi_insert_before (iter, assign, GSI_SAME_STMT);
1606 gsi = gsi_for_stmt (assign);
1608 all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
1611 else if (RECORD_OR_UNION_TYPE_P (type))
1613 tree field;
1615 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1616 if (TREE_CODE (field) == FIELD_DECL)
1618 tree base = unshare_expr (elem);
1619 tree field_ref = chkp_build_component_ref (base, field);
1620 HOST_WIDE_INT field_offs
1621 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1622 if (DECL_FIELD_OFFSET (field))
1623 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1625 chkp_find_bounds_for_elem (field_ref, all_bounds,
1626 offs + field_offs, iter);
1629 else if (TREE_CODE (type) == ARRAY_TYPE)
1631 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1632 tree etype = TREE_TYPE (type);
1633 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1634 unsigned HOST_WIDE_INT cur;
1636 if (!maxval || integer_minus_onep (maxval))
1637 return;
1639 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1641 tree base = unshare_expr (elem);
1642 tree arr_elem = chkp_build_array_ref (base, etype,
1643 TYPE_SIZE (etype),
1644 cur);
1645 chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
1646 iter);
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    /* The type itself is bounded (a pointer); it occupies exactly
       one bound slot.  */
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into every field at its bit offset within TYPE.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    /* Field offset in bits: bit offset plus byte offset
	       scaled to bits.  */
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays of unknown or non-constant size.  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      /* Recurse into every element at its bit offset.  */
      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
/* Fill bitmap RES with information about bounds for
   type TYPE.  See chkp_find_bound_slots_1 for more
   details.  RES is cleared before being filled.  */
void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}
1708 /* Return 1 if call to FNDECL should be instrumented
1709 and 0 otherwise. */
1711 static bool
1712 chkp_instrument_normal_builtin (tree fndecl)
1714 switch (DECL_FUNCTION_CODE (fndecl))
1716 case BUILT_IN_STRLEN:
1717 case BUILT_IN_STRCPY:
1718 case BUILT_IN_STRNCPY:
1719 case BUILT_IN_STPCPY:
1720 case BUILT_IN_STPNCPY:
1721 case BUILT_IN_STRCAT:
1722 case BUILT_IN_STRNCAT:
1723 case BUILT_IN_MEMCPY:
1724 case BUILT_IN_MEMPCPY:
1725 case BUILT_IN_MEMSET:
1726 case BUILT_IN_MEMMOVE:
1727 case BUILT_IN_BZERO:
1728 case BUILT_IN_STRCMP:
1729 case BUILT_IN_STRNCMP:
1730 case BUILT_IN_BCMP:
1731 case BUILT_IN_MEMCMP:
1732 case BUILT_IN_MEMCPY_CHK:
1733 case BUILT_IN_MEMPCPY_CHK:
1734 case BUILT_IN_MEMMOVE_CHK:
1735 case BUILT_IN_MEMSET_CHK:
1736 case BUILT_IN_STRCPY_CHK:
1737 case BUILT_IN_STRNCPY_CHK:
1738 case BUILT_IN_STPCPY_CHK:
1739 case BUILT_IN_STPNCPY_CHK:
1740 case BUILT_IN_STRCAT_CHK:
1741 case BUILT_IN_STRNCAT_CHK:
1742 case BUILT_IN_MALLOC:
1743 case BUILT_IN_CALLOC:
1744 case BUILT_IN_REALLOC:
1745 return 1;
1747 default:
1748 return 0;
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  Calls that need no instrumentation are
   returned from early; otherwise bound arguments are interleaved
   with the original arguments and the call is redirected to the
   instrumented clone (direct calls) or gets a fixed-up function
   type (indirect calls).  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      /* A pointer argument (or one passed by invisible reference)
	 gets a single bounds argument right after it.  */
      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      /* An aggregate passed by value contributes one bounds argument
	 per pointer slot it contains.  */
      else if (chkp_type_has_pointer (type))
	{
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds were added, reuse the original statement.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Transfer SSA def ownership before replacing the statement.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.
   The variable is public and one-only so that identical bounds vars
   from different units are merged by the linker.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires '%s' "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* The initializer is target-specific; the target encodes [LB, UB]
     into a constant of pointer_bounds_type_node.  */
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
2019 /* Generate code to make bounds with specified lower bound LB and SIZE.
2020 if AFTER is 1 then code is inserted after position pointed by ITER
2021 otherwise code is inserted before position pointed by ITER.
2022 If ITER is NULL then code is added to entry block. */
2023 static tree
2024 chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
2026 gimple_seq seq;
2027 gimple_stmt_iterator gsi;
2028 gimple stmt;
2029 tree bounds;
2031 if (iter)
2032 gsi = *iter;
2033 else
2034 gsi = gsi_start_bb (chkp_get_entry_block ());
2036 seq = NULL;
2038 lb = chkp_force_gimple_call_op (lb, &seq);
2039 size = chkp_force_gimple_call_op (size, &seq);
2041 stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
2042 chkp_mark_stmt (stmt);
2044 bounds = chkp_get_tmp_reg (stmt);
2045 gimple_call_set_lhs (stmt, bounds);
2047 gimple_seq_add_stmt (&seq, stmt);
2049 if (iter && after)
2050 gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
2051 else
2052 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
2054 if (dump_file && (dump_flags & TDF_DETAILS))
2056 fprintf (dump_file, "Made bounds: ");
2057 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2058 if (iter)
2060 fprintf (dump_file, " inserted before statement: ");
2061 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
2063 else
2064 fprintf (dump_file, " at function entry\n");
2067 /* update_stmt (stmt); */
2069 return bounds;
2072 /* Return var holding zero bounds. */
2073 tree
2074 chkp_get_zero_bounds_var (void)
2076 if (!chkp_zero_bounds_var)
2077 chkp_zero_bounds_var
2078 = chkp_make_static_const_bounds (0, -1,
2079 CHKP_ZERO_BOUNDS_VAR_NAME);
2080 return chkp_zero_bounds_var;
2083 /* Return var holding none bounds. */
2084 tree
2085 chkp_get_none_bounds_var (void)
2087 if (!chkp_none_bounds_var)
2088 chkp_none_bounds_var
2089 = chkp_make_static_const_bounds (-1, 0,
2090 CHKP_NONE_BOUNDS_VAR_NAME);
2091 return chkp_none_bounds_var;
2094 /* Return SSA_NAME used to represent zero bounds. */
2095 static tree
2096 chkp_get_zero_bounds (void)
2098 if (zero_bounds)
2099 return zero_bounds;
2101 if (dump_file && (dump_flags & TDF_DETAILS))
2102 fprintf (dump_file, "Creating zero bounds...");
2104 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2105 || flag_chkp_use_static_const_bounds > 0)
2107 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2108 gimple stmt;
2110 zero_bounds = chkp_get_tmp_reg (NULL);
2111 stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
2112 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2114 else
2115 zero_bounds = chkp_make_bounds (integer_zero_node,
2116 integer_zero_node,
2117 NULL,
2118 false);
2120 return zero_bounds;
2123 /* Return SSA_NAME used to represent none bounds. */
2124 static tree
2125 chkp_get_none_bounds (void)
2127 if (none_bounds)
2128 return none_bounds;
2130 if (dump_file && (dump_flags & TDF_DETAILS))
2131 fprintf (dump_file, "Creating none bounds...");
2134 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2135 || flag_chkp_use_static_const_bounds > 0)
2137 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2138 gimple stmt;
2140 none_bounds = chkp_get_tmp_reg (NULL);
2141 stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
2142 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2144 else
2145 none_bounds = chkp_make_bounds (integer_minus_one_node,
2146 build_int_cst (size_type_node, 2),
2147 NULL,
2148 false);
2150 return none_bounds;
/* Return bounds to be used as a result of operation which
   should not create pointer (e.g. MULT_EXPR).  */
static tree
chkp_get_invalid_op_bounds (void)
{
  return chkp_get_zero_bounds ();
}
/* Return bounds to be used for loads of non-pointer values.
   NOTE(review): zero bounds appear to serve as the neutral/placeholder
   bounds here (same value as chkp_get_invalid_op_bounds) — confirm
   against the checks skipped for zero bounds in chkp_check_lower.  */
static tree
chkp_get_nonpointer_load_bounds (void)
{
  return chkp_get_zero_bounds ();
}
2168 /* Return 1 if may use bndret call to get bounds for pointer
2169 returned by CALL. */
2170 static bool
2171 chkp_call_returns_bounds_p (gcall *call)
2173 if (gimple_call_internal_p (call))
2174 return false;
2176 if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
2177 || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
2178 return true;
2180 if (gimple_call_with_bounds_p (call))
2181 return true;
2183 tree fndecl = gimple_call_fndecl (call);
2185 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
2186 return false;
2188 if (fndecl && !chkp_instrumentable_p (fndecl))
2189 return false;
2191 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2193 if (chkp_instrument_normal_builtin (fndecl))
2194 return true;
2196 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
2197 return false;
2199 struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
2200 return (clone && gimple_has_body_p (clone->decl));
2203 return true;
2206 /* Build bounds returned by CALL. */
2207 static tree
2208 chkp_build_returned_bound (gcall *call)
2210 gimple_stmt_iterator gsi;
2211 tree bounds;
2212 gimple stmt;
2213 tree fndecl = gimple_call_fndecl (call);
2214 unsigned int retflags;
2216 /* To avoid fixing alloca expands in targets we handle
2217 it separately. */
2218 if (fndecl
2219 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2220 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
2221 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
/* alloca result: lower bound is the returned pointer, size is arg 0.  */
2223 tree size = gimple_call_arg (call, 0);
2224 tree lb = gimple_call_lhs (call);
2225 gimple_stmt_iterator iter = gsi_for_stmt (call);
2226 bounds = chkp_make_bounds (lb, size, &iter, true);
2228 /* We know bounds returned by set_bounds builtin call. */
2229 else if (fndecl
2230 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2231 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
2233 tree lb = gimple_call_arg (call, 0);
2234 tree size = gimple_call_arg (call, 1);
2235 gimple_stmt_iterator iter = gsi_for_stmt (call);
2236 bounds = chkp_make_bounds (lb, size, &iter, true);
2238 /* Detect bounds initialization calls. */
2239 else if (fndecl
2240 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2241 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
2242 bounds = chkp_get_zero_bounds ();
2243 /* Detect bounds nullification calls. */
2244 else if (fndecl
2245 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2246 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
2247 bounds = chkp_get_none_bounds ();
2248 /* Detect bounds copy calls. */
2249 else if (fndecl
2250 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2251 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
2253 gimple_stmt_iterator iter = gsi_for_stmt (call);
2254 bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
2256 /* Do not use retbnd when returned bounds are equal to some
2257 of passed bounds. */
2258 else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
2259 && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
2261 gimple_stmt_iterator iter = gsi_for_stmt (call);
2262 unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
2263 if (gimple_call_with_bounds_p (call))
/* For an instrumented call, RETARG counts only pointer (non-bound)
   arguments; skip interleaved bound args to find the real index.  */
2265 for (argno = 0; argno < gimple_call_num_args (call); argno++)
2266 if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
2268 if (retarg)
2269 retarg--;
2270 else
2271 break;
2274 else
2275 argno = retarg;
2277 bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
2279 else if (chkp_call_returns_bounds_p (call))
2281 gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);
2283 /* In general case build checker builtin call to
2284 obtain returned bounds. */
2285 stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
2286 gimple_call_lhs (call));
2287 chkp_mark_stmt (stmt);
2289 gsi = gsi_for_stmt (call);
2290 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
2292 bounds = chkp_get_tmp_reg (stmt);
2293 gimple_call_set_lhs (stmt, bounds);
2295 update_stmt (stmt);
2297 else
2298 bounds = chkp_get_zero_bounds ();
2300 if (dump_file && (dump_flags & TDF_DETAILS))
2302 fprintf (dump_file, "Built returned bounds (");
2303 print_generic_expr (dump_file, bounds, 0);
2304 fprintf (dump_file, ") for call: ");
2305 print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
/* Associate the computed bounds with the call's lhs for later lookups.  */
2308 bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);
2310 return bounds;
2313 /* Return bounds used as returned by call
2314 which produced SSA name VAL.
   Scans immediate uses of VAL for a call to the __chkp_ret_bnd builtin
   and returns that call, or NULL if VAL is not an SSA name or no such
   use exists.  */
2315 gcall *
2316 chkp_retbnd_call_by_val (tree val)
2318 if (TREE_CODE (val) != SSA_NAME)
2319 return NULL;
2321 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2323 imm_use_iterator use_iter;
2324 use_operand_p use_p;
2325 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2326 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2327 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2328 return as_a <gcall *> (USE_STMT (use_p));
2330 return NULL;
2333 /* Check the next parameter for the given PARM is bounds
2334 and return its default SSA_NAME (create if required). */
2335 static tree
2336 chkp_get_next_bounds_parm (tree parm)
2338 tree bounds = TREE_CHAIN (parm);
2339 gcc_assert (POINTER_BOUNDS_P (bounds));
2340 bounds = ssa_default_def (cfun, bounds);
2341 if (!bounds)
/* No default def yet: create one for the bounds PARM_DECL.  */
2343 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2344 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2346 return bounds;
2349 /* Return bounds to be used for input argument PARM.
   PARM is the default-def SSA name of a PARM_DECL.  Looks up registered
   bounds first; otherwise derives bounds from the companion bounds
   parameter, or uses zero bounds for the static chain / main's args.  */
2350 static tree
2351 chkp_get_bound_for_parm (tree parm)
2353 tree decl = SSA_NAME_VAR (parm);
2354 tree bounds;
2356 gcc_assert (TREE_CODE (decl) == PARM_DECL);
2358 bounds = chkp_get_registered_bounds (parm);
2360 if (!bounds)
2361 bounds = chkp_get_registered_bounds (decl);
2363 if (!bounds)
2365 tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;
2367 /* For static chain param we return zero bounds
2368 because currently we do not check dereferences
2369 of this pointer. */
2370 if (cfun->static_chain_decl == decl)
2371 bounds = chkp_get_zero_bounds ();
2372 /* If non instrumented runtime is used then it may be useful
2373 to use zero bounds for input arguments of main
2374 function. */
2375 else if (flag_chkp_zero_input_bounds_for_main
2376 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
2377 "main") == 0)
2378 bounds = chkp_get_zero_bounds ();
2379 else if (BOUNDED_P (parm))
/* Regular pointer parameter: bounds come from the next (bounds)
   parameter in the instrumented signature.  */
2381 bounds = chkp_get_next_bounds_parm (decl);
2382 bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);
2384 if (dump_file && (dump_flags & TDF_DETAILS))
2386 fprintf (dump_file, "Built arg bounds (");
2387 print_generic_expr (dump_file, bounds, 0);
2388 fprintf (dump_file, ") for arg: ");
2389 print_node (dump_file, "", decl, 0);
2392 else
2393 bounds = chkp_get_zero_bounds ();
2396 if (!chkp_get_registered_bounds (parm))
2397 bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);
2399 if (dump_file && (dump_flags & TDF_DETAILS))
2401 fprintf (dump_file, "Using bounds ");
2402 print_generic_expr (dump_file, bounds, 0);
2403 fprintf (dump_file, " for parm ");
2404 print_generic_expr (dump_file, parm, 0);
2405 fprintf (dump_file, " of type ");
2406 print_generic_expr (dump_file, TREE_TYPE (parm), 0);
2407 fprintf (dump_file, ".\n");
2410 return bounds;
2413 /* Build and return CALL_EXPR for bndstx builtin with specified
2414 arguments. */
2415 tree
2416 chkp_build_bndldx_call (tree addr, tree ptr)
2418 tree fn = build1 (ADDR_EXPR,
2419 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2420 chkp_bndldx_fndecl);
2421 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2422 fn, 2, addr, ptr);
2423 CALL_WITH_BOUNDS_P (call) = true;
2424 return call;
2427 /* Insert code to load bounds for PTR located by ADDR.
2428 Code is inserted after position pointed by GSI.
2429 Loaded bounds are returned. */
2430 static tree
2431 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2433 gimple_seq seq;
2434 gimple stmt;
2435 tree bounds;
2437 seq = NULL;
/* Both operands must be valid gimple call operands; force them
   into that form, accumulating any needed statements into SEQ.  */
2439 addr = chkp_force_gimple_call_op (addr, &seq);
2440 ptr = chkp_force_gimple_call_op (ptr, &seq);
2442 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2443 chkp_mark_stmt (stmt);
2444 bounds = chkp_get_tmp_reg (stmt);
2445 gimple_call_set_lhs (stmt, bounds);
2447 gimple_seq_add_stmt (&seq, stmt);
2449 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2451 if (dump_file && (dump_flags & TDF_DETAILS))
2453 fprintf (dump_file, "Generated bndldx for pointer ");
2454 print_generic_expr (dump_file, ptr, 0);
2455 fprintf (dump_file, ": ");
2456 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2459 return bounds;
2462 /* Build and return CALL_EXPR for bndstx builtin with specified
2463 arguments. */
2464 tree
2465 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2467 tree fn = build1 (ADDR_EXPR,
2468 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2469 chkp_bndstx_fndecl);
2470 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2471 fn, 3, ptr, bounds, addr);
2472 CALL_WITH_BOUNDS_P (call) = true;
2473 return call;
2476 /* Insert code to store BOUNDS for PTR stored by ADDR.
2477 New statements are inserted after position pointed
2478 by GSI. */
2479 void
2480 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2481 gimple_stmt_iterator *gsi)
2483 gimple_seq seq;
2484 gimple stmt;
2486 seq = NULL;
/* Force operands into valid gimple call operand form first.  */
2488 addr = chkp_force_gimple_call_op (addr, &seq);
2489 ptr = chkp_force_gimple_call_op (ptr, &seq);
2491 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2492 chkp_mark_stmt (stmt);
2493 gimple_call_set_with_bounds (stmt, true);
2495 gimple_seq_add_stmt (&seq, stmt);
2497 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2499 if (dump_file && (dump_flags & TDF_DETAILS))
2501 fprintf (dump_file, "Generated bndstx for pointer store ");
2502 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2503 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2507 /* Compute bounds for pointer NODE which was assigned in
2508 assignment statement ASSIGN. Return computed bounds. */
2509 static tree
2510 chkp_compute_bounds_for_assignment (tree node, gimple assign)
2512 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
2513 tree rhs1 = gimple_assign_rhs1 (assign);
2514 tree bounds = NULL_TREE;
2515 gimple_stmt_iterator iter = gsi_for_stmt (assign);
2517 if (dump_file && (dump_flags & TDF_DETAILS))
2519 fprintf (dump_file, "Computing bounds for assignment: ");
2520 print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
2523 switch (rhs_code)
2525 case MEM_REF:
2526 case TARGET_MEM_REF:
2527 case COMPONENT_REF:
2528 case ARRAY_REF:
2529 /* We need to load bounds from the bounds table. */
2530 bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
2531 break;
2533 case VAR_DECL:
2534 case SSA_NAME:
2535 case ADDR_EXPR:
2536 case POINTER_PLUS_EXPR:
2537 case NOP_EXPR:
2538 case CONVERT_EXPR:
2539 case INTEGER_CST:
2540 /* Bounds are just propagated from RHS. */
2541 bounds = chkp_find_bounds (rhs1, &iter);
2542 break;
2544 case VIEW_CONVERT_EXPR:
2545 /* Bounds are just propagated from RHS. */
2546 bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
2547 break;
2549 case PARM_DECL:
2550 if (BOUNDED_P (rhs1))
2552 /* We need to load bounds from the bounds table. */
2553 bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
2554 node, &iter);
/* bndldx needs the parameter's address, so mark it addressable.  */
2555 TREE_ADDRESSABLE (rhs1) = 1;
2557 else
2558 bounds = chkp_get_nonpointer_load_bounds ();
2559 break;
2561 case MINUS_EXPR:
2562 case PLUS_EXPR:
2563 case BIT_AND_EXPR:
2564 case BIT_IOR_EXPR:
2565 case BIT_XOR_EXPR:
2567 tree rhs2 = gimple_assign_rhs2 (assign);
2568 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2569 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2571 /* First we try to check types of operands. If it
2572 does not help then look at bound values.
2574 If some bounds are incomplete and other are
2575 not proven to be valid (i.e. also incomplete
2576 or invalid because value is not pointer) then
2577 resulting value is incomplete and will be
2578 recomputed later in chkp_finish_incomplete_bounds. */
2579 if (BOUNDED_P (rhs1)
2580 && !BOUNDED_P (rhs2))
2581 bounds = bnd1;
2582 else if (BOUNDED_P (rhs2)
2583 && !BOUNDED_P (rhs1)
2584 && rhs_code != MINUS_EXPR)
2585 bounds = bnd2;
2586 else if (chkp_incomplete_bounds (bnd1))
2587 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
2588 && !chkp_incomplete_bounds (bnd2))
2589 bounds = bnd2;
2590 else
2591 bounds = incomplete_bounds;
2592 else if (chkp_incomplete_bounds (bnd2))
2593 if (chkp_valid_bounds (bnd1)
2594 && !chkp_incomplete_bounds (bnd1))
2595 bounds = bnd1;
2596 else
2597 bounds = incomplete_bounds;
2598 else if (!chkp_valid_bounds (bnd1))
2599 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
2600 bounds = bnd2;
2601 else if (bnd2 == chkp_get_zero_bounds ())
2602 bounds = bnd2;
2603 else
2604 bounds = bnd1;
2605 else if (!chkp_valid_bounds (bnd2))
2606 bounds = bnd1;
2607 else
2608 /* Seems both operands may have valid bounds
2609 (e.g. pointer minus pointer). In such case
2610 use default invalid op bounds. */
2611 bounds = chkp_get_invalid_op_bounds ();
2613 break;
2615 case BIT_NOT_EXPR:
2616 case NEGATE_EXPR:
2617 case LSHIFT_EXPR:
2618 case RSHIFT_EXPR:
2619 case LROTATE_EXPR:
2620 case RROTATE_EXPR:
2621 case EQ_EXPR:
2622 case NE_EXPR:
2623 case LT_EXPR:
2624 case LE_EXPR:
2625 case GT_EXPR:
2626 case GE_EXPR:
2627 case MULT_EXPR:
2628 case RDIV_EXPR:
2629 case TRUNC_DIV_EXPR:
2630 case FLOOR_DIV_EXPR:
2631 case CEIL_DIV_EXPR:
2632 case ROUND_DIV_EXPR:
2633 case TRUNC_MOD_EXPR:
2634 case FLOOR_MOD_EXPR:
2635 case CEIL_MOD_EXPR:
2636 case ROUND_MOD_EXPR:
2637 case EXACT_DIV_EXPR:
2638 case FIX_TRUNC_EXPR:
2639 case FLOAT_EXPR:
2640 case REALPART_EXPR:
2641 case IMAGPART_EXPR:
2642 /* No valid bounds may be produced by these exprs. */
2643 bounds = chkp_get_invalid_op_bounds ();
2644 break;
2646 case COND_EXPR:
2648 tree val1 = gimple_assign_rhs2 (assign);
2649 tree val2 = gimple_assign_rhs3 (assign);
2650 tree bnd1 = chkp_find_bounds (val1, &iter);
2651 tree bnd2 = chkp_find_bounds (val2, &iter);
2652 gimple stmt;
2654 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2655 bounds = incomplete_bounds;
2656 else if (bnd1 == bnd2)
2657 bounds = bnd1;
2658 else
/* Select bounds at runtime with the same condition as the
   original COND_EXPR.  */
2660 rhs1 = unshare_expr (rhs1);
2662 bounds = chkp_get_tmp_reg (assign);
2663 stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
2664 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2666 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2667 chkp_mark_invalid_bounds (bounds);
2670 break;
2672 case MAX_EXPR:
2673 case MIN_EXPR:
2675 tree rhs2 = gimple_assign_rhs2 (assign);
2676 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2677 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2679 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2680 bounds = incomplete_bounds;
2681 else if (bnd1 == bnd2)
2682 bounds = bnd1;
2683 else
/* Emit a runtime compare of the operands and pick the bounds
   of whichever pointer MIN/MAX would select.  */
2685 gimple stmt;
2686 tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
2687 boolean_type_node, rhs1, rhs2);
2688 bounds = chkp_get_tmp_reg (assign);
2689 stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);
2691 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2693 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2694 chkp_mark_invalid_bounds (bounds);
2697 break;
2699 default:
2700 bounds = chkp_get_zero_bounds ();
2701 warning (0, "pointer bounds were lost due to unexpected expression %s",
2702 get_tree_code_name (rhs_code));
2705 gcc_assert (bounds);
2707 if (node)
2708 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2710 return bounds;
2713 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2715 There are just few statement codes allowed: NOP (for default ssa names),
2716 ASSIGN, CALL, PHI, ASM.
2718 Return computed bounds. */
2719 static tree
2720 chkp_get_bounds_by_definition (tree node, gimple def_stmt,
2721 gphi_iterator *iter)
2723 tree var, bounds;
2724 enum gimple_code code = gimple_code (def_stmt);
2725 gphi *stmt;
2727 if (dump_file && (dump_flags & TDF_DETAILS))
2729 fprintf (dump_file, "Searching for bounds for node: ");
2730 print_generic_expr (dump_file, node, 0);
2732 fprintf (dump_file, " using its definition: ");
2733 print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2736 switch (code)
2738 case GIMPLE_NOP:
/* Default definition: dispatch on the kind of underlying decl.  */
2739 var = SSA_NAME_VAR (node);
2740 switch (TREE_CODE (var))
2742 case PARM_DECL:
2743 bounds = chkp_get_bound_for_parm (node);
2744 break;
2746 case VAR_DECL:
2747 /* For uninitialized pointers use none bounds. */
2748 bounds = chkp_get_none_bounds ();
2749 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2750 break;
2752 case RESULT_DECL:
2754 tree base_type;
2756 gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2758 base_type = TREE_TYPE (TREE_TYPE (node));
2760 gcc_assert (TYPE_SIZE (base_type)
2761 && TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2762 && tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2764 bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2765 NULL, false);
2766 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2768 break;
2770 default:
2771 if (dump_file && (dump_flags & TDF_DETAILS))
2773 fprintf (dump_file, "Unexpected var with no definition\n");
2774 print_generic_expr (dump_file, var, 0);
2776 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2777 get_tree_code_name (TREE_CODE (var)));
2779 break;
2781 case GIMPLE_ASSIGN:
2782 bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2783 break;
2785 case GIMPLE_CALL:
2786 bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
2787 break;
2789 case GIMPLE_PHI:
2790 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
/* Abnormal PHIs need a bounds SSA name tied to a decl (or a fresh
   named temporary) rather than an anonymous temp register.  */
2791 if (SSA_NAME_VAR (node))
2792 var = chkp_get_bounds_var (SSA_NAME_VAR (node));
2793 else
2794 var = make_temp_ssa_name (pointer_bounds_type_node,
2795 NULL,
2796 CHKP_BOUND_TMP_NAME);
2797 else
2798 var = chkp_get_tmp_var ();
2799 stmt = create_phi_node (var, gimple_bb (def_stmt));
2800 bounds = gimple_phi_result (stmt);
2801 *iter = gsi_for_phi (stmt);
2803 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2805 /* Created bounds do not have all phi args computed and
2806 therefore we do not know if there is a valid source
2807 of bounds for that node. Therefore we mark bounds
2808 as incomplete and then recompute them when all phi
2809 args are computed. */
2810 chkp_register_incomplete_bounds (bounds, node);
2811 break;
2813 case GIMPLE_ASM:
2814 bounds = chkp_get_zero_bounds ();
2815 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2816 break;
2818 default:
2819 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2820 gimple_code_name[code]);
2823 return bounds;
2826 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2827 tree
2828 chkp_build_make_bounds_call (tree lower_bound, tree size)
2830 tree call = build1 (ADDR_EXPR,
2831 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2832 chkp_bndmk_fndecl);
2833 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2834 call, 2, lower_bound, size);
2837 /* Create static bounds var of specified OBJ which
2838 is either VAR_DECL or string constant. */
2839 static tree
2840 chkp_make_static_bounds (tree obj)
2842 static int string_id = 1;
2843 static int var_id = 1;
2844 tree *slot;
2845 const char *var_name;
2846 char *bnd_var_name;
2847 tree bnd_var;
2849 /* First check if we already have required var. */
2850 if (chkp_static_var_bounds)
2852 /* For vars we use assembler name as a key in
2853 chkp_static_var_bounds map. It allows to
2854 avoid duplicating bound vars for decls
2855 sharing assembler name. */
2856 if (TREE_CODE (obj) == VAR_DECL)
2858 tree name = DECL_ASSEMBLER_NAME (obj);
2859 slot = chkp_static_var_bounds->get (name);
2860 if (slot)
2861 return *slot;
2863 else
2865 slot = chkp_static_var_bounds->get (obj);
2866 if (slot)
2867 return *slot;
2871 /* Build decl for bounds var. */
2872 if (TREE_CODE (obj) == VAR_DECL)
2874 if (DECL_IGNORED_P (obj))
/* NOTE(review): "+ 10" assumes the decimal id plus NUL fits in 10
   bytes; ids >= 10^9 would need 11 — practically unreachable, but
   worth confirming.  */
2876 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2877 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2879 else
2881 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2883 /* For hidden symbols we want to skip first '*' char. */
2884 if (*var_name == '*')
2885 var_name++;
2887 bnd_var_name = (char *) xmalloc (strlen (var_name)
2888 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2889 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2890 strcat (bnd_var_name, var_name);
2893 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2894 get_identifier (bnd_var_name),
2895 pointer_bounds_type_node);
2897 /* Address of the obj will be used as lower bound. */
2898 TREE_ADDRESSABLE (obj) = 1;
2900 else
2902 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2903 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2905 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2906 get_identifier (bnd_var_name),
2907 pointer_bounds_type_node);
2910 TREE_PUBLIC (bnd_var) = 0;
2911 TREE_USED (bnd_var) = 1;
2912 TREE_READONLY (bnd_var) = 0;
2913 TREE_STATIC (bnd_var) = 1;
2914 TREE_ADDRESSABLE (bnd_var) = 0;
2915 DECL_ARTIFICIAL (bnd_var) = 1;
2916 DECL_COMMON (bnd_var) = 1;
2917 DECL_COMDAT (bnd_var) = 1;
2918 DECL_READ_P (bnd_var) = 1;
2919 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2920 /* Force output similar to constant bounds.
2921 See chkp_make_static_const_bounds. */
2922 varpool_node::get_create (bnd_var)->force_output = 1;
2923 /* Mark symbol as requiring bounds initialization. */
2924 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2925 varpool_node::finalize_decl (bnd_var);
2927 /* Add created var to the map to use it for other references
2928 to obj. */
2929 if (!chkp_static_var_bounds)
2930 chkp_static_var_bounds = new hash_map<tree, tree>;
2932 if (TREE_CODE (obj) == VAR_DECL)
2934 tree name = DECL_ASSEMBLER_NAME (obj);
2935 chkp_static_var_bounds->put (name, bnd_var);
2937 else
2938 chkp_static_var_bounds->put (obj, bnd_var);
2940 return bnd_var;
2943 /* When var has incomplete type we cannot get size to
2944 compute its bounds. In such cases we use checker
2945 builtin call which determines object size at runtime. */
2946 static tree
2947 chkp_generate_extern_var_bounds (tree var)
2949 tree bounds, size_reloc, lb, size, max_size, cond;
2950 gimple_stmt_iterator gsi;
2951 gimple_seq seq = NULL;
2952 gimple stmt;
2954 /* If instrumentation is not enabled for vars having
2955 incomplete type then just return zero bounds to avoid
2956 checks for this var. */
2957 if (!flag_chkp_incomplete_type)
2958 return chkp_get_zero_bounds ();
2960 if (dump_file && (dump_flags & TDF_DETAILS))
2962 fprintf (dump_file, "Generating bounds for extern symbol '");
2963 print_generic_expr (dump_file, var, 0);
2964 fprintf (dump_file, "'\n");
/* Emit a call to the sizeof builtin resolved via relocation at
   link/run time.  */
2967 stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
2969 size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
2970 gimple_call_set_lhs (stmt, size_reloc);
2972 gimple_seq_add_stmt (&seq, stmt);
2974 lb = chkp_build_addr_expr (var);
2975 size = make_ssa_name (chkp_get_size_tmp_var ());
2977 if (flag_chkp_zero_dynamic_size_as_infinite)
2979 /* We should check that size relocation was resolved.
2980 If it was not then use maximum possible size for the var. */
2981 max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
2982 fold_convert (chkp_uintptr_type, lb))
2983 max_size = chkp_force_gimple_call_op (max_size, &seq);
2985 cond = build2 (NE_EXPR, boolean_type_node,
2986 size_reloc, integer_zero_node);
2987 stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
2988 gimple_seq_add_stmt (&seq, stmt);
2990 else
2992 stmt = gimple_build_assign (size, size_reloc);
2993 gimple_seq_add_stmt (&seq, stmt);
/* All generated code goes to the function's entry block.  */
2996 gsi = gsi_start_bb (chkp_get_entry_block ());
2997 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2999 bounds = chkp_make_bounds (lb, size, &gsi, true);
3001 return bounds;
3004 /* Return 1 if TYPE has fields with zero size or fields
3005 marked with chkp_variable_size attribute.
   For non-record types, returns 1 if TYPE itself has no constant
   nonzero size.  Recurses into field types.  */
3006 bool
3007 chkp_variable_size_type (tree type)
3009 bool res = false;
3010 tree field;
3012 if (RECORD_OR_UNION_TYPE_P (type))
3013 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3015 if (TREE_CODE (field) == FIELD_DECL)
3016 res = res
3017 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3018 || chkp_variable_size_type (TREE_TYPE (field));
3020 else
3021 res = !TYPE_SIZE (type)
3022 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3023 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3025 return res;
3028 /* Compute and return bounds for address of DECL which is
3029 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
3030 static tree
3031 chkp_get_bounds_for_decl_addr (tree decl)
3033 tree bounds;
3035 gcc_assert (TREE_CODE (decl) == VAR_DECL
3036 || TREE_CODE (decl) == PARM_DECL
3037 || TREE_CODE (decl) == RESULT_DECL);
3039 bounds = chkp_get_registered_addr_bounds (decl);
3041 if (bounds)
3042 return bounds;
3044 if (dump_file && (dump_flags & TDF_DETAILS))
3046 fprintf (dump_file, "Building bounds for address of decl ");
3047 print_generic_expr (dump_file, decl, 0);
3048 fprintf (dump_file, "\n");
3051 /* Use zero bounds if size is unknown and checks for
3052 unknown sizes are restricted. */
3053 if ((!DECL_SIZE (decl)
3054 || (chkp_variable_size_type (TREE_TYPE (decl))
3055 && (TREE_STATIC (decl)
3056 || DECL_EXTERNAL (decl)
3057 || TREE_PUBLIC (decl))))
3058 && !flag_chkp_incomplete_type)
3059 return chkp_get_zero_bounds ();
3061 if (flag_chkp_use_static_bounds
3062 && TREE_CODE (decl) == VAR_DECL
3063 && (TREE_STATIC (decl)
3064 || DECL_EXTERNAL (decl)
3065 || TREE_PUBLIC (decl))
3066 && !DECL_THREAD_LOCAL_P (decl))
/* Static storage: read precomputed bounds from a static bounds var,
   loaded at the start of the function's entry block.  */
3068 tree bnd_var = chkp_make_static_bounds (decl);
3069 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3070 gimple stmt;
3072 bounds = chkp_get_tmp_reg (NULL);
3073 stmt = gimple_build_assign (bounds, bnd_var);
3074 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3076 else if (!DECL_SIZE (decl)
3077 || (chkp_variable_size_type (TREE_TYPE (decl))
3078 && (TREE_STATIC (decl)
3079 || DECL_EXTERNAL (decl)
3080 || TREE_PUBLIC (decl))))
/* Size not known at compile time: compute it at runtime.  */
3082 gcc_assert (TREE_CODE (decl) == VAR_DECL);
3083 bounds = chkp_generate_extern_var_bounds (decl);
3085 else
3087 tree lb = chkp_build_addr_expr (decl);
3088 bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3091 return bounds;
3094 /* Compute and return bounds for constant string. */
3095 static tree
3096 chkp_get_bounds_for_string_cst (tree cst)
3098 tree bounds;
3099 tree lb;
3100 tree size;
3102 gcc_assert (TREE_CODE (cst) == STRING_CST);
3104 bounds = chkp_get_registered_bounds (cst);
3106 if (bounds)
3107 return bounds;
/* NOTE(review): flag_chkp_use_static_const_bounds appears tri-state
   here (the "> 0" vs. plain truth test) — confirm against the option
   definition.  */
3109 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3110 || flag_chkp_use_static_const_bounds > 0)
3112 tree bnd_var = chkp_make_static_bounds (cst);
3113 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3114 gimple stmt;
3116 bounds = chkp_get_tmp_reg (NULL);
3117 stmt = gimple_build_assign (bounds, bnd_var);
3118 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3120 else
3122 lb = chkp_build_addr_expr (cst);
3123 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3124 bounds = chkp_make_bounds (lb, size, NULL, false);
3127 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3129 return bounds;
3132 /* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
3133 return the result. If ITER is not NULL then code is inserted
3134 before position pointed by ITER. Otherwise code is added to
3135 entry block. */
3136 static tree
3137 chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
/* Zero (or missing) bounds are the identity of intersection: return
   the other operand without emitting any code.  */
3139 if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
3140 return bounds2 ? bounds2 : bounds1;
3141 else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
3142 return bounds1;
3143 else
3145 gimple_seq seq;
3146 gimple stmt;
3147 tree bounds;
3149 seq = NULL;
3151 stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
3152 chkp_mark_stmt (stmt);
3154 bounds = chkp_get_tmp_reg (stmt);
3155 gimple_call_set_lhs (stmt, bounds);
3157 gimple_seq_add_stmt (&seq, stmt);
3159 /* We are probably doing narrowing for constant expression.
3160 In such case iter may be undefined. */
3161 if (!iter)
3163 gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
3164 iter = &gsi;
3165 gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
3167 else
3168 gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
3170 if (dump_file && (dump_flags & TDF_DETAILS))
3172 fprintf (dump_file, "Bounds intersection: ");
3173 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
3174 fprintf (dump_file, " inserted before statement: ");
3175 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
3176 TDF_VOPS|TDF_MEMSYMS);
3179 return bounds;
3183 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3184 and 0 otherwise.
   Narrowing requires a constant nonzero field size and constant
   offsets, and the field must not be variable-sized.  */
3185 static bool
3186 chkp_may_narrow_to_field (tree field)
3188 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3189 && tree_to_uhwi (DECL_SIZE (field)) != 0
3190 && (!DECL_FIELD_OFFSET (field)
3191 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3192 && (!DECL_FIELD_BIT_OFFSET (field)
3193 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3194 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3195 && !chkp_variable_size_type (TREE_TYPE (field));
3198 /* Return 1 if bounds for FIELD should be narrowed to
3199 field's own size. */
3200 static bool
3201 chkp_narrow_bounds_for_field (tree field)
3203 HOST_WIDE_INT offs;
3204 HOST_WIDE_INT bit_offs;
3206 if (!chkp_may_narrow_to_field (field))
3207 return false;
3209 /* Accesses to compiler generated fields should not cause
3210 bounds narrowing. */
3211 if (DECL_ARTIFICIAL (field))
3212 return false;
3214 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3215 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
/* Narrow for any non-first field; the first field (both offsets zero)
   narrows only when flag_chkp_first_field_has_own_bounds is set.  */
3217 return (flag_chkp_narrow_bounds
3218 && (flag_chkp_first_field_has_own_bounds
3219 || offs
3220 || bit_offs));
3223 /* Perform narrowing for BOUNDS using bounds computed for field
3224 access COMPONENT. ITER meaning is the same as for
3225 chkp_intersect_bounds. */
3226 static tree
3227 chkp_narrow_bounds_to_field (tree bounds, tree component,
3228 gimple_stmt_iterator *iter)
3230 tree field = TREE_OPERAND (component, 1);
3231 tree size = DECL_SIZE_UNIT (field);
3232 tree field_ptr = chkp_build_addr_expr (component);
3233 tree field_bounds;
/* Make bounds covering just the field, then intersect with the
   incoming (outer object) bounds.  */
3235 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3237 return chkp_intersect_bounds (field_bounds, bounds, iter);
3240 /* Parse field or array access NODE.
3242 PTR output parameter holds a pointer to the outermost
3243 object.
3245 BITFIELD output parameter is set to 1 if bitfield is
3246 accessed and to 0 otherwise. If it is 1 then ELT holds
3247 outer component for accessed bit field.
3249 SAFE outer parameter is set to 1 if access is safe and
3250 checks are not required.
3252 BOUNDS outer parameter holds bounds to be used to check
3253 access (may be NULL).
3255 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3256 innermost accessed component. */
3257 static void
3258 chkp_parse_array_and_component_ref (tree node, tree *ptr,
3259 tree *elt, bool *safe,
3260 bool *bitfield,
3261 tree *bounds,
3262 gimple_stmt_iterator *iter,
3263 bool innermost_bounds)
3265 tree comp_to_narrow = NULL_TREE;
3266 tree last_comp = NULL_TREE;
3267 bool array_ref_found = false;
3268 tree *nodes;
3269 tree var;
3270 int len;
3271 int i;
3273 /* Compute tree height for expression. */
3274 var = node;
3275 len = 1;
3276 while (TREE_CODE (var) == COMPONENT_REF
3277 || TREE_CODE (var) == ARRAY_REF
3278 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
3280 var = TREE_OPERAND (var, 0);
3281 len++;
3284 gcc_assert (len > 1);
3286 /* It is more convenient for us to scan left-to-right,
3287 so walk tree again and put all node to nodes vector
3288 in reversed order. */
3289 nodes = XALLOCAVEC (tree, len);
3290 nodes[len - 1] = node;
3291 for (i = len - 2; i >= 0; i--)
3292 nodes[i] = TREE_OPERAND (nodes[i + 1], 0);
3294 if (bounds)
3295 *bounds = NULL;
3296 *safe = true;
3297 *bitfield = (TREE_CODE (node) == COMPONENT_REF
3298 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
3299 /* To get bitfield address we will need outer element. */
3300 if (*bitfield)
3301 *elt = nodes[len - 2];
3302 else
3303 *elt = NULL_TREE;
3305 /* If we have indirection in expression then compute
3306 outermost structure bounds. Computed bounds may be
3307 narrowed later. */
3308 if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
3310 *safe = false;
3311 *ptr = TREE_OPERAND (nodes[0], 0);
3312 if (bounds)
3313 *bounds = chkp_find_bounds (*ptr, iter);
3315 else
/* Direct access to a decl or string: take its address as the
   outermost pointer.  */
3317 gcc_assert (TREE_CODE (var) == VAR_DECL
3318 || TREE_CODE (var) == PARM_DECL
3319 || TREE_CODE (var) == RESULT_DECL
3320 || TREE_CODE (var) == STRING_CST
3321 || TREE_CODE (var) == SSA_NAME);
3323 *ptr = chkp_build_addr_expr (var);
3326 /* In this loop we are trying to find a field access
3327 requiring narrowing. There are two simple rules
3328 for search:
3329 1. Leftmost array_ref is chosen if any.
3330 2. Rightmost suitable component_ref is chosen if innermost
3331 bounds are required and no array_ref exists. */
3332 for (i = 1; i < len; i++)
3334 var = nodes[i];
3336 if (TREE_CODE (var) == ARRAY_REF)
3338 *safe = false;
3339 array_ref_found = true;
3340 if (flag_chkp_narrow_bounds
3341 && !flag_chkp_narrow_to_innermost_arrray
3342 && (!last_comp
3343 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
3345 comp_to_narrow = last_comp;
3346 break;
3349 else if (TREE_CODE (var) == COMPONENT_REF)
3351 tree field = TREE_OPERAND (var, 1);
3353 if (innermost_bounds
3354 && !array_ref_found
3355 && chkp_narrow_bounds_for_field (field))
3356 comp_to_narrow = var;
3357 last_comp = var;
3359 if (flag_chkp_narrow_bounds
3360 && flag_chkp_narrow_to_innermost_arrray
3361 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
/* Narrow eagerly to each array-typed field on the way in.  */
3363 if (bounds)
3364 *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
3365 comp_to_narrow = NULL;
3368 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
3369 /* Nothing to do for it. */
3371 else
3372 gcc_unreachable ();
3375 if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
3376 *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);
3378 if (innermost_bounds && bounds && !*bounds)
3379 *bounds = chkp_find_bounds (*ptr, iter);
3382 /* Compute and return bounds for address of OBJ. */
3383 static tree
3384 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
/* Return memoized result if bounds for &OBJ were computed before.  */
3386 tree bounds = chkp_get_registered_addr_bounds (obj);
3388 if (bounds)
3389 return bounds;
3391 switch (TREE_CODE (obj))
/* Declared objects: bounds cover the whole declaration.  */
3393 case VAR_DECL:
3394 case PARM_DECL:
3395 case RESULT_DECL:
3396 bounds = chkp_get_bounds_for_decl_addr (obj);
3397 break;
/* String literals get bounds covering the constant.  */
3399 case STRING_CST:
3400 bounds = chkp_get_bounds_for_string_cst (obj);
3401 break;
3403 case ARRAY_REF:
3404 case COMPONENT_REF:
3406 tree elt;
3407 tree ptr;
3408 bool safe;
3409 bool bitfield;
/* Last argument TRUE requests innermost (narrowed) bounds for the
   accessed element/field; the helper fills BOUNDS directly.  */
3411 chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
3412 &bitfield, &bounds, iter, true);
3414 gcc_assert (bounds);
3416 break;
/* Code addresses are never valid data pointers; give zero bounds.  */
3418 case FUNCTION_DECL:
3419 case LABEL_DECL:
3420 bounds = chkp_get_zero_bounds ();
3421 break;
/* Address of *p has the bounds of pointer p itself.  */
3423 case MEM_REF:
3424 bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
3425 break;
/* Real/imaginary part share the bounds of the underlying complex
   object; recurse on it.  */
3427 case REALPART_EXPR:
3428 case IMAGPART_EXPR:
3429 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
3430 break;
/* Any other tree code is a pass bug: dump details and abort.  */
3432 default:
3433 if (dump_file && (dump_flags & TDF_DETAILS))
3435 fprintf (dump_file, "chkp_make_addressed_object_bounds: "
3436 "unexpected object of type %s\n",
3437 get_tree_code_name (TREE_CODE (obj)));
3438 print_node (dump_file, "", obj, 0);
3440 internal_error ("chkp_make_addressed_object_bounds: "
3441 "Unexpected tree code %s",
3442 get_tree_code_name (TREE_CODE (obj)));
/* Memoize so repeated requests for &OBJ reuse this result.  */
3445 chkp_register_addr_bounds (obj, bounds);
3447 return bounds;
3450 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3451 to compute bounds if required. Computed bounds should be available at
3452 position pointed by ITER.
3454 If PTR_SRC is NULL_TREE then pointer definition is identified.
3456 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3457 PTR. If PTR is any memory reference then ITER points to a statement
3458 after which bndldx will be inserted. In both cases ITER will be updated
3459 to point to the inserted bndldx statement. */
3461 static tree
3462 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3464 tree addr = NULL_TREE;
3465 tree bounds = NULL_TREE;
3467 if (!ptr_src)
3468 ptr_src = ptr;
3470 bounds = chkp_get_registered_bounds (ptr_src);
3472 if (bounds)
3473 return bounds;
3475 switch (TREE_CODE (ptr_src))
3477 case MEM_REF:
3478 case VAR_DECL:
3479 if (BOUNDED_P (ptr_src))
3480 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3481 bounds = chkp_get_zero_bounds ();
3482 else
3484 addr = chkp_build_addr_expr (ptr_src);
3485 bounds = chkp_build_bndldx (addr, ptr, iter);
3487 else
3488 bounds = chkp_get_nonpointer_load_bounds ();
3489 break;
3491 case ARRAY_REF:
3492 case COMPONENT_REF:
3493 addr = get_base_address (ptr_src);
3494 if (DECL_P (addr)
3495 || TREE_CODE (addr) == MEM_REF
3496 || TREE_CODE (addr) == TARGET_MEM_REF)
3498 if (BOUNDED_P (ptr_src))
3499 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3500 bounds = chkp_get_zero_bounds ();
3501 else
3503 addr = chkp_build_addr_expr (ptr_src);
3504 bounds = chkp_build_bndldx (addr, ptr, iter);
3506 else
3507 bounds = chkp_get_nonpointer_load_bounds ();
3509 else
3511 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3512 bounds = chkp_find_bounds (addr, iter);
3514 break;
3516 case PARM_DECL:
3517 gcc_unreachable ();
3518 bounds = chkp_get_bound_for_parm (ptr_src);
3519 break;
3521 case TARGET_MEM_REF:
3522 addr = chkp_build_addr_expr (ptr_src);
3523 bounds = chkp_build_bndldx (addr, ptr, iter);
3524 break;
3526 case SSA_NAME:
3527 bounds = chkp_get_registered_bounds (ptr_src);
3528 if (!bounds)
3530 gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3531 gphi_iterator phi_iter;
3533 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3535 gcc_assert (bounds);
3537 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3539 unsigned i;
3541 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3543 tree arg = gimple_phi_arg_def (def_phi, i);
3544 tree arg_bnd;
3545 gphi *phi_bnd;
3547 arg_bnd = chkp_find_bounds (arg, NULL);
3549 /* chkp_get_bounds_by_definition created new phi
3550 statement and phi_iter points to it.
3552 Previous call to chkp_find_bounds could create
3553 new basic block and therefore change phi statement
3554 phi_iter points to. */
3555 phi_bnd = phi_iter.phi ();
3557 add_phi_arg (phi_bnd, arg_bnd,
3558 gimple_phi_arg_edge (def_phi, i),
3559 UNKNOWN_LOCATION);
3562 /* If all bound phi nodes have their arg computed
3563 then we may finish its computation. See
3564 chkp_finish_incomplete_bounds for more details. */
3565 if (chkp_may_finish_incomplete_bounds ())
3566 chkp_finish_incomplete_bounds ();
3569 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3570 || chkp_incomplete_bounds (bounds));
3572 break;
3574 case ADDR_EXPR:
3575 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3576 break;
3578 case INTEGER_CST:
3579 if (integer_zerop (ptr_src))
3580 bounds = chkp_get_none_bounds ();
3581 else
3582 bounds = chkp_get_invalid_op_bounds ();
3583 break;
3585 default:
3586 if (dump_file && (dump_flags & TDF_DETAILS))
3588 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3589 get_tree_code_name (TREE_CODE (ptr_src)));
3590 print_node (dump_file, "", ptr_src, 0);
3592 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3593 get_tree_code_name (TREE_CODE (ptr_src)));
3596 if (!bounds)
3598 if (dump_file && (dump_flags & TDF_DETAILS))
3600 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3601 print_node (dump_file, "", ptr_src, 0);
3603 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3606 return bounds;
3609 /* Normal case for bounds search without forced narrowing. */
3610 static tree
3611 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3613 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3616 /* Search bounds for pointer PTR loaded from PTR_SRC
3617 by statement *ITER points to. */
3618 static tree
3619 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3621 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3624 /* Helper function which checks type of RHS and finds all pointers in
3625 it. For each found pointer we build its accesses in LHS and RHS
3626 objects and then call HANDLER for them. Function is used to copy
3627 or initialize bounds for copied object. */
3628 static void
3629 chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
3630 assign_handler handler)
3632 tree type = TREE_TYPE (lhs);
3634 /* We have nothing to do with clobbers. */
3635 if (TREE_CLOBBER_P (rhs))
3636 return;
/* A bounded (pointer) type is a leaf: hand it to the handler.  */
3638 if (BOUNDED_TYPE_P (type))
3639 handler (lhs, rhs, arg);
3640 else if (RECORD_OR_UNION_TYPE_P (type))
3642 tree field;
/* Struct/union initialized by a CONSTRUCTOR: recurse into each
   initialized field that may contain pointers.  */
3644 if (TREE_CODE (rhs) == CONSTRUCTOR)
3646 unsigned HOST_WIDE_INT cnt;
3647 tree val;
3649 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
3651 if (chkp_type_has_pointer (TREE_TYPE (field)))
3653 tree lhs_field = chkp_build_component_ref (lhs, field);
3654 chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
/* Struct-to-struct copy: pair up fields of LHS and RHS.  */
3658 else
3659 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3660 if (TREE_CODE (field) == FIELD_DECL
3661 && chkp_type_has_pointer (TREE_TYPE (field)))
3663 tree rhs_field = chkp_build_component_ref (rhs, field);
3664 tree lhs_field = chkp_build_component_ref (lhs, field);
3665 chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
3668 else if (TREE_CODE (type) == ARRAY_TYPE)
3670 unsigned HOST_WIDE_INT cur = 0;
3671 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3672 tree etype = TREE_TYPE (type);
3673 tree esize = TYPE_SIZE (etype);
/* Array initialized by a CONSTRUCTOR: honor RANGE_EXPR and explicit
   index designators; otherwise indices run sequentially via CUR.  */
3675 if (TREE_CODE (rhs) == CONSTRUCTOR)
3677 unsigned HOST_WIDE_INT cnt;
3678 tree purp, val, lhs_elem;
3680 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
3682 if (purp && TREE_CODE (purp) == RANGE_EXPR)
3684 tree lo_index = TREE_OPERAND (purp, 0);
3685 tree hi_index = TREE_OPERAND (purp, 1);
/* NOTE(review): the (unsigned) casts truncate a HOST_WIDE_INT index
   while CUR itself is unsigned HOST_WIDE_INT — looks benign for real
   initializers but verify for indices above UINT_MAX.  */
3687 for (cur = (unsigned)tree_to_uhwi (lo_index);
3688 cur <= (unsigned)tree_to_uhwi (hi_index);
3689 cur++)
3691 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3692 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3695 else
3697 if (purp)
3699 gcc_assert (TREE_CODE (purp) == INTEGER_CST);
3700 cur = tree_to_uhwi (purp);
3703 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
3705 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3709 /* Copy array only when size is known. */
3710 else if (maxval && !integer_minus_onep (maxval))
3711 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
3713 tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3714 tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
3715 chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
/* Any other pointer-holding type is unexpected here.  */
3718 else
3719 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3720 get_tree_code_name (TREE_CODE (type)));
3723 /* Add code to copy bounds for assignment of RHS to LHS.
3724 ARG is an iterator pointing to the code position. */
3725 static void
3726 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3728 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3729 tree bounds = chkp_find_bounds (rhs, iter);
3730 tree addr = chkp_build_addr_expr(lhs);
3732 chkp_build_bndstx (addr, rhs, bounds, iter);
3735 /* Emit static bound initializers and size vars. */
3736 void
3737 chkp_finish_file (void)
3739 struct varpool_node *node;
3740 struct chkp_ctor_stmt_list stmts;
/* Do not emit anything for erroneous compilations.  */
3742 if (seen_error ())
3743 return;
3745 /* Iterate through varpool and generate bounds initialization
3746 constructors for all statically initialized pointers. */
3747 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3748 stmts.stmts = NULL;
3749 FOR_EACH_VARIABLE (node)
3750 /* Check that var is actually emitted and we need and may initialize
3751 its bounds. */
3752 if (node->need_bounds_init
3753 && !POINTER_BOUNDS_P (node->decl)
3754 && DECL_RTL (node->decl)
3755 && MEM_P (DECL_RTL (node->decl))
3756 && TREE_ASM_WRITTEN (node->decl))
3758 chkp_walk_pointer_assignments (node->decl,
3759 DECL_INITIAL (node->decl),
3760 &stmts,
3761 chkp_add_modification_to_stmt_list)
/* Flush into a 'P' constructor whenever the statement budget for a
   single static ctor is exhausted, then start a new list.  */
3763 if (stmts.avail <= 0)
3765 cgraph_build_static_cdtor ('P', stmts.stmts,
3766 MAX_RESERVED_INIT_PRIORITY + 3);
3767 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3768 stmts.stmts = NULL;
/* Emit the remaining pointer-bounds initializations, if any.  */
3772 if (stmts.stmts)
3773 cgraph_build_static_cdtor ('P', stmts.stmts,
3774 MAX_RESERVED_INIT_PRIORITY + 3);
3776 /* Iterate through varpool and generate bounds initialization
3777 constructors for all static bounds vars. */
3778 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3779 stmts.stmts = NULL;
3780 FOR_EACH_VARIABLE (node)
3781 if (node->need_bounds_init
3782 && POINTER_BOUNDS_P (node->decl)
3783 && TREE_ASM_WRITTEN (node->decl))
3785 tree bnd = node->decl;
3786 tree var;
/* Static bounds vars are always initialized with &var of the object
   whose bounds they describe.  */
3788 gcc_assert (DECL_INITIAL (bnd)
3789 && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);
3791 var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
3792 chkp_output_static_bounds (bnd, var, &stmts);
/* Bounds-var initializers go into a separate 'B' constructor that runs
   before the 'P' ones (priority +2 vs +3).  */
3795 if (stmts.stmts)
3796 cgraph_build_static_cdtor ('B', stmts.stmts,
3797 MAX_RESERVED_INIT_PRIORITY + 2);
/* Compilation-wide maps are no longer needed.  */
3799 delete chkp_static_var_bounds;
3800 delete chkp_bounds_map;
3803 /* An instrumentation function which is called for each statement
3804 having memory access we want to instrument. It inserts check
3805 code and bounds copy code.
3807 ITER points to statement to instrument.
3809 NODE holds memory access in statement to check.
3811 LOC holds the location information for statement.
3813 DIRFLAGS determines whether access is read or write.
3815 ACCESS_OFFS should be added to address used in NODE
3816 before check.
3818 ACCESS_SIZE holds size of checked access.
3820 SAFE indicates if NODE access is safe and should not be
3821 checked. */
3822 static void
3823 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3824 location_t loc, tree dirflag,
3825 tree access_offs, tree access_size,
3826 bool safe)
3828 tree node_type = TREE_TYPE (node);
3829 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3830 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3831 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3832 tree ptr = NULL_TREE; /* a pointer used for dereference */
3833 tree bounds = NULL_TREE;
3835 /* We do not need instrumentation for clobbers. */
3836 if (dirflag == integer_one_node
3837 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3838 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3839 return;
3841 switch (TREE_CODE (node))
3843 case ARRAY_REF:
3844 case COMPONENT_REF:
3846 bool bitfield;
3847 tree elt;
3849 if (safe)
3851 /* We are not going to generate any checks, so do not
3852 generate bounds as well. */
3853 addr_first = chkp_build_addr_expr (node);
3854 break;
3857 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3858 &bitfield, &bounds, iter, false);
3860 /* Break if there is no dereference and operation is safe. */
3862 if (bitfield)
3864 tree field = TREE_OPERAND (node, 1);
3866 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3867 size = DECL_SIZE_UNIT (field);
3869 if (elt)
3870 elt = chkp_build_addr_expr (elt);
3871 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3872 addr_first = fold_build_pointer_plus_loc (loc,
3873 addr_first,
3874 byte_position (field));
3876 else
3877 addr_first = chkp_build_addr_expr (node);
3879 break;
3881 case INDIRECT_REF:
3882 ptr = TREE_OPERAND (node, 0);
3883 addr_first = ptr;
3884 break;
3886 case MEM_REF:
3887 ptr = TREE_OPERAND (node, 0);
3888 addr_first = chkp_build_addr_expr (node);
3889 break;
3891 case TARGET_MEM_REF:
3892 ptr = TMR_BASE (node);
3893 addr_first = chkp_build_addr_expr (node);
3894 break;
3896 case ARRAY_RANGE_REF:
3897 printf("ARRAY_RANGE_REF\n");
3898 debug_gimple_stmt(gsi_stmt(*iter));
3899 debug_tree(node);
3900 gcc_unreachable ();
3901 break;
3903 case BIT_FIELD_REF:
3905 tree offs, rem, bpu;
3907 gcc_assert (!access_offs);
3908 gcc_assert (!access_size);
3910 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3911 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3912 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3913 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3915 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3916 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3917 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3918 size = fold_convert (size_type_node, size);
3920 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3921 dirflag, offs, size, safe);
3922 return;
3924 break;
3926 case VAR_DECL:
3927 case RESULT_DECL:
3928 case PARM_DECL:
3929 if (dirflag != integer_one_node
3930 || DECL_REGISTER (node))
3931 return;
3933 safe = true;
3934 addr_first = chkp_build_addr_expr (node);
3935 break;
3937 default:
3938 return;
3941 /* If addr_last was not computed then use (addr_first + size - 1)
3942 expression to compute it. */
3943 if (!addr_last)
3945 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3946 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3949 /* Shift both first_addr and last_addr by access_offs if specified. */
3950 if (access_offs)
3952 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3953 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3956 /* Generate bndcl/bndcu checks if memory access is not safe. */
3957 if (!safe)
3959 gimple_stmt_iterator stmt_iter = *iter;
3961 if (!bounds)
3962 bounds = chkp_find_bounds (ptr, iter);
3964 chkp_check_mem_access (addr_first, addr_last, bounds,
3965 stmt_iter, loc, dirflag);
3968 /* We need to store bounds in case pointer is stored. */
3969 if (dirflag == integer_one_node
3970 && chkp_type_has_pointer (node_type)
3971 && flag_chkp_store_bounds)
3973 gimple stmt = gsi_stmt (*iter);
3974 tree rhs1 = gimple_assign_rhs1 (stmt);
3975 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3977 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3978 chkp_walk_pointer_assignments (node, rhs1, iter,
3979 chkp_copy_bounds_for_elem);
3980 else
3982 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3983 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
3988 /* Add code to copy bounds for all pointers copied
3989 in ASSIGN created during inline of EDGE. */
3990 void
3991 chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
3993 tree lhs = gimple_assign_lhs (assign);
3994 tree rhs = gimple_assign_rhs1 (assign);
3995 gimple_stmt_iterator iter = gsi_for_stmt (assign);
3997 if (!flag_chkp_store_bounds)
3998 return;
/* Insert bndldx/bndstx pairs for every pointer copied by ASSIGN.  */
4000 chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);
4002 /* We should create edges for all created calls to bndldx and bndstx. */
/* Walk backwards from ASSIGN over the statements just inserted.  */
4003 while (gsi_stmt (iter) != assign)
4005 gimple stmt = gsi_stmt (iter);
4006 if (gimple_code (stmt) == GIMPLE_CALL)
4008 tree fndecl = gimple_call_fndecl (stmt);
4009 struct cgraph_node *callee = cgraph_node::get_create (fndecl);
4010 struct cgraph_edge *new_edge;
/* Only chkp runtime helpers may have been inserted here.  */
4012 gcc_assert (fndecl == chkp_bndstx_fndecl
4013 || fndecl == chkp_bndldx_fndecl
4014 || fndecl == chkp_ret_bnd_fndecl);
/* New call edges inherit count/frequency from the inlined EDGE;
   frequency is then recomputed from the statement's block.  */
4016 new_edge = edge->caller->create_edge (callee,
4017 as_a <gcall *> (stmt),
4018 edge->count,
4019 edge->frequency);
4020 new_edge->frequency = compute_call_stmt_bb_frequency
4021 (edge->caller->decl, gimple_bb (stmt));
4023 gsi_prev (&iter);
4027 /* Some code transformation made during instrumentation pass
4028 may put code into inconsistent state. Here we find and fix
4029 such flaws. */
4030 void
4031 chkp_fix_cfg ()
4033 basic_block bb;
4034 gimple_stmt_iterator i;
4036 /* We could insert some code right after stmt which ends bb.
4037 We wanted to put this code on fallthru edge but did not
4038 add new edges from the beginning because it may cause new
4039 phi node creation which may be incorrect due to incomplete
4040 bound phi nodes. */
4041 FOR_ALL_BB_FN (bb, cfun)
4042 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4044 gimple stmt = gsi_stmt (i);
4045 gimple_stmt_iterator next = i;
4047 gsi_next (&next);
/* STMT ends the block but statements follow it: move the trailing
   statements onto the fallthru edge.  */
4049 if (stmt_ends_bb_p (stmt)
4050 && !gsi_end_p (next))
4052 edge fall = find_fallthru_edge (bb->succs);
4053 basic_block dest = NULL;
4054 int flags = 0;
4056 gcc_assert (fall);
4058 /* We cannot split abnormal edge. Therefore we
4059 store its params, make it regular and then
4060 rebuild abnormal edge after split. */
4061 if (fall->flags & EDGE_ABNORMAL)
4063 flags = fall->flags & ~EDGE_FALLTHRU;
4064 dest = fall->dest;
4066 fall->flags &= ~EDGE_COMPLEX;
/* Detach every trailing statement and queue it on the edge.  */
4069 while (!gsi_end_p (next))
4071 gimple next_stmt = gsi_stmt (next);
4072 gsi_remove (&next, false);
4073 gsi_insert_on_edge (fall, next_stmt);
/* Splitting happens here, materializing the queued statements.  */
4076 gsi_commit_edge_inserts ();
4078 /* Re-create abnormal edge. */
4079 if (dest)
4080 make_edge (bb, dest, flags);
4085 /* Walker callback for chkp_replace_function_pointers. Replaces
4086 function pointer in the specified operand with pointer to the
4087 instrumented function version. */
4088 static tree
4089 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4090 void *data ATTRIBUTE_UNUSED)
/* Replace only non-legacy functions: regular functions always, normal
   builtins only when selected for instrumentation or having a body.  */
4092 if (TREE_CODE (*op) == FUNCTION_DECL
4093 && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op))
4094 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4095 /* For builtins we replace pointers only for selected
4096 function and functions having definitions. */
4097 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4098 && (chkp_instrument_normal_builtin (*op)
4099 || gimple_has_body_p (*op)))))
4101 struct cgraph_node *node = cgraph_node::get_create (*op);
4102 struct cgraph_node *clone = NULL;
/* Create the instrumented clone on demand; if *OP already is a clone
   there is nothing to substitute.  */
4104 if (!node->instrumentation_clone)
4105 clone = chkp_maybe_create_clone (*op);
4107 if (clone)
4108 *op = clone->decl;
/* A FUNCTION_DECL has no function pointers beneath it; stop walking.  */
4109 *walk_subtrees = 0;
/* NULL keeps walk_tree going over the remaining operands.  */
4112 return NULL;
4115 /* This function searches for function pointers in statement
4116 pointed by GSI and replaces them with pointers to instrumented
4117 function versions. */
4118 static void
4119 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4121 gimple stmt = gsi_stmt (*gsi);
4122 /* For calls we want to walk call args only. */
4123 if (gimple_code (stmt) == GIMPLE_CALL)
4125 unsigned i;
4126 for (i = 0; i < gimple_call_num_args (stmt); i++)
4127 walk_tree (gimple_call_arg_ptr (stmt, i),
4128 chkp_replace_function_pointer, NULL, NULL);
4130 else
4131 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4134 /* This function instruments all statements working with memory,
4135 calls and rets.
4137 It also removes excess statements from static initializers. */
4138 static void
4139 chkp_instrument_function (void)
4141 basic_block bb, next;
4142 gimple_stmt_iterator i;
4143 enum gimple_rhs_class grhs_class;
/* chkp-generated static constructors are marked safe: no checks.  */
4144 bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4146 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
/* NEXT is captured before instrumenting BB because instrumentation may
   split blocks and change the chain.  */
4149 next = bb->next_bb;
4150 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4152 gimple s = gsi_stmt (i);
4154 /* Skip statement marked to not be instrumented. */
4155 if (chkp_marked_stmt_p (s))
4157 gsi_next (&i);
4158 continue;
/* Redirect function pointers in S to instrumented clones.  */
4161 chkp_replace_function_pointers (&i);
4163 switch (gimple_code (s))
/* dirflag integer_one_node = write (LHS), integer_zero_node = read.  */
4165 case GIMPLE_ASSIGN:
4166 chkp_process_stmt (&i, gimple_assign_lhs (s),
4167 gimple_location (s), integer_one_node,
4168 NULL_TREE, NULL_TREE, safe);
4169 chkp_process_stmt (&i, gimple_assign_rhs1 (s),
4170 gimple_location (s), integer_zero_node,
4171 NULL_TREE, NULL_TREE, safe);
4172 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
4173 if (grhs_class == GIMPLE_BINARY_RHS)
4174 chkp_process_stmt (&i, gimple_assign_rhs2 (s),
4175 gimple_location (s), integer_zero_node,
4176 NULL_TREE, NULL_TREE, safe);
4177 break;
4179 case GIMPLE_RETURN:
4181 greturn *r = as_a <greturn *> (s);
4182 if (gimple_return_retval (r) != NULL_TREE)
4184 chkp_process_stmt (&i, gimple_return_retval (r),
4185 gimple_location (r),
4186 integer_zero_node,
4187 NULL_TREE, NULL_TREE, safe);
4189 /* Additionally we need to add bounds
4190 to return statement. */
4191 chkp_add_bounds_to_ret_stmt (&i);
4194 break;
4196 case GIMPLE_CALL:
4197 chkp_add_bounds_to_call_stmt (&i);
4198 break;
4200 default:
4204 gsi_next (&i);
4206 /* We do not need any actual pointer stores in checker
4207 static initializer. */
4208 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4209 && gimple_code (s) == GIMPLE_ASSIGN
4210 && gimple_store_p (s))
4212 gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4213 gsi_remove (&del_iter, true);
4214 unlink_stmt_vdef (s);
4215 release_defs(s);
4218 bb = next;
4220 while (bb);
4222 /* Some input params may have bounds and be address taken. In this case
4223 we should store incoming bounds into bounds table. */
4224 tree arg;
4225 if (flag_chkp_store_bounds)
4226 for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4227 if (TREE_ADDRESSABLE (arg))
4229 if (BOUNDED_P (arg))
/* Pointer parameter: store its incoming bounds at &arg.  */
4231 tree bounds = chkp_get_next_bounds_parm (arg);
4232 tree def_ptr = ssa_default_def (cfun, arg);
4233 gimple_stmt_iterator iter
4234 = gsi_start_bb (chkp_get_entry_block ());
4235 chkp_build_bndstx (chkp_build_addr_expr (arg),
4236 def_ptr ? def_ptr : arg,
4237 bounds, &iter);
4239 /* Skip bounds arg. */
4240 arg = TREE_CHAIN (arg);
4242 else if (chkp_type_has_pointer (TREE_TYPE (arg)))
/* Aggregate parameter containing pointers: store bounds for each
   pointer slot found in its type.  */
4244 tree orig_arg = arg;
4245 bitmap slots = BITMAP_ALLOC (NULL);
4246 gimple_stmt_iterator iter
4247 = gsi_start_bb (chkp_get_entry_block ());
4248 bitmap_iterator bi;
4249 unsigned bnd_no;
4251 chkp_find_bound_slots (TREE_TYPE (arg), slots);
4253 EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4255 tree bounds = chkp_get_next_bounds_parm (arg);
4256 HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4257 tree addr = chkp_build_addr_expr (orig_arg);
4258 tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4259 build_int_cst (ptr_type_node, offs));
4260 chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4261 bounds, &iter);
/* Each bound slot consumes one extra (bounds) parameter.  */
4263 arg = DECL_CHAIN (arg);
4265 BITMAP_FREE (slots);
4270 /* Find init/null/copy_ptr_bounds calls and replace them
4271 with assignments. It should allow better code
4272 optimization. */
4274 static void
4275 chkp_remove_useless_builtins ()
4277 basic_block bb;
4278 gimple_stmt_iterator gsi;
4280 FOR_EACH_BB_FN (bb, cfun)
4282 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4284 gimple stmt = gsi_stmt (gsi);
4285 tree fndecl;
4286 enum built_in_function fcode;
4288 /* Find builtins returning first arg and replace
4289 them with assignments. */
/* Note: the `fcode = DECL_FUNCTION_CODE (fndecl)` assignment is used
   as a condition — a zero function code short-circuits the chain,
   which is fine since none of the four compared codes is zero.  */
4290 if (gimple_code (stmt) == GIMPLE_CALL
4291 && (fndecl = gimple_call_fndecl (stmt))
4292 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4293 && (fcode = DECL_FUNCTION_CODE (fndecl))
4294 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4295 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4296 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4297 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
/* Rewrite `lhs = builtin (arg0, ...)` into `lhs = arg0`.  */
4299 tree res = gimple_call_arg (stmt, 0);
4300 update_call_from_tree (&gsi, res);
4301 stmt = gsi_stmt (gsi);
4302 update_stmt (stmt);
4308 /* Initialize pass. */
4309 static void
4310 chkp_init (void)
4312 basic_block bb;
4313 gimple_stmt_iterator i;
4315 in_chkp_pass = true;
/* Clear any instrumentation marks left on statements.  */
4317 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4318 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4319 chkp_unmark_stmt (gsi_stmt (i));
/* (Re)allocate per-function state.  chkp_reg_bounds, chkp_bound_vars
   and chkp_bounds_map are deleted first because they may persist from
   a previously processed function.  */
4321 chkp_invalid_bounds = new hash_set<tree>;
4322 chkp_completed_bounds_set = new hash_set<tree>;
4323 delete chkp_reg_bounds;
4324 chkp_reg_bounds = new hash_map<tree, tree>;
4325 delete chkp_bound_vars;
4326 chkp_bound_vars = new hash_map<tree, tree>;
4327 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4328 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4329 delete chkp_bounds_map;
4330 chkp_bounds_map = new hash_map<tree, tree>;
4331 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
/* Reset cached per-function singletons.  */
4333 entry_block = NULL;
4334 zero_bounds = NULL_TREE;
4335 none_bounds = NULL_TREE;
4336 incomplete_bounds = integer_zero_node;
4337 tmp_var = NULL_TREE;
4338 size_tmp_var = NULL_TREE;
4340 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4342 /* We create these constant bounds once for each object file.
4343 These symbols go to comdat section and result in single copy
4344 of each one in the final binary. */
4345 chkp_get_zero_bounds_var ();
4346 chkp_get_none_bounds_var ();
/* Dominance info is used by the instrumentation machinery.  */
4348 calculate_dominance_info (CDI_DOMINATORS);
4349 calculate_dominance_info (CDI_POST_DOMINATORS);
4351 bitmap_obstack_initialize (NULL);
4354 /* Finalize instrumentation pass. */
4355 static void
4356 chkp_fini (void)
4358 in_chkp_pass = false;
/* Release per-function containers allocated in chkp_init.  Note that
   chkp_reg_bounds, chkp_bound_vars and chkp_bounds_map are deliberately
   kept alive — chkp_init deletes them on the next function.  */
4360 delete chkp_invalid_bounds;
4361 delete chkp_completed_bounds_set;
4362 delete chkp_reg_addr_bounds;
4363 delete chkp_incomplete_bounds_map;
4365 free_dominance_info (CDI_DOMINATORS);
4366 free_dominance_info (CDI_POST_DOMINATORS);
4368 bitmap_obstack_release (NULL);
/* Drop cached per-function singletons.  */
4370 entry_block = NULL;
4371 zero_bounds = NULL_TREE;
4372 none_bounds = NULL_TREE;
4375 /* Main instrumentation pass function. */
4376 static unsigned int
4377 chkp_execute (void)
4379 chkp_init ();
4381 chkp_instrument_function ();
4383 chkp_remove_useless_builtins ();
4385 chkp_function_mark_instrumented (cfun->decl);
4387 chkp_fix_cfg ();
4389 chkp_fini ();
4391 return 0;
4394 /* Instrumentation pass gate. */
4395 static bool
4396 chkp_gate (void)
4398 cgraph_node *node = cgraph_node::get (cfun->decl);
4399 return ((node != NULL
4400 && node->instrumentation_clone)
4401 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
4404 namespace {
/* Pass metadata for the "chkp" GIMPLE pass.  */
4406 const pass_data pass_data_chkp =
4408 GIMPLE_PASS, /* type */
4409 "chkp", /* name */
4410 OPTGROUP_NONE, /* optinfo_flags */
4411 TV_NONE, /* tv_id */
4412 PROP_ssa | PROP_cfg, /* properties_required */
4413 0, /* properties_provided */
4414 0, /* properties_destroyed */
4415 0, /* todo_flags_start */
4416 TODO_verify_il
4417 | TODO_update_ssa /* todo_flags_finish */
/* Pass wrapper delegating gate/execute to chkp_gate/chkp_execute.  */
4420 class pass_chkp : public gimple_opt_pass
4422 public:
4423 pass_chkp (gcc::context *ctxt)
4424 : gimple_opt_pass (pass_data_chkp, ctxt)
4427 /* opt_pass methods: */
4428 virtual opt_pass * clone ()
4430 return new pass_chkp (m_ctxt);
4433 virtual bool gate (function *)
4435 return chkp_gate ();
4438 virtual unsigned int execute (function *)
4440 return chkp_execute ();
4443 }; // class pass_chkp
4445 } // anon namespace
/* Factory: create a new instance of the chkp pass for CTXT.  */
4447 gimple_opt_pass *
4448 make_pass_chkp (gcc::context *ctxt)
4450 return new pass_chkp (ctxt);
4453 #include "gt-tree-chkp.h"