Fix cygwin performance loss on linpack.
[official-gcc.git] / gcc / tree-chkp.c
blob34d9dfc47f12b80c3c59c652f057a7f61b77e74e
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "diagnostic.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "varasm.h"
37 #include "tree-iterator.h"
38 #include "tree-cfg.h"
39 #include "langhooks.h"
40 #include "tree-ssa-address.h"
41 #include "tree-ssa-loop-niter.h"
42 #include "gimple-pretty-print.h"
43 #include "gimple-iterator.h"
44 #include "gimplify.h"
45 #include "gimplify-me.h"
46 #include "print-tree.h"
47 #include "calls.h"
48 #include "expr.h"
49 #include "tree-ssa-propagate.h"
50 #include "tree-chkp.h"
51 #include "gimple-walk.h"
52 #include "tree-dfa.h"
53 #include "ipa-chkp.h"
54 #include "params.h"
56 /* Pointer Bounds Checker instruments code with memory checks to find
57 out-of-bounds memory accesses. Checks are performed by computing
58 bounds for each pointer and then comparing address of accessed
59 memory before pointer dereferencing.
61 1. Function clones.
63 See ipa-chkp.c.
65 2. Instrumentation.
67 There are few things to instrument:
69 a) Memory accesses - add checker calls to check address of accessed memory
70 against bounds of dereferenced pointer. Obviously safe memory
71 accesses like static variable access does not have to be instrumented
72 with checks.
74 Example:
76 val_2 = *p_1;
78 with 4 bytes access is transformed into:
80 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
81 D.1_4 = p_1 + 3;
82 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
83 val_2 = *p_1;
85 where __bound_tmp.1_3 are bounds computed for pointer p_1,
86 __builtin___chkp_bndcl is a lower bound check and
87 __builtin___chkp_bndcu is an upper bound check.
89 b) Pointer stores.
91 When pointer is stored in memory we need to store its bounds. To
92 achieve compatibility of instrumented code with regular codes
93 we have to keep data layout and store bounds in special bound tables
94 via special checker call. Implementation of bounds table may vary for
95 different platforms. It has to associate pointer value and its
96 location (it is required because we may have two equal pointers
97 with different bounds stored in different places) with bounds.
98 Another checker builtin allows to get bounds for specified pointer
99 loaded from specified location.
101 Example:
103 buf1[i_1] = &buf2;
105 is transformed into:
107 buf1[i_1] = &buf2;
108 D.1_2 = &buf1[i_1];
109 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
111 where __bound_tmp.1_2 are bounds of &buf2.
113 c) Static initialization.
115 The special case of pointer store is static pointer initialization.
116 Bounds initialization is performed in a few steps:
117 - register all static initializations in front-end using
118 chkp_register_var_initializer
119 - when file compilation finishes we create functions with special
120 attribute 'chkp ctor' and put explicit initialization code
121 (assignments) for all statically initialized pointers.
122 - when checker constructor is compiled checker pass adds required
123 bounds initialization for all statically initialized pointers
124 - since we do not actually need excess pointers initialization
125 in checker constructor we remove such assignments from them
127 d) Calls.
129 For each call in the code we add additional arguments to pass
130 bounds for pointer arguments. We determine type of call arguments
131 using arguments list from function declaration; if function
132 declaration is not available we use function type; otherwise
133 (e.g. for unnamed arguments) we use type of passed value. Function
134 declaration/type is replaced with the instrumented one.
136 Example:
138 val_1 = foo (&buf1, &buf2, &buf1, 0);
140 is translated into:
142 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
143 &buf1, __bound_tmp.1_2, 0);
145 e) Returns.
147 If function returns a pointer value we have to return bounds also.
148 A new operand was added for return statement to hold returned bounds.
150 Example:
152 return &_buf1;
154 is transformed into
156 return &_buf1, __bound_tmp.1_1;
158 3. Bounds computation.
160 Compiler is fully responsible for computing bounds to be used for each
161 memory access. The first step for bounds computation is to find the
162    origin of the pointer dereferenced for memory access.  Based on the
163 origin we define a way to compute its bounds. There are just few
164 possible cases:
166 a) Pointer is returned by call.
168 In this case we use corresponding checker builtin method to obtain returned
169 bounds.
171 Example:
173 buf_1 = malloc (size_2);
174 foo (buf_1);
176 is translated into:
178 buf_1 = malloc (size_2);
179 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
180 foo (buf_1, __bound_tmp.1_3);
182 b) Pointer is an address of an object.
184 In this case compiler tries to compute objects size and create corresponding
185 bounds. If object has incomplete type then special checker builtin is used to
186 obtain its size at runtime.
188 Example:
190 foo ()
192 <unnamed type> __bound_tmp.3;
193 static int buf[100];
195 <bb 3>:
196 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
198 <bb 2>:
199 return &buf, __bound_tmp.3_2;
202 Example:
204 Address of an object 'extern int buf[]' with incomplete type is
205 returned.
207 foo ()
209 <unnamed type> __bound_tmp.4;
210 long unsigned int __size_tmp.3;
212 <bb 3>:
213 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
214 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
216 <bb 2>:
217 return &buf, __bound_tmp.4_3;
220 c) Pointer is the result of object narrowing.
222 It happens when we use pointer to an object to compute pointer to a part
223 of an object. E.g. we take pointer to a field of a structure. In this
224 case we perform bounds intersection using bounds of original object and
225 bounds of object's part (which are computed basing on its type).
227 There may be some debatable questions about when narrowing should occur
228 and when it should not. To avoid false bound violations in correct
229 programs we do not perform narrowing when address of an array element is
230 obtained (it has address of the whole array) and when address of the first
231 structure field is obtained (because it is guaranteed to be equal to
232 address of the whole structure and it is legal to cast it back to structure).
234 Default narrowing behavior may be changed using compiler flags.
236 Example:
238 In this example address of the second structure field is returned.
240 foo (struct A * p, __bounds_type __bounds_of_p)
242 <unnamed type> __bound_tmp.3;
243 int * _2;
244 int * _5;
246 <bb 2>:
247 _5 = &p_1(D)->second_field;
248 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
249 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
250 __bounds_of_p_3(D));
251 _2 = &p_1(D)->second_field;
252 return _2, __bound_tmp.3_8;
255 Example:
257 In this example address of the first field of array element is returned.
259 foo (struct A * p, __bounds_type __bounds_of_p, int i)
261 long unsigned int _3;
262 long unsigned int _4;
263 struct A * _6;
264 int * _7;
266 <bb 2>:
267 _3 = (long unsigned int) i_1(D);
268 _4 = _3 * 8;
269 _6 = p_5(D) + _4;
270 _7 = &_6->first_field;
271 return _7, __bounds_of_p_2(D);
275 d) Pointer is the result of pointer arithmetic or type cast.
277 In this case bounds of the base pointer are used. In case of binary
278 operation producing a pointer we are analyzing data flow further
279 looking for operand's bounds. One operand is considered as a base
280 if it has some valid bounds. If we fall into a case when none of
281 operands (or both of them) has valid bounds, a default bounds value
282 is used.
284 Trying to find out bounds for binary operations we may fall into
285 cyclic dependencies for pointers. To avoid infinite recursion all
286 walked phi nodes instantly obtain corresponding bounds but created
287 bounds are marked as incomplete. It helps us to stop DF walk during
288 bounds search.
290 When we reach pointer source, some args of incomplete bounds phi obtain
291 valid bounds and those values are propagated further through phi nodes.
292 If no valid bounds were found for phi node then we mark its result as
293 invalid bounds. Process stops when all incomplete bounds become either
294 valid or invalid and we are able to choose a pointer base.
296 e) Pointer is loaded from the memory.
298 In this case we just need to load bounds from the bounds table.
300 Example:
302 foo ()
304 <unnamed type> __bound_tmp.3;
305 static int * buf;
306 int * _2;
308 <bb 2>:
309 _2 = buf;
310 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
311 return _2, __bound_tmp.3_4;
316 typedef void (*assign_handler)(tree, tree, void *);
318 static tree chkp_get_zero_bounds ();
319 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
320 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
321 gimple_stmt_iterator *iter);
322 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
323 tree *elt, bool *safe,
324 bool *bitfield,
325 tree *bounds,
326 gimple_stmt_iterator *iter,
327 bool innermost_bounds);
329 #define chkp_bndldx_fndecl \
330 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
331 #define chkp_bndstx_fndecl \
332 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
333 #define chkp_checkl_fndecl \
334 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
335 #define chkp_checku_fndecl \
336 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
337 #define chkp_bndmk_fndecl \
338 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
339 #define chkp_ret_bnd_fndecl \
340 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
341 #define chkp_intersect_fndecl \
342 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
343 #define chkp_narrow_bounds_fndecl \
344 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
345 #define chkp_sizeof_fndecl \
346 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
347 #define chkp_extract_lower_fndecl \
348 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
349 #define chkp_extract_upper_fndecl \
350 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
352 static GTY (()) tree chkp_uintptr_type;
354 static GTY (()) tree chkp_zero_bounds_var;
355 static GTY (()) tree chkp_none_bounds_var;
357 static GTY (()) basic_block entry_block;
358 static GTY (()) tree zero_bounds;
359 static GTY (()) tree none_bounds;
360 static GTY (()) tree incomplete_bounds;
361 static GTY (()) tree tmp_var;
362 static GTY (()) tree size_tmp_var;
363 static GTY (()) bitmap chkp_abnormal_copies;
365 struct hash_set<tree> *chkp_invalid_bounds;
366 struct hash_set<tree> *chkp_completed_bounds_set;
367 struct hash_map<tree, tree> *chkp_reg_bounds;
368 struct hash_map<tree, tree> *chkp_bound_vars;
369 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
370 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
371 struct hash_map<tree, tree> *chkp_bounds_map;
372 struct hash_map<tree, tree> *chkp_static_var_bounds;
374 static bool in_chkp_pass;
376 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
377 #define CHKP_SIZE_TMP_NAME "__size_tmp"
378 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
379 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
380 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
381 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
382 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
384 /* Static checker constructors may become very large and their
385 compilation with optimization may take too much time.
386 Therefore we put a limit to number of statements in one
387 constructor. Tests with 100 000 statically initialized
388 pointers showed following compilation times on Sandy Bridge
389 server (used -O2):
390 limit 100 => ~18 sec.
391 limit 300 => ~22 sec.
392 limit 1000 => ~30 sec.
393 limit 3000 => ~49 sec.
394 limit 5000 => ~55 sec.
395 limit 10000 => ~76 sec.
396 limit 100000 => ~532 sec. */
397 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
399 struct chkp_ctor_stmt_list
401 tree stmts;
402 int avail;
405 /* Return 1 if function FNDECL is instrumented by Pointer
406 Bounds Checker. */
407 bool
408 chkp_function_instrumented_p (tree fndecl)
410 return fndecl
411 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
414 /* Mark function FNDECL as instrumented. */
415 void
416 chkp_function_mark_instrumented (tree fndecl)
418 if (chkp_function_instrumented_p (fndecl))
419 return;
421 DECL_ATTRIBUTES (fndecl)
422 = tree_cons (get_identifier ("chkp instrumented"), NULL,
423 DECL_ATTRIBUTES (fndecl));
426 /* Return true when STMT is builtin call to instrumentation function
427 corresponding to CODE. */
429 bool
430 chkp_gimple_call_builtin_p (gimple *call,
431 enum built_in_function code)
433 tree fndecl;
434 if (is_gimple_call (call)
435 && (fndecl = targetm.builtin_chkp_function (code))
436 && gimple_call_fndecl (call) == fndecl)
437 return true;
438 return false;
441 /* Emit code to build zero bounds and return RTL holding
442 the result. */
444 chkp_expand_zero_bounds ()
446 tree zero_bnd;
448 if (flag_chkp_use_static_const_bounds)
449 zero_bnd = chkp_get_zero_bounds_var ();
450 else
451 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
452 integer_zero_node);
453 return expand_normal (zero_bnd);
456 /* Emit code to store zero bounds for PTR located at MEM. */
457 void
458 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
460 tree zero_bnd, bnd, addr, bndstx;
462 if (flag_chkp_use_static_const_bounds)
463 zero_bnd = chkp_get_zero_bounds_var ();
464 else
465 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
466 integer_zero_node);
467 bnd = make_tree (pointer_bounds_type_node,
468 assign_temp (pointer_bounds_type_node, 0, 1));
469 addr = build1 (ADDR_EXPR,
470 build_pointer_type (TREE_TYPE (mem)), mem);
471 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
473 expand_assignment (bnd, zero_bnd, false);
474 expand_normal (bndstx);
477 /* Build retbnd call for returned value RETVAL.
479 If BNDVAL is not NULL then result is stored
480 in it. Otherwise a temporary is created to
481 hold returned value.
483 GSI points to a position for a retbnd call
484 and is set to created stmt.
486 Cgraph edge is created for a new call if
487 UPDATE_EDGE is 1.
489 Obtained bounds are returned. */
490 tree
491 chkp_insert_retbnd_call (tree bndval, tree retval,
492 gimple_stmt_iterator *gsi)
494 gimple *call;
496 if (!bndval)
497 bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
499 call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
500 gimple_call_set_lhs (call, bndval);
501 gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
503 return bndval;
506 /* Build a GIMPLE_CALL identical to CALL but skipping bounds
507 arguments. */
509 gcall *
510 chkp_copy_call_skip_bounds (gcall *call)
512 bitmap bounds;
513 unsigned i;
515 bitmap_obstack_initialize (NULL);
516 bounds = BITMAP_ALLOC (NULL);
518 for (i = 0; i < gimple_call_num_args (call); i++)
519 if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
520 bitmap_set_bit (bounds, i);
522 if (!bitmap_empty_p (bounds))
523 call = gimple_call_copy_skip_args (call, bounds);
524 gimple_call_set_with_bounds (call, false);
526 BITMAP_FREE (bounds);
527 bitmap_obstack_release (NULL);
529 return call;
532 /* Redirect edge E to the correct node according to call_stmt.
533 Return 1 if bounds removal from call_stmt should be done
534 instead of redirection. */
536 bool
537 chkp_redirect_edge (cgraph_edge *e)
539 bool instrumented = false;
540 tree decl = e->callee->decl;
542 if (e->callee->instrumentation_clone
543 || chkp_function_instrumented_p (decl))
544 instrumented = true;
546 if (instrumented
547 && !gimple_call_with_bounds_p (e->call_stmt))
548 e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
549 else if (!instrumented
550 && gimple_call_with_bounds_p (e->call_stmt)
551 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
552 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
553 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
555 if (e->callee->instrumented_version)
556 e->redirect_callee (e->callee->instrumented_version);
557 else
559 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
560 /* Avoid bounds removal if all args will be removed. */
561 if (!args || TREE_VALUE (args) != void_type_node)
562 return true;
563 else
564 gimple_call_set_with_bounds (e->call_stmt, false);
568 return false;
571 /* Mark statement S to not be instrumented. */
572 static void
573 chkp_mark_stmt (gimple *s)
575 gimple_set_plf (s, GF_PLF_1, true);
578 /* Mark statement S to be instrumented. */
579 static void
580 chkp_unmark_stmt (gimple *s)
582 gimple_set_plf (s, GF_PLF_1, false);
585 /* Return 1 if statement S should not be instrumented. */
586 static bool
587 chkp_marked_stmt_p (gimple *s)
589 return gimple_plf (s, GF_PLF_1);
592 /* Get var to be used for bound temps. */
593 static tree
594 chkp_get_tmp_var (void)
596 if (!tmp_var)
597 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
599 return tmp_var;
602 /* Get SSA_NAME to be used as temp. */
603 static tree
604 chkp_get_tmp_reg (gimple *stmt)
606 if (in_chkp_pass)
607 return make_ssa_name (chkp_get_tmp_var (), stmt);
609 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
610 CHKP_BOUND_TMP_NAME);
613 /* Get var to be used for size temps. */
614 static tree
615 chkp_get_size_tmp_var (void)
617 if (!size_tmp_var)
618 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
620 return size_tmp_var;
623 /* Register bounds BND for address of OBJ. */
624 static void
625 chkp_register_addr_bounds (tree obj, tree bnd)
627 if (bnd == incomplete_bounds)
628 return;
630 chkp_reg_addr_bounds->put (obj, bnd);
632 if (dump_file && (dump_flags & TDF_DETAILS))
634 fprintf (dump_file, "Regsitered bound ");
635 print_generic_expr (dump_file, bnd, 0);
636 fprintf (dump_file, " for address of ");
637 print_generic_expr (dump_file, obj, 0);
638 fprintf (dump_file, "\n");
642 /* Return bounds registered for address of OBJ. */
643 static tree
644 chkp_get_registered_addr_bounds (tree obj)
646 tree *slot = chkp_reg_addr_bounds->get (obj);
647 return slot ? *slot : NULL_TREE;
650 /* Mark BOUNDS as completed. */
651 static void
652 chkp_mark_completed_bounds (tree bounds)
654 chkp_completed_bounds_set->add (bounds);
656 if (dump_file && (dump_flags & TDF_DETAILS))
658 fprintf (dump_file, "Marked bounds ");
659 print_generic_expr (dump_file, bounds, 0);
660 fprintf (dump_file, " as completed\n");
664 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
665 static bool
666 chkp_completed_bounds (tree bounds)
668 return chkp_completed_bounds_set->contains (bounds);
671 /* Clear comleted bound marks. */
672 static void
673 chkp_erase_completed_bounds (void)
675 delete chkp_completed_bounds_set;
676 chkp_completed_bounds_set = new hash_set<tree>;
679 /* Mark BOUNDS associated with PTR as incomplete. */
680 static void
681 chkp_register_incomplete_bounds (tree bounds, tree ptr)
683 chkp_incomplete_bounds_map->put (bounds, ptr);
685 if (dump_file && (dump_flags & TDF_DETAILS))
687 fprintf (dump_file, "Regsitered incomplete bounds ");
688 print_generic_expr (dump_file, bounds, 0);
689 fprintf (dump_file, " for ");
690 print_generic_expr (dump_file, ptr, 0);
691 fprintf (dump_file, "\n");
695 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
696 static bool
697 chkp_incomplete_bounds (tree bounds)
699 if (bounds == incomplete_bounds)
700 return true;
702 if (chkp_completed_bounds (bounds))
703 return false;
705 return chkp_incomplete_bounds_map->get (bounds) != NULL;
708 /* Clear incomleted bound marks. */
709 static void
710 chkp_erase_incomplete_bounds (void)
712 delete chkp_incomplete_bounds_map;
713 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
716 /* Build and return bndmk call which creates bounds for structure
717 pointed by PTR. Structure should have complete type. */
718 tree
719 chkp_make_bounds_for_struct_addr (tree ptr)
721 tree type = TREE_TYPE (ptr);
722 tree size;
724 gcc_assert (POINTER_TYPE_P (type));
726 size = TYPE_SIZE (TREE_TYPE (type));
728 gcc_assert (size);
730 return build_call_nary (pointer_bounds_type_node,
731 build_fold_addr_expr (chkp_bndmk_fndecl),
732 2, ptr, size);
735 /* Traversal function for chkp_may_finish_incomplete_bounds.
736 Set RES to 0 if at least one argument of phi statement
737 defining bounds (passed in KEY arg) is unknown.
738 Traversal stops when first unknown phi argument is found. */
739 bool
740 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
741 bool *res)
743 gimple *phi;
744 unsigned i;
746 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
748 phi = SSA_NAME_DEF_STMT (bounds);
750 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
752 for (i = 0; i < gimple_phi_num_args (phi); i++)
754 tree phi_arg = gimple_phi_arg_def (phi, i);
755 if (!phi_arg)
757 *res = false;
758 /* Do not need to traverse further. */
759 return false;
763 return true;
766 /* Return 1 if all phi nodes created for bounds have their
767 arguments computed. */
768 static bool
769 chkp_may_finish_incomplete_bounds (void)
771 bool res = true;
773 chkp_incomplete_bounds_map
774 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
776 return res;
779 /* Helper function for chkp_finish_incomplete_bounds.
780 Recompute args for bounds phi node. */
781 bool
782 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
783 void *res ATTRIBUTE_UNUSED)
785 tree ptr = *slot;
786 gphi *bounds_phi;
787 gphi *ptr_phi;
788 unsigned i;
790 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
791 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
793 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
794 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
796 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
798 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
799 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
801 add_phi_arg (bounds_phi, bound_arg,
802 gimple_phi_arg_edge (ptr_phi, i),
803 UNKNOWN_LOCATION);
806 return true;
809 /* Mark BOUNDS as invalid. */
810 static void
811 chkp_mark_invalid_bounds (tree bounds)
813 chkp_invalid_bounds->add (bounds);
815 if (dump_file && (dump_flags & TDF_DETAILS))
817 fprintf (dump_file, "Marked bounds ");
818 print_generic_expr (dump_file, bounds, 0);
819 fprintf (dump_file, " as invalid\n");
823 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
824 static bool
825 chkp_valid_bounds (tree bounds)
827 if (bounds == zero_bounds || bounds == none_bounds)
828 return false;
830 return !chkp_invalid_bounds->contains (bounds);
833 /* Helper function for chkp_finish_incomplete_bounds.
834 Check all arguments of phi nodes trying to find
835 valid completed bounds. If there is at least one
836 such arg then bounds produced by phi node are marked
837 as valid completed bounds and all phi args are
838 recomputed. */
839 bool
840 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
842 gimple *phi;
843 unsigned i;
845 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
847 if (chkp_completed_bounds (bounds))
848 return true;
850 phi = SSA_NAME_DEF_STMT (bounds);
852 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
854 for (i = 0; i < gimple_phi_num_args (phi); i++)
856 tree phi_arg = gimple_phi_arg_def (phi, i);
858 gcc_assert (phi_arg);
860 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
862 *res = true;
863 chkp_mark_completed_bounds (bounds);
864 chkp_recompute_phi_bounds (bounds, slot, NULL);
865 return true;
869 return true;
872 /* Helper function for chkp_finish_incomplete_bounds.
873 Marks all incompleted bounds as invalid. */
874 bool
875 chkp_mark_invalid_bounds_walker (tree const &bounds,
876 tree *slot ATTRIBUTE_UNUSED,
877 void *res ATTRIBUTE_UNUSED)
879 if (!chkp_completed_bounds (bounds))
881 chkp_mark_invalid_bounds (bounds);
882 chkp_mark_completed_bounds (bounds);
884 return true;
887 /* When all bound phi nodes have all their args computed
888 we have enough info to find valid bounds. We iterate
889 through all incompleted bounds searching for valid
890 bounds. Found valid bounds are marked as completed
891 and all remaining incompleted bounds are recomputed.
892 Process continues until no new valid bounds may be
893 found. All remained incompleted bounds are marked as
894 invalid (i.e. have no valid source of bounds). */
895 static void
896 chkp_finish_incomplete_bounds (void)
898 bool found_valid;
900 while (found_valid)
902 found_valid = false;
904 chkp_incomplete_bounds_map->
905 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
907 if (found_valid)
908 chkp_incomplete_bounds_map->
909 traverse<void *, chkp_recompute_phi_bounds> (NULL);
912 chkp_incomplete_bounds_map->
913 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
914 chkp_incomplete_bounds_map->
915 traverse<void *, chkp_recompute_phi_bounds> (NULL);
917 chkp_erase_completed_bounds ();
918 chkp_erase_incomplete_bounds ();
921 /* Return 1 if type TYPE is a pointer type or a
922 structure having a pointer type as one of its fields.
923 Otherwise return 0. */
924 bool
925 chkp_type_has_pointer (const_tree type)
927 bool res = false;
929 if (BOUNDED_TYPE_P (type))
930 res = true;
931 else if (RECORD_OR_UNION_TYPE_P (type))
933 tree field;
935 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
936 if (TREE_CODE (field) == FIELD_DECL)
937 res = res || chkp_type_has_pointer (TREE_TYPE (field));
939 else if (TREE_CODE (type) == ARRAY_TYPE)
940 res = chkp_type_has_pointer (TREE_TYPE (type));
942 return res;
945 unsigned
946 chkp_type_bounds_count (const_tree type)
948 unsigned res = 0;
950 if (!type)
951 res = 0;
952 else if (BOUNDED_TYPE_P (type))
953 res = 1;
954 else if (RECORD_OR_UNION_TYPE_P (type))
956 bitmap have_bound;
958 bitmap_obstack_initialize (NULL);
959 have_bound = BITMAP_ALLOC (NULL);
960 chkp_find_bound_slots (type, have_bound);
961 res = bitmap_count_bits (have_bound);
962 BITMAP_FREE (have_bound);
963 bitmap_obstack_release (NULL);
966 return res;
969 /* Get bounds associated with NODE via
970 chkp_set_bounds call. */
971 tree
972 chkp_get_bounds (tree node)
974 tree *slot;
976 if (!chkp_bounds_map)
977 return NULL_TREE;
979 slot = chkp_bounds_map->get (node);
980 return slot ? *slot : NULL_TREE;
983 /* Associate bounds VAL with NODE. */
984 void
985 chkp_set_bounds (tree node, tree val)
987 if (!chkp_bounds_map)
988 chkp_bounds_map = new hash_map<tree, tree>;
990 chkp_bounds_map->put (node, val);
993 /* Check if statically initialized variable VAR require
994 static bounds initialization. If VAR is added into
995 bounds initlization list then 1 is returned. Otherwise
996 return 0. */
997 extern bool
998 chkp_register_var_initializer (tree var)
1000 if (!flag_check_pointer_bounds
1001 || DECL_INITIAL (var) == error_mark_node)
1002 return false;
1004 gcc_assert (TREE_CODE (var) == VAR_DECL);
1005 gcc_assert (DECL_INITIAL (var));
1007 if (TREE_STATIC (var)
1008 && chkp_type_has_pointer (TREE_TYPE (var)))
1010 varpool_node::get_create (var)->need_bounds_init = 1;
1011 return true;
1014 return false;
1017 /* Helper function for chkp_finish_file.
1019 Add new modification statement (RHS is assigned to LHS)
1020 into list of static initializer statementes (passed in ARG).
1021 If statements list becomes too big, emit checker constructor
1022 and start the new one. */
1023 static void
1024 chkp_add_modification_to_stmt_list (tree lhs,
1025 tree rhs,
1026 void *arg)
1028 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
1029 tree modify;
1031 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1032 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1034 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1035 append_to_statement_list (modify, &stmts->stmts);
1037 stmts->avail--;
1040 /* Build and return ADDR_EXPR for specified object OBJ. */
1041 static tree
1042 chkp_build_addr_expr (tree obj)
1044 return TREE_CODE (obj) == TARGET_MEM_REF
1045 ? tree_mem_ref_addr (ptr_type_node, obj)
1046 : build_fold_addr_expr (obj);
1049 /* Helper function for chkp_finish_file.
1050 Initialize bound variable BND_VAR with bounds of variable
1051 VAR to statements list STMTS. If statements list becomes
1052 too big, emit checker constructor and start the new one. */
1053 static void
1054 chkp_output_static_bounds (tree bnd_var, tree var,
1055 struct chkp_ctor_stmt_list *stmts)
1057 tree lb, ub, size;
1059 if (TREE_CODE (var) == STRING_CST)
1061 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1062 size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
1064 else if (DECL_SIZE (var)
1065 && !chkp_variable_size_type (TREE_TYPE (var)))
1067 /* Compute bounds using statically known size. */
1068 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1069 size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
1071 else
1073 /* Compute bounds using dynamic size. */
1074 tree call;
1076 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1077 call = build1 (ADDR_EXPR,
1078 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
1079 chkp_sizeof_fndecl);
1080 size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
1081 call, 1, var);
1083 if (flag_chkp_zero_dynamic_size_as_infinite)
1085 tree max_size, cond;
1087 max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
1088 cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
1089 size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
1092 size = size_binop (MINUS_EXPR, size, size_one_node);
1095 ub = size_binop (PLUS_EXPR, lb, size);
1096 stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
1097 &stmts->stmts);
1098 if (stmts->avail <= 0)
1100 cgraph_build_static_cdtor ('B', stmts->stmts,
1101 MAX_RESERVED_INIT_PRIORITY + 2);
1102 stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
1103 stmts->stmts = NULL;
1107 /* Return entry block to be used for checker initilization code.
1108 Create new block if required. */
1109 static basic_block
1110 chkp_get_entry_block (void)
1112 if (!entry_block)
1113 entry_block
1114 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1116 return entry_block;
1119 /* Return a bounds var to be used for pointer var PTR_VAR. */
1120 static tree
1121 chkp_get_bounds_var (tree ptr_var)
1123 tree bnd_var;
1124 tree *slot;
1126 slot = chkp_bound_vars->get (ptr_var);
1127 if (slot)
1128 bnd_var = *slot;
1129 else
1131 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1132 CHKP_BOUND_TMP_NAME);
1133 chkp_bound_vars->put (ptr_var, bnd_var);
1136 return bnd_var;
1139 /* If BND is an abnormal bounds copy, return a copied value.
1140 Otherwise return BND. */
1141 static tree
1142 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
1144 if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1146 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1147 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1148 bnd = gimple_assign_rhs1 (bnd_def);
1151 return bnd;
1154 /* Register bounds BND for object PTR in global bounds table.
1155 A copy of bounds may be created for abnormal ssa names.
1156 Returns bounds to use for PTR. */
1157 static tree
1158 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1160 bool abnormal_ptr;
1162 if (!chkp_reg_bounds)
1163 return bnd;
1165 /* Do nothing if bounds are incomplete_bounds
1166 because it means bounds will be recomputed. */
1167 if (bnd == incomplete_bounds)
1168 return bnd;
1170 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1171 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1172 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1174 /* A single bounds value may be reused multiple times for
1175 different pointer values. It may cause coalescing issues
1176 for abnormal SSA names. To avoid it we create a bounds
1177 copy in case it is computed for abnormal SSA name.
1179 We also cannot reuse such created copies for other pointers */
1180 if (abnormal_ptr
1181 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1183 tree bnd_var = NULL_TREE;
1185 if (abnormal_ptr)
1187 if (SSA_NAME_VAR (ptr))
1188 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1190 else
1191 bnd_var = chkp_get_tmp_var ();
1193 /* For abnormal copies we may just find original
1194 bounds and use them. */
1195 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1196 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1197 /* For undefined values we usually use none bounds
1198 value but in case of abnormal edge it may cause
1199 coalescing failures. Use default definition of
1200 bounds variable instead to avoid it. */
1201 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1202 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1204 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1206 if (dump_file && (dump_flags & TDF_DETAILS))
1208 fprintf (dump_file, "Using default def bounds ");
1209 print_generic_expr (dump_file, bnd, 0);
1210 fprintf (dump_file, " for abnormal default def SSA name ");
1211 print_generic_expr (dump_file, ptr, 0);
1212 fprintf (dump_file, "\n");
1215 else
1217 tree copy;
1218 gimple *def = SSA_NAME_DEF_STMT (ptr);
1219 gimple *assign;
1220 gimple_stmt_iterator gsi;
1222 if (bnd_var)
1223 copy = make_ssa_name (bnd_var);
1224 else
1225 copy = make_temp_ssa_name (pointer_bounds_type_node,
1226 NULL,
1227 CHKP_BOUND_TMP_NAME);
1228 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1229 assign = gimple_build_assign (copy, bnd);
1231 if (dump_file && (dump_flags & TDF_DETAILS))
1233 fprintf (dump_file, "Creating a copy of bounds ");
1234 print_generic_expr (dump_file, bnd, 0);
1235 fprintf (dump_file, " for abnormal SSA name ");
1236 print_generic_expr (dump_file, ptr, 0);
1237 fprintf (dump_file, "\n");
1240 if (gimple_code (def) == GIMPLE_NOP)
1242 gsi = gsi_last_bb (chkp_get_entry_block ());
1243 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1244 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1245 else
1246 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1248 else
1250 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1251 /* Sometimes (e.g. when we load a pointer from a
1252 memory) bounds are produced later than a pointer.
1253 We need to insert bounds copy appropriately. */
1254 if (gimple_code (bnd_def) != GIMPLE_NOP
1255 && stmt_dominates_stmt_p (def, bnd_def))
1256 gsi = gsi_for_stmt (bnd_def);
1257 else
1258 gsi = gsi_for_stmt (def);
1259 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1262 bnd = copy;
1265 if (abnormal_ptr)
1266 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1269 chkp_reg_bounds->put (ptr, bnd);
1271 if (dump_file && (dump_flags & TDF_DETAILS))
1273 fprintf (dump_file, "Regsitered bound ");
1274 print_generic_expr (dump_file, bnd, 0);
1275 fprintf (dump_file, " for pointer ");
1276 print_generic_expr (dump_file, ptr, 0);
1277 fprintf (dump_file, "\n");
1280 return bnd;
1283 /* Get bounds registered for object PTR in global bounds table. */
1284 static tree
1285 chkp_get_registered_bounds (tree ptr)
1287 tree *slot;
1289 if (!chkp_reg_bounds)
1290 return NULL_TREE;
1292 slot = chkp_reg_bounds->get (ptr);
1293 return slot ? *slot : NULL_TREE;
/* Add bound retvals to return statement pointed by GSI.

   For a return of a value with bounded (pointer) type, the bounds of
   the returned value are computed, registered for the function's
   DECL_RESULT and attached to the GIMPLE_RETURN via its retbnd
   operand.  Returns with no value or non-pointer results are left
   unchanged (only re-validated via update_stmt).  */

static void
chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
{
  greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
  tree retval = gimple_return_retval (ret);
  tree ret_decl = DECL_RESULT (cfun->decl);
  tree bounds;

  if (!retval)
    return;

  if (BOUNDED_P (ret_decl))
    {
      bounds = chkp_find_bounds (retval, gsi);
      bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
      gimple_return_set_retbnd (ret, bounds);
    }

  update_stmt (ret);
}
1319 /* Force OP to be suitable for using as an argument for call.
1320 New statements (if any) go to SEQ. */
1321 static tree
1322 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1324 gimple_seq stmts;
1325 gimple_stmt_iterator si;
1327 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1329 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1330 chkp_mark_stmt (gsi_stmt (si));
1332 gimple_seq_add_seq (seq, stmts);
1334 return op;
/* Generate lower bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.

   No check is emitted when bounds are the (infinite) zero bounds in
   a not-instrumented function, or when checks for the access
   direction (read/write) are disabled by flags.  */
static void
chkp_check_lower (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* DIRFLAG == integer_zero_node means a read access.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  /* DIRFLAG == integer_one_node means a write access.  */
  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* ADDR may be an arbitrary expression; gimplify it first.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
/* Generate upper bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.

   Mirrors chkp_check_lower but emits a call to
   CHKP_CHECKU_FNDECL instead of CHKP_CHECKL_FNDECL.  */
static void
chkp_check_upper (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* DIRFLAG == integer_zero_node means a read access.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  /* DIRFLAG == integer_one_node means a write access.  */
  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* ADDR may be an arbitrary expression; gimplify it first.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
1445 /* Replace call to _bnd_chk_* pointed by GSI with
1446 bndcu and bndcl calls. DIRFLAG determines whether
1447 check is for read or write. */
1449 void
1450 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1451 tree dirflag)
1453 gimple_stmt_iterator call_iter = *gsi;
1454 gimple *call = gsi_stmt (*gsi);
1455 tree fndecl = gimple_call_fndecl (call);
1456 tree addr = gimple_call_arg (call, 0);
1457 tree bounds = chkp_find_bounds (addr, gsi);
1459 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1460 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1461 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1463 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1464 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1466 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1468 tree size = gimple_call_arg (call, 1);
1469 addr = fold_build_pointer_plus (addr, size);
1470 addr = fold_build_pointer_plus_hwi (addr, -1);
1471 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1474 gsi_remove (&call_iter, true);
1477 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1478 corresponding bounds extract call. */
1480 void
1481 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1483 gimple *call = gsi_stmt (*gsi);
1484 tree fndecl = gimple_call_fndecl (call);
1485 tree addr = gimple_call_arg (call, 0);
1486 tree bounds = chkp_find_bounds (addr, gsi);
1487 gimple *extract;
1489 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1490 fndecl = chkp_extract_lower_fndecl;
1491 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1492 fndecl = chkp_extract_upper_fndecl;
1493 else
1494 gcc_unreachable ();
1496 extract = gimple_build_call (fndecl, 1, bounds);
1497 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1498 chkp_mark_stmt (extract);
1500 gsi_replace (gsi, extract, false);
1503 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1504 static tree
1505 chkp_build_component_ref (tree obj, tree field)
1507 tree res;
1509 /* If object is TMR then we do not use component_ref but
1510 add offset instead. We need it to be able to get addr
1511 of the reasult later. */
1512 if (TREE_CODE (obj) == TARGET_MEM_REF)
1514 tree offs = TMR_OFFSET (obj);
1515 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1516 offs, DECL_FIELD_OFFSET (field));
1518 gcc_assert (offs);
1520 res = copy_node (obj);
1521 TREE_TYPE (res) = TREE_TYPE (field);
1522 TMR_OFFSET (res) = offs;
1524 else
1525 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1527 return res;
1530 /* Return ARRAY_REF for array ARR and index IDX with
1531 specified element type ETYPE and element size ESIZE. */
1532 static tree
1533 chkp_build_array_ref (tree arr, tree etype, tree esize,
1534 unsigned HOST_WIDE_INT idx)
1536 tree index = build_int_cst (size_type_node, idx);
1537 tree res;
1539 /* If object is TMR then we do not use array_ref but
1540 add offset instead. We need it to be able to get addr
1541 of the reasult later. */
1542 if (TREE_CODE (arr) == TARGET_MEM_REF)
1544 tree offs = TMR_OFFSET (arr);
1546 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1547 esize, index);
1548 gcc_assert(esize);
1550 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1551 offs, esize);
1552 gcc_assert (offs);
1554 res = copy_node (arr);
1555 TREE_TYPE (res) = etype;
1556 TMR_OFFSET (res) = offs;
1558 else
1559 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1561 return res;
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   ITER points a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.

   Records and unions are walked field by field; arrays element by
   element.  Arrays of unknown or empty (max index -1) domain are
   skipped.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  /* Load the pointer into a temporary SSA name so its
	     bounds can be looked up after the load.  */
	  tree temp = make_temp_ssa_name (type, NULL, "");
	  gimple *assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    /* Field offset in bits: bit offset plus byte offset * 8.  */
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;

	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      if (!maxval || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.

   This is the "which slots need bounds" analogue of
   chkp_find_bounds_for_elem, but purely type-driven: it walks the
   type, not a value, and records slot positions only.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    /* Field offset in bits: bit offset plus byte offset * 8.  */
	    HOST_WIDE_INT field_offs = 0;
	    if (DECL_FIELD_BIT_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays with unknown, non-constant or empty domain.  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
/* Fill bitmap RES with information about bounds for
   type TYPE.  See chkp_find_bound_slots_1 for more
   details.  RES is cleared first, so it holds exactly the slots
   required by TYPE on return.  */
void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}
1694 /* Return 1 if call to FNDECL should be instrumented
1695 and 0 otherwise. */
1697 static bool
1698 chkp_instrument_normal_builtin (tree fndecl)
1700 switch (DECL_FUNCTION_CODE (fndecl))
1702 case BUILT_IN_STRLEN:
1703 case BUILT_IN_STRCPY:
1704 case BUILT_IN_STRNCPY:
1705 case BUILT_IN_STPCPY:
1706 case BUILT_IN_STPNCPY:
1707 case BUILT_IN_STRCAT:
1708 case BUILT_IN_STRNCAT:
1709 case BUILT_IN_MEMCPY:
1710 case BUILT_IN_MEMPCPY:
1711 case BUILT_IN_MEMSET:
1712 case BUILT_IN_MEMMOVE:
1713 case BUILT_IN_BZERO:
1714 case BUILT_IN_STRCMP:
1715 case BUILT_IN_STRNCMP:
1716 case BUILT_IN_BCMP:
1717 case BUILT_IN_MEMCMP:
1718 case BUILT_IN_MEMCPY_CHK:
1719 case BUILT_IN_MEMPCPY_CHK:
1720 case BUILT_IN_MEMMOVE_CHK:
1721 case BUILT_IN_MEMSET_CHK:
1722 case BUILT_IN_STRCPY_CHK:
1723 case BUILT_IN_STRNCPY_CHK:
1724 case BUILT_IN_STPCPY_CHK:
1725 case BUILT_IN_STPNCPY_CHK:
1726 case BUILT_IN_STRCAT_CHK:
1727 case BUILT_IN_STRNCAT_CHK:
1728 case BUILT_IN_MALLOC:
1729 case BUILT_IN_CALLOC:
1730 case BUILT_IN_REALLOC:
1731 return 1;
1733 default:
1734 return 0;
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.

   The function first filters out calls that must not be touched
   (internal fns, MD builtins, non-instrumentable functions) and
   rewrites the user-visible chkp builtins into internal checks,
   extracts, narrow and bndstx calls.  For remaining calls a new
   argument vector is built where every pointer argument is followed
   by its bounds; structures containing pointers contribute one
   bounds argument per pointer slot.  Finally the call is redirected
   to the instrumented clone (direct calls) or its fntype is adjusted
   (indirect calls).  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  /* Aggregate with embedded pointers: push bounds for each
	     pointer slot found by chkp_find_bounds_for_elem.  */
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Transfer SSA def ownership before replacing the statement.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.

   The variable is public and one-only so that identical bounds
   constants created in different translation units merge under LTO;
   an existing varpool symbol with the same assembler name is reused
   (and rejected with sorry() if it is not a bounds variable).  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires '%s' "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
/* Generate code to make bounds with specified lower bound LB and SIZE.
   if AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.

   Returns the SSA temporary holding the result of the bndmk call.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* LB and SIZE may be arbitrary expressions; gimplify both.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER only applies when an explicit position was given.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
2058 /* Return var holding zero bounds. */
2059 tree
2060 chkp_get_zero_bounds_var (void)
2062 if (!chkp_zero_bounds_var)
2063 chkp_zero_bounds_var
2064 = chkp_make_static_const_bounds (0, -1,
2065 CHKP_ZERO_BOUNDS_VAR_NAME);
2066 return chkp_zero_bounds_var;
2069 /* Return var holding none bounds. */
2070 tree
2071 chkp_get_none_bounds_var (void)
2073 if (!chkp_none_bounds_var)
2074 chkp_none_bounds_var
2075 = chkp_make_static_const_bounds (-1, 0,
2076 CHKP_NONE_BOUNDS_VAR_NAME);
2077 return chkp_none_bounds_var;
/* Return SSA_NAME used to represent zero bounds.

   Created lazily and cached in ZERO_BOUNDS.  With static const
   bounds enabled the value is loaded from the static zero-bounds
   variable in the entry block; otherwise a bndmk call with [0, 0)
   is emitted there.  */
static tree
chkp_get_zero_bounds (void)
{
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      zero_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
/* Return SSA_NAME used to represent none bounds.

   Created lazily and cached in NONE_BOUNDS.  With static const
   bounds enabled the value is loaded from the static none-bounds
   variable in the entry block; otherwise a bndmk call with
   lb = -1 and size = 2 (an always-failing range) is emitted.  */
static tree
chkp_get_none_bounds (void)
{
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      none_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
/* Return bounds to be used as a result of operation which
   should not create pointer (e.g. MULT_EXPR).  Currently the
   (infinite) zero bounds are used so such values never trap.  */
static tree
chkp_get_invalid_op_bounds (void)
{
  return chkp_get_zero_bounds ();
}
/* Return bounds to be used for loads of non-pointer values.
   Currently the (infinite) zero bounds are used.  */
static tree
chkp_get_nonpointer_load_bounds (void)
{
  return chkp_get_zero_bounds ();
}
/* Return 1 if may use bndret call to get bounds for pointer
   returned by CALL.

   True for narrow-bounds builtins, calls already carrying bounds,
   instrumentable user functions, the instrumented subset of normal
   builtins, and always_inline builtins that have an instrumented
   clone with a body; false for internal fns, MD builtins and
   non-instrumentable functions.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  if (gimple_call_internal_p (call))
    return false;

  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  return true;
}
/* Build bounds returned by CALL.

   Special cases handled without a retbnd call:
   - alloca/alloca_with_align: bounds are [lhs, lhs + size);
   - __bnd_set_ptr_bounds: bounds are taken from the builtin's args;
   - __bnd_init_ptr_bounds: zero bounds;
   - __bnd_null_ptr_bounds: none bounds;
   - __bnd_copy_ptr_bounds: bounds of the source pointer;
   - functions returning one of their arguments (ERF_RETURNS_ARG):
     bounds of that argument (skipping bounds args if present).
   Otherwise a __chkp_bndret call on the call's LHS is emitted, or
   zero bounds are used when the callee returns no bounds.

   The result is registered for the call's LHS and returned.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple *stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* RETARG counts only non-bounds arguments; skip bounds
	     arguments while locating the actual argument index.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call))
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2299 /* Return bounds used as returned by call
2300 which produced SSA name VAL. */
2301 gcall *
2302 chkp_retbnd_call_by_val (tree val)
2304 if (TREE_CODE (val) != SSA_NAME)
2305 return NULL;
2307 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2309 imm_use_iterator use_iter;
2310 use_operand_p use_p;
2311 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2312 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2313 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2314 return as_a <gcall *> (USE_STMT (use_p));
2316 return NULL;
2319 /* Check the next parameter for the given PARM is bounds
2320 and return it's default SSA_NAME (create if required). */
2321 static tree
2322 chkp_get_next_bounds_parm (tree parm)
2324 tree bounds = TREE_CHAIN (parm);
2325 gcc_assert (POINTER_BOUNDS_P (bounds));
2326 bounds = ssa_default_def (cfun, bounds);
2327 if (!bounds)
2329 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2330 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2332 return bounds;
2336 /* Return bounds to be used for input argument PARM. */
/* PARM is the default-def SSA name of a PARM_DECL.  Looks up already
   registered bounds first; otherwise chooses zero bounds (static chain,
   main's args under -fchkp-zero-input-bounds-for-main, non-pointer
   parms) or the dedicated bounds parameter added by instrumentation.  */
2337 static tree
2338 chkp_get_bound_for_parm (tree parm)
2339 tree decl = SSA_NAME_VAR (parm);
2340 tree bounds;
2342 gcc_assert (TREE_CODE (decl) == PARM_DECL);
2344 bounds = chkp_get_registered_bounds (parm);
2346 if (!bounds)
2347 bounds = chkp_get_registered_bounds (decl);
2349 if (!bounds)
2351 tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;
2353 /* For static chain param we return zero bounds
2354 because currently we do not check dereferences
2355 of this pointer. */
2356 if (cfun->static_chain_decl == decl)
2357 bounds = chkp_get_zero_bounds ();
2358 /* If non instrumented runtime is used then it may be useful
2359 to use zero bounds for input arguments of main
2360 function. */
2361 else if (flag_chkp_zero_input_bounds_for_main
2362 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
2363 "main") == 0)
2364 bounds = chkp_get_zero_bounds ();
2365 else if (BOUNDED_P (parm))
/* Pointer parm: bounds come in via the next (bounds) parameter.  */
2367 bounds = chkp_get_next_bounds_parm (decl);
2368 bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);
2370 if (dump_file && (dump_flags & TDF_DETAILS))
2372 fprintf (dump_file, "Built arg bounds (");
2373 print_generic_expr (dump_file, bounds, 0);
2374 fprintf (dump_file, ") for arg: ");
2375 print_node (dump_file, "", decl, 0);
2378 else
2379 bounds = chkp_get_zero_bounds ();
2382 if (!chkp_get_registered_bounds (parm))
2383 bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);
2385 if (dump_file && (dump_flags & TDF_DETAILS))
2387 fprintf (dump_file, "Using bounds ");
2388 print_generic_expr (dump_file, bounds, 0);
2389 fprintf (dump_file, " for parm ");
2390 print_generic_expr (dump_file, parm, 0);
2391 fprintf (dump_file, " of type ");
2392 print_generic_expr (dump_file, TREE_TYPE (parm), 0);
2393 fprintf (dump_file, ".\n");
2396 return bounds;
2399 /* Build and return CALL_EXPR for bndldx builtin with specified
2400 ADDR and PTR arguments. */
2401 tree
2402 chkp_build_bndldx_call (tree addr, tree ptr)
2404 tree fn = build1 (ADDR_EXPR,
2405 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2406 chkp_bndldx_fndecl);
2407 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2408 fn, 2, addr, ptr);
/* Mark the call so later passes know it manipulates bounds.  */
2409 CALL_WITH_BOUNDS_P (call) = true;
2410 return call;
2413 /* Insert code to load bounds for PTR located by ADDR.
2414 Code is inserted after position pointed by GSI.
2415 Loaded bounds are returned. */
2416 static tree
2417 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2419 gimple_seq seq;
2420 gimple *stmt;
2421 tree bounds;
2423 seq = NULL;
/* ADDR and PTR may be arbitrary expressions; gimplify them into
   valid call operands, accumulating helper stmts into SEQ.  */
2425 addr = chkp_force_gimple_call_op (addr, &seq);
2426 ptr = chkp_force_gimple_call_op (ptr, &seq);
2428 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2429 chkp_mark_stmt (stmt);
2430 bounds = chkp_get_tmp_reg (stmt);
2431 gimple_call_set_lhs (stmt, bounds);
2433 gimple_seq_add_stmt (&seq, stmt);
2435 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2437 if (dump_file && (dump_flags & TDF_DETAILS))
2439 fprintf (dump_file, "Generated bndldx for pointer ");
2440 print_generic_expr (dump_file, ptr, 0);
2441 fprintf (dump_file, ": ");
2442 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2445 return bounds;
2448 /* Build and return CALL_EXPR for bndstx builtin with specified
2449 arguments. */
2450 tree
2451 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2453 tree fn = build1 (ADDR_EXPR,
2454 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2455 chkp_bndstx_fndecl);
2456 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2457 fn, 3, ptr, bounds, addr);
2458 CALL_WITH_BOUNDS_P (call) = true;
2459 return call;
2462 /* Insert code to store BOUNDS for PTR stored by ADDR.
2463 New statements are inserted after position pointed
2464 by GSI. */
2465 void
2466 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2467 gimple_stmt_iterator *gsi)
2469 gimple_seq seq;
2470 gimple *stmt;
2472 seq = NULL;
/* Gimplify ADDR and PTR into valid call operands; helper stmts
   accumulate into SEQ ahead of the bndstx call.  */
2474 addr = chkp_force_gimple_call_op (addr, &seq);
2475 ptr = chkp_force_gimple_call_op (ptr, &seq);
2477 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2478 chkp_mark_stmt (stmt);
2479 gimple_call_set_with_bounds (stmt, true);
2481 gimple_seq_add_stmt (&seq, stmt);
2483 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2485 if (dump_file && (dump_flags & TDF_DETAILS))
2487 fprintf (dump_file, "Generated bndstx for pointer store ");
2488 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2489 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2493 /* Compute bounds for pointer NODE which was assigned in
2494 assignment statement ASSIGN. Return computed bounds. */
/* Dispatches on the RHS code of ASSIGN: loads read bounds from the
   bounds table, copies/pointer-arithmetic propagate operand bounds,
   non-pointer arithmetic yields invalid-op bounds, and conditional
   forms build a COND_EXPR selecting between operand bounds.  */
2495 static tree
2496 chkp_compute_bounds_for_assignment (tree node, gimple *assign)
2498 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
2499 tree rhs1 = gimple_assign_rhs1 (assign);
2500 tree bounds = NULL_TREE;
2501 gimple_stmt_iterator iter = gsi_for_stmt (assign);
2502 tree base = NULL;
2504 if (dump_file && (dump_flags & TDF_DETAILS))
2506 fprintf (dump_file, "Computing bounds for assignment: ");
2507 print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
2510 switch (rhs_code)
2512 case MEM_REF:
2513 case TARGET_MEM_REF:
2514 case COMPONENT_REF:
2515 case ARRAY_REF:
2516 /* We need to load bounds from the bounds table. */
2517 bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
2518 break;
2520 case VAR_DECL:
2521 case SSA_NAME:
2522 case ADDR_EXPR:
2523 case POINTER_PLUS_EXPR:
2524 case NOP_EXPR:
2525 case CONVERT_EXPR:
2526 case INTEGER_CST:
2527 /* Bounds are just propagated from RHS. */
2528 bounds = chkp_find_bounds (rhs1, &iter);
2529 base = rhs1;
2530 break;
2532 case VIEW_CONVERT_EXPR:
2533 /* Bounds are just propagated from RHS. */
2534 bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
2535 break;
2537 case PARM_DECL:
2538 if (BOUNDED_P (rhs1))
2540 /* We need to load bounds from the bounds table. */
2541 bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
2542 node, &iter);
2543 TREE_ADDRESSABLE (rhs1) = 1;
2545 else
2546 bounds = chkp_get_nonpointer_load_bounds ();
2547 break;
2549 case MINUS_EXPR:
2550 case PLUS_EXPR:
2551 case BIT_AND_EXPR:
2552 case BIT_IOR_EXPR:
2553 case BIT_XOR_EXPR:
2555 tree rhs2 = gimple_assign_rhs2 (assign);
2556 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2557 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2559 /* First we try to check types of operands. If it
2560 does not help then look at bound values.
2562 If some bounds are incomplete and other are
2563 not proven to be valid (i.e. also incomplete
2564 or invalid because value is not pointer) then
2565 resulting value is incomplete and will be
2566 recomputed later in chkp_finish_incomplete_bounds. */
2567 if (BOUNDED_P (rhs1)
2568 && !BOUNDED_P (rhs2))
2569 bounds = bnd1;
2570 else if (BOUNDED_P (rhs2)
2571 && !BOUNDED_P (rhs1)
2572 && rhs_code != MINUS_EXPR)
2573 bounds = bnd2;
2574 else if (chkp_incomplete_bounds (bnd1))
2575 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
2576 && !chkp_incomplete_bounds (bnd2))
2577 bounds = bnd2;
2578 else
2579 bounds = incomplete_bounds;
2580 else if (chkp_incomplete_bounds (bnd2))
2581 if (chkp_valid_bounds (bnd1)
2582 && !chkp_incomplete_bounds (bnd1))
2583 bounds = bnd1;
2584 else
2585 bounds = incomplete_bounds;
2586 else if (!chkp_valid_bounds (bnd1))
2587 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
2588 bounds = bnd2;
2589 else if (bnd2 == chkp_get_zero_bounds ())
2590 bounds = bnd2;
2591 else
2592 bounds = bnd1;
2593 else if (!chkp_valid_bounds (bnd2))
2594 bounds = bnd1;
2595 else
2596 /* Seems both operands may have valid bounds
2597 (e.g. pointer minus pointer). In such case
2598 use default invalid op bounds. */
2599 bounds = chkp_get_invalid_op_bounds ();
/* Remember which operand supplied the bounds for the abnormal-PHI
   copy check below.  */
2601 base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
2603 break;
2605 case BIT_NOT_EXPR:
2606 case NEGATE_EXPR:
2607 case LSHIFT_EXPR:
2608 case RSHIFT_EXPR:
2609 case LROTATE_EXPR:
2610 case RROTATE_EXPR:
2611 case EQ_EXPR:
2612 case NE_EXPR:
2613 case LT_EXPR:
2614 case LE_EXPR:
2615 case GT_EXPR:
2616 case GE_EXPR:
2617 case MULT_EXPR:
2618 case RDIV_EXPR:
2619 case TRUNC_DIV_EXPR:
2620 case FLOOR_DIV_EXPR:
2621 case CEIL_DIV_EXPR:
2622 case ROUND_DIV_EXPR:
2623 case TRUNC_MOD_EXPR:
2624 case FLOOR_MOD_EXPR:
2625 case CEIL_MOD_EXPR:
2626 case ROUND_MOD_EXPR:
2627 case EXACT_DIV_EXPR:
2628 case FIX_TRUNC_EXPR:
2629 case FLOAT_EXPR:
2630 case REALPART_EXPR:
2631 case IMAGPART_EXPR:
2632 /* No valid bounds may be produced by these exprs. */
2633 bounds = chkp_get_invalid_op_bounds ();
2634 break;
2636 case COND_EXPR:
2638 tree val1 = gimple_assign_rhs2 (assign);
2639 tree val2 = gimple_assign_rhs3 (assign);
2640 tree bnd1 = chkp_find_bounds (val1, &iter);
2641 tree bnd2 = chkp_find_bounds (val2, &iter);
2642 gimple *stmt;
2644 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2645 bounds = incomplete_bounds;
2646 else if (bnd1 == bnd2)
2647 bounds = bnd1;
2648 else
/* Select bounds at runtime with the same condition as ASSIGN.  */
2650 rhs1 = unshare_expr (rhs1);
2652 bounds = chkp_get_tmp_reg (assign);
2653 stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
2654 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2656 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2657 chkp_mark_invalid_bounds (bounds);
2660 break;
2662 case MAX_EXPR:
2663 case MIN_EXPR:
2665 tree rhs2 = gimple_assign_rhs2 (assign);
2666 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2667 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2669 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2670 bounds = incomplete_bounds;
2671 else if (bnd1 == bnd2)
2672 bounds = bnd1;
2673 else
2675 gimple *stmt;
/* Rebuild MIN/MAX as a comparison so bounds follow the winner.  */
2676 tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
2677 boolean_type_node, rhs1, rhs2);
2678 bounds = chkp_get_tmp_reg (assign);
2679 stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);
2681 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2683 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2684 chkp_mark_invalid_bounds (bounds);
2687 break;
2689 default:
2690 bounds = chkp_get_zero_bounds ();
2691 warning (0, "pointer bounds were lost due to unexpected expression %s",
2692 get_tree_code_name (rhs_code));
2695 gcc_assert (bounds);
2697 /* We may reuse bounds of other pointer we copy/modify. But it is not
2698 allowed for abnormal ssa names. If we produced a pointer using
2699 abnormal ssa name, we better make a bounds copy to avoid coalescing
2700 issues. */
2701 if (base
2702 && TREE_CODE (base) == SSA_NAME
2703 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
2705 gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
2706 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2707 bounds = gimple_assign_lhs (stmt);
2710 if (node)
2711 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2713 return bounds;
2716 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2718 There are just few statement codes allowed: NOP (for default ssa names),
2719 ASSIGN, CALL, PHI, ASM.
2721 Return computed bounds. */
/* For PHIs, an empty bounds PHI is created, *ITER is updated to point
   to it, and the bounds are registered as incomplete to be filled in
   once all PHI arguments have bounds.  */
2722 static tree
2723 chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
2724 gphi_iterator *iter)
2726 tree var, bounds;
2727 enum gimple_code code = gimple_code (def_stmt);
2728 gphi *stmt;
2730 if (dump_file && (dump_flags & TDF_DETAILS))
2732 fprintf (dump_file, "Searching for bounds for node: ");
2733 print_generic_expr (dump_file, node, 0);
2735 fprintf (dump_file, " using its definition: ");
2736 print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2739 switch (code)
2741 case GIMPLE_NOP:
/* Default definition: classify by the underlying decl.  */
2742 var = SSA_NAME_VAR (node);
2743 switch (TREE_CODE (var))
2745 case PARM_DECL:
2746 bounds = chkp_get_bound_for_parm (node);
2747 break;
2749 case VAR_DECL:
2750 /* For uninitialized pointers use none bounds. */
2751 bounds = chkp_get_none_bounds ();
2752 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2753 break;
2755 case RESULT_DECL:
2757 tree base_type;
2759 gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2761 base_type = TREE_TYPE (TREE_TYPE (node));
2763 gcc_assert (TYPE_SIZE (base_type)
2764 && TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2765 && tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2767 bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2768 NULL, false);
2769 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2771 break;
2773 default:
2774 if (dump_file && (dump_flags & TDF_DETAILS))
2776 fprintf (dump_file, "Unexpected var with no definition\n");
2777 print_generic_expr (dump_file, var, 0);
2779 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2780 get_tree_code_name (TREE_CODE (var)));
2782 break;
2784 case GIMPLE_ASSIGN:
2785 bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2786 break;
2788 case GIMPLE_CALL:
2789 bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
2790 break;
2792 case GIMPLE_PHI:
2793 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
2794 if (SSA_NAME_VAR (node))
2795 var = chkp_get_bounds_var (SSA_NAME_VAR (node));
2796 else
2797 var = make_temp_ssa_name (pointer_bounds_type_node,
2798 NULL,
2799 CHKP_BOUND_TMP_NAME);
2800 else
2801 var = chkp_get_tmp_var ();
2802 stmt = create_phi_node (var, gimple_bb (def_stmt));
2803 bounds = gimple_phi_result (stmt);
2804 *iter = gsi_for_phi (stmt);
2806 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2808 /* Created bounds do not have all phi args computed and
2809 therefore we do not know if there is a valid source
2810 of bounds for that node. Therefore we mark bounds
2811 as incomplete and then recompute them when all phi
2812 args are computed. */
2813 chkp_register_incomplete_bounds (bounds, node);
2814 break;
2816 case GIMPLE_ASM:
/* ASM outputs get zero bounds: nothing is known about them.  */
2817 bounds = chkp_get_zero_bounds ();
2818 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2819 break;
2821 default:
2822 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2823 gimple_code_name[code]);
2826 return bounds;
2829 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2830 tree
2831 chkp_build_make_bounds_call (tree lower_bound, tree size)
2833 tree call = build1 (ADDR_EXPR,
2834 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2835 chkp_bndmk_fndecl);
2836 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2837 call, 2, lower_bound, size);
2840 /* Create static bounds var for specified OBJ which
2841 is either VAR_DECL or string constant. */
/* The returned VAR_DECL is a common, comdat, statically initialized
   bounds object whose initializer is the address of OBJ; results are
   cached in chkp_static_var_bounds so each OBJ gets one bounds var.  */
2842 static tree
2843 chkp_make_static_bounds (tree obj)
2845 static int string_id = 1;
2846 static int var_id = 1;
2847 tree *slot;
2848 const char *var_name;
2849 char *bnd_var_name;
2850 tree bnd_var;
2852 /* First check if we already have required var. */
2853 if (chkp_static_var_bounds)
2855 /* For vars we use assembler name as a key in
2856 chkp_static_var_bounds map. It allows to
2857 avoid duplicating bound vars for decls
2858 sharing assembler name. */
2859 if (TREE_CODE (obj) == VAR_DECL)
2861 tree name = DECL_ASSEMBLER_NAME (obj);
2862 slot = chkp_static_var_bounds->get (name);
2863 if (slot)
2864 return *slot;
2866 else
2868 slot = chkp_static_var_bounds->get (obj);
2869 if (slot)
2870 return *slot;
2874 /* Build decl for bounds var. */
2875 if (TREE_CODE (obj) == VAR_DECL)
2877 if (DECL_IGNORED_P (obj))
/* Ignored decls get a numbered synthetic name.  */
2879 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2880 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2882 else
2884 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2886 /* For hidden symbols we want to skip first '*' char. */
2887 if (*var_name == '*')
2888 var_name++;
2890 bnd_var_name = (char *) xmalloc (strlen (var_name)
2891 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2892 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2893 strcat (bnd_var_name, var_name);
2896 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2897 get_identifier (bnd_var_name),
2898 pointer_bounds_type_node);
2900 /* Address of the obj will be used as lower bound. */
2901 TREE_ADDRESSABLE (obj) = 1;
2903 else
/* String constant case: numbered synthetic name.  */
2905 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2906 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2908 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2909 get_identifier (bnd_var_name),
2910 pointer_bounds_type_node);
2913 TREE_PUBLIC (bnd_var) = 0;
2914 TREE_USED (bnd_var) = 1;
2915 TREE_READONLY (bnd_var) = 0;
2916 TREE_STATIC (bnd_var) = 1;
2917 TREE_ADDRESSABLE (bnd_var) = 0;
2918 DECL_ARTIFICIAL (bnd_var) = 1;
2919 DECL_COMMON (bnd_var) = 1;
2920 DECL_COMDAT (bnd_var) = 1;
2921 DECL_READ_P (bnd_var) = 1;
2922 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2923 /* Force output similar to constant bounds.
2924 See chkp_make_static_const_bounds. */
2925 varpool_node::get_create (bnd_var)->force_output = 1;
2926 /* Mark symbol as requiring bounds initialization. */
2927 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2928 varpool_node::finalize_decl (bnd_var);
2930 /* Add created var to the map to use it for other references
2931 to obj. */
2932 if (!chkp_static_var_bounds)
2933 chkp_static_var_bounds = new hash_map<tree, tree>;
2935 if (TREE_CODE (obj) == VAR_DECL)
2937 tree name = DECL_ASSEMBLER_NAME (obj);
2938 chkp_static_var_bounds->put (name, bnd_var);
2940 else
2941 chkp_static_var_bounds->put (obj, bnd_var);
2943 return bnd_var;
2946 /* When var has incomplete type we cannot get size to
2947 compute its bounds. In such cases we use checker
2948 builtin call which determines object size at runtime. */
/* Emits the size computation at the start of the entry block and
   returns the resulting bounds; returns zero bounds when
   -fchkp-incomplete-type checking is disabled.  */
2949 static tree
2950 chkp_generate_extern_var_bounds (tree var)
2952 tree bounds, size_reloc, lb, size, max_size, cond;
2953 gimple_stmt_iterator gsi;
2954 gimple_seq seq = NULL;
2955 gimple *stmt;
2957 /* If instrumentation is not enabled for vars having
2958 incomplete type then just return zero bounds to avoid
2959 checks for this var. */
2960 if (!flag_chkp_incomplete_type)
2961 return chkp_get_zero_bounds ();
2963 if (dump_file && (dump_flags & TDF_DETAILS))
2965 fprintf (dump_file, "Generating bounds for extern symbol '");
2966 print_generic_expr (dump_file, var, 0);
2967 fprintf (dump_file, "'\n");
2970 stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
2972 size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
2973 gimple_call_set_lhs (stmt, size_reloc);
2975 gimple_seq_add_stmt (&seq, stmt);
2977 lb = chkp_build_addr_expr (var);
2978 size = make_ssa_name (chkp_get_size_tmp_var ());
2980 if (flag_chkp_zero_dynamic_size_as_infinite)
2982 /* We should check that size relocation was resolved.
2983 If it was not then use maximum possible size for the var. */
/* Max size = address space top minus LB, i.e. 0 - lb in
   unsigned pointer arithmetic.  */
2984 max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
2985 fold_convert (chkp_uintptr_type, lb));
2986 max_size = chkp_force_gimple_call_op (max_size, &seq);
2988 cond = build2 (NE_EXPR, boolean_type_node,
2989 size_reloc, integer_zero_node);
2990 stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
2991 gimple_seq_add_stmt (&seq, stmt);
2993 else
2995 stmt = gimple_build_assign (size, size_reloc);
2996 gimple_seq_add_stmt (&seq, stmt);
2999 gsi = gsi_start_bb (chkp_get_entry_block ());
3000 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3002 bounds = chkp_make_bounds (lb, size, &gsi, true);
3004 return bounds;
3007 /* Return 1 if TYPE has fields with zero size or fields
3008 marked with chkp_variable_size attribute. */
3009 bool
3010 chkp_variable_size_type (tree type)
3012 bool res = false;
3013 tree field;
3015 if (RECORD_OR_UNION_TYPE_P (type))
3016 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3018 if (TREE_CODE (field) == FIELD_DECL)
3019 res = res
3020 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3021 || chkp_variable_size_type (TREE_TYPE (field));
3023 else
3024 res = !TYPE_SIZE (type)
3025 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3026 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3028 return res;
3031 /* Compute and return bounds for address of DECL which is
3032 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
3033 static tree
3034 chkp_get_bounds_for_decl_addr (tree decl)
3036 tree bounds;
3038 gcc_assert (TREE_CODE (decl) == VAR_DECL
3039 || TREE_CODE (decl) == PARM_DECL
3040 || TREE_CODE (decl) == RESULT_DECL);
3042 bounds = chkp_get_registered_addr_bounds (decl);
3044 if (bounds)
3045 return bounds;
3047 if (dump_file && (dump_flags & TDF_DETAILS))
3049 fprintf (dump_file, "Building bounds for address of decl ");
3050 print_generic_expr (dump_file, decl, 0);
3051 fprintf (dump_file, "\n");
3054 /* Use zero bounds if size is unknown and checks for
3055 unknown sizes are restricted. */
3056 if ((!DECL_SIZE (decl)
3057 || (chkp_variable_size_type (TREE_TYPE (decl))
3058 && (TREE_STATIC (decl)
3059 || DECL_EXTERNAL (decl)
3060 || TREE_PUBLIC (decl))))
3061 && !flag_chkp_incomplete_type)
3062 return chkp_get_zero_bounds ();
/* Prefer a statically initialized bounds var for static/extern/public
   (non-TLS) vars when -fchkp-use-static-bounds is enabled.  */
3064 if (flag_chkp_use_static_bounds
3065 && TREE_CODE (decl) == VAR_DECL
3066 && (TREE_STATIC (decl)
3067 || DECL_EXTERNAL (decl)
3068 || TREE_PUBLIC (decl))
3069 && !DECL_THREAD_LOCAL_P (decl))
3071 tree bnd_var = chkp_make_static_bounds (decl);
3072 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3073 gimple *stmt;
3075 bounds = chkp_get_tmp_reg (NULL);
3076 stmt = gimple_build_assign (bounds, bnd_var);
3077 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
/* Size unknown at compile time: compute it at runtime.  */
3079 else if (!DECL_SIZE (decl)
3080 || (chkp_variable_size_type (TREE_TYPE (decl))
3081 && (TREE_STATIC (decl)
3082 || DECL_EXTERNAL (decl)
3083 || TREE_PUBLIC (decl))))
3085 gcc_assert (TREE_CODE (decl) == VAR_DECL);
3086 bounds = chkp_generate_extern_var_bounds (decl);
3088 else
3090 tree lb = chkp_build_addr_expr (decl);
3091 bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3094 return bounds;
3097 /* Compute and return bounds for constant string. */
/* CST is a STRING_CST.  Results are registered so repeated queries
   for the same constant return the same bounds.  */
3098 static tree
3099 chkp_get_bounds_for_string_cst (tree cst)
3101 tree bounds;
3102 tree lb;
3103 tree size;
3105 gcc_assert (TREE_CODE (cst) == STRING_CST);
3107 bounds = chkp_get_registered_bounds (cst);
3109 if (bounds)
3110 return bounds;
/* Use a statically initialized bounds var when static bounds
   are enabled, otherwise build bounds from address and length.  */
3112 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3113 || flag_chkp_use_static_const_bounds > 0)
3115 tree bnd_var = chkp_make_static_bounds (cst);
3116 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3117 gimple *stmt;
3119 bounds = chkp_get_tmp_reg (NULL);
3120 stmt = gimple_build_assign (bounds, bnd_var);
3121 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3123 else
3125 lb = chkp_build_addr_expr (cst);
3126 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3127 bounds = chkp_make_bounds (lb, size, NULL, false);
3130 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3132 return bounds;
3135 /* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
3136 return the result. if ITER is not NULL then Code is inserted
3137 before position pointed by ITER. Otherwise code is added to
3138 entry block. */
3139 static tree
3140 chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
/* Zero bounds are the identity for intersection, so either operand
   being NULL or zero bounds short-circuits to the other.  */
3142 if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
3143 return bounds2 ? bounds2 : bounds1;
3144 else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
3145 return bounds1;
3146 else
3148 gimple_seq seq;
3149 gimple *stmt;
3150 tree bounds;
3152 seq = NULL;
3154 stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
3155 chkp_mark_stmt (stmt);
3157 bounds = chkp_get_tmp_reg (stmt);
3158 gimple_call_set_lhs (stmt, bounds);
3160 gimple_seq_add_stmt (&seq, stmt);
3162 /* We are probably doing narrowing for constant expression.
3163 In such case iter may be undefined. */
3164 if (!iter)
3166 gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
3167 iter = &gsi;
3168 gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
3170 else
3171 gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
3173 if (dump_file && (dump_flags & TDF_DETAILS))
3175 fprintf (dump_file, "Bounds intersection: ");
3176 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
3177 fprintf (dump_file, " inserted before statement: ");
3178 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
3179 TDF_VOPS|TDF_MEMSYMS);
3182 return bounds;
3186 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3187 and 0 othersize. */
3188 static bool
3189 chkp_may_narrow_to_field (tree field)
3191 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3192 && tree_to_uhwi (DECL_SIZE (field)) != 0
3193 && (!DECL_FIELD_OFFSET (field)
3194 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3195 && (!DECL_FIELD_BIT_OFFSET (field)
3196 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3197 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3198 && !chkp_variable_size_type (TREE_TYPE (field));
3201 /* Return 1 if bounds for FIELD should be narrowed to
3202 field's own size. */
3203 static bool
3204 chkp_narrow_bounds_for_field (tree field)
3206 HOST_WIDE_INT offs;
3207 HOST_WIDE_INT bit_offs;
3209 if (!chkp_may_narrow_to_field (field))
3210 return false;
3212 /* Accesses to compiler generated fields should not cause
3213 bounds narrowing. */
3214 if (DECL_ARTIFICIAL (field))
3215 return false;
3217 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3218 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
/* Narrow for any non-first field; the first field (offset 0)
   only under -fchkp-first-field-has-own-bounds.  */
3220 return (flag_chkp_narrow_bounds
3221 && (flag_chkp_first_field_has_own_bounds
3222 || offs
3223 || bit_offs));
3226 /* Perform narrowing for BOUNDS using bounds computed for field
3227 access COMPONENT. ITER meaning is the same as for
3228 chkp_intersect_bounds. */
3229 static tree
3230 chkp_narrow_bounds_to_field (tree bounds, tree component,
3231 gimple_stmt_iterator *iter)
3233 tree field = TREE_OPERAND (component, 1);
3234 tree size = DECL_SIZE_UNIT (field);
3235 tree field_ptr = chkp_build_addr_expr (component);
3236 tree field_bounds;
3238 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3240 return chkp_intersect_bounds (field_bounds, bounds, iter);
3243 /* Parse field or array access NODE.
3245 PTR output parameter holds a pointer to the outermost
3246 object.
3248 BITFIELD output parameter is set to 1 if bitfield is
3249 accessed and to 0 otherwise. If it is 1 then ELT holds
3250 outer component for accessed bit field.
3252 SAFE outer parameter is set to 1 if access is safe and
3253 checks are not required.
3255 BOUNDS outer parameter holds bounds to be used to check
3256 access (may be NULL).
3258 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3259 innermost accessed component. */
3260 static void
3261 chkp_parse_array_and_component_ref (tree node, tree *ptr,
3262 tree *elt, bool *safe,
3263 bool *bitfield,
3264 tree *bounds,
3265 gimple_stmt_iterator *iter,
3266 bool innermost_bounds)
3268 tree comp_to_narrow = NULL_TREE;
3269 tree last_comp = NULL_TREE;
3270 bool array_ref_found = false;
3271 tree *nodes;
3272 tree var;
3273 int len;
3274 int i;
3276 /* Compute tree height for expression. */
3277 var = node;
3278 len = 1;
3279 while (TREE_CODE (var) == COMPONENT_REF
3280 || TREE_CODE (var) == ARRAY_REF
3281 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
3283 var = TREE_OPERAND (var, 0);
3284 len++;
3287 gcc_assert (len > 1);
3289 /* It is more convenient for us to scan left-to-right,
3290 so walk tree again and put all node to nodes vector
3291 in reversed order. */
3292 nodes = XALLOCAVEC (tree, len);
3293 nodes[len - 1] = node;
3294 for (i = len - 2; i >= 0; i--)
3295 nodes[i] = TREE_OPERAND (nodes[i + 1], 0);
3297 if (bounds)
3298 *bounds = NULL;
3299 *safe = true;
3300 *bitfield = (TREE_CODE (node) == COMPONENT_REF
3301 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
3302 /* To get bitfield address we will need outer element. */
3303 if (*bitfield)
3304 *elt = nodes[len - 2];
3305 else
3306 *elt = NULL_TREE;
3308 /* If we have indirection in expression then compute
3309 outermost structure bounds. Computed bounds may be
3310 narrowed later. */
3311 if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
3313 *safe = false;
3314 *ptr = TREE_OPERAND (nodes[0], 0);
3315 if (bounds)
3316 *bounds = chkp_find_bounds (*ptr, iter);
3318 else
/* Direct access to a decl or constant: its address is the
   outermost pointer.  */
3320 gcc_assert (TREE_CODE (var) == VAR_DECL
3321 || TREE_CODE (var) == PARM_DECL
3322 || TREE_CODE (var) == RESULT_DECL
3323 || TREE_CODE (var) == STRING_CST
3324 || TREE_CODE (var) == SSA_NAME);
3326 *ptr = chkp_build_addr_expr (var);
3329 /* In this loop we are trying to find a field access
3330 requiring narrowing. There are two simple rules
3331 for search:
3332 1. Leftmost array_ref is chosen if any.
3333 2. Rightmost suitable component_ref is chosen if innermost
3334 bounds are required and no array_ref exists. */
3335 for (i = 1; i < len; i++)
3337 var = nodes[i];
3339 if (TREE_CODE (var) == ARRAY_REF)
3341 *safe = false;
3342 array_ref_found = true;
3343 if (flag_chkp_narrow_bounds
3344 && !flag_chkp_narrow_to_innermost_arrray
3345 && (!last_comp
3346 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
3348 comp_to_narrow = last_comp;
3349 break;
3352 else if (TREE_CODE (var) == COMPONENT_REF)
3354 tree field = TREE_OPERAND (var, 1);
3356 if (innermost_bounds
3357 && !array_ref_found
3358 && chkp_narrow_bounds_for_field (field))
3359 comp_to_narrow = var;
3360 last_comp = var;
3362 if (flag_chkp_narrow_bounds
3363 && flag_chkp_narrow_to_innermost_arrray
3364 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
/* Narrow to the array field immediately and restart the
   search for a narrowing candidate below it.  */
3366 if (bounds)
3367 *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
3368 comp_to_narrow = NULL;
3371 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
3372 /* Nothing to do for it. */
3374 else
3375 gcc_unreachable ();
3378 if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
3379 *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);
3381 if (innermost_bounds && bounds && !*bounds)
3382 *bounds = chkp_find_bounds (*ptr, iter);
3385 /* Compute and return bounds for address of OBJ. */
3386 static tree
3387 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
/* Reuse bounds previously registered for this address, if any.  */
3389 tree bounds = chkp_get_registered_addr_bounds (obj);
3391 if (bounds)
3392 return bounds;
3394 switch (TREE_CODE (obj))
/* Address of a declaration: bounds cover the declared object.  */
3396 case VAR_DECL:
3397 case PARM_DECL:
3398 case RESULT_DECL:
3399 bounds = chkp_get_bounds_for_decl_addr (obj);
3400 break;
3402 case STRING_CST:
3403 bounds = chkp_get_bounds_for_string_cst (obj);
3404 break;
3406 case ARRAY_REF:
3407 case COMPONENT_REF:
3409 tree elt;
3410 tree ptr;
3411 bool safe;
3412 bool bitfield;
/* Delegate to the reference-chain parser, which also computes
   bounds.  NOTE(review): the trailing 'true' presumably requests
   innermost (narrowed) bounds — confirm against the parser.  */
3414 chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
3415 &bitfield, &bounds, iter, true);
3417 gcc_assert (bounds);
3419 break;
/* Function and label addresses get the predefined zero bounds.  */
3421 case FUNCTION_DECL:
3422 case LABEL_DECL:
3423 bounds = chkp_get_zero_bounds ();
3424 break;
/* For *p the bounds are those of pointer p itself.  */
3426 case MEM_REF:
3427 bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
3428 break;
/* Real/imag parts use bounds of the whole complex object.  */
3430 case REALPART_EXPR:
3431 case IMAGPART_EXPR:
3432 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
3433 break;
/* Unexpected tree code: dump details when requested, then ICE.  */
3435 default:
3436 if (dump_file && (dump_flags & TDF_DETAILS))
3438 fprintf (dump_file, "chkp_make_addressed_object_bounds: "
3439 "unexpected object of type %s\n",
3440 get_tree_code_name (TREE_CODE (obj)));
3441 print_node (dump_file, "", obj, 0);
3443 internal_error ("chkp_make_addressed_object_bounds: "
3444 "Unexpected tree code %s",
3445 get_tree_code_name (TREE_CODE (obj)));
/* Cache the result so later queries for OBJ hit the table above.  */
3448 chkp_register_addr_bounds (obj, bounds);
3450 return bounds;
3453 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3454 to compute bounds if required. Computed bounds should be available at
3455 position pointed by ITER.
3457 If PTR_SRC is NULL_TREE then pointer definition is identified.
3459 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3460 PTR. If PTR is a any memory reference then ITER points to a statement
3461 after which bndldx will be inserterd. In both cases ITER will be updated
3462 to point to the inserted bndldx statement. */
3464 static tree
3465 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3467 tree addr = NULL_TREE;
3468 tree bounds = NULL_TREE;
3470 if (!ptr_src)
3471 ptr_src = ptr;
3473 bounds = chkp_get_registered_bounds (ptr_src);
3475 if (bounds)
3476 return bounds;
3478 switch (TREE_CODE (ptr_src))
3480 case MEM_REF:
3481 case VAR_DECL:
3482 if (BOUNDED_P (ptr_src))
3483 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3484 bounds = chkp_get_zero_bounds ();
3485 else
3487 addr = chkp_build_addr_expr (ptr_src);
3488 bounds = chkp_build_bndldx (addr, ptr, iter);
3490 else
3491 bounds = chkp_get_nonpointer_load_bounds ();
3492 break;
3494 case ARRAY_REF:
3495 case COMPONENT_REF:
3496 addr = get_base_address (ptr_src);
3497 if (DECL_P (addr)
3498 || TREE_CODE (addr) == MEM_REF
3499 || TREE_CODE (addr) == TARGET_MEM_REF)
3501 if (BOUNDED_P (ptr_src))
3502 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3503 bounds = chkp_get_zero_bounds ();
3504 else
3506 addr = chkp_build_addr_expr (ptr_src);
3507 bounds = chkp_build_bndldx (addr, ptr, iter);
3509 else
3510 bounds = chkp_get_nonpointer_load_bounds ();
3512 else
3514 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3515 bounds = chkp_find_bounds (addr, iter);
3517 break;
3519 case PARM_DECL:
3520 gcc_unreachable ();
3521 bounds = chkp_get_bound_for_parm (ptr_src);
3522 break;
3524 case TARGET_MEM_REF:
3525 addr = chkp_build_addr_expr (ptr_src);
3526 bounds = chkp_build_bndldx (addr, ptr, iter);
3527 break;
3529 case SSA_NAME:
3530 bounds = chkp_get_registered_bounds (ptr_src);
3531 if (!bounds)
3533 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3534 gphi_iterator phi_iter;
3536 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3538 gcc_assert (bounds);
3540 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3542 unsigned i;
3544 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3546 tree arg = gimple_phi_arg_def (def_phi, i);
3547 tree arg_bnd;
3548 gphi *phi_bnd;
3550 arg_bnd = chkp_find_bounds (arg, NULL);
3552 /* chkp_get_bounds_by_definition created new phi
3553 statement and phi_iter points to it.
3555 Previous call to chkp_find_bounds could create
3556 new basic block and therefore change phi statement
3557 phi_iter points to. */
3558 phi_bnd = phi_iter.phi ();
3560 add_phi_arg (phi_bnd, arg_bnd,
3561 gimple_phi_arg_edge (def_phi, i),
3562 UNKNOWN_LOCATION);
3565 /* If all bound phi nodes have their arg computed
3566 then we may finish its computation. See
3567 chkp_finish_incomplete_bounds for more details. */
3568 if (chkp_may_finish_incomplete_bounds ())
3569 chkp_finish_incomplete_bounds ();
3572 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3573 || chkp_incomplete_bounds (bounds));
3575 break;
3577 case ADDR_EXPR:
3578 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3579 break;
3581 case INTEGER_CST:
3582 if (integer_zerop (ptr_src))
3583 bounds = chkp_get_none_bounds ();
3584 else
3585 bounds = chkp_get_invalid_op_bounds ();
3586 break;
3588 default:
3589 if (dump_file && (dump_flags & TDF_DETAILS))
3591 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3592 get_tree_code_name (TREE_CODE (ptr_src)));
3593 print_node (dump_file, "", ptr_src, 0);
3595 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3596 get_tree_code_name (TREE_CODE (ptr_src)));
3599 if (!bounds)
3601 if (dump_file && (dump_flags & TDF_DETAILS))
3603 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3604 print_node (dump_file, "", ptr_src, 0);
3606 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3609 return bounds;
3612 /* Normal case for bounds search without forced narrowing. */
3613 static tree
3614 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3616 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3619 /* Search bounds for pointer PTR loaded from PTR_SRC
3620 by statement *ITER points to. */
3621 static tree
3622 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3624 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3627 /* Helper function which checks type of RHS and finds all pointers in
3628 it. For each found pointer we build its accesses in LHS and RHS
3629 objects and then call HANDLER for them. Function is used to copy
3630 or initialize bounds for copied object. */
3631 static void
3632 chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
3633 assign_handler handler)
3635 tree type = TREE_TYPE (lhs);
3637 /* We have nothing to do with clobbers. */
3638 if (TREE_CLOBBER_P (rhs))
3639 return;
/* Bounded (pointer) type: hand the LHS/RHS pair straight to HANDLER.  */
3641 if (BOUNDED_TYPE_P (type))
3642 handler (lhs, rhs, arg);
/* Struct/union: recurse into each field that may contain a pointer.  */
3643 else if (RECORD_OR_UNION_TYPE_P (type))
3645 tree field;
3647 if (TREE_CODE (rhs) == CONSTRUCTOR)
3649 unsigned HOST_WIDE_INT cnt;
3650 tree val;
3652 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
3654 if (chkp_type_has_pointer (TREE_TYPE (field)))
3656 tree lhs_field = chkp_build_component_ref (lhs, field);
3657 chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
/* Non-constructor RHS: walk all FIELD_DECLs of the type and pair up
   the corresponding COMPONENT_REFs on both sides.  */
3661 else
3662 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3663 if (TREE_CODE (field) == FIELD_DECL
3664 && chkp_type_has_pointer (TREE_TYPE (field)))
3666 tree rhs_field = chkp_build_component_ref (rhs, field);
3667 tree lhs_field = chkp_build_component_ref (lhs, field);
3668 chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
3671 else if (TREE_CODE (type) == ARRAY_TYPE)
3673 unsigned HOST_WIDE_INT cur = 0;
3674 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3675 tree etype = TREE_TYPE (type);
3676 tree esize = TYPE_SIZE (etype);
3678 if (TREE_CODE (rhs) == CONSTRUCTOR)
3680 unsigned HOST_WIDE_INT cnt;
3681 tree purp, val, lhs_elem;
3683 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
/* A RANGE_EXPR index initializes every element in [lo, hi] with VAL.  */
3685 if (purp && TREE_CODE (purp) == RANGE_EXPR)
3687 tree lo_index = TREE_OPERAND (purp, 0);
3688 tree hi_index = TREE_OPERAND (purp, 1);
3690 for (cur = (unsigned)tree_to_uhwi (lo_index);
3691 cur <= (unsigned)tree_to_uhwi (hi_index);
3692 cur++)
3694 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3695 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3698 else
/* An explicit index resets CUR; otherwise CUR advances sequentially
   from the previous element (post-increment below).  */
3700 if (purp)
3702 gcc_assert (TREE_CODE (purp) == INTEGER_CST);
3703 cur = tree_to_uhwi (purp);
3706 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
3708 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3712 /* Copy array only when size is known. */
3713 else if (maxval && !integer_minus_onep (maxval))
3714 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
3716 tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3717 tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
3718 chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
3721 else
3722 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3723 get_tree_code_name (TREE_CODE (type)));
3726 /* Add code to copy bounds for assignment of RHS to LHS.
3727 ARG is an iterator pointing ne code position. */
3728 static void
3729 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3731 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3732 tree bounds = chkp_find_bounds (rhs, iter);
3733 tree addr = chkp_build_addr_expr(lhs);
3735 chkp_build_bndstx (addr, rhs, bounds, iter);
3738 /* Emit static bound initializers and size vars. */
3739 void
3740 chkp_finish_file (void)
3742 struct varpool_node *node;
3743 struct chkp_ctor_stmt_list stmts;
3745 if (seen_error ())
3746 return;
3748 /* Iterate through varpool and generate bounds initialization
3749 constructors for all statically initialized pointers. */
3750 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3751 stmts.stmts = NULL;
3752 FOR_EACH_VARIABLE (node)
3753 /* Check that var is actually emitted and we need and may initialize
3754 its bounds. */
3755 if (node->need_bounds_init
3756 && !POINTER_BOUNDS_P (node->decl)
3757 && DECL_RTL (node->decl)
3758 && MEM_P (DECL_RTL (node->decl))
3759 && TREE_ASM_WRITTEN (node->decl))
3761 chkp_walk_pointer_assignments (node->decl,
3762 DECL_INITIAL (node->decl),
3763 &stmts,
3764 chkp_add_modification_to_stmt_list)
/* When the statement list fills up, flush it into a 'P' static
   constructor and start a fresh list.  */
3766 if (stmts.avail <= 0)
3768 cgraph_build_static_cdtor ('P', stmts.stmts,
3769 MAX_RESERVED_INIT_PRIORITY + 3);
3770 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3771 stmts.stmts = NULL;
/* Flush any remaining statements.  */
3775 if (stmts.stmts)
3776 cgraph_build_static_cdtor ('P', stmts.stmts,
3777 MAX_RESERVED_INIT_PRIORITY + 3);
3779 /* Iterate through varpool and generate bounds initialization
3780 constructors for all static bounds vars. */
3781 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3782 stmts.stmts = NULL;
3783 FOR_EACH_VARIABLE (node)
3784 if (node->need_bounds_init
3785 && POINTER_BOUNDS_P (node->decl)
3786 && TREE_ASM_WRITTEN (node->decl))
3788 tree bnd = node->decl;
3789 tree var;
/* Static bounds vars are initialized with the address of the
   object whose bounds they describe.  */
3791 gcc_assert (DECL_INITIAL (bnd)
3792 && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);
3794 var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
3795 chkp_output_static_bounds (bnd, var, &stmts);
/* Bounds-var initializers go into a 'B' constructor, run just
   before the 'P' ones (priority + 2 vs + 3).  */
3798 if (stmts.stmts)
3799 cgraph_build_static_cdtor ('B', stmts.stmts,
3800 MAX_RESERVED_INIT_PRIORITY + 2);
3802 delete chkp_static_var_bounds;
3803 delete chkp_bounds_map;
3806 /* An instrumentation function which is called for each statement
3807 having memory access we want to instrument. It inserts check
3808 code and bounds copy code.
3810 ITER points to statement to instrument.
3812 NODE holds memory access in statement to check.
3814 LOC holds the location information for statement.
3816 DIRFLAGS determines whether access is read or write.
3818 ACCESS_OFFS should be added to address used in NODE
3819 before check.
3821 ACCESS_SIZE holds size of checked access.
3823 SAFE indicates if NODE access is safe and should not be
3824 checked. */
3825 static void
3826 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3827 location_t loc, tree dirflag,
3828 tree access_offs, tree access_size,
3829 bool safe)
3831 tree node_type = TREE_TYPE (node);
3832 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3833 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3834 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3835 tree ptr = NULL_TREE; /* a pointer used for dereference */
3836 tree bounds = NULL_TREE;
3838 /* We do not need instrumentation for clobbers. */
3839 if (dirflag == integer_one_node
3840 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3841 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter)))
3842 return;
3844 switch (TREE_CODE (node))
3846 case ARRAY_REF:
3847 case COMPONENT_REF:
3849 bool bitfield;
3850 tree elt;
3852 if (safe)
3854 /* We are not going to generate any checks, so do not
3855 generate bounds as well. */
3856 addr_first = chkp_build_addr_expr (node);
3857 break;
3860 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3861 &bitfield, &bounds, iter, false);
3863 /* Break if there is no dereference and operation is safe. */
/* For a bitfield the checked region covers the field's full
   storage unit, addressed via the enclosing element.  */
3865 if (bitfield)
3867 tree field = TREE_OPERAND (node, 1);
3869 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3870 size = DECL_SIZE_UNIT (field);
3872 if (elt)
3873 elt = chkp_build_addr_expr (elt);
3874 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3875 addr_first = fold_build_pointer_plus_loc (loc,
3876 addr_first,
3877 byte_position (field));
3879 else
3880 addr_first = chkp_build_addr_expr (node);
3882 break;
3884 case INDIRECT_REF:
3885 ptr = TREE_OPERAND (node, 0);
3886 addr_first = ptr;
3887 break;
3889 case MEM_REF:
3890 ptr = TREE_OPERAND (node, 0);
3891 addr_first = chkp_build_addr_expr (node);
3892 break;
3894 case TARGET_MEM_REF:
3895 ptr = TMR_BASE (node);
3896 addr_first = chkp_build_addr_expr (node);
3897 break;
/* NOTE(review): the printf/debug_* calls below are debugging
   leftovers; ARRAY_RANGE_REF is asserted not to reach here.  */
3899 case ARRAY_RANGE_REF:
3900 printf("ARRAY_RANGE_REF\n");
3901 debug_gimple_stmt(gsi_stmt(*iter));
3902 debug_tree(node);
3903 gcc_unreachable ();
3904 break;
/* BIT_FIELD_REF: convert the bit offset/size into a byte-aligned
   offset and size, then re-dispatch on the underlying object.  */
3906 case BIT_FIELD_REF:
3908 tree offs, rem, bpu;
3910 gcc_assert (!access_offs);
3911 gcc_assert (!access_size);
3913 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3914 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3915 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3916 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3918 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3919 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3920 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3921 size = fold_convert (size_type_node, size);
3923 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3924 dirflag, offs, size, safe);
3925 return;
3927 break;
/* Direct decl accesses need no check; we only need to store bounds
   when pointers are written into the object (handled below).  */
3929 case VAR_DECL:
3930 case RESULT_DECL:
3931 case PARM_DECL:
3932 if (dirflag != integer_one_node
3933 || DECL_REGISTER (node))
3934 return;
3936 safe = true;
3937 addr_first = chkp_build_addr_expr (node);
3938 break;
3940 default:
3941 return;
3944 /* If addr_last was not computed then use (addr_first + size - 1)
3945 expression to compute it. */
3946 if (!addr_last)
3948 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3949 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3952 /* Shift both first_addr and last_addr by access_offs if specified. */
3953 if (access_offs)
3955 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3956 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3959 /* Generate bndcl/bndcu checks if memory access is not safe. */
3960 if (!safe)
3962 gimple_stmt_iterator stmt_iter = *iter;
3964 if (!bounds)
3965 bounds = chkp_find_bounds (ptr, iter);
3967 chkp_check_mem_access (addr_first, addr_last, bounds,
3968 stmt_iter, loc, dirflag);
3971 /* We need to store bounds in case pointer is stored. */
3972 if (dirflag == integer_one_node
3973 && chkp_type_has_pointer (node_type)
3974 && flag_chkp_store_bounds)
3976 gimple *stmt = gsi_stmt (*iter);
3977 tree rhs1 = gimple_assign_rhs1 (stmt);
3978 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3980 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3981 chkp_walk_pointer_assignments (node, rhs1, iter,
3982 chkp_copy_bounds_for_elem);
3983 else
3985 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3986 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
3991 /* Add code to copy bounds for all pointers copied
3992 in ASSIGN created during inline of EDGE. */
3993 void
3994 chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
3996 tree lhs = gimple_assign_lhs (assign);
3997 tree rhs = gimple_assign_rhs1 (assign);
3998 gimple_stmt_iterator iter = gsi_for_stmt (assign);
4000 if (!flag_chkp_store_bounds)
4001 return;
/* Emit bndldx/bndstx calls for every pointer member copied by ASSIGN;
   the walk advances ITER past the inserted statements.  */
4003 chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);
4005 /* We should create edges for all created calls to bndldx and bndstx. */
/* Walk backwards from ITER to ASSIGN, adding a cgraph edge for each
   inserted checker call, with counts/frequencies taken from EDGE.  */
4006 while (gsi_stmt (iter) != assign)
4008 gimple *stmt = gsi_stmt (iter);
4009 if (gimple_code (stmt) == GIMPLE_CALL)
4011 tree fndecl = gimple_call_fndecl (stmt);
4012 struct cgraph_node *callee = cgraph_node::get_create (fndecl);
4013 struct cgraph_edge *new_edge;
4015 gcc_assert (fndecl == chkp_bndstx_fndecl
4016 || fndecl == chkp_bndldx_fndecl
4017 || fndecl == chkp_ret_bnd_fndecl);
4019 new_edge = edge->caller->create_edge (callee,
4020 as_a <gcall *> (stmt),
4021 edge->count,
4022 edge->frequency);
4023 new_edge->frequency = compute_call_stmt_bb_frequency
4024 (edge->caller->decl, gimple_bb (stmt));
4026 gsi_prev (&iter);
4030 /* Some code transformation made during instrumentation pass
4031 may put code into inconsistent state. Here we find and fix
4032 such flaws. */
4033 void
4034 chkp_fix_cfg ()
4036 basic_block bb;
4037 gimple_stmt_iterator i;
4039 /* We could insert some code right after stmt which ends bb.
4040 We wanted to put this code on fallthru edge but did not
4041 add new edges from the beginning because it may cause new
4042 phi node creation which may be incorrect due to incomplete
4043 bound phi nodes. */
4044 FOR_ALL_BB_FN (bb, cfun)
4045 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4047 gimple *stmt = gsi_stmt (i);
4048 gimple_stmt_iterator next = i;
4050 gsi_next (&next);
/* A statement that must end its block but is followed by more
   statements — the code to relocate onto the fallthru edge.  */
4052 if (stmt_ends_bb_p (stmt)
4053 && !gsi_end_p (next))
4055 edge fall = find_fallthru_edge (bb->succs);
4056 basic_block dest = NULL;
4057 int flags = 0;
4059 gcc_assert (fall);
4061 /* We cannot split abnormal edge. Therefore we
4062 store its params, make it regular and then
4063 rebuild abnormal edge after split. */
4064 if (fall->flags & EDGE_ABNORMAL)
4066 flags = fall->flags & ~EDGE_FALLTHRU;
4067 dest = fall->dest;
4069 fall->flags &= ~EDGE_COMPLEX;
/* Move every trailing statement onto the fallthru edge, preserving
   order, then commit the edge insertions (which splits the edge).  */
4072 while (!gsi_end_p (next))
4074 gimple *next_stmt = gsi_stmt (next);
4075 gsi_remove (&next, false);
4076 gsi_insert_on_edge (fall, next_stmt);
4079 gsi_commit_edge_inserts ();
4081 /* Re-create abnormal edge. */
4082 if (dest)
4083 make_edge (bb, dest, flags);
4088 /* Walker callback for chkp_replace_function_pointers. Replaces
4089 function pointer in the specified operand with pointer to the
4090 instrumented function version. */
4091 static tree
4092 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4093 void *data ATTRIBUTE_UNUSED)
4095 if (TREE_CODE (*op) == FUNCTION_DECL
4096 && chkp_instrumentable_p (*op)
4097 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4098 /* For builtins we replace pointers only for selected
4099 function and functions having definitions. */
4100 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4101 && (chkp_instrument_normal_builtin (*op)
4102 || gimple_has_body_p (*op)))))
4104 struct cgraph_node *node = cgraph_node::get_create (*op);
4105 struct cgraph_node *clone = NULL;
4107 if (!node->instrumentation_clone)
4108 clone = chkp_maybe_create_clone (*op);
4110 if (clone)
4111 *op = clone->decl;
4112 *walk_subtrees = 0;
4115 return NULL;
4118 /* This function searches for function pointers in statement
4119 pointed by GSI and replaces them with pointers to instrumented
4120 function versions. */
4121 static void
4122 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4124 gimple *stmt = gsi_stmt (*gsi);
4125 /* For calls we want to walk call args only. */
4126 if (gimple_code (stmt) == GIMPLE_CALL)
4128 unsigned i;
4129 for (i = 0; i < gimple_call_num_args (stmt); i++)
4130 walk_tree (gimple_call_arg_ptr (stmt, i),
4131 chkp_replace_function_pointer, NULL, NULL);
4133 else
4134 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4137 /* This function instruments all statements working with memory,
4138 calls and rets.
4140 It also removes excess statements from static initializers. */
4141 static void
4142 chkp_instrument_function (void)
4144 basic_block bb, next;
4145 gimple_stmt_iterator i;
4146 enum gimple_rhs_class grhs_class;
/* In checker constructors ("chkp ctor") every access is safe.  */
4147 bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4149 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
/* NEXT is captured before instrumentation, which may split BB.  */
4152 next = bb->next_bb;
4153 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4155 gimple *s = gsi_stmt (i);
4157 /* Skip statement marked to not be instrumented. */
4158 if (chkp_marked_stmt_p (s))
4160 gsi_next (&i);
4161 continue;
4164 chkp_replace_function_pointers (&i);
4166 switch (gimple_code (s))
/* Instrument the store (LHS) and the loads (RHS operands).  */
4168 case GIMPLE_ASSIGN:
4169 chkp_process_stmt (&i, gimple_assign_lhs (s),
4170 gimple_location (s), integer_one_node,
4171 NULL_TREE, NULL_TREE, safe);
4172 chkp_process_stmt (&i, gimple_assign_rhs1 (s),
4173 gimple_location (s), integer_zero_node,
4174 NULL_TREE, NULL_TREE, safe);
4175 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
4176 if (grhs_class == GIMPLE_BINARY_RHS)
4177 chkp_process_stmt (&i, gimple_assign_rhs2 (s),
4178 gimple_location (s), integer_zero_node,
4179 NULL_TREE, NULL_TREE, safe);
4180 break;
4182 case GIMPLE_RETURN:
4184 greturn *r = as_a <greturn *> (s);
4185 if (gimple_return_retval (r) != NULL_TREE)
4187 chkp_process_stmt (&i, gimple_return_retval (r),
4188 gimple_location (r),
4189 integer_zero_node,
4190 NULL_TREE, NULL_TREE, safe);
4192 /* Additionally we need to add bounds
4193 to return statement. */
4194 chkp_add_bounds_to_ret_stmt (&i);
4197 break;
4199 case GIMPLE_CALL:
4200 chkp_add_bounds_to_call_stmt (&i);
4201 break;
4203 default:
4207 gsi_next (&i);
4209 /* We do not need any actual pointer stores in checker
4210 static initializer. */
4211 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4212 && gimple_code (s) == GIMPLE_ASSIGN
4213 && gimple_store_p (s))
4215 gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4216 gsi_remove (&del_iter, true);
4217 unlink_stmt_vdef (s);
4218 release_defs(s);
4221 bb = next;
4223 while (bb);
4225 /* Some input params may have bounds and be address taken. In this case
4226 we should store incoming bounds into bounds table. */
4227 tree arg;
4228 if (flag_chkp_store_bounds)
4229 for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4230 if (TREE_ADDRESSABLE (arg))
/* A bounded (pointer) parameter: store its incoming bounds at the
   parameter's address in the entry block.  */
4232 if (BOUNDED_P (arg))
4234 tree bounds = chkp_get_next_bounds_parm (arg);
4235 tree def_ptr = ssa_default_def (cfun, arg);
4236 gimple_stmt_iterator iter
4237 = gsi_start_bb (chkp_get_entry_block ());
4238 chkp_build_bndstx (chkp_build_addr_expr (arg),
4239 def_ptr ? def_ptr : arg,
4240 bounds, &iter);
4242 /* Skip bounds arg. */
4243 arg = TREE_CHAIN (arg);
/* An aggregate parameter holding pointers: store bounds for each
   pointer slot found in its type.  */
4245 else if (chkp_type_has_pointer (TREE_TYPE (arg)))
4247 tree orig_arg = arg;
4248 bitmap slots = BITMAP_ALLOC (NULL);
4249 gimple_stmt_iterator iter
4250 = gsi_start_bb (chkp_get_entry_block ());
4251 bitmap_iterator bi;
4252 unsigned bnd_no;
4254 chkp_find_bound_slots (TREE_TYPE (arg), slots);
4256 EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4258 tree bounds = chkp_get_next_bounds_parm (arg);
4259 HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4260 tree addr = chkp_build_addr_expr (orig_arg);
4261 tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4262 build_int_cst (ptr_type_node, offs));
4263 chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4264 bounds, &iter);
4266 arg = DECL_CHAIN (arg);
4268 BITMAP_FREE (slots);
4273 /* Find init/null/copy_ptr_bounds calls and replace them
4274 with assignments. It should allow better code
4275 optimization. */
4277 static void
4278 chkp_remove_useless_builtins ()
4280 basic_block bb;
4281 gimple_stmt_iterator gsi;
4283 FOR_EACH_BB_FN (bb, cfun)
4285 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4287 gimple *stmt = gsi_stmt (gsi);
4288 tree fndecl;
4289 enum built_in_function fcode;
4291 /* Find builtins returning first arg and replace
4292 them with assignments. */
/* Note the deliberate assignments inside the condition: FNDECL and
   FCODE are set as side effects and reused in later clauses.  */
4293 if (gimple_code (stmt) == GIMPLE_CALL
4294 && (fndecl = gimple_call_fndecl (stmt))
4295 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4296 && (fcode = DECL_FUNCTION_CODE (fndecl))
4297 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4298 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4299 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4300 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
/* Replace "lhs = builtin (arg0, ...)" with "lhs = arg0".  */
4302 tree res = gimple_call_arg (stmt, 0);
4303 update_call_from_tree (&gsi, res);
4304 stmt = gsi_stmt (gsi);
4305 update_stmt (stmt);
4311 /* Initialize pass. */
4312 static void
4313 chkp_init (void)
4315 basic_block bb;
4316 gimple_stmt_iterator i;
4318 in_chkp_pass = true;
/* Clear any stale "do not instrument" marks on all statements.  */
4320 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4321 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4322 chkp_unmark_stmt (gsi_stmt (i));
/* (Re)allocate the per-function bounds tables; maps that may survive
   from a previous function are deleted first.  */
4324 chkp_invalid_bounds = new hash_set<tree>;
4325 chkp_completed_bounds_set = new hash_set<tree>;
4326 delete chkp_reg_bounds;
4327 chkp_reg_bounds = new hash_map<tree, tree>;
4328 delete chkp_bound_vars;
4329 chkp_bound_vars = new hash_map<tree, tree>;
4330 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4331 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4332 delete chkp_bounds_map;
4333 chkp_bounds_map = new hash_map<tree, tree>;
4334 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
/* Reset cached per-function state.  */
4336 entry_block = NULL;
4337 zero_bounds = NULL_TREE;
4338 none_bounds = NULL_TREE;
4339 incomplete_bounds = integer_zero_node;
4340 tmp_var = NULL_TREE;
4341 size_tmp_var = NULL_TREE;
4343 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4345 /* We create these constant bounds once for each object file.
4346 These symbols go to comdat section and result in single copy
4347 of each one in the final binary. */
4348 chkp_get_zero_bounds_var ();
4349 chkp_get_none_bounds_var ();
4351 calculate_dominance_info (CDI_DOMINATORS);
4352 calculate_dominance_info (CDI_POST_DOMINATORS);
4354 bitmap_obstack_initialize (NULL);
4357 /* Finalize instrumentation pass. */
4358 static void
4359 chkp_fini (void)
4361 in_chkp_pass = false;
/* Release per-function tables allocated in chkp_init.  */
4363 delete chkp_invalid_bounds;
4364 delete chkp_completed_bounds_set;
4365 delete chkp_reg_addr_bounds;
4366 delete chkp_incomplete_bounds_map;
4368 free_dominance_info (CDI_DOMINATORS);
4369 free_dominance_info (CDI_POST_DOMINATORS);
4371 bitmap_obstack_release (NULL);
4373 entry_block = NULL;
4374 zero_bounds = NULL_TREE;
4375 none_bounds = NULL_TREE;
4378 /* Main instrumentation pass function. */
4379 static unsigned int
4380 chkp_execute (void)
/* Ordering matters: init state, instrument, simplify leftover
   builtins, mark the function done, repair the CFG, tear down.  */
4382 chkp_init ();
4384 chkp_instrument_function ();
4386 chkp_remove_useless_builtins ();
4388 chkp_function_mark_instrumented (cfun->decl);
4390 chkp_fix_cfg ();
4392 chkp_fini ();
4394 return 0;
4397 /* Instrumentation pass gate. */
4398 static bool
4399 chkp_gate (void)
4401 cgraph_node *node = cgraph_node::get (cfun->decl);
4402 return ((node != NULL
4403 && node->instrumentation_clone)
4404 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
4407 namespace {
/* Pass descriptor for the pointer bounds checker GIMPLE pass.  */
4409 const pass_data pass_data_chkp =
4411 GIMPLE_PASS, /* type */
4412 "chkp", /* name */
4413 OPTGROUP_NONE, /* optinfo_flags */
4414 TV_NONE, /* tv_id */
4415 PROP_ssa | PROP_cfg, /* properties_required */
4416 0, /* properties_provided */
4417 0, /* properties_destroyed */
4418 0, /* todo_flags_start */
4419 TODO_verify_il
4420 | TODO_update_ssa /* todo_flags_finish */
/* Thin opt_pass wrapper delegating to chkp_gate/chkp_execute.  */
4423 class pass_chkp : public gimple_opt_pass
4425 public:
4426 pass_chkp (gcc::context *ctxt)
4427 : gimple_opt_pass (pass_data_chkp, ctxt)
4430 /* opt_pass methods: */
4431 virtual opt_pass * clone ()
4433 return new pass_chkp (m_ctxt);
4436 virtual bool gate (function *)
4438 return chkp_gate ();
4441 virtual unsigned int execute (function *)
4443 return chkp_execute ();
4446 }; // class pass_chkp
4448 } // anon namespace
4450 gimple_opt_pass *
4451 make_pass_chkp (gcc::context *ctxt)
4453 return new pass_chkp (ctxt);
4456 #include "gt-tree-chkp.h"