[Patch AArch64 1/3] Enable CRC by default for armv8.1-a
[official-gcc.git] / gcc / tree-chkp.c
blob3fba12c2ae46ac07999b3710c6498c1af77e9f39
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2016 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "diagnostic.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "varasm.h"
37 #include "tree-iterator.h"
38 #include "tree-cfg.h"
39 #include "langhooks.h"
40 #include "tree-ssa-address.h"
41 #include "tree-ssa-loop-niter.h"
42 #include "gimple-pretty-print.h"
43 #include "gimple-iterator.h"
44 #include "gimplify.h"
45 #include "gimplify-me.h"
46 #include "print-tree.h"
47 #include "calls.h"
48 #include "expr.h"
49 #include "tree-ssa-propagate.h"
50 #include "tree-chkp.h"
51 #include "gimple-walk.h"
52 #include "tree-dfa.h"
53 #include "ipa-chkp.h"
54 #include "params.h"
56 /* Pointer Bounds Checker instruments code with memory checks to find
57 out-of-bounds memory accesses. Checks are performed by computing
58 bounds for each pointer and then comparing address of accessed
59 memory before pointer dereferencing.
61 1. Function clones.
63 See ipa-chkp.c.
65 2. Instrumentation.
67 There are a few things to instrument:
69 a) Memory accesses - add checker calls to check address of accessed memory
70 against bounds of dereferenced pointer. Obviously safe memory
71 accesses like static variable access does not have to be instrumented
72 with checks.
74 Example:
76 val_2 = *p_1;
78 with 4 bytes access is transformed into:
80 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
81 D.1_4 = p_1 + 3;
82 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
83 val_2 = *p_1;
85 where __bound_tmp.1_3 are bounds computed for pointer p_1,
86 __builtin___chkp_bndcl is a lower bound check and
87 __builtin___chkp_bndcu is an upper bound check.
89 b) Pointer stores.
91 When pointer is stored in memory we need to store its bounds. To
92 achieve compatibility of instrumented code with regular codes
93 we have to keep data layout and store bounds in special bound tables
94 via special checker call. Implementation of bounds table may vary for
95 different platforms. It has to associate pointer value and its
96 location (it is required because we may have two equal pointers
97 with different bounds stored in different places) with bounds.
98 Another checker builtin allows to get bounds for specified pointer
99 loaded from specified location.
101 Example:
103 buf1[i_1] = &buf2;
105 is transformed into:
107 buf1[i_1] = &buf2;
108 D.1_2 = &buf1[i_1];
109 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
111 where __bound_tmp.1_2 are bounds of &buf2.
113 c) Static initialization.
115 The special case of pointer store is static pointer initialization.
116 Bounds initialization is performed in a few steps:
117 - register all static initializations in front-end using
118 chkp_register_var_initializer
119 - when file compilation finishes we create functions with special
120 attribute 'chkp ctor' and put explicit initialization code
121 (assignments) for all statically initialized pointers.
122 - when checker constructor is compiled checker pass adds required
123 bounds initialization for all statically initialized pointers
124 - since we do not actually need excess pointers initialization
125 in checker constructor we remove such assignments from them
127 d) Calls.
129 For each call in the code we add additional arguments to pass
130 bounds for pointer arguments. We determine type of call arguments
131 using arguments list from function declaration; if function
132 declaration is not available we use function type; otherwise
133 (e.g. for unnamed arguments) we use type of passed value. Function
134 declaration/type is replaced with the instrumented one.
136 Example:
138 val_1 = foo (&buf1, &buf2, &buf1, 0);
140 is translated into:
142 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
143 &buf1, __bound_tmp.1_2, 0);
145 e) Returns.
147 If function returns a pointer value we have to return bounds also.
148 A new operand was added for return statement to hold returned bounds.
150 Example:
152 return &_buf1;
154 is transformed into
156 return &_buf1, __bound_tmp.1_1;
158 3. Bounds computation.
160 Compiler is fully responsible for computing bounds to be used for each
161 memory access. The first step for bounds computation is to find the
162 origin of pointer dereferenced for memory access. Based on pointer
163 origin we define a way to compute its bounds. There are just few
164 possible cases:
166 a) Pointer is returned by call.
168 In this case we use corresponding checker builtin method to obtain returned
169 bounds.
171 Example:
173 buf_1 = malloc (size_2);
174 foo (buf_1);
176 is translated into:
178 buf_1 = malloc (size_2);
179 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
180 foo (buf_1, __bound_tmp.1_3);
182 b) Pointer is an address of an object.
184 In this case compiler tries to compute objects size and create corresponding
185 bounds. If object has incomplete type then special checker builtin is used to
186 obtain its size at runtime.
188 Example:
190 foo ()
192 <unnamed type> __bound_tmp.3;
193 static int buf[100];
195 <bb 3>:
196 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
198 <bb 2>:
199 return &buf, __bound_tmp.3_2;
202 Example:
204 Address of an object 'extern int buf[]' with incomplete type is
205 returned.
207 foo ()
209 <unnamed type> __bound_tmp.4;
210 long unsigned int __size_tmp.3;
212 <bb 3>:
213 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
214 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
216 <bb 2>:
217 return &buf, __bound_tmp.4_3;
220 c) Pointer is the result of object narrowing.
222 It happens when we use pointer to an object to compute pointer to a part
223 of an object. E.g. we take pointer to a field of a structure. In this
224 case we perform bounds intersection using bounds of original object and
225 bounds of object's part (which are computed based on its type).
227 There may be some debatable questions about when narrowing should occur
228 and when it should not. To avoid false bound violations in correct
229 programs we do not perform narrowing when address of an array element is
230 obtained (it has address of the whole array) and when address of the first
231 structure field is obtained (because it is guaranteed to be equal to
232 address of the whole structure and it is legal to cast it back to structure).
234 Default narrowing behavior may be changed using compiler flags.
236 Example:
238 In this example address of the second structure field is returned.
240 foo (struct A * p, __bounds_type __bounds_of_p)
242 <unnamed type> __bound_tmp.3;
243 int * _2;
244 int * _5;
246 <bb 2>:
247 _5 = &p_1(D)->second_field;
248 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
249 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
250 __bounds_of_p_3(D));
251 _2 = &p_1(D)->second_field;
252 return _2, __bound_tmp.3_8;
255 Example:
257 In this example address of the first field of array element is returned.
259 foo (struct A * p, __bounds_type __bounds_of_p, int i)
261 long unsigned int _3;
262 long unsigned int _4;
263 struct A * _6;
264 int * _7;
266 <bb 2>:
267 _3 = (long unsigned int) i_1(D);
268 _4 = _3 * 8;
269 _6 = p_5(D) + _4;
270 _7 = &_6->first_field;
271 return _7, __bounds_of_p_2(D);
275 d) Pointer is the result of pointer arithmetic or type cast.
277 In this case bounds of the base pointer are used. In case of binary
278 operation producing a pointer we are analyzing data flow further
279 looking for operand's bounds. One operand is considered as a base
280 if it has some valid bounds. If we fall into a case when none of
281 operands (or both of them) has valid bounds, a default bounds value
282 is used.
284 Trying to find out bounds for binary operations we may fall into
285 cyclic dependencies for pointers. To avoid infinite recursion all
286 walked phi nodes instantly obtain corresponding bounds but created
287 bounds are marked as incomplete. It helps us to stop DF walk during
288 bounds search.
290 When we reach pointer source, some args of incomplete bounds phi obtain
291 valid bounds and those values are propagated further through phi nodes.
292 If no valid bounds were found for phi node then we mark its result as
293 invalid bounds. Process stops when all incomplete bounds become either
294 valid or invalid and we are able to choose a pointer base.
296 e) Pointer is loaded from the memory.
298 In this case we just need to load bounds from the bounds table.
300 Example:
302 foo ()
304 <unnamed type> __bound_tmp.3;
305 static int * buf;
306 int * _2;
308 <bb 2>:
309 _2 = buf;
310 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
311 return _2, __bound_tmp.3_4;
316 typedef void (*assign_handler)(tree, tree, void *);
318 static tree chkp_get_zero_bounds ();
319 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
320 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
321 gimple_stmt_iterator *iter);
322 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
323 tree *elt, bool *safe,
324 bool *bitfield,
325 tree *bounds,
326 gimple_stmt_iterator *iter,
327 bool innermost_bounds);
329 #define chkp_bndldx_fndecl \
330 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
331 #define chkp_bndstx_fndecl \
332 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
333 #define chkp_checkl_fndecl \
334 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
335 #define chkp_checku_fndecl \
336 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
337 #define chkp_bndmk_fndecl \
338 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
339 #define chkp_ret_bnd_fndecl \
340 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
341 #define chkp_intersect_fndecl \
342 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
343 #define chkp_narrow_bounds_fndecl \
344 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
345 #define chkp_sizeof_fndecl \
346 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
347 #define chkp_extract_lower_fndecl \
348 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
349 #define chkp_extract_upper_fndecl \
350 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
352 static GTY (()) tree chkp_uintptr_type;
354 static GTY (()) tree chkp_zero_bounds_var;
355 static GTY (()) tree chkp_none_bounds_var;
357 static GTY (()) basic_block entry_block;
358 static GTY (()) tree zero_bounds;
359 static GTY (()) tree none_bounds;
360 static GTY (()) tree incomplete_bounds;
361 static GTY (()) tree tmp_var;
362 static GTY (()) tree size_tmp_var;
363 static GTY (()) bitmap chkp_abnormal_copies;
365 struct hash_set<tree> *chkp_invalid_bounds;
366 struct hash_set<tree> *chkp_completed_bounds_set;
367 struct hash_map<tree, tree> *chkp_reg_bounds;
368 struct hash_map<tree, tree> *chkp_bound_vars;
369 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
370 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
371 struct hash_map<tree, tree> *chkp_bounds_map;
372 struct hash_map<tree, tree> *chkp_static_var_bounds;
374 static bool in_chkp_pass;
376 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
377 #define CHKP_SIZE_TMP_NAME "__size_tmp"
378 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
379 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
380 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
381 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
382 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
384 /* Static checker constructors may become very large and their
385 compilation with optimization may take too much time.
386 Therefore we put a limit to number of statements in one
387 constructor. Tests with 100 000 statically initialized
388 pointers showed following compilation times on Sandy Bridge
389 server (used -O2):
390 limit 100 => ~18 sec.
391 limit 300 => ~22 sec.
392 limit 1000 => ~30 sec.
393 limit 3000 => ~49 sec.
394 limit 5000 => ~55 sec.
395 limit 10000 => ~76 sec.
396 limit 100000 => ~532 sec. */
397 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
399 struct chkp_ctor_stmt_list
401 tree stmts;
402 int avail;
405 /* Return 1 if function FNDECL is instrumented by Pointer
406 Bounds Checker. */
407 bool
408 chkp_function_instrumented_p (tree fndecl)
410 return fndecl
411 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
414 /* Mark function FNDECL as instrumented. */
415 void
416 chkp_function_mark_instrumented (tree fndecl)
418 if (chkp_function_instrumented_p (fndecl))
419 return;
421 DECL_ATTRIBUTES (fndecl)
422 = tree_cons (get_identifier ("chkp instrumented"), NULL,
423 DECL_ATTRIBUTES (fndecl));
426 /* Return true when STMT is builtin call to instrumentation function
427 corresponding to CODE. */
429 bool
430 chkp_gimple_call_builtin_p (gimple *call,
431 enum built_in_function code)
433 tree fndecl;
434 if (is_gimple_call (call)
435 && (fndecl = targetm.builtin_chkp_function (code))
436 && gimple_call_fndecl (call) == fndecl)
437 return true;
438 return false;
441 /* Emit code to build zero bounds and return RTL holding
442 the result. */
444 chkp_expand_zero_bounds ()
446 tree zero_bnd;
448 if (flag_chkp_use_static_const_bounds)
449 zero_bnd = chkp_get_zero_bounds_var ();
450 else
451 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
452 integer_zero_node);
453 return expand_normal (zero_bnd);
456 /* Emit code to store zero bounds for PTR located at MEM. */
457 void
458 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
460 tree zero_bnd, bnd, addr, bndstx;
462 if (flag_chkp_use_static_const_bounds)
463 zero_bnd = chkp_get_zero_bounds_var ();
464 else
465 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
466 integer_zero_node);
467 bnd = make_tree (pointer_bounds_type_node,
468 assign_temp (pointer_bounds_type_node, 0, 1));
469 addr = build1 (ADDR_EXPR,
470 build_pointer_type (TREE_TYPE (mem)), mem);
471 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
473 expand_assignment (bnd, zero_bnd, false);
474 expand_normal (bndstx);
477 /* Build retbnd call for returned value RETVAL.
479 If BNDVAL is not NULL then result is stored
480 in it. Otherwise a temporary is created to
481 hold returned value.
483 GSI points to a position for a retbnd call
484 and is set to created stmt.
486 Cgraph edge is created for a new call if
487 UPDATE_EDGE is 1.
489 Obtained bounds are returned. */
490 tree
491 chkp_insert_retbnd_call (tree bndval, tree retval,
492 gimple_stmt_iterator *gsi)
494 gimple *call;
496 if (!bndval)
497 bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
499 call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
500 gimple_call_set_lhs (call, bndval);
501 gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
503 return bndval;
506 /* Build a GIMPLE_CALL identical to CALL but skipping bounds
507 arguments. */
509 gcall *
510 chkp_copy_call_skip_bounds (gcall *call)
512 bitmap bounds;
513 unsigned i;
515 bitmap_obstack_initialize (NULL);
516 bounds = BITMAP_ALLOC (NULL);
518 for (i = 0; i < gimple_call_num_args (call); i++)
519 if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
520 bitmap_set_bit (bounds, i);
522 if (!bitmap_empty_p (bounds))
523 call = gimple_call_copy_skip_args (call, bounds);
524 gimple_call_set_with_bounds (call, false);
526 BITMAP_FREE (bounds);
527 bitmap_obstack_release (NULL);
529 return call;
532 /* Redirect edge E to the correct node according to call_stmt.
533 Return 1 if bounds removal from call_stmt should be done
534 instead of redirection. */
536 bool
537 chkp_redirect_edge (cgraph_edge *e)
539 bool instrumented = false;
540 tree decl = e->callee->decl;
542 if (e->callee->instrumentation_clone
543 || chkp_function_instrumented_p (decl))
544 instrumented = true;
546 if (instrumented
547 && !gimple_call_with_bounds_p (e->call_stmt))
548 e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
549 else if (!instrumented
550 && gimple_call_with_bounds_p (e->call_stmt)
551 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
552 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
553 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
555 if (e->callee->instrumented_version)
556 e->redirect_callee (e->callee->instrumented_version);
557 else
559 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
560 /* Avoid bounds removal if all args will be removed. */
561 if (!args || TREE_VALUE (args) != void_type_node)
562 return true;
563 else
564 gimple_call_set_with_bounds (e->call_stmt, false);
568 return false;
571 /* Mark statement S to not be instrumented. */
572 static void
573 chkp_mark_stmt (gimple *s)
575 gimple_set_plf (s, GF_PLF_1, true);
578 /* Mark statement S to be instrumented. */
579 static void
580 chkp_unmark_stmt (gimple *s)
582 gimple_set_plf (s, GF_PLF_1, false);
585 /* Return 1 if statement S should not be instrumented. */
586 static bool
587 chkp_marked_stmt_p (gimple *s)
589 return gimple_plf (s, GF_PLF_1);
592 /* Get var to be used for bound temps. */
593 static tree
594 chkp_get_tmp_var (void)
596 if (!tmp_var)
597 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
599 return tmp_var;
602 /* Get SSA_NAME to be used as temp. */
603 static tree
604 chkp_get_tmp_reg (gimple *stmt)
606 if (in_chkp_pass)
607 return make_ssa_name (chkp_get_tmp_var (), stmt);
609 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
610 CHKP_BOUND_TMP_NAME);
613 /* Get var to be used for size temps. */
614 static tree
615 chkp_get_size_tmp_var (void)
617 if (!size_tmp_var)
618 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
620 return size_tmp_var;
623 /* Register bounds BND for address of OBJ. */
624 static void
625 chkp_register_addr_bounds (tree obj, tree bnd)
627 if (bnd == incomplete_bounds)
628 return;
630 chkp_reg_addr_bounds->put (obj, bnd);
632 if (dump_file && (dump_flags & TDF_DETAILS))
634 fprintf (dump_file, "Regsitered bound ");
635 print_generic_expr (dump_file, bnd, 0);
636 fprintf (dump_file, " for address of ");
637 print_generic_expr (dump_file, obj, 0);
638 fprintf (dump_file, "\n");
642 /* Return bounds registered for address of OBJ. */
643 static tree
644 chkp_get_registered_addr_bounds (tree obj)
646 tree *slot = chkp_reg_addr_bounds->get (obj);
647 return slot ? *slot : NULL_TREE;
650 /* Mark BOUNDS as completed. */
651 static void
652 chkp_mark_completed_bounds (tree bounds)
654 chkp_completed_bounds_set->add (bounds);
656 if (dump_file && (dump_flags & TDF_DETAILS))
658 fprintf (dump_file, "Marked bounds ");
659 print_generic_expr (dump_file, bounds, 0);
660 fprintf (dump_file, " as completed\n");
664 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
665 static bool
666 chkp_completed_bounds (tree bounds)
668 return chkp_completed_bounds_set->contains (bounds);
671 /* Clear comleted bound marks. */
672 static void
673 chkp_erase_completed_bounds (void)
675 delete chkp_completed_bounds_set;
676 chkp_completed_bounds_set = new hash_set<tree>;
679 /* Mark BOUNDS associated with PTR as incomplete. */
680 static void
681 chkp_register_incomplete_bounds (tree bounds, tree ptr)
683 chkp_incomplete_bounds_map->put (bounds, ptr);
685 if (dump_file && (dump_flags & TDF_DETAILS))
687 fprintf (dump_file, "Regsitered incomplete bounds ");
688 print_generic_expr (dump_file, bounds, 0);
689 fprintf (dump_file, " for ");
690 print_generic_expr (dump_file, ptr, 0);
691 fprintf (dump_file, "\n");
695 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
696 static bool
697 chkp_incomplete_bounds (tree bounds)
699 if (bounds == incomplete_bounds)
700 return true;
702 if (chkp_completed_bounds (bounds))
703 return false;
705 return chkp_incomplete_bounds_map->get (bounds) != NULL;
708 /* Clear incomleted bound marks. */
709 static void
710 chkp_erase_incomplete_bounds (void)
712 delete chkp_incomplete_bounds_map;
713 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
716 /* Build and return bndmk call which creates bounds for structure
717 pointed by PTR. Structure should have complete type. */
718 tree
719 chkp_make_bounds_for_struct_addr (tree ptr)
721 tree type = TREE_TYPE (ptr);
722 tree size;
724 gcc_assert (POINTER_TYPE_P (type));
726 size = TYPE_SIZE (TREE_TYPE (type));
728 gcc_assert (size);
730 return build_call_nary (pointer_bounds_type_node,
731 build_fold_addr_expr (chkp_bndmk_fndecl),
732 2, ptr, size);
735 /* Traversal function for chkp_may_finish_incomplete_bounds.
736 Set RES to 0 if at least one argument of phi statement
737 defining bounds (passed in KEY arg) is unknown.
738 Traversal stops when first unknown phi argument is found. */
739 bool
740 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
741 bool *res)
743 gimple *phi;
744 unsigned i;
746 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
748 phi = SSA_NAME_DEF_STMT (bounds);
750 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
752 for (i = 0; i < gimple_phi_num_args (phi); i++)
754 tree phi_arg = gimple_phi_arg_def (phi, i);
755 if (!phi_arg)
757 *res = false;
758 /* Do not need to traverse further. */
759 return false;
763 return true;
766 /* Return 1 if all phi nodes created for bounds have their
767 arguments computed. */
768 static bool
769 chkp_may_finish_incomplete_bounds (void)
771 bool res = true;
773 chkp_incomplete_bounds_map
774 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
776 return res;
779 /* Helper function for chkp_finish_incomplete_bounds.
780 Recompute args for bounds phi node. */
781 bool
782 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
783 void *res ATTRIBUTE_UNUSED)
785 tree ptr = *slot;
786 gphi *bounds_phi;
787 gphi *ptr_phi;
788 unsigned i;
790 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
791 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
793 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
794 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
796 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
798 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
799 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
801 add_phi_arg (bounds_phi, bound_arg,
802 gimple_phi_arg_edge (ptr_phi, i),
803 UNKNOWN_LOCATION);
806 return true;
809 /* Mark BOUNDS as invalid. */
810 static void
811 chkp_mark_invalid_bounds (tree bounds)
813 chkp_invalid_bounds->add (bounds);
815 if (dump_file && (dump_flags & TDF_DETAILS))
817 fprintf (dump_file, "Marked bounds ");
818 print_generic_expr (dump_file, bounds, 0);
819 fprintf (dump_file, " as invalid\n");
823 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
824 static bool
825 chkp_valid_bounds (tree bounds)
827 if (bounds == zero_bounds || bounds == none_bounds)
828 return false;
830 return !chkp_invalid_bounds->contains (bounds);
833 /* Helper function for chkp_finish_incomplete_bounds.
834 Check all arguments of phi nodes trying to find
835 valid completed bounds. If there is at least one
836 such arg then bounds produced by phi node are marked
837 as valid completed bounds and all phi args are
838 recomputed. */
839 bool
840 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
842 gimple *phi;
843 unsigned i;
845 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
847 if (chkp_completed_bounds (bounds))
848 return true;
850 phi = SSA_NAME_DEF_STMT (bounds);
852 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
854 for (i = 0; i < gimple_phi_num_args (phi); i++)
856 tree phi_arg = gimple_phi_arg_def (phi, i);
858 gcc_assert (phi_arg);
860 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
862 *res = true;
863 chkp_mark_completed_bounds (bounds);
864 chkp_recompute_phi_bounds (bounds, slot, NULL);
865 return true;
869 return true;
872 /* Helper function for chkp_finish_incomplete_bounds.
873 Marks all incompleted bounds as invalid. */
874 bool
875 chkp_mark_invalid_bounds_walker (tree const &bounds,
876 tree *slot ATTRIBUTE_UNUSED,
877 void *res ATTRIBUTE_UNUSED)
879 if (!chkp_completed_bounds (bounds))
881 chkp_mark_invalid_bounds (bounds);
882 chkp_mark_completed_bounds (bounds);
884 return true;
887 /* When all bound phi nodes have all their args computed
888 we have enough info to find valid bounds. We iterate
889 through all incompleted bounds searching for valid
890 bounds. Found valid bounds are marked as completed
891 and all remaining incompleted bounds are recomputed.
892 Process continues until no new valid bounds may be
893 found. All remained incompleted bounds are marked as
894 invalid (i.e. have no valid source of bounds). */
895 static void
896 chkp_finish_incomplete_bounds (void)
898 bool found_valid = true;
900 while (found_valid)
902 found_valid = false;
904 chkp_incomplete_bounds_map->
905 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
907 if (found_valid)
908 chkp_incomplete_bounds_map->
909 traverse<void *, chkp_recompute_phi_bounds> (NULL);
912 chkp_incomplete_bounds_map->
913 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
914 chkp_incomplete_bounds_map->
915 traverse<void *, chkp_recompute_phi_bounds> (NULL);
917 chkp_erase_completed_bounds ();
918 chkp_erase_incomplete_bounds ();
921 /* Return 1 if type TYPE is a pointer type or a
922 structure having a pointer type as one of its fields.
923 Otherwise return 0. */
924 bool
925 chkp_type_has_pointer (const_tree type)
927 bool res = false;
929 if (BOUNDED_TYPE_P (type))
930 res = true;
931 else if (RECORD_OR_UNION_TYPE_P (type))
933 tree field;
935 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
936 if (TREE_CODE (field) == FIELD_DECL)
937 res = res || chkp_type_has_pointer (TREE_TYPE (field));
939 else if (TREE_CODE (type) == ARRAY_TYPE)
940 res = chkp_type_has_pointer (TREE_TYPE (type));
942 return res;
945 unsigned
946 chkp_type_bounds_count (const_tree type)
948 unsigned res = 0;
950 if (!type)
951 res = 0;
952 else if (BOUNDED_TYPE_P (type))
953 res = 1;
954 else if (RECORD_OR_UNION_TYPE_P (type))
956 bitmap have_bound;
958 bitmap_obstack_initialize (NULL);
959 have_bound = BITMAP_ALLOC (NULL);
960 chkp_find_bound_slots (type, have_bound);
961 res = bitmap_count_bits (have_bound);
962 BITMAP_FREE (have_bound);
963 bitmap_obstack_release (NULL);
966 return res;
969 /* Get bounds associated with NODE via
970 chkp_set_bounds call. */
971 tree
972 chkp_get_bounds (tree node)
974 tree *slot;
976 if (!chkp_bounds_map)
977 return NULL_TREE;
979 slot = chkp_bounds_map->get (node);
980 return slot ? *slot : NULL_TREE;
983 /* Associate bounds VAL with NODE. */
984 void
985 chkp_set_bounds (tree node, tree val)
987 if (!chkp_bounds_map)
988 chkp_bounds_map = new hash_map<tree, tree>;
990 chkp_bounds_map->put (node, val);
993 /* Check if statically initialized variable VAR require
994 static bounds initialization. If VAR is added into
995 bounds initlization list then 1 is returned. Otherwise
996 return 0. */
997 extern bool
998 chkp_register_var_initializer (tree var)
1000 if (!flag_check_pointer_bounds
1001 || DECL_INITIAL (var) == error_mark_node)
1002 return false;
1004 gcc_assert (TREE_CODE (var) == VAR_DECL);
1005 gcc_assert (DECL_INITIAL (var));
1007 if (TREE_STATIC (var)
1008 && chkp_type_has_pointer (TREE_TYPE (var)))
1010 varpool_node::get_create (var)->need_bounds_init = 1;
1011 return true;
1014 return false;
1017 /* Helper function for chkp_finish_file.
1019 Add new modification statement (RHS is assigned to LHS)
1020 into list of static initializer statementes (passed in ARG).
1021 If statements list becomes too big, emit checker constructor
1022 and start the new one. */
1023 static void
1024 chkp_add_modification_to_stmt_list (tree lhs,
1025 tree rhs,
1026 void *arg)
1028 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
1029 tree modify;
1031 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1032 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1034 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1035 append_to_statement_list (modify, &stmts->stmts);
1037 stmts->avail--;
1040 /* Build and return ADDR_EXPR for specified object OBJ. */
1041 static tree
1042 chkp_build_addr_expr (tree obj)
1044 return TREE_CODE (obj) == TARGET_MEM_REF
1045 ? tree_mem_ref_addr (ptr_type_node, obj)
1046 : build_fold_addr_expr (obj);
1049 /* Helper function for chkp_finish_file.
1050 Initialize bound variable BND_VAR with bounds of variable
1051 VAR to statements list STMTS. If statements list becomes
1052 too big, emit checker constructor and start the new one. */
1053 static void
1054 chkp_output_static_bounds (tree bnd_var, tree var,
1055 struct chkp_ctor_stmt_list *stmts)
1057 tree lb, ub, size;
1059 if (TREE_CODE (var) == STRING_CST)
1061 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1062 size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
1064 else if (DECL_SIZE (var)
1065 && !chkp_variable_size_type (TREE_TYPE (var)))
1067 /* Compute bounds using statically known size. */
1068 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1069 size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
1071 else
1073 /* Compute bounds using dynamic size. */
1074 tree call;
1076 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1077 call = build1 (ADDR_EXPR,
1078 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
1079 chkp_sizeof_fndecl);
1080 size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
1081 call, 1, var);
1083 if (flag_chkp_zero_dynamic_size_as_infinite)
1085 tree max_size, cond;
1087 max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
1088 cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
1089 size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
1092 size = size_binop (MINUS_EXPR, size, size_one_node);
1095 ub = size_binop (PLUS_EXPR, lb, size);
1096 stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
1097 &stmts->stmts);
1098 if (stmts->avail <= 0)
1100 cgraph_build_static_cdtor ('B', stmts->stmts,
1101 MAX_RESERVED_INIT_PRIORITY + 2);
1102 stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
1103 stmts->stmts = NULL;
1107 /* Return entry block to be used for checker initilization code.
1108 Create new block if required. */
1109 static basic_block
1110 chkp_get_entry_block (void)
1112 if (!entry_block)
1113 entry_block
1114 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1116 return entry_block;
1119 /* Return a bounds var to be used for pointer var PTR_VAR. */
1120 static tree
1121 chkp_get_bounds_var (tree ptr_var)
1123 tree bnd_var;
1124 tree *slot;
1126 slot = chkp_bound_vars->get (ptr_var);
1127 if (slot)
1128 bnd_var = *slot;
1129 else
1131 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1132 CHKP_BOUND_TMP_NAME);
1133 chkp_bound_vars->put (ptr_var, bnd_var);
1136 return bnd_var;
/* If BND is an abnormal bounds copy, return a copied value.
   Otherwise return BND.

   Abnormal copies are plain assignments recorded in
   CHKP_ABNORMAL_COPIES (see chkp_maybe_copy_and_register_bounds),
   so the original value is simply the RHS of the defining
   assignment.  NOTE: the function name misspells "original"; it is
   kept as-is because callers reference it by this name.  */
static tree
chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
{
  if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
    {
      gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
      /* Abnormal copies are always created as GIMPLE assignments.  */
      gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
      bnd = gimple_assign_rhs1 (bnd_def);
    }

  return bnd;
}
1154 /* Register bounds BND for object PTR in global bounds table.
1155 A copy of bounds may be created for abnormal ssa names.
1156 Returns bounds to use for PTR. */
1157 static tree
1158 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1160 bool abnormal_ptr;
1162 if (!chkp_reg_bounds)
1163 return bnd;
1165 /* Do nothing if bounds are incomplete_bounds
1166 because it means bounds will be recomputed. */
1167 if (bnd == incomplete_bounds)
1168 return bnd;
1170 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1171 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1172 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1174 /* A single bounds value may be reused multiple times for
1175 different pointer values. It may cause coalescing issues
1176 for abnormal SSA names. To avoid it we create a bounds
1177 copy in case it is computed for abnormal SSA name.
1179 We also cannot reuse such created copies for other pointers */
1180 if (abnormal_ptr
1181 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1183 tree bnd_var = NULL_TREE;
1185 if (abnormal_ptr)
1187 if (SSA_NAME_VAR (ptr))
1188 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1190 else
1191 bnd_var = chkp_get_tmp_var ();
1193 /* For abnormal copies we may just find original
1194 bounds and use them. */
1195 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1196 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1197 /* For undefined values we usually use none bounds
1198 value but in case of abnormal edge it may cause
1199 coalescing failures. Use default definition of
1200 bounds variable instead to avoid it. */
1201 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1202 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1204 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1206 if (dump_file && (dump_flags & TDF_DETAILS))
1208 fprintf (dump_file, "Using default def bounds ");
1209 print_generic_expr (dump_file, bnd, 0);
1210 fprintf (dump_file, " for abnormal default def SSA name ");
1211 print_generic_expr (dump_file, ptr, 0);
1212 fprintf (dump_file, "\n");
1215 else
1217 tree copy;
1218 gimple *def = SSA_NAME_DEF_STMT (ptr);
1219 gimple *assign;
1220 gimple_stmt_iterator gsi;
1222 if (bnd_var)
1223 copy = make_ssa_name (bnd_var);
1224 else
1225 copy = make_temp_ssa_name (pointer_bounds_type_node,
1226 NULL,
1227 CHKP_BOUND_TMP_NAME);
1228 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1229 assign = gimple_build_assign (copy, bnd);
1231 if (dump_file && (dump_flags & TDF_DETAILS))
1233 fprintf (dump_file, "Creating a copy of bounds ");
1234 print_generic_expr (dump_file, bnd, 0);
1235 fprintf (dump_file, " for abnormal SSA name ");
1236 print_generic_expr (dump_file, ptr, 0);
1237 fprintf (dump_file, "\n");
1240 if (gimple_code (def) == GIMPLE_NOP)
1242 gsi = gsi_last_bb (chkp_get_entry_block ());
1243 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1244 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1245 else
1246 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1248 else
1250 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1251 /* Sometimes (e.g. when we load a pointer from a
1252 memory) bounds are produced later than a pointer.
1253 We need to insert bounds copy appropriately. */
1254 if (gimple_code (bnd_def) != GIMPLE_NOP
1255 && stmt_dominates_stmt_p (def, bnd_def))
1256 gsi = gsi_for_stmt (bnd_def);
1257 else
1258 gsi = gsi_for_stmt (def);
1259 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1262 bnd = copy;
1265 if (abnormal_ptr)
1266 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1269 chkp_reg_bounds->put (ptr, bnd);
1271 if (dump_file && (dump_flags & TDF_DETAILS))
1273 fprintf (dump_file, "Regsitered bound ");
1274 print_generic_expr (dump_file, bnd, 0);
1275 fprintf (dump_file, " for pointer ");
1276 print_generic_expr (dump_file, ptr, 0);
1277 fprintf (dump_file, "\n");
1280 return bnd;
1283 /* Get bounds registered for object PTR in global bounds table. */
1284 static tree
1285 chkp_get_registered_bounds (tree ptr)
1287 tree *slot;
1289 if (!chkp_reg_bounds)
1290 return NULL_TREE;
1292 slot = chkp_reg_bounds->get (ptr);
1293 return slot ? *slot : NULL_TREE;
/* Add bound retvals to return statement pointed by GSI.

   Only acts when the function's DECL_RESULT is a bounded (pointer)
   value; the bounds for the returned value are looked up/created and
   attached to the GIMPLE_RETURN via its retbnd operand.  */
static void
chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
{
  greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
  tree retval = gimple_return_retval (ret);
  tree ret_decl = DECL_RESULT (cfun->decl);
  tree bounds;

  /* Nothing to do for a bare "return;".  */
  if (!retval)
    return;

  if (BOUNDED_P (ret_decl))
    {
      bounds = chkp_find_bounds (retval, gsi);
      /* Register against RET_DECL so all returns of this function
	 agree on the bounds value (may create an abnormal copy).  */
      bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
      gimple_return_set_retbnd (ret, bounds);
    }

  update_stmt (ret);
}
1319 /* Force OP to be suitable for using as an argument for call.
1320 New statements (if any) go to SEQ. */
1321 static tree
1322 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1324 gimple_seq stmts;
1325 gimple_stmt_iterator si;
1327 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1329 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1330 chkp_mark_stmt (gsi_stmt (si));
1332 gimple_seq_add_seq (seq, stmts);
1334 return op;
/* Generate lower bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store
   (integer_zero_node = read, integer_one_node = write).

   The check is a call to the checker's bndcl builtin and is
   suppressed when checking of the given access kind is disabled or
   when the bounds are known to be zero bounds in a
   non-instrumented function.  */
static void
chkp_check_lower (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* ADDR may need gimplification before it can be a call argument.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
/* Generate upper bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store
   (integer_zero_node = read, integer_one_node = write).

   Mirror image of chkp_check_lower, differing only in the builtin
   called (bndcu instead of bndcl); keep the two in sync.  */
static void
chkp_check_upper (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* ADDR may need gimplification before it can be a call argument.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
/* Replace call to _bnd_chk_* pointed by GSI with
   bndcu and bndcl calls.  DIRFLAG determines whether
   check is for read or write.

   BUILT_IN_CHKP_CHECK_PTR_BOUNDS carries an extra size argument:
   both the lower bound of the range and its last byte
   (addr + size - 1) are checked.  The original builtin call is
   removed afterwards.  */
void
chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
				    tree dirflag)
{
  gimple_stmt_iterator call_iter = *gsi;
  gimple *call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
    chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    {
      tree size = gimple_call_arg (call, 1);
      /* Check the last byte of the accessed range: addr + size - 1.  */
      addr = fold_build_pointer_plus (addr, size);
      addr = fold_build_pointer_plus_hwi (addr, -1);
      chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
    }

  gsi_remove (&call_iter, true);
}
/* Replace call to _bnd_get_ptr_* pointed by GSI with
   corresponding bounds extract call.

   The user-level builtin takes a pointer; the internal extract
   builtin takes the pointer's bounds instead, so the bounds are
   looked up first.  The replacement keeps the original LHS.  */
void
chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
{
  gimple *call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);
  gimple *extract;

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
    fndecl = chkp_extract_lower_fndecl;
  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
    fndecl = chkp_extract_upper_fndecl;
  else
    gcc_unreachable ();

  extract = gimple_build_call (fndecl, 1, bounds);
  gimple_call_set_lhs (extract, gimple_call_lhs (call));
  chkp_mark_stmt (extract);

  gsi_replace (gsi, extract, false);
}
/* Return COMPONENT_REF accessing FIELD in OBJ.  */
static tree
chkp_build_component_ref (tree obj, tree field)
{
  tree res;

  /* If object is TMR then we do not use component_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (obj) == TARGET_MEM_REF)
    {
      /* Fold the field offset into the TMR's constant offset.  */
      tree offs = TMR_OFFSET (obj);
      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, DECL_FIELD_OFFSET (field));
      gcc_assert (offs);

      res = copy_node (obj);
      TREE_TYPE (res) = TREE_TYPE (field);
      TMR_OFFSET (res) = offs;
    }
  else
    res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);

  return res;
}
/* Return ARRAY_REF for array ARR and index IDX with
   specified element type ETYPE and element size ESIZE.  */
static tree
chkp_build_array_ref (tree arr, tree etype, tree esize,
		      unsigned HOST_WIDE_INT idx)
{
  tree index = build_int_cst (size_type_node, idx);
  tree res;

  /* If object is TMR then we do not use array_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (arr) == TARGET_MEM_REF)
    {
      /* Fold idx * esize into the TMR's constant offset.  */
      tree offs = TMR_OFFSET (arr);

      esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
				       esize, index);
      gcc_assert(esize);

      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, esize);
      gcc_assert (offs);

      res = copy_node (arr);
      TREE_TYPE (res) = etype;
      TMR_OFFSET (res) = offs;
    }
  else
    res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);

  return res;
}
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   ITER points a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.

   Recurses through structures/unions and fixed-size arrays;
   each pointer found is first copied into a fresh SSA temporary
   so its bounds can be looked up.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      /* Only compute bounds once per slot.  */
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  tree temp = make_temp_ssa_name (type, NULL, "");
	  gimple *assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    /* Field offset in bits: bit offset plus byte offset * 8.  */
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;

	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays with no (or empty) domain.  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.

   Structural twin of chkp_find_bounds_for_elem, but only records
   which slots need bounds, without creating any statements.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    /* Field offset in bits: bit offset plus byte offset * 8.  */
	    HOST_WIDE_INT field_offs = 0;
	    if (DECL_FIELD_BIT_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      /* The object type is an array of complete type, i.e., other
	 than a flexible array.  */
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
/* Fill bitmap RES with information about bounds for
   type TYPE.  See chkp_find_bound_slots_1 for more
   details.  RES is cleared first, so stale bits from a previous
   query cannot leak through.  */
void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}
1696 /* Return 1 if call to FNDECL should be instrumented
1697 and 0 otherwise. */
1699 static bool
1700 chkp_instrument_normal_builtin (tree fndecl)
1702 switch (DECL_FUNCTION_CODE (fndecl))
1704 case BUILT_IN_STRLEN:
1705 case BUILT_IN_STRCPY:
1706 case BUILT_IN_STRNCPY:
1707 case BUILT_IN_STPCPY:
1708 case BUILT_IN_STPNCPY:
1709 case BUILT_IN_STRCAT:
1710 case BUILT_IN_STRNCAT:
1711 case BUILT_IN_MEMCPY:
1712 case BUILT_IN_MEMPCPY:
1713 case BUILT_IN_MEMSET:
1714 case BUILT_IN_MEMMOVE:
1715 case BUILT_IN_BZERO:
1716 case BUILT_IN_STRCMP:
1717 case BUILT_IN_STRNCMP:
1718 case BUILT_IN_BCMP:
1719 case BUILT_IN_MEMCMP:
1720 case BUILT_IN_MEMCPY_CHK:
1721 case BUILT_IN_MEMPCPY_CHK:
1722 case BUILT_IN_MEMMOVE_CHK:
1723 case BUILT_IN_MEMSET_CHK:
1724 case BUILT_IN_STRCPY_CHK:
1725 case BUILT_IN_STRNCPY_CHK:
1726 case BUILT_IN_STPCPY_CHK:
1727 case BUILT_IN_STPNCPY_CHK:
1728 case BUILT_IN_STRCAT_CHK:
1729 case BUILT_IN_STRNCAT_CHK:
1730 case BUILT_IN_MALLOC:
1731 case BUILT_IN_CALLOC:
1732 case BUILT_IN_REALLOC:
1733 return 1;
1735 default:
1736 return 0;
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.

   The function first filters out calls that must not be touched
   (internal fns, back-end builtins, non-instrumentable functions),
   then rewrites the user-visible checker builtins, and finally
   inserts bounds arguments after each pointer-carrying argument of
   ordinary calls, replacing the call statement if the argument list
   grew.  */
static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  */
      if (arg)
	{
	  if (use_fntype)
	    {
	      if (TREE_VALUE (arg) != void_type_node)
		{
		  type = TREE_VALUE (arg);
		  arg = TREE_CHAIN (arg);
		}
	      else
		type = TREE_TYPE (call_arg);
	    }
	  else
	    {
	      type = TREE_TYPE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	}
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  /* Aggregate passed by value: collect bounds for each
	     pointer slot inside it.  */
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.

   The variable is public and one-only so that with LTO multiple
   translation units share a single copy; an existing varpool symbol
   with the same assembler name is reused if it is a bounds
   variable, otherwise compilation is rejected with sorry().  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires '%s' "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
/* Generate code to make bounds with specified lower bound LB and SIZE.
   if AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.

   The generated code is a call to the checker's bndmk builtin whose
   LHS (a fresh bounds temporary) is returned.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* LB and SIZE may need gimplification before use as call args.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER only applies when an explicit insertion point was given.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
2060 /* Return var holding zero bounds. */
2061 tree
2062 chkp_get_zero_bounds_var (void)
2064 if (!chkp_zero_bounds_var)
2065 chkp_zero_bounds_var
2066 = chkp_make_static_const_bounds (0, -1,
2067 CHKP_ZERO_BOUNDS_VAR_NAME);
2068 return chkp_zero_bounds_var;
2071 /* Return var holding none bounds. */
2072 tree
2073 chkp_get_none_bounds_var (void)
2075 if (!chkp_none_bounds_var)
2076 chkp_none_bounds_var
2077 = chkp_make_static_const_bounds (-1, 0,
2078 CHKP_NONE_BOUNDS_VAR_NAME);
2079 return chkp_none_bounds_var;
/* Return SSA_NAME used to represent zero bounds.  The value is
   computed once per function (cached in ZERO_BOUNDS): either loaded
   from the static zero-bounds variable or built with a bndmk call in
   the entry block, depending on the static-bounds flags.  */
static tree
chkp_get_zero_bounds (void)
{
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      zero_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* bndmk with lb = 0, size = 0 gives the [0, -1] bounds.  */
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
/* Return SSA_NAME used to represent none bounds.  The value is
   computed once per function (cached in NONE_BOUNDS): either loaded
   from the static none-bounds variable or built with a bndmk call in
   the entry block, depending on the static-bounds flags.  */
static tree
chkp_get_none_bounds (void)
{
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      none_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* bndmk with lb = -1, size = 2 gives the [-1, 0] bounds.  */
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
/* Return bounds to be used as a result of operation which
   should not create pointer (e.g. MULT_EXPR).  Currently maps to
   zero bounds so such results are never usable for valid accesses.  */
static tree
chkp_get_invalid_op_bounds (void)
{
  return chkp_get_zero_bounds ();
}
/* Return bounds to be used for loads of non-pointer values.
   Currently maps to zero bounds.  */
static tree
chkp_get_nonpointer_load_bounds (void)
{
  return chkp_get_zero_bounds ();
}
/* Return 1 if may use bndret call to get bounds for pointer
   returned by CALL.

   True for IFN_VA_ARG, the narrowing builtins, calls already
   marked with-bounds, instrumented normal builtins, and any
   non-builtin call to an instrumentable function; false for
   back-end builtins, non-instrumentable functions, and normal
   builtins outside the instrumented subset (unless they are
   always_inline with an instrumented clone body).  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  if (gimple_call_internal_p (call))
    {
      /* Only IFN_VA_ARG among internal functions produces bounds.  */
      if (gimple_call_internal_fn (call) == IFN_VA_ARG)
	return true;
      return false;
    }

  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      /* always_inline builtins get bounds only if an instrumented
	 clone with a body exists.  */
      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  return true;
}
2198 /* Build bounds returned by CALL. */
/* Compute and register the bounds value associated with the pointer
   returned by CALL.  Bounds are statically known for alloca and the
   chkp set/init/null/copy builtins and for calls whose return value
   is one of the arguments (ERF_RETURNS_ARG); otherwise a call to
   chkp_ret_bnd_fndecl is emitted right after CALL to materialize the
   returned bounds.  Returns the bounds tree.  */
2199 static tree
2200 chkp_build_returned_bound (gcall *call)
2202 gimple_stmt_iterator gsi;
2203 tree bounds;
2204 gimple *stmt;
2205 tree fndecl = gimple_call_fndecl (call);
2206 unsigned int retflags;
2208 /* To avoid fixing alloca expands in targets we handle
2209 it separately. */
2210 if (fndecl
2211 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2212 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
2213 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
/* Bounds of an alloca result are [lhs, lhs + size).  */
2215 tree size = gimple_call_arg (call, 0);
2216 tree lb = gimple_call_lhs (call);
2217 gimple_stmt_iterator iter = gsi_for_stmt (call);
2218 bounds = chkp_make_bounds (lb, size, &iter, true);
2220 /* We know bounds returned by set_bounds builtin call. */
2221 else if (fndecl
2222 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2223 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
2225 tree lb = gimple_call_arg (call, 0);
2226 tree size = gimple_call_arg (call, 1);
2227 gimple_stmt_iterator iter = gsi_for_stmt (call);
2228 bounds = chkp_make_bounds (lb, size, &iter, true);
2230 /* Detect bounds initialization calls. */
2231 else if (fndecl
2232 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2233 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
2234 bounds = chkp_get_zero_bounds ();
2235 /* Detect bounds nullification calls. */
2236 else if (fndecl
2237 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2238 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
2239 bounds = chkp_get_none_bounds ();
2240 /* Detect bounds copy calls. */
2241 else if (fndecl
2242 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2243 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
2245 gimple_stmt_iterator iter = gsi_for_stmt (call);
2246 bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
2248 /* Do not use retbnd when returned bounds are equal to some
2249 of passed bounds. */
2250 else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
2251 && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
2253 gimple_stmt_iterator iter = gsi_for_stmt (call);
2254 unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
2255 if (gimple_call_with_bounds_p (call))
/* Instrumented calls carry extra bound arguments; translate the
   returned-arg index so it counts only non-bounds arguments.  */
2257 for (argno = 0; argno < gimple_call_num_args (call); argno++)
2258 if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
2260 if (retarg)
2261 retarg--;
2262 else
2263 break;
2266 else
2267 argno = retarg;
2269 bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
2271 else if (chkp_call_returns_bounds_p (call))
2273 gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);
2275 /* In general case build checker builtin call to
2276 obtain returned bounds. */
2277 stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
2278 gimple_call_lhs (call));
2279 chkp_mark_stmt (stmt);
2281 gsi = gsi_for_stmt (call);
2282 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
2284 bounds = chkp_get_tmp_reg (stmt);
2285 gimple_call_set_lhs (stmt, bounds);
2287 update_stmt (stmt);
2289 else
/* The call is known not to return bounds; use zero bounds.  */
2290 bounds = chkp_get_zero_bounds ();
2292 if (dump_file && (dump_flags & TDF_DETAILS))
2294 fprintf (dump_file, "Built returned bounds (");
2295 print_generic_expr (dump_file, bounds, 0);
2296 fprintf (dump_file, ") for call: ");
2297 print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
/* Remember the association between the call's LHS and its bounds.  */
2300 bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);
2302 return bounds;
2305 /* Return bounds used as returned by call
2306 which produced SSA name VAL. */
2307 gcall *
2308 chkp_retbnd_call_by_val (tree val)
2310 if (TREE_CODE (val) != SSA_NAME)
2311 return NULL;
2313 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2315 imm_use_iterator use_iter;
2316 use_operand_p use_p;
2317 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2318 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2319 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2320 return as_a <gcall *> (USE_STMT (use_p));
2322 return NULL;
2325 /* Check the next parameter for the given PARM is bounds
2326 and return it's default SSA_NAME (create if required). */
2327 static tree
2328 chkp_get_next_bounds_parm (tree parm)
2330 tree bounds = TREE_CHAIN (parm);
2331 gcc_assert (POINTER_BOUNDS_P (bounds));
2332 bounds = ssa_default_def (cfun, bounds);
2333 if (!bounds)
2335 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2336 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2338 return bounds;
2341 /* Return bounds to be used for input argument PARM. */
/* PARM is the default-definition SSA name of a PARM_DECL.  Looks up
   previously registered bounds first; otherwise derives them from the
   parameter kind (static chain, "main" args, bounded parm with a
   following bounds parameter) and registers the result for PARM.  */
2342 static tree
2343 chkp_get_bound_for_parm (tree parm)
2345 tree decl = SSA_NAME_VAR (parm);
2346 tree bounds;
2348 gcc_assert (TREE_CODE (decl) == PARM_DECL);
2350 bounds = chkp_get_registered_bounds (parm);
2352 if (!bounds)
2353 bounds = chkp_get_registered_bounds (decl);
2355 if (!bounds)
/* orig_decl is the pre-instrumentation decl; its assembler name is
   compared against "main" below.  */
2357 tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;
2359 /* For static chain param we return zero bounds
2360 because currently we do not check dereferences
2361 of this pointer. */
2362 if (cfun->static_chain_decl == decl)
2363 bounds = chkp_get_zero_bounds ();
2364 /* If non instrumented runtime is used then it may be useful
2365 to use zero bounds for input arguments of main
2366 function. */
2367 else if (flag_chkp_zero_input_bounds_for_main
2368 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
2369 "main") == 0)
2370 bounds = chkp_get_zero_bounds ();
2371 else if (BOUNDED_P (parm))
/* Bounds arrive in the bounds parameter following DECL.  */
2373 bounds = chkp_get_next_bounds_parm (decl);
2374 bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);
2376 if (dump_file && (dump_flags & TDF_DETAILS))
2378 fprintf (dump_file, "Built arg bounds (");
2379 print_generic_expr (dump_file, bounds, 0);
2380 fprintf (dump_file, ") for arg: ");
2381 print_node (dump_file, "", decl, 0);
2384 else
/* Non-pointer parameter: nothing to check.  */
2385 bounds = chkp_get_zero_bounds ();
2388 if (!chkp_get_registered_bounds (parm))
2389 bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);
2391 if (dump_file && (dump_flags & TDF_DETAILS))
2393 fprintf (dump_file, "Using bounds ");
2394 print_generic_expr (dump_file, bounds, 0);
2395 fprintf (dump_file, " for parm ");
2396 print_generic_expr (dump_file, parm, 0);
2397 fprintf (dump_file, " of type ");
2398 print_generic_expr (dump_file, TREE_TYPE (parm), 0);
2399 fprintf (dump_file, ".\n");
2402 return bounds;
2405 /* Build and return CALL_EXPR for bndldx builtin loading bounds
2406 of pointer PTR stored by address ADDR. */
2407 tree
2408 chkp_build_bndldx_call (tree addr, tree ptr)
2410 tree fn = build1 (ADDR_EXPR,
2411 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2412 chkp_bndldx_fndecl);
2413 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2414 fn, 2, addr, ptr);
/* Mark the call so instrumentation does not process it again.  */
2415 CALL_WITH_BOUNDS_P (call) = true;
2416 return call;
2419 /* Insert code to load bounds for PTR located by ADDR.
2420 Code is inserted after position pointed by GSI.
2421 Loaded bounds are returned. */
2422 static tree
2423 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2425 gimple_seq seq;
2426 gimple *stmt;
2427 tree bounds;
2429 seq = NULL;
2431 addr = chkp_force_gimple_call_op (addr, &seq);
2432 ptr = chkp_force_gimple_call_op (ptr, &seq);
2434 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2435 chkp_mark_stmt (stmt);
2436 bounds = chkp_get_tmp_reg (stmt);
2437 gimple_call_set_lhs (stmt, bounds);
2439 gimple_seq_add_stmt (&seq, stmt);
2441 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2443 if (dump_file && (dump_flags & TDF_DETAILS))
2445 fprintf (dump_file, "Generated bndldx for pointer ");
2446 print_generic_expr (dump_file, ptr, 0);
2447 fprintf (dump_file, ": ");
2448 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2451 return bounds;
2454 /* Build and return CALL_EXPR for bndstx builtin with specified
2455 arguments. */
2456 tree
2457 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2459 tree fn = build1 (ADDR_EXPR,
2460 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2461 chkp_bndstx_fndecl);
2462 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2463 fn, 3, ptr, bounds, addr);
2464 CALL_WITH_BOUNDS_P (call) = true;
2465 return call;
2468 /* Insert code to store BOUNDS for PTR stored by ADDR.
2469 New statements are inserted after position pointed
2470 by GSI. */
2471 void
2472 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2473 gimple_stmt_iterator *gsi)
2475 gimple_seq seq;
2476 gimple *stmt;
2478 seq = NULL;
2480 addr = chkp_force_gimple_call_op (addr, &seq);
2481 ptr = chkp_force_gimple_call_op (ptr, &seq);
2483 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2484 chkp_mark_stmt (stmt);
2485 gimple_call_set_with_bounds (stmt, true);
2487 gimple_seq_add_stmt (&seq, stmt);
2489 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2491 if (dump_file && (dump_flags & TDF_DETAILS))
2493 fprintf (dump_file, "Generated bndstx for pointer store ");
2494 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2495 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2499 /* This function is called when call statement
2500 is inlined and therefore we can't use bndret
2501 for its LHS anymore. Function fixes bndret
2502 call using new RHS value if possible. */
/* LHS is the (bounded) value formerly produced by the inlined call;
   RHS is the expression it was replaced with.  If RHS is a memory
   access, the retbnd call is replaced by a bndldx load from RHS's
   address; otherwise the retbnd call is left untouched.  */
2503 void
2504 chkp_fixup_inlined_call (tree lhs, tree rhs)
2506 tree addr, bounds;
2507 gcall *retbnd, *bndldx;
2509 if (!BOUNDED_P (lhs))
2510 return;
2512 /* Search for retbnd call. */
2513 retbnd = chkp_retbnd_call_by_val (lhs);
2514 if (!retbnd)
2515 return;
2517 /* Currently only handle cases when call is replaced
2518 with a memory access. In this case bndret call
2519 may be replaced with bndldx call. Otherwise we
2520 have to search for bounds which may cause wrong
2521 result due to various optimizations applied. */
2522 switch (TREE_CODE (rhs))
2524 case VAR_DECL:
/* Register variables have no memory address to load bounds from.  */
2525 if (DECL_REGISTER (rhs))
2526 return;
2527 break;
2529 case MEM_REF:
2530 break;
2532 case ARRAY_REF:
2533 case COMPONENT_REF:
2534 addr = get_base_address (rhs);
2535 if (!DECL_P (addr)
2536 && TREE_CODE (addr) != MEM_REF)
2537 return;
2538 if (DECL_P (addr) && DECL_REGISTER (addr))
2539 return;
2540 break;
2542 default:
2543 return;
2546 /* Create a new statements sequence with bndldx call. */
2547 gimple_stmt_iterator gsi = gsi_for_stmt (retbnd);
2548 addr = build_fold_addr_expr (rhs);
2549 chkp_build_bndldx (addr, lhs, &gsi);
/* chkp_build_bndldx links new stmts after GSI and leaves the iterator
   on the last inserted statement, i.e. the bndldx call.  */
2550 bndldx = as_a <gcall *> (gsi_stmt (gsi));
2552 /* Remove bndret call. */
2553 bounds = gimple_call_lhs (retbnd);
2554 gsi = gsi_for_stmt (retbnd);
2555 gsi_remove (&gsi, true);
2557 /* Link new bndldx call. */
/* Reuse the old retbnd result so existing bounds users stay valid.  */
2558 gimple_call_set_lhs (bndldx, bounds);
2559 update_stmt (bndldx);
2562 /* Compute bounds for pointer NODE which was assigned in
2563 assignment statement ASSIGN. Return computed bounds. */
/* Dispatches on the RHS code of ASSIGN: loads reload bounds from the
   bounds table, copies/arithmetic propagate operand bounds, COND/MIN/MAX
   select between operand bounds at runtime, and value-producing exprs
   (shifts, comparisons, divisions, ...) yield invalid-op bounds.  */
2564 static tree
2565 chkp_compute_bounds_for_assignment (tree node, gimple *assign)
2567 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
2568 tree rhs1 = gimple_assign_rhs1 (assign);
2569 tree bounds = NULL_TREE;
2570 gimple_stmt_iterator iter = gsi_for_stmt (assign);
/* Set when the result reuses bounds of one operand; used below to
   detect abnormal-PHI operands needing a bounds copy.  */
2571 tree base = NULL;
2573 if (dump_file && (dump_flags & TDF_DETAILS))
2575 fprintf (dump_file, "Computing bounds for assignment: ");
2576 print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
2579 switch (rhs_code)
2581 case MEM_REF:
2582 case TARGET_MEM_REF:
2583 case COMPONENT_REF:
2584 case ARRAY_REF:
2585 /* We need to load bounds from the bounds table. */
2586 bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
2587 break;
2589 case VAR_DECL:
2590 case SSA_NAME:
2591 case ADDR_EXPR:
2592 case POINTER_PLUS_EXPR:
2593 case NOP_EXPR:
2594 case CONVERT_EXPR:
2595 case INTEGER_CST:
2596 /* Bounds are just propagated from RHS. */
2597 bounds = chkp_find_bounds (rhs1, &iter);
2598 base = rhs1;
2599 break;
2601 case VIEW_CONVERT_EXPR:
2602 /* Bounds are just propagated from RHS. */
2603 bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
2604 break;
2606 case PARM_DECL:
2607 if (BOUNDED_P (rhs1))
2609 /* We need to load bounds from the bounds table. */
2610 bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
2611 node, &iter);
/* Taking the parm's address requires it to be addressable.  */
2612 TREE_ADDRESSABLE (rhs1) = 1;
2614 else
2615 bounds = chkp_get_nonpointer_load_bounds ();
2616 break;
2618 case MINUS_EXPR:
2619 case PLUS_EXPR:
2620 case BIT_AND_EXPR:
2621 case BIT_IOR_EXPR:
2622 case BIT_XOR_EXPR:
2624 tree rhs2 = gimple_assign_rhs2 (assign);
2625 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2626 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2628 /* First we try to check types of operands. If it
2629 does not help then look at bound values.
2631 If some bounds are incomplete and other are
2632 not proven to be valid (i.e. also incomplete
2633 or invalid because value is not pointer) then
2634 resulting value is incomplete and will be
2635 recomputed later in chkp_finish_incomplete_bounds. */
2636 if (BOUNDED_P (rhs1)
2637 && !BOUNDED_P (rhs2))
2638 bounds = bnd1;
2639 else if (BOUNDED_P (rhs2)
2640 && !BOUNDED_P (rhs1)
2641 && rhs_code != MINUS_EXPR)
2642 bounds = bnd2;
2643 else if (chkp_incomplete_bounds (bnd1))
2644 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
2645 && !chkp_incomplete_bounds (bnd2))
2646 bounds = bnd2;
2647 else
2648 bounds = incomplete_bounds;
2649 else if (chkp_incomplete_bounds (bnd2))
2650 if (chkp_valid_bounds (bnd1)
2651 && !chkp_incomplete_bounds (bnd1))
2652 bounds = bnd1;
2653 else
2654 bounds = incomplete_bounds;
2655 else if (!chkp_valid_bounds (bnd1))
2656 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
2657 bounds = bnd2;
2658 else if (bnd2 == chkp_get_zero_bounds ())
2659 bounds = bnd2;
2660 else
2661 bounds = bnd1;
2662 else if (!chkp_valid_bounds (bnd2))
2663 bounds = bnd1;
2664 else
2665 /* Seems both operands may have valid bounds
2666 (e.g. pointer minus pointer). In such case
2667 use default invalid op bounds. */
2668 bounds = chkp_get_invalid_op_bounds ();
2670 base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
2672 break;
2674 case BIT_NOT_EXPR:
2675 case NEGATE_EXPR:
2676 case LSHIFT_EXPR:
2677 case RSHIFT_EXPR:
2678 case LROTATE_EXPR:
2679 case RROTATE_EXPR:
2680 case EQ_EXPR:
2681 case NE_EXPR:
2682 case LT_EXPR:
2683 case LE_EXPR:
2684 case GT_EXPR:
2685 case GE_EXPR:
2686 case MULT_EXPR:
2687 case RDIV_EXPR:
2688 case TRUNC_DIV_EXPR:
2689 case FLOOR_DIV_EXPR:
2690 case CEIL_DIV_EXPR:
2691 case ROUND_DIV_EXPR:
2692 case TRUNC_MOD_EXPR:
2693 case FLOOR_MOD_EXPR:
2694 case CEIL_MOD_EXPR:
2695 case ROUND_MOD_EXPR:
2696 case EXACT_DIV_EXPR:
2697 case FIX_TRUNC_EXPR:
2698 case FLOAT_EXPR:
2699 case REALPART_EXPR:
2700 case IMAGPART_EXPR:
2701 /* No valid bounds may be produced by these exprs. */
2702 bounds = chkp_get_invalid_op_bounds ();
2703 break;
2705 case COND_EXPR:
2707 tree val1 = gimple_assign_rhs2 (assign);
2708 tree val2 = gimple_assign_rhs3 (assign);
2709 tree bnd1 = chkp_find_bounds (val1, &iter);
2710 tree bnd2 = chkp_find_bounds (val2, &iter);
2711 gimple *stmt;
2713 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2714 bounds = incomplete_bounds;
2715 else if (bnd1 == bnd2)
2716 bounds = bnd1;
2717 else
/* Emit a runtime select mirroring the value COND_EXPR.  */
2719 rhs1 = unshare_expr (rhs1);
2721 bounds = chkp_get_tmp_reg (assign);
2722 stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
2723 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2725 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2726 chkp_mark_invalid_bounds (bounds);
2729 break;
2731 case MAX_EXPR:
2732 case MIN_EXPR:
2734 tree rhs2 = gimple_assign_rhs2 (assign);
2735 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2736 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2738 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2739 bounds = incomplete_bounds;
2740 else if (bnd1 == bnd2)
2741 bounds = bnd1;
2742 else
/* Select bounds of whichever operand MIN/MAX would pick.  */
2744 gimple *stmt;
2745 tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
2746 boolean_type_node, rhs1, rhs2);
2747 bounds = chkp_get_tmp_reg (assign);
2748 stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);
2750 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2752 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2753 chkp_mark_invalid_bounds (bounds);
2756 break;
2758 default:
2759 bounds = chkp_get_zero_bounds ();
2760 warning (0, "pointer bounds were lost due to unexpected expression %s",
2761 get_tree_code_name (rhs_code));
2764 gcc_assert (bounds);
2766 /* We may reuse bounds of other pointer we copy/modify. But it is not
2767 allowed for abnormal ssa names. If we produced a pointer using
2768 abnormal ssa name, we better make a bounds copy to avoid coalescing
2769 issues. */
2770 if (base
2771 && TREE_CODE (base) == SSA_NAME
2772 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
2774 gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
2775 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2776 bounds = gimple_assign_lhs (stmt);
2779 if (node)
2780 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2782 return bounds;
2785 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2787 There are just few statement codes allowed: NOP (for default ssa names),
2788 ASSIGN, CALL, PHI, ASM.
2790 Return computed bounds. */
/* For PHI definitions, *ITER is set to the iterator of the created
   bounds PHI so the caller can fill its arguments later.  */
2791 static tree
2792 chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
2793 gphi_iterator *iter)
2795 tree var, bounds;
2796 enum gimple_code code = gimple_code (def_stmt);
2797 gphi *stmt;
2799 if (dump_file && (dump_flags & TDF_DETAILS))
2801 fprintf (dump_file, "Searching for bounds for node: ");
2802 print_generic_expr (dump_file, node, 0);
2804 fprintf (dump_file, " using its definition: ");
2805 print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2808 switch (code)
2810 case GIMPLE_NOP:
/* Default definition: bounds depend on the underlying decl kind.  */
2811 var = SSA_NAME_VAR (node);
2812 switch (TREE_CODE (var))
2814 case PARM_DECL:
2815 bounds = chkp_get_bound_for_parm (node);
2816 break;
2818 case VAR_DECL:
2819 /* For uninitialized pointers use none bounds. */
2820 bounds = chkp_get_none_bounds ();
2821 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2822 break;
2824 case RESULT_DECL:
2826 tree base_type;
2828 gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2830 base_type = TREE_TYPE (TREE_TYPE (node));
/* The referenced type must have a known non-zero constant size.  */
2832 gcc_assert (TYPE_SIZE (base_type)
2833 && TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2834 && tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2836 bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2837 NULL, false);
2838 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2840 break;
2842 default:
2843 if (dump_file && (dump_flags & TDF_DETAILS))
2845 fprintf (dump_file, "Unexpected var with no definition\n");
2846 print_generic_expr (dump_file, var, 0);
2848 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2849 get_tree_code_name (TREE_CODE (var)));
2851 break;
2853 case GIMPLE_ASSIGN:
2854 bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2855 break;
2857 case GIMPLE_CALL:
2858 bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
2859 break;
2861 case GIMPLE_PHI:
/* Abnormal PHI results need a named bounds var to avoid coalescing
   problems; otherwise an anonymous temporary is enough.  */
2862 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
2863 if (SSA_NAME_VAR (node))
2864 var = chkp_get_bounds_var (SSA_NAME_VAR (node));
2865 else
2866 var = make_temp_ssa_name (pointer_bounds_type_node,
2867 NULL,
2868 CHKP_BOUND_TMP_NAME);
2869 else
2870 var = chkp_get_tmp_var ();
2871 stmt = create_phi_node (var, gimple_bb (def_stmt));
2872 bounds = gimple_phi_result (stmt);
2873 *iter = gsi_for_phi (stmt);
2875 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2877 /* Created bounds do not have all phi args computed and
2878 therefore we do not know if there is a valid source
2879 of bounds for that node. Therefore we mark bounds
2880 as incomplete and then recompute them when all phi
2881 args are computed. */
2882 chkp_register_incomplete_bounds (bounds, node);
2883 break;
2885 case GIMPLE_ASM:
2886 bounds = chkp_get_zero_bounds ();
2887 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2888 break;
2890 default:
2891 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2892 gimple_code_name[code]);
2895 return bounds;
2898 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2899 tree
2900 chkp_build_make_bounds_call (tree lower_bound, tree size)
2902 tree call = build1 (ADDR_EXPR,
2903 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2904 chkp_bndmk_fndecl);
2905 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2906 call, 2, lower_bound, size);
2909 /* Create static bounds var for specified OBJ which is
2910 either a VAR_DECL or a string constant. */
/* The created var holds the address of OBJ as its initial value and is
   registered for runtime bounds initialization.  Results are cached in
   chkp_static_var_bounds so each OBJ (or assembler name) gets one var.  */
2911 static tree
2912 chkp_make_static_bounds (tree obj)
2914 static int string_id = 1;
2915 static int var_id = 1;
2916 tree *slot;
2917 const char *var_name;
2918 char *bnd_var_name;
2919 tree bnd_var;
2921 /* First check if we already have required var. */
2922 if (chkp_static_var_bounds)
2924 /* For vars we use assembler name as a key in
2925 chkp_static_var_bounds map. It allows to
2926 avoid duplicating bound vars for decls
2927 sharing assembler name. */
2928 if (TREE_CODE (obj) == VAR_CODE_PLACEHOLDER_DO_NOT_USE
2930 tree name = DECL_ASSEMBLER_NAME (obj);
2931 slot = chkp_static_var_bounds->get (name);
2932 if (slot)
2933 return *slot;
2935 else
2937 slot = chkp_static_var_bounds->get (obj);
2938 if (slot)
2939 return *slot;
2943 /* Build decl for bounds var. */
2944 if (TREE_CODE (obj) == VAR_DECL)
2946 if (DECL_IGNORED_P (obj))
/* Ignored decls get a numbered name; +10 covers the int digits.  */
2948 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2949 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2951 else
2953 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2955 /* For hidden symbols we want to skip first '*' char. */
2956 if (*var_name == '*')
2957 var_name++;
2959 bnd_var_name = (char *) xmalloc (strlen (var_name)
2960 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2961 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2962 strcat (bnd_var_name, var_name);
2965 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2966 get_identifier (bnd_var_name),
2967 pointer_bounds_type_node);
2969 /* Address of the obj will be used as lower bound. */
2970 TREE_ADDRESSABLE (obj) = 1;
2972 else
2974 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2975 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2977 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2978 get_identifier (bnd_var_name),
2979 pointer_bounds_type_node);
2982 free (bnd_var_name);
2984 TREE_PUBLIC (bnd_var) = 0;
2985 TREE_USED (bnd_var) = 1;
2986 TREE_READONLY (bnd_var) = 0;
2987 TREE_STATIC (bnd_var) = 1;
2988 TREE_ADDRESSABLE (bnd_var) = 0;
2989 DECL_ARTIFICIAL (bnd_var) = 1;
2990 DECL_COMMON (bnd_var) = 1;
2991 DECL_COMDAT (bnd_var) = 1;
2992 DECL_READ_P (bnd_var) = 1;
2993 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2994 /* Force output similar to constant bounds.
2995 See chkp_make_static_const_bounds. */
2996 varpool_node::get_create (bnd_var)->force_output = 1;
2997 /* Mark symbol as requiring bounds initialization. */
2998 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2999 varpool_node::finalize_decl (bnd_var);
3001 /* Add created var to the map to use it for other references
3002 to obj. */
3003 if (!chkp_static_var_bounds)
3004 chkp_static_var_bounds = new hash_map<tree, tree>;
3006 if (TREE_CODE (obj) == VAR_DECL)
3008 tree name = DECL_ASSEMBLER_NAME (obj);
3009 chkp_static_var_bounds->put (name, bnd_var);
3011 else
3012 chkp_static_var_bounds->put (obj, bnd_var);
3014 return bnd_var;
3017 /* When var has incomplete type we cannot get size to
3018 compute its bounds. In such cases we use checker
3019 builtin call which determines object size at runtime. */
/* Emits a chkp_sizeof_fndecl call for VAR in the entry block and builds
   bounds [&VAR, &VAR + size).  Returns zero bounds when instrumentation
   of incomplete types is disabled.  */
3020 static tree
3021 chkp_generate_extern_var_bounds (tree var)
3023 tree bounds, size_reloc, lb, size, max_size, cond;
3024 gimple_stmt_iterator gsi;
3025 gimple_seq seq = NULL;
3026 gimple *stmt;
3028 /* If instrumentation is not enabled for vars having
3029 incomplete type then just return zero bounds to avoid
3030 checks for this var. */
3031 if (!flag_chkp_incomplete_type)
3032 return chkp_get_zero_bounds ();
3034 if (dump_file && (dump_flags & TDF_DETAILS))
3036 fprintf (dump_file, "Generating bounds for extern symbol '");
3037 print_generic_expr (dump_file, var, 0);
3038 fprintf (dump_file, "'\n");
3041 stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
3043 size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
3044 gimple_call_set_lhs (stmt, size_reloc);
3046 gimple_seq_add_stmt (&seq, stmt);
3048 lb = chkp_build_addr_expr (var);
3049 size = make_ssa_name (chkp_get_size_tmp_var ());
3051 if (flag_chkp_zero_dynamic_size_as_infinite)
3053 /* We should check that size relocation was resolved.
3054 If it was not then use maximum possible size for the var. */
/* max_size = 0 - lb, i.e. everything from lb to the address-space end.  */
3055 max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
3056 fold_convert (chkp_uintptr_type, lb));
3057 max_size = chkp_force_gimple_call_op (max_size, &seq);
3059 cond = build2 (NE_EXPR, boolean_type_node,
3060 size_reloc, integer_zero_node);
3061 stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
3062 gimple_seq_add_stmt (&seq, stmt);
3064 else
3066 stmt = gimple_build_assign (size, size_reloc);
3067 gimple_seq_add_stmt (&seq, stmt);
3070 gsi = gsi_start_bb (chkp_get_entry_block ());
3071 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3073 bounds = chkp_make_bounds (lb, size, &gsi, true);
3075 return bounds;
3078 /* Return 1 if TYPE has fields with zero size or fields
3079 marked with chkp_variable_size attribute. */
3080 bool
3081 chkp_variable_size_type (tree type)
3083 bool res = false;
3084 tree field;
3086 if (RECORD_OR_UNION_TYPE_P (type))
3087 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3089 if (TREE_CODE (field) == FIELD_DECL)
3090 res = res
3091 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3092 || chkp_variable_size_type (TREE_TYPE (field));
3094 else
3095 res = !TYPE_SIZE (type)
3096 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3097 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3099 return res;
3102 /* Compute and return bounds for address of DECL which is
3103 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
/* Uses previously registered bounds if any; otherwise picks between
   zero bounds (unknown size, checks restricted), a static bounds var,
   runtime-computed extern bounds, or [&DECL, &DECL + DECL_SIZE_UNIT).  */
3104 static tree
3105 chkp_get_bounds_for_decl_addr (tree decl)
3107 tree bounds;
3109 gcc_assert (TREE_CODE (decl) == VAR_DECL
3110 || TREE_CODE (decl) == PARM_DECL
3111 || TREE_CODE (decl) == RESULT_DECL);
3113 bounds = chkp_get_registered_addr_bounds (decl);
3115 if (bounds)
3116 return bounds;
3118 if (dump_file && (dump_flags & TDF_DETAILS))
3120 fprintf (dump_file, "Building bounds for address of decl ");
3121 print_generic_expr (dump_file, decl, 0);
3122 fprintf (dump_file, "\n");
3125 /* Use zero bounds if size is unknown and checks for
3126 unknown sizes are restricted. */
3127 if ((!DECL_SIZE (decl)
3128 || (chkp_variable_size_type (TREE_TYPE (decl))
3129 && (TREE_STATIC (decl)
3130 || DECL_EXTERNAL (decl)
3131 || TREE_PUBLIC (decl))))
3132 && !flag_chkp_incomplete_type)
3133 return chkp_get_zero_bounds ();
3135 if (flag_chkp_use_static_bounds
3136 && TREE_CODE (decl) == VAR_DECL
3137 && (TREE_STATIC (decl)
3138 || DECL_EXTERNAL (decl)
3139 || TREE_PUBLIC (decl))
3140 && !DECL_THREAD_LOCAL_P (decl))
/* Load bounds from a static bounds var initialized at startup.  */
3142 tree bnd_var = chkp_make_static_bounds (decl);
3143 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3144 gimple *stmt;
3146 bounds = chkp_get_tmp_reg (NULL);
3147 stmt = gimple_build_assign (bounds, bnd_var);
3148 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3150 else if (!DECL_SIZE (decl)
3151 || (chkp_variable_size_type (TREE_TYPE (decl))
3152 && (TREE_STATIC (decl)
3153 || DECL_EXTERNAL (decl)
3154 || TREE_PUBLIC (decl))))
/* Size unknown at compile time: compute it at runtime.  */
3156 gcc_assert (TREE_CODE (decl) == VAR_DECL);
3157 bounds = chkp_generate_extern_var_bounds (decl);
3159 else
3161 tree lb = chkp_build_addr_expr (decl);
3162 bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3165 return bounds;
3168 /* Compute and return bounds for constant string. */
/* Bounds are either loaded from a static bounds var or built directly
   as [&CST, &CST + TREE_STRING_LENGTH (CST)); the result is registered
   so repeated requests for the same CST reuse it.  */
3169 static tree
3170 chkp_get_bounds_for_string_cst (tree cst)
3172 tree bounds;
3173 tree lb;
3174 tree size;
3176 gcc_assert (TREE_CODE (cst) == STRING_CST);
3178 bounds = chkp_get_registered_bounds (cst);
3180 if (bounds)
3181 return bounds;
3183 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3184 || flag_chkp_use_static_const_bounds > 0)
/* Load bounds from a static bounds var created for this string.  */
3186 tree bnd_var = chkp_make_static_bounds (cst);
3187 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3188 gimple *stmt;
3190 bounds = chkp_get_tmp_reg (NULL);
3191 stmt = gimple_build_assign (bounds, bnd_var);
3192 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3194 else
3196 lb = chkp_build_addr_expr (cst);
3197 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3198 bounds = chkp_make_bounds (lb, size, NULL, false);
3201 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3203 return bounds;
3206 /* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
3207 return the result. If ITER is not NULL then code is inserted
3208 before position pointed by ITER. Otherwise code is added to
3209 entry block. */
3210 static tree
3211 chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
/* Zero (or missing) bounds act as the identity for intersection, so no
   runtime call is needed in that case.  */
3213 if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
3214 return bounds2 ? bounds2 : bounds1;
3215 else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
3216 return bounds1;
3217 else
3219 gimple_seq seq;
3220 gimple *stmt;
3221 tree bounds;
3223 seq = NULL;
3225 stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
3226 chkp_mark_stmt (stmt);
3228 bounds = chkp_get_tmp_reg (stmt);
3229 gimple_call_set_lhs (stmt, bounds);
3231 gimple_seq_add_stmt (&seq, stmt);
3233 /* We are probably doing narrowing for constant expression.
3234 In such case iter may be undefined. */
3235 if (!iter)
3237 gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
3238 iter = &gsi;
3239 gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
3241 else
3242 gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
3244 if (dump_file && (dump_flags & TDF_DETAILS))
3246 fprintf (dump_file, "Bounds intersection: ");
3247 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
3248 fprintf (dump_file, " inserted before statement: ");
3249 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
3250 TDF_VOPS|TDF_MEMSYMS);
3253 return bounds;
3257 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3258 and 0 othersize. */
3259 static bool
3260 chkp_may_narrow_to_field (tree field)
3262 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3263 && tree_to_uhwi (DECL_SIZE (field)) != 0
3264 && (!DECL_FIELD_OFFSET (field)
3265 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3266 && (!DECL_FIELD_BIT_OFFSET (field)
3267 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3268 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3269 && !chkp_variable_size_type (TREE_TYPE (field));
3272 /* Return 1 if bounds for FIELD should be narrowed to
3273 field's own size. */
3274 static bool
3275 chkp_narrow_bounds_for_field (tree field)
3277 HOST_WIDE_INT offs;
3278 HOST_WIDE_INT bit_offs;
3280 if (!chkp_may_narrow_to_field (field))
3281 return false;
3283 /* Accesse to compiler generated fields should not cause
3284 bounds narrowing. */
3285 if (DECL_ARTIFICIAL (field))
3286 return false;
3288 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3289 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3291 return (flag_chkp_narrow_bounds
3292 && (flag_chkp_first_field_has_own_bounds
3293 || offs
3294 || bit_offs));
3297 /* Perform narrowing for BOUNDS using bounds computed for field
3298 access COMPONENT. ITER meaning is the same as for
3299 chkp_intersect_bounds. */
3300 static tree
3301 chkp_narrow_bounds_to_field (tree bounds, tree component,
3302 gimple_stmt_iterator *iter)
3304 tree field = TREE_OPERAND (component, 1);
3305 tree size = DECL_SIZE_UNIT (field);
3306 tree field_ptr = chkp_build_addr_expr (component);
3307 tree field_bounds;
3309 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3311 return chkp_intersect_bounds (field_bounds, bounds, iter);
3314 /* Parse field or array access NODE.
3316 PTR output parameter holds a pointer to the outermost
3317 object.
3319 BITFIELD output parameter is set to 1 if bitfield is
3320 accessed and to 0 otherwise. If it is 1 then ELT holds
3321 outer component for accessed bit field.
3323 SAFE output parameter is set to 1 if access is safe and
3324 checks are not required.
3326 BOUNDS output parameter holds bounds to be used to check
3327 access (may be NULL).
3329 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3330 innermost accessed component. */
3331 static void
3332 chkp_parse_array_and_component_ref (tree node, tree *ptr,
3333 tree *elt, bool *safe,
3334 bool *bitfield,
3335 tree *bounds,
3336 gimple_stmt_iterator *iter,
3337 bool innermost_bounds)
3339 tree comp_to_narrow = NULL_TREE;
3340 tree last_comp = NULL_TREE;
3341 bool array_ref_found = false;
3342 tree *nodes;
3343 tree var;
3344 int len;
3345 int i;
/* Count the number of nested COMPONENT_REF/ARRAY_REF/VIEW_CONVERT_EXPR
   levels plus the base object; VAR ends up as the base object.  */
3347 /* Compute tree height for expression. */
3348 var = node;
3349 len = 1;
3350 while (TREE_CODE (var) == COMPONENT_REF
3351 || TREE_CODE (var) == ARRAY_REF
3352 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
3354 var = TREE_OPERAND (var, 0);
3355 len++;
3358 gcc_assert (len > 1);
3360 /* It is more convenient for us to scan left-to-right,
3361 so walk tree again and put all nodes to the nodes vector
3362 in reversed order.  nodes[0] is the base object and
3363 nodes[len - 1] is NODE itself. */
3363 nodes = XALLOCAVEC (tree, len);
3364 nodes[len - 1] = node;
3365 for (i = len - 2; i >= 0; i--)
3366 nodes[i] = TREE_OPERAND (nodes[i + 1], 0);
3368 if (bounds)
3369 *bounds = NULL;
3370 *safe = true;
3371 *bitfield = (TREE_CODE (node) == COMPONENT_REF
3372 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
3373 /* To get bitfield address we will need outer element. */
3374 if (*bitfield)
3375 *elt = nodes[len - 2];
3376 else
3377 *elt = NULL_TREE;
3379 /* If we have indirection in expression then compute
3380 outermost structure bounds. Computed bounds may be
3381 narrowed later. */
3382 if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
3384 *safe = false;
3385 *ptr = TREE_OPERAND (nodes[0], 0);
3386 if (bounds)
3387 *bounds = chkp_find_bounds (*ptr, iter);
3389 else
/* No indirection: the base must be a decl, string constant or SSA
   name; take its address as the outermost pointer.  */
3391 gcc_assert (TREE_CODE (var) == VAR_DECL
3392 || TREE_CODE (var) == PARM_DECL
3393 || TREE_CODE (var) == RESULT_DECL
3394 || TREE_CODE (var) == STRING_CST
3395 || TREE_CODE (var) == SSA_NAME);
3397 *ptr = chkp_build_addr_expr (var);
3400 /* In this loop we are trying to find a field access
3401 requiring narrowing. There are two simple rules
3402 for search:
3403 1. Leftmost array_ref is chosen if any.
3404 2. Rightmost suitable component_ref is chosen if innermost
3405 bounds are required and no array_ref exists. */
3406 for (i = 1; i < len; i++)
3408 var = nodes[i];
3410 if (TREE_CODE (var) == ARRAY_REF)
3412 *safe = false;
3413 array_ref_found = true;
3414 if (flag_chkp_narrow_bounds
3415 && !flag_chkp_narrow_to_innermost_arrray
3416 && (!last_comp
3417 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
3419 comp_to_narrow = last_comp;
3420 break;
3423 else if (TREE_CODE (var) == COMPONENT_REF)
3425 tree field = TREE_OPERAND (var, 1);
3427 if (innermost_bounds
3428 && !array_ref_found
3429 && chkp_narrow_bounds_for_field (field))
3430 comp_to_narrow = var;
3431 last_comp = var;
/* When narrowing to the innermost array, narrow eagerly at each
   field of array type and drop any pending component.  */
3433 if (flag_chkp_narrow_bounds
3434 && flag_chkp_narrow_to_innermost_arrray
3435 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
3437 if (bounds)
3438 *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
3439 comp_to_narrow = NULL;
3442 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
3443 /* Nothing to do for it. */
3445 else
3446 gcc_unreachable ();
3449 if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
3450 *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);
3452 if (innermost_bounds && bounds && !*bounds)
3453 *bounds = chkp_find_bounds (*ptr, iter);
3456 /* Compute and return bounds for address of OBJ.  Computed bounds
     are cached via chkp_register_addr_bounds. */
3457 static tree
3458 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
3460 tree bounds = chkp_get_registered_addr_bounds (obj);
/* Reuse previously registered bounds if any.  */
3462 if (bounds)
3463 return bounds;
3465 switch (TREE_CODE (obj))
3467 case VAR_DECL:
3468 case PARM_DECL:
3469 case RESULT_DECL:
3470 bounds = chkp_get_bounds_for_decl_addr (obj);
3471 break;
3473 case STRING_CST:
3474 bounds = chkp_get_bounds_for_string_cst (obj);
3475 break;
3477 case ARRAY_REF:
3478 case COMPONENT_REF:
3480 tree elt;
3481 tree ptr;
3482 bool safe;
3483 bool bitfield;
/* Innermost bounds are requested (last argument is true).  */
3485 chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
3486 &bitfield, &bounds, iter, true);
3488 gcc_assert (bounds);
3490 break;
3492 case FUNCTION_DECL:
3493 case LABEL_DECL:
3494 bounds = chkp_get_zero_bounds ();
3495 break;
3497 case MEM_REF:
/* Bounds of *p are the bounds of p itself.  */
3498 bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
3499 break;
3501 case REALPART_EXPR:
3502 case IMAGPART_EXPR:
/* Use bounds of the underlying complex object.  */
3503 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
3504 break;
3506 default:
3507 if (dump_file && (dump_flags & TDF_DETAILS))
3509 fprintf (dump_file, "chkp_make_addressed_object_bounds: "
3510 "unexpected object of type %s\n",
3511 get_tree_code_name (TREE_CODE (obj)));
3512 print_node (dump_file, "", obj, 0);
3514 internal_error ("chkp_make_addressed_object_bounds: "
3515 "Unexpected tree code %s",
3516 get_tree_code_name (TREE_CODE (obj)));
/* Cache the result for subsequent queries.  */
3519 chkp_register_addr_bounds (obj, bounds);
3521 return bounds;
3524 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3525 to compute bounds if required. Computed bounds should be available at
3526 position pointed by ITER.
3528 If PTR_SRC is NULL_TREE then pointer definition is identified.
3530 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3531 PTR. If PTR is a any memory reference then ITER points to a statement
3532 after which bndldx will be inserterd. In both cases ITER will be updated
3533 to point to the inserted bndldx statement. */
3535 static tree
3536 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3538 tree addr = NULL_TREE;
3539 tree bounds = NULL_TREE;
3541 if (!ptr_src)
3542 ptr_src = ptr;
3544 bounds = chkp_get_registered_bounds (ptr_src);
3546 if (bounds)
3547 return bounds;
3549 switch (TREE_CODE (ptr_src))
3551 case MEM_REF:
3552 case VAR_DECL:
3553 if (BOUNDED_P (ptr_src))
3554 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3555 bounds = chkp_get_zero_bounds ();
3556 else
3558 addr = chkp_build_addr_expr (ptr_src);
3559 bounds = chkp_build_bndldx (addr, ptr, iter);
3561 else
3562 bounds = chkp_get_nonpointer_load_bounds ();
3563 break;
3565 case ARRAY_REF:
3566 case COMPONENT_REF:
3567 addr = get_base_address (ptr_src);
3568 if (DECL_P (addr)
3569 || TREE_CODE (addr) == MEM_REF
3570 || TREE_CODE (addr) == TARGET_MEM_REF)
3572 if (BOUNDED_P (ptr_src))
3573 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3574 bounds = chkp_get_zero_bounds ();
3575 else
3577 addr = chkp_build_addr_expr (ptr_src);
3578 bounds = chkp_build_bndldx (addr, ptr, iter);
3580 else
3581 bounds = chkp_get_nonpointer_load_bounds ();
3583 else
3585 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3586 bounds = chkp_find_bounds (addr, iter);
3588 break;
3590 case PARM_DECL:
3591 gcc_unreachable ();
3592 bounds = chkp_get_bound_for_parm (ptr_src);
3593 break;
3595 case TARGET_MEM_REF:
3596 addr = chkp_build_addr_expr (ptr_src);
3597 bounds = chkp_build_bndldx (addr, ptr, iter);
3598 break;
3600 case SSA_NAME:
3601 bounds = chkp_get_registered_bounds (ptr_src);
3602 if (!bounds)
3604 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3605 gphi_iterator phi_iter;
3607 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3609 gcc_assert (bounds);
3611 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3613 unsigned i;
3615 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3617 tree arg = gimple_phi_arg_def (def_phi, i);
3618 tree arg_bnd;
3619 gphi *phi_bnd;
3621 arg_bnd = chkp_find_bounds (arg, NULL);
3623 /* chkp_get_bounds_by_definition created new phi
3624 statement and phi_iter points to it.
3626 Previous call to chkp_find_bounds could create
3627 new basic block and therefore change phi statement
3628 phi_iter points to. */
3629 phi_bnd = phi_iter.phi ();
3631 add_phi_arg (phi_bnd, arg_bnd,
3632 gimple_phi_arg_edge (def_phi, i),
3633 UNKNOWN_LOCATION);
3636 /* If all bound phi nodes have their arg computed
3637 then we may finish its computation. See
3638 chkp_finish_incomplete_bounds for more details. */
3639 if (chkp_may_finish_incomplete_bounds ())
3640 chkp_finish_incomplete_bounds ();
3643 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3644 || chkp_incomplete_bounds (bounds));
3646 break;
3648 case ADDR_EXPR:
3649 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3650 break;
3652 case INTEGER_CST:
3653 if (integer_zerop (ptr_src))
3654 bounds = chkp_get_none_bounds ();
3655 else
3656 bounds = chkp_get_invalid_op_bounds ();
3657 break;
3659 default:
3660 if (dump_file && (dump_flags & TDF_DETAILS))
3662 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3663 get_tree_code_name (TREE_CODE (ptr_src)));
3664 print_node (dump_file, "", ptr_src, 0);
3666 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3667 get_tree_code_name (TREE_CODE (ptr_src)));
3670 if (!bounds)
3672 if (dump_file && (dump_flags & TDF_DETAILS))
3674 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3675 print_node (dump_file, "", ptr_src, 0);
3677 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3680 return bounds;
3683 /* Normal case for bounds search without forced narrowing. */
3684 static tree
3685 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3687 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3690 /* Search bounds for pointer PTR loaded from PTR_SRC
3691 by statement *ITER points to. */
3692 static tree
3693 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3695 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3698 /* Helper function which checks type of RHS and finds all pointers in
3699 it. For each found pointer we build its accesses in LHS and RHS
3700 objects and then call HANDLER for them. Function is used to copy
3701 or initialize bounds for copied object. */
3702 static void
3703 chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
3704 assign_handler handler)
3706 tree type = TREE_TYPE (lhs);
3708 /* We have nothing to do with clobbers. */
3709 if (TREE_CLOBBER_P (rhs))
3710 return;
3712 if (BOUNDED_TYPE_P (type))
3713 handler (lhs, rhs, arg);
3714 else if (RECORD_OR_UNION_TYPE_P (type))
3716 tree field;
/* An aggregate: recurse into each pointer-holding field.  For a
   CONSTRUCTOR rhs walk its elements, otherwise walk the type's
   field list and build matching component refs on both sides.  */
3718 if (TREE_CODE (rhs) == CONSTRUCTOR)
3720 unsigned HOST_WIDE_INT cnt;
3721 tree val;
3723 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
3725 if (chkp_type_has_pointer (TREE_TYPE (field)))
3727 tree lhs_field = chkp_build_component_ref (lhs, field);
3728 chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
3732 else
3733 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3734 if (TREE_CODE (field) == FIELD_DECL
3735 && chkp_type_has_pointer (TREE_TYPE (field)))
3737 tree rhs_field = chkp_build_component_ref (rhs, field);
3738 tree lhs_field = chkp_build_component_ref (lhs, field);
3739 chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
3742 else if (TREE_CODE (type) == ARRAY_TYPE)
3744 unsigned HOST_WIDE_INT cur = 0;
3745 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3746 tree etype = TREE_TYPE (type);
3747 tree esize = TYPE_SIZE (etype);
3749 if (TREE_CODE (rhs) == CONSTRUCTOR)
3751 unsigned HOST_WIDE_INT cnt;
3752 tree purp, val, lhs_elem;
3754 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
3756 if (purp && TREE_CODE (purp) == RANGE_EXPR)
3758 tree lo_index = TREE_OPERAND (purp, 0);
3759 tree hi_index = TREE_OPERAND (purp, 1);
/* NOTE(review): the (unsigned) casts truncate HOST_WIDE_INT index
   values; presumably indexes here never exceed unsigned range —
   confirm for very large array initializers.  */
3761 for (cur = (unsigned)tree_to_uhwi (lo_index);
3762 cur <= (unsigned)tree_to_uhwi (hi_index);
3763 cur++)
3765 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3766 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3769 else
3771 if (purp)
3773 gcc_assert (TREE_CODE (purp) == INTEGER_CST);
3774 cur = tree_to_uhwi (purp);
3777 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
3779 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3783 /* Copy array only when size is known. */
3784 else if (maxval && !integer_minus_onep (maxval))
3785 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
3787 tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3788 tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
3789 chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
3792 else
3793 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3794 get_tree_code_name (TREE_CODE (type)));
3797 /* Add code to copy bounds for assignment of RHS to LHS.
3798 ARG is an iterator pointing ne code position. */
3799 static void
3800 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3802 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3803 tree bounds = chkp_find_bounds (rhs, iter);
3804 tree addr = chkp_build_addr_expr(lhs);
3806 chkp_build_bndstx (addr, rhs, bounds, iter);
3809 /* Emit static bound initializers and size vars. */
3810 void
3811 chkp_finish_file (void)
3813 struct varpool_node *node;
3814 struct chkp_ctor_stmt_list stmts;
3816 if (seen_error ())
3817 return;
3819 /* Iterate through varpool and generate bounds initialization
3820 constructors for all statically initialized pointers. */
3821 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3822 stmts.stmts = NULL;
3823 FOR_EACH_VARIABLE (node)
3824 /* Check that var is actually emitted and we need and may initialize
3825 its bounds. */
3826 if (node->need_bounds_init
3827 && !POINTER_BOUNDS_P (node->decl)
3828 && DECL_RTL (node->decl)
3829 && MEM_P (DECL_RTL (node->decl))
3830 && TREE_ASM_WRITTEN (node->decl))
3832 chkp_walk_pointer_assignments (node->decl,
3833 DECL_INITIAL (node->decl),
3834 &stmts,
3835 chkp_add_modification_to_stmt_list);
/* Flush the accumulated statements into a constructor once the
   per-constructor statement budget is exhausted.  */
3837 if (stmts.avail <= 0)
3839 cgraph_build_static_cdtor ('P', stmts.stmts,
3840 MAX_RESERVED_INIT_PRIORITY + 3);
3841 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3842 stmts.stmts = NULL;
/* Emit any remaining statements in a final constructor.  */
3846 if (stmts.stmts)
3847 cgraph_build_static_cdtor ('P', stmts.stmts,
3848 MAX_RESERVED_INIT_PRIORITY + 3);
3850 /* Iterate through varpool and generate bounds initialization
3851 constructors for all static bounds vars. */
3852 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3853 stmts.stmts = NULL;
3854 FOR_EACH_VARIABLE (node)
3855 if (node->need_bounds_init
3856 && POINTER_BOUNDS_P (node->decl)
3857 && TREE_ASM_WRITTEN (node->decl))
3859 tree bnd = node->decl;
3860 tree var;
3862 gcc_assert (DECL_INITIAL (bnd)
3863 && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);
3865 var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
3866 chkp_output_static_bounds (bnd, var, &stmts);
3869 if (stmts.stmts)
3870 cgraph_build_static_cdtor ('B', stmts.stmts,
3871 MAX_RESERVED_INIT_PRIORITY + 2);
/* Release per-compilation maps; nothing else runs after this.  */
3873 delete chkp_static_var_bounds;
3874 delete chkp_bounds_map;
3877 /* An instrumentation function which is called for each statement
3878 having memory access we want to instrument. It inserts check
3879 code and bounds copy code.
3881 ITER points to statement to instrument.
3883 NODE holds memory access in statement to check.
3885 LOC holds the location information for statement.
3887 DIRFLAGS determines whether access is read or write
     (integer_one_node means write).
3889 ACCESS_OFFS should be added to address used in NODE
3890 before check.
3892 ACCESS_SIZE holds size of checked access.
3894 SAFE indicates if NODE access is safe and should not be
3895 checked. */
3896 static void
3897 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3898 location_t loc, tree dirflag,
3899 tree access_offs, tree access_size,
3900 bool safe)
3902 tree node_type = TREE_TYPE (node);
3903 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3904 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3905 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3906 tree ptr = NULL_TREE; /* a pointer used for dereference */
3907 tree bounds = NULL_TREE;
3909 /* We do not need instrumentation for clobbers. */
3910 if (dirflag == integer_one_node
3911 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3912 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3913 return;
3915 switch (TREE_CODE (node))
3917 case ARRAY_REF:
3918 case COMPONENT_REF:
3920 bool bitfield;
3921 tree elt;
3923 if (safe)
3925 /* We are not going to generate any checks, so do not
3926 generate bounds as well. */
3927 addr_first = chkp_build_addr_expr (node);
3928 break;
3931 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3932 &bitfield, &bounds, iter, false);
3934 /* Break if there is no dereference and operation is safe. */
3936 if (bitfield)
3938 tree field = TREE_OPERAND (node, 1);
3940 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3941 size = DECL_SIZE_UNIT (field);
/* For a bit field use the address of the enclosing element plus
   the field's byte position as the first accessed byte.  */
3943 if (elt)
3944 elt = chkp_build_addr_expr (elt);
3945 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3946 addr_first = fold_build_pointer_plus_loc (loc,
3947 addr_first,
3948 byte_position (field));
3950 else
3951 addr_first = chkp_build_addr_expr (node);
3953 break;
3955 case INDIRECT_REF:
3956 ptr = TREE_OPERAND (node, 0);
3957 addr_first = ptr;
3958 break;
3960 case MEM_REF:
3961 ptr = TREE_OPERAND (node, 0);
3962 addr_first = chkp_build_addr_expr (node);
3963 break;
3965 case TARGET_MEM_REF:
3966 ptr = TMR_BASE (node);
3967 addr_first = chkp_build_addr_expr (node);
3968 break;
3970 case ARRAY_RANGE_REF:
/* NOTE(review): the printf/debug_gimple_stmt/debug_tree calls below
   look like leftover debugging aids printed right before the ICE;
   consider removing them or routing them through dump_file.  */
3971 printf("ARRAY_RANGE_REF\n");
3972 debug_gimple_stmt(gsi_stmt(*iter));
3973 debug_tree(node);
3974 gcc_unreachable ();
3975 break;
3977 case BIT_FIELD_REF:
3979 tree offs, rem, bpu;
3981 gcc_assert (!access_offs);
3982 gcc_assert (!access_size);
/* Convert the bit offset/size of the reference into a byte offset
   and a byte size rounded up to whole bytes, then recurse on the
   underlying object with those as ACCESS_OFFS/ACCESS_SIZE.  */
3984 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3985 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3986 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3987 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3989 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3990 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3991 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3992 size = fold_convert (size_type_node, size);
3994 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3995 dirflag, offs, size, safe);
3996 return;
3998 break;
4000 case VAR_DECL:
4001 case RESULT_DECL:
4002 case PARM_DECL:
4003 if (dirflag != integer_one_node
4004 || DECL_REGISTER (node))
4005 return;
4007 safe = true;
4008 addr_first = chkp_build_addr_expr (node);
4009 break;
4011 default:
4012 return;
4015 /* If addr_last was not computed then use (addr_first + size - 1)
4016 expression to compute it. */
4017 if (!addr_last)
4019 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
4020 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
4023 /* Shift both first_addr and last_addr by access_offs if specified. */
4024 if (access_offs)
4026 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
4027 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
4030 /* Generate bndcl/bndcu checks if memory access is not safe. */
4031 if (!safe)
4033 gimple_stmt_iterator stmt_iter = *iter;
4035 if (!bounds)
4036 bounds = chkp_find_bounds (ptr, iter);
4038 chkp_check_mem_access (addr_first, addr_last, bounds,
4039 stmt_iter, loc, dirflag);
4042 /* We need to store bounds in case pointer is stored. */
4043 if (dirflag == integer_one_node
4044 && chkp_type_has_pointer (node_type)
4045 && flag_chkp_store_bounds)
4047 gimple *stmt = gsi_stmt (*iter);
4048 tree rhs1 = gimple_assign_rhs1 (stmt);
4049 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4051 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
4052 chkp_walk_pointer_assignments (node, rhs1, iter,
4053 chkp_copy_bounds_for_elem);
4054 else
4056 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
4057 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
4062 /* Add code to copy bounds for all pointers copied
4063 in ASSIGN created during inline of EDGE. */
4064 void
4065 chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
4067 tree lhs = gimple_assign_lhs (assign);
4068 tree rhs = gimple_assign_rhs1 (assign);
4069 gimple_stmt_iterator iter = gsi_for_stmt (assign);
4071 if (!flag_chkp_store_bounds)
4072 return;
4074 chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);
4076 /* We should create edges for all created calls to bndldx and bndstx.
     The walk above leaves ITER on the last inserted statement;
     presumably walking backwards until ASSIGN visits exactly the
     newly inserted calls — TODO confirm against
     chkp_build_bndstx/bndldx insertion behavior. */
4077 while (gsi_stmt (iter) != assign)
4079 gimple *stmt = gsi_stmt (iter);
4080 if (gimple_code (stmt) == GIMPLE_CALL)
4082 tree fndecl = gimple_call_fndecl (stmt);
4083 struct cgraph_node *callee = cgraph_node::get_create (fndecl);
4084 struct cgraph_edge *new_edge;
4086 gcc_assert (fndecl == chkp_bndstx_fndecl
4087 || fndecl == chkp_bndldx_fndecl
4088 || fndecl == chkp_ret_bnd_fndecl);
/* Mirror the count/frequency of the inlined edge on the new call.  */
4090 new_edge = edge->caller->create_edge (callee,
4091 as_a <gcall *> (stmt),
4092 edge->count,
4093 edge->frequency);
4094 new_edge->frequency = compute_call_stmt_bb_frequency
4095 (edge->caller->decl, gimple_bb (stmt));
4097 gsi_prev (&iter);
4101 /* Some code transformation made during instrumentation pass
4102 may put code into inconsistent state. Here we find and fix
4103 such flaws. */
4104 void
4105 chkp_fix_cfg ()
4107 basic_block bb;
4108 gimple_stmt_iterator i;
4110 /* We could insert some code right after stmt which ends bb.
4111 We wanted to put this code on fallthru edge but did not
4112 add new edges from the beginning because it may cause new
4113 phi node creation which may be incorrect due to incomplete
4114 bound phi nodes. */
4115 FOR_ALL_BB_FN (bb, cfun)
4116 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4118 gimple *stmt = gsi_stmt (i);
4119 gimple_stmt_iterator next = i;
4121 gsi_next (&next);
/* Statements found after a block-terminating statement must be
   moved onto the fallthru edge.  */
4123 if (stmt_ends_bb_p (stmt)
4124 && !gsi_end_p (next))
4126 edge fall = find_fallthru_edge (bb->succs);
4127 basic_block dest = NULL;
4128 int flags = 0;
4130 gcc_assert (fall);
4132 /* We cannot split abnormal edge. Therefore we
4133 store its params, make it regular and then
4134 rebuild abnormal edge after split. */
4135 if (fall->flags & EDGE_ABNORMAL)
4137 flags = fall->flags & ~EDGE_FALLTHRU;
4138 dest = fall->dest;
4140 fall->flags &= ~EDGE_COMPLEX;
/* Move every trailing statement onto the fallthru edge.  */
4143 while (!gsi_end_p (next))
4145 gimple *next_stmt = gsi_stmt (next);
4146 gsi_remove (&next, false);
4147 gsi_insert_on_edge (fall, next_stmt);
4150 gsi_commit_edge_inserts ();
4152 /* Re-create abnormal edge. */
4153 if (dest)
4154 make_edge (bb, dest, flags);
4159 /* Walker callback for chkp_replace_function_pointers. Replaces
4160 function pointer in the specified operand with pointer to the
4161 instrumented function version.  Returns NULL so the walk
     always continues. */
4162 static tree
4163 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4164 void *data ATTRIBUTE_UNUSED)
4166 if (TREE_CODE (*op) == FUNCTION_DECL
4167 && chkp_instrumentable_p (*op)
4168 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4169 /* For builtins we replace pointers only for selected
4170 function and functions having definitions. */
4171 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4172 && (chkp_instrument_normal_builtin (*op)
4173 || gimple_has_body_p (*op)))))
4175 struct cgraph_node *node = cgraph_node::get_create (*op);
4176 struct cgraph_node *clone = NULL;
/* Do not clone a node which already is an instrumentation clone.  */
4178 if (!node->instrumentation_clone)
4179 clone = chkp_maybe_create_clone (*op);
4181 if (clone)
4182 *op = clone->decl;
/* A FUNCTION_DECL has no interesting subtrees for this walk.  */
4183 *walk_subtrees = 0;
4186 return NULL;
4189 /* This function searches for function pointers in statement
4190 pointed by GSI and replaces them with pointers to instrumented
4191 function versions. */
4192 static void
4193 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4195 gimple *stmt = gsi_stmt (*gsi);
4196 /* For calls we want to walk call args only. */
4197 if (gimple_code (stmt) == GIMPLE_CALL)
4199 unsigned i;
4200 for (i = 0; i < gimple_call_num_args (stmt); i++)
4201 walk_tree (gimple_call_arg_ptr (stmt, i),
4202 chkp_replace_function_pointer, NULL, NULL);
4204 else
4205 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4208 /* This function instruments all statements working with memory,
4209 calls and rets.
4211 It also removes excess statements from static initializers. */
4212 static void
4213 chkp_instrument_function (void)
4215 basic_block bb, next;
4216 gimple_stmt_iterator i;
4217 enum gimple_rhs_class grhs_class;
/* In checker static constructors all accesses are known safe.  */
4218 bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4220 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
/* Remember the successor up front: instrumentation may split BB.  */
4223 next = bb->next_bb;
4224 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4226 gimple *s = gsi_stmt (i);
4228 /* Skip statement marked to not be instrumented. */
4229 if (chkp_marked_stmt_p (s))
4231 gsi_next (&i);
4232 continue;
4235 chkp_replace_function_pointers (&i);
4237 switch (gimple_code (s))
4239 case GIMPLE_ASSIGN:
/* Check the store (lhs, dirflag one) and the loads (rhs operands,
   dirflag zero) of the assignment.  */
4240 chkp_process_stmt (&i, gimple_assign_lhs (s),
4241 gimple_location (s), integer_one_node,
4242 NULL_TREE, NULL_TREE, safe);
4243 chkp_process_stmt (&i, gimple_assign_rhs1 (s),
4244 gimple_location (s), integer_zero_node,
4245 NULL_TREE, NULL_TREE, safe);
4246 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
4247 if (grhs_class == GIMPLE_BINARY_RHS)
4248 chkp_process_stmt (&i, gimple_assign_rhs2 (s),
4249 gimple_location (s), integer_zero_node,
4250 NULL_TREE, NULL_TREE, safe);
4251 break;
4253 case GIMPLE_RETURN:
4255 greturn *r = as_a <greturn *> (s);
4256 if (gimple_return_retval (r) != NULL_TREE)
4258 chkp_process_stmt (&i, gimple_return_retval (r),
4259 gimple_location (r),
4260 integer_zero_node,
4261 NULL_TREE, NULL_TREE, safe);
4263 /* Additionally we need to add bounds
4264 to return statement. */
4265 chkp_add_bounds_to_ret_stmt (&i);
4268 break;
4270 case GIMPLE_CALL:
4271 chkp_add_bounds_to_call_stmt (&i);
4272 break;
4274 default:
4278 gsi_next (&i);
4280 /* We do not need any actual pointer stores in checker
4281 static initializer. */
4282 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4283 && gimple_code (s) == GIMPLE_ASSIGN
4284 && gimple_store_p (s))
4286 gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4287 gsi_remove (&del_iter, true);
4288 unlink_stmt_vdef (s);
4289 release_defs(s);
4292 bb = next;
4294 while (bb);
4296 /* Some input params may have bounds and be address taken. In this case
4297 we should store incoming bounds into bounds table. */
4298 tree arg;
4299 if (flag_chkp_store_bounds)
4300 for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4301 if (TREE_ADDRESSABLE (arg))
4303 if (BOUNDED_P (arg))
4305 tree bounds = chkp_get_next_bounds_parm (arg);
4306 tree def_ptr = ssa_default_def (cfun, arg);
4307 gimple_stmt_iterator iter
4308 = gsi_start_bb (chkp_get_entry_block ());
4309 chkp_build_bndstx (chkp_build_addr_expr (arg),
4310 def_ptr ? def_ptr : arg,
4311 bounds, &iter);
4313 /* Skip bounds arg. */
4314 arg = TREE_CHAIN (arg);
4316 else if (chkp_type_has_pointer (TREE_TYPE (arg)))
4318 tree orig_arg = arg;
4319 bitmap slots = BITMAP_ALLOC (NULL);
4320 gimple_stmt_iterator iter
4321 = gsi_start_bb (chkp_get_entry_block ());
4322 bitmap_iterator bi;
4323 unsigned bnd_no;
/* Store bounds for every pointer slot found in the aggregate arg.  */
4325 chkp_find_bound_slots (TREE_TYPE (arg), slots);
4327 EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4329 tree bounds = chkp_get_next_bounds_parm (arg);
4330 HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4331 tree addr = chkp_build_addr_expr (orig_arg);
4332 tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4333 build_int_cst (ptr_type_node, offs));
4334 chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4335 bounds, &iter);
4337 arg = DECL_CHAIN (arg);
4339 BITMAP_FREE (slots);
4344 /* Find init/null/copy_ptr_bounds calls and replace them
4345 with assignments. It should allow better code
4346 optimization. */
4348 static void
4349 chkp_remove_useless_builtins ()
4351 basic_block bb;
4352 gimple_stmt_iterator gsi;
4354 FOR_EACH_BB_FN (bb, cfun)
4356 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4358 gimple *stmt = gsi_stmt (gsi);
4359 tree fndecl;
4360 enum built_in_function fcode;
4362 /* Find builtins returning first arg and replace
4363 them with assignments.  Note the assignments to
     fndecl/fcode inside the condition. */
4364 if (gimple_code (stmt) == GIMPLE_CALL
4365 && (fndecl = gimple_call_fndecl (stmt))
4366 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4367 && (fcode = DECL_FUNCTION_CODE (fndecl))
4368 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4369 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4370 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4371 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
/* Replace the call with "lhs = first-arg".  */
4373 tree res = gimple_call_arg (stmt, 0);
4374 update_call_from_tree (&gsi, res);
4375 stmt = gsi_stmt (gsi);
4376 update_stmt (stmt);
4382 /* Initialize pass: reset per-function maps, caches and flags used
     by the instrumentation. */
4383 static void
4384 chkp_init (void)
4386 basic_block bb;
4387 gimple_stmt_iterator i;
4389 in_chkp_pass = true;
/* Clear instrumentation marks left from previous runs.  */
4391 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4392 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4393 chkp_unmark_stmt (gsi_stmt (i));
4395 chkp_invalid_bounds = new hash_set<tree>;
4396 chkp_completed_bounds_set = new hash_set<tree>;
4397 delete chkp_reg_bounds;
4398 chkp_reg_bounds = new hash_map<tree, tree>;
4399 delete chkp_bound_vars;
4400 chkp_bound_vars = new hash_map<tree, tree>;
4401 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4402 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4403 delete chkp_bounds_map;
4404 chkp_bounds_map = new hash_map<tree, tree>;
4405 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
4407 entry_block = NULL;
4408 zero_bounds = NULL_TREE;
4409 none_bounds = NULL_TREE;
4410 incomplete_bounds = integer_zero_node;
4411 tmp_var = NULL_TREE;
4412 size_tmp_var = NULL_TREE;
4414 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4416 /* We create these constant bounds once for each object file.
4417 These symbols go to comdat section and result in single copy
4418 of each one in the final binary. */
4419 chkp_get_zero_bounds_var ();
4420 chkp_get_none_bounds_var ();
4422 calculate_dominance_info (CDI_DOMINATORS);
4423 calculate_dominance_info (CDI_POST_DOMINATORS);
4425 bitmap_obstack_initialize (NULL);
4428 /* Finalize instrumentation pass. */
4429 static void
4430 chkp_fini (void)
4432 in_chkp_pass = false;
4434 delete chkp_invalid_bounds;
4435 delete chkp_completed_bounds_set;
4436 delete chkp_reg_addr_bounds;
4437 delete chkp_incomplete_bounds_map;
4439 free_dominance_info (CDI_DOMINATORS);
4440 free_dominance_info (CDI_POST_DOMINATORS);
4442 bitmap_obstack_release (NULL);
4444 entry_block = NULL;
4445 zero_bounds = NULL_TREE;
4446 none_bounds = NULL_TREE;
4449 /* Main instrumentation pass function. */
4450 static unsigned int
4451 chkp_execute (void)
4453 chkp_init ();
4455 chkp_instrument_function ();
4457 chkp_remove_useless_builtins ();
4459 chkp_function_mark_instrumented (cfun->decl);
4461 chkp_fix_cfg ();
4463 chkp_fini ();
4465 return 0;
4468 /* Instrumentation pass gate. */
4469 static bool
4470 chkp_gate (void)
4472 cgraph_node *node = cgraph_node::get (cfun->decl);
4473 return ((node != NULL
4474 && node->instrumentation_clone)
4475 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
4478 namespace {
/* Pass descriptor for the chkp GIMPLE pass.  Requires SSA and CFG;
   IL verification and SSA update are requested on finish. */
4480 const pass_data pass_data_chkp =
4482 GIMPLE_PASS, /* type */
4483 "chkp", /* name */
4484 OPTGROUP_NONE, /* optinfo_flags */
4485 TV_NONE, /* tv_id */
4486 PROP_ssa | PROP_cfg, /* properties_required */
4487 0, /* properties_provided */
4488 0, /* properties_destroyed */
4489 0, /* todo_flags_start */
4490 TODO_verify_il
4491 | TODO_update_ssa /* todo_flags_finish */
/* Pass wrapper delegating gate/execute to chkp_gate/chkp_execute.  */
4494 class pass_chkp : public gimple_opt_pass
4496 public:
4497 pass_chkp (gcc::context *ctxt)
4498 : gimple_opt_pass (pass_data_chkp, ctxt)
4501 /* opt_pass methods: */
4502 virtual opt_pass * clone ()
4504 return new pass_chkp (m_ctxt);
4507 virtual bool gate (function *)
4509 return chkp_gate ();
4512 virtual unsigned int execute (function *)
4514 return chkp_execute ();
4517 }; // class pass_chkp
4519 } // anon namespace
/* Factory function used by the pass manager.  */
4521 gimple_opt_pass *
4522 make_pass_chkp (gcc::context *ctxt)
4524 return new pass_chkp (ctxt);
4527 #include "gt-tree-chkp.h"