[RTL-ifcvt] PR rtl-optimization/68506: Fix emitting order of insns in IF-THEN-JOIN...
[official-gcc.git] / gcc / tree-chkp.c
blob8b6381f7bb96538f2b5317cacb87d49e123bc71a
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "diagnostic.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "varasm.h"
37 #include "tree-iterator.h"
38 #include "tree-cfg.h"
39 #include "langhooks.h"
40 #include "tree-ssa-address.h"
41 #include "tree-ssa-loop-niter.h"
42 #include "gimple-pretty-print.h"
43 #include "gimple-iterator.h"
44 #include "gimplify.h"
45 #include "gimplify-me.h"
46 #include "print-tree.h"
47 #include "calls.h"
48 #include "expr.h"
49 #include "tree-ssa-propagate.h"
50 #include "tree-chkp.h"
51 #include "gimple-walk.h"
52 #include "tree-dfa.h"
53 #include "ipa-chkp.h"
54 #include "params.h"
56 /* Pointer Bounds Checker instruments code with memory checks to find
57 out-of-bounds memory accesses. Checks are performed by computing
58 bounds for each pointer and then comparing address of accessed
59 memory before pointer dereferencing.
61 1. Function clones.
63 See ipa-chkp.c.
65 2. Instrumentation.
67 There are a few things to instrument:
69 a) Memory accesses - add checker calls to check address of accessed memory
70 against bounds of dereferenced pointer. Obviously safe memory
71 accesses like static variable access does not have to be instrumented
72 with checks.
74 Example:
76 val_2 = *p_1;
78 with 4 bytes access is transformed into:
80 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
81 D.1_4 = p_1 + 3;
82 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
83 val_2 = *p_1;
85 where __bound_tmp.1_3 are bounds computed for pointer p_1,
86 __builtin___chkp_bndcl is a lower bound check and
87 __builtin___chkp_bndcu is an upper bound check.
89 b) Pointer stores.
91 When pointer is stored in memory we need to store its bounds. To
92 achieve compatibility of instrumented code with regular codes
93 we have to keep data layout and store bounds in special bound tables
94 via special checker call. Implementation of bounds table may vary for
95 different platforms. It has to associate pointer value and its
96 location (it is required because we may have two equal pointers
97 with different bounds stored in different places) with bounds.
98 Another checker builtin allows to get bounds for specified pointer
99 loaded from specified location.
101 Example:
103 buf1[i_1] = &buf2;
105 is transformed into:
107 buf1[i_1] = &buf2;
108 D.1_2 = &buf1[i_1];
109 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
111 where __bound_tmp.1_2 are bounds of &buf2.
113 c) Static initialization.
115 The special case of pointer store is static pointer initialization.
116 Bounds initialization is performed in a few steps:
117 - register all static initializations in front-end using
118 chkp_register_var_initializer
119 - when file compilation finishes we create functions with special
120 attribute 'chkp ctor' and put explicit initialization code
121 (assignments) for all statically initialized pointers.
122 - when checker constructor is compiled checker pass adds required
123 bounds initialization for all statically initialized pointers
124 - since we do not actually need excess pointers initialization
125 in checker constructor we remove such assignments from them
127 d) Calls.
129 For each call in the code we add additional arguments to pass
130 bounds for pointer arguments. We determine type of call arguments
131 using arguments list from function declaration; if function
132 declaration is not available we use function type; otherwise
133 (e.g. for unnamed arguments) we use type of passed value. Function
134 declaration/type is replaced with the instrumented one.
136 Example:
138 val_1 = foo (&buf1, &buf2, &buf1, 0);
140 is translated into:
142 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
143 &buf1, __bound_tmp.1_2, 0);
145 e) Returns.
147 If function returns a pointer value we have to return bounds also.
148 A new operand was added for return statement to hold returned bounds.
150 Example:
152 return &_buf1;
154 is transformed into
156 return &_buf1, __bound_tmp.1_1;
158 3. Bounds computation.
160 Compiler is fully responsible for computing bounds to be used for each
161 memory access. The first step for bounds computation is to find the
162 origin of pointer dereferenced for memory access. Basing on pointer
163 origin we define a way to compute its bounds. There are just few
164 possible cases:
166 a) Pointer is returned by call.
168 In this case we use corresponding checker builtin method to obtain returned
169 bounds.
171 Example:
173 buf_1 = malloc (size_2);
174 foo (buf_1);
176 is translated into:
178 buf_1 = malloc (size_2);
179 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
180 foo (buf_1, __bound_tmp.1_3);
182 b) Pointer is an address of an object.
184 In this case compiler tries to compute objects size and create corresponding
185 bounds. If object has incomplete type then special checker builtin is used to
186 obtain its size at runtime.
188 Example:
190 foo ()
192 <unnamed type> __bound_tmp.3;
193 static int buf[100];
195 <bb 3>:
196 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
198 <bb 2>:
199 return &buf, __bound_tmp.3_2;
202 Example:
204 Address of an object 'extern int buf[]' with incomplete type is
205 returned.
207 foo ()
209 <unnamed type> __bound_tmp.4;
210 long unsigned int __size_tmp.3;
212 <bb 3>:
213 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
214 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
216 <bb 2>:
217 return &buf, __bound_tmp.4_3;
220 c) Pointer is the result of object narrowing.
222 It happens when we use pointer to an object to compute pointer to a part
223 of an object. E.g. we take pointer to a field of a structure. In this
224 case we perform bounds intersection using bounds of original object and
225 bounds of object's part (which are computed basing on its type).
227 There may be some debatable questions about when narrowing should occur
228 and when it should not. To avoid false bound violations in correct
229 programs we do not perform narrowing when address of an array element is
230 obtained (it has address of the whole array) and when address of the first
231 structure field is obtained (because it is guaranteed to be equal to
232 address of the whole structure and it is legal to cast it back to structure).
234 Default narrowing behavior may be changed using compiler flags.
236 Example:
238 In this example address of the second structure field is returned.
240 foo (struct A * p, __bounds_type __bounds_of_p)
242 <unnamed type> __bound_tmp.3;
243 int * _2;
244 int * _5;
246 <bb 2>:
247 _5 = &p_1(D)->second_field;
248 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
249 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
250 __bounds_of_p_3(D));
251 _2 = &p_1(D)->second_field;
252 return _2, __bound_tmp.3_8;
255 Example:
257 In this example address of the first field of array element is returned.
259 foo (struct A * p, __bounds_type __bounds_of_p, int i)
261 long unsigned int _3;
262 long unsigned int _4;
263 struct A * _6;
264 int * _7;
266 <bb 2>:
267 _3 = (long unsigned int) i_1(D);
268 _4 = _3 * 8;
269 _6 = p_5(D) + _4;
270 _7 = &_6->first_field;
271 return _7, __bounds_of_p_2(D);
275 d) Pointer is the result of pointer arithmetic or type cast.
277 In this case bounds of the base pointer are used. In case of binary
278 operation producing a pointer we are analyzing data flow further
279 looking for operand's bounds. One operand is considered as a base
280 if it has some valid bounds. If we fall into a case when none of
281 operands (or both of them) has valid bounds, a default bounds value
282 is used.
284 Trying to find out bounds for binary operations we may fall into
285 cyclic dependencies for pointers. To avoid infinite recursion all
286 walked phi nodes instantly obtain corresponding bounds but created
287 bounds are marked as incomplete. It helps us to stop DF walk during
288 bounds search.
290 When we reach pointer source, some args of incomplete bounds phi obtain
291 valid bounds and those values are propagated further through phi nodes.
292 If no valid bounds were found for phi node then we mark its result as
293 invalid bounds. Process stops when all incomplete bounds become either
294 valid or invalid and we are able to choose a pointer base.
296 e) Pointer is loaded from the memory.
298 In this case we just need to load bounds from the bounds table.
300 Example:
302 foo ()
304 <unnamed type> __bound_tmp.3;
305 static int * buf;
306 int * _2;
308 <bb 2>:
309 _2 = buf;
310 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
311 return _2, __bound_tmp.3_4;
316 typedef void (*assign_handler)(tree, tree, void *);
318 static tree chkp_get_zero_bounds ();
319 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
320 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
321 gimple_stmt_iterator *iter);
322 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
323 tree *elt, bool *safe,
324 bool *bitfield,
325 tree *bounds,
326 gimple_stmt_iterator *iter,
327 bool innermost_bounds);
329 #define chkp_bndldx_fndecl \
330 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
331 #define chkp_bndstx_fndecl \
332 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
333 #define chkp_checkl_fndecl \
334 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
335 #define chkp_checku_fndecl \
336 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
337 #define chkp_bndmk_fndecl \
338 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
339 #define chkp_ret_bnd_fndecl \
340 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
341 #define chkp_intersect_fndecl \
342 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
343 #define chkp_narrow_bounds_fndecl \
344 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
345 #define chkp_sizeof_fndecl \
346 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
347 #define chkp_extract_lower_fndecl \
348 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
349 #define chkp_extract_upper_fndecl \
350 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
352 static GTY (()) tree chkp_uintptr_type;
354 static GTY (()) tree chkp_zero_bounds_var;
355 static GTY (()) tree chkp_none_bounds_var;
357 static GTY (()) basic_block entry_block;
358 static GTY (()) tree zero_bounds;
359 static GTY (()) tree none_bounds;
360 static GTY (()) tree incomplete_bounds;
361 static GTY (()) tree tmp_var;
362 static GTY (()) tree size_tmp_var;
363 static GTY (()) bitmap chkp_abnormal_copies;
365 struct hash_set<tree> *chkp_invalid_bounds;
366 struct hash_set<tree> *chkp_completed_bounds_set;
367 struct hash_map<tree, tree> *chkp_reg_bounds;
368 struct hash_map<tree, tree> *chkp_bound_vars;
369 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
370 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
371 struct hash_map<tree, tree> *chkp_bounds_map;
372 struct hash_map<tree, tree> *chkp_static_var_bounds;
374 static bool in_chkp_pass;
376 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
377 #define CHKP_SIZE_TMP_NAME "__size_tmp"
378 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
379 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
380 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
381 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
382 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
384 /* Static checker constructors may become very large and their
385 compilation with optimization may take too much time.
386 Therefore we put a limit to number of statements in one
387 constructor. Tests with 100 000 statically initialized
388 pointers showed following compilation times on Sandy Bridge
389 server (used -O2):
390 limit 100 => ~18 sec.
391 limit 300 => ~22 sec.
392 limit 1000 => ~30 sec.
393 limit 3000 => ~49 sec.
394 limit 5000 => ~55 sec.
395 limit 10000 => ~76 sec.
396 limit 100000 => ~532 sec. */
397 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
399 struct chkp_ctor_stmt_list
401 tree stmts;
402 int avail;
405 /* Return 1 if function FNDECL is instrumented by Pointer
406 Bounds Checker. */
407 bool
408 chkp_function_instrumented_p (tree fndecl)
410 return fndecl
411 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
414 /* Mark function FNDECL as instrumented. */
415 void
416 chkp_function_mark_instrumented (tree fndecl)
418 if (chkp_function_instrumented_p (fndecl))
419 return;
421 DECL_ATTRIBUTES (fndecl)
422 = tree_cons (get_identifier ("chkp instrumented"), NULL,
423 DECL_ATTRIBUTES (fndecl));
426 /* Return true when STMT is builtin call to instrumentation function
427 corresponding to CODE. */
429 bool
430 chkp_gimple_call_builtin_p (gimple *call,
431 enum built_in_function code)
433 tree fndecl;
434 if (is_gimple_call (call)
435 && (fndecl = targetm.builtin_chkp_function (code))
436 && gimple_call_fndecl (call) == fndecl)
437 return true;
438 return false;
441 /* Emit code to build zero bounds and return RTL holding
442 the result. */
444 chkp_expand_zero_bounds ()
446 tree zero_bnd;
448 if (flag_chkp_use_static_const_bounds)
449 zero_bnd = chkp_get_zero_bounds_var ();
450 else
451 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
452 integer_zero_node);
453 return expand_normal (zero_bnd);
456 /* Emit code to store zero bounds for PTR located at MEM. */
457 void
458 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
460 tree zero_bnd, bnd, addr, bndstx;
462 if (flag_chkp_use_static_const_bounds)
463 zero_bnd = chkp_get_zero_bounds_var ();
464 else
465 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
466 integer_zero_node);
467 bnd = make_tree (pointer_bounds_type_node,
468 assign_temp (pointer_bounds_type_node, 0, 1));
469 addr = build1 (ADDR_EXPR,
470 build_pointer_type (TREE_TYPE (mem)), mem);
471 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
473 expand_assignment (bnd, zero_bnd, false);
474 expand_normal (bndstx);
477 /* Build retbnd call for returned value RETVAL.
479 If BNDVAL is not NULL then result is stored
480 in it. Otherwise a temporary is created to
481 hold returned value.
483 GSI points to a position for a retbnd call
484 and is set to created stmt.
486 Cgraph edge is created for a new call if
487 UPDATE_EDGE is 1.
489 Obtained bounds are returned. */
490 tree
491 chkp_insert_retbnd_call (tree bndval, tree retval,
492 gimple_stmt_iterator *gsi)
494 gimple *call;
496 if (!bndval)
497 bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
499 call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
500 gimple_call_set_lhs (call, bndval);
501 gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
503 return bndval;
506 /* Build a GIMPLE_CALL identical to CALL but skipping bounds
507 arguments. */
509 gcall *
510 chkp_copy_call_skip_bounds (gcall *call)
512 bitmap bounds;
513 unsigned i;
515 bitmap_obstack_initialize (NULL);
516 bounds = BITMAP_ALLOC (NULL);
518 for (i = 0; i < gimple_call_num_args (call); i++)
519 if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
520 bitmap_set_bit (bounds, i);
522 if (!bitmap_empty_p (bounds))
523 call = gimple_call_copy_skip_args (call, bounds);
524 gimple_call_set_with_bounds (call, false);
526 BITMAP_FREE (bounds);
527 bitmap_obstack_release (NULL);
529 return call;
532 /* Redirect edge E to the correct node according to call_stmt.
533 Return 1 if bounds removal from call_stmt should be done
534 instead of redirection. */
536 bool
537 chkp_redirect_edge (cgraph_edge *e)
539 bool instrumented = false;
540 tree decl = e->callee->decl;
542 if (e->callee->instrumentation_clone
543 || chkp_function_instrumented_p (decl))
544 instrumented = true;
546 if (instrumented
547 && !gimple_call_with_bounds_p (e->call_stmt))
548 e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
549 else if (!instrumented
550 && gimple_call_with_bounds_p (e->call_stmt)
551 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
552 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
553 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
555 if (e->callee->instrumented_version)
556 e->redirect_callee (e->callee->instrumented_version);
557 else
559 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
560 /* Avoid bounds removal if all args will be removed. */
561 if (!args || TREE_VALUE (args) != void_type_node)
562 return true;
563 else
564 gimple_call_set_with_bounds (e->call_stmt, false);
568 return false;
571 /* Mark statement S to not be instrumented. */
572 static void
573 chkp_mark_stmt (gimple *s)
575 gimple_set_plf (s, GF_PLF_1, true);
578 /* Mark statement S to be instrumented. */
579 static void
580 chkp_unmark_stmt (gimple *s)
582 gimple_set_plf (s, GF_PLF_1, false);
585 /* Return 1 if statement S should not be instrumented. */
586 static bool
587 chkp_marked_stmt_p (gimple *s)
589 return gimple_plf (s, GF_PLF_1);
592 /* Get var to be used for bound temps. */
593 static tree
594 chkp_get_tmp_var (void)
596 if (!tmp_var)
597 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
599 return tmp_var;
602 /* Get SSA_NAME to be used as temp. */
603 static tree
604 chkp_get_tmp_reg (gimple *stmt)
606 if (in_chkp_pass)
607 return make_ssa_name (chkp_get_tmp_var (), stmt);
609 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
610 CHKP_BOUND_TMP_NAME);
613 /* Get var to be used for size temps. */
614 static tree
615 chkp_get_size_tmp_var (void)
617 if (!size_tmp_var)
618 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
620 return size_tmp_var;
623 /* Register bounds BND for address of OBJ. */
624 static void
625 chkp_register_addr_bounds (tree obj, tree bnd)
627 if (bnd == incomplete_bounds)
628 return;
630 chkp_reg_addr_bounds->put (obj, bnd);
632 if (dump_file && (dump_flags & TDF_DETAILS))
634 fprintf (dump_file, "Regsitered bound ");
635 print_generic_expr (dump_file, bnd, 0);
636 fprintf (dump_file, " for address of ");
637 print_generic_expr (dump_file, obj, 0);
638 fprintf (dump_file, "\n");
642 /* Return bounds registered for address of OBJ. */
643 static tree
644 chkp_get_registered_addr_bounds (tree obj)
646 tree *slot = chkp_reg_addr_bounds->get (obj);
647 return slot ? *slot : NULL_TREE;
650 /* Mark BOUNDS as completed. */
651 static void
652 chkp_mark_completed_bounds (tree bounds)
654 chkp_completed_bounds_set->add (bounds);
656 if (dump_file && (dump_flags & TDF_DETAILS))
658 fprintf (dump_file, "Marked bounds ");
659 print_generic_expr (dump_file, bounds, 0);
660 fprintf (dump_file, " as completed\n");
664 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
665 static bool
666 chkp_completed_bounds (tree bounds)
668 return chkp_completed_bounds_set->contains (bounds);
671 /* Clear comleted bound marks. */
672 static void
673 chkp_erase_completed_bounds (void)
675 delete chkp_completed_bounds_set;
676 chkp_completed_bounds_set = new hash_set<tree>;
679 /* Mark BOUNDS associated with PTR as incomplete. */
680 static void
681 chkp_register_incomplete_bounds (tree bounds, tree ptr)
683 chkp_incomplete_bounds_map->put (bounds, ptr);
685 if (dump_file && (dump_flags & TDF_DETAILS))
687 fprintf (dump_file, "Regsitered incomplete bounds ");
688 print_generic_expr (dump_file, bounds, 0);
689 fprintf (dump_file, " for ");
690 print_generic_expr (dump_file, ptr, 0);
691 fprintf (dump_file, "\n");
695 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
696 static bool
697 chkp_incomplete_bounds (tree bounds)
699 if (bounds == incomplete_bounds)
700 return true;
702 if (chkp_completed_bounds (bounds))
703 return false;
705 return chkp_incomplete_bounds_map->get (bounds) != NULL;
708 /* Clear incomleted bound marks. */
709 static void
710 chkp_erase_incomplete_bounds (void)
712 delete chkp_incomplete_bounds_map;
713 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
716 /* Build and return bndmk call which creates bounds for structure
717 pointed by PTR. Structure should have complete type. */
718 tree
719 chkp_make_bounds_for_struct_addr (tree ptr)
721 tree type = TREE_TYPE (ptr);
722 tree size;
724 gcc_assert (POINTER_TYPE_P (type));
726 size = TYPE_SIZE (TREE_TYPE (type));
728 gcc_assert (size);
730 return build_call_nary (pointer_bounds_type_node,
731 build_fold_addr_expr (chkp_bndmk_fndecl),
732 2, ptr, size);
735 /* Traversal function for chkp_may_finish_incomplete_bounds.
736 Set RES to 0 if at least one argument of phi statement
737 defining bounds (passed in KEY arg) is unknown.
738 Traversal stops when first unknown phi argument is found. */
739 bool
740 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
741 bool *res)
743 gimple *phi;
744 unsigned i;
746 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
748 phi = SSA_NAME_DEF_STMT (bounds);
750 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
752 for (i = 0; i < gimple_phi_num_args (phi); i++)
754 tree phi_arg = gimple_phi_arg_def (phi, i);
755 if (!phi_arg)
757 *res = false;
758 /* Do not need to traverse further. */
759 return false;
763 return true;
766 /* Return 1 if all phi nodes created for bounds have their
767 arguments computed. */
768 static bool
769 chkp_may_finish_incomplete_bounds (void)
771 bool res = true;
773 chkp_incomplete_bounds_map
774 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
776 return res;
779 /* Helper function for chkp_finish_incomplete_bounds.
780 Recompute args for bounds phi node. */
781 bool
782 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
783 void *res ATTRIBUTE_UNUSED)
785 tree ptr = *slot;
786 gphi *bounds_phi;
787 gphi *ptr_phi;
788 unsigned i;
790 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
791 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
793 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
794 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
796 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
798 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
799 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
801 add_phi_arg (bounds_phi, bound_arg,
802 gimple_phi_arg_edge (ptr_phi, i),
803 UNKNOWN_LOCATION);
806 return true;
809 /* Mark BOUNDS as invalid. */
810 static void
811 chkp_mark_invalid_bounds (tree bounds)
813 chkp_invalid_bounds->add (bounds);
815 if (dump_file && (dump_flags & TDF_DETAILS))
817 fprintf (dump_file, "Marked bounds ");
818 print_generic_expr (dump_file, bounds, 0);
819 fprintf (dump_file, " as invalid\n");
823 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
824 static bool
825 chkp_valid_bounds (tree bounds)
827 if (bounds == zero_bounds || bounds == none_bounds)
828 return false;
830 return !chkp_invalid_bounds->contains (bounds);
833 /* Helper function for chkp_finish_incomplete_bounds.
834 Check all arguments of phi nodes trying to find
835 valid completed bounds. If there is at least one
836 such arg then bounds produced by phi node are marked
837 as valid completed bounds and all phi args are
838 recomputed. */
839 bool
840 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
842 gimple *phi;
843 unsigned i;
845 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
847 if (chkp_completed_bounds (bounds))
848 return true;
850 phi = SSA_NAME_DEF_STMT (bounds);
852 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
854 for (i = 0; i < gimple_phi_num_args (phi); i++)
856 tree phi_arg = gimple_phi_arg_def (phi, i);
858 gcc_assert (phi_arg);
860 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
862 *res = true;
863 chkp_mark_completed_bounds (bounds);
864 chkp_recompute_phi_bounds (bounds, slot, NULL);
865 return true;
869 return true;
872 /* Helper function for chkp_finish_incomplete_bounds.
873 Marks all incompleted bounds as invalid. */
874 bool
875 chkp_mark_invalid_bounds_walker (tree const &bounds,
876 tree *slot ATTRIBUTE_UNUSED,
877 void *res ATTRIBUTE_UNUSED)
879 if (!chkp_completed_bounds (bounds))
881 chkp_mark_invalid_bounds (bounds);
882 chkp_mark_completed_bounds (bounds);
884 return true;
887 /* When all bound phi nodes have all their args computed
888 we have enough info to find valid bounds. We iterate
889 through all incompleted bounds searching for valid
890 bounds. Found valid bounds are marked as completed
891 and all remaining incompleted bounds are recomputed.
892 Process continues until no new valid bounds may be
893 found. All remained incompleted bounds are marked as
894 invalid (i.e. have no valid source of bounds). */
895 static void
896 chkp_finish_incomplete_bounds (void)
898 bool found_valid;
900 while (found_valid)
902 found_valid = false;
904 chkp_incomplete_bounds_map->
905 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
907 if (found_valid)
908 chkp_incomplete_bounds_map->
909 traverse<void *, chkp_recompute_phi_bounds> (NULL);
912 chkp_incomplete_bounds_map->
913 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
914 chkp_incomplete_bounds_map->
915 traverse<void *, chkp_recompute_phi_bounds> (NULL);
917 chkp_erase_completed_bounds ();
918 chkp_erase_incomplete_bounds ();
921 /* Return 1 if type TYPE is a pointer type or a
922 structure having a pointer type as one of its fields.
923 Otherwise return 0. */
924 bool
925 chkp_type_has_pointer (const_tree type)
927 bool res = false;
929 if (BOUNDED_TYPE_P (type))
930 res = true;
931 else if (RECORD_OR_UNION_TYPE_P (type))
933 tree field;
935 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
936 if (TREE_CODE (field) == FIELD_DECL)
937 res = res || chkp_type_has_pointer (TREE_TYPE (field));
939 else if (TREE_CODE (type) == ARRAY_TYPE)
940 res = chkp_type_has_pointer (TREE_TYPE (type));
942 return res;
945 unsigned
946 chkp_type_bounds_count (const_tree type)
948 unsigned res = 0;
950 if (!type)
951 res = 0;
952 else if (BOUNDED_TYPE_P (type))
953 res = 1;
954 else if (RECORD_OR_UNION_TYPE_P (type))
956 bitmap have_bound;
958 bitmap_obstack_initialize (NULL);
959 have_bound = BITMAP_ALLOC (NULL);
960 chkp_find_bound_slots (type, have_bound);
961 res = bitmap_count_bits (have_bound);
962 BITMAP_FREE (have_bound);
963 bitmap_obstack_release (NULL);
966 return res;
969 /* Get bounds associated with NODE via
970 chkp_set_bounds call. */
971 tree
972 chkp_get_bounds (tree node)
974 tree *slot;
976 if (!chkp_bounds_map)
977 return NULL_TREE;
979 slot = chkp_bounds_map->get (node);
980 return slot ? *slot : NULL_TREE;
983 /* Associate bounds VAL with NODE. */
984 void
985 chkp_set_bounds (tree node, tree val)
987 if (!chkp_bounds_map)
988 chkp_bounds_map = new hash_map<tree, tree>;
990 chkp_bounds_map->put (node, val);
993 /* Check if statically initialized variable VAR require
994 static bounds initialization. If VAR is added into
995 bounds initlization list then 1 is returned. Otherwise
996 return 0. */
997 extern bool
998 chkp_register_var_initializer (tree var)
1000 if (!flag_check_pointer_bounds
1001 || DECL_INITIAL (var) == error_mark_node)
1002 return false;
1004 gcc_assert (TREE_CODE (var) == VAR_DECL);
1005 gcc_assert (DECL_INITIAL (var));
1007 if (TREE_STATIC (var)
1008 && chkp_type_has_pointer (TREE_TYPE (var)))
1010 varpool_node::get_create (var)->need_bounds_init = 1;
1011 return true;
1014 return false;
1017 /* Helper function for chkp_finish_file.
1019 Add new modification statement (RHS is assigned to LHS)
1020 into list of static initializer statementes (passed in ARG).
1021 If statements list becomes too big, emit checker constructor
1022 and start the new one. */
1023 static void
1024 chkp_add_modification_to_stmt_list (tree lhs,
1025 tree rhs,
1026 void *arg)
1028 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
1029 tree modify;
1031 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1032 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1034 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1035 append_to_statement_list (modify, &stmts->stmts);
1037 stmts->avail--;
1040 /* Build and return ADDR_EXPR for specified object OBJ. */
1041 static tree
1042 chkp_build_addr_expr (tree obj)
1044 return TREE_CODE (obj) == TARGET_MEM_REF
1045 ? tree_mem_ref_addr (ptr_type_node, obj)
1046 : build_fold_addr_expr (obj);
1049 /* Helper function for chkp_finish_file.
1050 Initialize bound variable BND_VAR with bounds of variable
1051 VAR to statements list STMTS. If statements list becomes
1052 too big, emit checker constructor and start the new one. */
1053 static void
1054 chkp_output_static_bounds (tree bnd_var, tree var,
1055 struct chkp_ctor_stmt_list *stmts)
1057 tree lb, ub, size;
1059 if (TREE_CODE (var) == STRING_CST)
1061 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1062 size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
1064 else if (DECL_SIZE (var)
1065 && !chkp_variable_size_type (TREE_TYPE (var)))
1067 /* Compute bounds using statically known size. */
1068 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1069 size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
1071 else
1073 /* Compute bounds using dynamic size. */
1074 tree call;
1076 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1077 call = build1 (ADDR_EXPR,
1078 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
1079 chkp_sizeof_fndecl);
1080 size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
1081 call, 1, var);
1083 if (flag_chkp_zero_dynamic_size_as_infinite)
1085 tree max_size, cond;
1087 max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
1088 cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
1089 size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
1092 size = size_binop (MINUS_EXPR, size, size_one_node);
1095 ub = size_binop (PLUS_EXPR, lb, size);
1096 stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
1097 &stmts->stmts);
1098 if (stmts->avail <= 0)
1100 cgraph_build_static_cdtor ('B', stmts->stmts,
1101 MAX_RESERVED_INIT_PRIORITY + 2);
1102 stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
1103 stmts->stmts = NULL;
1107 /* Return entry block to be used for checker initilization code.
1108 Create new block if required. */
1109 static basic_block
1110 chkp_get_entry_block (void)
1112 if (!entry_block)
1113 entry_block
1114 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1116 return entry_block;
1119 /* Return a bounds var to be used for pointer var PTR_VAR. */
1120 static tree
1121 chkp_get_bounds_var (tree ptr_var)
1123 tree bnd_var;
1124 tree *slot;
1126 slot = chkp_bound_vars->get (ptr_var);
1127 if (slot)
1128 bnd_var = *slot;
1129 else
1131 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1132 CHKP_BOUND_TMP_NAME);
1133 chkp_bound_vars->put (ptr_var, bnd_var);
1136 return bnd_var;
1139 /* If BND is an abnormal bounds copy, return a copied value.
1140 Otherwise return BND. */
1141 static tree
1142 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
1144 if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1146 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1147 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1148 bnd = gimple_assign_rhs1 (bnd_def);
1151 return bnd;
1154 /* Register bounds BND for object PTR in global bounds table.
1155 A copy of bounds may be created for abnormal ssa names.
1156 Returns bounds to use for PTR. */
1157 static tree
1158 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1160 bool abnormal_ptr;
1162 if (!chkp_reg_bounds)
1163 return bnd;
1165 /* Do nothing if bounds are incomplete_bounds
1166 because it means bounds will be recomputed. */
1167 if (bnd == incomplete_bounds)
1168 return bnd;
1170 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1171 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1172 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1174 /* A single bounds value may be reused multiple times for
1175 different pointer values. It may cause coalescing issues
1176 for abnormal SSA names. To avoid it we create a bounds
1177 copy in case it is computed for abnormal SSA name.
1179 We also cannot reuse such created copies for other pointers */
1180 if (abnormal_ptr
1181 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1183 tree bnd_var = NULL_TREE;
1185 if (abnormal_ptr)
1187 if (SSA_NAME_VAR (ptr))
1188 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1190 else
1191 bnd_var = chkp_get_tmp_var ();
1193 /* For abnormal copies we may just find original
1194 bounds and use them. */
1195 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1196 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1197 /* For undefined values we usually use none bounds
1198 value but in case of abnormal edge it may cause
1199 coalescing failures. Use default definition of
1200 bounds variable instead to avoid it. */
1201 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1202 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1204 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1206 if (dump_file && (dump_flags & TDF_DETAILS))
1208 fprintf (dump_file, "Using default def bounds ");
1209 print_generic_expr (dump_file, bnd, 0);
1210 fprintf (dump_file, " for abnormal default def SSA name ");
1211 print_generic_expr (dump_file, ptr, 0);
1212 fprintf (dump_file, "\n");
1215 else
1217 tree copy;
1218 gimple *def = SSA_NAME_DEF_STMT (ptr);
1219 gimple *assign;
1220 gimple_stmt_iterator gsi;
1222 if (bnd_var)
1223 copy = make_ssa_name (bnd_var);
1224 else
1225 copy = make_temp_ssa_name (pointer_bounds_type_node,
1226 NULL,
1227 CHKP_BOUND_TMP_NAME);
1228 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1229 assign = gimple_build_assign (copy, bnd);
1231 if (dump_file && (dump_flags & TDF_DETAILS))
1233 fprintf (dump_file, "Creating a copy of bounds ");
1234 print_generic_expr (dump_file, bnd, 0);
1235 fprintf (dump_file, " for abnormal SSA name ");
1236 print_generic_expr (dump_file, ptr, 0);
1237 fprintf (dump_file, "\n");
1240 if (gimple_code (def) == GIMPLE_NOP)
1242 gsi = gsi_last_bb (chkp_get_entry_block ());
1243 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1244 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1245 else
1246 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1248 else
1250 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1251 /* Sometimes (e.g. when we load a pointer from a
1252 memory) bounds are produced later than a pointer.
1253 We need to insert bounds copy appropriately. */
1254 if (gimple_code (bnd_def) != GIMPLE_NOP
1255 && stmt_dominates_stmt_p (def, bnd_def))
1256 gsi = gsi_for_stmt (bnd_def);
1257 else
1258 gsi = gsi_for_stmt (def);
1259 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1262 bnd = copy;
1265 if (abnormal_ptr)
1266 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1269 chkp_reg_bounds->put (ptr, bnd);
1271 if (dump_file && (dump_flags & TDF_DETAILS))
1273 fprintf (dump_file, "Regsitered bound ");
1274 print_generic_expr (dump_file, bnd, 0);
1275 fprintf (dump_file, " for pointer ");
1276 print_generic_expr (dump_file, ptr, 0);
1277 fprintf (dump_file, "\n");
1280 return bnd;
1283 /* Get bounds registered for object PTR in global bounds table. */
1284 static tree
1285 chkp_get_registered_bounds (tree ptr)
1287 tree *slot;
1289 if (!chkp_reg_bounds)
1290 return NULL_TREE;
1292 slot = chkp_reg_bounds->get (ptr);
1293 return slot ? *slot : NULL_TREE;
1296 /* Add bound retvals to return statement pointed by GSI. */
1298 static void
1299 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1301 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1302 tree retval = gimple_return_retval (ret);
1303 tree ret_decl = DECL_RESULT (cfun->decl);
1304 tree bounds;
1306 if (!retval)
1307 return;
1309 if (BOUNDED_P (ret_decl))
1311 bounds = chkp_find_bounds (retval, gsi);
1312 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1313 gimple_return_set_retbnd (ret, bounds);
1316 update_stmt (ret);
1319 /* Force OP to be suitable for using as an argument for call.
1320 New statements (if any) go to SEQ. */
1321 static tree
1322 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1324 gimple_seq stmts;
1325 gimple_stmt_iterator si;
1327 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1329 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1330 chkp_mark_stmt (gsi_stmt (si));
1332 gimple_seq_add_seq (seq, stmts);
1334 return op;
1337 /* Generate lower bound check for memory access by ADDR.
1338 Check is inserted before the position pointed by ITER.
1339 DIRFLAG indicates whether memory access is load or store. */
1340 static void
1341 chkp_check_lower (tree addr, tree bounds,
1342 gimple_stmt_iterator iter,
1343 location_t location,
1344 tree dirflag)
1346 gimple_seq seq;
1347 gimple *check;
1348 tree node;
1350 if (!chkp_function_instrumented_p (current_function_decl)
1351 && bounds == chkp_get_zero_bounds ())
1352 return;
1354 if (dirflag == integer_zero_node
1355 && !flag_chkp_check_read)
1356 return;
1358 if (dirflag == integer_one_node
1359 && !flag_chkp_check_write)
1360 return;
1362 seq = NULL;
1364 node = chkp_force_gimple_call_op (addr, &seq);
1366 check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1367 chkp_mark_stmt (check);
1368 gimple_call_set_with_bounds (check, true);
1369 gimple_set_location (check, location);
1370 gimple_seq_add_stmt (&seq, check);
1372 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1374 if (dump_file && (dump_flags & TDF_DETAILS))
1376 gimple *before = gsi_stmt (iter);
1377 fprintf (dump_file, "Generated lower bound check for statement ");
1378 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1379 fprintf (dump_file, " ");
1380 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1384 /* Generate upper bound check for memory access by ADDR.
1385 Check is inserted before the position pointed by ITER.
1386 DIRFLAG indicates whether memory access is load or store. */
1387 static void
1388 chkp_check_upper (tree addr, tree bounds,
1389 gimple_stmt_iterator iter,
1390 location_t location,
1391 tree dirflag)
1393 gimple_seq seq;
1394 gimple *check;
1395 tree node;
1397 if (!chkp_function_instrumented_p (current_function_decl)
1398 && bounds == chkp_get_zero_bounds ())
1399 return;
1401 if (dirflag == integer_zero_node
1402 && !flag_chkp_check_read)
1403 return;
1405 if (dirflag == integer_one_node
1406 && !flag_chkp_check_write)
1407 return;
1409 seq = NULL;
1411 node = chkp_force_gimple_call_op (addr, &seq);
1413 check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1414 chkp_mark_stmt (check);
1415 gimple_call_set_with_bounds (check, true);
1416 gimple_set_location (check, location);
1417 gimple_seq_add_stmt (&seq, check);
1419 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1421 if (dump_file && (dump_flags & TDF_DETAILS))
1423 gimple *before = gsi_stmt (iter);
1424 fprintf (dump_file, "Generated upper bound check for statement ");
1425 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1426 fprintf (dump_file, " ");
1427 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1431 /* Generate lower and upper bound checks for memory access
1432 to memory slot [FIRST, LAST] againsr BOUNDS. Checks
1433 are inserted before the position pointed by ITER.
1434 DIRFLAG indicates whether memory access is load or store. */
1435 void
1436 chkp_check_mem_access (tree first, tree last, tree bounds,
1437 gimple_stmt_iterator iter,
1438 location_t location,
1439 tree dirflag)
1441 chkp_check_lower (first, bounds, iter, location, dirflag);
1442 chkp_check_upper (last, bounds, iter, location, dirflag);
1445 /* Replace call to _bnd_chk_* pointed by GSI with
1446 bndcu and bndcl calls. DIRFLAG determines whether
1447 check is for read or write. */
1449 void
1450 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1451 tree dirflag)
1453 gimple_stmt_iterator call_iter = *gsi;
1454 gimple *call = gsi_stmt (*gsi);
1455 tree fndecl = gimple_call_fndecl (call);
1456 tree addr = gimple_call_arg (call, 0);
1457 tree bounds = chkp_find_bounds (addr, gsi);
1459 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1460 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1461 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1463 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1464 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1466 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1468 tree size = gimple_call_arg (call, 1);
1469 addr = fold_build_pointer_plus (addr, size);
1470 addr = fold_build_pointer_plus_hwi (addr, -1);
1471 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1474 gsi_remove (&call_iter, true);
1477 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1478 corresponding bounds extract call. */
1480 void
1481 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1483 gimple *call = gsi_stmt (*gsi);
1484 tree fndecl = gimple_call_fndecl (call);
1485 tree addr = gimple_call_arg (call, 0);
1486 tree bounds = chkp_find_bounds (addr, gsi);
1487 gimple *extract;
1489 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1490 fndecl = chkp_extract_lower_fndecl;
1491 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1492 fndecl = chkp_extract_upper_fndecl;
1493 else
1494 gcc_unreachable ();
1496 extract = gimple_build_call (fndecl, 1, bounds);
1497 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1498 chkp_mark_stmt (extract);
1500 gsi_replace (gsi, extract, false);
1503 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1504 static tree
1505 chkp_build_component_ref (tree obj, tree field)
1507 tree res;
1509 /* If object is TMR then we do not use component_ref but
1510 add offset instead. We need it to be able to get addr
1511 of the reasult later. */
1512 if (TREE_CODE (obj) == TARGET_MEM_REF)
1514 tree offs = TMR_OFFSET (obj);
1515 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1516 offs, DECL_FIELD_OFFSET (field));
1518 gcc_assert (offs);
1520 res = copy_node (obj);
1521 TREE_TYPE (res) = TREE_TYPE (field);
1522 TMR_OFFSET (res) = offs;
1524 else
1525 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1527 return res;
1530 /* Return ARRAY_REF for array ARR and index IDX with
1531 specified element type ETYPE and element size ESIZE. */
1532 static tree
1533 chkp_build_array_ref (tree arr, tree etype, tree esize,
1534 unsigned HOST_WIDE_INT idx)
1536 tree index = build_int_cst (size_type_node, idx);
1537 tree res;
1539 /* If object is TMR then we do not use array_ref but
1540 add offset instead. We need it to be able to get addr
1541 of the reasult later. */
1542 if (TREE_CODE (arr) == TARGET_MEM_REF)
1544 tree offs = TMR_OFFSET (arr);
1546 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1547 esize, index);
1548 gcc_assert(esize);
1550 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1551 offs, esize);
1552 gcc_assert (offs);
1554 res = copy_node (arr);
1555 TREE_TYPE (res) = etype;
1556 TMR_OFFSET (res) = offs;
1558 else
1559 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1561 return res;
1564 /* Helper function for chkp_add_bounds_to_call_stmt.
1565 Fill ALL_BOUNDS output array with created bounds.
1567 OFFS is used for recursive calls and holds basic
1568 offset of TYPE in outer structure in bits.
1570 ITER points a position where bounds are searched.
1572 ALL_BOUNDS[i] is filled with elem bounds if there
1573 is a field in TYPE which has pointer type and offset
1574 equal to i * POINTER_SIZE in bits. */
1575 static void
1576 chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
1577 HOST_WIDE_INT offs,
1578 gimple_stmt_iterator *iter)
1580 tree type = TREE_TYPE (elem);
1582 if (BOUNDED_TYPE_P (type))
1584 if (!all_bounds[offs / POINTER_SIZE])
1586 tree temp = make_temp_ssa_name (type, NULL, "");
1587 gimple *assign = gimple_build_assign (temp, elem);
1588 gimple_stmt_iterator gsi;
1590 gsi_insert_before (iter, assign, GSI_SAME_STMT);
1591 gsi = gsi_for_stmt (assign);
1593 all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
1596 else if (RECORD_OR_UNION_TYPE_P (type))
1598 tree field;
1600 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1601 if (TREE_CODE (field) == FIELD_DECL)
1603 tree base = unshare_expr (elem);
1604 tree field_ref = chkp_build_component_ref (base, field);
1605 HOST_WIDE_INT field_offs
1606 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1607 if (DECL_FIELD_OFFSET (field))
1608 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1610 chkp_find_bounds_for_elem (field_ref, all_bounds,
1611 offs + field_offs, iter);
1614 else if (TREE_CODE (type) == ARRAY_TYPE)
1616 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1617 tree etype = TREE_TYPE (type);
1618 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1619 unsigned HOST_WIDE_INT cur;
1621 if (!maxval || integer_minus_onep (maxval))
1622 return;
1624 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1626 tree base = unshare_expr (elem);
1627 tree arr_elem = chkp_build_array_ref (base, etype,
1628 TYPE_SIZE (etype),
1629 cur);
1630 chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
1631 iter);
1636 /* Fill HAVE_BOUND output bitmap with information about
1637 bounds requred for object of type TYPE.
1639 OFFS is used for recursive calls and holds basic
1640 offset of TYPE in outer structure in bits.
1642 HAVE_BOUND[i] is set to 1 if there is a field
1643 in TYPE which has pointer type and offset
1644 equal to i * POINTER_SIZE - OFFS in bits. */
1645 void
1646 chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
1647 HOST_WIDE_INT offs)
1649 if (BOUNDED_TYPE_P (type))
1650 bitmap_set_bit (have_bound, offs / POINTER_SIZE);
1651 else if (RECORD_OR_UNION_TYPE_P (type))
1653 tree field;
1655 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1656 if (TREE_CODE (field) == FIELD_DECL)
1658 HOST_WIDE_INT field_offs = 0;
1659 if (DECL_FIELD_BIT_OFFSET (field))
1660 field_offs += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1661 if (DECL_FIELD_OFFSET (field))
1662 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1663 chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
1664 offs + field_offs);
1667 else if (TREE_CODE (type) == ARRAY_TYPE)
1669 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1670 tree etype = TREE_TYPE (type);
1671 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1672 unsigned HOST_WIDE_INT cur;
1674 if (!maxval
1675 || TREE_CODE (maxval) != INTEGER_CST
1676 || integer_minus_onep (maxval))
1677 return;
1679 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1680 chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
1684 /* Fill bitmap RES with information about bounds for
1685 type TYPE. See chkp_find_bound_slots_1 for more
1686 details. */
1687 void
1688 chkp_find_bound_slots (const_tree type, bitmap res)
1690 bitmap_clear (res);
1691 chkp_find_bound_slots_1 (type, res, 0);
1694 /* Return 1 if call to FNDECL should be instrumented
1695 and 0 otherwise. */
1697 static bool
1698 chkp_instrument_normal_builtin (tree fndecl)
1700 switch (DECL_FUNCTION_CODE (fndecl))
1702 case BUILT_IN_STRLEN:
1703 case BUILT_IN_STRCPY:
1704 case BUILT_IN_STRNCPY:
1705 case BUILT_IN_STPCPY:
1706 case BUILT_IN_STPNCPY:
1707 case BUILT_IN_STRCAT:
1708 case BUILT_IN_STRNCAT:
1709 case BUILT_IN_MEMCPY:
1710 case BUILT_IN_MEMPCPY:
1711 case BUILT_IN_MEMSET:
1712 case BUILT_IN_MEMMOVE:
1713 case BUILT_IN_BZERO:
1714 case BUILT_IN_STRCMP:
1715 case BUILT_IN_STRNCMP:
1716 case BUILT_IN_BCMP:
1717 case BUILT_IN_MEMCMP:
1718 case BUILT_IN_MEMCPY_CHK:
1719 case BUILT_IN_MEMPCPY_CHK:
1720 case BUILT_IN_MEMMOVE_CHK:
1721 case BUILT_IN_MEMSET_CHK:
1722 case BUILT_IN_STRCPY_CHK:
1723 case BUILT_IN_STRNCPY_CHK:
1724 case BUILT_IN_STPCPY_CHK:
1725 case BUILT_IN_STPNCPY_CHK:
1726 case BUILT_IN_STRCAT_CHK:
1727 case BUILT_IN_STRNCAT_CHK:
1728 case BUILT_IN_MALLOC:
1729 case BUILT_IN_CALLOC:
1730 case BUILT_IN_REALLOC:
1731 return 1;
1733 default:
1734 return 0;
1738 /* Add bound arguments to call statement pointed by GSI.
1739 Also performs a replacement of user checker builtins calls
1740 with internal ones. */
1742 static void
1743 chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
1745 gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
1746 unsigned arg_no = 0;
1747 tree fndecl = gimple_call_fndecl (call);
1748 tree fntype;
1749 tree first_formal_arg;
1750 tree arg;
1751 bool use_fntype = false;
1752 tree op;
1753 ssa_op_iter iter;
1754 gcall *new_call;
1756 /* Do nothing for internal functions. */
1757 if (gimple_call_internal_p (call))
1758 return;
1760 fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));
1762 /* Do nothing if back-end builtin is called. */
1763 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
1764 return;
1766 /* Do nothing for some middle-end builtins. */
1767 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1768 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
1769 return;
1771 /* Do nothing for calls to not instrumentable functions. */
1772 if (fndecl && !chkp_instrumentable_p (fndecl))
1773 return;
1775 /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
1776 and CHKP_COPY_PTR_BOUNDS. */
1777 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1778 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
1779 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
1780 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
1781 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
1782 return;
1784 /* Check user builtins are replaced with checks. */
1785 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1786 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1787 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
1788 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
1790 chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
1791 return;
1794 /* Check user builtins are replaced with bound extract. */
1795 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1796 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
1797 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
1799 chkp_replace_extract_builtin (gsi);
1800 return;
1803 /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
1804 target narrow bounds call. */
1805 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1806 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
1808 tree arg = gimple_call_arg (call, 1);
1809 tree bounds = chkp_find_bounds (arg, gsi);
1811 gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
1812 gimple_call_set_arg (call, 1, bounds);
1813 update_stmt (call);
1815 return;
1818 /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
1819 bndstx call. */
1820 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1821 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
1823 tree addr = gimple_call_arg (call, 0);
1824 tree ptr = gimple_call_arg (call, 1);
1825 tree bounds = chkp_find_bounds (ptr, gsi);
1826 gimple_stmt_iterator iter = gsi_for_stmt (call);
1828 chkp_build_bndstx (addr, ptr, bounds, gsi);
1829 gsi_remove (&iter, true);
1831 return;
1834 if (!flag_chkp_instrument_calls)
1835 return;
1837 /* We instrument only some subset of builtins. We also instrument
1838 builtin calls to be inlined. */
1839 if (fndecl
1840 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1841 && !chkp_instrument_normal_builtin (fndecl))
1843 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
1844 return;
1846 struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
1847 if (!clone
1848 || !gimple_has_body_p (clone->decl))
1849 return;
1852 /* If function decl is available then use it for
1853 formal arguments list. Otherwise use function type. */
1854 if (fndecl && DECL_ARGUMENTS (fndecl))
1855 first_formal_arg = DECL_ARGUMENTS (fndecl);
1856 else
1858 first_formal_arg = TYPE_ARG_TYPES (fntype);
1859 use_fntype = true;
1862 /* Fill vector of new call args. */
1863 vec<tree> new_args = vNULL;
1864 new_args.create (gimple_call_num_args (call));
1865 arg = first_formal_arg;
1866 for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
1868 tree call_arg = gimple_call_arg (call, arg_no);
1869 tree type;
1871 /* Get arg type using formal argument description
1872 or actual argument type. */
1873 if (arg)
1874 if (use_fntype)
1875 if (TREE_VALUE (arg) != void_type_node)
1877 type = TREE_VALUE (arg);
1878 arg = TREE_CHAIN (arg);
1880 else
1881 type = TREE_TYPE (call_arg);
1882 else
1884 type = TREE_TYPE (arg);
1885 arg = TREE_CHAIN (arg);
1887 else
1888 type = TREE_TYPE (call_arg);
1890 new_args.safe_push (call_arg);
1892 if (BOUNDED_TYPE_P (type)
1893 || pass_by_reference (NULL, TYPE_MODE (type), type, true))
1894 new_args.safe_push (chkp_find_bounds (call_arg, gsi));
1895 else if (chkp_type_has_pointer (type))
1897 HOST_WIDE_INT max_bounds
1898 = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
1899 tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
1900 HOST_WIDE_INT bnd_no;
1902 memset (all_bounds, 0, sizeof (tree) * max_bounds);
1904 chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);
1906 for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
1907 if (all_bounds[bnd_no])
1908 new_args.safe_push (all_bounds[bnd_no]);
1910 free (all_bounds);
1914 if (new_args.length () == gimple_call_num_args (call))
1915 new_call = call;
1916 else
1918 new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
1919 gimple_call_set_lhs (new_call, gimple_call_lhs (call));
1920 gimple_call_copy_flags (new_call, call);
1921 gimple_call_set_chain (new_call, gimple_call_chain (call));
1923 new_args.release ();
1925 /* For direct calls fndecl is replaced with instrumented version. */
1926 if (fndecl)
1928 tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
1929 gimple_call_set_fndecl (new_call, new_decl);
1930 gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
1932 /* For indirect call we should fix function pointer type if
1933 pass some bounds. */
1934 else if (new_call != call)
1936 tree type = gimple_call_fntype (call);
1937 type = chkp_copy_function_type_adding_bounds (type);
1938 gimple_call_set_fntype (new_call, type);
1941 /* replace old call statement with the new one. */
1942 if (call != new_call)
1944 FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
1946 SSA_NAME_DEF_STMT (op) = new_call;
1948 gsi_replace (gsi, new_call, true);
1950 else
1951 update_stmt (new_call);
1953 gimple_call_set_with_bounds (new_call, true);
1956 /* Return constant static bounds var with specified bounds LB and UB.
1957 If such var does not exists then new var is created with specified NAME. */
1958 static tree
1959 chkp_make_static_const_bounds (HOST_WIDE_INT lb,
1960 HOST_WIDE_INT ub,
1961 const char *name)
1963 tree id = get_identifier (name);
1964 tree var;
1965 varpool_node *node;
1966 symtab_node *snode;
1968 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
1969 pointer_bounds_type_node);
1970 TREE_STATIC (var) = 1;
1971 TREE_PUBLIC (var) = 1;
1973 /* With LTO we may have constant bounds already in varpool.
1974 Try to find it. */
1975 if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
1977 /* We don't allow this symbol usage for non bounds. */
1978 if (snode->type != SYMTAB_VARIABLE
1979 || !POINTER_BOUNDS_P (snode->decl))
1980 sorry ("-fcheck-pointer-bounds requires '%s' "
1981 "name for internal usage",
1982 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));
1984 return snode->decl;
1987 TREE_USED (var) = 1;
1988 TREE_READONLY (var) = 1;
1989 TREE_ADDRESSABLE (var) = 0;
1990 DECL_ARTIFICIAL (var) = 1;
1991 DECL_READ_P (var) = 1;
1992 DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
1993 make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
1994 /* We may use this symbol during ctors generation in chkp_finish_file
1995 when all symbols are emitted. Force output to avoid undefined
1996 symbols in ctors. */
1997 node = varpool_node::get_create (var);
1998 node->force_output = 1;
2000 varpool_node::finalize_decl (var);
2002 return var;
2005 /* Generate code to make bounds with specified lower bound LB and SIZE.
2006 if AFTER is 1 then code is inserted after position pointed by ITER
2007 otherwise code is inserted before position pointed by ITER.
2008 If ITER is NULL then code is added to entry block. */
2009 static tree
2010 chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
2012 gimple_seq seq;
2013 gimple_stmt_iterator gsi;
2014 gimple *stmt;
2015 tree bounds;
2017 if (iter)
2018 gsi = *iter;
2019 else
2020 gsi = gsi_start_bb (chkp_get_entry_block ());
2022 seq = NULL;
2024 lb = chkp_force_gimple_call_op (lb, &seq);
2025 size = chkp_force_gimple_call_op (size, &seq);
2027 stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
2028 chkp_mark_stmt (stmt);
2030 bounds = chkp_get_tmp_reg (stmt);
2031 gimple_call_set_lhs (stmt, bounds);
2033 gimple_seq_add_stmt (&seq, stmt);
2035 if (iter && after)
2036 gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
2037 else
2038 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
2040 if (dump_file && (dump_flags & TDF_DETAILS))
2042 fprintf (dump_file, "Made bounds: ");
2043 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2044 if (iter)
2046 fprintf (dump_file, " inserted before statement: ");
2047 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
2049 else
2050 fprintf (dump_file, " at function entry\n");
2053 /* update_stmt (stmt); */
2055 return bounds;
2058 /* Return var holding zero bounds. */
2059 tree
2060 chkp_get_zero_bounds_var (void)
2062 if (!chkp_zero_bounds_var)
2063 chkp_zero_bounds_var
2064 = chkp_make_static_const_bounds (0, -1,
2065 CHKP_ZERO_BOUNDS_VAR_NAME);
2066 return chkp_zero_bounds_var;
2069 /* Return var holding none bounds. */
2070 tree
2071 chkp_get_none_bounds_var (void)
2073 if (!chkp_none_bounds_var)
2074 chkp_none_bounds_var
2075 = chkp_make_static_const_bounds (-1, 0,
2076 CHKP_NONE_BOUNDS_VAR_NAME);
2077 return chkp_none_bounds_var;
2080 /* Return SSA_NAME used to represent zero bounds. */
2081 static tree
2082 chkp_get_zero_bounds (void)
2084 if (zero_bounds)
2085 return zero_bounds;
2087 if (dump_file && (dump_flags & TDF_DETAILS))
2088 fprintf (dump_file, "Creating zero bounds...");
2090 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2091 || flag_chkp_use_static_const_bounds > 0)
2093 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2094 gimple *stmt;
2096 zero_bounds = chkp_get_tmp_reg (NULL);
2097 stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
2098 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2100 else
2101 zero_bounds = chkp_make_bounds (integer_zero_node,
2102 integer_zero_node,
2103 NULL,
2104 false);
2106 return zero_bounds;
2109 /* Return SSA_NAME used to represent none bounds. */
2110 static tree
2111 chkp_get_none_bounds (void)
2113 if (none_bounds)
2114 return none_bounds;
2116 if (dump_file && (dump_flags & TDF_DETAILS))
2117 fprintf (dump_file, "Creating none bounds...");
2120 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2121 || flag_chkp_use_static_const_bounds > 0)
2123 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2124 gimple *stmt;
2126 none_bounds = chkp_get_tmp_reg (NULL);
2127 stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
2128 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2130 else
2131 none_bounds = chkp_make_bounds (integer_minus_one_node,
2132 build_int_cst (size_type_node, 2),
2133 NULL,
2134 false);
2136 return none_bounds;
2139 /* Return bounds to be used as a result of operation which
2140 should not create poiunter (e.g. MULT_EXPR). */
2141 static tree
2142 chkp_get_invalid_op_bounds (void)
2144 return chkp_get_zero_bounds ();
2147 /* Return bounds to be used for loads of non-pointer values. */
2148 static tree
2149 chkp_get_nonpointer_load_bounds (void)
2151 return chkp_get_zero_bounds ();
2154 /* Return 1 if may use bndret call to get bounds for pointer
2155 returned by CALL. */
2156 static bool
2157 chkp_call_returns_bounds_p (gcall *call)
2159 if (gimple_call_internal_p (call))
2160 return false;
2162 if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
2163 || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
2164 return true;
2166 if (gimple_call_with_bounds_p (call))
2167 return true;
2169 tree fndecl = gimple_call_fndecl (call);
2171 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
2172 return false;
2174 if (fndecl && !chkp_instrumentable_p (fndecl))
2175 return false;
2177 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2179 if (chkp_instrument_normal_builtin (fndecl))
2180 return true;
2182 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
2183 return false;
2185 struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
2186 return (clone && gimple_has_body_p (clone->decl));
2189 return true;
2192 /* Build bounds returned by CALL. */
/* The result depends on the callee:
   - alloca-like builtins: make fresh bounds from the returned
     pointer and the requested size;
   - chkp set/init/null/copy builtins: bounds are known statically;
   - calls returning one of their arguments (ERF_RETURNS_ARG):
     reuse that argument's bounds;
   - otherwise, if the call returns bounds, emit a call to
     chkp_ret_bnd_fndecl right after CALL to obtain them at runtime;
   - else fall back to zero bounds.
   The computed bounds are registered for CALL's lhs.  */
2193 static tree
2194 chkp_build_returned_bound (gcall *call)
2196 gimple_stmt_iterator gsi;
2197 tree bounds;
2198 gimple *stmt;
2199 tree fndecl = gimple_call_fndecl (call);
2200 unsigned int retflags;
2202 /* To avoid fixing alloca expands in targets we handle
2203 it separately. */
2204 if (fndecl
2205 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2206 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
2207 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2209 tree size = gimple_call_arg (call, 0);
2210 tree lb = gimple_call_lhs (call);
2211 gimple_stmt_iterator iter = gsi_for_stmt (call);
2212 bounds = chkp_make_bounds (lb, size, &iter, true);
2214 /* We know bounds returned by set_bounds builtin call. */
2215 else if (fndecl
2216 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2217 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
2219 tree lb = gimple_call_arg (call, 0);
2220 tree size = gimple_call_arg (call, 1);
2221 gimple_stmt_iterator iter = gsi_for_stmt (call);
2222 bounds = chkp_make_bounds (lb, size, &iter, true);
2224 /* Detect bounds initialization calls. */
2225 else if (fndecl
2226 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2227 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
2228 bounds = chkp_get_zero_bounds ();
2229 /* Detect bounds nullification calls. */
2230 else if (fndecl
2231 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2232 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
2233 bounds = chkp_get_none_bounds ();
2234 /* Detect bounds copy calls. */
2235 else if (fndecl
2236 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2237 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
2239 gimple_stmt_iterator iter = gsi_for_stmt (call);
2240 bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
2242 /* Do not use retbnd when returned bounds are equal to some
2243 of passed bounds. */
2244 else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
2245 && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
2247 gimple_stmt_iterator iter = gsi_for_stmt (call);
2248 unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
/* RETARG numbers the original (non-bounds) arguments only; for an
   instrumented call find the matching real argument index by
   skipping the interleaved bounds arguments.  */
2249 if (gimple_call_with_bounds_p (call))
2251 for (argno = 0; argno < gimple_call_num_args (call); argno++)
2252 if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
2254 if (retarg)
2255 retarg--;
2256 else
2257 break;
2260 else
2261 argno = retarg;
2263 bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
2265 else if (chkp_call_returns_bounds_p (call))
2267 gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);
2269 /* In general case build checker builtin call to
2270 obtain returned bounds. */
2271 stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
2272 gimple_call_lhs (call));
2273 chkp_mark_stmt (stmt);
2275 gsi = gsi_for_stmt (call);
2276 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
2278 bounds = chkp_get_tmp_reg (stmt);
2279 gimple_call_set_lhs (stmt, bounds);
2281 update_stmt (stmt);
2283 else
2284 bounds = chkp_get_zero_bounds ();
2286 if (dump_file && (dump_flags & TDF_DETAILS))
2288 fprintf (dump_file, "Built returned bounds (");
2289 print_generic_expr (dump_file, bounds, 0);
2290 fprintf (dump_file, ") for call: ");
2291 print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
2294 bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);
2296 return bounds;
2299 /* Return bounds used as returned by call
2300 which produced SSA name VAL. */
2301 gcall *
2302 chkp_retbnd_call_by_val (tree val)
2304 if (TREE_CODE (val) != SSA_NAME)
2305 return NULL;
2307 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2309 imm_use_iterator use_iter;
2310 use_operand_p use_p;
2311 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2312 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2313 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2314 return as_a <gcall *> (USE_STMT (use_p));
2316 return NULL;
2319 /* Check the next parameter for the given PARM is bounds
2320 and return it's default SSA_NAME (create if required). */
2321 static tree
2322 chkp_get_next_bounds_parm (tree parm)
2324 tree bounds = TREE_CHAIN (parm);
2325 gcc_assert (POINTER_BOUNDS_P (bounds));
2326 bounds = ssa_default_def (cfun, bounds);
2327 if (!bounds)
2329 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2330 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2332 return bounds;
2335 /* Return bounds to be used for input argument PARM. */
/* Bounds may already be registered for PARM or its PARM_DECL;
   otherwise they are derived from the decl: zero bounds for the
   static chain pointer and (optionally) for "main"'s arguments,
   the next (bounds) parameter for instrumented pointer parms,
   and zero bounds for everything else.  */
2336 static tree
2337 chkp_get_bound_for_parm (tree parm)
2339 tree decl = SSA_NAME_VAR (parm);
2340 tree bounds;
2342 gcc_assert (TREE_CODE (decl) == PARM_DECL);
2344 bounds = chkp_get_registered_bounds (parm);
2346 if (!bounds)
2347 bounds = chkp_get_registered_bounds (decl);
2349 if (!bounds)
2351 tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;
2353 /* For static chain param we return zero bounds
2354 because currently we do not check dereferences
2355 of this pointer. */
2356 if (cfun->static_chain_decl == decl)
2357 bounds = chkp_get_zero_bounds ();
2358 /* If non instrumented runtime is used then it may be useful
2359 to use zero bounds for input arguments of main
2360 function. */
2361 else if (flag_chkp_zero_input_bounds_for_main
2362 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
2363 "main") == 0)
2364 bounds = chkp_get_zero_bounds ();
2365 else if (BOUNDED_P (parm))
2367 bounds = chkp_get_next_bounds_parm (decl);
2368 bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);
2370 if (dump_file && (dump_flags & TDF_DETAILS))
2372 fprintf (dump_file, "Built arg bounds (");
2373 print_generic_expr (dump_file, bounds, 0);
2374 fprintf (dump_file, ") for arg: ");
2375 print_node (dump_file, "", decl, 0);
2378 else
2379 bounds = chkp_get_zero_bounds ();
/* Register the result for PARM unless bounds got registered
   for it during the computation above.  */
2382 if (!chkp_get_registered_bounds (parm))
2383 bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);
2385 if (dump_file && (dump_flags & TDF_DETAILS))
2387 fprintf (dump_file, "Using bounds ");
2388 print_generic_expr (dump_file, bounds, 0);
2389 fprintf (dump_file, " for parm ");
2390 print_generic_expr (dump_file, parm, 0);
2391 fprintf (dump_file, " of type ");
2392 print_generic_expr (dump_file, TREE_TYPE (parm), 0);
2393 fprintf (dump_file, ".\n");
2396 return bounds;
2399 /* Build and return CALL_EXPR for bndstx builtin with specified
2400 arguments. */
2401 tree
2402 chkp_build_bndldx_call (tree addr, tree ptr)
2404 tree fn = build1 (ADDR_EXPR,
2405 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2406 chkp_bndldx_fndecl);
2407 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2408 fn, 2, addr, ptr);
2409 CALL_WITH_BOUNDS_P (call) = true;
2410 return call;
2413 /* Insert code to load bounds for PTR located by ADDR.
2414 Code is inserted after position pointed by GSI.
2415 Loaded bounds are returned. */
2416 static tree
2417 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2419 gimple_seq seq;
2420 gimple *stmt;
2421 tree bounds;
2423 seq = NULL;
2425 addr = chkp_force_gimple_call_op (addr, &seq);
2426 ptr = chkp_force_gimple_call_op (ptr, &seq);
2428 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2429 chkp_mark_stmt (stmt);
2430 bounds = chkp_get_tmp_reg (stmt);
2431 gimple_call_set_lhs (stmt, bounds);
2433 gimple_seq_add_stmt (&seq, stmt);
2435 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2437 if (dump_file && (dump_flags & TDF_DETAILS))
2439 fprintf (dump_file, "Generated bndldx for pointer ");
2440 print_generic_expr (dump_file, ptr, 0);
2441 fprintf (dump_file, ": ");
2442 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2445 return bounds;
2448 /* Build and return CALL_EXPR for bndstx builtin with specified
2449 arguments. */
2450 tree
2451 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2453 tree fn = build1 (ADDR_EXPR,
2454 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2455 chkp_bndstx_fndecl);
2456 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2457 fn, 3, ptr, bounds, addr);
2458 CALL_WITH_BOUNDS_P (call) = true;
2459 return call;
2462 /* Insert code to store BOUNDS for PTR stored by ADDR.
2463 New statements are inserted after position pointed
2464 by GSI. */
2465 void
2466 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2467 gimple_stmt_iterator *gsi)
2469 gimple_seq seq;
2470 gimple *stmt;
2472 seq = NULL;
2474 addr = chkp_force_gimple_call_op (addr, &seq);
2475 ptr = chkp_force_gimple_call_op (ptr, &seq);
2477 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2478 chkp_mark_stmt (stmt);
2479 gimple_call_set_with_bounds (stmt, true);
2481 gimple_seq_add_stmt (&seq, stmt);
2483 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2485 if (dump_file && (dump_flags & TDF_DETAILS))
2487 fprintf (dump_file, "Generated bndstx for pointer store ");
2488 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2489 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2493 /* Compute bounds for pointer NODE which was assigned in
2494 assignment statement ASSIGN. Return computed bounds. */
2495 static tree
2496 chkp_compute_bounds_for_assignment (tree node, gimple *assign)
2498 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
2499 tree rhs1 = gimple_assign_rhs1 (assign);
2500 tree bounds = NULL_TREE;
2501 gimple_stmt_iterator iter = gsi_for_stmt (assign);
/* BASE tracks the operand whose bounds are reused directly (if
   any); used below to force a bounds copy for abnormal SSA names.  */
2502 tree base = NULL;
2504 if (dump_file && (dump_flags & TDF_DETAILS))
2506 fprintf (dump_file, "Computing bounds for assignment: ");
2507 print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
2510 switch (rhs_code)
2512 case MEM_REF:
2513 case TARGET_MEM_REF:
2514 case COMPONENT_REF:
2515 case ARRAY_REF:
2516 /* We need to load bounds from the bounds table. */
2517 bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
2518 break;
2520 case VAR_DECL:
2521 case SSA_NAME:
2522 case ADDR_EXPR:
2523 case POINTER_PLUS_EXPR:
2524 case NOP_EXPR:
2525 case CONVERT_EXPR:
2526 case INTEGER_CST:
2527 /* Bounds are just propagated from RHS. */
2528 bounds = chkp_find_bounds (rhs1, &iter);
2529 base = rhs1;
2530 break;
2532 case VIEW_CONVERT_EXPR:
2533 /* Bounds are just propagated from RHS. */
2534 bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
2535 break;
2537 case PARM_DECL:
2538 if (BOUNDED_P (rhs1))
2540 /* We need to load bounds from the bounds table. */
2541 bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
2542 node, &iter);
2543 TREE_ADDRESSABLE (rhs1) = 1;
2545 else
2546 bounds = chkp_get_nonpointer_load_bounds ();
2547 break;
2549 case MINUS_EXPR:
2550 case PLUS_EXPR:
2551 case BIT_AND_EXPR:
2552 case BIT_IOR_EXPR:
2553 case BIT_XOR_EXPR:
2555 tree rhs2 = gimple_assign_rhs2 (assign);
2556 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2557 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2559 /* First we try to check types of operands. If it
2560 does not help then look at bound values.
2562 If some bounds are incomplete and other are
2563 not proven to be valid (i.e. also incomplete
2564 or invalid because value is not pointer) then
2565 resulting value is incomplete and will be
2566 recomputed later in chkp_finish_incomplete_bounds. */
2567 if (BOUNDED_P (rhs1)
2568 && !BOUNDED_P (rhs2))
2569 bounds = bnd1;
2570 else if (BOUNDED_P (rhs2)
2571 && !BOUNDED_P (rhs1)
2572 && rhs_code != MINUS_EXPR)
2573 bounds = bnd2;
2574 else if (chkp_incomplete_bounds (bnd1))
2575 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
2576 && !chkp_incomplete_bounds (bnd2))
2577 bounds = bnd2;
2578 else
2579 bounds = incomplete_bounds;
2580 else if (chkp_incomplete_bounds (bnd2))
2581 if (chkp_valid_bounds (bnd1)
2582 && !chkp_incomplete_bounds (bnd1))
2583 bounds = bnd1;
2584 else
2585 bounds = incomplete_bounds;
2586 else if (!chkp_valid_bounds (bnd1))
2587 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
2588 bounds = bnd2;
2589 else if (bnd2 == chkp_get_zero_bounds ())
2590 bounds = bnd2;
2591 else
2592 bounds = bnd1;
2593 else if (!chkp_valid_bounds (bnd2))
2594 bounds = bnd1;
2595 else
2596 /* Seems both operands may have valid bounds
2597 (e.g. pointer minus pointer). In such case
2598 use default invalid op bounds. */
2599 bounds = chkp_get_invalid_op_bounds ();
/* Remember which operand (if any) supplied the chosen bounds.  */
2601 base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
2603 break;
2605 case BIT_NOT_EXPR:
2606 case NEGATE_EXPR:
2607 case LSHIFT_EXPR:
2608 case RSHIFT_EXPR:
2609 case LROTATE_EXPR:
2610 case RROTATE_EXPR:
2611 case EQ_EXPR:
2612 case NE_EXPR:
2613 case LT_EXPR:
2614 case LE_EXPR:
2615 case GT_EXPR:
2616 case GE_EXPR:
2617 case MULT_EXPR:
2618 case RDIV_EXPR:
2619 case TRUNC_DIV_EXPR:
2620 case FLOOR_DIV_EXPR:
2621 case CEIL_DIV_EXPR:
2622 case ROUND_DIV_EXPR:
2623 case TRUNC_MOD_EXPR:
2624 case FLOOR_MOD_EXPR:
2625 case CEIL_MOD_EXPR:
2626 case ROUND_MOD_EXPR:
2627 case EXACT_DIV_EXPR:
2628 case FIX_TRUNC_EXPR:
2629 case FLOAT_EXPR:
2630 case REALPART_EXPR:
2631 case IMAGPART_EXPR:
2632 /* No valid bounds may be produced by these exprs. */
2633 bounds = chkp_get_invalid_op_bounds ();
2634 break;
2636 case COND_EXPR:
2638 tree val1 = gimple_assign_rhs2 (assign);
2639 tree val2 = gimple_assign_rhs3 (assign);
2640 tree bnd1 = chkp_find_bounds (val1, &iter);
2641 tree bnd2 = chkp_find_bounds (val2, &iter);
2642 gimple *stmt;
2644 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2645 bounds = incomplete_bounds;
2646 else if (bnd1 == bnd2)
2647 bounds = bnd1;
2648 else
/* Emit a COND_EXPR selecting bounds with the same condition
   as the original assignment.  */
2650 rhs1 = unshare_expr (rhs1);
2652 bounds = chkp_get_tmp_reg (assign);
2653 stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
2654 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2656 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2657 chkp_mark_invalid_bounds (bounds);
2660 break;
2662 case MAX_EXPR:
2663 case MIN_EXPR:
2665 tree rhs2 = gimple_assign_rhs2 (assign);
2666 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2667 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2669 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2670 bounds = incomplete_bounds;
2671 else if (bnd1 == bnd2)
2672 bounds = bnd1;
2673 else
2675 gimple *stmt;
/* Select bounds of whichever operand MIN/MAX selects.  */
2676 tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
2677 boolean_type_node, rhs1, rhs2);
2678 bounds = chkp_get_tmp_reg (assign);
2679 stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);
2681 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2683 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2684 chkp_mark_invalid_bounds (bounds);
2687 break;
2689 default:
2690 bounds = chkp_get_zero_bounds ();
2691 warning (0, "pointer bounds were lost due to unexpected expression %s",
2692 get_tree_code_name (rhs_code));
2695 gcc_assert (bounds);
2697 /* We may reuse bounds of other pointer we copy/modify. But it is not
2698 allowed for abnormal ssa names. If we produced a pointer using
2699 abnormal ssa name, we better make a bounds copy to avoid coalescing
2700 issues. */
2701 if (base
2702 && TREE_CODE (base) == SSA_NAME
2703 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
2705 gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
2706 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2707 bounds = gimple_assign_lhs (stmt);
2710 if (node)
2711 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2713 return bounds;
2716 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2718 There are just few statement codes allowed: NOP (for default ssa names),
2719 ASSIGN, CALL, PHI, ASM.
2721 Return computed bounds. */
/* For PHI nodes a new bounds PHI is created (its iterator is
   returned via ITER) whose arguments are filled in later; such
   bounds are registered as incomplete.  */
2722 static tree
2723 chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
2724 gphi_iterator *iter)
2726 tree var, bounds;
2727 enum gimple_code code = gimple_code (def_stmt);
2728 gphi *stmt;
2730 if (dump_file && (dump_flags & TDF_DETAILS))
2732 fprintf (dump_file, "Searching for bounds for node: ");
2733 print_generic_expr (dump_file, node, 0);
2735 fprintf (dump_file, " using its definition: ");
2736 print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2739 switch (code)
2741 case GIMPLE_NOP:
2742 var = SSA_NAME_VAR (node);
2743 switch (TREE_CODE (var))
2745 case PARM_DECL:
2746 bounds = chkp_get_bound_for_parm (node);
2747 break;
2749 case VAR_DECL:
2750 /* For uninitialized pointers use none bounds. */
2751 bounds = chkp_get_none_bounds ();
2752 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2753 break;
2755 case RESULT_DECL:
2757 tree base_type;
2759 gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2761 base_type = TREE_TYPE (TREE_TYPE (node));
2763 gcc_assert (TYPE_SIZE (base_type)
2764 && TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2765 && tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2767 bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2768 NULL, false);
2769 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2771 break;
2773 default:
2774 if (dump_file && (dump_flags & TDF_DETAILS))
2776 fprintf (dump_file, "Unexpected var with no definition\n");
2777 print_generic_expr (dump_file, var, 0);
2779 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2780 get_tree_code_name (TREE_CODE (var)));
2782 break;
2784 case GIMPLE_ASSIGN:
2785 bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2786 break;
2788 case GIMPLE_CALL:
2789 bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
2790 break;
2792 case GIMPLE_PHI:
2793 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
2794 if (SSA_NAME_VAR (node))
2795 var = chkp_get_bounds_var (SSA_NAME_VAR (node))
2796 else
2797 var = make_temp_ssa_name (pointer_bounds_type_node,
2798 NULL,
2799 CHKP_BOUND_TMP_NAME);
2800 else
2801 var = chkp_get_tmp_var ();
2802 stmt = create_phi_node (var, gimple_bb (def_stmt));
2803 bounds = gimple_phi_result (stmt);
2804 *iter = gsi_for_phi (stmt);
2806 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2808 /* Created bounds do not have all phi args computed and
2809 therefore we do not know if there is a valid source
2810 of bounds for that node. Therefore we mark bounds
2811 as incomplete and then recompute them when all phi
2812 args are computed. */
2813 chkp_register_incomplete_bounds (bounds, node);
2814 break;
2816 case GIMPLE_ASM:
2817 bounds = chkp_get_zero_bounds ();
2818 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2819 break;
2821 default:
2822 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2823 gimple_code_name[code]);
2826 return bounds;
2829 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2830 tree
2831 chkp_build_make_bounds_call (tree lower_bound, tree size)
2833 tree call = build1 (ADDR_EXPR,
2834 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2835 chkp_bndmk_fndecl);
2836 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2837 call, 2, lower_bound, size);
2840 /* Create static bounds var of specfified OBJ which is
2841 is either VAR_DECL or string constant. */
2842 static tree
2843 chkp_make_static_bounds (tree obj)
2845 static int string_id = 1;
2846 static int var_id = 1;
2847 tree *slot;
2848 const char *var_name;
2849 char *bnd_var_name;
2850 tree bnd_var;
2852 /* First check if we already have required var. */
2853 if (chkp_static_var_bounds)
2855 /* For vars we use assembler name as a key in
2856 chkp_static_var_bounds map. It allows to
2857 avoid duplicating bound vars for decls
2858 sharing assembler name. */
2859 if (TREE_CODE (obj) == VAR_DECL)
2861 tree name = DECL_ASSEMBLER_NAME (obj);
2862 slot = chkp_static_var_bounds->get (name);
2863 if (slot)
2864 return *slot;
2866 else
2868 slot = chkp_static_var_bounds->get (obj);
2869 if (slot)
2870 return *slot;
2874 /* Build decl for bounds var. */
2875 if (TREE_CODE (obj) == VAR_DECL)
2877 if (DECL_IGNORED_P (obj))
2879 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2880 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2882 else
2884 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2886 /* For hidden symbols we want to skip first '*' char. */
2887 if (*var_name == '*')
2888 var_name++;
2890 bnd_var_name = (char *) xmalloc (strlen (var_name)
2891 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2892 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2893 strcat (bnd_var_name, var_name);
2896 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2897 get_identifier (bnd_var_name),
2898 pointer_bounds_type_node);
2900 /* Address of the obj will be used as lower bound. */
2901 TREE_ADDRESSABLE (obj) = 1;
2903 else
2905 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2906 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2908 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2909 get_identifier (bnd_var_name),
2910 pointer_bounds_type_node);
2913 free (bnd_var_name);
2915 TREE_PUBLIC (bnd_var) = 0;
2916 TREE_USED (bnd_var) = 1;
2917 TREE_READONLY (bnd_var) = 0;
2918 TREE_STATIC (bnd_var) = 1;
2919 TREE_ADDRESSABLE (bnd_var) = 0;
2920 DECL_ARTIFICIAL (bnd_var) = 1;
2921 DECL_COMMON (bnd_var) = 1;
2922 DECL_COMDAT (bnd_var) = 1;
2923 DECL_READ_P (bnd_var) = 1;
2924 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2925 /* Force output similar to constant bounds.
2926 See chkp_make_static_const_bounds. */
2927 varpool_node::get_create (bnd_var)->force_output = 1;
2928 /* Mark symbol as requiring bounds initialization. */
2929 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2930 varpool_node::finalize_decl (bnd_var);
2932 /* Add created var to the map to use it for other references
2933 to obj. */
2934 if (!chkp_static_var_bounds)
2935 chkp_static_var_bounds = new hash_map<tree, tree>;
2937 if (TREE_CODE (obj) == VAR_DECL)
2939 tree name = DECL_ASSEMBLER_NAME (obj);
2940 chkp_static_var_bounds->put (name, bnd_var);
2942 else
2943 chkp_static_var_bounds->put (obj, bnd_var);
2945 return bnd_var;
2948 /* When var has incomplete type we cannot get size to
2949 compute its bounds. In such cases we use checker
2950 builtin call which determines object size at runtime. */
/* The generated size computation is inserted at the start of the
   function's entry block; the lower bound is VAR's address.  */
2951 static tree
2952 chkp_generate_extern_var_bounds (tree var)
2954 tree bounds, size_reloc, lb, size, max_size, cond;
2955 gimple_stmt_iterator gsi;
2956 gimple_seq seq = NULL;
2957 gimple *stmt;
2959 /* If instrumentation is not enabled for vars having
2960 incomplete type then just return zero bounds to avoid
2961 checks for this var. */
2962 if (!flag_chkp_incomplete_type)
2963 return chkp_get_zero_bounds ();
2965 if (dump_file && (dump_flags & TDF_DETAILS))
2967 fprintf (dump_file, "Generating bounds for extern symbol '");
2968 print_generic_expr (dump_file, var, 0);
2969 fprintf (dump_file, "'\n");
/* Call the chkp_sizeof builtin to obtain VAR's size.  */
2972 stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
2974 size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
2975 gimple_call_set_lhs (stmt, size_reloc);
2977 gimple_seq_add_stmt (&seq, stmt);
2979 lb = chkp_build_addr_expr (var);
2980 size = make_ssa_name (chkp_get_size_tmp_var ());
2982 if (flag_chkp_zero_dynamic_size_as_infinite)
2984 /* We should check that size relocation was resolved.
2985 If it was not then use maximum possible size for the var. */
2986 max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
2987 fold_convert (chkp_uintptr_type, lb))
2988 max_size = chkp_force_gimple_call_op (max_size, &seq);
2990 cond = build2 (NE_EXPR, boolean_type_node,
2991 size_reloc, integer_zero_node);
2992 stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
2993 gimple_seq_add_stmt (&seq, stmt);
2995 else
2997 stmt = gimple_build_assign (size, size_reloc);
2998 gimple_seq_add_stmt (&seq, stmt);
/* Emit the whole sequence at the start of the entry block.  */
3001 gsi = gsi_start_bb (chkp_get_entry_block ());
3002 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3004 bounds = chkp_make_bounds (lb, size, &gsi, true);
3006 return bounds;
3009 /* Return 1 if TYPE has fields with zero size or fields
3010 marked with chkp_variable_size attribute. */
3011 bool
3012 chkp_variable_size_type (tree type)
3014 bool res = false;
3015 tree field;
3017 if (RECORD_OR_UNION_TYPE_P (type))
3018 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3020 if (TREE_CODE (field) == FIELD_DECL)
3021 res = res
3022 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3023 || chkp_variable_size_type (TREE_TYPE (field));
3025 else
3026 res = !TYPE_SIZE (type)
3027 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3028 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3030 return res;
3033 /* Compute and return bounds for address of DECL which is
3034 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
/* Previously registered address bounds are reused.  Static/public
   vars may read bounds from a statically initialized bounds var;
   vars of unknown size get runtime-computed bounds; otherwise
   bounds are made from DECL's address and DECL_SIZE_UNIT.  */
3035 static tree
3036 chkp_get_bounds_for_decl_addr (tree decl)
3038 tree bounds;
3040 gcc_assert (TREE_CODE (decl) == VAR_DECL
3041 || TREE_CODE (decl) == PARM_DECL
3042 || TREE_CODE (decl) == RESULT_DECL);
3044 bounds = chkp_get_registered_addr_bounds (decl);
3046 if (bounds)
3047 return bounds;
3049 if (dump_file && (dump_flags & TDF_DETAILS))
3051 fprintf (dump_file, "Building bounds for address of decl ");
3052 print_generic_expr (dump_file, decl, 0);
3053 fprintf (dump_file, "\n");
3056 /* Use zero bounds if size is unknown and checks for
3057 unknown sizes are restricted. */
3058 if ((!DECL_SIZE (decl)
3059 || (chkp_variable_size_type (TREE_TYPE (decl))
3060 && (TREE_STATIC (decl)
3061 || DECL_EXTERNAL (decl)
3062 || TREE_PUBLIC (decl))))
3063 && !flag_chkp_incomplete_type)
3064 return chkp_get_zero_bounds ();
3066 if (flag_chkp_use_static_bounds
3067 && TREE_CODE (decl) == VAR_DECL
3068 && (TREE_STATIC (decl)
3069 || DECL_EXTERNAL (decl)
3070 || TREE_PUBLIC (decl))
3071 && !DECL_THREAD_LOCAL_P (decl))
/* Load bounds from the static bounds var at the start of the
   entry block.  */
3073 tree bnd_var = chkp_make_static_bounds (decl);
3074 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3075 gimple *stmt;
3077 bounds = chkp_get_tmp_reg (NULL);
3078 stmt = gimple_build_assign (bounds, bnd_var);
3079 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3081 else if (!DECL_SIZE (decl)
3082 || (chkp_variable_size_type (TREE_TYPE (decl))
3083 && (TREE_STATIC (decl)
3084 || DECL_EXTERNAL (decl)
3085 || TREE_PUBLIC (decl))))
3087 gcc_assert (TREE_CODE (decl) == VAR_DECL);
3088 bounds = chkp_generate_extern_var_bounds (decl);
3090 else
3092 tree lb = chkp_build_addr_expr (decl);
3093 bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3096 return bounds;
3099 /* Compute and return bounds for constant string. */
3100 static tree
3101 chkp_get_bounds_for_string_cst (tree cst)
3103 tree bounds;
3104 tree lb;
3105 tree size;
3107 gcc_assert (TREE_CODE (cst) == STRING_CST);
3109 bounds = chkp_get_registered_bounds (cst);
3111 if (bounds)
3112 return bounds;
3114 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3115 || flag_chkp_use_static_const_bounds > 0)
3117 tree bnd_var = chkp_make_static_bounds (cst);
3118 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3119 gimple *stmt;
3121 bounds = chkp_get_tmp_reg (NULL);
3122 stmt = gimple_build_assign (bounds, bnd_var);
3123 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3125 else
3127 lb = chkp_build_addr_expr (cst);
3128 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3129 bounds = chkp_make_bounds (lb, size, NULL, false);
3132 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3134 return bounds;
3137 /* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
3138 return the result. If ITER is not NULL then Code is inserted
3139 before position pointed by ITER. Otherwise code is added to
3140 entry block. */
3141 static tree
3142 chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
/* Zero bounds do not restrict anything here: when either operand
   is missing or equal to zero bounds, the other one is returned
   unchanged and no code is emitted.  */
3144 if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
3145 return bounds2 ? bounds2 : bounds1;
3146 else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
3147 return bounds1;
3148 else
3150 gimple_seq seq;
3151 gimple *stmt;
3152 tree bounds;
3154 seq = NULL;
3156 stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
3157 chkp_mark_stmt (stmt);
3159 bounds = chkp_get_tmp_reg (stmt);
3160 gimple_call_set_lhs (stmt, bounds);
3162 gimple_seq_add_stmt (&seq, stmt);
3164 /* We are probably doing narrowing for constant expression.
3165 In such case iter may be undefined. */
3166 if (!iter)
3168 gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
3169 iter = &gsi;
3170 gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
3172 else
3173 gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
3175 if (dump_file && (dump_flags & TDF_DETAILS))
3177 fprintf (dump_file, "Bounds intersection: ");
3178 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
3179 fprintf (dump_file, " inserted before statement: ");
3180 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
3181 TDF_VOPS|TDF_MEMSYMS);
3184 return bounds;
3188 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3189 and 0 othersize. */
3190 static bool
3191 chkp_may_narrow_to_field (tree field)
3193 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3194 && tree_to_uhwi (DECL_SIZE (field)) != 0
3195 && (!DECL_FIELD_OFFSET (field)
3196 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3197 && (!DECL_FIELD_BIT_OFFSET (field)
3198 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3199 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3200 && !chkp_variable_size_type (TREE_TYPE (field));
3203 /* Return 1 if bounds for FIELD should be narrowed to
3204 field's own size. */
3205 static bool
3206 chkp_narrow_bounds_for_field (tree field)
3208 HOST_WIDE_INT offs;
3209 HOST_WIDE_INT bit_offs;
3211 if (!chkp_may_narrow_to_field (field))
3212 return false;
3214 /* Accesse to compiler generated fields should not cause
3215 bounds narrowing. */
3216 if (DECL_ARTIFICIAL (field))
3217 return false;
3219 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3220 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3222 return (flag_chkp_narrow_bounds
3223 && (flag_chkp_first_field_has_own_bounds
3224 || offs
3225 || bit_offs));
3228 /* Perform narrowing for BOUNDS using bounds computed for field
3229 access COMPONENT. ITER meaning is the same as for
3230 chkp_intersect_bounds. */
3231 static tree
3232 chkp_narrow_bounds_to_field (tree bounds, tree component,
3233 gimple_stmt_iterator *iter)
3235 tree field = TREE_OPERAND (component, 1);
3236 tree size = DECL_SIZE_UNIT (field);
3237 tree field_ptr = chkp_build_addr_expr (component);
3238 tree field_bounds;
3240 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3242 return chkp_intersect_bounds (field_bounds, bounds, iter);
3245 /* Parse field or array access NODE.
3247 PTR output parameter holds a pointer to the outermost
3248 object.
3250 BITFIELD output parameter is set to 1 if bitfield is
3251 accessed and to 0 otherwise. If it is 1 then ELT holds
3252 outer component for accessed bit field.
3254 SAFE output parameter is set to 1 if access is safe and
3255 checks are not required.
3257 BOUNDS output parameter holds bounds to be used to check
3258 access (may be NULL).
3260 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3261 innermost accessed component. */
3262 static void
3263 chkp_parse_array_and_component_ref (tree node, tree *ptr,
3264 tree *elt, bool *safe,
3265 bool *bitfield,
3266 tree *bounds,
3267 gimple_stmt_iterator *iter,
3268 bool innermost_bounds)
3270 tree comp_to_narrow = NULL_TREE;
3271 tree last_comp = NULL_TREE;
3272 bool array_ref_found = false;
3273 tree *nodes;
3274 tree var;
3275 int len;
3276 int i;
3278 /* Compute tree height for expression. */
3279 var = node;
3280 len = 1;
3281 while (TREE_CODE (var) == COMPONENT_REF
3282 || TREE_CODE (var) == ARRAY_REF
3283 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
3285 var = TREE_OPERAND (var, 0);
3286 len++;
3289 gcc_assert (len > 1);
3291 /* It is more convenient for us to scan left-to-right,
3292 so walk tree again and put all node to nodes vector
3293 in reversed order. */
3294 nodes = XALLOCAVEC (tree, len);
3295 nodes[len - 1] = node;
3296 for (i = len - 2; i >= 0; i--)
3297 nodes[i] = TREE_OPERAND (nodes[i + 1], 0);
3299 if (bounds)
3300 *bounds = NULL;
3301 *safe = true;
3302 *bitfield = (TREE_CODE (node) == COMPONENT_REF
3303 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
3304 /* To get bitfield address we will need outer element. */
3305 if (*bitfield)
3306 *elt = nodes[len - 2];
3307 else
3308 *elt = NULL_TREE;
3310 /* If we have indirection in expression then compute
3311 outermost structure bounds. Computed bounds may be
3312 narrowed later. */
3313 if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
3315 *safe = false;
3316 *ptr = TREE_OPERAND (nodes[0], 0);
3317 if (bounds)
3318 *bounds = chkp_find_bounds (*ptr, iter);
3320 else
3322 gcc_assert (TREE_CODE (var) == VAR_DECL
3323 || TREE_CODE (var) == PARM_DECL
3324 || TREE_CODE (var) == RESULT_DECL
3325 || TREE_CODE (var) == STRING_CST
3326 || TREE_CODE (var) == SSA_NAME);
3328 *ptr = chkp_build_addr_expr (var);
3331 /* In this loop we are trying to find a field access
3332 requiring narrowing. There are two simple rules
3333 for search:
3334 1. Leftmost array_ref is chosen if any.
3335 2. Rightmost suitable component_ref is chosen if innermost
3336 bounds are required and no array_ref exists. */
3337 for (i = 1; i < len; i++)
3339 var = nodes[i];
3341 if (TREE_CODE (var) == ARRAY_REF)
3343 *safe = false;
3344 array_ref_found = true;
3345 if (flag_chkp_narrow_bounds
3346 && !flag_chkp_narrow_to_innermost_arrray
3347 && (!last_comp
3348 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
3350 comp_to_narrow = last_comp;
3351 break;
3354 else if (TREE_CODE (var) == COMPONENT_REF)
3356 tree field = TREE_OPERAND (var, 1);
3358 if (innermost_bounds
3359 && !array_ref_found
3360 && chkp_narrow_bounds_for_field (field))
3361 comp_to_narrow = var;
3362 last_comp = var;
3364 if (flag_chkp_narrow_bounds
3365 && flag_chkp_narrow_to_innermost_arrray
3366 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
/* Narrow to each array-typed field immediately when
   narrowing to the innermost array is requested.  */
3368 if (bounds)
3369 *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
3370 comp_to_narrow = NULL;
3373 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
3374 /* Nothing to do for it. */
3376 else
3377 gcc_unreachable ();
3380 if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
3381 *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);
3383 if (innermost_bounds && bounds && !*bounds)
3384 *bounds = chkp_find_bounds (*ptr, iter);
3387 /* Compute and return bounds for address of OBJ. */
3388 static tree
3389 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
3391 tree bounds = chkp_get_registered_addr_bounds (obj);
3393 if (bounds)
3394 return bounds;
3396 switch (TREE_CODE (obj))
3398 case VAR_DECL:
3399 case PARM_DECL:
3400 case RESULT_DECL:
3401 bounds = chkp_get_bounds_for_decl_addr (obj);
3402 break;
3404 case STRING_CST:
3405 bounds = chkp_get_bounds_for_string_cst (obj);
3406 break;
3408 case ARRAY_REF:
3409 case COMPONENT_REF:
3411 tree elt;
3412 tree ptr;
3413 bool safe;
3414 bool bitfield;
3416 chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
3417 &bitfield, &bounds, iter, true);
3419 gcc_assert (bounds);
3421 break;
3423 case FUNCTION_DECL:
3424 case LABEL_DECL:
3425 bounds = chkp_get_zero_bounds ();
3426 break;
3428 case MEM_REF:
3429 bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
3430 break;
3432 case REALPART_EXPR:
3433 case IMAGPART_EXPR:
3434 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
3435 break;
3437 default:
3438 if (dump_file && (dump_flags & TDF_DETAILS))
3440 fprintf (dump_file, "chkp_make_addressed_object_bounds: "
3441 "unexpected object of type %s\n",
3442 get_tree_code_name (TREE_CODE (obj)));
3443 print_node (dump_file, "", obj, 0);
3445 internal_error ("chkp_make_addressed_object_bounds: "
3446 "Unexpected tree code %s",
3447 get_tree_code_name (TREE_CODE (obj)));
3450 chkp_register_addr_bounds (obj, bounds);
3452 return bounds;
3455 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3456 to compute bounds if required. Computed bounds should be available at
3457 position pointed by ITER.
3459 If PTR_SRC is NULL_TREE then pointer definition is identified.
3461 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3462 PTR. If PTR is a any memory reference then ITER points to a statement
3463 after which bndldx will be inserterd. In both cases ITER will be updated
3464 to point to the inserted bndldx statement. */
3466 static tree
3467 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3469 tree addr = NULL_TREE;
3470 tree bounds = NULL_TREE;
3472 if (!ptr_src)
3473 ptr_src = ptr;
3475 bounds = chkp_get_registered_bounds (ptr_src);
3477 if (bounds)
3478 return bounds;
3480 switch (TREE_CODE (ptr_src))
3482 case MEM_REF:
3483 case VAR_DECL:
3484 if (BOUNDED_P (ptr_src))
3485 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3486 bounds = chkp_get_zero_bounds ();
3487 else
3489 addr = chkp_build_addr_expr (ptr_src);
3490 bounds = chkp_build_bndldx (addr, ptr, iter);
3492 else
3493 bounds = chkp_get_nonpointer_load_bounds ();
3494 break;
3496 case ARRAY_REF:
3497 case COMPONENT_REF:
3498 addr = get_base_address (ptr_src);
3499 if (DECL_P (addr)
3500 || TREE_CODE (addr) == MEM_REF
3501 || TREE_CODE (addr) == TARGET_MEM_REF)
3503 if (BOUNDED_P (ptr_src))
3504 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3505 bounds = chkp_get_zero_bounds ();
3506 else
3508 addr = chkp_build_addr_expr (ptr_src);
3509 bounds = chkp_build_bndldx (addr, ptr, iter);
3511 else
3512 bounds = chkp_get_nonpointer_load_bounds ();
3514 else
3516 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3517 bounds = chkp_find_bounds (addr, iter);
3519 break;
3521 case PARM_DECL:
3522 gcc_unreachable ();
3523 bounds = chkp_get_bound_for_parm (ptr_src);
3524 break;
3526 case TARGET_MEM_REF:
3527 addr = chkp_build_addr_expr (ptr_src);
3528 bounds = chkp_build_bndldx (addr, ptr, iter);
3529 break;
3531 case SSA_NAME:
3532 bounds = chkp_get_registered_bounds (ptr_src);
3533 if (!bounds)
3535 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3536 gphi_iterator phi_iter;
3538 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3540 gcc_assert (bounds);
3542 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3544 unsigned i;
3546 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3548 tree arg = gimple_phi_arg_def (def_phi, i);
3549 tree arg_bnd;
3550 gphi *phi_bnd;
3552 arg_bnd = chkp_find_bounds (arg, NULL);
3554 /* chkp_get_bounds_by_definition created new phi
3555 statement and phi_iter points to it.
3557 Previous call to chkp_find_bounds could create
3558 new basic block and therefore change phi statement
3559 phi_iter points to. */
3560 phi_bnd = phi_iter.phi ();
3562 add_phi_arg (phi_bnd, arg_bnd,
3563 gimple_phi_arg_edge (def_phi, i),
3564 UNKNOWN_LOCATION);
3567 /* If all bound phi nodes have their arg computed
3568 then we may finish its computation. See
3569 chkp_finish_incomplete_bounds for more details. */
3570 if (chkp_may_finish_incomplete_bounds ())
3571 chkp_finish_incomplete_bounds ();
3574 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3575 || chkp_incomplete_bounds (bounds));
3577 break;
3579 case ADDR_EXPR:
3580 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3581 break;
3583 case INTEGER_CST:
3584 if (integer_zerop (ptr_src))
3585 bounds = chkp_get_none_bounds ();
3586 else
3587 bounds = chkp_get_invalid_op_bounds ();
3588 break;
3590 default:
3591 if (dump_file && (dump_flags & TDF_DETAILS))
3593 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3594 get_tree_code_name (TREE_CODE (ptr_src)));
3595 print_node (dump_file, "", ptr_src, 0);
3597 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3598 get_tree_code_name (TREE_CODE (ptr_src)));
3601 if (!bounds)
3603 if (dump_file && (dump_flags & TDF_DETAILS))
3605 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3606 print_node (dump_file, "", ptr_src, 0);
3608 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3611 return bounds;
3614 /* Normal case for bounds search without forced narrowing. */
3615 static tree
3616 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3618 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3621 /* Search bounds for pointer PTR loaded from PTR_SRC
3622 by statement *ITER points to. */
3623 static tree
3624 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3626 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3629 /* Helper function which checks type of RHS and finds all pointers in
3630 it. For each found pointer we build it's accesses in LHS and RHS
3631 objects and then call HANDLER for them. Function is used to copy
3632 or initilize bounds for copied object. */
3633 static void
3634 chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
3635 assign_handler handler)
3637 tree type = TREE_TYPE (lhs);
3639 /* We have nothing to do with clobbers. */
3640 if (TREE_CLOBBER_P (rhs))
3641 return;
3643 if (BOUNDED_TYPE_P (type))
3644 handler (lhs, rhs, arg);
3645 else if (RECORD_OR_UNION_TYPE_P (type))
3647 tree field;
3649 if (TREE_CODE (rhs) == CONSTRUCTOR)
3651 unsigned HOST_WIDE_INT cnt;
3652 tree val;
3654 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
3656 if (chkp_type_has_pointer (TREE_TYPE (field)))
3658 tree lhs_field = chkp_build_component_ref (lhs, field);
3659 chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
3663 else
3664 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3665 if (TREE_CODE (field) == FIELD_DECL
3666 && chkp_type_has_pointer (TREE_TYPE (field)))
3668 tree rhs_field = chkp_build_component_ref (rhs, field);
3669 tree lhs_field = chkp_build_component_ref (lhs, field);
3670 chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
3673 else if (TREE_CODE (type) == ARRAY_TYPE)
3675 unsigned HOST_WIDE_INT cur = 0;
3676 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3677 tree etype = TREE_TYPE (type);
3678 tree esize = TYPE_SIZE (etype);
3680 if (TREE_CODE (rhs) == CONSTRUCTOR)
3682 unsigned HOST_WIDE_INT cnt;
3683 tree purp, val, lhs_elem;
3685 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
3687 if (purp && TREE_CODE (purp) == RANGE_EXPR)
3689 tree lo_index = TREE_OPERAND (purp, 0);
3690 tree hi_index = TREE_OPERAND (purp, 1);
3692 for (cur = (unsigned)tree_to_uhwi (lo_index);
3693 cur <= (unsigned)tree_to_uhwi (hi_index);
3694 cur++)
3696 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3697 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3700 else
3702 if (purp)
3704 gcc_assert (TREE_CODE (purp) == INTEGER_CST);
3705 cur = tree_to_uhwi (purp);
3708 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
3710 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3714 /* Copy array only when size is known. */
3715 else if (maxval && !integer_minus_onep (maxval))
3716 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
3718 tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3719 tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
3720 chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
3723 else
3724 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3725 get_tree_code_name (TREE_CODE (type)));
3728 /* Add code to copy bounds for assignment of RHS to LHS.
3729 ARG is an iterator pointing ne code position. */
3730 static void
3731 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3733 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3734 tree bounds = chkp_find_bounds (rhs, iter);
3735 tree addr = chkp_build_addr_expr(lhs);
3737 chkp_build_bndstx (addr, rhs, bounds, iter);
3740 /* Emit static bound initilizers and size vars. */
3741 void
3742 chkp_finish_file (void)
3744 struct varpool_node *node;
3745 struct chkp_ctor_stmt_list stmts;
3747 if (seen_error ())
3748 return;
3750 /* Iterate through varpool and generate bounds initialization
3751 constructors for all statically initialized pointers. */
3752 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3753 stmts.stmts = NULL;
3754 FOR_EACH_VARIABLE (node)
3755 /* Check that var is actually emitted and we need and may initialize
3756 its bounds. */
3757 if (node->need_bounds_init
3758 && !POINTER_BOUNDS_P (node->decl)
3759 && DECL_RTL (node->decl)
3760 && MEM_P (DECL_RTL (node->decl))
3761 && TREE_ASM_WRITTEN (node->decl))
3763 chkp_walk_pointer_assignments (node->decl,
3764 DECL_INITIAL (node->decl),
3765 &stmts,
3766 chkp_add_modification_to_stmt_list);
3768 if (stmts.avail <= 0)
3770 cgraph_build_static_cdtor ('P', stmts.stmts,
3771 MAX_RESERVED_INIT_PRIORITY + 3);
3772 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3773 stmts.stmts = NULL;
3777 if (stmts.stmts)
3778 cgraph_build_static_cdtor ('P', stmts.stmts,
3779 MAX_RESERVED_INIT_PRIORITY + 3);
3781 /* Iterate through varpool and generate bounds initialization
3782 constructors for all static bounds vars. */
3783 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3784 stmts.stmts = NULL;
3785 FOR_EACH_VARIABLE (node)
3786 if (node->need_bounds_init
3787 && POINTER_BOUNDS_P (node->decl)
3788 && TREE_ASM_WRITTEN (node->decl))
3790 tree bnd = node->decl;
3791 tree var;
3793 gcc_assert (DECL_INITIAL (bnd)
3794 && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);
3796 var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
3797 chkp_output_static_bounds (bnd, var, &stmts);
3800 if (stmts.stmts)
3801 cgraph_build_static_cdtor ('B', stmts.stmts,
3802 MAX_RESERVED_INIT_PRIORITY + 2);
3804 delete chkp_static_var_bounds;
3805 delete chkp_bounds_map;
3808 /* An instrumentation function which is called for each statement
3809 having memory access we want to instrument. It inserts check
3810 code and bounds copy code.
3812 ITER points to statement to instrument.
3814 NODE holds memory access in statement to check.
3816 LOC holds the location information for statement.
3818 DIRFLAGS determines whether access is read or write.
3820 ACCESS_OFFS should be added to address used in NODE
3821 before check.
3823 ACCESS_SIZE holds size of checked access.
3825 SAFE indicates if NODE access is safe and should not be
3826 checked. */
3827 static void
3828 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3829 location_t loc, tree dirflag,
3830 tree access_offs, tree access_size,
3831 bool safe)
3833 tree node_type = TREE_TYPE (node);
3834 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3835 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3836 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3837 tree ptr = NULL_TREE; /* a pointer used for dereference */
3838 tree bounds = NULL_TREE;
3840 /* We do not need instrumentation for clobbers. */
3841 if (dirflag == integer_one_node
3842 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3843 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3844 return;
3846 switch (TREE_CODE (node))
3848 case ARRAY_REF:
3849 case COMPONENT_REF:
3851 bool bitfield;
3852 tree elt;
3854 if (safe)
3856 /* We are not going to generate any checks, so do not
3857 generate bounds as well. */
3858 addr_first = chkp_build_addr_expr (node);
3859 break;
3862 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3863 &bitfield, &bounds, iter, false);
3865 /* Break if there is no dereference and operation is safe. */
3867 if (bitfield)
3869 tree field = TREE_OPERAND (node, 1);
3871 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3872 size = DECL_SIZE_UNIT (field);
3874 if (elt)
3875 elt = chkp_build_addr_expr (elt);
3876 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3877 addr_first = fold_build_pointer_plus_loc (loc,
3878 addr_first,
3879 byte_position (field));
3881 else
3882 addr_first = chkp_build_addr_expr (node);
3884 break;
3886 case INDIRECT_REF:
3887 ptr = TREE_OPERAND (node, 0);
3888 addr_first = ptr;
3889 break;
3891 case MEM_REF:
3892 ptr = TREE_OPERAND (node, 0);
3893 addr_first = chkp_build_addr_expr (node);
3894 break;
3896 case TARGET_MEM_REF:
3897 ptr = TMR_BASE (node);
3898 addr_first = chkp_build_addr_expr (node);
3899 break;
3901 case ARRAY_RANGE_REF:
3902 printf("ARRAY_RANGE_REF\n");
3903 debug_gimple_stmt(gsi_stmt(*iter));
3904 debug_tree(node);
3905 gcc_unreachable ();
3906 break;
3908 case BIT_FIELD_REF:
3910 tree offs, rem, bpu;
3912 gcc_assert (!access_offs);
3913 gcc_assert (!access_size);
3915 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3916 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3917 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3918 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3920 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3921 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3922 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3923 size = fold_convert (size_type_node, size);
3925 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3926 dirflag, offs, size, safe);
3927 return;
3929 break;
3931 case VAR_DECL:
3932 case RESULT_DECL:
3933 case PARM_DECL:
3934 if (dirflag != integer_one_node
3935 || DECL_REGISTER (node))
3936 return;
3938 safe = true;
3939 addr_first = chkp_build_addr_expr (node);
3940 break;
3942 default:
3943 return;
3946 /* If addr_last was not computed then use (addr_first + size - 1)
3947 expression to compute it. */
3948 if (!addr_last)
3950 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3951 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3954 /* Shift both first_addr and last_addr by access_offs if specified. */
3955 if (access_offs)
3957 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3958 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3961 /* Generate bndcl/bndcu checks if memory access is not safe. */
3962 if (!safe)
3964 gimple_stmt_iterator stmt_iter = *iter;
3966 if (!bounds)
3967 bounds = chkp_find_bounds (ptr, iter);
3969 chkp_check_mem_access (addr_first, addr_last, bounds,
3970 stmt_iter, loc, dirflag);
3973 /* We need to store bounds in case pointer is stored. */
3974 if (dirflag == integer_one_node
3975 && chkp_type_has_pointer (node_type)
3976 && flag_chkp_store_bounds)
3978 gimple *stmt = gsi_stmt (*iter);
3979 tree rhs1 = gimple_assign_rhs1 (stmt);
3980 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3982 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3983 chkp_walk_pointer_assignments (node, rhs1, iter,
3984 chkp_copy_bounds_for_elem);
3985 else
3987 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3988 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
3993 /* Add code to copy bounds for all pointers copied
3994 in ASSIGN created during inline of EDGE. */
3995 void
3996 chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
3998 tree lhs = gimple_assign_lhs (assign);
3999 tree rhs = gimple_assign_rhs1 (assign);
4000 gimple_stmt_iterator iter = gsi_for_stmt (assign);
4002 if (!flag_chkp_store_bounds)
4003 return;
4005 chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);
4007 /* We should create edges for all created calls to bndldx and bndstx. */
4008 while (gsi_stmt (iter) != assign)
4010 gimple *stmt = gsi_stmt (iter);
4011 if (gimple_code (stmt) == GIMPLE_CALL)
4013 tree fndecl = gimple_call_fndecl (stmt);
4014 struct cgraph_node *callee = cgraph_node::get_create (fndecl);
4015 struct cgraph_edge *new_edge;
4017 gcc_assert (fndecl == chkp_bndstx_fndecl
4018 || fndecl == chkp_bndldx_fndecl
4019 || fndecl == chkp_ret_bnd_fndecl);
4021 new_edge = edge->caller->create_edge (callee,
4022 as_a <gcall *> (stmt),
4023 edge->count,
4024 edge->frequency);
4025 new_edge->frequency = compute_call_stmt_bb_frequency
4026 (edge->caller->decl, gimple_bb (stmt));
4028 gsi_prev (&iter);
4032 /* Some code transformation made during instrumentation pass
4033 may put code into inconsistent state. Here we find and fix
4034 such flaws. */
4035 void
4036 chkp_fix_cfg ()
4038 basic_block bb;
4039 gimple_stmt_iterator i;
4041 /* We could insert some code right after stmt which ends bb.
4042 We wanted to put this code on fallthru edge but did not
4043 add new edges from the beginning because it may cause new
4044 phi node creation which may be incorrect due to incomplete
4045 bound phi nodes. */
4046 FOR_ALL_BB_FN (bb, cfun)
4047 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4049 gimple *stmt = gsi_stmt (i);
4050 gimple_stmt_iterator next = i;
4052 gsi_next (&next);
4054 if (stmt_ends_bb_p (stmt)
4055 && !gsi_end_p (next))
4057 edge fall = find_fallthru_edge (bb->succs);
4058 basic_block dest = NULL;
4059 int flags = 0;
4061 gcc_assert (fall);
4063 /* We cannot split abnormal edge. Therefore we
4064 store its params, make it regular and then
4065 rebuild abnormal edge after split. */
4066 if (fall->flags & EDGE_ABNORMAL)
4068 flags = fall->flags & ~EDGE_FALLTHRU;
4069 dest = fall->dest;
4071 fall->flags &= ~EDGE_COMPLEX;
4074 while (!gsi_end_p (next))
4076 gimple *next_stmt = gsi_stmt (next);
4077 gsi_remove (&next, false);
4078 gsi_insert_on_edge (fall, next_stmt);
4081 gsi_commit_edge_inserts ();
4083 /* Re-create abnormal edge. */
4084 if (dest)
4085 make_edge (bb, dest, flags);
4090 /* Walker callback for chkp_replace_function_pointers. Replaces
4091 function pointer in the specified operand with pointer to the
4092 instrumented function version. */
4093 static tree
4094 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4095 void *data ATTRIBUTE_UNUSED)
4097 if (TREE_CODE (*op) == FUNCTION_DECL
4098 && chkp_instrumentable_p (*op)
4099 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4100 /* For builtins we replace pointers only for selected
4101 function and functions having definitions. */
4102 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4103 && (chkp_instrument_normal_builtin (*op)
4104 || gimple_has_body_p (*op)))))
4106 struct cgraph_node *node = cgraph_node::get_create (*op);
4107 struct cgraph_node *clone = NULL;
4109 if (!node->instrumentation_clone)
4110 clone = chkp_maybe_create_clone (*op);
4112 if (clone)
4113 *op = clone->decl;
4114 *walk_subtrees = 0;
4117 return NULL;
4120 /* This function searches for function pointers in statement
4121 pointed by GSI and replaces them with pointers to instrumented
4122 function versions. */
4123 static void
4124 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4126 gimple *stmt = gsi_stmt (*gsi);
4127 /* For calls we want to walk call args only. */
4128 if (gimple_code (stmt) == GIMPLE_CALL)
4130 unsigned i;
4131 for (i = 0; i < gimple_call_num_args (stmt); i++)
4132 walk_tree (gimple_call_arg_ptr (stmt, i),
4133 chkp_replace_function_pointer, NULL, NULL);
4135 else
4136 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4139 /* This function instruments all statements working with memory,
4140 calls and rets.
4142 It also removes excess statements from static initializers. */
4143 static void
4144 chkp_instrument_function (void)
4146 basic_block bb, next;
4147 gimple_stmt_iterator i;
4148 enum gimple_rhs_class grhs_class;
4149 bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4151 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4154 next = bb->next_bb;
4155 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4157 gimple *s = gsi_stmt (i);
4159 /* Skip statement marked to not be instrumented. */
4160 if (chkp_marked_stmt_p (s))
4162 gsi_next (&i);
4163 continue;
4166 chkp_replace_function_pointers (&i);
4168 switch (gimple_code (s))
4170 case GIMPLE_ASSIGN:
4171 chkp_process_stmt (&i, gimple_assign_lhs (s),
4172 gimple_location (s), integer_one_node,
4173 NULL_TREE, NULL_TREE, safe);
4174 chkp_process_stmt (&i, gimple_assign_rhs1 (s),
4175 gimple_location (s), integer_zero_node,
4176 NULL_TREE, NULL_TREE, safe);
4177 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
4178 if (grhs_class == GIMPLE_BINARY_RHS)
4179 chkp_process_stmt (&i, gimple_assign_rhs2 (s),
4180 gimple_location (s), integer_zero_node,
4181 NULL_TREE, NULL_TREE, safe);
4182 break;
4184 case GIMPLE_RETURN:
4186 greturn *r = as_a <greturn *> (s);
4187 if (gimple_return_retval (r) != NULL_TREE)
4189 chkp_process_stmt (&i, gimple_return_retval (r),
4190 gimple_location (r),
4191 integer_zero_node,
4192 NULL_TREE, NULL_TREE, safe);
4194 /* Additionally we need to add bounds
4195 to return statement. */
4196 chkp_add_bounds_to_ret_stmt (&i);
4199 break;
4201 case GIMPLE_CALL:
4202 chkp_add_bounds_to_call_stmt (&i);
4203 break;
4205 default:
4209 gsi_next (&i);
4211 /* We do not need any actual pointer stores in checker
4212 static initializer. */
4213 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4214 && gimple_code (s) == GIMPLE_ASSIGN
4215 && gimple_store_p (s))
4217 gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4218 gsi_remove (&del_iter, true);
4219 unlink_stmt_vdef (s);
4220 release_defs(s);
4223 bb = next;
4225 while (bb);
4227 /* Some input params may have bounds and be address taken. In this case
4228 we should store incoming bounds into bounds table. */
4229 tree arg;
4230 if (flag_chkp_store_bounds)
4231 for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4232 if (TREE_ADDRESSABLE (arg))
4234 if (BOUNDED_P (arg))
4236 tree bounds = chkp_get_next_bounds_parm (arg);
4237 tree def_ptr = ssa_default_def (cfun, arg);
4238 gimple_stmt_iterator iter
4239 = gsi_start_bb (chkp_get_entry_block ());
4240 chkp_build_bndstx (chkp_build_addr_expr (arg),
4241 def_ptr ? def_ptr : arg,
4242 bounds, &iter);
4244 /* Skip bounds arg. */
4245 arg = TREE_CHAIN (arg);
4247 else if (chkp_type_has_pointer (TREE_TYPE (arg)))
4249 tree orig_arg = arg;
4250 bitmap slots = BITMAP_ALLOC (NULL);
4251 gimple_stmt_iterator iter
4252 = gsi_start_bb (chkp_get_entry_block ());
4253 bitmap_iterator bi;
4254 unsigned bnd_no;
4256 chkp_find_bound_slots (TREE_TYPE (arg), slots);
4258 EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4260 tree bounds = chkp_get_next_bounds_parm (arg);
4261 HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4262 tree addr = chkp_build_addr_expr (orig_arg);
4263 tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4264 build_int_cst (ptr_type_node, offs));
4265 chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4266 bounds, &iter);
4268 arg = DECL_CHAIN (arg);
4270 BITMAP_FREE (slots);
4275 /* Find init/null/copy_ptr_bounds calls and replace them
4276 with assignments. It should allow better code
4277 optimization. */
4279 static void
4280 chkp_remove_useless_builtins ()
4282 basic_block bb;
4283 gimple_stmt_iterator gsi;
4285 FOR_EACH_BB_FN (bb, cfun)
4287 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4289 gimple *stmt = gsi_stmt (gsi);
4290 tree fndecl;
4291 enum built_in_function fcode;
4293 /* Find builtins returning first arg and replace
4294 them with assignments. */
4295 if (gimple_code (stmt) == GIMPLE_CALL
4296 && (fndecl = gimple_call_fndecl (stmt))
4297 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4298 && (fcode = DECL_FUNCTION_CODE (fndecl))
4299 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4300 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4301 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4302 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4304 tree res = gimple_call_arg (stmt, 0);
4305 update_call_from_tree (&gsi, res);
4306 stmt = gsi_stmt (gsi);
4307 update_stmt (stmt);
4313 /* Initialize pass. */
4314 static void
4315 chkp_init (void)
4317 basic_block bb;
4318 gimple_stmt_iterator i;
4320 in_chkp_pass = true;
4322 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4323 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4324 chkp_unmark_stmt (gsi_stmt (i));
4326 chkp_invalid_bounds = new hash_set<tree>;
4327 chkp_completed_bounds_set = new hash_set<tree>;
4328 delete chkp_reg_bounds;
4329 chkp_reg_bounds = new hash_map<tree, tree>;
4330 delete chkp_bound_vars;
4331 chkp_bound_vars = new hash_map<tree, tree>;
4332 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4333 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4334 delete chkp_bounds_map;
4335 chkp_bounds_map = new hash_map<tree, tree>;
4336 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
4338 entry_block = NULL;
4339 zero_bounds = NULL_TREE;
4340 none_bounds = NULL_TREE;
4341 incomplete_bounds = integer_zero_node;
4342 tmp_var = NULL_TREE;
4343 size_tmp_var = NULL_TREE;
4345 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4347 /* We create these constant bounds once for each object file.
4348 These symbols go to comdat section and result in single copy
4349 of each one in the final binary. */
4350 chkp_get_zero_bounds_var ();
4351 chkp_get_none_bounds_var ();
4353 calculate_dominance_info (CDI_DOMINATORS);
4354 calculate_dominance_info (CDI_POST_DOMINATORS);
4356 bitmap_obstack_initialize (NULL);
4359 /* Finalize instrumentation pass. */
4360 static void
4361 chkp_fini (void)
4363 in_chkp_pass = false;
4365 delete chkp_invalid_bounds;
4366 delete chkp_completed_bounds_set;
4367 delete chkp_reg_addr_bounds;
4368 delete chkp_incomplete_bounds_map;
4370 free_dominance_info (CDI_DOMINATORS);
4371 free_dominance_info (CDI_POST_DOMINATORS);
4373 bitmap_obstack_release (NULL);
4375 entry_block = NULL;
4376 zero_bounds = NULL_TREE;
4377 none_bounds = NULL_TREE;
4380 /* Main instrumentation pass function. */
4381 static unsigned int
4382 chkp_execute (void)
4384 chkp_init ();
4386 chkp_instrument_function ();
4388 chkp_remove_useless_builtins ();
4390 chkp_function_mark_instrumented (cfun->decl);
4392 chkp_fix_cfg ();
4394 chkp_fini ();
4396 return 0;
4399 /* Instrumentation pass gate. */
4400 static bool
4401 chkp_gate (void)
4403 cgraph_node *node = cgraph_node::get (cfun->decl);
4404 return ((node != NULL
4405 && node->instrumentation_clone)
4406 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
4409 namespace {
4411 const pass_data pass_data_chkp =
4413 GIMPLE_PASS, /* type */
4414 "chkp", /* name */
4415 OPTGROUP_NONE, /* optinfo_flags */
4416 TV_NONE, /* tv_id */
4417 PROP_ssa | PROP_cfg, /* properties_required */
4418 0, /* properties_provided */
4419 0, /* properties_destroyed */
4420 0, /* todo_flags_start */
4421 TODO_verify_il
4422 | TODO_update_ssa /* todo_flags_finish */
4425 class pass_chkp : public gimple_opt_pass
4427 public:
4428 pass_chkp (gcc::context *ctxt)
4429 : gimple_opt_pass (pass_data_chkp, ctxt)
4432 /* opt_pass methods: */
4433 virtual opt_pass * clone ()
4435 return new pass_chkp (m_ctxt);
4438 virtual bool gate (function *)
4440 return chkp_gate ();
4443 virtual unsigned int execute (function *)
4445 return chkp_execute ();
4448 }; // class pass_chkp
4450 } // anon namespace
4452 gimple_opt_pass *
4453 make_pass_chkp (gcc::context *ctxt)
4455 return new pass_chkp (ctxt);
4458 #include "gt-tree-chkp.h"