2017-02-20 Paul Thomas <pault@gcc.gnu.org>
[official-gcc.git] / gcc / tree-chkp.c
blob7eecbc8972db73c5c41fa178950530aafcf2aa1d
/* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2017 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "diagnostic.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "varasm.h"
37 #include "tree-iterator.h"
38 #include "tree-cfg.h"
39 #include "langhooks.h"
40 #include "tree-ssa-address.h"
41 #include "tree-ssa-loop-niter.h"
42 #include "gimple-pretty-print.h"
43 #include "gimple-iterator.h"
44 #include "gimplify.h"
45 #include "gimplify-me.h"
46 #include "print-tree.h"
47 #include "calls.h"
48 #include "expr.h"
49 #include "tree-ssa-propagate.h"
50 #include "tree-chkp.h"
51 #include "gimple-walk.h"
52 #include "tree-dfa.h"
53 #include "ipa-chkp.h"
54 #include "params.h"
56 /* Pointer Bounds Checker instruments code with memory checks to find
57 out-of-bounds memory accesses. Checks are performed by computing
58 bounds for each pointer and then comparing address of accessed
59 memory before pointer dereferencing.
61 1. Function clones.
63 See ipa-chkp.c.
65 2. Instrumentation.
67 There are few things to instrument:
69 a) Memory accesses - add checker calls to check address of accessed memory
70 against bounds of dereferenced pointer. Obviously safe memory
71 accesses like static variable access does not have to be instrumented
72 with checks.
74 Example:
76 val_2 = *p_1;
78 with 4 bytes access is transformed into:
80 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
81 D.1_4 = p_1 + 3;
82 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
83 val_2 = *p_1;
85 where __bound_tmp.1_3 are bounds computed for pointer p_1,
86 __builtin___chkp_bndcl is a lower bound check and
87 __builtin___chkp_bndcu is an upper bound check.
89 b) Pointer stores.
91 When pointer is stored in memory we need to store its bounds. To
92 achieve compatibility of instrumented code with regular codes
93 we have to keep data layout and store bounds in special bound tables
94 via special checker call. Implementation of bounds table may vary for
95 different platforms. It has to associate pointer value and its
96 location (it is required because we may have two equal pointers
97 with different bounds stored in different places) with bounds.
98 Another checker builtin allows to get bounds for specified pointer
99 loaded from specified location.
101 Example:
103 buf1[i_1] = &buf2;
105 is transformed into:
107 buf1[i_1] = &buf2;
108 D.1_2 = &buf1[i_1];
109 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
111 where __bound_tmp.1_2 are bounds of &buf2.
113 c) Static initialization.
115 The special case of pointer store is static pointer initialization.
116 Bounds initialization is performed in a few steps:
117 - register all static initializations in front-end using
118 chkp_register_var_initializer
119 - when file compilation finishes we create functions with special
120 attribute 'chkp ctor' and put explicit initialization code
121 (assignments) for all statically initialized pointers.
122 - when checker constructor is compiled checker pass adds required
123 bounds initialization for all statically initialized pointers
124 - since we do not actually need excess pointers initialization
125 in checker constructor we remove such assignments from them
127 d) Calls.
129 For each call in the code we add additional arguments to pass
130 bounds for pointer arguments. We determine type of call arguments
131 using arguments list from function declaration; if function
132 declaration is not available we use function type; otherwise
133 (e.g. for unnamed arguments) we use type of passed value. Function
134 declaration/type is replaced with the instrumented one.
136 Example:
138 val_1 = foo (&buf1, &buf2, &buf1, 0);
140 is translated into:
142 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
143 &buf1, __bound_tmp.1_2, 0);
145 e) Returns.
147 If function returns a pointer value we have to return bounds also.
148 A new operand was added for return statement to hold returned bounds.
150 Example:
152 return &_buf1;
154 is transformed into
156 return &_buf1, __bound_tmp.1_1;
158 3. Bounds computation.
160 Compiler is fully responsible for computing bounds to be used for each
161 memory access. The first step for bounds computation is to find the
162 origin of pointer dereferenced for memory access. Basing on pointer
163 origin we define a way to compute its bounds. There are just few
164 possible cases:
166 a) Pointer is returned by call.
168 In this case we use corresponding checker builtin method to obtain returned
169 bounds.
171 Example:
173 buf_1 = malloc (size_2);
174 foo (buf_1);
176 is translated into:
178 buf_1 = malloc (size_2);
179 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
180 foo (buf_1, __bound_tmp.1_3);
182 b) Pointer is an address of an object.
184 In this case compiler tries to compute objects size and create corresponding
185 bounds. If object has incomplete type then special checker builtin is used to
186 obtain its size at runtime.
188 Example:
190 foo ()
192 <unnamed type> __bound_tmp.3;
193 static int buf[100];
195 <bb 3>:
196 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
198 <bb 2>:
199 return &buf, __bound_tmp.3_2;
202 Example:
204 Address of an object 'extern int buf[]' with incomplete type is
205 returned.
207 foo ()
209 <unnamed type> __bound_tmp.4;
210 long unsigned int __size_tmp.3;
212 <bb 3>:
213 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
214 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
216 <bb 2>:
217 return &buf, __bound_tmp.4_3;
220 c) Pointer is the result of object narrowing.
222 It happens when we use pointer to an object to compute pointer to a part
223 of an object. E.g. we take pointer to a field of a structure. In this
224 case we perform bounds intersection using bounds of original object and
225 bounds of object's part (which are computed basing on its type).
227 There may be some debatable questions about when narrowing should occur
228 and when it should not. To avoid false bound violations in correct
229 programs we do not perform narrowing when address of an array element is
230 obtained (it has address of the whole array) and when address of the first
231 structure field is obtained (because it is guaranteed to be equal to
232 address of the whole structure and it is legal to cast it back to structure).
234 Default narrowing behavior may be changed using compiler flags.
236 Example:
238 In this example address of the second structure field is returned.
240 foo (struct A * p, __bounds_type __bounds_of_p)
242 <unnamed type> __bound_tmp.3;
243 int * _2;
244 int * _5;
246 <bb 2>:
247 _5 = &p_1(D)->second_field;
248 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
249 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
250 __bounds_of_p_3(D));
251 _2 = &p_1(D)->second_field;
252 return _2, __bound_tmp.3_8;
255 Example:
257 In this example address of the first field of array element is returned.
259 foo (struct A * p, __bounds_type __bounds_of_p, int i)
261 long unsigned int _3;
262 long unsigned int _4;
263 struct A * _6;
264 int * _7;
266 <bb 2>:
267 _3 = (long unsigned int) i_1(D);
268 _4 = _3 * 8;
269 _6 = p_5(D) + _4;
270 _7 = &_6->first_field;
271 return _7, __bounds_of_p_2(D);
275 d) Pointer is the result of pointer arithmetic or type cast.
277 In this case bounds of the base pointer are used. In case of binary
278 operation producing a pointer we are analyzing data flow further
279 looking for operand's bounds. One operand is considered as a base
280 if it has some valid bounds. If we fall into a case when none of
281 operands (or both of them) has valid bounds, a default bounds value
282 is used.
284 Trying to find out bounds for binary operations we may fall into
285 cyclic dependencies for pointers. To avoid infinite recursion all
286 walked phi nodes instantly obtain corresponding bounds but created
287 bounds are marked as incomplete. It helps us to stop DF walk during
288 bounds search.
290 When we reach pointer source, some args of incomplete bounds phi obtain
291 valid bounds and those values are propagated further through phi nodes.
292 If no valid bounds were found for phi node then we mark its result as
293 invalid bounds. Process stops when all incomplete bounds become either
294 valid or invalid and we are able to choose a pointer base.
296 e) Pointer is loaded from the memory.
298 In this case we just need to load bounds from the bounds table.
300 Example:
302 foo ()
304 <unnamed type> __bound_tmp.3;
305 static int * buf;
306 int * _2;
308 <bb 2>:
309 _2 = buf;
310 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
311 return _2, __bound_tmp.3_4;
316 typedef void (*assign_handler)(tree, tree, void *);
318 static tree chkp_get_zero_bounds ();
319 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
320 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
321 gimple_stmt_iterator *iter);
322 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
323 tree *elt, bool *safe,
324 bool *bitfield,
325 tree *bounds,
326 gimple_stmt_iterator *iter,
327 bool innermost_bounds);
329 #define chkp_bndldx_fndecl \
330 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
331 #define chkp_bndstx_fndecl \
332 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
333 #define chkp_checkl_fndecl \
334 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
335 #define chkp_checku_fndecl \
336 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
337 #define chkp_bndmk_fndecl \
338 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
339 #define chkp_ret_bnd_fndecl \
340 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
341 #define chkp_intersect_fndecl \
342 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
343 #define chkp_narrow_bounds_fndecl \
344 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
345 #define chkp_sizeof_fndecl \
346 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
347 #define chkp_extract_lower_fndecl \
348 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
349 #define chkp_extract_upper_fndecl \
350 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
352 static GTY (()) tree chkp_uintptr_type;
354 static GTY (()) tree chkp_zero_bounds_var;
355 static GTY (()) tree chkp_none_bounds_var;
357 static GTY (()) basic_block entry_block;
358 static GTY (()) tree zero_bounds;
359 static GTY (()) tree none_bounds;
360 static GTY (()) tree incomplete_bounds;
361 static GTY (()) tree tmp_var;
362 static GTY (()) tree size_tmp_var;
363 static GTY (()) bitmap chkp_abnormal_copies;
365 struct hash_set<tree> *chkp_invalid_bounds;
366 struct hash_set<tree> *chkp_completed_bounds_set;
367 struct hash_map<tree, tree> *chkp_reg_bounds;
368 struct hash_map<tree, tree> *chkp_bound_vars;
369 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
370 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
371 struct hash_map<tree, tree> *chkp_bounds_map;
372 struct hash_map<tree, tree> *chkp_static_var_bounds;
374 static bool in_chkp_pass;
376 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
377 #define CHKP_SIZE_TMP_NAME "__size_tmp"
378 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
379 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
380 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
381 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
382 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
384 /* Static checker constructors may become very large and their
385 compilation with optimization may take too much time.
386 Therefore we put a limit to number of statements in one
387 constructor. Tests with 100 000 statically initialized
388 pointers showed following compilation times on Sandy Bridge
389 server (used -O2):
390 limit 100 => ~18 sec.
391 limit 300 => ~22 sec.
392 limit 1000 => ~30 sec.
393 limit 3000 => ~49 sec.
394 limit 5000 => ~55 sec.
395 limit 10000 => ~76 sec.
396 limit 100000 => ~532 sec. */
397 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
399 struct chkp_ctor_stmt_list
401 tree stmts;
402 int avail;
405 /* Return 1 if function FNDECL is instrumented by Pointer
406 Bounds Checker. */
407 bool
408 chkp_function_instrumented_p (tree fndecl)
410 return fndecl
411 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
414 /* Mark function FNDECL as instrumented. */
415 void
416 chkp_function_mark_instrumented (tree fndecl)
418 if (chkp_function_instrumented_p (fndecl))
419 return;
421 DECL_ATTRIBUTES (fndecl)
422 = tree_cons (get_identifier ("chkp instrumented"), NULL,
423 DECL_ATTRIBUTES (fndecl));
426 /* Return true when STMT is builtin call to instrumentation function
427 corresponding to CODE. */
429 bool
430 chkp_gimple_call_builtin_p (gimple *call,
431 enum built_in_function code)
433 tree fndecl;
434 if (gimple_call_builtin_p (call, BUILT_IN_MD)
435 && (fndecl = targetm.builtin_chkp_function (code))
436 && (DECL_FUNCTION_CODE (gimple_call_fndecl (call))
437 == DECL_FUNCTION_CODE (fndecl)))
438 return true;
439 return false;
442 /* Emit code to build zero bounds and return RTL holding
443 the result. */
445 chkp_expand_zero_bounds ()
447 tree zero_bnd;
449 if (flag_chkp_use_static_const_bounds)
450 zero_bnd = chkp_get_zero_bounds_var ();
451 else
452 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
453 integer_zero_node);
454 return expand_normal (zero_bnd);
457 /* Emit code to store zero bounds for PTR located at MEM. */
458 void
459 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
461 tree zero_bnd, bnd, addr, bndstx;
463 if (flag_chkp_use_static_const_bounds)
464 zero_bnd = chkp_get_zero_bounds_var ();
465 else
466 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
467 integer_zero_node);
468 bnd = make_tree (pointer_bounds_type_node,
469 assign_temp (pointer_bounds_type_node, 0, 1));
470 addr = build1 (ADDR_EXPR,
471 build_pointer_type (TREE_TYPE (mem)), mem);
472 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
474 expand_assignment (bnd, zero_bnd, false);
475 expand_normal (bndstx);
478 /* Build retbnd call for returned value RETVAL.
480 If BNDVAL is not NULL then result is stored
481 in it. Otherwise a temporary is created to
482 hold returned value.
484 GSI points to a position for a retbnd call
485 and is set to created stmt.
487 Cgraph edge is created for a new call if
488 UPDATE_EDGE is 1.
490 Obtained bounds are returned. */
491 tree
492 chkp_insert_retbnd_call (tree bndval, tree retval,
493 gimple_stmt_iterator *gsi)
495 gimple *call;
497 if (!bndval)
498 bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
500 call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
501 gimple_call_set_lhs (call, bndval);
502 gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
504 return bndval;
507 /* Build a GIMPLE_CALL identical to CALL but skipping bounds
508 arguments. */
510 gcall *
511 chkp_copy_call_skip_bounds (gcall *call)
513 bitmap bounds;
514 unsigned i;
516 bitmap_obstack_initialize (NULL);
517 bounds = BITMAP_ALLOC (NULL);
519 for (i = 0; i < gimple_call_num_args (call); i++)
520 if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
521 bitmap_set_bit (bounds, i);
523 if (!bitmap_empty_p (bounds))
524 call = gimple_call_copy_skip_args (call, bounds);
525 gimple_call_set_with_bounds (call, false);
527 BITMAP_FREE (bounds);
528 bitmap_obstack_release (NULL);
530 return call;
533 /* Redirect edge E to the correct node according to call_stmt.
534 Return 1 if bounds removal from call_stmt should be done
535 instead of redirection. */
537 bool
538 chkp_redirect_edge (cgraph_edge *e)
540 bool instrumented = false;
541 tree decl = e->callee->decl;
543 if (e->callee->instrumentation_clone
544 || chkp_function_instrumented_p (decl))
545 instrumented = true;
547 if (instrumented
548 && !gimple_call_with_bounds_p (e->call_stmt))
549 e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
550 else if (!instrumented
551 && gimple_call_with_bounds_p (e->call_stmt)
552 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
553 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
554 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
556 if (e->callee->instrumented_version)
557 e->redirect_callee (e->callee->instrumented_version);
558 else
560 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
561 /* Avoid bounds removal if all args will be removed. */
562 if (!args || TREE_VALUE (args) != void_type_node)
563 return true;
564 else
565 gimple_call_set_with_bounds (e->call_stmt, false);
569 return false;
572 /* Mark statement S to not be instrumented. */
573 static void
574 chkp_mark_stmt (gimple *s)
576 gimple_set_plf (s, GF_PLF_1, true);
579 /* Mark statement S to be instrumented. */
580 static void
581 chkp_unmark_stmt (gimple *s)
583 gimple_set_plf (s, GF_PLF_1, false);
586 /* Return 1 if statement S should not be instrumented. */
587 static bool
588 chkp_marked_stmt_p (gimple *s)
590 return gimple_plf (s, GF_PLF_1);
593 /* Get var to be used for bound temps. */
594 static tree
595 chkp_get_tmp_var (void)
597 if (!tmp_var)
598 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
600 return tmp_var;
603 /* Get SSA_NAME to be used as temp. */
604 static tree
605 chkp_get_tmp_reg (gimple *stmt)
607 if (in_chkp_pass)
608 return make_ssa_name (chkp_get_tmp_var (), stmt);
610 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
611 CHKP_BOUND_TMP_NAME);
614 /* Get var to be used for size temps. */
615 static tree
616 chkp_get_size_tmp_var (void)
618 if (!size_tmp_var)
619 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
621 return size_tmp_var;
624 /* Register bounds BND for address of OBJ. */
625 static void
626 chkp_register_addr_bounds (tree obj, tree bnd)
628 if (bnd == incomplete_bounds)
629 return;
631 chkp_reg_addr_bounds->put (obj, bnd);
633 if (dump_file && (dump_flags & TDF_DETAILS))
635 fprintf (dump_file, "Regsitered bound ");
636 print_generic_expr (dump_file, bnd, 0);
637 fprintf (dump_file, " for address of ");
638 print_generic_expr (dump_file, obj, 0);
639 fprintf (dump_file, "\n");
643 /* Return bounds registered for address of OBJ. */
644 static tree
645 chkp_get_registered_addr_bounds (tree obj)
647 tree *slot = chkp_reg_addr_bounds->get (obj);
648 return slot ? *slot : NULL_TREE;
651 /* Mark BOUNDS as completed. */
652 static void
653 chkp_mark_completed_bounds (tree bounds)
655 chkp_completed_bounds_set->add (bounds);
657 if (dump_file && (dump_flags & TDF_DETAILS))
659 fprintf (dump_file, "Marked bounds ");
660 print_generic_expr (dump_file, bounds, 0);
661 fprintf (dump_file, " as completed\n");
665 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
666 static bool
667 chkp_completed_bounds (tree bounds)
669 return chkp_completed_bounds_set->contains (bounds);
672 /* Clear comleted bound marks. */
673 static void
674 chkp_erase_completed_bounds (void)
676 delete chkp_completed_bounds_set;
677 chkp_completed_bounds_set = new hash_set<tree>;
680 /* Mark BOUNDS associated with PTR as incomplete. */
681 static void
682 chkp_register_incomplete_bounds (tree bounds, tree ptr)
684 chkp_incomplete_bounds_map->put (bounds, ptr);
686 if (dump_file && (dump_flags & TDF_DETAILS))
688 fprintf (dump_file, "Regsitered incomplete bounds ");
689 print_generic_expr (dump_file, bounds, 0);
690 fprintf (dump_file, " for ");
691 print_generic_expr (dump_file, ptr, 0);
692 fprintf (dump_file, "\n");
696 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
697 static bool
698 chkp_incomplete_bounds (tree bounds)
700 if (bounds == incomplete_bounds)
701 return true;
703 if (chkp_completed_bounds (bounds))
704 return false;
706 return chkp_incomplete_bounds_map->get (bounds) != NULL;
709 /* Clear incomleted bound marks. */
710 static void
711 chkp_erase_incomplete_bounds (void)
713 delete chkp_incomplete_bounds_map;
714 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
717 /* Build and return bndmk call which creates bounds for structure
718 pointed by PTR. Structure should have complete type. */
719 tree
720 chkp_make_bounds_for_struct_addr (tree ptr)
722 tree type = TREE_TYPE (ptr);
723 tree size;
725 gcc_assert (POINTER_TYPE_P (type));
727 size = TYPE_SIZE (TREE_TYPE (type));
729 gcc_assert (size);
731 return build_call_nary (pointer_bounds_type_node,
732 build_fold_addr_expr (chkp_bndmk_fndecl),
733 2, ptr, size);
736 /* Traversal function for chkp_may_finish_incomplete_bounds.
737 Set RES to 0 if at least one argument of phi statement
738 defining bounds (passed in KEY arg) is unknown.
739 Traversal stops when first unknown phi argument is found. */
740 bool
741 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
742 bool *res)
744 gimple *phi;
745 unsigned i;
747 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
749 phi = SSA_NAME_DEF_STMT (bounds);
751 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
753 for (i = 0; i < gimple_phi_num_args (phi); i++)
755 tree phi_arg = gimple_phi_arg_def (phi, i);
756 if (!phi_arg)
758 *res = false;
759 /* Do not need to traverse further. */
760 return false;
764 return true;
767 /* Return 1 if all phi nodes created for bounds have their
768 arguments computed. */
769 static bool
770 chkp_may_finish_incomplete_bounds (void)
772 bool res = true;
774 chkp_incomplete_bounds_map
775 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
777 return res;
780 /* Helper function for chkp_finish_incomplete_bounds.
781 Recompute args for bounds phi node. */
782 bool
783 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
784 void *res ATTRIBUTE_UNUSED)
786 tree ptr = *slot;
787 gphi *bounds_phi;
788 gphi *ptr_phi;
789 unsigned i;
791 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
792 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
794 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
795 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
797 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
799 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
800 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
802 add_phi_arg (bounds_phi, bound_arg,
803 gimple_phi_arg_edge (ptr_phi, i),
804 UNKNOWN_LOCATION);
807 return true;
810 /* Mark BOUNDS as invalid. */
811 static void
812 chkp_mark_invalid_bounds (tree bounds)
814 chkp_invalid_bounds->add (bounds);
816 if (dump_file && (dump_flags & TDF_DETAILS))
818 fprintf (dump_file, "Marked bounds ");
819 print_generic_expr (dump_file, bounds, 0);
820 fprintf (dump_file, " as invalid\n");
824 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
825 static bool
826 chkp_valid_bounds (tree bounds)
828 if (bounds == zero_bounds || bounds == none_bounds)
829 return false;
831 return !chkp_invalid_bounds->contains (bounds);
834 /* Helper function for chkp_finish_incomplete_bounds.
835 Check all arguments of phi nodes trying to find
836 valid completed bounds. If there is at least one
837 such arg then bounds produced by phi node are marked
838 as valid completed bounds and all phi args are
839 recomputed. */
840 bool
841 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
843 gimple *phi;
844 unsigned i;
846 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
848 if (chkp_completed_bounds (bounds))
849 return true;
851 phi = SSA_NAME_DEF_STMT (bounds);
853 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
855 for (i = 0; i < gimple_phi_num_args (phi); i++)
857 tree phi_arg = gimple_phi_arg_def (phi, i);
859 gcc_assert (phi_arg);
861 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
863 *res = true;
864 chkp_mark_completed_bounds (bounds);
865 chkp_recompute_phi_bounds (bounds, slot, NULL);
866 return true;
870 return true;
873 /* Helper function for chkp_finish_incomplete_bounds.
874 Marks all incompleted bounds as invalid. */
875 bool
876 chkp_mark_invalid_bounds_walker (tree const &bounds,
877 tree *slot ATTRIBUTE_UNUSED,
878 void *res ATTRIBUTE_UNUSED)
880 if (!chkp_completed_bounds (bounds))
882 chkp_mark_invalid_bounds (bounds);
883 chkp_mark_completed_bounds (bounds);
885 return true;
888 /* When all bound phi nodes have all their args computed
889 we have enough info to find valid bounds. We iterate
890 through all incompleted bounds searching for valid
891 bounds. Found valid bounds are marked as completed
892 and all remaining incompleted bounds are recomputed.
893 Process continues until no new valid bounds may be
894 found. All remained incompleted bounds are marked as
895 invalid (i.e. have no valid source of bounds). */
896 static void
897 chkp_finish_incomplete_bounds (void)
899 bool found_valid = true;
901 while (found_valid)
903 found_valid = false;
905 chkp_incomplete_bounds_map->
906 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
908 if (found_valid)
909 chkp_incomplete_bounds_map->
910 traverse<void *, chkp_recompute_phi_bounds> (NULL);
913 chkp_incomplete_bounds_map->
914 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
915 chkp_incomplete_bounds_map->
916 traverse<void *, chkp_recompute_phi_bounds> (NULL);
918 chkp_erase_completed_bounds ();
919 chkp_erase_incomplete_bounds ();
922 /* Return 1 if type TYPE is a pointer type or a
923 structure having a pointer type as one of its fields.
924 Otherwise return 0. */
925 bool
926 chkp_type_has_pointer (const_tree type)
928 bool res = false;
930 if (BOUNDED_TYPE_P (type))
931 res = true;
932 else if (RECORD_OR_UNION_TYPE_P (type))
934 tree field;
936 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
937 if (TREE_CODE (field) == FIELD_DECL)
938 res = res || chkp_type_has_pointer (TREE_TYPE (field));
940 else if (TREE_CODE (type) == ARRAY_TYPE)
941 res = chkp_type_has_pointer (TREE_TYPE (type));
943 return res;
946 unsigned
947 chkp_type_bounds_count (const_tree type)
949 unsigned res = 0;
951 if (!type)
952 res = 0;
953 else if (BOUNDED_TYPE_P (type))
954 res = 1;
955 else if (RECORD_OR_UNION_TYPE_P (type))
957 bitmap have_bound;
959 bitmap_obstack_initialize (NULL);
960 have_bound = BITMAP_ALLOC (NULL);
961 chkp_find_bound_slots (type, have_bound);
962 res = bitmap_count_bits (have_bound);
963 BITMAP_FREE (have_bound);
964 bitmap_obstack_release (NULL);
967 return res;
970 /* Get bounds associated with NODE via
971 chkp_set_bounds call. */
972 tree
973 chkp_get_bounds (tree node)
975 tree *slot;
977 if (!chkp_bounds_map)
978 return NULL_TREE;
980 slot = chkp_bounds_map->get (node);
981 return slot ? *slot : NULL_TREE;
984 /* Associate bounds VAL with NODE. */
985 void
986 chkp_set_bounds (tree node, tree val)
988 if (!chkp_bounds_map)
989 chkp_bounds_map = new hash_map<tree, tree>;
991 chkp_bounds_map->put (node, val);
994 /* Check if statically initialized variable VAR require
995 static bounds initialization. If VAR is added into
996 bounds initlization list then 1 is returned. Otherwise
997 return 0. */
998 extern bool
999 chkp_register_var_initializer (tree var)
1001 if (!flag_check_pointer_bounds
1002 || DECL_INITIAL (var) == error_mark_node)
1003 return false;
1005 gcc_assert (VAR_P (var));
1006 gcc_assert (DECL_INITIAL (var));
1008 if (TREE_STATIC (var)
1009 && chkp_type_has_pointer (TREE_TYPE (var)))
1011 varpool_node::get_create (var)->need_bounds_init = 1;
1012 return true;
1015 return false;
1018 /* Helper function for chkp_finish_file.
1020 Add new modification statement (RHS is assigned to LHS)
1021 into list of static initializer statementes (passed in ARG).
1022 If statements list becomes too big, emit checker constructor
1023 and start the new one. */
1024 static void
1025 chkp_add_modification_to_stmt_list (tree lhs,
1026 tree rhs,
1027 void *arg)
1029 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
1030 tree modify;
1032 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1033 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1035 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1036 append_to_statement_list (modify, &stmts->stmts);
1038 stmts->avail--;
1041 /* Build and return ADDR_EXPR for specified object OBJ. */
1042 static tree
1043 chkp_build_addr_expr (tree obj)
1045 return TREE_CODE (obj) == TARGET_MEM_REF
1046 ? tree_mem_ref_addr (ptr_type_node, obj)
1047 : build_fold_addr_expr (obj);
1050 /* Helper function for chkp_finish_file.
1051 Initialize bound variable BND_VAR with bounds of variable
1052 VAR to statements list STMTS. If statements list becomes
1053 too big, emit checker constructor and start the new one. */
1054 static void
1055 chkp_output_static_bounds (tree bnd_var, tree var,
1056 struct chkp_ctor_stmt_list *stmts)
1058 tree lb, ub, size;
1060 if (TREE_CODE (var) == STRING_CST)
1062 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1063 size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
1065 else if (DECL_SIZE (var)
1066 && !chkp_variable_size_type (TREE_TYPE (var)))
1068 /* Compute bounds using statically known size. */
1069 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1070 size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
1072 else
1074 /* Compute bounds using dynamic size. */
1075 tree call;
1077 lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
1078 call = build1 (ADDR_EXPR,
1079 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
1080 chkp_sizeof_fndecl);
1081 size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
1082 call, 1, var);
1084 if (flag_chkp_zero_dynamic_size_as_infinite)
1086 tree max_size, cond;
1088 max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
1089 cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
1090 size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
1093 size = size_binop (MINUS_EXPR, size, size_one_node);
1096 ub = size_binop (PLUS_EXPR, lb, size);
1097 stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
1098 &stmts->stmts);
1099 if (stmts->avail <= 0)
1101 cgraph_build_static_cdtor ('B', stmts->stmts,
1102 MAX_RESERVED_INIT_PRIORITY + 2);
1103 stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
1104 stmts->stmts = NULL;
1108 /* Return entry block to be used for checker initilization code.
1109 Create new block if required. */
1110 static basic_block
1111 chkp_get_entry_block (void)
1113 if (!entry_block)
1114 entry_block
1115 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1117 return entry_block;
1120 /* Return a bounds var to be used for pointer var PTR_VAR. */
1121 static tree
1122 chkp_get_bounds_var (tree ptr_var)
1124 tree bnd_var;
1125 tree *slot;
1127 slot = chkp_bound_vars->get (ptr_var);
1128 if (slot)
1129 bnd_var = *slot;
1130 else
1132 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1133 CHKP_BOUND_TMP_NAME);
1134 chkp_bound_vars->put (ptr_var, bnd_var);
1137 return bnd_var;
1140 /* If BND is an abnormal bounds copy, return a copied value.
1141 Otherwise return BND. */
1142 static tree
1143 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
1145 if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1147 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1148 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1149 bnd = gimple_assign_rhs1 (bnd_def);
1152 return bnd;
1155 /* Register bounds BND for object PTR in global bounds table.
1156 A copy of bounds may be created for abnormal ssa names.
1157 Returns bounds to use for PTR. */
1158 static tree
1159 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1161 bool abnormal_ptr;
1163 if (!chkp_reg_bounds)
1164 return bnd;
1166 /* Do nothing if bounds are incomplete_bounds
1167 because it means bounds will be recomputed. */
1168 if (bnd == incomplete_bounds)
1169 return bnd;
/* PTR is "abnormal" when it is a non-PHI SSA name that appears in an
   abnormal PHI; such pointers need their own bounds copy below to
   avoid coalescing conflicts at out-of-SSA time.  */
1171 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1172 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1173 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1175 /* A single bounds value may be reused multiple times for
1176 different pointer values. It may cause coalescing issues
1177 for abnormal SSA names. To avoid it we create a bounds
1178 copy in case it is computed for abnormal SSA name.
1180 We also cannot reuse such created copies for other pointers */
1181 if (abnormal_ptr
1182 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1184 tree bnd_var = NULL_TREE;
1186 if (abnormal_ptr)
1188 if (SSA_NAME_VAR (ptr))
1189 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1191 else
1192 bnd_var = chkp_get_tmp_var ();
1194 /* For abnormal copies we may just find original
1195 bounds and use them. */
1196 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1197 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1198 /* For undefined values we usually use none bounds
1199 value but in case of abnormal edge it may cause
1200 coalescing failures. Use default definition of
1201 bounds variable instead to avoid it. */
1202 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1203 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1205 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1207 if (dump_file && (dump_flags & TDF_DETAILS))
1209 fprintf (dump_file, "Using default def bounds ");
1210 print_generic_expr (dump_file, bnd, 0);
1211 fprintf (dump_file, " for abnormal default def SSA name ");
1212 print_generic_expr (dump_file, ptr, 0);
1213 fprintf (dump_file, "\n");
1216 else
1218 tree copy;
1219 gimple *def = SSA_NAME_DEF_STMT (ptr);
1220 gimple *assign;
1221 gimple_stmt_iterator gsi;
1223 if (bnd_var)
1224 copy = make_ssa_name (bnd_var);
1225 else
1226 copy = make_temp_ssa_name (pointer_bounds_type_node,
1227 NULL,
1228 CHKP_BOUND_TMP_NAME);
1229 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1230 assign = gimple_build_assign (copy, bnd);
1232 if (dump_file && (dump_flags & TDF_DETAILS))
1234 fprintf (dump_file, "Creating a copy of bounds ");
1235 print_generic_expr (dump_file, bnd, 0);
1236 fprintf (dump_file, " for abnormal SSA name ");
1237 print_generic_expr (dump_file, ptr, 0);
1238 fprintf (dump_file, "\n");
/* PTR has no defining statement: place the copy in the entry block,
   keeping it before any control statement ending that block.  */
1241 if (gimple_code (def) == GIMPLE_NOP)
1243 gsi = gsi_last_bb (chkp_get_entry_block ());
1244 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1245 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1246 else
1247 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1249 else
1251 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1252 /* Sometimes (e.g. when we load a pointer from a
1253 memory) bounds are produced later than a pointer.
1254 We need to insert bounds copy appropriately. */
1255 if (gimple_code (bnd_def) != GIMPLE_NOP
1256 && stmt_dominates_stmt_p (def, bnd_def))
1257 gsi = gsi_for_stmt (bnd_def);
1258 else
1259 gsi = gsi_for_stmt (def);
1260 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1263 bnd = copy;
1266 if (abnormal_ptr)
1267 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1270 chkp_reg_bounds->put (ptr, bnd);
/* NOTE(review): "Regsitered" below is a typo in a runtime dump string;
   left unchanged here since it is emitted output, not a comment.  */
1272 if (dump_file && (dump_flags & TDF_DETAILS))
1274 fprintf (dump_file, "Regsitered bound ");
1275 print_generic_expr (dump_file, bnd, 0);
1276 fprintf (dump_file, " for pointer ");
1277 print_generic_expr (dump_file, ptr, 0);
1278 fprintf (dump_file, "\n");
1281 return bnd;
1284 /* Get bounds registered for object PTR in global bounds table. */
1285 static tree
1286 chkp_get_registered_bounds (tree ptr)
1288 tree *slot;
1290 if (!chkp_reg_bounds)
1291 return NULL_TREE;
1293 slot = chkp_reg_bounds->get (ptr);
1294 return slot ? *slot : NULL_TREE;
1297 /* Add bound retvals to return statement pointed by GSI. */
1299 static void
1300 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1302 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1303 tree retval = gimple_return_retval (ret);
1304 tree ret_decl = DECL_RESULT (cfun->decl);
1305 tree bounds;
1307 if (!retval)
1308 return;
1310 if (BOUNDED_P (ret_decl))
1312 bounds = chkp_find_bounds (retval, gsi);
1313 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1314 gimple_return_set_retbnd (ret, bounds);
1317 update_stmt (ret);
1320 /* Force OP to be suitable for using as an argument for call.
1321 New statements (if any) go to SEQ. */
1322 static tree
1323 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1325 gimple_seq stmts;
1326 gimple_stmt_iterator si;
1328 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1330 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1331 chkp_mark_stmt (gsi_stmt (si));
1333 gimple_seq_add_seq (seq, stmts);
1335 return op;
1338 /* Generate lower bound check for memory access by ADDR.
1339 Check is inserted before the position pointed by ITER.
1340 DIRFLAG indicates whether memory access is load or store. */
1341 static void
1342 chkp_check_lower (tree addr, tree bounds,
1343 gimple_stmt_iterator iter,
1344 location_t location,
1345 tree dirflag)
1347 gimple_seq seq;
1348 gimple *check;
1349 tree node;
/* Zero bounds in a non-instrumented function need no check.  */
1351 if (!chkp_function_instrumented_p (current_function_decl)
1352 && bounds == chkp_get_zero_bounds ())
1353 return;
/* DIRFLAG of integer_zero_node marks a read, integer_one_node a
   write; each kind of check can be disabled by its own flag.  */
1355 if (dirflag == integer_zero_node
1356 && !flag_chkp_check_read)
1357 return;
1359 if (dirflag == integer_one_node
1360 && !flag_chkp_check_write)
1361 return;
1363 seq = NULL;
1365 node = chkp_force_gimple_call_op (addr, &seq);
/* Build the lower-bound check builtin call and emit it (plus any
   statements computing the address) before ITER.  */
1367 check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1368 chkp_mark_stmt (check);
1369 gimple_call_set_with_bounds (check, true);
1370 gimple_set_location (check, location);
1371 gimple_seq_add_stmt (&seq, check);
1373 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1375 if (dump_file && (dump_flags & TDF_DETAILS))
1377 gimple *before = gsi_stmt (iter);
1378 fprintf (dump_file, "Generated lower bound check for statement ");
1379 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1380 fprintf (dump_file, " ");
1381 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1385 /* Generate upper bound check for memory access by ADDR.
1386 Check is inserted before the position pointed by ITER.
1387 DIRFLAG indicates whether memory access is load or store. */
1388 static void
1389 chkp_check_upper (tree addr, tree bounds,
1390 gimple_stmt_iterator iter,
1391 location_t location,
1392 tree dirflag)
1394 gimple_seq seq;
1395 gimple *check;
1396 tree node;
/* Zero bounds in a non-instrumented function need no check.  */
1398 if (!chkp_function_instrumented_p (current_function_decl)
1399 && bounds == chkp_get_zero_bounds ())
1400 return;
/* DIRFLAG of integer_zero_node marks a read, integer_one_node a
   write; each kind of check can be disabled by its own flag.  */
1402 if (dirflag == integer_zero_node
1403 && !flag_chkp_check_read)
1404 return;
1406 if (dirflag == integer_one_node
1407 && !flag_chkp_check_write)
1408 return;
1410 seq = NULL;
1412 node = chkp_force_gimple_call_op (addr, &seq);
/* Build the upper-bound check builtin call and emit it (plus any
   statements computing the address) before ITER.  */
1414 check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1415 chkp_mark_stmt (check);
1416 gimple_call_set_with_bounds (check, true);
1417 gimple_set_location (check, location);
1418 gimple_seq_add_stmt (&seq, check);
1420 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1422 if (dump_file && (dump_flags & TDF_DETAILS))
1424 gimple *before = gsi_stmt (iter);
1425 fprintf (dump_file, "Generated upper bound check for statement ");
1426 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1427 fprintf (dump_file, " ");
1428 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1432 /* Generate lower and upper bound checks for memory access
1433 to memory slot [FIRST, LAST] againsr BOUNDS. Checks
1434 are inserted before the position pointed by ITER.
1435 DIRFLAG indicates whether memory access is load or store. */
1436 void
1437 chkp_check_mem_access (tree first, tree last, tree bounds,
1438 gimple_stmt_iterator iter,
1439 location_t location,
1440 tree dirflag)
1442 chkp_check_lower (first, bounds, iter, location, dirflag);
1443 chkp_check_upper (last, bounds, iter, location, dirflag);
1446 /* Replace call to _bnd_chk_* pointed by GSI with
1447 bndcu and bndcl calls. DIRFLAG determines whether
1448 check is for read or write. */
1450 void
1451 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1452 tree dirflag)
1454 gimple_stmt_iterator call_iter = *gsi;
1455 gimple *call = gsi_stmt (*gsi);
1456 tree fndecl = gimple_call_fndecl (call);
1457 tree addr = gimple_call_arg (call, 0);
1458 tree bounds = chkp_find_bounds (addr, gsi);
1460 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1461 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1462 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1464 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1465 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
/* CHECK_PTR_BOUNDS takes a size argument; its upper check is done at
   ADDR + SIZE - 1, i.e. the last byte of the accessed range.  */
1467 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1469 tree size = gimple_call_arg (call, 1);
1470 addr = fold_build_pointer_plus (addr, size);
1471 addr = fold_build_pointer_plus_hwi (addr, -1);
1472 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
/* The user builtin call itself is no longer needed.  */
1475 gsi_remove (&call_iter, true);
1478 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1479 corresponding bounds extract call. */
1481 void
1482 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1484 gimple *call = gsi_stmt (*gsi);
1485 tree fndecl = gimple_call_fndecl (call);
1486 tree addr = gimple_call_arg (call, 0);
1487 tree bounds = chkp_find_bounds (addr, gsi);
1488 gimple *extract;
/* Choose the internal extract function matching the user builtin.  */
1490 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1491 fndecl = chkp_extract_lower_fndecl;
1492 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1493 fndecl = chkp_extract_upper_fndecl;
1494 else
1495 gcc_unreachable ();
/* The extract call takes the bounds value and keeps the original
   call's LHS, then replaces the user builtin in place.  */
1497 extract = gimple_build_call (fndecl, 1, bounds);
1498 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1499 chkp_mark_stmt (extract);
1501 gsi_replace (gsi, extract, false);
1504 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1505 static tree
1506 chkp_build_component_ref (tree obj, tree field)
1508 tree res;
1510 /* If object is TMR then we do not use component_ref but
1511 add offset instead. We need it to be able to get addr
1512 of the result later. */
1513 if (TREE_CODE (obj) == TARGET_MEM_REF)
1515 tree offs = TMR_OFFSET (obj);
1516 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1517 offs, DECL_FIELD_OFFSET (field));
1519 gcc_assert (offs);
/* Reuse the TMR node with the field's type and adjusted offset.  */
1521 res = copy_node (obj);
1522 TREE_TYPE (res) = TREE_TYPE (field);
1523 TMR_OFFSET (res) = offs;
1525 else
1526 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1528 return res;
1531 /* Return ARRAY_REF for array ARR and index IDX with
1532 specified element type ETYPE and element size ESIZE. */
1533 static tree
1534 chkp_build_array_ref (tree arr, tree etype, tree esize,
1535 unsigned HOST_WIDE_INT idx)
1537 tree index = build_int_cst (size_type_node, idx);
1538 tree res;
1540 /* If object is TMR then we do not use array_ref but
1541 add offset instead. We need it to be able to get addr
1542 of the result later. */
1543 if (TREE_CODE (arr) == TARGET_MEM_REF)
1545 tree offs = TMR_OFFSET (arr);
/* The added offset is ESIZE * INDEX, folded to a constant.  */
1547 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1548 esize, index);
1549 gcc_assert(esize);
1551 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1552 offs, esize);
1553 gcc_assert (offs);
/* Reuse the TMR node with the element type and adjusted offset.  */
1555 res = copy_node (arr);
1556 TREE_TYPE (res) = etype;
1557 TMR_OFFSET (res) = offs;
1559 else
1560 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1562 return res;
1565 /* Helper function for chkp_add_bounds_to_call_stmt.
1566 Fill ALL_BOUNDS output array with created bounds.
1568 OFFS is used for recursive calls and holds basic
1569 offset of TYPE in outer structure in bits.
1571 ITER points a position where bounds are searched.
1573 ALL_BOUNDS[i] is filled with elem bounds if there
1574 is a field in TYPE which has pointer type and offset
1575 equal to i * POINTER_SIZE in bits. */
1576 static void
1577 chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
1578 HOST_WIDE_INT offs,
1579 gimple_stmt_iterator *iter)
1581 tree type = TREE_TYPE (elem);
1583 if (BOUNDED_TYPE_P (type))
1585 if (!all_bounds[offs / POINTER_SIZE])
/* Load the pointer into a fresh SSA temp so bounds can be looked up
   for it right after the assignment.  */
1587 tree temp = make_temp_ssa_name (type, NULL, "");
1588 gimple *assign = gimple_build_assign (temp, elem);
1589 gimple_stmt_iterator gsi;
1591 gsi_insert_before (iter, assign, GSI_SAME_STMT);
1592 gsi = gsi_for_stmt (assign);
1594 all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
1597 else if (RECORD_OR_UNION_TYPE_P (type))
1599 tree field;
/* Recurse into each field at its bit offset within the record.  */
1601 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1602 if (TREE_CODE (field) == FIELD_DECL)
1604 tree base = unshare_expr (elem);
1605 tree field_ref = chkp_build_component_ref (base, field);
1606 HOST_WIDE_INT field_offs
1607 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
1608 if (DECL_FIELD_OFFSET (field))
1609 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1611 chkp_find_bounds_for_elem (field_ref, all_bounds,
1612 offs + field_offs, iter);
1615 else if (TREE_CODE (type) == ARRAY_TYPE)
1617 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1618 tree etype = TREE_TYPE (type);
1619 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1620 unsigned HOST_WIDE_INT cur;
/* Skip arrays without a usable upper bound (e.g. zero-length).  */
1622 if (!maxval || integer_minus_onep (maxval))
1623 return;
1625 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1627 tree base = unshare_expr (elem);
1628 tree arr_elem = chkp_build_array_ref (base, etype,
1629 TYPE_SIZE (etype),
1630 cur);
1631 chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
1632 iter);
1637 /* Fill HAVE_BOUND output bitmap with information about
1638 bounds required for object of type TYPE.
1640 OFFS is used for recursive calls and holds basic
1641 offset of TYPE in outer structure in bits.
1643 HAVE_BOUND[i] is set to 1 if there is a field
1644 in TYPE which has pointer type and offset
1645 equal to i * POINTER_SIZE - OFFS in bits. */
1646 void
1647 chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
1648 HOST_WIDE_INT offs)
1650 if (BOUNDED_TYPE_P (type))
1651 bitmap_set_bit (have_bound, offs / POINTER_SIZE);
1652 else if (RECORD_OR_UNION_TYPE_P (type))
1654 tree field;
/* Recurse into each field at its bit offset within the record.  */
1656 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1657 if (TREE_CODE (field) == FIELD_DECL)
1659 HOST_WIDE_INT field_offs = 0;
1660 if (DECL_FIELD_BIT_OFFSET (field))
1661 field_offs += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1662 if (DECL_FIELD_OFFSET (field))
1663 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1664 chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
1665 offs + field_offs);
1668 else if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
1670 /* The object type is an array of complete type, i.e., other
1671 than a flexible array. */
1672 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1673 tree etype = TREE_TYPE (type);
1674 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1675 unsigned HOST_WIDE_INT cur;
/* Skip arrays without a constant usable upper bound.  */
1677 if (!maxval
1678 || TREE_CODE (maxval) != INTEGER_CST
1679 || integer_minus_onep (maxval))
1680 return;
1682 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1683 chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
1687 /* Fill bitmap RES with information about bounds for
1688 type TYPE. See chkp_find_bound_slots_1 for more
1689 details. */
1690 void
1691 chkp_find_bound_slots (const_tree type, bitmap res)
1693 bitmap_clear (res);
1694 chkp_find_bound_slots_1 (type, res, 0);
1697 /* Return 1 if call to FNDECL should be instrumented
1698 and 0 otherwise. */
1700 static bool
1701 chkp_instrument_normal_builtin (tree fndecl)
1703 switch (DECL_FUNCTION_CODE (fndecl))
1705 case BUILT_IN_STRLEN:
1706 case BUILT_IN_STRCPY:
1707 case BUILT_IN_STRNCPY:
1708 case BUILT_IN_STPCPY:
1709 case BUILT_IN_STPNCPY:
1710 case BUILT_IN_STRCAT:
1711 case BUILT_IN_STRNCAT:
1712 case BUILT_IN_MEMCPY:
1713 case BUILT_IN_MEMPCPY:
1714 case BUILT_IN_MEMSET:
1715 case BUILT_IN_MEMMOVE:
1716 case BUILT_IN_BZERO:
1717 case BUILT_IN_STRCMP:
1718 case BUILT_IN_STRNCMP:
1719 case BUILT_IN_BCMP:
1720 case BUILT_IN_MEMCMP:
1721 case BUILT_IN_MEMCPY_CHK:
1722 case BUILT_IN_MEMPCPY_CHK:
1723 case BUILT_IN_MEMMOVE_CHK:
1724 case BUILT_IN_MEMSET_CHK:
1725 case BUILT_IN_STRCPY_CHK:
1726 case BUILT_IN_STRNCPY_CHK:
1727 case BUILT_IN_STPCPY_CHK:
1728 case BUILT_IN_STPNCPY_CHK:
1729 case BUILT_IN_STRCAT_CHK:
1730 case BUILT_IN_STRNCAT_CHK:
1731 case BUILT_IN_MALLOC:
1732 case BUILT_IN_CALLOC:
1733 case BUILT_IN_REALLOC:
1734 return 1;
1736 default:
1737 return 0;
1741 /* Add bound arguments to call statement pointed by GSI.
1742 Also performs a replacement of user checker builtins calls
1743 with internal ones. */
1745 static void
1746 chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
1748 gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
1749 unsigned arg_no = 0;
1750 tree fndecl = gimple_call_fndecl (call);
1751 tree fntype;
1752 tree first_formal_arg;
1753 tree arg;
1754 bool use_fntype = false;
1755 tree op;
1756 ssa_op_iter iter;
1757 gcall *new_call;
1759 /* Do nothing for internal functions. */
1760 if (gimple_call_internal_p (call))
1761 return;
1763 fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));
1765 /* Do nothing if back-end builtin is called. */
1766 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
1767 return;
1769 /* Do nothing for some middle-end builtins. */
1770 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1771 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
1772 return;
1774 /* Do nothing for calls to not instrumentable functions. */
1775 if (fndecl && !chkp_instrumentable_p (fndecl))
1776 return;
1778 /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
1779 and CHKP_COPY_PTR_BOUNDS. */
1780 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1781 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
1782 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
1783 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
1784 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
1785 return;
1787 /* Check user builtins are replaced with checks. */
1788 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1789 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1790 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
1791 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
1793 chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
1794 return;
1797 /* Check user builtins are replaced with bound extract. */
1798 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1799 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
1800 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
1802 chkp_replace_extract_builtin (gsi);
1803 return;
1806 /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
1807 target narrow bounds call. */
1808 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1809 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
1811 tree arg = gimple_call_arg (call, 1);
1812 tree bounds = chkp_find_bounds (arg, gsi);
1814 gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
1815 gimple_call_set_arg (call, 1, bounds);
1816 update_stmt (call);
1818 return;
1821 /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
1822 bndstx call. */
1823 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1824 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
1826 tree addr = gimple_call_arg (call, 0);
1827 tree ptr = gimple_call_arg (call, 1);
1828 tree bounds = chkp_find_bounds (ptr, gsi);
1829 gimple_stmt_iterator iter = gsi_for_stmt (call);
1831 chkp_build_bndstx (addr, ptr, bounds, gsi);
1832 gsi_remove (&iter, true);
1834 return;
1837 if (!flag_chkp_instrument_calls)
1838 return;
1840 /* We instrument only some subset of builtins. We also instrument
1841 builtin calls to be inlined. */
1842 if (fndecl
1843 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1844 && !chkp_instrument_normal_builtin (fndecl))
1846 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
1847 return;
1849 struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
1850 if (!clone
1851 || !gimple_has_body_p (clone->decl))
1852 return;
1855 /* If function decl is available then use it for
1856 formal arguments list. Otherwise use function type. */
1857 if (fndecl
1858 && DECL_ARGUMENTS (fndecl)
1859 && gimple_call_fntype (call) == TREE_TYPE (fndecl))
1860 first_formal_arg = DECL_ARGUMENTS (fndecl);
1861 else
1863 first_formal_arg = TYPE_ARG_TYPES (fntype);
1864 use_fntype = true;
1867 /* Fill vector of new call args. */
1868 vec<tree> new_args = vNULL;
1869 new_args.create (gimple_call_num_args (call));
1870 arg = first_formal_arg;
1871 for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
1873 tree call_arg = gimple_call_arg (call, arg_no);
1874 tree type;
1876 /* Get arg type using formal argument description
1877 or actual argument type. */
1878 if (arg)
1879 if (use_fntype)
1880 if (TREE_VALUE (arg) != void_type_node)
1882 type = TREE_VALUE (arg);
1883 arg = TREE_CHAIN (arg);
1885 else
1886 type = TREE_TYPE (call_arg);
1887 else
1889 type = TREE_TYPE (arg);
1890 arg = TREE_CHAIN (arg);
1892 else
1893 type = TREE_TYPE (call_arg);
/* Each argument is pushed, followed by its bounds when the argument
   is a pointer (or passed by reference); for aggregates containing
   pointers every contained pointer's bounds are appended.  */
1895 new_args.safe_push (call_arg);
1897 if (BOUNDED_TYPE_P (type)
1898 || pass_by_reference (NULL, TYPE_MODE (type), type, true))
1899 new_args.safe_push (chkp_find_bounds (call_arg, gsi));
1900 else if (chkp_type_has_pointer (type))
1902 HOST_WIDE_INT max_bounds
1903 = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
1904 tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
1905 HOST_WIDE_INT bnd_no;
1907 memset (all_bounds, 0, sizeof (tree) * max_bounds);
1909 chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);
1911 for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
1912 if (all_bounds[bnd_no])
1913 new_args.safe_push (all_bounds[bnd_no]);
1915 free (all_bounds);
/* If no bounds arguments were added, the original call statement is
   reused; otherwise a new call is built with the extended args.  */
1919 if (new_args.length () == gimple_call_num_args (call))
1920 new_call = call;
1921 else
1923 new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
1924 gimple_call_set_lhs (new_call, gimple_call_lhs (call));
1925 gimple_call_copy_flags (new_call, call);
1926 gimple_call_set_chain (new_call, gimple_call_chain (call));
1928 new_args.release ();
1930 /* For direct calls fndecl is replaced with instrumented version. */
1931 if (fndecl)
1933 tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
1934 gimple_call_set_fndecl (new_call, new_decl);
1935 /* In case of a type cast we should modify used function
1936 type instead of using type of new fndecl. */
1937 if (gimple_call_fntype (call) != TREE_TYPE (fndecl))
1939 tree type = gimple_call_fntype (call);
1940 type = chkp_copy_function_type_adding_bounds (type);
1941 gimple_call_set_fntype (new_call, type);
1943 else
1944 gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
1946 /* For indirect call we should fix function pointer type if
1947 pass some bounds. */
1948 else if (new_call != call)
1950 tree type = gimple_call_fntype (call);
1951 type = chkp_copy_function_type_adding_bounds (type);
1952 gimple_call_set_fntype (new_call, type);
1955 /* replace old call statement with the new one. */
1956 if (call != new_call)
1958 FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
1960 SSA_NAME_DEF_STMT (op) = new_call;
1962 gsi_replace (gsi, new_call, true);
1964 else
1965 update_stmt (new_call);
1967 gimple_call_set_with_bounds (new_call, true);
1970 /* Return constant static bounds var with specified bounds LB and UB.
1971 If such var does not exist then a new var is created with specified NAME. */
1972 static tree
1973 chkp_make_static_const_bounds (HOST_WIDE_INT lb,
1974 HOST_WIDE_INT ub,
1975 const char *name)
1977 tree id = get_identifier (name);
1978 tree var;
1979 varpool_node *node;
1980 symtab_node *snode;
1982 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
1983 pointer_bounds_type_node);
1984 TREE_STATIC (var) = 1;
1985 TREE_PUBLIC (var) = 1;
1987 /* With LTO we may have constant bounds already in varpool.
1988 Try to find it. */
1989 if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
1991 /* We don't allow this symbol usage for non bounds. */
1992 if (snode->type != SYMTAB_VARIABLE
1993 || !POINTER_BOUNDS_P (snode->decl))
1994 sorry ("-fcheck-pointer-bounds requires '%s' "
1995 "name for internal usage",
1996 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));
1998 return snode->decl;
/* No existing symbol: finish setting up the new read-only variable
   whose initializer is the target-built bounds constant.  */
2001 TREE_USED (var) = 1;
2002 TREE_READONLY (var) = 1;
2003 TREE_ADDRESSABLE (var) = 0;
2004 DECL_ARTIFICIAL (var) = 1;
2005 DECL_READ_P (var) = 1;
2006 DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
2007 make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
2008 /* We may use this symbol during ctors generation in chkp_finish_file
2009 when all symbols are emitted. Force output to avoid undefined
2010 symbols in ctors. */
2011 node = varpool_node::get_create (var);
2012 node->force_output = 1;
2014 varpool_node::finalize_decl (var);
2016 return var;
2019 /* Generate code to make bounds with specified lower bound LB and SIZE.
2020 If AFTER is true then code is inserted after position pointed by ITER
2021 otherwise code is inserted before position pointed by ITER.
2022 If ITER is NULL then code is added to entry block. */
2023 static tree
2024 chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
2026 gimple_seq seq;
2027 gimple_stmt_iterator gsi;
2028 gimple *stmt;
2029 tree bounds;
2031 if (iter)
2032 gsi = *iter;
2033 else
2034 gsi = gsi_start_bb (chkp_get_entry_block ());
2036 seq = NULL;
/* LB and SIZE may be arbitrary expressions; gimplify them first and
   collect any produced statements into SEQ.  */
2038 lb = chkp_force_gimple_call_op (lb, &seq);
2039 size = chkp_force_gimple_call_op (size, &seq);
2041 stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
2042 chkp_mark_stmt (stmt);
2044 bounds = chkp_get_tmp_reg (stmt);
2045 gimple_call_set_lhs (stmt, bounds);
2047 gimple_seq_add_stmt (&seq, stmt);
/* AFTER only matters when an insertion point was supplied.  */
2049 if (iter && after)
2050 gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
2051 else
2052 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
2054 if (dump_file && (dump_flags & TDF_DETAILS))
2056 fprintf (dump_file, "Made bounds: ");
2057 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2058 if (iter)
2060 fprintf (dump_file, " inserted before statement: ");
2061 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
2063 else
2064 fprintf (dump_file, " at function entry\n");
2067 /* update_stmt (stmt); */
2069 return bounds;
2072 /* Return var holding zero bounds. */
2073 tree
2074 chkp_get_zero_bounds_var (void)
2076 if (!chkp_zero_bounds_var)
2077 chkp_zero_bounds_var
2078 = chkp_make_static_const_bounds (0, -1,
2079 CHKP_ZERO_BOUNDS_VAR_NAME);
2080 return chkp_zero_bounds_var;
2083 /* Return var holding none bounds. */
2084 tree
2085 chkp_get_none_bounds_var (void)
2087 if (!chkp_none_bounds_var)
2088 chkp_none_bounds_var
2089 = chkp_make_static_const_bounds (-1, 0,
2090 CHKP_NONE_BOUNDS_VAR_NAME);
2091 return chkp_none_bounds_var;
2094 /* Return SSA_NAME used to represent zero bounds. */
2095 static tree
2096 chkp_get_zero_bounds (void)
/* The value is created once per function and then cached.  */
2098 if (zero_bounds)
2099 return zero_bounds;
2101 if (dump_file && (dump_flags & TDF_DETAILS))
2102 fprintf (dump_file, "Creating zero bounds...");
/* With static const bounds enabled the value is loaded from the
   global constant var at function entry; otherwise a bndmk call
   is emitted there instead.  */
2104 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2105 || flag_chkp_use_static_const_bounds > 0)
2107 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2108 gimple *stmt;
2110 zero_bounds = chkp_get_tmp_reg (NULL);
2111 stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
2112 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2114 else
2115 zero_bounds = chkp_make_bounds (integer_zero_node,
2116 integer_zero_node,
2117 NULL,
2118 false);
2120 return zero_bounds;
2123 /* Return SSA_NAME used to represent none bounds. */
2124 static tree
2125 chkp_get_none_bounds (void)
/* The value is created once per function and then cached.  */
2127 if (none_bounds)
2128 return none_bounds;
2130 if (dump_file && (dump_flags & TDF_DETAILS))
2131 fprintf (dump_file, "Creating none bounds...");
/* With static const bounds enabled the value is loaded from the
   global constant var at function entry; otherwise a bndmk call
   is emitted there instead.  */
2134 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2135 || flag_chkp_use_static_const_bounds > 0)
2137 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2138 gimple *stmt;
2140 none_bounds = chkp_get_tmp_reg (NULL);
2141 stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
2142 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2144 else
2145 none_bounds = chkp_make_bounds (integer_minus_one_node,
2146 build_int_cst (size_type_node, 2),
2147 NULL,
2148 false);
2150 return none_bounds;
2153 /* Return bounds to be used as a result of operation which
2154 should not create poiunter (e.g. MULT_EXPR). */
2155 static tree
2156 chkp_get_invalid_op_bounds (void)
2158 return chkp_get_zero_bounds ();
2161 /* Return bounds to be used for loads of non-pointer values. */
2162 static tree
2163 chkp_get_nonpointer_load_bounds (void)
2165 return chkp_get_zero_bounds ();
2168 /* Return true if a bndret call may be used to get bounds for the
2169 pointer returned by CALL, and false otherwise. */
2170 static bool
2171 chkp_call_returns_bounds_p (gcall *call)
/* Among internal functions only IFN_VA_ARG returns bounds.  */
2173 if (gimple_call_internal_p (call))
2175 if (gimple_call_internal_fn (call) == IFN_VA_ARG)
2176 return true;
2177 return false;
2180 if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
2181 || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
2182 return true;
2184 if (gimple_call_with_bounds_p (call))
2185 return true;
2187 tree fndecl = gimple_call_fndecl (call);
2189 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
2190 return false;
2192 if (fndecl && !chkp_instrumentable_p (fndecl))
2193 return false;
2195 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2197 if (chkp_instrument_normal_builtin (fndecl))
2198 return true;
/* Other normal builtins return bounds only when an always_inline
   instrumented clone with a body exists.  */
2200 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
2201 return false;
2203 struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
2204 return (clone && gimple_has_body_p (clone->decl));
2207 return true;
2210 /* Build bounds returned by CALL. */
2211 static tree
2212 chkp_build_returned_bound (gcall *call)
2214 gimple_stmt_iterator gsi;
2215 tree bounds;
2216 gimple *stmt;
2217 tree fndecl = gimple_call_fndecl (call);
2218 unsigned int retflags;
2220 /* To avoid fixing alloca expands in targets we handle
2221 it separately. */
2222 if (fndecl
2223 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2224 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
2225 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2227 tree size = gimple_call_arg (call, 0);
2228 tree lb = gimple_call_lhs (call);
2229 gimple_stmt_iterator iter = gsi_for_stmt (call);
2230 bounds = chkp_make_bounds (lb, size, &iter, true);
2232 /* We know bounds returned by set_bounds builtin call. */
2233 else if (fndecl
2234 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2235 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
2237 tree lb = gimple_call_arg (call, 0);
2238 tree size = gimple_call_arg (call, 1);
2239 gimple_stmt_iterator iter = gsi_for_stmt (call);
2240 bounds = chkp_make_bounds (lb, size, &iter, true);
2242 /* Detect bounds initialization calls. */
2243 else if (fndecl
2244 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2245 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
2246 bounds = chkp_get_zero_bounds ();
2247 /* Detect bounds nullification calls. */
2248 else if (fndecl
2249 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2250 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
2251 bounds = chkp_get_none_bounds ();
2252 /* Detect bounds copy calls. */
2253 else if (fndecl
2254 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2255 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
2257 gimple_stmt_iterator iter = gsi_for_stmt (call);
2258 bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
2260 /* Do not use retbnd when returned bounds are equal to some
2261 of passed bounds. */
2262 else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
2263 && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
2265 gimple_stmt_iterator iter = gsi_for_stmt (call);
2266 unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
2267 if (gimple_call_with_bounds_p (call))
2269 for (argno = 0; argno < gimple_call_num_args (call); argno++)
2270 if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
2272 if (retarg)
2273 retarg--;
2274 else
2275 break;
2278 else
2279 argno = retarg;
2281 bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
2283 else if (chkp_call_returns_bounds_p (call))
2285 gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);
2287 /* In general case build checker builtin call to
2288 obtain returned bounds. */
2289 stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
2290 gimple_call_lhs (call));
2291 chkp_mark_stmt (stmt);
2293 gsi = gsi_for_stmt (call);
2294 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
2296 bounds = chkp_get_tmp_reg (stmt);
2297 gimple_call_set_lhs (stmt, bounds);
2299 update_stmt (stmt);
2301 else
2302 bounds = chkp_get_zero_bounds ();
2304 if (dump_file && (dump_flags & TDF_DETAILS))
2306 fprintf (dump_file, "Built returned bounds (");
2307 print_generic_expr (dump_file, bounds, 0);
2308 fprintf (dump_file, ") for call: ");
2309 print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
2312 bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);
2314 return bounds;
2317 /* Return bounds used as returned by call
2318 which produced SSA name VAL. */
2319 gcall *
2320 chkp_retbnd_call_by_val (tree val)
2322 if (TREE_CODE (val) != SSA_NAME)
2323 return NULL;
2325 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2327 imm_use_iterator use_iter;
2328 use_operand_p use_p;
2329 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2330 if (chkp_gimple_call_builtin_p (USE_STMT (use_p), BUILT_IN_CHKP_BNDRET))
2331 return as_a <gcall *> (USE_STMT (use_p));
2333 return NULL;
2336 /* Check the next parameter for the given PARM is bounds
2337 and return it's default SSA_NAME (create if required). */
2338 static tree
2339 chkp_get_next_bounds_parm (tree parm)
2341 tree bounds = TREE_CHAIN (parm);
2342 gcc_assert (POINTER_BOUNDS_P (bounds));
2343 bounds = ssa_default_def (cfun, bounds);
2344 if (!bounds)
2346 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2347 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2349 return bounds;
2352 /* Return bounds to be used for input argument PARM. */
2353 static tree
2354 chkp_get_bound_for_parm (tree parm)
2356 tree decl = SSA_NAME_VAR (parm);
2357 tree bounds;
2359 gcc_assert (TREE_CODE (decl) == PARM_DECL);
2361 bounds = chkp_get_registered_bounds (parm);
2363 if (!bounds)
2364 bounds = chkp_get_registered_bounds (decl);
2366 if (!bounds)
2368 tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;
2370 /* For static chain param we return zero bounds
2371 because currently we do not check dereferences
2372 of this pointer. */
2373 if (cfun->static_chain_decl == decl)
2374 bounds = chkp_get_zero_bounds ();
2375 /* If non instrumented runtime is used then it may be useful
2376 to use zero bounds for input arguments of main
2377 function. */
2378 else if (flag_chkp_zero_input_bounds_for_main
2379 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
2380 "main") == 0)
2381 bounds = chkp_get_zero_bounds ();
2382 else if (BOUNDED_P (parm))
2384 bounds = chkp_get_next_bounds_parm (decl);
2385 bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);
2387 if (dump_file && (dump_flags & TDF_DETAILS))
2389 fprintf (dump_file, "Built arg bounds (");
2390 print_generic_expr (dump_file, bounds, 0);
2391 fprintf (dump_file, ") for arg: ");
2392 print_node (dump_file, "", decl, 0);
2395 else
2396 bounds = chkp_get_zero_bounds ();
2399 if (!chkp_get_registered_bounds (parm))
2400 bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);
2402 if (dump_file && (dump_flags & TDF_DETAILS))
2404 fprintf (dump_file, "Using bounds ");
2405 print_generic_expr (dump_file, bounds, 0);
2406 fprintf (dump_file, " for parm ");
2407 print_generic_expr (dump_file, parm, 0);
2408 fprintf (dump_file, " of type ");
2409 print_generic_expr (dump_file, TREE_TYPE (parm), 0);
2410 fprintf (dump_file, ".\n");
2413 return bounds;
2416 /* Build and return CALL_EXPR for bndstx builtin with specified
2417 arguments. */
2418 tree
2419 chkp_build_bndldx_call (tree addr, tree ptr)
2421 tree fn = build1 (ADDR_EXPR,
2422 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2423 chkp_bndldx_fndecl);
2424 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2425 fn, 2, addr, ptr);
2426 CALL_WITH_BOUNDS_P (call) = true;
2427 return call;
2430 /* Insert code to load bounds for PTR located by ADDR.
2431 Code is inserted after position pointed by GSI.
2432 Loaded bounds are returned. */
2433 static tree
2434 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2436 gimple_seq seq;
2437 gimple *stmt;
2438 tree bounds;
2440 seq = NULL;
2442 addr = chkp_force_gimple_call_op (addr, &seq);
2443 ptr = chkp_force_gimple_call_op (ptr, &seq);
2445 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2446 chkp_mark_stmt (stmt);
2447 bounds = chkp_get_tmp_reg (stmt);
2448 gimple_call_set_lhs (stmt, bounds);
2450 gimple_seq_add_stmt (&seq, stmt);
2452 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2454 if (dump_file && (dump_flags & TDF_DETAILS))
2456 fprintf (dump_file, "Generated bndldx for pointer ");
2457 print_generic_expr (dump_file, ptr, 0);
2458 fprintf (dump_file, ": ");
2459 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2462 return bounds;
2465 /* Build and return CALL_EXPR for bndstx builtin with specified
2466 arguments. */
2467 tree
2468 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2470 tree fn = build1 (ADDR_EXPR,
2471 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2472 chkp_bndstx_fndecl);
2473 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2474 fn, 3, ptr, bounds, addr);
2475 CALL_WITH_BOUNDS_P (call) = true;
2476 return call;
2479 /* Insert code to store BOUNDS for PTR stored by ADDR.
2480 New statements are inserted after position pointed
2481 by GSI. */
2482 void
2483 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2484 gimple_stmt_iterator *gsi)
2486 gimple_seq seq;
2487 gimple *stmt;
2489 seq = NULL;
2491 addr = chkp_force_gimple_call_op (addr, &seq);
2492 ptr = chkp_force_gimple_call_op (ptr, &seq);
2494 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2495 chkp_mark_stmt (stmt);
2496 gimple_call_set_with_bounds (stmt, true);
2498 gimple_seq_add_stmt (&seq, stmt);
2500 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2502 if (dump_file && (dump_flags & TDF_DETAILS))
2504 fprintf (dump_file, "Generated bndstx for pointer store ");
2505 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2506 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2510 /* This function is called when call statement
2511 is inlined and therefore we can't use bndret
2512 for its LHS anymore. Function fixes bndret
2513 call using new RHS value if possible. */
2514 void
2515 chkp_fixup_inlined_call (tree lhs, tree rhs)
2517 tree addr, bounds;
2518 gcall *retbnd, *bndldx;
2520 if (!BOUNDED_P (lhs))
2521 return;
2523 /* Search for retbnd call. */
2524 retbnd = chkp_retbnd_call_by_val (lhs);
2525 if (!retbnd)
2526 return;
2528 /* Currently only handle cases when call is replaced
2529 with a memory access. In this case bndret call
2530 may be replaced with bndldx call. Otherwise we
2531 have to search for bounds which may cause wrong
2532 result due to various optimizations applied. */
2533 switch (TREE_CODE (rhs))
2535 case VAR_DECL:
2536 if (DECL_REGISTER (rhs))
2537 return;
2538 break;
2540 case MEM_REF:
2541 break;
2543 case ARRAY_REF:
2544 case COMPONENT_REF:
2545 addr = get_base_address (rhs);
2546 if (!DECL_P (addr)
2547 && TREE_CODE (addr) != MEM_REF)
2548 return;
2549 if (DECL_P (addr) && DECL_REGISTER (addr))
2550 return;
2551 break;
2553 default:
2554 return;
2557 /* Create a new statements sequence with bndldx call. */
2558 gimple_stmt_iterator gsi = gsi_for_stmt (retbnd);
2559 addr = build_fold_addr_expr (rhs);
2560 chkp_build_bndldx (addr, lhs, &gsi);
2561 bndldx = as_a <gcall *> (gsi_stmt (gsi));
2563 /* Remove bndret call. */
2564 bounds = gimple_call_lhs (retbnd);
2565 gsi = gsi_for_stmt (retbnd);
2566 gsi_remove (&gsi, true);
2568 /* Link new bndldx call. */
2569 gimple_call_set_lhs (bndldx, bounds);
2570 update_stmt (bndldx);
2573 /* Compute bounds for pointer NODE which was assigned in
2574 assignment statement ASSIGN. Return computed bounds. */
2575 static tree
2576 chkp_compute_bounds_for_assignment (tree node, gimple *assign)
2578 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
2579 tree rhs1 = gimple_assign_rhs1 (assign);
2580 tree bounds = NULL_TREE;
2581 gimple_stmt_iterator iter = gsi_for_stmt (assign);
2582 tree base = NULL;
2584 if (dump_file && (dump_flags & TDF_DETAILS))
2586 fprintf (dump_file, "Computing bounds for assignment: ");
2587 print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
2590 switch (rhs_code)
2592 case MEM_REF:
2593 case TARGET_MEM_REF:
2594 case COMPONENT_REF:
2595 case ARRAY_REF:
2596 /* We need to load bounds from the bounds table. */
2597 bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
2598 break;
2600 case VAR_DECL:
2601 case SSA_NAME:
2602 case ADDR_EXPR:
2603 case POINTER_PLUS_EXPR:
2604 case NOP_EXPR:
2605 case CONVERT_EXPR:
2606 case INTEGER_CST:
2607 /* Bounds are just propagated from RHS. */
2608 bounds = chkp_find_bounds (rhs1, &iter);
2609 base = rhs1;
2610 break;
2612 case VIEW_CONVERT_EXPR:
2613 /* Bounds are just propagated from RHS. */
2614 bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
2615 break;
2617 case PARM_DECL:
2618 if (BOUNDED_P (rhs1))
2620 /* We need to load bounds from the bounds table. */
2621 bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
2622 node, &iter);
2623 TREE_ADDRESSABLE (rhs1) = 1;
2625 else
2626 bounds = chkp_get_nonpointer_load_bounds ();
2627 break;
2629 case MINUS_EXPR:
2630 case PLUS_EXPR:
2631 case BIT_AND_EXPR:
2632 case BIT_IOR_EXPR:
2633 case BIT_XOR_EXPR:
2635 tree rhs2 = gimple_assign_rhs2 (assign);
2636 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2637 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2639 /* First we try to check types of operands. If it
2640 does not help then look at bound values.
2642 If some bounds are incomplete and other are
2643 not proven to be valid (i.e. also incomplete
2644 or invalid because value is not pointer) then
2645 resulting value is incomplete and will be
2646 recomputed later in chkp_finish_incomplete_bounds. */
2647 if (BOUNDED_P (rhs1)
2648 && !BOUNDED_P (rhs2))
2649 bounds = bnd1;
2650 else if (BOUNDED_P (rhs2)
2651 && !BOUNDED_P (rhs1)
2652 && rhs_code != MINUS_EXPR)
2653 bounds = bnd2;
2654 else if (chkp_incomplete_bounds (bnd1))
2655 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
2656 && !chkp_incomplete_bounds (bnd2))
2657 bounds = bnd2;
2658 else
2659 bounds = incomplete_bounds;
2660 else if (chkp_incomplete_bounds (bnd2))
2661 if (chkp_valid_bounds (bnd1)
2662 && !chkp_incomplete_bounds (bnd1))
2663 bounds = bnd1;
2664 else
2665 bounds = incomplete_bounds;
2666 else if (!chkp_valid_bounds (bnd1))
2667 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
2668 bounds = bnd2;
2669 else if (bnd2 == chkp_get_zero_bounds ())
2670 bounds = bnd2;
2671 else
2672 bounds = bnd1;
2673 else if (!chkp_valid_bounds (bnd2))
2674 bounds = bnd1;
2675 else
2676 /* Seems both operands may have valid bounds
2677 (e.g. pointer minus pointer). In such case
2678 use default invalid op bounds. */
2679 bounds = chkp_get_invalid_op_bounds ();
2681 base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
2683 break;
2685 case BIT_NOT_EXPR:
2686 case NEGATE_EXPR:
2687 case LSHIFT_EXPR:
2688 case RSHIFT_EXPR:
2689 case LROTATE_EXPR:
2690 case RROTATE_EXPR:
2691 case EQ_EXPR:
2692 case NE_EXPR:
2693 case LT_EXPR:
2694 case LE_EXPR:
2695 case GT_EXPR:
2696 case GE_EXPR:
2697 case MULT_EXPR:
2698 case RDIV_EXPR:
2699 case TRUNC_DIV_EXPR:
2700 case FLOOR_DIV_EXPR:
2701 case CEIL_DIV_EXPR:
2702 case ROUND_DIV_EXPR:
2703 case TRUNC_MOD_EXPR:
2704 case FLOOR_MOD_EXPR:
2705 case CEIL_MOD_EXPR:
2706 case ROUND_MOD_EXPR:
2707 case EXACT_DIV_EXPR:
2708 case FIX_TRUNC_EXPR:
2709 case FLOAT_EXPR:
2710 case REALPART_EXPR:
2711 case IMAGPART_EXPR:
2712 /* No valid bounds may be produced by these exprs. */
2713 bounds = chkp_get_invalid_op_bounds ();
2714 break;
2716 case COND_EXPR:
2718 tree val1 = gimple_assign_rhs2 (assign);
2719 tree val2 = gimple_assign_rhs3 (assign);
2720 tree bnd1 = chkp_find_bounds (val1, &iter);
2721 tree bnd2 = chkp_find_bounds (val2, &iter);
2722 gimple *stmt;
2724 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2725 bounds = incomplete_bounds;
2726 else if (bnd1 == bnd2)
2727 bounds = bnd1;
2728 else
2730 rhs1 = unshare_expr (rhs1);
2732 bounds = chkp_get_tmp_reg (assign);
2733 stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
2734 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2736 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2737 chkp_mark_invalid_bounds (bounds);
2740 break;
2742 case MAX_EXPR:
2743 case MIN_EXPR:
2745 tree rhs2 = gimple_assign_rhs2 (assign);
2746 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2747 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2749 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2750 bounds = incomplete_bounds;
2751 else if (bnd1 == bnd2)
2752 bounds = bnd1;
2753 else
2755 gimple *stmt;
2756 tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
2757 boolean_type_node, rhs1, rhs2);
2758 bounds = chkp_get_tmp_reg (assign);
2759 stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);
2761 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2763 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2764 chkp_mark_invalid_bounds (bounds);
2767 break;
2769 default:
2770 bounds = chkp_get_zero_bounds ();
2771 warning (0, "pointer bounds were lost due to unexpected expression %s",
2772 get_tree_code_name (rhs_code));
2775 gcc_assert (bounds);
2777 /* We may reuse bounds of other pointer we copy/modify. But it is not
2778 allowed for abnormal ssa names. If we produced a pointer using
2779 abnormal ssa name, we better make a bounds copy to avoid coalescing
2780 issues. */
2781 if (base
2782 && TREE_CODE (base) == SSA_NAME
2783 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
2785 gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
2786 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2787 bounds = gimple_assign_lhs (stmt);
2790 if (node)
2791 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2793 return bounds;
2796 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2798 There are just few statement codes allowed: NOP (for default ssa names),
2799 ASSIGN, CALL, PHI, ASM.
2801 Return computed bounds. */
2802 static tree
2803 chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
2804 gphi_iterator *iter)
2806 tree var, bounds;
2807 enum gimple_code code = gimple_code (def_stmt);
2808 gphi *stmt;
2810 if (dump_file && (dump_flags & TDF_DETAILS))
2812 fprintf (dump_file, "Searching for bounds for node: ");
2813 print_generic_expr (dump_file, node, 0);
2815 fprintf (dump_file, " using its definition: ");
2816 print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2819 switch (code)
2821 case GIMPLE_NOP:
2822 var = SSA_NAME_VAR (node);
2823 switch (TREE_CODE (var))
2825 case PARM_DECL:
2826 bounds = chkp_get_bound_for_parm (node);
2827 break;
2829 case VAR_DECL:
2830 /* For uninitialized pointers use none bounds. */
2831 bounds = chkp_get_none_bounds ();
2832 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2833 break;
2835 case RESULT_DECL:
2837 tree base_type;
2839 gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2841 base_type = TREE_TYPE (TREE_TYPE (node));
2843 gcc_assert (TYPE_SIZE (base_type)
2844 && TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2845 && tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2847 bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2848 NULL, false);
2849 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2851 break;
2853 default:
2854 if (dump_file && (dump_flags & TDF_DETAILS))
2856 fprintf (dump_file, "Unexpected var with no definition\n");
2857 print_generic_expr (dump_file, var, 0);
2859 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2860 get_tree_code_name (TREE_CODE (var)));
2862 break;
2864 case GIMPLE_ASSIGN:
2865 bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2866 break;
2868 case GIMPLE_CALL:
2869 bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
2870 break;
2872 case GIMPLE_PHI:
2873 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
2874 if (SSA_NAME_VAR (node))
2875 var = chkp_get_bounds_var (SSA_NAME_VAR (node));
2876 else
2877 var = make_temp_ssa_name (pointer_bounds_type_node,
2878 NULL,
2879 CHKP_BOUND_TMP_NAME);
2880 else
2881 var = chkp_get_tmp_var ();
2882 stmt = create_phi_node (var, gimple_bb (def_stmt));
2883 bounds = gimple_phi_result (stmt);
2884 *iter = gsi_for_phi (stmt);
2886 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2888 /* Created bounds do not have all phi args computed and
2889 therefore we do not know if there is a valid source
2890 of bounds for that node. Therefore we mark bounds
2891 as incomplete and then recompute them when all phi
2892 args are computed. */
2893 chkp_register_incomplete_bounds (bounds, node);
2894 break;
2896 case GIMPLE_ASM:
2897 bounds = chkp_get_zero_bounds ();
2898 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2899 break;
2901 default:
2902 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2903 gimple_code_name[code]);
2906 return bounds;
2909 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2910 tree
2911 chkp_build_make_bounds_call (tree lower_bound, tree size)
2913 tree call = build1 (ADDR_EXPR,
2914 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2915 chkp_bndmk_fndecl);
2916 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2917 call, 2, lower_bound, size);
2920 /* Create static bounds var of specfified OBJ which is
2921 is either VAR_DECL or string constant. */
2922 static tree
2923 chkp_make_static_bounds (tree obj)
2925 static int string_id = 1;
2926 static int var_id = 1;
2927 tree *slot;
2928 const char *var_name;
2929 char *bnd_var_name;
2930 tree bnd_var;
2932 /* First check if we already have required var. */
2933 if (chkp_static_var_bounds)
2935 /* For vars we use assembler name as a key in
2936 chkp_static_var_bounds map. It allows to
2937 avoid duplicating bound vars for decls
2938 sharing assembler name. */
2939 if (VAR_P (obj))
2941 tree name = DECL_ASSEMBLER_NAME (obj);
2942 slot = chkp_static_var_bounds->get (name);
2943 if (slot)
2944 return *slot;
2946 else
2948 slot = chkp_static_var_bounds->get (obj);
2949 if (slot)
2950 return *slot;
2954 /* Build decl for bounds var. */
2955 if (VAR_P (obj))
2957 if (DECL_IGNORED_P (obj))
2959 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2960 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2962 else
2964 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2966 /* For hidden symbols we want to skip first '*' char. */
2967 if (*var_name == '*')
2968 var_name++;
2970 bnd_var_name = (char *) xmalloc (strlen (var_name)
2971 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2972 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2973 strcat (bnd_var_name, var_name);
2976 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2977 get_identifier (bnd_var_name),
2978 pointer_bounds_type_node);
2980 /* Address of the obj will be used as lower bound. */
2981 TREE_ADDRESSABLE (obj) = 1;
2983 else
2985 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2986 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2988 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2989 get_identifier (bnd_var_name),
2990 pointer_bounds_type_node);
2993 free (bnd_var_name);
2995 TREE_PUBLIC (bnd_var) = 0;
2996 TREE_USED (bnd_var) = 1;
2997 TREE_READONLY (bnd_var) = 0;
2998 TREE_STATIC (bnd_var) = 1;
2999 TREE_ADDRESSABLE (bnd_var) = 0;
3000 DECL_ARTIFICIAL (bnd_var) = 1;
3001 DECL_COMMON (bnd_var) = 1;
3002 DECL_COMDAT (bnd_var) = 1;
3003 DECL_READ_P (bnd_var) = 1;
3004 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
3005 /* Force output similar to constant bounds.
3006 See chkp_make_static_const_bounds. */
3007 varpool_node::get_create (bnd_var)->force_output = 1;
3008 /* Mark symbol as requiring bounds initialization. */
3009 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
3010 varpool_node::finalize_decl (bnd_var);
3012 /* Add created var to the map to use it for other references
3013 to obj. */
3014 if (!chkp_static_var_bounds)
3015 chkp_static_var_bounds = new hash_map<tree, tree>;
3017 if (VAR_P (obj))
3019 tree name = DECL_ASSEMBLER_NAME (obj);
3020 chkp_static_var_bounds->put (name, bnd_var);
3022 else
3023 chkp_static_var_bounds->put (obj, bnd_var);
3025 return bnd_var;
3028 /* When var has incomplete type we cannot get size to
3029 compute its bounds. In such cases we use checker
3030 builtin call which determines object size at runtime. */
3031 static tree
3032 chkp_generate_extern_var_bounds (tree var)
3034 tree bounds, size_reloc, lb, size, max_size, cond;
3035 gimple_stmt_iterator gsi;
3036 gimple_seq seq = NULL;
3037 gimple *stmt;
3039 /* If instrumentation is not enabled for vars having
3040 incomplete type then just return zero bounds to avoid
3041 checks for this var. */
3042 if (!flag_chkp_incomplete_type)
3043 return chkp_get_zero_bounds ();
3045 if (dump_file && (dump_flags & TDF_DETAILS))
3047 fprintf (dump_file, "Generating bounds for extern symbol '");
3048 print_generic_expr (dump_file, var, 0);
3049 fprintf (dump_file, "'\n");
3052 stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
3054 size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
3055 gimple_call_set_lhs (stmt, size_reloc);
3057 gimple_seq_add_stmt (&seq, stmt);
3059 lb = chkp_build_addr_expr (var);
3060 size = make_ssa_name (chkp_get_size_tmp_var ());
3062 if (flag_chkp_zero_dynamic_size_as_infinite)
3064 /* We should check that size relocation was resolved.
3065 If it was not then use maximum possible size for the var. */
3066 max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
3067 fold_convert (chkp_uintptr_type, lb));
3068 max_size = chkp_force_gimple_call_op (max_size, &seq);
3070 cond = build2 (NE_EXPR, boolean_type_node,
3071 size_reloc, integer_zero_node);
3072 stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
3073 gimple_seq_add_stmt (&seq, stmt);
3075 else
3077 stmt = gimple_build_assign (size, size_reloc);
3078 gimple_seq_add_stmt (&seq, stmt);
3081 gsi = gsi_start_bb (chkp_get_entry_block ());
3082 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3084 bounds = chkp_make_bounds (lb, size, &gsi, true);
3086 return bounds;
3089 /* Return 1 if TYPE has fields with zero size or fields
3090 marked with chkp_variable_size attribute. */
3091 bool
3092 chkp_variable_size_type (tree type)
3094 bool res = false;
3095 tree field;
3097 if (RECORD_OR_UNION_TYPE_P (type))
3098 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3100 if (TREE_CODE (field) == FIELD_DECL)
3101 res = res
3102 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3103 || chkp_variable_size_type (TREE_TYPE (field));
3105 else
3106 res = !TYPE_SIZE (type)
3107 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3108 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3110 return res;
3113 /* Compute and return bounds for address of DECL which is
3114 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
3115 static tree
3116 chkp_get_bounds_for_decl_addr (tree decl)
3118 tree bounds;
3120 gcc_assert (VAR_P (decl)
3121 || TREE_CODE (decl) == PARM_DECL
3122 || TREE_CODE (decl) == RESULT_DECL);
3124 bounds = chkp_get_registered_addr_bounds (decl);
3126 if (bounds)
3127 return bounds;
3129 if (dump_file && (dump_flags & TDF_DETAILS))
3131 fprintf (dump_file, "Building bounds for address of decl ");
3132 print_generic_expr (dump_file, decl, 0);
3133 fprintf (dump_file, "\n");
3136 /* Use zero bounds if size is unknown and checks for
3137 unknown sizes are restricted. */
3138 if ((!DECL_SIZE (decl)
3139 || (chkp_variable_size_type (TREE_TYPE (decl))
3140 && (TREE_STATIC (decl)
3141 || DECL_EXTERNAL (decl)
3142 || TREE_PUBLIC (decl))))
3143 && !flag_chkp_incomplete_type)
3144 return chkp_get_zero_bounds ();
3146 if (flag_chkp_use_static_bounds
3147 && VAR_P (decl)
3148 && (TREE_STATIC (decl)
3149 || DECL_EXTERNAL (decl)
3150 || TREE_PUBLIC (decl))
3151 && !DECL_THREAD_LOCAL_P (decl))
3153 tree bnd_var = chkp_make_static_bounds (decl);
3154 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3155 gimple *stmt;
3157 bounds = chkp_get_tmp_reg (NULL);
3158 stmt = gimple_build_assign (bounds, bnd_var);
3159 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3161 else if (!DECL_SIZE (decl)
3162 || (chkp_variable_size_type (TREE_TYPE (decl))
3163 && (TREE_STATIC (decl)
3164 || DECL_EXTERNAL (decl)
3165 || TREE_PUBLIC (decl))))
3167 gcc_assert (VAR_P (decl));
3168 bounds = chkp_generate_extern_var_bounds (decl);
3170 else
3172 tree lb = chkp_build_addr_expr (decl);
3173 bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3176 return bounds;
3179 /* Compute and return bounds for constant string. */
3180 static tree
3181 chkp_get_bounds_for_string_cst (tree cst)
3183 tree bounds;
3184 tree lb;
3185 tree size;
3187 gcc_assert (TREE_CODE (cst) == STRING_CST);
3189 bounds = chkp_get_registered_bounds (cst);
3191 if (bounds)
3192 return bounds;
3194 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3195 || flag_chkp_use_static_const_bounds > 0)
3197 tree bnd_var = chkp_make_static_bounds (cst);
3198 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3199 gimple *stmt;
3201 bounds = chkp_get_tmp_reg (NULL);
3202 stmt = gimple_build_assign (bounds, bnd_var);
3203 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3205 else
3207 lb = chkp_build_addr_expr (cst);
3208 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3209 bounds = chkp_make_bounds (lb, size, NULL, false);
3212 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3214 return bounds;
3217 /* Generate code to instersect bounds BOUNDS1 and BOUNDS2 and
3218 return the result. if ITER is not NULL then Code is inserted
3219 before position pointed by ITER. Otherwise code is added to
3220 entry block. */
3221 static tree
3222 chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
3224 if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
3225 return bounds2 ? bounds2 : bounds1;
3226 else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
3227 return bounds1;
3228 else
3230 gimple_seq seq;
3231 gimple *stmt;
3232 tree bounds;
3234 seq = NULL;
3236 stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
3237 chkp_mark_stmt (stmt);
3239 bounds = chkp_get_tmp_reg (stmt);
3240 gimple_call_set_lhs (stmt, bounds);
3242 gimple_seq_add_stmt (&seq, stmt);
3244 /* We are probably doing narrowing for constant expression.
3245 In such case iter may be undefined. */
3246 if (!iter)
3248 gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
3249 iter = &gsi;
3250 gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
3252 else
3253 gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
3255 if (dump_file && (dump_flags & TDF_DETAILS))
3257 fprintf (dump_file, "Bounds intersection: ");
3258 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
3259 fprintf (dump_file, " inserted before statement: ");
3260 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
3261 TDF_VOPS|TDF_MEMSYMS);
3264 return bounds;
3268 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3269 and 0 othersize. */
3270 static bool
3271 chkp_may_narrow_to_field (tree field)
3273 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3274 && tree_to_uhwi (DECL_SIZE (field)) != 0
3275 && !(flag_chkp_flexible_struct_trailing_arrays
3276 && TREE_CODE(TREE_TYPE(field)) == ARRAY_TYPE
3277 && !DECL_CHAIN (field))
3278 && (!DECL_FIELD_OFFSET (field)
3279 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3280 && (!DECL_FIELD_BIT_OFFSET (field)
3281 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3282 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3283 && !chkp_variable_size_type (TREE_TYPE (field));
3286 /* Return 1 if bounds for FIELD should be narrowed to
3287 field's own size. */
3288 static bool
3289 chkp_narrow_bounds_for_field (tree field)
3291 HOST_WIDE_INT offs;
3292 HOST_WIDE_INT bit_offs;
3294 if (!chkp_may_narrow_to_field (field))
3295 return false;
3297 /* Accesse to compiler generated fields should not cause
3298 bounds narrowing. */
3299 if (DECL_ARTIFICIAL (field))
3300 return false;
3302 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3303 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3305 return (flag_chkp_narrow_bounds
3306 && (flag_chkp_first_field_has_own_bounds
3307 || offs
3308 || bit_offs));
3311 /* Perform narrowing for BOUNDS using bounds computed for field
3312 access COMPONENT. ITER meaning is the same as for
3313 chkp_intersect_bounds. */
3314 static tree
3315 chkp_narrow_bounds_to_field (tree bounds, tree component,
3316 gimple_stmt_iterator *iter)
3318 tree field = TREE_OPERAND (component, 1);
3319 tree size = DECL_SIZE_UNIT (field);
3320 tree field_ptr = chkp_build_addr_expr (component);
3321 tree field_bounds;
3323 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3325 return chkp_intersect_bounds (field_bounds, bounds, iter);
3328 /* Parse field or array access NODE.
3330 PTR output parameter holds a pointer to the outermost
3331 object.
3333 BITFIELD output parameter is set to 1 if bitfield is
3334 accessed and to 0 otherwise. If it is 1 then ELT holds
3335 outer component for accessed bit field.
3337 SAFE output parameter is set to 1 if access is safe and
3338 checks are not required.
3340 BOUNDS output parameter holds bounds to be used to check
3341 access (may be NULL).
3343 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3344 innermost accessed component. */
3345 static void
3346 chkp_parse_array_and_component_ref (tree node, tree *ptr,
3347 tree *elt, bool *safe,
3348 bool *bitfield,
3349 tree *bounds,
3350 gimple_stmt_iterator *iter,
3351 bool innermost_bounds)
3353 tree comp_to_narrow = NULL_TREE;
3354 tree last_comp = NULL_TREE;
3355 bool array_ref_found = false;
3356 tree *nodes;
3357 tree var;
3358 int len;
3359 int i;
3361 /* Compute tree height for expression. */
3362 var = node;
3363 len = 1;
3364 while (TREE_CODE (var) == COMPONENT_REF
3365 || TREE_CODE (var) == ARRAY_REF
3366 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
3368 var = TREE_OPERAND (var, 0);
3369 len++;
3372 gcc_assert (len > 1);
3374 /* It is more convenient for us to scan left-to-right,
3375 so walk tree again and put all nodes to nodes vector
3376 in reversed order. */
3377 nodes = XALLOCAVEC (tree, len);
3378 nodes[len - 1] = node;
3379 for (i = len - 2; i >= 0; i--)
3380 nodes[i] = TREE_OPERAND (nodes[i + 1], 0);
3382 if (bounds)
3383 *bounds = NULL;
3384 *safe = true;
3385 *bitfield = (TREE_CODE (node) == COMPONENT_REF
3386 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
3387 /* To get bitfield address we will need outer element. */
3388 if (*bitfield)
3389 *elt = nodes[len - 2];
3390 else
3391 *elt = NULL_TREE;
3393 /* If we have indirection in expression then compute
3394 outermost structure bounds. Computed bounds may be
3395 narrowed later. */
3396 if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
3398 *safe = false;
3399 *ptr = TREE_OPERAND (nodes[0], 0);
3400 if (bounds)
3401 *bounds = chkp_find_bounds (*ptr, iter);
3403 else
/* No indirection: the base must be a decl, string or SSA name whose
   address we can take directly.  */
3405 gcc_assert (VAR_P (var)
3406 || TREE_CODE (var) == PARM_DECL
3407 || TREE_CODE (var) == RESULT_DECL
3408 || TREE_CODE (var) == STRING_CST
3409 || TREE_CODE (var) == SSA_NAME);
3411 *ptr = chkp_build_addr_expr (var);
3414 /* In this loop we are trying to find a field access
3415 requiring narrowing. There are two simple rules
3416 for search:
3417 1. Leftmost array_ref is chosen if any.
3418 2. Rightmost suitable component_ref is chosen if innermost
3419 bounds are required and no array_ref exists. */
3420 for (i = 1; i < len; i++)
3422 var = nodes[i];
3424 if (TREE_CODE (var) == ARRAY_REF)
3426 *safe = false;
3427 array_ref_found = true;
3428 if (flag_chkp_narrow_bounds
3429 && !flag_chkp_narrow_to_innermost_arrray
3430 && (!last_comp
3431 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
3433 comp_to_narrow = last_comp;
3434 break;
3437 else if (TREE_CODE (var) == COMPONENT_REF)
3439 tree field = TREE_OPERAND (var, 1);
3441 if (innermost_bounds
3442 && !array_ref_found
3443 && chkp_narrow_bounds_for_field (field))
3444 comp_to_narrow = var;
3445 last_comp = var;
3447 if (flag_chkp_narrow_bounds
3448 && flag_chkp_narrow_to_innermost_arrray
3449 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
3451 if (bounds)
3452 *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
3453 comp_to_narrow = NULL;
3456 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
3457 /* Nothing to do for it. */
3459 else
3460 gcc_unreachable ();
3463 if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
3464 *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);
3466 if (innermost_bounds && bounds && !*bounds)
3467 *bounds = chkp_find_bounds (*ptr, iter);
3470 /* Compute and return bounds for address of OBJ. */
3471 static tree
3472 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
3474 tree bounds = chkp_get_registered_addr_bounds (obj);
/* Reuse bounds already registered for this address, if any.  */
3476 if (bounds)
3477 return bounds;
3479 switch (TREE_CODE (obj))
3481 case VAR_DECL:
3482 case PARM_DECL:
3483 case RESULT_DECL:
3484 bounds = chkp_get_bounds_for_decl_addr (obj);
3485 break;
3487 case STRING_CST:
3488 bounds = chkp_get_bounds_for_string_cst (obj);
3489 break;
3491 case ARRAY_REF:
3492 case COMPONENT_REF:
3494 tree elt;
3495 tree ptr;
3496 bool safe;
3497 bool bitfield;
3499 chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
3500 &bitfield, &bounds, iter, true);
3502 gcc_assert (bounds);
3504 break;
3506 case FUNCTION_DECL:
3507 case LABEL_DECL:
3508 bounds = chkp_get_zero_bounds ();
3509 break;
3511 case MEM_REF:
3512 bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
3513 break;
3515 case REALPART_EXPR:
3516 case IMAGPART_EXPR:
/* Recurse on the underlying complex object.  */
3517 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
3518 break;
3520 default:
3521 if (dump_file && (dump_flags & TDF_DETAILS))
3523 fprintf (dump_file, "chkp_make_addressed_object_bounds: "
3524 "unexpected object of type %s\n",
3525 get_tree_code_name (TREE_CODE (obj)));
3526 print_node (dump_file, "", obj, 0);
3528 internal_error ("chkp_make_addressed_object_bounds: "
3529 "Unexpected tree code %s",
3530 get_tree_code_name (TREE_CODE (obj)));
/* Cache the computed bounds so repeated queries for OBJ are cheap.  */
3533 chkp_register_addr_bounds (obj, bounds);
3535 return bounds;
3538 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3539 to compute bounds if required. Computed bounds should be available at
3540 position pointed by ITER.
3542 If PTR_SRC is NULL_TREE then pointer definition is identified.
3544 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3545 PTR. If PTR is a any memory reference then ITER points to a statement
3546 after which bndldx will be inserterd. In both cases ITER will be updated
3547 to point to the inserted bndldx statement. */
3549 static tree
3550 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3552 tree addr = NULL_TREE;
3553 tree bounds = NULL_TREE;
3555 if (!ptr_src)
3556 ptr_src = ptr;
3558 bounds = chkp_get_registered_bounds (ptr_src);
3560 if (bounds)
3561 return bounds;
3563 switch (TREE_CODE (ptr_src))
3565 case MEM_REF:
3566 case VAR_DECL:
3567 if (BOUNDED_P (ptr_src))
3568 if (VAR_P (ptr) && DECL_REGISTER (ptr))
3569 bounds = chkp_get_zero_bounds ();
3570 else
3572 addr = chkp_build_addr_expr (ptr_src);
3573 bounds = chkp_build_bndldx (addr, ptr, iter);
3575 else
3576 bounds = chkp_get_nonpointer_load_bounds ();
3577 break;
3579 case ARRAY_REF:
3580 case COMPONENT_REF:
3581 addr = get_base_address (ptr_src);
3582 if (DECL_P (addr)
3583 || TREE_CODE (addr) == MEM_REF
3584 || TREE_CODE (addr) == TARGET_MEM_REF)
3586 if (BOUNDED_P (ptr_src))
3587 if (VAR_P (ptr) && DECL_REGISTER (ptr))
3588 bounds = chkp_get_zero_bounds ();
3589 else
3591 addr = chkp_build_addr_expr (ptr_src);
3592 bounds = chkp_build_bndldx (addr, ptr, iter);
3594 else
3595 bounds = chkp_get_nonpointer_load_bounds ();
3597 else
3599 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3600 bounds = chkp_find_bounds (addr, iter);
3602 break;
3604 case PARM_DECL:
3605 gcc_unreachable ();
3606 bounds = chkp_get_bound_for_parm (ptr_src);
3607 break;
3609 case TARGET_MEM_REF:
3610 addr = chkp_build_addr_expr (ptr_src);
3611 bounds = chkp_build_bndldx (addr, ptr, iter);
3612 break;
3614 case SSA_NAME:
3615 bounds = chkp_get_registered_bounds (ptr_src);
3616 if (!bounds)
3618 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3619 gphi_iterator phi_iter;
3621 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3623 gcc_assert (bounds);
3625 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3627 unsigned i;
3629 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3631 tree arg = gimple_phi_arg_def (def_phi, i);
3632 tree arg_bnd;
3633 gphi *phi_bnd;
3635 arg_bnd = chkp_find_bounds (arg, NULL);
3637 /* chkp_get_bounds_by_definition created new phi
3638 statement and phi_iter points to it.
3640 Previous call to chkp_find_bounds could create
3641 new basic block and therefore change phi statement
3642 phi_iter points to. */
3643 phi_bnd = phi_iter.phi ();
3645 add_phi_arg (phi_bnd, arg_bnd,
3646 gimple_phi_arg_edge (def_phi, i),
3647 UNKNOWN_LOCATION);
3650 /* If all bound phi nodes have their arg computed
3651 then we may finish its computation. See
3652 chkp_finish_incomplete_bounds for more details. */
3653 if (chkp_may_finish_incomplete_bounds ())
3654 chkp_finish_incomplete_bounds ();
3657 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3658 || chkp_incomplete_bounds (bounds));
3660 break;
3662 case ADDR_EXPR:
3663 case WITH_SIZE_EXPR:
3664 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3665 break;
3667 case INTEGER_CST:
3668 if (integer_zerop (ptr_src))
3669 bounds = chkp_get_none_bounds ();
3670 else
3671 bounds = chkp_get_invalid_op_bounds ();
3672 break;
3674 default:
3675 if (dump_file && (dump_flags & TDF_DETAILS))
3677 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3678 get_tree_code_name (TREE_CODE (ptr_src)));
3679 print_node (dump_file, "", ptr_src, 0);
3681 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3682 get_tree_code_name (TREE_CODE (ptr_src)));
3685 if (!bounds)
3687 if (dump_file && (dump_flags & TDF_DETAILS))
3689 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3690 print_node (dump_file, "", ptr_src, 0);
3692 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3695 return bounds;
3698 /* Normal case for bounds search without forced narrowing.
   Thin wrapper over chkp_find_bounds_1 with no separate load source
   (PTR_SRC == NULL_TREE means identify bounds from PTR's definition).  */
3699 static tree
3700 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3702 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3705 /* Search bounds for pointer PTR loaded from PTR_SRC
3706 by statement *ITER points to.
   Thin wrapper over chkp_find_bounds_1 passing the load source through.  */
3707 static tree
3708 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3710 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3713 /* Helper function which checks type of RHS and finds all pointers in
3714 it. For each found pointer we build its accesses in LHS and RHS
3715 objects and then call HANDLER for them. Function is used to copy
3716 or initialize bounds for copied object. */
3717 static void
3718 chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
3719 assign_handler handler)
3721 tree type = TREE_TYPE (lhs);
3723 /* We have nothing to do with clobbers. */
3724 if (TREE_CLOBBER_P (rhs))
3725 return;
3727 if (BOUNDED_TYPE_P (type))
3728 handler (lhs, rhs, arg);
3729 else if (RECORD_OR_UNION_TYPE_P (type))
3731 tree field;
/* Aggregates: recurse into every pointer-containing field.  For a
   CONSTRUCTOR the RHS access is the constructor element itself.  */
3733 if (TREE_CODE (rhs) == CONSTRUCTOR)
3735 unsigned HOST_WIDE_INT cnt;
3736 tree val;
3738 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
3740 if (chkp_type_has_pointer (TREE_TYPE (field)))
3742 tree lhs_field = chkp_build_component_ref (lhs, field);
3743 chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
3747 else
3748 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3749 if (TREE_CODE (field) == FIELD_DECL
3750 && chkp_type_has_pointer (TREE_TYPE (field)))
3752 tree rhs_field = chkp_build_component_ref (rhs, field);
3753 tree lhs_field = chkp_build_component_ref (lhs, field);
3754 chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
3757 else if (TREE_CODE (type) == ARRAY_TYPE)
3759 unsigned HOST_WIDE_INT cur = 0;
3760 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3761 tree etype = TREE_TYPE (type);
3762 tree esize = TYPE_SIZE (etype);
3764 if (TREE_CODE (rhs) == CONSTRUCTOR)
3766 unsigned HOST_WIDE_INT cnt;
3767 tree purp, val, lhs_elem;
3769 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
/* A RANGE_EXPR index initializes a contiguous run of elements with
   the same value; otherwise PURP (if present) is a single index.  */
3771 if (purp && TREE_CODE (purp) == RANGE_EXPR)
3773 tree lo_index = TREE_OPERAND (purp, 0);
3774 tree hi_index = TREE_OPERAND (purp, 1);
3776 for (cur = (unsigned)tree_to_uhwi (lo_index);
3777 cur <= (unsigned)tree_to_uhwi (hi_index);
3778 cur++)
3780 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3781 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3784 else
3786 if (purp)
3788 gcc_assert (TREE_CODE (purp) == INTEGER_CST);
3789 cur = tree_to_uhwi (purp);
3792 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
3794 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3798 /* Copy array only when size is known. */
3799 else if (maxval && !integer_minus_onep (maxval))
3800 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
3802 tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3803 tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
3804 chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
3807 else
3808 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3809 get_tree_code_name (TREE_CODE (type)));
3812 /* Add code to copy bounds for assignment of RHS to LHS.
3813 ARG is an iterator pointing to the code position. */
3814 static void
3815 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3817 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
/* Look up bounds for the copied pointer and store them for the
   destination address via bndstx.  */
3818 tree bounds = chkp_find_bounds (rhs, iter);
3819 tree addr = chkp_build_addr_expr(lhs);
3821 chkp_build_bndstx (addr, rhs, bounds, iter);
3824 /* Emit static bound initializers and size vars. */
3825 void
3826 chkp_finish_file (void)
3828 struct varpool_node *node;
3829 struct chkp_ctor_stmt_list stmts;
3831 if (seen_error ())
3832 return;
3834 /* Iterate through varpool and generate bounds initialization
3835 constructors for all statically initialized pointers. */
3836 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3837 stmts.stmts = NULL;
3838 FOR_EACH_VARIABLE (node)
3839 /* Check that var is actually emitted and we need and may initialize
3840 its bounds. */
3841 if (node->need_bounds_init
3842 && !POINTER_BOUNDS_P (node->decl)
3843 && DECL_RTL (node->decl)
3844 && MEM_P (DECL_RTL (node->decl))
3845 && TREE_ASM_WRITTEN (node->decl))
3847 chkp_walk_pointer_assignments (node->decl,
3848 DECL_INITIAL (node->decl),
3849 &stmts,
3850 chkp_add_modification_to_stmt_list);
/* Flush a full constructor and start a new one so a single ctor does
   not grow beyond MAX_STMTS_IN_STATIC_CHKP_CTOR statements.  */
3852 if (stmts.avail <= 0)
3854 cgraph_build_static_cdtor ('P', stmts.stmts,
3855 MAX_RESERVED_INIT_PRIORITY + 3);
3856 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3857 stmts.stmts = NULL;
3861 if (stmts.stmts)
3862 cgraph_build_static_cdtor ('P', stmts.stmts,
3863 MAX_RESERVED_INIT_PRIORITY + 3);
3865 /* Iterate through varpool and generate bounds initialization
3866 constructors for all static bounds vars. */
3867 stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
3868 stmts.stmts = NULL;
3869 FOR_EACH_VARIABLE (node)
3870 if (node->need_bounds_init
3871 && POINTER_BOUNDS_P (node->decl)
3872 && TREE_ASM_WRITTEN (node->decl))
3874 tree bnd = node->decl;
3875 tree var;
3877 gcc_assert (DECL_INITIAL (bnd)
3878 && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR)
;
3880 var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
3881 chkp_output_static_bounds (bnd, var, &stmts);
3884 if (stmts.stmts)
3885 cgraph_build_static_cdtor ('B', stmts.stmts,
3886 MAX_RESERVED_INIT_PRIORITY + 2);
/* Release per-compilation maps; the pass is done with them.  */
3888 delete chkp_static_var_bounds;
3889 delete chkp_bounds_map;
3892 /* An instrumentation function which is called for each statement
3893 having memory access we want to instrument. It inserts check
3894 code and bounds copy code.
3896 ITER points to statement to instrument.
3898 NODE holds memory access in statement to check.
3900 LOC holds the location information for statement.
3902 DIRFLAGS determines whether access is read or write.
3904 ACCESS_OFFS should be added to address used in NODE
3905 before check.
3907 ACCESS_SIZE holds size of checked access.
3909 SAFE indicates if NODE access is safe and should not be
3910 checked. */
3911 static void
3912 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3913 location_t loc, tree dirflag,
3914 tree access_offs, tree access_size,
3915 bool safe)
3917 tree node_type = TREE_TYPE (node);
3918 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3919 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3920 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3921 tree ptr = NULL_TREE; /* a pointer used for dereference */
3922 tree bounds = NULL_TREE;
3924 /* We do not need instrumentation for clobbers. */
3925 if (dirflag == integer_one_node
3926 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3927 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3928 return;
3930 switch (TREE_CODE (node))
3932 case ARRAY_REF:
3933 case COMPONENT_REF:
3935 bool bitfield;
3936 tree elt;
3938 if (safe)
3940 /* We are not going to generate any checks, so do not
3941 generate bounds as well. */
3942 addr_first = chkp_build_addr_expr (node);
3943 break;
3946 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3947 &bitfield, &bounds, iter, false);
3949 /* Break if there is no dereference and operation is safe. */
3951 if (bitfield)
3953 tree field = TREE_OPERAND (node, 1);
3955 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3956 size = DECL_SIZE_UNIT (field);
3958 if (elt)
3959 elt = chkp_build_addr_expr (elt);
3960 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3961 addr_first = fold_build_pointer_plus_loc (loc,
3962 addr_first,
3963 byte_position (field));
3965 else
3966 addr_first = chkp_build_addr_expr (node);
3968 break;
3970 case INDIRECT_REF:
3971 ptr = TREE_OPERAND (node, 0);
3972 addr_first = ptr;
3973 break;
3975 case MEM_REF:
3976 ptr = TREE_OPERAND (node, 0);
3977 addr_first = chkp_build_addr_expr (node);
3978 break;
3980 case TARGET_MEM_REF:
3981 ptr = TMR_BASE (node);
3982 addr_first = chkp_build_addr_expr (node);
3983 break;
3985 case ARRAY_RANGE_REF:
3986 printf("ARRAY_RANGE_REF\n");
3987 debug_gimple_stmt(gsi_stmt(*iter));
3988 debug_tree(node);
3989 gcc_unreachable ();
3990 break;
3992 case BIT_FIELD_REF:
3994 tree offs, rem, bpu;
3996 gcc_assert (!access_offs);
3997 gcc_assert (!access_size);
3999 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
4000 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
4001 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
4002 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
4004 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
4005 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
4006 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
4007 size = fold_convert (size_type_node, size);
4009 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
4010 dirflag, offs, size, safe);
4011 return;
4013 break;
4015 case VAR_DECL:
4016 case RESULT_DECL:
4017 case PARM_DECL:
4018 if (dirflag != integer_one_node
4019 || DECL_REGISTER (node))
4020 return;
4022 safe = true;
4023 addr_first = chkp_build_addr_expr (node);
4024 break;
4026 default:
4027 return;
4030 /* If addr_last was not computed then use (addr_first + size - 1)
4031 expression to compute it. */
4032 if (!addr_last)
4034 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
4035 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
4038 /* Shift both first_addr and last_addr by access_offs if specified. */
4039 if (access_offs)
4041 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
4042 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
4045 /* Generate bndcl/bndcu checks if memory access is not safe. */
4046 if (!safe)
4048 gimple_stmt_iterator stmt_iter = *iter;
4050 if (!bounds)
4051 bounds = chkp_find_bounds (ptr, iter);
4053 chkp_check_mem_access (addr_first, addr_last, bounds,
4054 stmt_iter, loc, dirflag);
4057 /* We need to store bounds in case pointer is stored. */
4058 if (dirflag == integer_one_node
4059 && chkp_type_has_pointer (node_type)
4060 && flag_chkp_store_bounds)
4062 gimple *stmt = gsi_stmt (*iter);
4063 tree rhs1 = gimple_assign_rhs1 (stmt);
4064 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4066 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
4067 chkp_walk_pointer_assignments (node, rhs1, iter,
4068 chkp_copy_bounds_for_elem);
4069 else
4071 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
4072 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
4077 /* Add code to copy bounds for all pointers copied
4078 in ASSIGN created during inline of EDGE. */
4079 void
4080 chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
4082 tree lhs = gimple_assign_lhs (assign);
4083 tree rhs = gimple_assign_rhs1 (assign);
4084 gimple_stmt_iterator iter = gsi_for_stmt (assign);
4086 if (!flag_chkp_store_bounds)
4087 return;
4089 chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);
4091 /* We should create edges for all created calls to bndldx and bndstx. */
/* Walk backwards from ASSIGN over the freshly inserted statements; every
   call found must be one of the chkp builtins inserted above.  */
4092 while (gsi_stmt (iter) != assign)
4094 gimple *stmt = gsi_stmt (iter);
4095 if (gimple_code (stmt) == GIMPLE_CALL)
4097 tree fndecl = gimple_call_fndecl (stmt);
4098 struct cgraph_node *callee = cgraph_node::get_create (fndecl);
4099 struct cgraph_edge *new_edge;
4101 gcc_assert (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDSTX)
4102 || chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDLDX)
4103 || chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET));
4105 new_edge = edge->caller->create_edge (callee,
4106 as_a <gcall *> (stmt),
4107 edge->count,
4108 edge->frequency);
4109 new_edge->frequency = compute_call_stmt_bb_frequency
4110 (edge->caller->decl, gimple_bb (stmt));
4112 gsi_prev (&iter);
4116 /* Some code transformation made during instrumentation pass
4117 may put code into inconsistent state. Here we find and fix
4118 such flaws. */
4119 void
4120 chkp_fix_cfg ()
4122 basic_block bb;
4123 gimple_stmt_iterator i;
4125 /* We could insert some code right after stmt which ends bb.
4126 We wanted to put this code on fallthru edge but did not
4127 add new edges from the beginning because it may cause new
4128 phi node creation which may be incorrect due to incomplete
4129 bound phi nodes. */
4130 FOR_ALL_BB_FN (bb, cfun)
4131 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4133 gimple *stmt = gsi_stmt (i);
4134 gimple_stmt_iterator next = i;
4136 gsi_next (&next);
/* A block-ending statement followed by more statements in the same
   block: move the trailing statements onto the fallthru edge.  */
4138 if (stmt_ends_bb_p (stmt)
4139 && !gsi_end_p (next))
4141 edge fall = find_fallthru_edge (bb->succs);
4142 basic_block dest = NULL;
4143 int flags = 0;
4145 gcc_assert (fall);
4147 /* We cannot split abnormal edge. Therefore we
4148 store its params, make it regular and then
4149 rebuild abnormal edge after split. */
4150 if (fall->flags & EDGE_ABNORMAL)
4152 flags = fall->flags & ~EDGE_FALLTHRU;
4153 dest = fall->dest;
4155 fall->flags &= ~EDGE_COMPLEX;
4158 while (!gsi_end_p (next))
4160 gimple *next_stmt = gsi_stmt (next);
4161 gsi_remove (&next, false);
4162 gsi_insert_on_edge (fall, next_stmt);
4165 gsi_commit_edge_inserts ();
4167 /* Re-create abnormal edge. */
4168 if (dest)
4169 make_edge (bb, dest, flags);
4174 /* Walker callback for chkp_replace_function_pointers. Replaces
4175 function pointer in the specified operand with pointer to the
4176 instrumented function version. */
4177 static tree
4178 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4179 void *data ATTRIBUTE_UNUSED)
4181 if (TREE_CODE (*op) == FUNCTION_DECL
4182 && chkp_instrumentable_p (*op)
4183 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4184 /* For builtins we replace pointers only for selected
4185 function and functions having definitions. */
4186 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4187 && (chkp_instrument_normal_builtin (*op)
4188 || gimple_has_body_p (*op)))))
4190 struct cgraph_node *node = cgraph_node::get_create (*op);
4191 struct cgraph_node *clone = NULL;
/* Do not clone a decl that is already an instrumentation clone.  */
4193 if (!node->instrumentation_clone)
4194 clone = chkp_maybe_create_clone (*op);
4196 if (clone)
4197 *op = clone->decl;
/* The operand was fully handled; no need to walk into it.  */
4198 *walk_subtrees = 0;
4201 return NULL;
4204 /* This function searches for function pointers in statement
4205 pointed by GSI and replaces them with pointers to instrumented
4206 function versions. */
4207 static void
4208 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4210 gimple *stmt = gsi_stmt (*gsi);
4211 /* For calls we want to walk call args only. */
4212 if (gimple_code (stmt) == GIMPLE_CALL)
4214 unsigned i;
4215 for (i = 0; i < gimple_call_num_args (stmt); i++)
4216 walk_tree (gimple_call_arg_ptr (stmt, i),
4217 chkp_replace_function_pointer, NULL, NULL);
4219 else
/* For any other statement walk all operands.  */
4220 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4223 /* This function instruments all statements working with memory,
4224 calls and rets.
4226 It also removes excess statements from static initializers. */
4227 static void
4228 chkp_instrument_function (void)
4230 basic_block bb, next;
4231 gimple_stmt_iterator i;
4232 enum gimple_rhs_class grhs_class;
/* In a chkp static constructor no checks are needed.  */
4233 bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4235 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
/* NEXT is captured before instrumenting BB because instrumentation may
   split blocks and invalidate a naive bb->next_bb walk.  */
4238 next = bb->next_bb;
4239 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4241 gimple *s = gsi_stmt (i);
4243 /* Skip statement marked to not be instrumented. */
4244 if (chkp_marked_stmt_p (s))
4246 gsi_next (&i);
4247 continue;
4250 chkp_replace_function_pointers (&i);
4252 switch (gimple_code (s))
4254 case GIMPLE_ASSIGN:
4255 chkp_process_stmt (&i, gimple_assign_lhs (s),
4256 gimple_location (s), integer_one_node,
4257 NULL_TREE, NULL_TREE, safe);
4258 chkp_process_stmt (&i, gimple_assign_rhs1 (s),
4259 gimple_location (s), integer_zero_node,
4260 NULL_TREE, NULL_TREE, safe);
4261 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
4262 if (grhs_class == GIMPLE_BINARY_RHS)
4263 chkp_process_stmt (&i, gimple_assign_rhs2 (s),
4264 gimple_location (s), integer_zero_node,
4265 NULL_TREE, NULL_TREE, safe);
4266 break;
4268 case GIMPLE_RETURN:
4270 greturn *r = as_a <greturn *> (s);
4271 if (gimple_return_retval (r) != NULL_TREE)
4273 chkp_process_stmt (&i, gimple_return_retval (r),
4274 gimple_location (r),
4275 integer_zero_node,
4276 NULL_TREE, NULL_TREE, safe);
4278 /* Additionally we need to add bounds
4279 to return statement. */
4280 chkp_add_bounds_to_ret_stmt (&i);
4283 break;
4285 case GIMPLE_CALL:
4286 chkp_add_bounds_to_call_stmt (&i);
4287 break;
4289 default:
4293 gsi_next (&i);
4295 /* We do not need any actual pointer stores in checker
4296 static initializer. */
4297 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4298 && gimple_code (s) == GIMPLE_ASSIGN
4299 && gimple_store_p (s))
4301 gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4302 gsi_remove (&del_iter, true);
4303 unlink_stmt_vdef (s);
4304 release_defs(s);
4307 bb = next;
4309 while (bb);
4311 /* Some input params may have bounds and be address taken. In this case
4312 we should store incoming bounds into bounds table. */
4313 tree arg;
4314 if (flag_chkp_store_bounds)
4315 for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4316 if (TREE_ADDRESSABLE (arg))
4318 if (BOUNDED_P (arg))
4320 tree bounds = chkp_get_next_bounds_parm (arg);
4321 tree def_ptr = ssa_default_def (cfun, arg);
4322 gimple_stmt_iterator iter
4323 = gsi_start_bb (chkp_get_entry_block ());
4324 chkp_build_bndstx (chkp_build_addr_expr (arg),
4325 def_ptr ? def_ptr : arg,
4326 bounds, &iter);
4328 /* Skip bounds arg. */
4329 arg = TREE_CHAIN (arg);
4331 else if (chkp_type_has_pointer (TREE_TYPE (arg)))
4333 tree orig_arg = arg;
4334 bitmap slots = BITMAP_ALLOC (NULL);
4335 gimple_stmt_iterator iter
4336 = gsi_start_bb (chkp_get_entry_block ());
4337 bitmap_iterator bi;
4338 unsigned bnd_no;
/* A structure argument may contain several pointers; store bounds
   for each pointer slot found in the argument type.  */
4340 chkp_find_bound_slots (TREE_TYPE (arg), slots);
4342 EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4344 tree bounds = chkp_get_next_bounds_parm (arg);
4345 HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4346 tree addr = chkp_build_addr_expr (orig_arg);
4347 tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4348 build_int_cst (ptr_type_node, offs));
4349 chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4350 bounds, &iter);
4352 arg = DECL_CHAIN (arg);
4354 BITMAP_FREE (slots);
4359 /* Find init/null/copy_ptr_bounds calls and replace them
4360 with assignments. It should allow better code
4361 optimization. */
4363 static void
4364 chkp_remove_useless_builtins ()
4366 basic_block bb;
4367 gimple_stmt_iterator gsi;
4369 FOR_EACH_BB_FN (bb, cfun)
4371 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4373 gimple *stmt = gsi_stmt (gsi);
4374 tree fndecl;
4375 enum built_in_function fcode;
4377 /* Find builtins returning first arg and replace
4378 them with assignments. */
4379 if (gimple_code (stmt) == GIMPLE_CALL
4380 && (fndecl = gimple_call_fndecl (stmt))
4381 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4382 && (fcode = DECL_FUNCTION_CODE (fndecl))
4383 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4384 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4385 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4386 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
/* All four builtins return their first argument; replace the call
   with a simple assignment of that argument.  */
4388 tree res = gimple_call_arg (stmt, 0);
4389 update_call_from_tree (&gsi, res);
4390 stmt = gsi_stmt (gsi);
4391 update_stmt (stmt);
4397 /* Initialize pass. */
4398 static void
4399 chkp_init (void)
4401 basic_block bb;
4402 gimple_stmt_iterator i;
4404 in_chkp_pass = true;
/* Clear any stale per-statement marks from a previous function.  */
4406 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4407 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4408 chkp_unmark_stmt (gsi_stmt (i));
/* (Re)allocate per-function maps; delete first where a previous
   function may have left an allocation behind.  */
4410 chkp_invalid_bounds = new hash_set<tree>;
4411 chkp_completed_bounds_set = new hash_set<tree>;
4412 delete chkp_reg_bounds;
4413 chkp_reg_bounds = new hash_map<tree, tree>;
4414 delete chkp_bound_vars;
4415 chkp_bound_vars = new hash_map<tree, tree>;
4416 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4417 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4418 delete chkp_bounds_map;
4419 chkp_bounds_map = new hash_map<tree, tree>;
4420 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
4422 entry_block = NULL;
4423 zero_bounds = NULL_TREE;
4424 none_bounds = NULL_TREE;
4425 incomplete_bounds = integer_zero_node;
4426 tmp_var = NULL_TREE;
4427 size_tmp_var = NULL_TREE;
4429 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4431 /* We create these constant bounds once for each object file.
4432 These symbols go to comdat section and result in single copy
4433 of each one in the final binary. */
4434 chkp_get_zero_bounds_var ();
4435 chkp_get_none_bounds_var ();
4437 calculate_dominance_info (CDI_DOMINATORS);
4438 calculate_dominance_info (CDI_POST_DOMINATORS);
4440 bitmap_obstack_initialize (NULL);
4443 /* Finalize instrumentation pass. */
4444 static void
4445 chkp_fini (void)
4447 in_chkp_pass = false;
/* Release per-function maps allocated in chkp_init.  */
4449 delete chkp_invalid_bounds;
4450 delete chkp_completed_bounds_set;
4451 delete chkp_reg_addr_bounds;
4452 delete chkp_incomplete_bounds_map;
4454 free_dominance_info (CDI_DOMINATORS);
4455 free_dominance_info (CDI_POST_DOMINATORS);
4457 bitmap_obstack_release (NULL);
4459 entry_block = NULL;
4460 zero_bounds = NULL_TREE;
4461 none_bounds = NULL_TREE;
4464 /* Main instrumentation pass function.
   Initializes pass state, instruments the current function, cleans up
   redundant chkp builtins, marks the function instrumented, repairs the
   CFG and releases pass state.  Always returns 0 (no extra TODOs).  */
4465 static unsigned int
4466 chkp_execute (void)
4468 chkp_init ();
4470 chkp_instrument_function ();
4472 chkp_remove_useless_builtins ();
4474 chkp_function_mark_instrumented (cfun->decl);
4476 chkp_fix_cfg ();
4478 chkp_fini ();
4480 return 0;
4483 /* Instrumentation pass gate.
   Run only for instrumentation clones or for chkp static constructors
   (functions carrying the "chkp ctor" attribute).  */
4484 static bool
4485 chkp_gate (void)
4487 cgraph_node *node = cgraph_node::get (cfun->decl);
4488 return ((node != NULL
4489 && node->instrumentation_clone)
4490 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
4493 namespace {
/* Pass descriptor for the Pointer Bounds Checker instrumentation pass.  */
4495 const pass_data pass_data_chkp =
4497 GIMPLE_PASS, /* type */
4498 "chkp", /* name */
4499 OPTGROUP_NONE, /* optinfo_flags */
4500 TV_NONE, /* tv_id */
4501 PROP_ssa | PROP_cfg, /* properties_required */
4502 0, /* properties_provided */
4503 0, /* properties_destroyed */
4504 0, /* todo_flags_start */
4505 TODO_verify_il
4506 | TODO_update_ssa /* todo_flags_finish */
/* GIMPLE pass wrapper delegating gate/execute to chkp_gate/chkp_execute.  */
4509 class pass_chkp : public gimple_opt_pass
4511 public:
4512 pass_chkp (gcc::context *ctxt)
4513 : gimple_opt_pass (pass_data_chkp, ctxt)
4516 /* opt_pass methods: */
4517 virtual opt_pass * clone ()
4519 return new pass_chkp (m_ctxt);
4522 virtual bool gate (function *)
4524 return chkp_gate ();
4527 virtual unsigned int execute (function *)
4529 return chkp_execute ();
4532 }; // class pass_chkp
4534 } // anon namespace
/* Factory used by the pass manager to create the chkp pass instance.  */
4536 gimple_opt_pass *
4537 make_pass_chkp (gcc::context *ctxt)
4539 return new pass_chkp (ctxt);
4542 #include "gt-tree-chkp.h"