Fix ifunc and resolver (PR ipa/81213).
[official-gcc.git] / gcc / tree-chkp.c
blob12af458fb90d79a0df94125075c1a3db2b98e5f2
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2017 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "diagnostic.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "varasm.h"
37 #include "tree-iterator.h"
38 #include "tree-cfg.h"
39 #include "langhooks.h"
40 #include "tree-ssa-address.h"
41 #include "tree-ssa-loop-niter.h"
42 #include "gimple-pretty-print.h"
43 #include "gimple-iterator.h"
44 #include "gimplify.h"
45 #include "gimplify-me.h"
46 #include "print-tree.h"
47 #include "calls.h"
48 #include "expr.h"
49 #include "tree-ssa-propagate.h"
50 #include "tree-chkp.h"
51 #include "gimple-walk.h"
52 #include "tree-dfa.h"
53 #include "ipa-chkp.h"
54 #include "params.h"
55 #include "stringpool.h"
56 #include "attribs.h"
58 /* Pointer Bounds Checker instruments code with memory checks to find
59 out-of-bounds memory accesses. Checks are performed by computing
60 bounds for each pointer and then comparing address of accessed
61 memory before pointer dereferencing.
63 1. Function clones.
65 See ipa-chkp.c.
67 2. Instrumentation.
69 There are few things to instrument:
71 a) Memory accesses - add checker calls to check address of accessed memory
72 against bounds of dereferenced pointer. Obviously safe memory
73 accesses like static variable access does not have to be instrumented
74 with checks.
76 Example:
78 val_2 = *p_1;
80 with 4 bytes access is transformed into:
82 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
83 D.1_4 = p_1 + 3;
84 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
85 val_2 = *p_1;
87 where __bound_tmp.1_3 are bounds computed for pointer p_1,
88 __builtin___chkp_bndcl is a lower bound check and
89 __builtin___chkp_bndcu is an upper bound check.
91 b) Pointer stores.
93 When pointer is stored in memory we need to store its bounds. To
94 achieve compatibility of instrumented code with regular codes
95 we have to keep data layout and store bounds in special bound tables
96 via special checker call. Implementation of bounds table may vary for
97 different platforms. It has to associate pointer value and its
98 location (it is required because we may have two equal pointers
99 with different bounds stored in different places) with bounds.
100 Another checker builtin allows to get bounds for specified pointer
101 loaded from specified location.
103 Example:
105 buf1[i_1] = &buf2;
107 is transformed into:
109 buf1[i_1] = &buf2;
110 D.1_2 = &buf1[i_1];
111 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
113 where __bound_tmp.1_2 are bounds of &buf2.
115 c) Static initialization.
117 The special case of pointer store is static pointer initialization.
118 Bounds initialization is performed in a few steps:
119 - register all static initializations in front-end using
120 chkp_register_var_initializer
121 - when file compilation finishes we create functions with special
122 attribute 'chkp ctor' and put explicit initialization code
123 (assignments) for all statically initialized pointers.
124 - when checker constructor is compiled checker pass adds required
125 bounds initialization for all statically initialized pointers
126 - since we do not actually need excess pointers initialization
127 in checker constructor we remove such assignments from them
129 d) Calls.
131 For each call in the code we add additional arguments to pass
132 bounds for pointer arguments. We determine type of call arguments
133 using arguments list from function declaration; if function
134 declaration is not available we use function type; otherwise
135 (e.g. for unnamed arguments) we use type of passed value. Function
136 declaration/type is replaced with the instrumented one.
138 Example:
140 val_1 = foo (&buf1, &buf2, &buf1, 0);
142 is translated into:
144 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
145 &buf1, __bound_tmp.1_2, 0);
147 e) Returns.
149 If function returns a pointer value we have to return bounds also.
150 A new operand was added for return statement to hold returned bounds.
152 Example:
154 return &_buf1;
156 is transformed into
158 return &_buf1, __bound_tmp.1_1;
160 3. Bounds computation.
162 Compiler is fully responsible for computing bounds to be used for each
163 memory access. The first step for bounds computation is to find the
164 origin of pointer dereferenced for memory access. Based on the pointer
165 origin we define a way to compute its bounds. There are just a few
166 possible cases:
168 a) Pointer is returned by call.
170 In this case we use corresponding checker builtin method to obtain returned
171 bounds.
173 Example:
175 buf_1 = malloc (size_2);
176 foo (buf_1);
178 is translated into:
180 buf_1 = malloc (size_2);
181 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
182 foo (buf_1, __bound_tmp.1_3);
184 b) Pointer is an address of an object.
186 In this case compiler tries to compute objects size and create corresponding
187 bounds. If object has incomplete type then special checker builtin is used to
188 obtain its size at runtime.
190 Example:
192 foo ()
194 <unnamed type> __bound_tmp.3;
195 static int buf[100];
197 <bb 3>:
198 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
200 <bb 2>:
201 return &buf, __bound_tmp.3_2;
204 Example:
206 Address of an object 'extern int buf[]' with incomplete type is
207 returned.
209 foo ()
211 <unnamed type> __bound_tmp.4;
212 long unsigned int __size_tmp.3;
214 <bb 3>:
215 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
216 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
218 <bb 2>:
219 return &buf, __bound_tmp.4_3;
222 c) Pointer is the result of object narrowing.
224 It happens when we use pointer to an object to compute pointer to a part
225 of an object. E.g. we take pointer to a field of a structure. In this
226 case we perform bounds intersection using bounds of original object and
227 bounds of object's part (which are computed basing on its type).
229 There may be some debatable questions about when narrowing should occur
230 and when it should not. To avoid false bound violations in correct
231 programs we do not perform narrowing when address of an array element is
232 obtained (it has address of the whole array) and when address of the first
233 structure field is obtained (because it is guaranteed to be equal to
234 address of the whole structure and it is legal to cast it back to structure).
236 Default narrowing behavior may be changed using compiler flags.
238 Example:
240 In this example address of the second structure field is returned.
242 foo (struct A * p, __bounds_type __bounds_of_p)
244 <unnamed type> __bound_tmp.3;
245 int * _2;
246 int * _5;
248 <bb 2>:
249 _5 = &p_1(D)->second_field;
250 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
251 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
252 __bounds_of_p_3(D));
253 _2 = &p_1(D)->second_field;
254 return _2, __bound_tmp.3_8;
257 Example:
259 In this example address of the first field of array element is returned.
261 foo (struct A * p, __bounds_type __bounds_of_p, int i)
263 long unsigned int _3;
264 long unsigned int _4;
265 struct A * _6;
266 int * _7;
268 <bb 2>:
269 _3 = (long unsigned int) i_1(D);
270 _4 = _3 * 8;
271 _6 = p_5(D) + _4;
272 _7 = &_6->first_field;
273 return _7, __bounds_of_p_2(D);
277 d) Pointer is the result of pointer arithmetic or type cast.
279 In this case bounds of the base pointer are used. In case of binary
280 operation producing a pointer we are analyzing data flow further
281 looking for operand's bounds. One operand is considered as a base
282 if it has some valid bounds. If we fall into a case when none of
283 operands (or both of them) has valid bounds, a default bounds value
284 is used.
286 Trying to find out bounds for binary operations we may fall into
287 cyclic dependencies for pointers. To avoid infinite recursion all
288 walked phi nodes instantly obtain corresponding bounds but created
289 bounds are marked as incomplete. It helps us to stop DF walk during
290 bounds search.
292 When we reach pointer source, some args of incomplete bounds phi obtain
293 valid bounds and those values are propagated further through phi nodes.
294 If no valid bounds were found for phi node then we mark its result as
295 invalid bounds. Process stops when all incomplete bounds become either
296 valid or invalid and we are able to choose a pointer base.
298 e) Pointer is loaded from the memory.
300 In this case we just need to load bounds from the bounds table.
302 Example:
304 foo ()
306 <unnamed type> __bound_tmp.3;
307 static int * buf;
308 int * _2;
310 <bb 2>:
311 _2 = buf;
312 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
313 return _2, __bound_tmp.3_4;
318 typedef void (*assign_handler)(tree, tree, void *);
320 static tree chkp_get_zero_bounds ();
321 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
322 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
323 gimple_stmt_iterator *iter);
324 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
325 tree *elt, bool *safe,
326 bool *bitfield,
327 tree *bounds,
328 gimple_stmt_iterator *iter,
329 bool innermost_bounds);
330 static void chkp_parse_bit_field_ref (tree node, location_t loc,
331 tree *offset, tree *size);
332 static tree
333 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter);
/* Shorthands for the target-provided checker builtins.  Each macro
   re-queries the target hook so a changed target is always honored.  */
#define chkp_bndldx_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
#define chkp_bndstx_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
#define chkp_checkl_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
#define chkp_checku_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
#define chkp_bndmk_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
#define chkp_ret_bnd_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
#define chkp_intersect_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
#define chkp_narrow_bounds_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
#define chkp_sizeof_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
#define chkp_extract_lower_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
#define chkp_extract_upper_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
358 static GTY (()) tree chkp_uintptr_type;
360 static GTY (()) tree chkp_zero_bounds_var;
361 static GTY (()) tree chkp_none_bounds_var;
363 static GTY (()) basic_block entry_block;
364 static GTY (()) tree zero_bounds;
365 static GTY (()) tree none_bounds;
366 static GTY (()) tree incomplete_bounds;
367 static GTY (()) tree tmp_var;
368 static GTY (()) tree size_tmp_var;
369 static GTY (()) bitmap chkp_abnormal_copies;
371 struct hash_set<tree> *chkp_invalid_bounds;
372 struct hash_set<tree> *chkp_completed_bounds_set;
373 struct hash_map<tree, tree> *chkp_reg_bounds;
374 struct hash_map<tree, tree> *chkp_bound_vars;
375 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
376 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
377 struct hash_map<tree, tree> *chkp_bounds_map;
378 struct hash_map<tree, tree> *chkp_static_var_bounds;
380 static bool in_chkp_pass;
382 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
383 #define CHKP_SIZE_TMP_NAME "__size_tmp"
384 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
385 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
386 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
387 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
388 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
390 /* Static checker constructors may become very large and their
391 compilation with optimization may take too much time.
392 Therefore we put a limit to number of statements in one
393 constructor. Tests with 100 000 statically initialized
394 pointers showed following compilation times on Sandy Bridge
395 server (used -O2):
396 limit 100 => ~18 sec.
397 limit 300 => ~22 sec.
398 limit 1000 => ~30 sec.
399 limit 3000 => ~49 sec.
400 limit 5000 => ~55 sec.
401 limit 10000 => ~76 sec.
402 limit 100000 => ~532 sec. */
403 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
405 struct chkp_ctor_stmt_list
407 tree stmts;
408 int avail;
411 /* Return 1 if function FNDECL is instrumented by Pointer
412 Bounds Checker. */
413 bool
414 chkp_function_instrumented_p (tree fndecl)
416 return fndecl
417 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
420 /* Mark function FNDECL as instrumented. */
421 void
422 chkp_function_mark_instrumented (tree fndecl)
424 if (chkp_function_instrumented_p (fndecl))
425 return;
427 DECL_ATTRIBUTES (fndecl)
428 = tree_cons (get_identifier ("chkp instrumented"), NULL,
429 DECL_ATTRIBUTES (fndecl));
432 /* Return true when STMT is builtin call to instrumentation function
433 corresponding to CODE. */
435 bool
436 chkp_gimple_call_builtin_p (gimple *call,
437 enum built_in_function code)
439 tree fndecl;
440 /* We are skipping the check for address-spaces, that's
441 why we don't use gimple_call_builtin_p directly here. */
442 if (is_gimple_call (call)
443 && (fndecl = gimple_call_fndecl (call)) != NULL
444 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD
445 && (fndecl = targetm.builtin_chkp_function (code))
446 && (DECL_FUNCTION_CODE (gimple_call_fndecl (call))
447 == DECL_FUNCTION_CODE (fndecl)))
448 return true;
449 return false;
452 /* Emit code to build zero bounds and return RTL holding
453 the result. */
455 chkp_expand_zero_bounds ()
457 tree zero_bnd;
459 if (flag_chkp_use_static_const_bounds)
460 zero_bnd = chkp_get_zero_bounds_var ();
461 else
462 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
463 integer_zero_node);
464 return expand_normal (zero_bnd);
467 /* Emit code to store zero bounds for PTR located at MEM. */
468 void
469 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
471 tree zero_bnd, bnd, addr, bndstx;
473 if (flag_chkp_use_static_const_bounds)
474 zero_bnd = chkp_get_zero_bounds_var ();
475 else
476 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
477 integer_zero_node);
478 bnd = make_tree (pointer_bounds_type_node,
479 assign_temp (pointer_bounds_type_node, 0, 1));
480 addr = build1 (ADDR_EXPR,
481 build_pointer_type (TREE_TYPE (mem)), mem);
482 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
484 expand_assignment (bnd, zero_bnd, false);
485 expand_normal (bndstx);
488 /* Build retbnd call for returned value RETVAL.
490 If BNDVAL is not NULL then result is stored
491 in it. Otherwise a temporary is created to
492 hold returned value.
494 GSI points to a position for a retbnd call
495 and is set to created stmt.
497 Cgraph edge is created for a new call if
498 UPDATE_EDGE is 1.
500 Obtained bounds are returned. */
501 tree
502 chkp_insert_retbnd_call (tree bndval, tree retval,
503 gimple_stmt_iterator *gsi)
505 gimple *call;
507 if (!bndval)
508 bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
510 call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
511 gimple_call_set_lhs (call, bndval);
512 gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
514 return bndval;
517 /* Build a GIMPLE_CALL identical to CALL but skipping bounds
518 arguments. */
520 gcall *
521 chkp_copy_call_skip_bounds (gcall *call)
523 bitmap bounds;
524 unsigned i;
526 bitmap_obstack_initialize (NULL);
527 bounds = BITMAP_ALLOC (NULL);
529 for (i = 0; i < gimple_call_num_args (call); i++)
530 if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
531 bitmap_set_bit (bounds, i);
533 if (!bitmap_empty_p (bounds))
534 call = gimple_call_copy_skip_args (call, bounds);
535 gimple_call_set_with_bounds (call, false);
537 BITMAP_FREE (bounds);
538 bitmap_obstack_release (NULL);
540 return call;
543 /* Redirect edge E to the correct node according to call_stmt.
544 Return 1 if bounds removal from call_stmt should be done
545 instead of redirection. */
547 bool
548 chkp_redirect_edge (cgraph_edge *e)
550 bool instrumented = false;
551 tree decl = e->callee->decl;
553 if (e->callee->instrumentation_clone
554 || chkp_function_instrumented_p (decl))
555 instrumented = true;
557 if (instrumented
558 && !gimple_call_with_bounds_p (e->call_stmt))
559 e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
560 else if (!instrumented
561 && gimple_call_with_bounds_p (e->call_stmt)
562 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
563 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
564 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
566 if (e->callee->instrumented_version)
567 e->redirect_callee (e->callee->instrumented_version);
568 else
570 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
571 /* Avoid bounds removal if all args will be removed. */
572 if (!args || TREE_VALUE (args) != void_type_node)
573 return true;
574 else
575 gimple_call_set_with_bounds (e->call_stmt, false);
579 return false;
582 /* Mark statement S to not be instrumented. */
583 static void
584 chkp_mark_stmt (gimple *s)
586 gimple_set_plf (s, GF_PLF_1, true);
589 /* Mark statement S to be instrumented. */
590 static void
591 chkp_unmark_stmt (gimple *s)
593 gimple_set_plf (s, GF_PLF_1, false);
596 /* Return 1 if statement S should not be instrumented. */
597 static bool
598 chkp_marked_stmt_p (gimple *s)
600 return gimple_plf (s, GF_PLF_1);
603 /* Get var to be used for bound temps. */
604 static tree
605 chkp_get_tmp_var (void)
607 if (!tmp_var)
608 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
610 return tmp_var;
613 /* Get SSA_NAME to be used as temp. */
614 static tree
615 chkp_get_tmp_reg (gimple *stmt)
617 if (in_chkp_pass)
618 return make_ssa_name (chkp_get_tmp_var (), stmt);
620 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
621 CHKP_BOUND_TMP_NAME);
624 /* Get var to be used for size temps. */
625 static tree
626 chkp_get_size_tmp_var (void)
628 if (!size_tmp_var)
629 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
631 return size_tmp_var;
634 /* Register bounds BND for address of OBJ. */
635 static void
636 chkp_register_addr_bounds (tree obj, tree bnd)
638 if (bnd == incomplete_bounds)
639 return;
641 chkp_reg_addr_bounds->put (obj, bnd);
643 if (dump_file && (dump_flags & TDF_DETAILS))
645 fprintf (dump_file, "Regsitered bound ");
646 print_generic_expr (dump_file, bnd);
647 fprintf (dump_file, " for address of ");
648 print_generic_expr (dump_file, obj);
649 fprintf (dump_file, "\n");
653 /* Return bounds registered for address of OBJ. */
654 static tree
655 chkp_get_registered_addr_bounds (tree obj)
657 tree *slot = chkp_reg_addr_bounds->get (obj);
658 return slot ? *slot : NULL_TREE;
661 /* Mark BOUNDS as completed. */
662 static void
663 chkp_mark_completed_bounds (tree bounds)
665 chkp_completed_bounds_set->add (bounds);
667 if (dump_file && (dump_flags & TDF_DETAILS))
669 fprintf (dump_file, "Marked bounds ");
670 print_generic_expr (dump_file, bounds);
671 fprintf (dump_file, " as completed\n");
675 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
676 static bool
677 chkp_completed_bounds (tree bounds)
679 return chkp_completed_bounds_set->contains (bounds);
682 /* Clear comleted bound marks. */
683 static void
684 chkp_erase_completed_bounds (void)
686 delete chkp_completed_bounds_set;
687 chkp_completed_bounds_set = new hash_set<tree>;
690 /* This function is used to provide a base address for
691 chkp_get_hard_register_fake_addr_expr. */
692 static tree
693 chkp_get_hard_register_var_fake_base_address ()
695 int prec = TYPE_PRECISION (ptr_type_node);
696 return wide_int_to_tree (ptr_type_node, wi::min_value (prec, SIGNED));
699 /* If we check bounds for a hard register variable, we cannot
700 use its address - it is illegal, so instead of that we use
701 this fake value. */
702 static tree
703 chkp_get_hard_register_fake_addr_expr (tree obj)
705 tree addr = chkp_get_hard_register_var_fake_base_address ();
706 tree outer = obj;
707 while (TREE_CODE (outer) == COMPONENT_REF || TREE_CODE (outer) == ARRAY_REF)
709 if (TREE_CODE (outer) == COMPONENT_REF)
711 addr = fold_build_pointer_plus (addr,
712 component_ref_field_offset (outer));
713 outer = TREE_OPERAND (outer, 0);
715 else if (TREE_CODE (outer) == ARRAY_REF)
717 tree indx = fold_convert(size_type_node, TREE_OPERAND(outer, 1));
718 tree offset = size_binop (MULT_EXPR,
719 array_ref_element_size (outer), indx);
720 addr = fold_build_pointer_plus (addr, offset);
721 outer = TREE_OPERAND (outer, 0);
725 return addr;
728 /* Mark BOUNDS associated with PTR as incomplete. */
729 static void
730 chkp_register_incomplete_bounds (tree bounds, tree ptr)
732 chkp_incomplete_bounds_map->put (bounds, ptr);
734 if (dump_file && (dump_flags & TDF_DETAILS))
736 fprintf (dump_file, "Regsitered incomplete bounds ");
737 print_generic_expr (dump_file, bounds);
738 fprintf (dump_file, " for ");
739 print_generic_expr (dump_file, ptr);
740 fprintf (dump_file, "\n");
744 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
745 static bool
746 chkp_incomplete_bounds (tree bounds)
748 if (bounds == incomplete_bounds)
749 return true;
751 if (chkp_completed_bounds (bounds))
752 return false;
754 return chkp_incomplete_bounds_map->get (bounds) != NULL;
757 /* Clear incomleted bound marks. */
758 static void
759 chkp_erase_incomplete_bounds (void)
761 delete chkp_incomplete_bounds_map;
762 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
765 /* Build and return bndmk call which creates bounds for structure
766 pointed by PTR. Structure should have complete type. */
767 tree
768 chkp_make_bounds_for_struct_addr (tree ptr)
770 tree type = TREE_TYPE (ptr);
771 tree size;
773 gcc_assert (POINTER_TYPE_P (type));
775 size = TYPE_SIZE (TREE_TYPE (type));
777 gcc_assert (size);
779 return build_call_nary (pointer_bounds_type_node,
780 build_fold_addr_expr (chkp_bndmk_fndecl),
781 2, ptr, size);
784 /* Traversal function for chkp_may_finish_incomplete_bounds.
785 Set RES to 0 if at least one argument of phi statement
786 defining bounds (passed in KEY arg) is unknown.
787 Traversal stops when first unknown phi argument is found. */
788 bool
789 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
790 bool *res)
792 gimple *phi;
793 unsigned i;
795 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
797 phi = SSA_NAME_DEF_STMT (bounds);
799 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
801 for (i = 0; i < gimple_phi_num_args (phi); i++)
803 tree phi_arg = gimple_phi_arg_def (phi, i);
804 if (!phi_arg)
806 *res = false;
807 /* Do not need to traverse further. */
808 return false;
812 return true;
815 /* Return 1 if all phi nodes created for bounds have their
816 arguments computed. */
817 static bool
818 chkp_may_finish_incomplete_bounds (void)
820 bool res = true;
822 chkp_incomplete_bounds_map
823 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
825 return res;
828 /* Helper function for chkp_finish_incomplete_bounds.
829 Recompute args for bounds phi node. */
830 bool
831 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
832 void *res ATTRIBUTE_UNUSED)
834 tree ptr = *slot;
835 gphi *bounds_phi;
836 gphi *ptr_phi;
837 unsigned i;
839 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
840 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
842 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
843 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
845 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
847 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
848 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
850 add_phi_arg (bounds_phi, bound_arg,
851 gimple_phi_arg_edge (ptr_phi, i),
852 UNKNOWN_LOCATION);
855 return true;
858 /* Mark BOUNDS as invalid. */
859 static void
860 chkp_mark_invalid_bounds (tree bounds)
862 chkp_invalid_bounds->add (bounds);
864 if (dump_file && (dump_flags & TDF_DETAILS))
866 fprintf (dump_file, "Marked bounds ");
867 print_generic_expr (dump_file, bounds);
868 fprintf (dump_file, " as invalid\n");
872 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
873 static bool
874 chkp_valid_bounds (tree bounds)
876 if (bounds == zero_bounds || bounds == none_bounds)
877 return false;
879 return !chkp_invalid_bounds->contains (bounds);
882 /* Helper function for chkp_finish_incomplete_bounds.
883 Check all arguments of phi nodes trying to find
884 valid completed bounds. If there is at least one
885 such arg then bounds produced by phi node are marked
886 as valid completed bounds and all phi args are
887 recomputed. */
888 bool
889 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
891 gimple *phi;
892 unsigned i;
894 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
896 if (chkp_completed_bounds (bounds))
897 return true;
899 phi = SSA_NAME_DEF_STMT (bounds);
901 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
903 for (i = 0; i < gimple_phi_num_args (phi); i++)
905 tree phi_arg = gimple_phi_arg_def (phi, i);
907 gcc_assert (phi_arg);
909 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
911 *res = true;
912 chkp_mark_completed_bounds (bounds);
913 chkp_recompute_phi_bounds (bounds, slot, NULL);
914 return true;
918 return true;
921 /* Helper function for chkp_finish_incomplete_bounds.
922 Marks all incompleted bounds as invalid. */
923 bool
924 chkp_mark_invalid_bounds_walker (tree const &bounds,
925 tree *slot ATTRIBUTE_UNUSED,
926 void *res ATTRIBUTE_UNUSED)
928 if (!chkp_completed_bounds (bounds))
930 chkp_mark_invalid_bounds (bounds);
931 chkp_mark_completed_bounds (bounds);
933 return true;
936 /* When all bound phi nodes have all their args computed
937 we have enough info to find valid bounds. We iterate
938 through all incompleted bounds searching for valid
939 bounds. Found valid bounds are marked as completed
940 and all remaining incompleted bounds are recomputed.
941 Process continues until no new valid bounds may be
942 found. All remained incompleted bounds are marked as
943 invalid (i.e. have no valid source of bounds). */
944 static void
945 chkp_finish_incomplete_bounds (void)
947 bool found_valid = true;
949 while (found_valid)
951 found_valid = false;
953 chkp_incomplete_bounds_map->
954 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
956 if (found_valid)
957 chkp_incomplete_bounds_map->
958 traverse<void *, chkp_recompute_phi_bounds> (NULL);
961 chkp_incomplete_bounds_map->
962 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
963 chkp_incomplete_bounds_map->
964 traverse<void *, chkp_recompute_phi_bounds> (NULL);
966 chkp_erase_completed_bounds ();
967 chkp_erase_incomplete_bounds ();
970 /* Return 1 if type TYPE is a pointer type or a
971 structure having a pointer type as one of its fields.
972 Otherwise return 0. */
973 bool
974 chkp_type_has_pointer (const_tree type)
976 bool res = false;
978 if (BOUNDED_TYPE_P (type))
979 res = true;
980 else if (RECORD_OR_UNION_TYPE_P (type))
982 tree field;
984 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
985 if (TREE_CODE (field) == FIELD_DECL)
986 res = res || chkp_type_has_pointer (TREE_TYPE (field));
988 else if (TREE_CODE (type) == ARRAY_TYPE)
989 res = chkp_type_has_pointer (TREE_TYPE (type));
991 return res;
994 unsigned
995 chkp_type_bounds_count (const_tree type)
997 unsigned res = 0;
999 if (!type)
1000 res = 0;
1001 else if (BOUNDED_TYPE_P (type))
1002 res = 1;
1003 else if (RECORD_OR_UNION_TYPE_P (type))
1005 bitmap have_bound;
1007 bitmap_obstack_initialize (NULL);
1008 have_bound = BITMAP_ALLOC (NULL);
1009 chkp_find_bound_slots (type, have_bound);
1010 res = bitmap_count_bits (have_bound);
1011 BITMAP_FREE (have_bound);
1012 bitmap_obstack_release (NULL);
1015 return res;
1018 /* Get bounds associated with NODE via
1019 chkp_set_bounds call. */
1020 tree
1021 chkp_get_bounds (tree node)
1023 tree *slot;
1025 if (!chkp_bounds_map)
1026 return NULL_TREE;
1028 slot = chkp_bounds_map->get (node);
1029 return slot ? *slot : NULL_TREE;
1032 /* Associate bounds VAL with NODE. */
1033 void
1034 chkp_set_bounds (tree node, tree val)
1036 if (!chkp_bounds_map)
1037 chkp_bounds_map = new hash_map<tree, tree>;
1039 chkp_bounds_map->put (node, val);
1042 /* Check if statically initialized variable VAR require
1043 static bounds initialization. If VAR is added into
1044 bounds initlization list then 1 is returned. Otherwise
1045 return 0. */
1046 extern bool
1047 chkp_register_var_initializer (tree var)
1049 if (!flag_check_pointer_bounds
1050 || DECL_INITIAL (var) == error_mark_node)
1051 return false;
1053 gcc_assert (VAR_P (var));
1054 gcc_assert (DECL_INITIAL (var));
1056 if (TREE_STATIC (var)
1057 && chkp_type_has_pointer (TREE_TYPE (var)))
1059 varpool_node::get_create (var)->need_bounds_init = 1;
1060 return true;
1063 return false;
1066 /* Helper function for chkp_finish_file.
1068 Add new modification statement (RHS is assigned to LHS)
1069 into list of static initializer statementes (passed in ARG).
1070 If statements list becomes too big, emit checker constructor
1071 and start the new one. */
1072 static void
1073 chkp_add_modification_to_stmt_list (tree lhs,
1074 tree rhs,
1075 void *arg)
1077 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
1078 tree modify;
1080 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1081 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1083 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1084 append_to_statement_list (modify, &stmts->stmts);
1086 stmts->avail--;
1089 /* Build and return ADDR_EXPR for specified object OBJ. */
1090 static tree
1091 chkp_build_addr_expr (tree obj)
1093 /* We first check whether it is a "hard reg case". */
1094 tree base = get_base_address (obj);
1095 if (VAR_P (base) && DECL_HARD_REGISTER (base))
1096 return chkp_get_hard_register_fake_addr_expr (obj);
1098 /* If not - return regular ADDR_EXPR. */
1099 return TREE_CODE (obj) == TARGET_MEM_REF
1100 ? tree_mem_ref_addr (ptr_type_node, obj)
1101 : build_fold_addr_expr (obj);
/* Helper function for chkp_finish_file.

   Emit statements initializing the static bounds variable BND_VAR
   with the bounds of variable VAR into the statement list STMTS.
   If the statement list becomes too big, emit a checker constructor
   and start a new list.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      /* String constant: the size is known at compile time.
	 NOTE(review): TREE_STRING_LENGTH appears to include the
	 terminating zero, hence the minus one — confirm.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size obtained via a call
	 to the checker's run-time sizeof function.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* When the dynamic size is zero, substitute the maximum
	     size reachable from LB (0 - lb) instead.  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  ub = size_binop (PLUS_EXPR, lb, size);
  /* The target hook emits the initialization statements and returns
     how many it used; charge them against the budget.  */
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  if (stmts->avail <= 0)
    {
      /* Budget exhausted: flush accumulated statements into a
	 static constructor and start a fresh list.  */
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
1162 /* Return entry block to be used for checker initilization code.
1163 Create new block if required. */
1164 static basic_block
1165 chkp_get_entry_block (void)
1167 if (!entry_block)
1168 entry_block
1169 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1171 return entry_block;
1174 /* Return a bounds var to be used for pointer var PTR_VAR. */
1175 static tree
1176 chkp_get_bounds_var (tree ptr_var)
1178 tree bnd_var;
1179 tree *slot;
1181 slot = chkp_bound_vars->get (ptr_var);
1182 if (slot)
1183 bnd_var = *slot;
1184 else
1186 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1187 CHKP_BOUND_TMP_NAME);
1188 chkp_bound_vars->put (ptr_var, bnd_var);
1191 return bnd_var;
1194 /* If BND is an abnormal bounds copy, return a copied value.
1195 Otherwise return BND. */
1196 static tree
1197 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
1199 if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1201 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1202 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1203 bnd = gimple_assign_rhs1 (bnd_def);
1206 return bnd;
1209 /* Register bounds BND for object PTR in global bounds table.
1210 A copy of bounds may be created for abnormal ssa names.
1211 Returns bounds to use for PTR. */
1212 static tree
1213 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1215 bool abnormal_ptr;
1217 if (!chkp_reg_bounds)
1218 return bnd;
1220 /* Do nothing if bounds are incomplete_bounds
1221 because it means bounds will be recomputed. */
1222 if (bnd == incomplete_bounds)
1223 return bnd;
1225 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1226 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1227 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1229 /* A single bounds value may be reused multiple times for
1230 different pointer values. It may cause coalescing issues
1231 for abnormal SSA names. To avoid it we create a bounds
1232 copy in case it is computed for abnormal SSA name.
1234 We also cannot reuse such created copies for other pointers */
1235 if (abnormal_ptr
1236 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1238 tree bnd_var = NULL_TREE;
1240 if (abnormal_ptr)
1242 if (SSA_NAME_VAR (ptr))
1243 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1245 else
1246 bnd_var = chkp_get_tmp_var ();
1248 /* For abnormal copies we may just find original
1249 bounds and use them. */
1250 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1251 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1252 /* For undefined values we usually use none bounds
1253 value but in case of abnormal edge it may cause
1254 coalescing failures. Use default definition of
1255 bounds variable instead to avoid it. */
1256 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1257 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1259 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1261 if (dump_file && (dump_flags & TDF_DETAILS))
1263 fprintf (dump_file, "Using default def bounds ");
1264 print_generic_expr (dump_file, bnd);
1265 fprintf (dump_file, " for abnormal default def SSA name ");
1266 print_generic_expr (dump_file, ptr);
1267 fprintf (dump_file, "\n");
1270 else
1272 tree copy;
1273 gimple *def = SSA_NAME_DEF_STMT (ptr);
1274 gimple *assign;
1275 gimple_stmt_iterator gsi;
1277 if (bnd_var)
1278 copy = make_ssa_name (bnd_var);
1279 else
1280 copy = make_temp_ssa_name (pointer_bounds_type_node,
1281 NULL,
1282 CHKP_BOUND_TMP_NAME);
1283 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1284 assign = gimple_build_assign (copy, bnd);
1286 if (dump_file && (dump_flags & TDF_DETAILS))
1288 fprintf (dump_file, "Creating a copy of bounds ");
1289 print_generic_expr (dump_file, bnd);
1290 fprintf (dump_file, " for abnormal SSA name ");
1291 print_generic_expr (dump_file, ptr);
1292 fprintf (dump_file, "\n");
1295 if (gimple_code (def) == GIMPLE_NOP)
1297 gsi = gsi_last_bb (chkp_get_entry_block ());
1298 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1299 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1300 else
1301 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1303 else
1305 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1306 /* Sometimes (e.g. when we load a pointer from a
1307 memory) bounds are produced later than a pointer.
1308 We need to insert bounds copy appropriately. */
1309 if (gimple_code (bnd_def) != GIMPLE_NOP
1310 && stmt_dominates_stmt_p (def, bnd_def))
1311 gsi = gsi_for_stmt (bnd_def);
1312 else
1313 gsi = gsi_for_stmt (def);
1314 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1317 bnd = copy;
1320 if (abnormal_ptr)
1321 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1324 chkp_reg_bounds->put (ptr, bnd);
1326 if (dump_file && (dump_flags & TDF_DETAILS))
1328 fprintf (dump_file, "Regsitered bound ");
1329 print_generic_expr (dump_file, bnd);
1330 fprintf (dump_file, " for pointer ");
1331 print_generic_expr (dump_file, ptr);
1332 fprintf (dump_file, "\n");
1335 return bnd;
1338 /* Get bounds registered for object PTR in global bounds table. */
1339 static tree
1340 chkp_get_registered_bounds (tree ptr)
1342 tree *slot;
1344 if (!chkp_reg_bounds)
1345 return NULL_TREE;
1347 slot = chkp_reg_bounds->get (ptr);
1348 return slot ? *slot : NULL_TREE;
1351 /* Add bound retvals to return statement pointed by GSI. */
1353 static void
1354 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1356 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1357 tree retval = gimple_return_retval (ret);
1358 tree ret_decl = DECL_RESULT (cfun->decl);
1359 tree bounds;
1361 if (!retval)
1362 return;
1364 if (BOUNDED_P (ret_decl))
1366 bounds = chkp_find_bounds (retval, gsi);
1367 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1368 gimple_return_set_retbnd (ret, bounds);
1371 update_stmt (ret);
1374 /* Force OP to be suitable for using as an argument for call.
1375 New statements (if any) go to SEQ. */
1376 static tree
1377 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1379 gimple_seq stmts;
1380 gimple_stmt_iterator si;
1382 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1384 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1385 chkp_mark_stmt (gsi_stmt (si));
1387 gimple_seq_add_seq (seq, stmts);
1389 return op;
1392 /* Generate lower bound check for memory access by ADDR.
1393 Check is inserted before the position pointed by ITER.
1394 DIRFLAG indicates whether memory access is load or store. */
1395 static void
1396 chkp_check_lower (tree addr, tree bounds,
1397 gimple_stmt_iterator iter,
1398 location_t location,
1399 tree dirflag)
1401 gimple_seq seq;
1402 gimple *check;
1403 tree node;
1405 if (!chkp_function_instrumented_p (current_function_decl)
1406 && bounds == chkp_get_zero_bounds ())
1407 return;
1409 if (dirflag == integer_zero_node
1410 && !flag_chkp_check_read)
1411 return;
1413 if (dirflag == integer_one_node
1414 && !flag_chkp_check_write)
1415 return;
1417 seq = NULL;
1419 node = chkp_force_gimple_call_op (addr, &seq);
1421 check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1422 chkp_mark_stmt (check);
1423 gimple_call_set_with_bounds (check, true);
1424 gimple_set_location (check, location);
1425 gimple_seq_add_stmt (&seq, check);
1427 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1429 if (dump_file && (dump_flags & TDF_DETAILS))
1431 gimple *before = gsi_stmt (iter);
1432 fprintf (dump_file, "Generated lower bound check for statement ");
1433 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1434 fprintf (dump_file, " ");
1435 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1439 /* Generate upper bound check for memory access by ADDR.
1440 Check is inserted before the position pointed by ITER.
1441 DIRFLAG indicates whether memory access is load or store. */
1442 static void
1443 chkp_check_upper (tree addr, tree bounds,
1444 gimple_stmt_iterator iter,
1445 location_t location,
1446 tree dirflag)
1448 gimple_seq seq;
1449 gimple *check;
1450 tree node;
1452 if (!chkp_function_instrumented_p (current_function_decl)
1453 && bounds == chkp_get_zero_bounds ())
1454 return;
1456 if (dirflag == integer_zero_node
1457 && !flag_chkp_check_read)
1458 return;
1460 if (dirflag == integer_one_node
1461 && !flag_chkp_check_write)
1462 return;
1464 seq = NULL;
1466 node = chkp_force_gimple_call_op (addr, &seq);
1468 check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1469 chkp_mark_stmt (check);
1470 gimple_call_set_with_bounds (check, true);
1471 gimple_set_location (check, location);
1472 gimple_seq_add_stmt (&seq, check);
1474 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1476 if (dump_file && (dump_flags & TDF_DETAILS))
1478 gimple *before = gsi_stmt (iter);
1479 fprintf (dump_file, "Generated upper bound check for statement ");
1480 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1481 fprintf (dump_file, " ");
1482 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1486 /* Generate lower and upper bound checks for memory access
1487 to memory slot [FIRST, LAST] againsr BOUNDS. Checks
1488 are inserted before the position pointed by ITER.
1489 DIRFLAG indicates whether memory access is load or store. */
1490 void
1491 chkp_check_mem_access (tree first, tree last, tree bounds,
1492 gimple_stmt_iterator iter,
1493 location_t location,
1494 tree dirflag)
1496 chkp_check_lower (first, bounds, iter, location, dirflag);
1497 chkp_check_upper (last, bounds, iter, location, dirflag);
1500 /* Replace call to _bnd_chk_* pointed by GSI with
1501 bndcu and bndcl calls. DIRFLAG determines whether
1502 check is for read or write. */
1504 void
1505 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1506 tree dirflag)
1508 gimple_stmt_iterator call_iter = *gsi;
1509 gimple *call = gsi_stmt (*gsi);
1510 tree fndecl = gimple_call_fndecl (call);
1511 tree addr = gimple_call_arg (call, 0);
1512 tree bounds = chkp_find_bounds (addr, gsi);
1514 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1515 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1516 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1518 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1519 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1521 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1523 tree size = gimple_call_arg (call, 1);
1524 addr = fold_build_pointer_plus (addr, size);
1525 addr = fold_build_pointer_plus_hwi (addr, -1);
1526 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1529 gsi_remove (&call_iter, true);
1532 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1533 corresponding bounds extract call. */
1535 void
1536 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1538 gimple *call = gsi_stmt (*gsi);
1539 tree fndecl = gimple_call_fndecl (call);
1540 tree addr = gimple_call_arg (call, 0);
1541 tree bounds = chkp_find_bounds (addr, gsi);
1542 gimple *extract;
1544 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1545 fndecl = chkp_extract_lower_fndecl;
1546 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1547 fndecl = chkp_extract_upper_fndecl;
1548 else
1549 gcc_unreachable ();
1551 extract = gimple_build_call (fndecl, 1, bounds);
1552 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1553 chkp_mark_stmt (extract);
1555 gsi_replace (gsi, extract, false);
1558 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1559 static tree
1560 chkp_build_component_ref (tree obj, tree field)
1562 tree res;
1564 /* If object is TMR then we do not use component_ref but
1565 add offset instead. We need it to be able to get addr
1566 of the reasult later. */
1567 if (TREE_CODE (obj) == TARGET_MEM_REF)
1569 tree offs = TMR_OFFSET (obj);
1570 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1571 offs, DECL_FIELD_OFFSET (field));
1573 gcc_assert (offs);
1575 res = copy_node (obj);
1576 TREE_TYPE (res) = TREE_TYPE (field);
1577 TMR_OFFSET (res) = offs;
1579 else
1580 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1582 return res;
1585 /* Return ARRAY_REF for array ARR and index IDX with
1586 specified element type ETYPE and element size ESIZE. */
1587 static tree
1588 chkp_build_array_ref (tree arr, tree etype, tree esize,
1589 unsigned HOST_WIDE_INT idx)
1591 tree index = build_int_cst (size_type_node, idx);
1592 tree res;
1594 /* If object is TMR then we do not use array_ref but
1595 add offset instead. We need it to be able to get addr
1596 of the reasult later. */
1597 if (TREE_CODE (arr) == TARGET_MEM_REF)
1599 tree offs = TMR_OFFSET (arr);
1601 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1602 esize, index);
1603 gcc_assert(esize);
1605 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1606 offs, esize);
1607 gcc_assert (offs);
1609 res = copy_node (arr);
1610 TREE_TYPE (res) = etype;
1611 TMR_OFFSET (res) = offs;
1613 else
1614 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1616 return res;
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill the ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds the base
   offset of TYPE in the outer structure in bits.

   ITER points to a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      /* Pointer member: compute its bounds once per slot.  */
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  /* Load the member into a temporary SSA name so bounds
	     can be searched for it.  */
	  tree temp = make_temp_ssa_name (type, NULL, "");
	  gimple *assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      /* Recurse into each field at its bit offset.  */
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    /* Bit offset = DECL_FIELD_BIT_OFFSET plus
	       DECL_FIELD_OFFSET (bytes) scaled to bits.  */
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Nothing to do for arrays with unknown or zero length.  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      /* Recurse into every element at its bit offset.  */
      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
/* Fill the HAVE_BOUND output bitmap with information about
   bounds required for an object of type TYPE.

   OFFS is used for recursive calls and holds the base
   offset of TYPE in the outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    /* A pointer occupies exactly one bound slot.  */
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      /* Recurse into each field at its bit offset.  */
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    /* Bit offset = DECL_FIELD_BIT_OFFSET plus
	       DECL_FIELD_OFFSET (bytes) scaled to bits.  */
	    HOST_WIDE_INT field_offs = 0;
	    if (DECL_FIELD_BIT_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      /* The object type is an array of complete type, i.e., other
	 than a flexible array.  */
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays with unknown or non-constant length and
	 zero-length arrays (max value of -1).  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      /* Recurse into every element at its bit offset.  */
      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
1741 /* Fill bitmap RES with information about bounds for
1742 type TYPE. See chkp_find_bound_slots_1 for more
1743 details. */
1744 void
1745 chkp_find_bound_slots (const_tree type, bitmap res)
1747 bitmap_clear (res);
1748 chkp_find_bound_slots_1 (type, res, 0);
1751 /* Return 1 if call to FNDECL should be instrumented
1752 and 0 otherwise. */
1754 static bool
1755 chkp_instrument_normal_builtin (tree fndecl)
1757 switch (DECL_FUNCTION_CODE (fndecl))
1759 case BUILT_IN_STRLEN:
1760 case BUILT_IN_STRCPY:
1761 case BUILT_IN_STRNCPY:
1762 case BUILT_IN_STPCPY:
1763 case BUILT_IN_STPNCPY:
1764 case BUILT_IN_STRCAT:
1765 case BUILT_IN_STRNCAT:
1766 case BUILT_IN_MEMCPY:
1767 case BUILT_IN_MEMPCPY:
1768 case BUILT_IN_MEMSET:
1769 case BUILT_IN_MEMMOVE:
1770 case BUILT_IN_BZERO:
1771 case BUILT_IN_STRCMP:
1772 case BUILT_IN_STRNCMP:
1773 case BUILT_IN_BCMP:
1774 case BUILT_IN_MEMCMP:
1775 case BUILT_IN_MEMCPY_CHK:
1776 case BUILT_IN_MEMPCPY_CHK:
1777 case BUILT_IN_MEMMOVE_CHK:
1778 case BUILT_IN_MEMSET_CHK:
1779 case BUILT_IN_STRCPY_CHK:
1780 case BUILT_IN_STRNCPY_CHK:
1781 case BUILT_IN_STPCPY_CHK:
1782 case BUILT_IN_STPNCPY_CHK:
1783 case BUILT_IN_STRCAT_CHK:
1784 case BUILT_IN_STRNCAT_CHK:
1785 case BUILT_IN_MALLOC:
1786 case BUILT_IN_CALLOC:
1787 case BUILT_IN_REALLOC:
1788 return 1;
1790 default:
1791 return 0;
/* Add bound arguments to the call statement pointed to by GSI.
   Also performs a replacement of user checker builtin calls
   with internal ones.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if a back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If the function decl is available then use it for the
     formal arguments list.  Otherwise use the function type.  */
  if (fndecl
      && DECL_ARGUMENTS (fndecl)
      && gimple_call_fntype (call) == TREE_TYPE (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  Each original argument is
     followed by its bounds argument(s) when required.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  */
      if (arg)
	{
	  if (use_fntype)
	    {
	      /* TYPE_ARG_TYPES chain: TREE_VALUE is the type;
		 void_type_node terminates the list.  */
	      if (TREE_VALUE (arg) != void_type_node)
		{
		  type = TREE_VALUE (arg);
		  arg = TREE_CHAIN (arg);
		}
	      else
		type = TREE_TYPE (call_arg);
	    }
	  else
	    {
	      /* DECL_ARGUMENTS chain: ARG is a PARM_DECL.  */
	      type = TREE_TYPE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	}
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	/* A pointer (or pass-by-reference) argument gets a single
	   bounds argument.  */
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  /* An aggregate with pointer members passed by value gets
	     one bounds argument per occupied pointer slot.  */
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds were added, keep the original call statement.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      /* In case of a type cast we should modify used function
	 type instead of using type of new fndecl.  */
      if (gimple_call_fntype (call) != TREE_TYPE (fndecl))
	{
	  tree type = gimple_call_fntype (call);
	  type = chkp_copy_function_type_adding_bounds (type);
	  gimple_call_set_fntype (new_call, type);
	}
      else
	gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* Replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA defs of the old statement at the new one
	 before replacing it.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
/* Return a constant static bounds var with the specified bounds
   LB and UB.  If such a var does not exist then a new var is
   created with the specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds: report
	 a graceful "not implemented" diagnostic if the name is
	 already taken by something else.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires %qs "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* The initializer is target-specific constant bounds data.  */
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
/* Generate code to make bounds with the specified lower bound LB
   and SIZE.  If AFTER is true then code is inserted after the
   position pointed to by ITER, otherwise code is inserted before
   the position pointed to by ITER.  If ITER is NULL then code is
   added to the entry block.  Return the SSA name holding the
   created bounds.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* Both operands must be valid gimple call arguments.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  /* Build the bndmk call producing the bounds.  */
  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER only matters when an explicit position was given.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
2126 /* Return var holding zero bounds. */
2127 tree
2128 chkp_get_zero_bounds_var (void)
2130 if (!chkp_zero_bounds_var)
2131 chkp_zero_bounds_var
2132 = chkp_make_static_const_bounds (0, -1,
2133 CHKP_ZERO_BOUNDS_VAR_NAME);
2134 return chkp_zero_bounds_var;
2137 /* Return var holding none bounds. */
2138 tree
2139 chkp_get_none_bounds_var (void)
2141 if (!chkp_none_bounds_var)
2142 chkp_none_bounds_var
2143 = chkp_make_static_const_bounds (-1, 0,
2144 CHKP_NONE_BOUNDS_VAR_NAME);
2145 return chkp_none_bounds_var;
2148 /* Return SSA_NAME used to represent zero bounds. */
2149 static tree
2150 chkp_get_zero_bounds (void)
2152 if (zero_bounds)
2153 return zero_bounds;
2155 if (dump_file && (dump_flags & TDF_DETAILS))
2156 fprintf (dump_file, "Creating zero bounds...");
2158 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2159 || flag_chkp_use_static_const_bounds > 0)
2161 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2162 gimple *stmt;
2164 zero_bounds = chkp_get_tmp_reg (NULL);
2165 stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
2166 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2168 else
2169 zero_bounds = chkp_make_bounds (integer_zero_node,
2170 integer_zero_node,
2171 NULL,
2172 false);
2174 return zero_bounds;
2177 /* Return SSA_NAME used to represent none bounds. */
2178 static tree
2179 chkp_get_none_bounds (void)
2181 if (none_bounds)
2182 return none_bounds;
2184 if (dump_file && (dump_flags & TDF_DETAILS))
2185 fprintf (dump_file, "Creating none bounds...");
2188 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2189 || flag_chkp_use_static_const_bounds > 0)
2191 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2192 gimple *stmt;
2194 none_bounds = chkp_get_tmp_reg (NULL);
2195 stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
2196 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2198 else
2199 none_bounds = chkp_make_bounds (integer_minus_one_node,
2200 build_int_cst (size_type_node, 2),
2201 NULL,
2202 false);
2204 return none_bounds;
2207 /* Return bounds to be used as a result of operation which
2208 should not create poiunter (e.g. MULT_EXPR). */
2209 static tree
2210 chkp_get_invalid_op_bounds (void)
2212 return chkp_get_zero_bounds ();
2215 /* Return bounds to be used for loads of non-pointer values. */
2216 static tree
2217 chkp_get_nonpointer_load_bounds (void)
2219 return chkp_get_zero_bounds ();
/* Return true if we may use a bndret call to get bounds for the
   pointer returned by CALL, and false otherwise.  The checks are
   ordered; each one settles a class of calls before the next.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  if (gimple_call_internal_p (call))
    {
      /* Among internal functions only IFN_VA_ARG is treated as
	 returning bounds.  */
      if (gimple_call_internal_fn (call) == IFN_VA_ARG)
	return true;
      return false;
    }

  /* Bounds-narrowing builtins return bounds.  */
  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  /* Calls already instrumented with bounds return bounds.  */
  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  /* Back-end builtins are never instrumented.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  /* Functions that cannot be instrumented do not return bounds.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      /* Only the instrumented subset of normal builtins
	 returns bounds.  */
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      /* Other builtins return bounds only when they are
	 always-inline and have an instrumented clone body.  */
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  /* All remaining calls are assumed to return bounds.  */
  return true;
}
2264 /* Build bounds returned by CALL. */
2265 static tree
2266 chkp_build_returned_bound (gcall *call)
2268 gimple_stmt_iterator gsi;
2269 tree bounds;
2270 gimple *stmt;
2271 tree fndecl = gimple_call_fndecl (call);
2272 unsigned int retflags;
2273 tree lhs = gimple_call_lhs (call);
2275 /* To avoid fixing alloca expands in targets we handle
2276 it separately. */
2277 if (fndecl
2278 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2279 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
2280 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2282 tree size = gimple_call_arg (call, 0);
2283 gimple_stmt_iterator iter = gsi_for_stmt (call);
2284 bounds = chkp_make_bounds (lhs, size, &iter, true);
2286 /* We know bounds returned by set_bounds builtin call. */
2287 else if (fndecl
2288 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2289 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
2291 tree lb = gimple_call_arg (call, 0);
2292 tree size = gimple_call_arg (call, 1);
2293 gimple_stmt_iterator iter = gsi_for_stmt (call);
2294 bounds = chkp_make_bounds (lb, size, &iter, true);
2296 /* Detect bounds initialization calls. */
2297 else if (fndecl
2298 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2299 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
2300 bounds = chkp_get_zero_bounds ();
2301 /* Detect bounds nullification calls. */
2302 else if (fndecl
2303 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2304 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
2305 bounds = chkp_get_none_bounds ();
2306 /* Detect bounds copy calls. */
2307 else if (fndecl
2308 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2309 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
2311 gimple_stmt_iterator iter = gsi_for_stmt (call);
2312 bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
2314 /* Do not use retbnd when returned bounds are equal to some
2315 of passed bounds. */
2316 else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
2317 && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
2319 gimple_stmt_iterator iter = gsi_for_stmt (call);
2320 unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
2321 if (gimple_call_with_bounds_p (call))
2323 for (argno = 0; argno < gimple_call_num_args (call); argno++)
2324 if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
2326 if (retarg)
2327 retarg--;
2328 else
2329 break;
2332 else
2333 argno = retarg;
2335 bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
2337 else if (chkp_call_returns_bounds_p (call)
2338 && BOUNDED_P (lhs))
2340 gcc_assert (TREE_CODE (lhs) == SSA_NAME);
2342 /* In general case build checker builtin call to
2343 obtain returned bounds. */
2344 stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
2345 gimple_call_lhs (call));
2346 chkp_mark_stmt (stmt);
2348 gsi = gsi_for_stmt (call);
2349 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
2351 bounds = chkp_get_tmp_reg (stmt);
2352 gimple_call_set_lhs (stmt, bounds);
2354 update_stmt (stmt);
2356 else
2357 bounds = chkp_get_zero_bounds ();
2359 if (dump_file && (dump_flags & TDF_DETAILS))
2361 fprintf (dump_file, "Built returned bounds (");
2362 print_generic_expr (dump_file, bounds);
2363 fprintf (dump_file, ") for call: ");
2364 print_gimple_stmt (dump_file, call, 0, TDF_VOPS | TDF_MEMSYMS);
2367 bounds = chkp_maybe_copy_and_register_bounds (lhs, bounds);
2369 return bounds;
2372 /* Return bounds used as returned by call
2373 which produced SSA name VAL. */
2374 gcall *
2375 chkp_retbnd_call_by_val (tree val)
2377 if (TREE_CODE (val) != SSA_NAME)
2378 return NULL;
2380 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2382 imm_use_iterator use_iter;
2383 use_operand_p use_p;
2384 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2385 if (chkp_gimple_call_builtin_p (USE_STMT (use_p), BUILT_IN_CHKP_BNDRET))
2386 return as_a <gcall *> (USE_STMT (use_p));
2388 return NULL;
2391 /* Check the next parameter for the given PARM is bounds
2392 and return it's default SSA_NAME (create if required). */
2393 static tree
2394 chkp_get_next_bounds_parm (tree parm)
2396 tree bounds = TREE_CHAIN (parm);
2397 gcc_assert (POINTER_BOUNDS_P (bounds));
2398 bounds = ssa_default_def (cfun, bounds);
2399 if (!bounds)
2401 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2402 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2404 return bounds;
2407 /* Return bounds to be used for input argument PARM. */
2408 static tree
2409 chkp_get_bound_for_parm (tree parm)
2411 tree decl = SSA_NAME_VAR (parm);
2412 tree bounds;
2414 gcc_assert (TREE_CODE (decl) == PARM_DECL);
2416 bounds = chkp_get_registered_bounds (parm);
2418 if (!bounds)
2419 bounds = chkp_get_registered_bounds (decl);
2421 if (!bounds)
2423 tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;
2425 /* For static chain param we return zero bounds
2426 because currently we do not check dereferences
2427 of this pointer. */
2428 if (cfun->static_chain_decl == decl)
2429 bounds = chkp_get_zero_bounds ();
2430 /* If non instrumented runtime is used then it may be useful
2431 to use zero bounds for input arguments of main
2432 function. */
2433 else if (flag_chkp_zero_input_bounds_for_main
2434 && id_equal (DECL_ASSEMBLER_NAME (orig_decl), "main"))
2435 bounds = chkp_get_zero_bounds ();
2436 else if (BOUNDED_P (parm))
2438 bounds = chkp_get_next_bounds_parm (decl);
2439 bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);
2441 if (dump_file && (dump_flags & TDF_DETAILS))
2443 fprintf (dump_file, "Built arg bounds (");
2444 print_generic_expr (dump_file, bounds);
2445 fprintf (dump_file, ") for arg: ");
2446 print_node (dump_file, "", decl, 0);
2449 else
2450 bounds = chkp_get_zero_bounds ();
2453 if (!chkp_get_registered_bounds (parm))
2454 bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);
2456 if (dump_file && (dump_flags & TDF_DETAILS))
2458 fprintf (dump_file, "Using bounds ");
2459 print_generic_expr (dump_file, bounds);
2460 fprintf (dump_file, " for parm ");
2461 print_generic_expr (dump_file, parm);
2462 fprintf (dump_file, " of type ");
2463 print_generic_expr (dump_file, TREE_TYPE (parm));
2464 fprintf (dump_file, ".\n");
2467 return bounds;
2470 /* Build and return CALL_EXPR for bndstx builtin with specified
2471 arguments. */
2472 tree
2473 chkp_build_bndldx_call (tree addr, tree ptr)
2475 tree fn = build1 (ADDR_EXPR,
2476 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2477 chkp_bndldx_fndecl);
2478 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2479 fn, 2, addr, ptr);
2480 CALL_WITH_BOUNDS_P (call) = true;
2481 return call;
2484 /* Insert code to load bounds for PTR located by ADDR.
2485 Code is inserted after position pointed by GSI.
2486 Loaded bounds are returned. */
2487 static tree
2488 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2490 gimple_seq seq;
2491 gimple *stmt;
2492 tree bounds;
2494 seq = NULL;
2496 addr = chkp_force_gimple_call_op (addr, &seq);
2497 ptr = chkp_force_gimple_call_op (ptr, &seq);
2499 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2500 chkp_mark_stmt (stmt);
2501 bounds = chkp_get_tmp_reg (stmt);
2502 gimple_call_set_lhs (stmt, bounds);
2504 gimple_seq_add_stmt (&seq, stmt);
2506 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2508 if (dump_file && (dump_flags & TDF_DETAILS))
2510 fprintf (dump_file, "Generated bndldx for pointer ");
2511 print_generic_expr (dump_file, ptr);
2512 fprintf (dump_file, ": ");
2513 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS | TDF_MEMSYMS);
2516 return bounds;
2519 /* Build and return CALL_EXPR for bndstx builtin with specified
2520 arguments. */
2521 tree
2522 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2524 tree fn = build1 (ADDR_EXPR,
2525 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2526 chkp_bndstx_fndecl);
2527 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2528 fn, 3, ptr, bounds, addr);
2529 CALL_WITH_BOUNDS_P (call) = true;
2530 return call;
2533 /* Insert code to store BOUNDS for PTR stored by ADDR.
2534 New statements are inserted after position pointed
2535 by GSI. */
2536 void
2537 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2538 gimple_stmt_iterator *gsi)
2540 gimple_seq seq;
2541 gimple *stmt;
2543 seq = NULL;
2545 addr = chkp_force_gimple_call_op (addr, &seq);
2546 ptr = chkp_force_gimple_call_op (ptr, &seq);
2548 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2549 chkp_mark_stmt (stmt);
2550 gimple_call_set_with_bounds (stmt, true);
2552 gimple_seq_add_stmt (&seq, stmt);
2554 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2556 if (dump_file && (dump_flags & TDF_DETAILS))
2558 fprintf (dump_file, "Generated bndstx for pointer store ");
2559 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2560 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2564 /* This function is called when call statement
2565 is inlined and therefore we can't use bndret
2566 for its LHS anymore. Function fixes bndret
2567 call using new RHS value if possible. */
2568 void
2569 chkp_fixup_inlined_call (tree lhs, tree rhs)
2571 tree addr, bounds;
2572 gcall *retbnd, *bndldx;
2574 if (!BOUNDED_P (lhs))
2575 return;
2577 /* Search for retbnd call. */
2578 retbnd = chkp_retbnd_call_by_val (lhs);
2579 if (!retbnd)
2580 return;
2582 /* Currently only handle cases when call is replaced
2583 with a memory access. In this case bndret call
2584 may be replaced with bndldx call. Otherwise we
2585 have to search for bounds which may cause wrong
2586 result due to various optimizations applied. */
2587 switch (TREE_CODE (rhs))
2589 case VAR_DECL:
2590 if (DECL_REGISTER (rhs))
2591 return;
2592 break;
2594 case MEM_REF:
2595 break;
2597 case ARRAY_REF:
2598 case COMPONENT_REF:
2599 addr = get_base_address (rhs);
2600 if (!DECL_P (addr)
2601 && TREE_CODE (addr) != MEM_REF)
2602 return;
2603 if (DECL_P (addr) && DECL_REGISTER (addr))
2604 return;
2605 break;
2607 default:
2608 return;
2611 /* Create a new statements sequence with bndldx call. */
2612 gimple_stmt_iterator gsi = gsi_for_stmt (retbnd);
2613 addr = build_fold_addr_expr (rhs);
2614 chkp_build_bndldx (addr, lhs, &gsi);
2615 bndldx = as_a <gcall *> (gsi_stmt (gsi));
2617 /* Remove bndret call. */
2618 bounds = gimple_call_lhs (retbnd);
2619 gsi = gsi_for_stmt (retbnd);
2620 gsi_remove (&gsi, true);
2622 /* Link new bndldx call. */
2623 gimple_call_set_lhs (bndldx, bounds);
2624 update_stmt (bndldx);
2627 /* Compute bounds for pointer NODE which was assigned in
2628 assignment statement ASSIGN. Return computed bounds. */
2629 static tree
2630 chkp_compute_bounds_for_assignment (tree node, gimple *assign)
2632 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
2633 tree rhs1 = gimple_assign_rhs1 (assign);
2634 tree bounds = NULL_TREE;
2635 gimple_stmt_iterator iter = gsi_for_stmt (assign);
2636 tree base = NULL;
2638 if (dump_file && (dump_flags & TDF_DETAILS))
2640 fprintf (dump_file, "Computing bounds for assignment: ");
2641 print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
2644 switch (rhs_code)
2646 case MEM_REF:
2647 case TARGET_MEM_REF:
2648 case COMPONENT_REF:
2649 case ARRAY_REF:
2650 /* We need to load bounds from the bounds table. */
2651 bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
2652 break;
2654 case VAR_DECL:
2655 case SSA_NAME:
2656 case ADDR_EXPR:
2657 case POINTER_PLUS_EXPR:
2658 case NOP_EXPR:
2659 case CONVERT_EXPR:
2660 case INTEGER_CST:
2661 /* Bounds are just propagated from RHS. */
2662 bounds = chkp_find_bounds (rhs1, &iter);
2663 base = rhs1;
2664 break;
2666 case VIEW_CONVERT_EXPR:
2667 /* Bounds are just propagated from RHS. */
2668 bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
2669 break;
2671 case PARM_DECL:
2672 if (BOUNDED_P (rhs1))
2674 /* We need to load bounds from the bounds table. */
2675 bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
2676 node, &iter);
2677 TREE_ADDRESSABLE (rhs1) = 1;
2679 else
2680 bounds = chkp_get_nonpointer_load_bounds ();
2681 break;
2683 case MINUS_EXPR:
2684 case PLUS_EXPR:
2685 case BIT_AND_EXPR:
2686 case BIT_IOR_EXPR:
2687 case BIT_XOR_EXPR:
2689 tree rhs2 = gimple_assign_rhs2 (assign);
2690 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2691 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2693 /* First we try to check types of operands. If it
2694 does not help then look at bound values.
2696 If some bounds are incomplete and other are
2697 not proven to be valid (i.e. also incomplete
2698 or invalid because value is not pointer) then
2699 resulting value is incomplete and will be
2700 recomputed later in chkp_finish_incomplete_bounds. */
2701 if (BOUNDED_P (rhs1)
2702 && !BOUNDED_P (rhs2))
2703 bounds = bnd1;
2704 else if (BOUNDED_P (rhs2)
2705 && !BOUNDED_P (rhs1)
2706 && rhs_code != MINUS_EXPR)
2707 bounds = bnd2;
2708 else if (chkp_incomplete_bounds (bnd1))
2709 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
2710 && !chkp_incomplete_bounds (bnd2))
2711 bounds = bnd2;
2712 else
2713 bounds = incomplete_bounds;
2714 else if (chkp_incomplete_bounds (bnd2))
2715 if (chkp_valid_bounds (bnd1)
2716 && !chkp_incomplete_bounds (bnd1))
2717 bounds = bnd1;
2718 else
2719 bounds = incomplete_bounds;
2720 else if (!chkp_valid_bounds (bnd1))
2721 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
2722 bounds = bnd2;
2723 else if (bnd2 == chkp_get_zero_bounds ())
2724 bounds = bnd2;
2725 else
2726 bounds = bnd1;
2727 else if (!chkp_valid_bounds (bnd2))
2728 bounds = bnd1;
2729 else
2730 /* Seems both operands may have valid bounds
2731 (e.g. pointer minus pointer). In such case
2732 use default invalid op bounds. */
2733 bounds = chkp_get_invalid_op_bounds ();
2735 base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
2737 break;
2739 case BIT_NOT_EXPR:
2740 case NEGATE_EXPR:
2741 case LSHIFT_EXPR:
2742 case RSHIFT_EXPR:
2743 case LROTATE_EXPR:
2744 case RROTATE_EXPR:
2745 case EQ_EXPR:
2746 case NE_EXPR:
2747 case LT_EXPR:
2748 case LE_EXPR:
2749 case GT_EXPR:
2750 case GE_EXPR:
2751 case MULT_EXPR:
2752 case RDIV_EXPR:
2753 case TRUNC_DIV_EXPR:
2754 case FLOOR_DIV_EXPR:
2755 case CEIL_DIV_EXPR:
2756 case ROUND_DIV_EXPR:
2757 case TRUNC_MOD_EXPR:
2758 case FLOOR_MOD_EXPR:
2759 case CEIL_MOD_EXPR:
2760 case ROUND_MOD_EXPR:
2761 case EXACT_DIV_EXPR:
2762 case FIX_TRUNC_EXPR:
2763 case FLOAT_EXPR:
2764 case REALPART_EXPR:
2765 case IMAGPART_EXPR:
2766 /* No valid bounds may be produced by these exprs. */
2767 bounds = chkp_get_invalid_op_bounds ();
2768 break;
2770 case COND_EXPR:
2772 tree val1 = gimple_assign_rhs2 (assign);
2773 tree val2 = gimple_assign_rhs3 (assign);
2774 tree bnd1 = chkp_find_bounds (val1, &iter);
2775 tree bnd2 = chkp_find_bounds (val2, &iter);
2776 gimple *stmt;
2778 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2779 bounds = incomplete_bounds;
2780 else if (bnd1 == bnd2)
2781 bounds = bnd1;
2782 else
2784 rhs1 = unshare_expr (rhs1);
2786 bounds = chkp_get_tmp_reg (assign);
2787 stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
2788 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2790 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2791 chkp_mark_invalid_bounds (bounds);
2794 break;
2796 case MAX_EXPR:
2797 case MIN_EXPR:
2799 tree rhs2 = gimple_assign_rhs2 (assign);
2800 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2801 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2803 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2804 bounds = incomplete_bounds;
2805 else if (bnd1 == bnd2)
2806 bounds = bnd1;
2807 else
2809 gimple *stmt;
2810 tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
2811 boolean_type_node, rhs1, rhs2);
2812 bounds = chkp_get_tmp_reg (assign);
2813 stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);
2815 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2817 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2818 chkp_mark_invalid_bounds (bounds);
2821 break;
2823 default:
2824 bounds = chkp_get_zero_bounds ();
2825 warning (0, "pointer bounds were lost due to unexpected expression %s",
2826 get_tree_code_name (rhs_code));
2829 gcc_assert (bounds);
2831 /* We may reuse bounds of other pointer we copy/modify. But it is not
2832 allowed for abnormal ssa names. If we produced a pointer using
2833 abnormal ssa name, we better make a bounds copy to avoid coalescing
2834 issues. */
2835 if (base
2836 && TREE_CODE (base) == SSA_NAME
2837 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
2839 gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
2840 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2841 bounds = gimple_assign_lhs (stmt);
2844 if (node)
2845 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2847 return bounds;
2850 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2852 There are just few statement codes allowed: NOP (for default ssa names),
2853 ASSIGN, CALL, PHI, ASM.
2855 Return computed bounds. */
2856 static tree
2857 chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
2858 gphi_iterator *iter)
2860 tree var, bounds;
2861 enum gimple_code code = gimple_code (def_stmt);
2862 gphi *stmt;
2864 if (dump_file && (dump_flags & TDF_DETAILS))
2866 fprintf (dump_file, "Searching for bounds for node: ");
2867 print_generic_expr (dump_file, node);
2869 fprintf (dump_file, " using its definition: ");
2870 print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS | TDF_MEMSYMS);
2873 switch (code)
2875 case GIMPLE_NOP:
2876 var = SSA_NAME_VAR (node);
2877 switch (TREE_CODE (var))
2879 case PARM_DECL:
2880 bounds = chkp_get_bound_for_parm (node);
2881 break;
2883 case VAR_DECL:
2884 /* For uninitialized pointers use none bounds. */
2885 bounds = chkp_get_none_bounds ();
2886 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2887 break;
2889 case RESULT_DECL:
2891 tree base_type;
2893 gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2895 base_type = TREE_TYPE (TREE_TYPE (node));
2897 gcc_assert (TYPE_SIZE (base_type)
2898 && TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2899 && tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2901 bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2902 NULL, false);
2903 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2905 break;
2907 default:
2908 if (dump_file && (dump_flags & TDF_DETAILS))
2910 fprintf (dump_file, "Unexpected var with no definition\n");
2911 print_generic_expr (dump_file, var);
2913 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2914 get_tree_code_name (TREE_CODE (var)));
2916 break;
2918 case GIMPLE_ASSIGN:
2919 bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2920 break;
2922 case GIMPLE_CALL:
2923 bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
2924 break;
2926 case GIMPLE_PHI:
2927 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
2928 if (SSA_NAME_VAR (node))
2929 var = chkp_get_bounds_var (SSA_NAME_VAR (node));
2930 else
2931 var = make_temp_ssa_name (pointer_bounds_type_node,
2932 NULL,
2933 CHKP_BOUND_TMP_NAME);
2934 else
2935 var = chkp_get_tmp_var ();
2936 stmt = create_phi_node (var, gimple_bb (def_stmt));
2937 bounds = gimple_phi_result (stmt);
2938 *iter = gsi_for_phi (stmt);
2940 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2942 /* Created bounds do not have all phi args computed and
2943 therefore we do not know if there is a valid source
2944 of bounds for that node. Therefore we mark bounds
2945 as incomplete and then recompute them when all phi
2946 args are computed. */
2947 chkp_register_incomplete_bounds (bounds, node);
2948 break;
2950 case GIMPLE_ASM:
2951 bounds = chkp_get_zero_bounds ();
2952 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2953 break;
2955 default:
2956 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2957 gimple_code_name[code]);
2960 return bounds;
2963 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2964 tree
2965 chkp_build_make_bounds_call (tree lower_bound, tree size)
2967 tree call = build1 (ADDR_EXPR,
2968 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2969 chkp_bndmk_fndecl);
2970 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2971 call, 2, lower_bound, size);
2974 /* Create static bounds var of specfified OBJ which is
2975 is either VAR_DECL or string constant. */
2976 static tree
2977 chkp_make_static_bounds (tree obj)
2979 static int string_id = 1;
2980 static int var_id = 1;
2981 tree *slot;
2982 const char *var_name;
2983 char *bnd_var_name;
2984 tree bnd_var;
2986 /* First check if we already have required var. */
2987 if (chkp_static_var_bounds)
2989 /* For vars we use assembler name as a key in
2990 chkp_static_var_bounds map. It allows to
2991 avoid duplicating bound vars for decls
2992 sharing assembler name. */
2993 if (VAR_P (obj))
2995 tree name = DECL_ASSEMBLER_NAME (obj);
2996 slot = chkp_static_var_bounds->get (name);
2997 if (slot)
2998 return *slot;
3000 else
3002 slot = chkp_static_var_bounds->get (obj);
3003 if (slot)
3004 return *slot;
3008 /* Build decl for bounds var. */
3009 if (VAR_P (obj))
3011 if (DECL_IGNORED_P (obj))
3013 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
3014 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
3016 else
3018 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
3020 /* For hidden symbols we want to skip first '*' char. */
3021 if (*var_name == '*')
3022 var_name++;
3024 bnd_var_name = (char *) xmalloc (strlen (var_name)
3025 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
3026 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
3027 strcat (bnd_var_name, var_name);
3030 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3031 get_identifier (bnd_var_name),
3032 pointer_bounds_type_node);
3034 /* Address of the obj will be used as lower bound. */
3035 TREE_ADDRESSABLE (obj) = 1;
3037 else
3039 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
3040 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
3042 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3043 get_identifier (bnd_var_name),
3044 pointer_bounds_type_node);
3047 free (bnd_var_name);
3049 TREE_PUBLIC (bnd_var) = 0;
3050 TREE_USED (bnd_var) = 1;
3051 TREE_READONLY (bnd_var) = 0;
3052 TREE_STATIC (bnd_var) = 1;
3053 TREE_ADDRESSABLE (bnd_var) = 0;
3054 DECL_ARTIFICIAL (bnd_var) = 1;
3055 DECL_COMMON (bnd_var) = 1;
3056 DECL_COMDAT (bnd_var) = 1;
3057 DECL_READ_P (bnd_var) = 1;
3058 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
3059 /* Force output similar to constant bounds.
3060 See chkp_make_static_const_bounds. */
3061 varpool_node::get_create (bnd_var)->force_output = 1;
3062 /* Mark symbol as requiring bounds initialization. */
3063 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
3064 varpool_node::finalize_decl (bnd_var);
3066 /* Add created var to the map to use it for other references
3067 to obj. */
3068 if (!chkp_static_var_bounds)
3069 chkp_static_var_bounds = new hash_map<tree, tree>;
3071 if (VAR_P (obj))
3073 tree name = DECL_ASSEMBLER_NAME (obj);
3074 chkp_static_var_bounds->put (name, bnd_var);
3076 else
3077 chkp_static_var_bounds->put (obj, bnd_var);
3079 return bnd_var;
3082 /* When var has incomplete type we cannot get size to
3083 compute its bounds. In such cases we use checker
3084 builtin call which determines object size at runtime. */
3085 static tree
3086 chkp_generate_extern_var_bounds (tree var)
3088 tree bounds, size_reloc, lb, size, max_size, cond;
3089 gimple_stmt_iterator gsi;
3090 gimple_seq seq = NULL;
3091 gimple *stmt;
3093 /* If instrumentation is not enabled for vars having
3094 incomplete type then just return zero bounds to avoid
3095 checks for this var. */
3096 if (!flag_chkp_incomplete_type)
3097 return chkp_get_zero_bounds ();
3099 if (dump_file && (dump_flags & TDF_DETAILS))
3101 fprintf (dump_file, "Generating bounds for extern symbol '");
3102 print_generic_expr (dump_file, var);
3103 fprintf (dump_file, "'\n");
3106 stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
3108 size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
3109 gimple_call_set_lhs (stmt, size_reloc);
3111 gimple_seq_add_stmt (&seq, stmt);
3113 lb = chkp_build_addr_expr (var);
3114 size = make_ssa_name (chkp_get_size_tmp_var ());
3116 if (flag_chkp_zero_dynamic_size_as_infinite)
3118 /* We should check that size relocation was resolved.
3119 If it was not then use maximum possible size for the var. */
3120 max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
3121 fold_convert (chkp_uintptr_type, lb));
3122 max_size = chkp_force_gimple_call_op (max_size, &seq);
3124 cond = build2 (NE_EXPR, boolean_type_node,
3125 size_reloc, integer_zero_node);
3126 stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
3127 gimple_seq_add_stmt (&seq, stmt);
3129 else
3131 stmt = gimple_build_assign (size, size_reloc);
3132 gimple_seq_add_stmt (&seq, stmt);
3135 gsi = gsi_start_bb (chkp_get_entry_block ());
3136 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3138 bounds = chkp_make_bounds (lb, size, &gsi, true);
3140 return bounds;
3143 /* Return 1 if TYPE has fields with zero size or fields
3144 marked with chkp_variable_size attribute. */
3145 bool
3146 chkp_variable_size_type (tree type)
3148 bool res = false;
3149 tree field;
3151 if (RECORD_OR_UNION_TYPE_P (type))
3152 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3154 if (TREE_CODE (field) == FIELD_DECL)
3155 res = res
3156 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3157 || chkp_variable_size_type (TREE_TYPE (field));
3159 else
3160 res = !TYPE_SIZE (type)
3161 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3162 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3164 return res;
3167 /* Compute and return bounds for address of DECL which is
3168 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
3169 static tree
3170 chkp_get_bounds_for_decl_addr (tree decl)
3172 tree bounds;
3174 gcc_assert (VAR_P (decl)
3175 || TREE_CODE (decl) == PARM_DECL
3176 || TREE_CODE (decl) == RESULT_DECL);
3178 bounds = chkp_get_registered_addr_bounds (decl);
3180 if (bounds)
3181 return bounds;
3183 if (dump_file && (dump_flags & TDF_DETAILS))
3185 fprintf (dump_file, "Building bounds for address of decl ");
3186 print_generic_expr (dump_file, decl);
3187 fprintf (dump_file, "\n");
3190 /* Use zero bounds if size is unknown and checks for
3191 unknown sizes are restricted. */
3192 if ((!DECL_SIZE (decl)
3193 || (chkp_variable_size_type (TREE_TYPE (decl))
3194 && (TREE_STATIC (decl)
3195 || DECL_EXTERNAL (decl)
3196 || TREE_PUBLIC (decl))))
3197 && !flag_chkp_incomplete_type)
3198 return chkp_get_zero_bounds ();
3200 if (flag_chkp_use_static_bounds
3201 && VAR_P (decl)
3202 && (TREE_STATIC (decl)
3203 || DECL_EXTERNAL (decl)
3204 || TREE_PUBLIC (decl))
3205 && !DECL_THREAD_LOCAL_P (decl))
3207 tree bnd_var = chkp_make_static_bounds (decl);
3208 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3209 gimple *stmt;
3211 bounds = chkp_get_tmp_reg (NULL);
3212 stmt = gimple_build_assign (bounds, bnd_var);
3213 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3215 else if (!DECL_SIZE (decl)
3216 || (chkp_variable_size_type (TREE_TYPE (decl))
3217 && (TREE_STATIC (decl)
3218 || DECL_EXTERNAL (decl)
3219 || TREE_PUBLIC (decl))))
3221 gcc_assert (VAR_P (decl));
3222 bounds = chkp_generate_extern_var_bounds (decl);
3224 else
3226 tree lb = chkp_build_addr_expr (decl);
3227 bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3230 return bounds;
3233 /* Compute and return bounds for constant string. */
3234 static tree
3235 chkp_get_bounds_for_string_cst (tree cst)
3237 tree bounds;
3238 tree lb;
3239 tree size;
3241 gcc_assert (TREE_CODE (cst) == STRING_CST);
3243 bounds = chkp_get_registered_bounds (cst);
3245 if (bounds)
3246 return bounds;
3248 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3249 || flag_chkp_use_static_const_bounds > 0)
3251 tree bnd_var = chkp_make_static_bounds (cst);
3252 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3253 gimple *stmt;
3255 bounds = chkp_get_tmp_reg (NULL);
3256 stmt = gimple_build_assign (bounds, bnd_var);
3257 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3259 else
3261 lb = chkp_build_addr_expr (cst);
3262 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3263 bounds = chkp_make_bounds (lb, size, NULL, false);
3266 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3268 return bounds;
3271 /* Generate code to instersect bounds BOUNDS1 and BOUNDS2 and
3272 return the result. if ITER is not NULL then Code is inserted
3273 before position pointed by ITER. Otherwise code is added to
3274 entry block. */
3275 static tree
3276 chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
3278 if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
3279 return bounds2 ? bounds2 : bounds1;
3280 else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
3281 return bounds1;
3282 else
3284 gimple_seq seq;
3285 gimple *stmt;
3286 tree bounds;
3288 seq = NULL;
3290 stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
3291 chkp_mark_stmt (stmt);
3293 bounds = chkp_get_tmp_reg (stmt);
3294 gimple_call_set_lhs (stmt, bounds);
3296 gimple_seq_add_stmt (&seq, stmt);
3298 /* We are probably doing narrowing for constant expression.
3299 In such case iter may be undefined. */
3300 if (!iter)
3302 gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
3303 iter = &gsi;
3304 gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
3306 else
3307 gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
3309 if (dump_file && (dump_flags & TDF_DETAILS))
3311 fprintf (dump_file, "Bounds intersection: ");
3312 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
3313 fprintf (dump_file, " inserted before statement: ");
3314 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
3315 TDF_VOPS|TDF_MEMSYMS);
3318 return bounds;
3322 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3323 and 0 othersize. REF is reference to the field. */
3325 static bool
3326 chkp_may_narrow_to_field (tree ref, tree field)
3328 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3329 && tree_to_uhwi (DECL_SIZE (field)) != 0
3330 && !(flag_chkp_flexible_struct_trailing_arrays
3331 && array_at_struct_end_p (ref))
3332 && (!DECL_FIELD_OFFSET (field)
3333 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3334 && (!DECL_FIELD_BIT_OFFSET (field)
3335 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3336 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3337 && !chkp_variable_size_type (TREE_TYPE (field));
3340 /* Return 1 if bounds for FIELD should be narrowed to
3341 field's own size. REF is reference to the field. */
3343 static bool
3344 chkp_narrow_bounds_for_field (tree ref, tree field)
3346 HOST_WIDE_INT offs;
3347 HOST_WIDE_INT bit_offs;
3349 if (!chkp_may_narrow_to_field (ref, field))
3350 return false;
3352 /* Access to compiler generated fields should not cause
3353 bounds narrowing. */
3354 if (DECL_ARTIFICIAL (field))
3355 return false;
3357 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3358 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3360 return (flag_chkp_narrow_bounds
3361 && (flag_chkp_first_field_has_own_bounds
3362 || offs
3363 || bit_offs));
3366 /* Perform narrowing for BOUNDS of an INNER reference. Shift boundary
3367 by OFFSET bytes and limit to SIZE bytes. Newly created statements are
3368 added to ITER. */
3370 static tree
3371 chkp_narrow_size_and_offset (tree bounds, tree inner, tree offset,
3372 tree size, gimple_stmt_iterator *iter)
3374 tree addr = chkp_build_addr_expr (unshare_expr (inner));
3375 tree t = TREE_TYPE (addr);
3377 gimple *stmt = gimple_build_assign (NULL_TREE, addr);
3378 addr = make_temp_ssa_name (t, stmt, CHKP_BOUND_TMP_NAME);
3379 gimple_assign_set_lhs (stmt, addr);
3380 gsi_insert_seq_before (iter, stmt, GSI_SAME_STMT);
3382 stmt = gimple_build_assign (NULL_TREE, POINTER_PLUS_EXPR, addr, offset);
3383 tree shifted = make_temp_ssa_name (t, stmt, CHKP_BOUND_TMP_NAME);
3384 gimple_assign_set_lhs (stmt, shifted);
3385 gsi_insert_seq_before (iter, stmt, GSI_SAME_STMT);
3387 tree bounds2 = chkp_make_bounds (shifted, size, iter, false);
3389 return chkp_intersect_bounds (bounds, bounds2, iter);
3392 /* Perform narrowing for BOUNDS using bounds computed for field
3393 access COMPONENT. ITER meaning is the same as for
3394 chkp_intersect_bounds. */
3396 static tree
3397 chkp_narrow_bounds_to_field (tree bounds, tree component,
3398 gimple_stmt_iterator *iter)
3400 tree field = TREE_OPERAND (component, 1);
3401 tree size = DECL_SIZE_UNIT (field);
3402 tree field_ptr = chkp_build_addr_expr (component);
3403 tree field_bounds;
3405 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3407 return chkp_intersect_bounds (field_bounds, bounds, iter);
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE output parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS output parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR
	 || TREE_CODE (var) == BIT_FIELD_REF)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all node to nodes vector
     in reversed order.  After the loop nodes[0] is the base
     object and nodes[len - 1] is NODE itself.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = ((TREE_CODE (node) == COMPONENT_REF
		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)))
	       || TREE_CODE (node) == BIT_FIELD_REF);
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      /* A dereference is never provably in bounds.  */
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      gcc_assert (VAR_P (var)
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);

      /* For hard register cases chkp_build_addr_expr returns INTEGER_CST
	 and later on chkp_find_bounds will fail to find proper bounds.
	 In order to avoid that, we find/create bounds right aways using
	 the var itself.  */
      if (VAR_P (var) && DECL_HARD_REGISTER (var))
	*bounds = chkp_make_addressed_object_bounds (var, iter);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
     bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  *safe = false;
	  array_ref_found = true;
	  /* Rule 1: the leftmost array ref wins; narrow to the last
	     component seen before it (if that is allowed).  */
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (var,
					       TREE_OPERAND (last_comp, 1))))
	    {
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  /* Rule 2: remember the rightmost suitable component ref.  */
	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (var, field))
	    comp_to_narrow = var;
	  last_comp = var;

	  /* In innermost-array mode narrow eagerly at each array-typed
	     field and reset any pending component narrowing.  */
	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == BIT_FIELD_REF)
	{
	  if (flag_chkp_narrow_bounds && bounds)
	    {
	      tree offset, size;
	      chkp_parse_bit_field_ref (var, UNKNOWN_LOCATION, &offset, &size);
	      *bounds
		= chkp_narrow_size_and_offset (*bounds, TREE_OPERAND (var, 0),
					       offset, size, iter);
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  /* Apply the narrowing chosen by rule 1 or rule 2 above.  */
  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
3573 /* Parse BIT_FIELD_REF to a NODE for a given location LOC. Return OFFSET
3574 and SIZE in bytes. */
3576 static
3577 void chkp_parse_bit_field_ref (tree node, location_t loc, tree *offset,
3578 tree *size)
3580 tree bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3581 tree offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3582 tree rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3583 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3585 tree s = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3586 s = size_binop_loc (loc, PLUS_EXPR, s, rem);
3587 s = size_binop_loc (loc, CEIL_DIV_EXPR, s, bpu);
3588 s = fold_convert (size_type_node, s);
3590 *offset = offs;
3591 *size = s;
/* Compute and return bounds for address of OBJ.  Dispatches on the
   tree code of OBJ and memoizes the result via the address-bounds
   registry, so repeated queries for the same object are cheap.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  /* Already computed earlier: reuse.  */
  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Let the reference parser compute (possibly narrowed)
	   bounds for the accessed component.  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Code addresses get zero bounds (never checked).  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Bounds of a complex part are the bounds of the whole.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  /* Memoize for subsequent lookups.  */
  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3663 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3664 to compute bounds if required. Computed bounds should be available at
3665 position pointed by ITER.
3667 If PTR_SRC is NULL_TREE then pointer definition is identified.
3669 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3670 PTR. If PTR is a any memory reference then ITER points to a statement
3671 after which bndldx will be inserterd. In both cases ITER will be updated
3672 to point to the inserted bndldx statement. */
3674 static tree
3675 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3677 tree addr = NULL_TREE;
3678 tree bounds = NULL_TREE;
3680 if (!ptr_src)
3681 ptr_src = ptr;
3683 bounds = chkp_get_registered_bounds (ptr_src);
3685 if (bounds)
3686 return bounds;
3688 switch (TREE_CODE (ptr_src))
3690 case MEM_REF:
3691 case VAR_DECL:
3692 if (BOUNDED_P (ptr_src))
3693 if (VAR_P (ptr) && DECL_REGISTER (ptr))
3694 bounds = chkp_get_zero_bounds ();
3695 else
3697 addr = chkp_build_addr_expr (ptr_src);
3698 bounds = chkp_build_bndldx (addr, ptr, iter);
3700 else
3701 bounds = chkp_get_nonpointer_load_bounds ();
3702 break;
3704 case ARRAY_REF:
3705 case COMPONENT_REF:
3706 addr = get_base_address (ptr_src);
3707 if (VAR_P (addr) && DECL_HARD_REGISTER (addr))
3709 bounds = chkp_get_zero_bounds ();
3710 break;
3712 if (DECL_P (addr)
3713 || TREE_CODE (addr) == MEM_REF
3714 || TREE_CODE (addr) == TARGET_MEM_REF)
3716 if (BOUNDED_P (ptr_src))
3717 if (VAR_P (ptr) && DECL_REGISTER (ptr))
3718 bounds = chkp_get_zero_bounds ();
3719 else
3721 addr = chkp_build_addr_expr (ptr_src);
3722 bounds = chkp_build_bndldx (addr, ptr, iter);
3724 else
3725 bounds = chkp_get_nonpointer_load_bounds ();
3727 else
3729 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3730 bounds = chkp_find_bounds (addr, iter);
3732 break;
3734 case PARM_DECL:
3735 /* Handled above but failed. */
3736 bounds = chkp_get_invalid_op_bounds ();
3737 break;
3739 case TARGET_MEM_REF:
3740 addr = chkp_build_addr_expr (ptr_src);
3741 bounds = chkp_build_bndldx (addr, ptr, iter);
3742 break;
3744 case SSA_NAME:
3745 bounds = chkp_get_registered_bounds (ptr_src);
3746 if (!bounds)
3748 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3749 gphi_iterator phi_iter;
3751 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3753 gcc_assert (bounds);
3755 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3757 unsigned i;
3759 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3761 tree arg = gimple_phi_arg_def (def_phi, i);
3762 tree arg_bnd;
3763 gphi *phi_bnd;
3765 arg_bnd = chkp_find_bounds (arg, NULL);
3767 /* chkp_get_bounds_by_definition created new phi
3768 statement and phi_iter points to it.
3770 Previous call to chkp_find_bounds could create
3771 new basic block and therefore change phi statement
3772 phi_iter points to. */
3773 phi_bnd = phi_iter.phi ();
3775 add_phi_arg (phi_bnd, arg_bnd,
3776 gimple_phi_arg_edge (def_phi, i),
3777 UNKNOWN_LOCATION);
3780 /* If all bound phi nodes have their arg computed
3781 then we may finish its computation. See
3782 chkp_finish_incomplete_bounds for more details. */
3783 if (chkp_may_finish_incomplete_bounds ())
3784 chkp_finish_incomplete_bounds ();
3787 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3788 || chkp_incomplete_bounds (bounds));
3790 break;
3792 case ADDR_EXPR:
3793 case WITH_SIZE_EXPR:
3794 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3795 break;
3797 case INTEGER_CST:
3798 case COMPLEX_CST:
3799 case VECTOR_CST:
3800 if (integer_zerop (ptr_src))
3801 bounds = chkp_get_none_bounds ();
3802 else
3803 bounds = chkp_get_invalid_op_bounds ();
3804 break;
3806 default:
3807 if (dump_file && (dump_flags & TDF_DETAILS))
3809 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3810 get_tree_code_name (TREE_CODE (ptr_src)));
3811 print_node (dump_file, "", ptr_src, 0);
3813 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3814 get_tree_code_name (TREE_CODE (ptr_src)));
3817 if (!bounds)
3819 if (dump_file && (dump_flags & TDF_DETAILS))
3821 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3822 print_node (dump_file, "", ptr_src, 0);
3824 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3827 return bounds;
/* Normal case for bounds search without forced narrowing.
   Thin wrapper around chkp_find_bounds_1 with no load source;
   statements needed to compute bounds are inserted at *ITER.  */
static tree
chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
}
/* Search bounds for pointer PTR loaded from PTR_SRC
   by statement *ITER points to.  Thin wrapper around
   chkp_find_bounds_1 with an explicit load source.  */
static tree
chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, ptr_src, iter);
}
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.  Recurses through records,
   unions and arrays; ARG is passed through to HANDLER unchanged.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    /* A pointer itself: hand the pair directly to the handler.  */
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  /* Recurse into each initialized field holding pointers.  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (field && chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	/* Field-by-field copy between two objects of the same type.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  /* Range initializer: same VAL for every index
		     in [lo_index, hi_index].  */
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  /* An explicit index restarts the running position;
		     otherwise elements are consecutive.  */
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		   get_tree_code_name (TREE_CODE (type)));
}
3944 /* Add code to copy bounds for assignment of RHS to LHS.
3945 ARG is an iterator pointing ne code position. */
3946 static void
3947 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3949 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3950 tree bounds = chkp_find_bounds (rhs, iter);
3951 tree addr = chkp_build_addr_expr(lhs);
3953 chkp_build_bndstx (addr, rhs, bounds, iter);
/* Emit static bound initializers and size vars.  Builds 'P' priority
   constructors initializing bounds of statically initialized pointers
   (batched so no single ctor exceeds MAX_STMTS_IN_STATIC_CHKP_CTOR
   statements) and 'B' priority constructors for static bounds vars,
   then releases the per-file bounds maps.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  /* Do not emit anything for an erroneous compilation.  */
  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* Flush a full batch into its own constructor.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  /* Flush the last (partial) batch.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  /* Release per-file state.  */
  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
4024 /* An instrumentation function which is called for each statement
4025 having memory access we want to instrument. It inserts check
4026 code and bounds copy code.
4028 ITER points to statement to instrument.
4030 NODE holds memory access in statement to check.
4032 LOC holds the location information for statement.
4034 DIRFLAGS determines whether access is read or write.
4036 ACCESS_OFFS should be added to address used in NODE
4037 before check.
4039 ACCESS_SIZE holds size of checked access.
4041 SAFE indicates if NODE access is safe and should not be
4042 checked. */
4043 static void
4044 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
4045 location_t loc, tree dirflag,
4046 tree access_offs, tree access_size,
4047 bool safe)
4049 tree node_type = TREE_TYPE (node);
4050 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
4051 tree addr_first = NULL_TREE; /* address of the first accessed byte */
4052 tree addr_last = NULL_TREE; /* address of the last accessed byte */
4053 tree ptr = NULL_TREE; /* a pointer used for dereference */
4054 tree bounds = NULL_TREE;
4055 bool reg_store = false;
4057 /* We do not need instrumentation for clobbers. */
4058 if (dirflag == integer_one_node
4059 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
4060 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
4061 return;
4063 switch (TREE_CODE (node))
4065 case ARRAY_REF:
4066 case COMPONENT_REF:
4068 bool bitfield;
4069 tree elt;
4071 if (safe)
4073 /* We are not going to generate any checks, so do not
4074 generate bounds as well. */
4075 addr_first = chkp_build_addr_expr (node);
4076 break;
4079 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
4080 &bitfield, &bounds, iter, false);
4082 /* Break if there is no dereference and operation is safe. */
4084 if (bitfield)
4086 tree field = TREE_OPERAND (node, 1);
4088 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
4089 size = DECL_SIZE_UNIT (field);
4091 if (elt)
4092 elt = chkp_build_addr_expr (elt);
4093 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
4094 addr_first = fold_build_pointer_plus_loc (loc,
4095 addr_first,
4096 byte_position (field));
4098 else
4099 addr_first = chkp_build_addr_expr (node);
4101 break;
4103 case INDIRECT_REF:
4104 ptr = TREE_OPERAND (node, 0);
4105 addr_first = ptr;
4106 break;
4108 case MEM_REF:
4109 ptr = TREE_OPERAND (node, 0);
4110 addr_first = chkp_build_addr_expr (node);
4111 break;
4113 case TARGET_MEM_REF:
4114 ptr = TMR_BASE (node);
4115 addr_first = chkp_build_addr_expr (node);
4116 break;
4118 case ARRAY_RANGE_REF:
4119 printf("ARRAY_RANGE_REF\n");
4120 debug_gimple_stmt(gsi_stmt(*iter));
4121 debug_tree(node);
4122 gcc_unreachable ();
4123 break;
4125 case BIT_FIELD_REF:
4127 tree offset, size;
4129 gcc_assert (!access_offs);
4130 gcc_assert (!access_size);
4132 chkp_parse_bit_field_ref (node, loc, &offset, &size);
4134 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
4135 dirflag, offset, size, safe);
4136 return;
4138 break;
4140 case VAR_DECL:
4141 case RESULT_DECL:
4142 case PARM_DECL:
4143 if (dirflag != integer_one_node
4144 || DECL_REGISTER (node))
4145 return;
4147 safe = true;
4148 addr_first = chkp_build_addr_expr (node);
4149 break;
4151 default:
4152 return;
4155 /* If addr_last was not computed then use (addr_first + size - 1)
4156 expression to compute it. */
4157 if (!addr_last)
4159 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
4160 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
4163 /* Shift both first_addr and last_addr by access_offs if specified. */
4164 if (access_offs)
4166 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
4167 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
4170 if (dirflag == integer_one_node)
4172 tree base = get_base_address (node);
4173 if (VAR_P (base) && DECL_HARD_REGISTER (base))
4174 reg_store = true;
4177 /* Generate bndcl/bndcu checks if memory access is not safe. */
4178 if (!safe)
4180 gimple_stmt_iterator stmt_iter = *iter;
4182 if (!bounds)
4183 bounds = chkp_find_bounds (ptr, iter);
4185 chkp_check_mem_access (addr_first, addr_last, bounds,
4186 stmt_iter, loc, dirflag);
4189 /* We need to store bounds in case pointer is stored. */
4190 if (dirflag == integer_one_node
4191 && !reg_store
4192 && chkp_type_has_pointer (node_type)
4193 && flag_chkp_store_bounds)
4195 gimple *stmt = gsi_stmt (*iter);
4196 tree rhs1 = gimple_assign_rhs1 (stmt);
4197 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4199 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
4200 chkp_walk_pointer_assignments (node, rhs1, iter,
4201 chkp_copy_bounds_for_elem);
4202 else
4204 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
4205 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.  Newly created bndldx,
   bndstx and bndret calls also get cgraph edges cloned from EDGE.  */
void
chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.
     The walk above inserted them before ASSIGN, so scan backwards from
     the iterator's current position until we are back at ASSIGN.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple *stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  /* Only checker builtins are expected here.  */
	  gcc_assert (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDSTX)
		      || chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDLDX)
		      || chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET));

	  new_edge = edge->caller->create_edge (callee,
						as_a <gcall *> (stmt),
						edge->count,
						edge->frequency);
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      gsi_prev (&iter);
    }
}
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws: statements inserted after a block-ending statement
   are moved onto the fallthru edge.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Statements following a block-ending statement are the
	   misplaced ones.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* Move every trailing statement onto the fallthru edge;
	       gsi_remove advances NEXT to the following statement.  */
	    while (!gsi_end_p (next))
	      {
		gimple *next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
4307 /* Walker callback for chkp_replace_function_pointers. Replaces
4308 function pointer in the specified operand with pointer to the
4309 instrumented function version. */
4310 static tree
4311 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4312 void *data ATTRIBUTE_UNUSED)
4314 if (TREE_CODE (*op) == FUNCTION_DECL
4315 && chkp_instrumentable_p (*op)
4316 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4317 /* For builtins we replace pointers only for selected
4318 function and functions having definitions. */
4319 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4320 && (chkp_instrument_normal_builtin (*op)
4321 || gimple_has_body_p (*op)))))
4323 struct cgraph_node *node = cgraph_node::get_create (*op);
4324 struct cgraph_node *clone = NULL;
4326 if (!node->instrumentation_clone)
4327 clone = chkp_maybe_create_clone (*op);
4329 if (clone)
4330 *op = clone->decl;
4331 *walk_subtrees = 0;
4334 return NULL;
4337 /* This function searches for function pointers in statement
4338 pointed by GSI and replaces them with pointers to instrumented
4339 function versions. */
4340 static void
4341 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4343 gimple *stmt = gsi_stmt (*gsi);
4344 /* For calls we want to walk call args only. */
4345 if (gimple_code (stmt) == GIMPLE_CALL)
4347 unsigned i;
4348 for (i = 0; i < gimple_call_num_args (stmt); i++)
4349 walk_tree (gimple_call_arg_ptr (stmt, i),
4350 chkp_replace_function_pointer, NULL, NULL);
4352 else
4353 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
/* This function instruments all statements working with memory,
   calls and rets.

   It also removes excess statements from static initializers.  */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* Statements inside checker-generated static ctors are trusted.  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      /* Remember the successor now: instrumentation may split BB.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple *s = gsi_stmt (i);

	  /* Skip statement marked to not be instrumented.  */
	  if (chkp_marked_stmt_p (s))
	    {
	      gsi_next (&i);
	      continue;
	    }

	  chkp_replace_function_pointers (&i);

	  switch (gimple_code (s))
	    {
	    case GIMPLE_ASSIGN:
	      /* Check the store destination, then the loaded operands.  */
	      chkp_process_stmt (&i, gimple_assign_lhs (s),
				 gimple_location (s), integer_one_node,
				 NULL_TREE, NULL_TREE, safe);
	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
				 gimple_location (s), integer_zero_node,
				 NULL_TREE, NULL_TREE, safe);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
				   gimple_location (s), integer_zero_node,
				   NULL_TREE, NULL_TREE, safe);
	      break;

	    case GIMPLE_RETURN:
	      {
		greturn *r = as_a <greturn *> (s);
		if (gimple_return_retval (r) != NULL_TREE)
		  {
		    chkp_process_stmt (&i, gimple_return_retval (r),
				       gimple_location (r),
				       integer_zero_node,
				       NULL_TREE, NULL_TREE, safe);

		    /* Additionally we need to add bounds
		       to return statement.  */
		    chkp_add_bounds_to_ret_stmt (&i);
		  }
	      }
	      break;

	    case GIMPLE_CALL:
	      chkp_add_bounds_to_call_stmt (&i);
	      break;

	    default:
	      ;
	    }

	  gsi_next (&i);

	  /* We do not need any actual pointer stores in checker
	     static initializer.  */
	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
	      && gimple_code (s) == GIMPLE_ASSIGN
	      && gimple_store_p (s))
	    {
	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
	      gsi_remove (&del_iter, true);
	      unlink_stmt_vdef (s);
	      release_defs(s);
	    }
	}
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
	{
	  if (BOUNDED_P (arg))
	    {
	      tree bounds = chkp_get_next_bounds_parm (arg);
	      tree def_ptr = ssa_default_def (cfun, arg);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      chkp_build_bndstx (chkp_build_addr_expr (arg),
				 def_ptr ? def_ptr : arg,
				 bounds, &iter);

	      /* Skip bounds arg.  */
	      arg = TREE_CHAIN (arg);
	    }
	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
	    {
	      tree orig_arg = arg;
	      bitmap slots = BITMAP_ALLOC (NULL);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      bitmap_iterator bi;
	      unsigned bnd_no;

	      /* One bndstx per pointer slot in the aggregate arg.  */
	      chkp_find_bound_slots (TREE_TYPE (arg), slots);

	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
		{
		  tree bounds = chkp_get_next_bounds_parm (arg);
		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
		  tree addr = chkp_build_addr_expr (orig_arg);
		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
				     build_int_cst (ptr_type_node, offs));
		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
				     bounds, &iter);

		  /* Each bound slot consumes one bounds parm.  */
		  arg = DECL_CHAIN (arg);
		}
	      BITMAP_FREE (slots);
	    }
	}
}
/* Find init/null/copy_ptr_bounds calls and replace them
   with assignments.  It should allow better code
   optimization.  */
static void
chkp_remove_useless_builtins ()
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree fndecl;
	  enum built_in_function fcode;

	  /* Find builtins returning first arg and replace
	     them with assignments.  NOTE(review): the
	     `(fcode = DECL_FUNCTION_CODE (fndecl))' sub-condition also
	     tests fcode for non-zero; presumably no interesting builtin
	     has code 0 — confirm against builtins.def.  */
	  if (gimple_code (stmt) == GIMPLE_CALL
	      && (fndecl = gimple_call_fndecl (stmt))
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && (fcode = DECL_FUNCTION_CODE (fndecl))
	      && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
	    {
	      /* Replace the call with `lhs = first_arg'.  */
	      tree res = gimple_call_arg (stmt, 0);
	      update_call_from_tree (&gsi, res);
	      stmt = gsi_stmt (gsi);
	      update_stmt (stmt);
	    }
	}
    }
}
4530 /* Initialize pass. */
4531 static void
4532 chkp_init (void)
4534 basic_block bb;
4535 gimple_stmt_iterator i;
4537 in_chkp_pass = true;
4539 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4540 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4541 chkp_unmark_stmt (gsi_stmt (i));
4543 chkp_invalid_bounds = new hash_set<tree>;
4544 chkp_completed_bounds_set = new hash_set<tree>;
4545 delete chkp_reg_bounds;
4546 chkp_reg_bounds = new hash_map<tree, tree>;
4547 delete chkp_bound_vars;
4548 chkp_bound_vars = new hash_map<tree, tree>;
4549 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4550 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4551 delete chkp_bounds_map;
4552 chkp_bounds_map = new hash_map<tree, tree>;
4553 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
4555 entry_block = NULL;
4556 zero_bounds = NULL_TREE;
4557 none_bounds = NULL_TREE;
4558 incomplete_bounds = integer_zero_node;
4559 tmp_var = NULL_TREE;
4560 size_tmp_var = NULL_TREE;
4562 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4564 /* We create these constant bounds once for each object file.
4565 These symbols go to comdat section and result in single copy
4566 of each one in the final binary. */
4567 chkp_get_zero_bounds_var ();
4568 chkp_get_none_bounds_var ();
4570 calculate_dominance_info (CDI_DOMINATORS);
4571 calculate_dominance_info (CDI_POST_DOMINATORS);
4573 bitmap_obstack_initialize (NULL);
4576 /* Finalize instrumentation pass. */
4577 static void
4578 chkp_fini (void)
4580 in_chkp_pass = false;
4582 delete chkp_invalid_bounds;
4583 delete chkp_completed_bounds_set;
4584 delete chkp_reg_addr_bounds;
4585 delete chkp_incomplete_bounds_map;
4587 free_dominance_info (CDI_DOMINATORS);
4588 free_dominance_info (CDI_POST_DOMINATORS);
4590 bitmap_obstack_release (NULL);
4592 entry_block = NULL;
4593 zero_bounds = NULL_TREE;
4594 none_bounds = NULL_TREE;
4597 /* Main instrumentation pass function. */
4598 static unsigned int
4599 chkp_execute (void)
4601 chkp_init ();
4603 chkp_instrument_function ();
4605 chkp_remove_useless_builtins ();
4607 chkp_function_mark_instrumented (cfun->decl);
4609 chkp_fix_cfg ();
4611 chkp_fini ();
4613 return 0;
4616 /* Instrumentation pass gate. */
4617 static bool
4618 chkp_gate (void)
4620 cgraph_node *node = cgraph_node::get (cfun->decl);
4621 return ((node != NULL
4622 && node->instrumentation_clone)
4623 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
4626 namespace {
4628 const pass_data pass_data_chkp =
4630 GIMPLE_PASS, /* type */
4631 "chkp", /* name */
4632 OPTGROUP_NONE, /* optinfo_flags */
4633 TV_NONE, /* tv_id */
4634 PROP_ssa | PROP_cfg, /* properties_required */
4635 0, /* properties_provided */
4636 0, /* properties_destroyed */
4637 0, /* todo_flags_start */
4638 TODO_verify_il
4639 | TODO_update_ssa /* todo_flags_finish */
4642 class pass_chkp : public gimple_opt_pass
4644 public:
4645 pass_chkp (gcc::context *ctxt)
4646 : gimple_opt_pass (pass_data_chkp, ctxt)
4649 /* opt_pass methods: */
4650 virtual opt_pass * clone ()
4652 return new pass_chkp (m_ctxt);
4655 virtual bool gate (function *)
4657 return chkp_gate ();
4660 virtual unsigned int execute (function *)
4662 return chkp_execute ();
4665 }; // class pass_chkp
4667 } // anon namespace
4669 gimple_opt_pass *
4670 make_pass_chkp (gcc::context *ctxt)
4672 return new pass_chkp (ctxt);
4675 #include "gt-tree-chkp.h"