i386: Rewrite check for AVX512 features
[official-gcc.git] / gcc / tree-chkp.c
blobe241f50f308413c0d4ab5b6b8367cded7099c332
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2017 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "diagnostic.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "varasm.h"
37 #include "tree-iterator.h"
38 #include "tree-cfg.h"
39 #include "langhooks.h"
40 #include "tree-ssa-address.h"
41 #include "tree-ssa-loop-niter.h"
42 #include "gimple-pretty-print.h"
43 #include "gimple-iterator.h"
44 #include "gimplify.h"
45 #include "gimplify-me.h"
46 #include "print-tree.h"
47 #include "calls.h"
48 #include "expr.h"
49 #include "tree-ssa-propagate.h"
50 #include "tree-chkp.h"
51 #include "gimple-walk.h"
52 #include "tree-dfa.h"
53 #include "ipa-chkp.h"
54 #include "params.h"
56 /* Pointer Bounds Checker instruments code with memory checks to find
57 out-of-bounds memory accesses. Checks are performed by computing
58 bounds for each pointer and then comparing address of accessed
59 memory before pointer dereferencing.
61 1. Function clones.
63 See ipa-chkp.c.
65 2. Instrumentation.
67 There are few things to instrument:
69 a) Memory accesses - add checker calls to check address of accessed memory
70 against bounds of dereferenced pointer. Obviously safe memory
71 accesses like static variable access does not have to be instrumented
72 with checks.
74 Example:
76 val_2 = *p_1;
78 with 4 bytes access is transformed into:
80 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
81 D.1_4 = p_1 + 3;
82 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
83 val_2 = *p_1;
85 where __bound_tmp.1_3 are bounds computed for pointer p_1,
86 __builtin___chkp_bndcl is a lower bound check and
87 __builtin___chkp_bndcu is an upper bound check.
89 b) Pointer stores.
91 When pointer is stored in memory we need to store its bounds. To
92 achieve compatibility of instrumented code with regular codes
93 we have to keep data layout and store bounds in special bound tables
94 via special checker call. Implementation of bounds table may vary for
95 different platforms. It has to associate pointer value and its
96 location (it is required because we may have two equal pointers
97 with different bounds stored in different places) with bounds.
98 Another checker builtin allows to get bounds for specified pointer
99 loaded from specified location.
101 Example:
103 buf1[i_1] = &buf2;
105 is transformed into:
107 buf1[i_1] = &buf2;
108 D.1_2 = &buf1[i_1];
109 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
111 where __bound_tmp.1_2 are bounds of &buf2.
113 c) Static initialization.
115 The special case of pointer store is static pointer initialization.
116 Bounds initialization is performed in a few steps:
117 - register all static initializations in front-end using
118 chkp_register_var_initializer
119 - when file compilation finishes we create functions with special
120 attribute 'chkp ctor' and put explicit initialization code
121 (assignments) for all statically initialized pointers.
122 - when checker constructor is compiled checker pass adds required
123 bounds initialization for all statically initialized pointers
124 - since we do not actually need excess pointers initialization
125 in checker constructor we remove such assignments from them
127 d) Calls.
129 For each call in the code we add additional arguments to pass
130 bounds for pointer arguments. We determine type of call arguments
131 using arguments list from function declaration; if function
132 declaration is not available we use function type; otherwise
133 (e.g. for unnamed arguments) we use type of passed value. Function
134 declaration/type is replaced with the instrumented one.
136 Example:
138 val_1 = foo (&buf1, &buf2, &buf1, 0);
140 is translated into:
142 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
143 &buf1, __bound_tmp.1_2, 0);
145 e) Returns.
147 If function returns a pointer value we have to return bounds also.
148 A new operand was added for return statement to hold returned bounds.
150 Example:
152 return &_buf1;
154 is transformed into
156 return &_buf1, __bound_tmp.1_1;
158 3. Bounds computation.
160 Compiler is fully responsible for computing bounds to be used for each
161 memory access. The first step for bounds computation is to find the
162 origin of the pointer dereferenced for the memory access. Based on the pointer
163 origin we define a way to compute its bounds. There are just few
164 possible cases:
166 a) Pointer is returned by call.
168 In this case we use corresponding checker builtin method to obtain returned
169 bounds.
171 Example:
173 buf_1 = malloc (size_2);
174 foo (buf_1);
176 is translated into:
178 buf_1 = malloc (size_2);
179 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
180 foo (buf_1, __bound_tmp.1_3);
182 b) Pointer is an address of an object.
184 In this case compiler tries to compute objects size and create corresponding
185 bounds. If object has incomplete type then special checker builtin is used to
186 obtain its size at runtime.
188 Example:
190 foo ()
192 <unnamed type> __bound_tmp.3;
193 static int buf[100];
195 <bb 3>:
196 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
198 <bb 2>:
199 return &buf, __bound_tmp.3_2;
202 Example:
204 Address of an object 'extern int buf[]' with incomplete type is
205 returned.
207 foo ()
209 <unnamed type> __bound_tmp.4;
210 long unsigned int __size_tmp.3;
212 <bb 3>:
213 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
214 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
216 <bb 2>:
217 return &buf, __bound_tmp.4_3;
220 c) Pointer is the result of object narrowing.
222 It happens when we use pointer to an object to compute pointer to a part
223 of an object. E.g. we take pointer to a field of a structure. In this
224 case we perform bounds intersection using bounds of original object and
225 bounds of object's part (which are computed basing on its type).
227 There may be some debatable questions about when narrowing should occur
228 and when it should not. To avoid false bound violations in correct
229 programs we do not perform narrowing when address of an array element is
230 obtained (it has address of the whole array) and when address of the first
231 structure field is obtained (because it is guaranteed to be equal to
232 address of the whole structure and it is legal to cast it back to structure).
234 Default narrowing behavior may be changed using compiler flags.
236 Example:
238 In this example address of the second structure field is returned.
240 foo (struct A * p, __bounds_type __bounds_of_p)
242 <unnamed type> __bound_tmp.3;
243 int * _2;
244 int * _5;
246 <bb 2>:
247 _5 = &p_1(D)->second_field;
248 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
249 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
250 __bounds_of_p_3(D));
251 _2 = &p_1(D)->second_field;
252 return _2, __bound_tmp.3_8;
255 Example:
257 In this example address of the first field of array element is returned.
259 foo (struct A * p, __bounds_type __bounds_of_p, int i)
261 long unsigned int _3;
262 long unsigned int _4;
263 struct A * _6;
264 int * _7;
266 <bb 2>:
267 _3 = (long unsigned int) i_1(D);
268 _4 = _3 * 8;
269 _6 = p_5(D) + _4;
270 _7 = &_6->first_field;
271 return _7, __bounds_of_p_2(D);
275 d) Pointer is the result of pointer arithmetic or type cast.
277 In this case bounds of the base pointer are used. In case of binary
278 operation producing a pointer we are analyzing data flow further
279 looking for operand's bounds. One operand is considered as a base
280 if it has some valid bounds. If we fall into a case when none of
281 operands (or both of them) has valid bounds, a default bounds value
282 is used.
284 Trying to find out bounds for binary operations we may fall into
285 cyclic dependencies for pointers. To avoid infinite recursion all
286 walked phi nodes instantly obtain corresponding bounds but created
287 bounds are marked as incomplete. It helps us to stop DF walk during
288 bounds search.
290 When we reach pointer source, some args of incomplete bounds phi obtain
291 valid bounds and those values are propagated further through phi nodes.
292 If no valid bounds were found for phi node then we mark its result as
293 invalid bounds. Process stops when all incomplete bounds become either
294 valid or invalid and we are able to choose a pointer base.
296 e) Pointer is loaded from the memory.
298 In this case we just need to load bounds from the bounds table.
300 Example:
302 foo ()
304 <unnamed type> __bound_tmp.3;
305 static int * buf;
306 int * _2;
308 <bb 2>:
309 _2 = buf;
310 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
311 return _2, __bound_tmp.3_4;
316 typedef void (*assign_handler)(tree, tree, void *);
318 static tree chkp_get_zero_bounds ();
319 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
320 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
321 gimple_stmt_iterator *iter);
322 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
323 tree *elt, bool *safe,
324 bool *bitfield,
325 tree *bounds,
326 gimple_stmt_iterator *iter,
327 bool innermost_bounds);
328 static void chkp_parse_bit_field_ref (tree node, location_t loc,
329 tree *offset, tree *size);
330 static tree
331 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter);
333 #define chkp_bndldx_fndecl \
334 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
335 #define chkp_bndstx_fndecl \
336 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
337 #define chkp_checkl_fndecl \
338 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
339 #define chkp_checku_fndecl \
340 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
341 #define chkp_bndmk_fndecl \
342 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
343 #define chkp_ret_bnd_fndecl \
344 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
345 #define chkp_intersect_fndecl \
346 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
347 #define chkp_narrow_bounds_fndecl \
348 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
349 #define chkp_sizeof_fndecl \
350 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
351 #define chkp_extract_lower_fndecl \
352 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
353 #define chkp_extract_upper_fndecl \
354 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
356 static GTY (()) tree chkp_uintptr_type;
358 static GTY (()) tree chkp_zero_bounds_var;
359 static GTY (()) tree chkp_none_bounds_var;
361 static GTY (()) basic_block entry_block;
362 static GTY (()) tree zero_bounds;
363 static GTY (()) tree none_bounds;
364 static GTY (()) tree incomplete_bounds;
365 static GTY (()) tree tmp_var;
366 static GTY (()) tree size_tmp_var;
367 static GTY (()) bitmap chkp_abnormal_copies;
369 struct hash_set<tree> *chkp_invalid_bounds;
370 struct hash_set<tree> *chkp_completed_bounds_set;
371 struct hash_map<tree, tree> *chkp_reg_bounds;
372 struct hash_map<tree, tree> *chkp_bound_vars;
373 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
374 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
375 struct hash_map<tree, tree> *chkp_bounds_map;
376 struct hash_map<tree, tree> *chkp_static_var_bounds;
378 static bool in_chkp_pass;
380 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
381 #define CHKP_SIZE_TMP_NAME "__size_tmp"
382 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
383 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
384 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
385 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
386 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
388 /* Static checker constructors may become very large and their
389 compilation with optimization may take too much time.
390 Therefore we put a limit to number of statements in one
391 constructor. Tests with 100 000 statically initialized
392 pointers showed following compilation times on Sandy Bridge
393 server (used -O2):
394 limit 100 => ~18 sec.
395 limit 300 => ~22 sec.
396 limit 1000 => ~30 sec.
397 limit 3000 => ~49 sec.
398 limit 5000 => ~55 sec.
399 limit 10000 => ~76 sec.
400 limit 100000 => ~532 sec. */
401 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
403 struct chkp_ctor_stmt_list
405 tree stmts;
406 int avail;
409 /* Return 1 if function FNDECL is instrumented by Pointer
410 Bounds Checker. */
411 bool
412 chkp_function_instrumented_p (tree fndecl)
414 return fndecl
415 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
418 /* Mark function FNDECL as instrumented. */
419 void
420 chkp_function_mark_instrumented (tree fndecl)
422 if (chkp_function_instrumented_p (fndecl))
423 return;
425 DECL_ATTRIBUTES (fndecl)
426 = tree_cons (get_identifier ("chkp instrumented"), NULL,
427 DECL_ATTRIBUTES (fndecl));
430 /* Return true when STMT is builtin call to instrumentation function
431 corresponding to CODE. */
433 bool
434 chkp_gimple_call_builtin_p (gimple *call,
435 enum built_in_function code)
437 tree fndecl;
438 /* We are skipping the check for address-spaces, that's
439 why we don't use gimple_call_builtin_p directly here. */
440 if (is_gimple_call (call)
441 && (fndecl = gimple_call_fndecl (call)) != NULL
442 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD
443 && (fndecl = targetm.builtin_chkp_function (code))
444 && (DECL_FUNCTION_CODE (gimple_call_fndecl (call))
445 == DECL_FUNCTION_CODE (fndecl)))
446 return true;
447 return false;
450 /* Emit code to build zero bounds and return RTL holding
451 the result. */
453 chkp_expand_zero_bounds ()
455 tree zero_bnd;
457 if (flag_chkp_use_static_const_bounds)
458 zero_bnd = chkp_get_zero_bounds_var ();
459 else
460 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
461 integer_zero_node);
462 return expand_normal (zero_bnd);
465 /* Emit code to store zero bounds for PTR located at MEM. */
466 void
467 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
469 tree zero_bnd, bnd, addr, bndstx;
471 if (flag_chkp_use_static_const_bounds)
472 zero_bnd = chkp_get_zero_bounds_var ();
473 else
474 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
475 integer_zero_node);
476 bnd = make_tree (pointer_bounds_type_node,
477 assign_temp (pointer_bounds_type_node, 0, 1));
478 addr = build1 (ADDR_EXPR,
479 build_pointer_type (TREE_TYPE (mem)), mem);
480 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
482 expand_assignment (bnd, zero_bnd, false);
483 expand_normal (bndstx);
486 /* Build retbnd call for returned value RETVAL.
488 If BNDVAL is not NULL then result is stored
489 in it. Otherwise a temporary is created to
490 hold returned value.
492 GSI points to a position for a retbnd call
493 and is set to created stmt.
495 Cgraph edge is created for a new call if
496 UPDATE_EDGE is 1.
498 Obtained bounds are returned. */
499 tree
500 chkp_insert_retbnd_call (tree bndval, tree retval,
501 gimple_stmt_iterator *gsi)
503 gimple *call;
505 if (!bndval)
506 bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
508 call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
509 gimple_call_set_lhs (call, bndval);
510 gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
512 return bndval;
515 /* Build a GIMPLE_CALL identical to CALL but skipping bounds
516 arguments. */
518 gcall *
519 chkp_copy_call_skip_bounds (gcall *call)
521 bitmap bounds;
522 unsigned i;
524 bitmap_obstack_initialize (NULL);
525 bounds = BITMAP_ALLOC (NULL);
527 for (i = 0; i < gimple_call_num_args (call); i++)
528 if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
529 bitmap_set_bit (bounds, i);
531 if (!bitmap_empty_p (bounds))
532 call = gimple_call_copy_skip_args (call, bounds);
533 gimple_call_set_with_bounds (call, false);
535 BITMAP_FREE (bounds);
536 bitmap_obstack_release (NULL);
538 return call;
541 /* Redirect edge E to the correct node according to call_stmt.
542 Return 1 if bounds removal from call_stmt should be done
543 instead of redirection. */
545 bool
546 chkp_redirect_edge (cgraph_edge *e)
548 bool instrumented = false;
549 tree decl = e->callee->decl;
551 if (e->callee->instrumentation_clone
552 || chkp_function_instrumented_p (decl))
553 instrumented = true;
555 if (instrumented
556 && !gimple_call_with_bounds_p (e->call_stmt))
557 e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
558 else if (!instrumented
559 && gimple_call_with_bounds_p (e->call_stmt)
560 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
561 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
562 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
564 if (e->callee->instrumented_version)
565 e->redirect_callee (e->callee->instrumented_version);
566 else
568 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
569 /* Avoid bounds removal if all args will be removed. */
570 if (!args || TREE_VALUE (args) != void_type_node)
571 return true;
572 else
573 gimple_call_set_with_bounds (e->call_stmt, false);
577 return false;
580 /* Mark statement S to not be instrumented. */
581 static void
582 chkp_mark_stmt (gimple *s)
584 gimple_set_plf (s, GF_PLF_1, true);
587 /* Mark statement S to be instrumented. */
588 static void
589 chkp_unmark_stmt (gimple *s)
591 gimple_set_plf (s, GF_PLF_1, false);
594 /* Return 1 if statement S should not be instrumented. */
595 static bool
596 chkp_marked_stmt_p (gimple *s)
598 return gimple_plf (s, GF_PLF_1);
601 /* Get var to be used for bound temps. */
602 static tree
603 chkp_get_tmp_var (void)
605 if (!tmp_var)
606 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
608 return tmp_var;
611 /* Get SSA_NAME to be used as temp. */
612 static tree
613 chkp_get_tmp_reg (gimple *stmt)
615 if (in_chkp_pass)
616 return make_ssa_name (chkp_get_tmp_var (), stmt);
618 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
619 CHKP_BOUND_TMP_NAME);
622 /* Get var to be used for size temps. */
623 static tree
624 chkp_get_size_tmp_var (void)
626 if (!size_tmp_var)
627 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
629 return size_tmp_var;
632 /* Register bounds BND for address of OBJ. */
633 static void
634 chkp_register_addr_bounds (tree obj, tree bnd)
636 if (bnd == incomplete_bounds)
637 return;
639 chkp_reg_addr_bounds->put (obj, bnd);
641 if (dump_file && (dump_flags & TDF_DETAILS))
643 fprintf (dump_file, "Regsitered bound ");
644 print_generic_expr (dump_file, bnd);
645 fprintf (dump_file, " for address of ");
646 print_generic_expr (dump_file, obj);
647 fprintf (dump_file, "\n");
651 /* Return bounds registered for address of OBJ. */
652 static tree
653 chkp_get_registered_addr_bounds (tree obj)
655 tree *slot = chkp_reg_addr_bounds->get (obj);
656 return slot ? *slot : NULL_TREE;
659 /* Mark BOUNDS as completed. */
660 static void
661 chkp_mark_completed_bounds (tree bounds)
663 chkp_completed_bounds_set->add (bounds);
665 if (dump_file && (dump_flags & TDF_DETAILS))
667 fprintf (dump_file, "Marked bounds ");
668 print_generic_expr (dump_file, bounds);
669 fprintf (dump_file, " as completed\n");
673 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
674 static bool
675 chkp_completed_bounds (tree bounds)
677 return chkp_completed_bounds_set->contains (bounds);
680 /* Clear comleted bound marks. */
681 static void
682 chkp_erase_completed_bounds (void)
684 delete chkp_completed_bounds_set;
685 chkp_completed_bounds_set = new hash_set<tree>;
688 /* This function is used to provide a base address for
689 chkp_get_hard_register_fake_addr_expr. */
690 static tree
691 chkp_get_hard_register_var_fake_base_address ()
693 int prec = TYPE_PRECISION (ptr_type_node);
694 return wide_int_to_tree (ptr_type_node, wi::min_value (prec, SIGNED));
697 /* If we check bounds for a hard register variable, we cannot
698 use its address - it is illegal, so instead of that we use
699 this fake value. */
700 static tree
701 chkp_get_hard_register_fake_addr_expr (tree obj)
703 tree addr = chkp_get_hard_register_var_fake_base_address ();
704 tree outer = obj;
705 while (TREE_CODE (outer) == COMPONENT_REF || TREE_CODE (outer) == ARRAY_REF)
707 if (TREE_CODE (outer) == COMPONENT_REF)
709 addr = fold_build_pointer_plus (addr,
710 component_ref_field_offset (outer));
711 outer = TREE_OPERAND (outer, 0);
713 else if (TREE_CODE (outer) == ARRAY_REF)
715 tree indx = fold_convert(size_type_node, TREE_OPERAND(outer, 1));
716 tree offset = size_binop (MULT_EXPR,
717 array_ref_element_size (outer), indx);
718 addr = fold_build_pointer_plus (addr, offset);
719 outer = TREE_OPERAND (outer, 0);
723 return addr;
726 /* Mark BOUNDS associated with PTR as incomplete. */
727 static void
728 chkp_register_incomplete_bounds (tree bounds, tree ptr)
730 chkp_incomplete_bounds_map->put (bounds, ptr);
732 if (dump_file && (dump_flags & TDF_DETAILS))
734 fprintf (dump_file, "Regsitered incomplete bounds ");
735 print_generic_expr (dump_file, bounds);
736 fprintf (dump_file, " for ");
737 print_generic_expr (dump_file, ptr);
738 fprintf (dump_file, "\n");
742 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
743 static bool
744 chkp_incomplete_bounds (tree bounds)
746 if (bounds == incomplete_bounds)
747 return true;
749 if (chkp_completed_bounds (bounds))
750 return false;
752 return chkp_incomplete_bounds_map->get (bounds) != NULL;
755 /* Clear incomleted bound marks. */
756 static void
757 chkp_erase_incomplete_bounds (void)
759 delete chkp_incomplete_bounds_map;
760 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
763 /* Build and return bndmk call which creates bounds for structure
764 pointed by PTR. Structure should have complete type. */
765 tree
766 chkp_make_bounds_for_struct_addr (tree ptr)
768 tree type = TREE_TYPE (ptr);
769 tree size;
771 gcc_assert (POINTER_TYPE_P (type));
773 size = TYPE_SIZE (TREE_TYPE (type));
775 gcc_assert (size);
777 return build_call_nary (pointer_bounds_type_node,
778 build_fold_addr_expr (chkp_bndmk_fndecl),
779 2, ptr, size);
782 /* Traversal function for chkp_may_finish_incomplete_bounds.
783 Set RES to 0 if at least one argument of phi statement
784 defining bounds (passed in KEY arg) is unknown.
785 Traversal stops when first unknown phi argument is found. */
786 bool
787 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
788 bool *res)
790 gimple *phi;
791 unsigned i;
793 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
795 phi = SSA_NAME_DEF_STMT (bounds);
797 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
799 for (i = 0; i < gimple_phi_num_args (phi); i++)
801 tree phi_arg = gimple_phi_arg_def (phi, i);
802 if (!phi_arg)
804 *res = false;
805 /* Do not need to traverse further. */
806 return false;
810 return true;
813 /* Return 1 if all phi nodes created for bounds have their
814 arguments computed. */
815 static bool
816 chkp_may_finish_incomplete_bounds (void)
818 bool res = true;
820 chkp_incomplete_bounds_map
821 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
823 return res;
826 /* Helper function for chkp_finish_incomplete_bounds.
827 Recompute args for bounds phi node. */
828 bool
829 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
830 void *res ATTRIBUTE_UNUSED)
832 tree ptr = *slot;
833 gphi *bounds_phi;
834 gphi *ptr_phi;
835 unsigned i;
837 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
838 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
840 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
841 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
843 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
845 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
846 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
848 add_phi_arg (bounds_phi, bound_arg,
849 gimple_phi_arg_edge (ptr_phi, i),
850 UNKNOWN_LOCATION);
853 return true;
856 /* Mark BOUNDS as invalid. */
857 static void
858 chkp_mark_invalid_bounds (tree bounds)
860 chkp_invalid_bounds->add (bounds);
862 if (dump_file && (dump_flags & TDF_DETAILS))
864 fprintf (dump_file, "Marked bounds ");
865 print_generic_expr (dump_file, bounds);
866 fprintf (dump_file, " as invalid\n");
870 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
871 static bool
872 chkp_valid_bounds (tree bounds)
874 if (bounds == zero_bounds || bounds == none_bounds)
875 return false;
877 return !chkp_invalid_bounds->contains (bounds);
880 /* Helper function for chkp_finish_incomplete_bounds.
881 Check all arguments of phi nodes trying to find
882 valid completed bounds. If there is at least one
883 such arg then bounds produced by phi node are marked
884 as valid completed bounds and all phi args are
885 recomputed. */
886 bool
887 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
889 gimple *phi;
890 unsigned i;
892 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
894 if (chkp_completed_bounds (bounds))
895 return true;
897 phi = SSA_NAME_DEF_STMT (bounds);
899 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
901 for (i = 0; i < gimple_phi_num_args (phi); i++)
903 tree phi_arg = gimple_phi_arg_def (phi, i);
905 gcc_assert (phi_arg);
907 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
909 *res = true;
910 chkp_mark_completed_bounds (bounds);
911 chkp_recompute_phi_bounds (bounds, slot, NULL);
912 return true;
916 return true;
919 /* Helper function for chkp_finish_incomplete_bounds.
920 Marks all incompleted bounds as invalid. */
921 bool
922 chkp_mark_invalid_bounds_walker (tree const &bounds,
923 tree *slot ATTRIBUTE_UNUSED,
924 void *res ATTRIBUTE_UNUSED)
926 if (!chkp_completed_bounds (bounds))
928 chkp_mark_invalid_bounds (bounds);
929 chkp_mark_completed_bounds (bounds);
931 return true;
934 /* When all bound phi nodes have all their args computed
935 we have enough info to find valid bounds. We iterate
936 through all incompleted bounds searching for valid
937 bounds. Found valid bounds are marked as completed
938 and all remaining incompleted bounds are recomputed.
939 Process continues until no new valid bounds may be
940 found. All remained incompleted bounds are marked as
941 invalid (i.e. have no valid source of bounds). */
942 static void
943 chkp_finish_incomplete_bounds (void)
945 bool found_valid = true;
947 while (found_valid)
949 found_valid = false;
951 chkp_incomplete_bounds_map->
952 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
954 if (found_valid)
955 chkp_incomplete_bounds_map->
956 traverse<void *, chkp_recompute_phi_bounds> (NULL);
959 chkp_incomplete_bounds_map->
960 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
961 chkp_incomplete_bounds_map->
962 traverse<void *, chkp_recompute_phi_bounds> (NULL);
964 chkp_erase_completed_bounds ();
965 chkp_erase_incomplete_bounds ();
968 /* Return 1 if type TYPE is a pointer type or a
969 structure having a pointer type as one of its fields.
970 Otherwise return 0. */
971 bool
972 chkp_type_has_pointer (const_tree type)
974 bool res = false;
976 if (BOUNDED_TYPE_P (type))
977 res = true;
978 else if (RECORD_OR_UNION_TYPE_P (type))
980 tree field;
982 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
983 if (TREE_CODE (field) == FIELD_DECL)
984 res = res || chkp_type_has_pointer (TREE_TYPE (field));
986 else if (TREE_CODE (type) == ARRAY_TYPE)
987 res = chkp_type_has_pointer (TREE_TYPE (type));
989 return res;
992 unsigned
993 chkp_type_bounds_count (const_tree type)
995 unsigned res = 0;
997 if (!type)
998 res = 0;
999 else if (BOUNDED_TYPE_P (type))
1000 res = 1;
1001 else if (RECORD_OR_UNION_TYPE_P (type))
1003 bitmap have_bound;
1005 bitmap_obstack_initialize (NULL);
1006 have_bound = BITMAP_ALLOC (NULL);
1007 chkp_find_bound_slots (type, have_bound);
1008 res = bitmap_count_bits (have_bound);
1009 BITMAP_FREE (have_bound);
1010 bitmap_obstack_release (NULL);
1013 return res;
1016 /* Get bounds associated with NODE via
1017 chkp_set_bounds call. */
1018 tree
1019 chkp_get_bounds (tree node)
1021 tree *slot;
1023 if (!chkp_bounds_map)
1024 return NULL_TREE;
1026 slot = chkp_bounds_map->get (node);
1027 return slot ? *slot : NULL_TREE;
1030 /* Associate bounds VAL with NODE. */
1031 void
1032 chkp_set_bounds (tree node, tree val)
1034 if (!chkp_bounds_map)
1035 chkp_bounds_map = new hash_map<tree, tree>;
1037 chkp_bounds_map->put (node, val);
1040 /* Check if statically initialized variable VAR require
1041 static bounds initialization. If VAR is added into
1042 bounds initlization list then 1 is returned. Otherwise
1043 return 0. */
1044 extern bool
1045 chkp_register_var_initializer (tree var)
1047 if (!flag_check_pointer_bounds
1048 || DECL_INITIAL (var) == error_mark_node)
1049 return false;
1051 gcc_assert (VAR_P (var));
1052 gcc_assert (DECL_INITIAL (var));
1054 if (TREE_STATIC (var)
1055 && chkp_type_has_pointer (TREE_TYPE (var)))
1057 varpool_node::get_create (var)->need_bounds_init = 1;
1058 return true;
1061 return false;
1064 /* Helper function for chkp_finish_file.
1066 Add new modification statement (RHS is assigned to LHS)
1067 into list of static initializer statementes (passed in ARG).
1068 If statements list becomes too big, emit checker constructor
1069 and start the new one. */
1070 static void
1071 chkp_add_modification_to_stmt_list (tree lhs,
1072 tree rhs,
1073 void *arg)
1075 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
1076 tree modify;
1078 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1079 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1081 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1082 append_to_statement_list (modify, &stmts->stmts);
1084 stmts->avail--;
1087 /* Build and return ADDR_EXPR for specified object OBJ. */
1088 static tree
1089 chkp_build_addr_expr (tree obj)
1091 /* We first check whether it is a "hard reg case". */
1092 tree base = get_base_address (obj);
1093 if (VAR_P (base) && DECL_HARD_REGISTER (base))
1094 return chkp_get_hard_register_fake_addr_expr (obj);
1096 /* If not - return regular ADDR_EXPR. */
1097 return TREE_CODE (obj) == TARGET_MEM_REF
1098 ? tree_mem_ref_addr (ptr_type_node, obj)
1099 : build_fold_addr_expr (obj);
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      /* String constants: size is the literal length minus the
	 terminating NUL; UB points at the last valid byte.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size (a runtime call through
	 chkp_sizeof_fndecl).  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* If the runtime size is zero, fall back to "infinite" bounds
	     covering the whole address space above LB.  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  if (stmts->avail <= 0)
    {
      /* Budget exhausted: flush accumulated statements into a static
	 constructor and start a fresh list.  */
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
1160 /* Return entry block to be used for checker initilization code.
1161 Create new block if required. */
1162 static basic_block
1163 chkp_get_entry_block (void)
1165 if (!entry_block)
1166 entry_block
1167 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1169 return entry_block;
1172 /* Return a bounds var to be used for pointer var PTR_VAR. */
1173 static tree
1174 chkp_get_bounds_var (tree ptr_var)
1176 tree bnd_var;
1177 tree *slot;
1179 slot = chkp_bound_vars->get (ptr_var);
1180 if (slot)
1181 bnd_var = *slot;
1182 else
1184 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1185 CHKP_BOUND_TMP_NAME);
1186 chkp_bound_vars->put (ptr_var, bnd_var);
1189 return bnd_var;
1192 /* If BND is an abnormal bounds copy, return a copied value.
1193 Otherwise return BND. */
1194 static tree
1195 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
1197 if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1199 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1200 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1201 bnd = gimple_assign_rhs1 (bnd_def);
1204 return bnd;
/* Register bounds BND for object PTR in global bounds table.
   A copy of bounds may be created for abnormal ssa names.
   Returns bounds to use for PTR.  */
static tree
chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
{
  bool abnormal_ptr;

  if (!chkp_reg_bounds)
    return bnd;

  /* Do nothing if bounds are incomplete_bounds
     because it means bounds will be recomputed.  */
  if (bnd == incomplete_bounds)
    return bnd;

  /* PHI results are not considered abnormal here even if they occur
     in an abnormal PHI.  */
  abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
		  && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);

  /* A single bounds value may be reused multiple times for
     different pointer values.  It may cause coalescing issues
     for abnormal SSA names.  To avoid it we create a bounds
     copy in case it is computed for abnormal SSA name.

     We also cannot reuse such created copies for other pointers  */
  if (abnormal_ptr
      || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
    {
      tree bnd_var = NULL_TREE;

      if (abnormal_ptr)
	{
	  if (SSA_NAME_VAR (ptr))
	    bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
	}
      else
	bnd_var = chkp_get_tmp_var ();

      /* For abnormal copies we may just find original
	 bounds and use them.  */
      if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
	bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
      /* For undefined values we usually use none bounds
	 value but in case of abnormal edge it may cause
	 coalescing failures.  Use default definition of
	 bounds variable instead to avoid it.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	       && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
	{
	  bnd = get_or_create_ssa_default_def (cfun, bnd_var);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Using default def bounds ");
	      print_generic_expr (dump_file, bnd);
	      fprintf (dump_file, " for abnormal default def SSA name ");
	      print_generic_expr (dump_file, ptr);
	      fprintf (dump_file, "\n");
	    }
	}
      else
	{
	  /* General case: materialize an explicit copy assignment and
	     place it where both PTR and BND definitions dominate it.  */
	  tree copy;
	  gimple *def = SSA_NAME_DEF_STMT (ptr);
	  gimple *assign;
	  gimple_stmt_iterator gsi;

	  if (bnd_var)
	    copy = make_ssa_name (bnd_var);
	  else
	    copy = make_temp_ssa_name (pointer_bounds_type_node,
				       NULL,
				       CHKP_BOUND_TMP_NAME);
	  bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
	  assign = gimple_build_assign (copy, bnd);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Creating a copy of bounds ");
	      print_generic_expr (dump_file, bnd);
	      fprintf (dump_file, " for abnormal SSA name ");
	      print_generic_expr (dump_file, ptr);
	      fprintf (dump_file, "\n");
	    }

	  if (gimple_code (def) == GIMPLE_NOP)
	    {
	      /* PTR has no defining statement; insert the copy into
		 the checker entry block, before a trailing control
		 statement if one exists.  */
	      gsi = gsi_last_bb (chkp_get_entry_block ());
	      if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
		gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
	      else
		gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
	    }
	  else
	    {
	      gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
	      /* Sometimes (e.g. when we load a pointer from a
		 memory) bounds are produced later than a pointer.
		 We need to insert bounds copy appropriately.  */
	      if (gimple_code (bnd_def) != GIMPLE_NOP
		  && stmt_dominates_stmt_p (def, bnd_def))
		gsi = gsi_for_stmt (bnd_def);
	      else
		gsi = gsi_for_stmt (def);
	      gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
	    }

	  bnd = copy;
	}

      if (abnormal_ptr)
	bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
    }

  chkp_reg_bounds->put (ptr, bnd);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Regsitered bound ");
      print_generic_expr (dump_file, bnd);
      fprintf (dump_file, " for pointer ");
      print_generic_expr (dump_file, ptr);
      fprintf (dump_file, "\n");
    }

  return bnd;
}
1336 /* Get bounds registered for object PTR in global bounds table. */
1337 static tree
1338 chkp_get_registered_bounds (tree ptr)
1340 tree *slot;
1342 if (!chkp_reg_bounds)
1343 return NULL_TREE;
1345 slot = chkp_reg_bounds->get (ptr);
1346 return slot ? *slot : NULL_TREE;
1349 /* Add bound retvals to return statement pointed by GSI. */
1351 static void
1352 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1354 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1355 tree retval = gimple_return_retval (ret);
1356 tree ret_decl = DECL_RESULT (cfun->decl);
1357 tree bounds;
1359 if (!retval)
1360 return;
1362 if (BOUNDED_P (ret_decl))
1364 bounds = chkp_find_bounds (retval, gsi);
1365 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1366 gimple_return_set_retbnd (ret, bounds);
1369 update_stmt (ret);
1372 /* Force OP to be suitable for using as an argument for call.
1373 New statements (if any) go to SEQ. */
1374 static tree
1375 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1377 gimple_seq stmts;
1378 gimple_stmt_iterator si;
1380 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1382 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1383 chkp_mark_stmt (gsi_stmt (si));
1385 gimple_seq_add_seq (seq, stmts);
1387 return op;
/* Generate lower bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
static void
chkp_check_lower (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  /* Zero bounds never fail the check in not instrumented
     functions; skip emitting it.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* DIRFLAG == 0 means a read access; honor -fno-chkp-check-read.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  /* DIRFLAG == 1 means a write access; honor -fno-chkp-check-write.  */
  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
/* Generate upper bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
static void
chkp_check_upper (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  /* Zero bounds never fail the check in not instrumented
     functions; skip emitting it.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* DIRFLAG == 0 means a read access; honor -fno-chkp-check-read.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  /* DIRFLAG == 1 means a write access; honor -fno-chkp-check-write.  */
  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1484 /* Generate lower and upper bound checks for memory access
1485 to memory slot [FIRST, LAST] againsr BOUNDS. Checks
1486 are inserted before the position pointed by ITER.
1487 DIRFLAG indicates whether memory access is load or store. */
1488 void
1489 chkp_check_mem_access (tree first, tree last, tree bounds,
1490 gimple_stmt_iterator iter,
1491 location_t location,
1492 tree dirflag)
1494 chkp_check_lower (first, bounds, iter, location, dirflag);
1495 chkp_check_upper (last, bounds, iter, location, dirflag);
/* Replace call to _bnd_chk_* pointed by GSI with
   bndcu and bndcl calls.  DIRFLAG determines whether
   check is for read or write.  */

void
chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
				    tree dirflag)
{
  gimple_stmt_iterator call_iter = *gsi;
  gimple *call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
    chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    {
      /* CHECK_PTR_BOUNDS takes a size; the upper bound is checked
	 against the last accessed byte, i.e. ADDR + SIZE - 1.  */
      tree size = gimple_call_arg (call, 1);
      addr = fold_build_pointer_plus (addr, size);
      addr = fold_build_pointer_plus_hwi (addr, -1);
      chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
    }

  /* The user builtin call itself is no longer needed.  */
  gsi_remove (&call_iter, true);
}
1530 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1531 corresponding bounds extract call. */
1533 void
1534 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1536 gimple *call = gsi_stmt (*gsi);
1537 tree fndecl = gimple_call_fndecl (call);
1538 tree addr = gimple_call_arg (call, 0);
1539 tree bounds = chkp_find_bounds (addr, gsi);
1540 gimple *extract;
1542 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1543 fndecl = chkp_extract_lower_fndecl;
1544 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1545 fndecl = chkp_extract_upper_fndecl;
1546 else
1547 gcc_unreachable ();
1549 extract = gimple_build_call (fndecl, 1, bounds);
1550 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1551 chkp_mark_stmt (extract);
1553 gsi_replace (gsi, extract, false);
/* Return COMPONENT_REF accessing FIELD in OBJ.  */
static tree
chkp_build_component_ref (tree obj, tree field)
{
  tree res;

  /* If object is TMR then we do not use component_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (obj) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (obj);
      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, DECL_FIELD_OFFSET (field));

      /* Both offsets are constants here, so folding must succeed.  */
      gcc_assert (offs);

      res = copy_node (obj);
      TREE_TYPE (res) = TREE_TYPE (field);
      TMR_OFFSET (res) = offs;
    }
  else
    res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);

  return res;
}
/* Return ARRAY_REF for array ARR and index IDX with
   specified element type ETYPE and element size ESIZE.  */
static tree
chkp_build_array_ref (tree arr, tree etype, tree esize,
		      unsigned HOST_WIDE_INT idx)
{
  tree index = build_int_cst (size_type_node, idx);
  tree res;

  /* If object is TMR then we do not use array_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (arr) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (arr);

      /* Byte offset of the element is ESIZE * INDEX.  */
      esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
				       esize, index);
      gcc_assert (esize);

      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, esize);
      gcc_assert (offs);

      res = copy_node (arr);
      TREE_TYPE (res) = etype;
      TMR_OFFSET (res) = offs;
    }
  else
    res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);

  return res;
}
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   ITER points a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  /* Load the element into a temporary SSA name so bounds can
	     be computed for it at the insertion point.  */
	  tree temp = make_temp_ssa_name (type, NULL, "");
	  gimple *assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into each field, accumulating its bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;

	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays of unknown or zero length.  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into each field, accumulating its bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    HOST_WIDE_INT field_offs = 0;
	    if (DECL_FIELD_BIT_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      /* The object type is an array of complete type, i.e., other
	 than a flexible array.  */
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays whose length is unknown, non-constant or zero.  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
1739 /* Fill bitmap RES with information about bounds for
1740 type TYPE. See chkp_find_bound_slots_1 for more
1741 details. */
1742 void
1743 chkp_find_bound_slots (const_tree type, bitmap res)
1745 bitmap_clear (res);
1746 chkp_find_bound_slots_1 (type, res, 0);
1749 /* Return 1 if call to FNDECL should be instrumented
1750 and 0 otherwise. */
1752 static bool
1753 chkp_instrument_normal_builtin (tree fndecl)
1755 switch (DECL_FUNCTION_CODE (fndecl))
1757 case BUILT_IN_STRLEN:
1758 case BUILT_IN_STRCPY:
1759 case BUILT_IN_STRNCPY:
1760 case BUILT_IN_STPCPY:
1761 case BUILT_IN_STPNCPY:
1762 case BUILT_IN_STRCAT:
1763 case BUILT_IN_STRNCAT:
1764 case BUILT_IN_MEMCPY:
1765 case BUILT_IN_MEMPCPY:
1766 case BUILT_IN_MEMSET:
1767 case BUILT_IN_MEMMOVE:
1768 case BUILT_IN_BZERO:
1769 case BUILT_IN_STRCMP:
1770 case BUILT_IN_STRNCMP:
1771 case BUILT_IN_BCMP:
1772 case BUILT_IN_MEMCMP:
1773 case BUILT_IN_MEMCPY_CHK:
1774 case BUILT_IN_MEMPCPY_CHK:
1775 case BUILT_IN_MEMMOVE_CHK:
1776 case BUILT_IN_MEMSET_CHK:
1777 case BUILT_IN_STRCPY_CHK:
1778 case BUILT_IN_STRNCPY_CHK:
1779 case BUILT_IN_STPCPY_CHK:
1780 case BUILT_IN_STPNCPY_CHK:
1781 case BUILT_IN_STRCAT_CHK:
1782 case BUILT_IN_STRNCAT_CHK:
1783 case BUILT_IN_MALLOC:
1784 case BUILT_IN_CALLOC:
1785 case BUILT_IN_REALLOC:
1786 return 1;
1788 default:
1789 return 0;
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl
      && DECL_ARGUMENTS (fndecl)
      && gimple_call_fntype (call) == TREE_TYPE (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  */
      if (arg)
	{
	  if (use_fntype)
	    {
	      if (TREE_VALUE (arg) != void_type_node)
		{
		  type = TREE_VALUE (arg);
		  arg = TREE_CHAIN (arg);
		}
	      else
		type = TREE_TYPE (call_arg);
	    }
	  else
	    {
	      type = TREE_TYPE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	}
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  /* Aggregate argument containing pointers: pass bounds for
	     each pointer-sized slot that actually holds a pointer.  */
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds were added, the original call can be kept.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      /* In case of a type cast we should modify used function
	 type instead of using type of new fndecl.  */
      if (gimple_call_fntype (call) != TREE_TYPE (fndecl))
	{
	  tree type = gimple_call_fntype (call);
	  type = chkp_copy_function_type_adding_bounds (type);
	  gimple_call_set_fntype (new_call, type);
	}
      else
	gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* Replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA defs of the old call at the new statement
	 before replacing it.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires %qs "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  /* One-only so multiple TUs emitting the same bounds symbol merge.  */
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
/* Generate code to make bounds with specified lower bound LB and SIZE.
   if AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* LB and SIZE may be arbitrary expressions; gimplify them into
     valid call operands first.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER is only honored when an explicit insertion point was given.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
2124 /* Return var holding zero bounds. */
2125 tree
2126 chkp_get_zero_bounds_var (void)
2128 if (!chkp_zero_bounds_var)
2129 chkp_zero_bounds_var
2130 = chkp_make_static_const_bounds (0, -1,
2131 CHKP_ZERO_BOUNDS_VAR_NAME);
2132 return chkp_zero_bounds_var;
2135 /* Return var holding none bounds. */
2136 tree
2137 chkp_get_none_bounds_var (void)
2139 if (!chkp_none_bounds_var)
2140 chkp_none_bounds_var
2141 = chkp_make_static_const_bounds (-1, 0,
2142 CHKP_NONE_BOUNDS_VAR_NAME);
2143 return chkp_none_bounds_var;
/* Return SSA_NAME used to represent zero bounds.  */
static tree
chkp_get_zero_bounds (void)
{
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load the static constant bounds var in the entry block.  */
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      zero_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* Otherwise build bounds with a bndmk call at function entry.  */
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
/* Return SSA_NAME used to represent none bounds.  */
static tree
chkp_get_none_bounds (void)
{
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load the static constant bounds var in the entry block.  */
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      none_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* Otherwise build the empty range [-1, 0] with a bndmk call.  */
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
2205 /* Return bounds to be used as a result of operation which
2206 should not create poiunter (e.g. MULT_EXPR). */
2207 static tree
2208 chkp_get_invalid_op_bounds (void)
2210 return chkp_get_zero_bounds ();
2213 /* Return bounds to be used for loads of non-pointer values. */
2214 static tree
2215 chkp_get_nonpointer_load_bounds (void)
2217 return chkp_get_zero_bounds ();
/* Return 1 if may use bndret call to get bounds for pointer
   returned by CALL.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  if (gimple_call_internal_p (call))
    {
      /* Among internal functions only IFN_VA_ARG returns bounds.  */
      if (gimple_call_internal_fn (call) == IFN_VA_ARG)
	return true;
      return false;
    }

  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      /* Other normal builtins return bounds only when an
	 always_inline instrumented clone with a body exists.  */
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  /* All other calls are assumed to be instrumented and thus
     to return bounds.  */
  return true;
}
/* Build bounds returned by CALL.  Special-cases several builtins
   (alloca, the chkp set/init/null/copy bounds builtins) and calls
   whose return flags say the result equals one of the arguments;
   in the general case emits a __chkp_bndret call right after CALL.
   The computed bounds are registered for the call's LHS.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple *stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;
  tree lhs = gimple_call_lhs (call);

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      /* Bounds for alloca are [lhs, lhs + size).  */
      tree size = gimple_call_arg (call, 0);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lhs, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* For instrumented calls the argument list is interleaved
	     with bounds args; RETARG counts only non-bounds args, so
	     translate it into the real argument index ARGNO.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call)
	   && BOUNDED_P (lhs))
    {
      gcc_assert (TREE_CODE (lhs) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS | TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (lhs, bounds);

  return bounds;
}
/* Return the BUILT_IN_CHKP_BNDRET call that consumes SSA name VAL,
   i.e. the statement that loads the bounds returned by the call
   which produced VAL, or NULL if there is none.  */
gcall *
chkp_retbnd_call_by_val (tree val)
{
  if (TREE_CODE (val) != SSA_NAME)
    return NULL;

  /* VAL is expected to be defined by a call statement.  */
  gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);

  /* Scan immediate uses of VAL for a bndret builtin call.  */
  imm_use_iterator use_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
    if (chkp_gimple_call_builtin_p (USE_STMT (use_p), BUILT_IN_CHKP_BNDRET))
      return as_a <gcall *> (USE_STMT (use_p));

  return NULL;
}
/* Check the next parameter for the given PARM is bounds
   and return its default SSA_NAME (create if required).  */
static tree
chkp_get_next_bounds_parm (tree parm)
{
  /* The bounds parameter is chained directly after PARM.  */
  tree bounds = TREE_CHAIN (parm);
  gcc_assert (POINTER_BOUNDS_P (bounds));
  bounds = ssa_default_def (cfun, bounds);
  if (!bounds)
    {
      /* No default definition yet — create and register one.  */
      bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
      set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
    }
  return bounds;
}
/* Return bounds to be used for input argument PARM.  PARM is the
   default SSA name of a PARM_DECL.  Looks up registered bounds
   first; otherwise derives them from the bounds parameter chained
   after the decl, or falls back to zero bounds.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && id_equal (DECL_ASSEMBLER_NAME (orig_decl), "main"))
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* Pointer parameter: take bounds from the companion
	     bounds parameter and register them for the decl.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	bounds = chkp_get_zero_bounds ();
    }

  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm));
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
/* Build and return CALL_EXPR for bndldx builtin with specified
   arguments.  (The original comment said "bndstx"; this function
   builds a __chkp_bndldx call loading bounds for PTR from ADDR.)  */
tree
chkp_build_bndldx_call (tree addr, tree ptr)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
		    chkp_bndldx_fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
			       fn, 2, addr, ptr);
  /* Mark the call so later instrumentation leaves it alone.  */
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
/* Insert code to load bounds for PTR located by ADDR.
   Code is inserted after position pointed by GSI.
   Loaded bounds are returned.  */
static tree
chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple *stmt;
  tree bounds;

  seq = NULL;

  /* Both operands must be valid gimple call operands; force them
     into that shape, accumulating fixup statements in SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
  chkp_mark_stmt (stmt);
  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndldx for pointer ");
      print_generic_expr (dump_file, ptr);
      fprintf (dump_file, ": ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS | TDF_MEMSYMS);
    }

  return bounds;
}
/* Build and return CALL_EXPR for bndstx builtin with specified
   arguments.  Stores BOUNDS for PTR located at ADDR.  */
tree
chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
		    chkp_bndstx_fndecl);
  /* Note the builtin's argument order is (ptr, bounds, addr),
     which differs from this function's parameter order.  */
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
			       fn, 3, ptr, bounds, addr);
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
/* Insert code to store BOUNDS for PTR stored by ADDR.
   New statements are inserted after position pointed
   by GSI.  */
void
chkp_build_bndstx (tree addr, tree ptr, tree bounds,
		   gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple *stmt;

  seq = NULL;

  /* Force operands into valid gimple call operand shape first.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  /* Builtin argument order is (ptr, bounds, addr).  */
  stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
  chkp_mark_stmt (stmt);
  gimple_call_set_with_bounds (stmt, true);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndstx for pointer store ");
      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
      print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
    }
}
/* This function is called when call statement
   is inlined and therefore we can't use bndret
   for its LHS anymore.  Function fixes bndret
   call using new RHS value if possible.  */
void
chkp_fixup_inlined_call (tree lhs, tree rhs)
{
  tree addr, bounds;
  gcall *retbnd, *bndldx;

  if (!BOUNDED_P (lhs))
    return;

  /* Search for retbnd call.  */
  retbnd = chkp_retbnd_call_by_val (lhs);
  if (!retbnd)
    return;

  /* Currently only handle cases when call is replaced
     with a memory access.  In this case bndret call
     may be replaced with bndldx call.  Otherwise we
     have to search for bounds which may cause wrong
     result due to various optimizations applied.  */
  switch (TREE_CODE (rhs))
    {
    case VAR_DECL:
      /* Register variables have no memory address to load from.  */
      if (DECL_REGISTER (rhs))
	return;
      break;

    case MEM_REF:
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      addr = get_base_address (rhs);
      if (!DECL_P (addr)
	  && TREE_CODE (addr) != MEM_REF)
	return;
      if (DECL_P (addr) && DECL_REGISTER (addr))
	return;
      break;

    default:
      return;
    }

  /* Create a new statements sequence with bndldx call.  */
  gimple_stmt_iterator gsi = gsi_for_stmt (retbnd);
  addr = build_fold_addr_expr (rhs);
  chkp_build_bndldx (addr, lhs, &gsi);
  /* chkp_build_bndldx advanced GSI to the inserted bndldx call.  */
  bndldx = as_a <gcall *> (gsi_stmt (gsi));

  /* Remove bndret call.  */
  bounds = gimple_call_lhs (retbnd);
  gsi = gsi_for_stmt (retbnd);
  gsi_remove (&gsi, true);

  /* Link new bndldx call: reuse the old bndret's result name so
     existing users of the bounds keep working.  */
  gimple_call_set_lhs (bndldx, bounds);
  update_stmt (bndldx);
}
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.
   BASE tracks the operand whose bounds were propagated so that
   abnormal-SSA coalescing issues can be avoided afterwards.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple *assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);
  tree base = NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      base = rhs1;
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  /* Taking the address above requires the parm to be
	     addressable.  */
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();

	base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple *stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds at runtime using the same condition.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Pick bounds of the winning operand at runtime.  */
	    gimple *stmt;
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  /* We may reuse bounds of other pointer we copy/modify.  But it is not
     allowed for abnormal ssa names.  If we produced a pointer using
     abnormal ssa name, we better make a bounds copy to avoid coalescing
     issues.  */
  if (base
      && TREE_CODE (base) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
    {
      gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
      gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
      bounds = gimple_assign_lhs (stmt);
    }

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.

   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.

   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS | TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition — dispatch on what kind of decl the SSA
	 name belongs to.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced type must have a known non-zero size
	       to build bounds for the returned reference.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    NULL,
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      /* No way to know what an asm produces — use zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
2961 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2962 tree
2963 chkp_build_make_bounds_call (tree lower_bound, tree size)
2965 tree call = build1 (ADDR_EXPR,
2966 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2967 chkp_bndmk_fndecl);
2968 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2969 call, 2, lower_bound, size);
/* Create static bounds var for specified OBJ which
   is either VAR_DECL or string constant.  The var is cached in
   CHKP_STATIC_VAR_BOUNDS so repeated references to OBJ reuse it.  */
static tree
chkp_make_static_bounds (tree obj)
{
  static int string_id = 1;
  static int var_id = 1;
  tree *slot;
  const char *var_name;
  char *bnd_var_name;
  tree bnd_var;

  /* First check if we already have required var.  */
  if (chkp_static_var_bounds)
    {
      /* For vars we use assembler name as a key in
	 chkp_static_var_bounds map.  It allows to
	 avoid duplicating bound vars for decls
	 sharing assembler name.  */
      if (VAR_P (obj))
	{
	  tree name = DECL_ASSEMBLER_NAME (obj);
	  slot = chkp_static_var_bounds->get (name);
	  if (slot)
	    return *slot;
	}
      else
	{
	  slot = chkp_static_var_bounds->get (obj);
	  if (slot)
	    return *slot;
	}
    }

  /* Build decl for bounds var.  */
  if (VAR_P (obj))
    {
      if (DECL_IGNORED_P (obj))
	{
	  /* Ignored decls get a generated numeric name.  */
	  bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
	  sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
	}
      else
	{
	  var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));

	  /* For hidden symbols we want to skip first '*' char.  */
	  if (*var_name == '*')
	    var_name++;

	  bnd_var_name = (char *) xmalloc (strlen (var_name)
					   + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
	  strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
	  strcat (bnd_var_name, var_name);
	}

      bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			    get_identifier (bnd_var_name),
			    pointer_bounds_type_node);

      /* Address of the obj will be used as lower bound.  */
      TREE_ADDRESSABLE (obj) = 1;
    }
  else
    {
      /* String constants get a generated numeric name.  */
      bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
      sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);

      bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			    get_identifier (bnd_var_name),
			    pointer_bounds_type_node);
    }

  free (bnd_var_name);

  TREE_PUBLIC (bnd_var) = 0;
  TREE_USED (bnd_var) = 1;
  TREE_READONLY (bnd_var) = 0;
  TREE_STATIC (bnd_var) = 1;
  TREE_ADDRESSABLE (bnd_var) = 0;
  DECL_ARTIFICIAL (bnd_var) = 1;
  DECL_COMMON (bnd_var) = 1;
  DECL_COMDAT (bnd_var) = 1;
  DECL_READ_P (bnd_var) = 1;
  DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
  /* Force output similar to constant bounds.
     See chkp_make_static_const_bounds.  */
  varpool_node::get_create (bnd_var)->force_output = 1;
  /* Mark symbol as requiring bounds initialization.  */
  varpool_node::get_create (bnd_var)->need_bounds_init = 1;
  varpool_node::finalize_decl (bnd_var);

  /* Add created var to the map to use it for other references
     to obj.  */
  if (!chkp_static_var_bounds)
    chkp_static_var_bounds = new hash_map<tree, tree>;

  if (VAR_P (obj))
    {
      tree name = DECL_ASSEMBLER_NAME (obj);
      chkp_static_var_bounds->put (name, bnd_var);
    }
  else
    chkp_static_var_bounds->put (obj, bnd_var);

  return bnd_var;
}
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple *stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var);
      fprintf (dump_file, "'\n");
    }

  /* __chkp_sizeof resolves the object's size via relocation at
     link/run time.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  /* Emit the whole sequence into the function's entry block.  */
  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
3141 /* Return 1 if TYPE has fields with zero size or fields
3142 marked with chkp_variable_size attribute. */
3143 bool
3144 chkp_variable_size_type (tree type)
3146 bool res = false;
3147 tree field;
3149 if (RECORD_OR_UNION_TYPE_P (type))
3150 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3152 if (TREE_CODE (field) == FIELD_DECL)
3153 res = res
3154 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3155 || chkp_variable_size_type (TREE_TYPE (field));
3157 else
3158 res = !TYPE_SIZE (type)
3159 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3160 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3162 return res;
/* Compute and return bounds for address of DECL which is
   one of VAR_DECL, PARM_DECL, RESULT_DECL.  */
static tree
chkp_get_bounds_for_decl_addr (tree decl)
{
  tree bounds;

  gcc_assert (VAR_P (decl)
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  /* Reuse bounds previously computed for this address.  */
  bounds = chkp_get_registered_addr_bounds (decl);

  if (bounds)
    return bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Building bounds for address of decl ");
      print_generic_expr (dump_file, decl);
      fprintf (dump_file, "\n");
    }

  /* Use zero bounds if size is unknown and checks for
     unknown sizes are restricted.  */
  if ((!DECL_SIZE (decl)
       || (chkp_variable_size_type (TREE_TYPE (decl))
	   && (TREE_STATIC (decl)
	       || DECL_EXTERNAL (decl)
	       || TREE_PUBLIC (decl))))
      && !flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (flag_chkp_use_static_bounds
      && VAR_P (decl)
      && (TREE_STATIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_PUBLIC (decl))
      && !DECL_THREAD_LOCAL_P (decl))
    {
      /* Load bounds from a statically initialized bounds var in
	 the entry block.  */
      tree bnd_var = chkp_make_static_bounds (decl);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else if (!DECL_SIZE (decl)
	   || (chkp_variable_size_type (TREE_TYPE (decl))
	       && (TREE_STATIC (decl)
		   || DECL_EXTERNAL (decl)
		   || TREE_PUBLIC (decl))))
    {
      /* Size unknown at compile time — compute it at runtime.  */
      gcc_assert (VAR_P (decl));
      bounds = chkp_generate_extern_var_bounds (decl);
    }
  else
    {
      /* Common case: [&decl, &decl + sizeof (decl)).  */
      tree lb = chkp_build_addr_expr (decl);
      bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
    }

  return bounds;
}
/* Compute and return bounds for constant string.  */
static tree
chkp_get_bounds_for_string_cst (tree cst)
{
  tree bounds;
  tree lb;
  tree size;

  gcc_assert (TREE_CODE (cst) == STRING_CST);

  /* Reuse bounds previously registered for this constant.  */
  bounds = chkp_get_registered_bounds (cst);

  if (bounds)
    return bounds;

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load bounds from a statically initialized bounds var in
	 the entry block.  */
      tree bnd_var = chkp_make_static_bounds (cst);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    {
      /* Otherwise build bounds as [&cst, &cst + length).  */
      lb = chkp_build_addr_expr (cst);
      size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
      bounds = chkp_make_bounds (lb, size, NULL, false);
    }

  bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);

  return bounds;
}
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   entry block.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  /* Zero (infinite) bounds never constrain the intersection, so
     just return the other operand in that case.  */
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple *stmt;
      tree bounds;

      seq = NULL;

      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
/* Return 1 if we are allowed to narrow bounds for addressed FIELD
   and 0 otherwise.  REF is reference to the field.  Narrowing needs
   a constant non-zero field size and constant offsets; trailing
   flexible arrays and variable-sized fields are excluded.  */
static bool
chkp_may_narrow_to_field (tree ref, tree field)
{
  return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
    && tree_to_uhwi (DECL_SIZE (field)) != 0
    && !(flag_chkp_flexible_struct_trailing_arrays
	 && array_at_struct_end_p (ref))
    && (!DECL_FIELD_OFFSET (field)
	|| TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
    && (!DECL_FIELD_BIT_OFFSET (field)
	|| TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
    && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
    && !chkp_variable_size_type (TREE_TYPE (field));
}
/* Return 1 if bounds for FIELD should be narrowed to
   field's own size.  REF is reference to the field.  */
static bool
chkp_narrow_bounds_for_field (tree ref, tree field)
{
  HOST_WIDE_INT offs;
  HOST_WIDE_INT bit_offs;

  if (!chkp_may_narrow_to_field (ref, field))
    return false;

  /* Access to compiler generated fields should not cause
     bounds narrowing.  */
  if (DECL_ARTIFICIAL (field))
    return false;

  offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
  bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));

  /* Narrow only when enabled, and never for the first field at
     offset 0 unless -fchkp-first-field-has-own-bounds says so.  */
  return (flag_chkp_narrow_bounds
	  && (flag_chkp_first_field_has_own_bounds
	      || offs
	      || bit_offs));
}
3364 /* Perform narrowing for BOUNDS of an INNER reference. Shift boundary
3365 by OFFSET bytes and limit to SIZE bytes. Newly created statements are
3366 added to ITER. */
3368 static tree
3369 chkp_narrow_size_and_offset (tree bounds, tree inner, tree offset,
3370 tree size, gimple_stmt_iterator *iter)
3372 tree addr = chkp_build_addr_expr (unshare_expr (inner));
3373 tree t = TREE_TYPE (addr);
3375 gimple *stmt = gimple_build_assign (NULL_TREE, addr);
3376 addr = make_temp_ssa_name (t, stmt, CHKP_BOUND_TMP_NAME);
3377 gimple_assign_set_lhs (stmt, addr);
3378 gsi_insert_seq_before (iter, stmt, GSI_SAME_STMT);
3380 stmt = gimple_build_assign (NULL_TREE, POINTER_PLUS_EXPR, addr, offset);
3381 tree shifted = make_temp_ssa_name (t, stmt, CHKP_BOUND_TMP_NAME);
3382 gimple_assign_set_lhs (stmt, shifted);
3383 gsi_insert_seq_before (iter, stmt, GSI_SAME_STMT);
3385 tree bounds2 = chkp_make_bounds (shifted, size, iter, false);
3387 return chkp_intersect_bounds (bounds, bounds2, iter);
3390 /* Perform narrowing for BOUNDS using bounds computed for field
3391 access COMPONENT. ITER meaning is the same as for
3392 chkp_intersect_bounds. */
3394 static tree
3395 chkp_narrow_bounds_to_field (tree bounds, tree component,
3396 gimple_stmt_iterator *iter)
3398 tree field = TREE_OPERAND (component, 1);
3399 tree size = DECL_SIZE_UNIT (field);
3400 tree field_ptr = chkp_build_addr_expr (component);
3401 tree field_bounds;
3403 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3405 return chkp_intersect_bounds (field_bounds, bounds, iter);
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE output parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS output parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR
	 || TREE_CODE (var) == BIT_FIELD_REF)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all node to nodes vector
     in reversed order.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = ((TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)))
	       || TREE_CODE (node) == BIT_FIELD_REF);
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      /* No indirection: the base must be a decl-like object whose
	 address can be taken directly.  */
      gcc_assert (VAR_P (var)
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);

      /* For hard register cases chkp_build_addr_expr returns INTEGER_CST
	 and later on chkp_find_bounds will fail to find proper bounds.
	 In order to avoid that, we find/create bounds right away using
	 the var itself.  */
      if (VAR_P (var) && DECL_HARD_REGISTER (var))
	*bounds = chkp_make_addressed_object_bounds (var, iter);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
     bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  /* Array indexing is never provably in bounds here.  */
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (var,
					       TREE_OPERAND (last_comp, 1))))
	    {
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (var, field))
	    comp_to_narrow = var;
	  last_comp = var;

	  /* When narrowing to the innermost array, narrow eagerly at
	     each array-typed field and forget the pending component.  */
	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == BIT_FIELD_REF)
	{
	  if (flag_chkp_narrow_bounds && bounds)
	    {
	      tree offset, size;
	      chkp_parse_bit_field_ref (var, UNKNOWN_LOCATION, &offset, &size);
	      *bounds
		= chkp_narrow_size_and_offset (*bounds, TREE_OPERAND (var, 0),
					       offset, size, iter);
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  /* Apply the narrowing chosen by the loop above (if any).  */
  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  /* Fall back to the outermost object's bounds when nothing narrower
     was computed but innermost bounds were requested.  */
  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
3571 /* Parse BIT_FIELD_REF to a NODE for a given location LOC. Return OFFSET
3572 and SIZE in bytes. */
3574 static
3575 void chkp_parse_bit_field_ref (tree node, location_t loc, tree *offset,
3576 tree *size)
3578 tree bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3579 tree offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3580 tree rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3581 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3583 tree s = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3584 s = size_binop_loc (loc, PLUS_EXPR, s, rem);
3585 s = size_binop_loc (loc, CEIL_DIV_EXPR, s, bpu);
3586 s = fold_convert (size_type_node, s);
3588 *offset = offs;
3589 *size = s;
/* Compute and return bounds for address of OBJ.  Newly generated
   statements (if any) are inserted via ITER.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  /* Reuse bounds previously registered for this address.  */
  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Let the reference parser compute (possibly narrowed)
	   bounds for the accessed component.  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Bounds of a complex part are the bounds of the whole object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  /* Cache the result so later queries for OBJ's address reuse it.  */
  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3661 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3662 to compute bounds if required. Computed bounds should be available at
3663 position pointed by ITER.
3665 If PTR_SRC is NULL_TREE then pointer definition is identified.
3667 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3668 PTR. If PTR is a any memory reference then ITER points to a statement
3669 after which bndldx will be inserterd. In both cases ITER will be updated
3670 to point to the inserted bndldx statement. */
3672 static tree
3673 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3675 tree addr = NULL_TREE;
3676 tree bounds = NULL_TREE;
3678 if (!ptr_src)
3679 ptr_src = ptr;
3681 bounds = chkp_get_registered_bounds (ptr_src);
3683 if (bounds)
3684 return bounds;
3686 switch (TREE_CODE (ptr_src))
3688 case MEM_REF:
3689 case VAR_DECL:
3690 if (BOUNDED_P (ptr_src))
3691 if (VAR_P (ptr) && DECL_REGISTER (ptr))
3692 bounds = chkp_get_zero_bounds ();
3693 else
3695 addr = chkp_build_addr_expr (ptr_src);
3696 bounds = chkp_build_bndldx (addr, ptr, iter);
3698 else
3699 bounds = chkp_get_nonpointer_load_bounds ();
3700 break;
3702 case ARRAY_REF:
3703 case COMPONENT_REF:
3704 addr = get_base_address (ptr_src);
3705 if (VAR_P (addr) && DECL_HARD_REGISTER (addr))
3707 bounds = chkp_get_zero_bounds ();
3708 break;
3710 if (DECL_P (addr)
3711 || TREE_CODE (addr) == MEM_REF
3712 || TREE_CODE (addr) == TARGET_MEM_REF)
3714 if (BOUNDED_P (ptr_src))
3715 if (VAR_P (ptr) && DECL_REGISTER (ptr))
3716 bounds = chkp_get_zero_bounds ();
3717 else
3719 addr = chkp_build_addr_expr (ptr_src);
3720 bounds = chkp_build_bndldx (addr, ptr, iter);
3722 else
3723 bounds = chkp_get_nonpointer_load_bounds ();
3725 else
3727 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3728 bounds = chkp_find_bounds (addr, iter);
3730 break;
3732 case PARM_DECL:
3733 /* Handled above but failed. */
3734 bounds = chkp_get_invalid_op_bounds ();
3735 break;
3737 case TARGET_MEM_REF:
3738 addr = chkp_build_addr_expr (ptr_src);
3739 bounds = chkp_build_bndldx (addr, ptr, iter);
3740 break;
3742 case SSA_NAME:
3743 bounds = chkp_get_registered_bounds (ptr_src);
3744 if (!bounds)
3746 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3747 gphi_iterator phi_iter;
3749 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3751 gcc_assert (bounds);
3753 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3755 unsigned i;
3757 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3759 tree arg = gimple_phi_arg_def (def_phi, i);
3760 tree arg_bnd;
3761 gphi *phi_bnd;
3763 arg_bnd = chkp_find_bounds (arg, NULL);
3765 /* chkp_get_bounds_by_definition created new phi
3766 statement and phi_iter points to it.
3768 Previous call to chkp_find_bounds could create
3769 new basic block and therefore change phi statement
3770 phi_iter points to. */
3771 phi_bnd = phi_iter.phi ();
3773 add_phi_arg (phi_bnd, arg_bnd,
3774 gimple_phi_arg_edge (def_phi, i),
3775 UNKNOWN_LOCATION);
3778 /* If all bound phi nodes have their arg computed
3779 then we may finish its computation. See
3780 chkp_finish_incomplete_bounds for more details. */
3781 if (chkp_may_finish_incomplete_bounds ())
3782 chkp_finish_incomplete_bounds ();
3785 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3786 || chkp_incomplete_bounds (bounds));
3788 break;
3790 case ADDR_EXPR:
3791 case WITH_SIZE_EXPR:
3792 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3793 break;
3795 case INTEGER_CST:
3796 case COMPLEX_CST:
3797 case VECTOR_CST:
3798 if (integer_zerop (ptr_src))
3799 bounds = chkp_get_none_bounds ();
3800 else
3801 bounds = chkp_get_invalid_op_bounds ();
3802 break;
3804 default:
3805 if (dump_file && (dump_flags & TDF_DETAILS))
3807 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3808 get_tree_code_name (TREE_CODE (ptr_src)));
3809 print_node (dump_file, "", ptr_src, 0);
3811 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3812 get_tree_code_name (TREE_CODE (ptr_src)));
3815 if (!bounds)
3817 if (dump_file && (dump_flags & TDF_DETAILS))
3819 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3820 print_node (dump_file, "", ptr_src, 0);
3822 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3825 return bounds;
3828 /* Normal case for bounds search without forced narrowing. */
3829 static tree
3830 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3832 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3835 /* Search bounds for pointer PTR loaded from PTR_SRC
3836 by statement *ITER points to. */
3837 static tree
3838 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3840 return chkp_find_bounds_1 (ptr, ptr_src, iter);
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    /* A pointer itself: hand it straight to the handler.  */
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  /* Recurse into each initialized field that contains pointers.  */
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (field && chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	/* Field-by-field copy: recurse into pointer-bearing fields.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  /* A [lo .. hi] range initializer: apply VAL to each
		     element in the range.  */
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  /* An explicit index restarts the running position;
		     otherwise CUR continues from the previous element.  */
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		   get_tree_code_name (TREE_CODE (type)));
}
3942 /* Add code to copy bounds for assignment of RHS to LHS.
3943 ARG is an iterator pointing ne code position. */
3944 static void
3945 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3947 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3948 tree bounds = chkp_find_bounds (rhs, iter);
3949 tree addr = chkp_build_addr_expr(lhs);
3951 chkp_build_bndstx (addr, rhs, bounds, iter);
/* Emit static bound initializers and size vars.  Called once per
   translation unit at the end of compilation.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* Flush into a constructor once the statement budget for a
	   single static ctor is exhausted.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  /* Emit any remaining pointer-bounds initialization statements.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	/* A static bounds var is initialized with the address of the
	   object whose bounds it holds.  */
	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  /* Release per-compilation maps.  */
  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
4022 /* An instrumentation function which is called for each statement
4023 having memory access we want to instrument. It inserts check
4024 code and bounds copy code.
4026 ITER points to statement to instrument.
4028 NODE holds memory access in statement to check.
4030 LOC holds the location information for statement.
4032 DIRFLAGS determines whether access is read or write.
4034 ACCESS_OFFS should be added to address used in NODE
4035 before check.
4037 ACCESS_SIZE holds size of checked access.
4039 SAFE indicates if NODE access is safe and should not be
4040 checked. */
4041 static void
4042 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
4043 location_t loc, tree dirflag,
4044 tree access_offs, tree access_size,
4045 bool safe)
4047 tree node_type = TREE_TYPE (node);
4048 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
4049 tree addr_first = NULL_TREE; /* address of the first accessed byte */
4050 tree addr_last = NULL_TREE; /* address of the last accessed byte */
4051 tree ptr = NULL_TREE; /* a pointer used for dereference */
4052 tree bounds = NULL_TREE;
4053 bool reg_store = false;
4055 /* We do not need instrumentation for clobbers. */
4056 if (dirflag == integer_one_node
4057 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
4058 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
4059 return;
4061 switch (TREE_CODE (node))
4063 case ARRAY_REF:
4064 case COMPONENT_REF:
4066 bool bitfield;
4067 tree elt;
4069 if (safe)
4071 /* We are not going to generate any checks, so do not
4072 generate bounds as well. */
4073 addr_first = chkp_build_addr_expr (node);
4074 break;
4077 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
4078 &bitfield, &bounds, iter, false);
4080 /* Break if there is no dereference and operation is safe. */
4082 if (bitfield)
4084 tree field = TREE_OPERAND (node, 1);
4086 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
4087 size = DECL_SIZE_UNIT (field);
4089 if (elt)
4090 elt = chkp_build_addr_expr (elt);
4091 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
4092 addr_first = fold_build_pointer_plus_loc (loc,
4093 addr_first,
4094 byte_position (field));
4096 else
4097 addr_first = chkp_build_addr_expr (node);
4099 break;
4101 case INDIRECT_REF:
4102 ptr = TREE_OPERAND (node, 0);
4103 addr_first = ptr;
4104 break;
4106 case MEM_REF:
4107 ptr = TREE_OPERAND (node, 0);
4108 addr_first = chkp_build_addr_expr (node);
4109 break;
4111 case TARGET_MEM_REF:
4112 ptr = TMR_BASE (node);
4113 addr_first = chkp_build_addr_expr (node);
4114 break;
4116 case ARRAY_RANGE_REF:
4117 printf("ARRAY_RANGE_REF\n");
4118 debug_gimple_stmt(gsi_stmt(*iter));
4119 debug_tree(node);
4120 gcc_unreachable ();
4121 break;
4123 case BIT_FIELD_REF:
4125 tree offset, size;
4127 gcc_assert (!access_offs);
4128 gcc_assert (!access_size);
4130 chkp_parse_bit_field_ref (node, loc, &offset, &size);
4132 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
4133 dirflag, offset, size, safe);
4134 return;
4136 break;
4138 case VAR_DECL:
4139 case RESULT_DECL:
4140 case PARM_DECL:
4141 if (dirflag != integer_one_node
4142 || DECL_REGISTER (node))
4143 return;
4145 safe = true;
4146 addr_first = chkp_build_addr_expr (node);
4147 break;
4149 default:
4150 return;
4153 /* If addr_last was not computed then use (addr_first + size - 1)
4154 expression to compute it. */
4155 if (!addr_last)
4157 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
4158 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
4161 /* Shift both first_addr and last_addr by access_offs if specified. */
4162 if (access_offs)
4164 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
4165 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
4168 if (dirflag == integer_one_node)
4170 tree base = get_base_address (node);
4171 if (VAR_P (base) && DECL_HARD_REGISTER (base))
4172 reg_store = true;
4175 /* Generate bndcl/bndcu checks if memory access is not safe. */
4176 if (!safe)
4178 gimple_stmt_iterator stmt_iter = *iter;
4180 if (!bounds)
4181 bounds = chkp_find_bounds (ptr, iter);
4183 chkp_check_mem_access (addr_first, addr_last, bounds,
4184 stmt_iter, loc, dirflag);
4187 /* We need to store bounds in case pointer is stored. */
4188 if (dirflag == integer_one_node
4189 && !reg_store
4190 && chkp_type_has_pointer (node_type)
4191 && flag_chkp_store_bounds)
4193 gimple *stmt = gsi_stmt (*iter);
4194 tree rhs1 = gimple_assign_rhs1 (stmt);
4195 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4197 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
4198 chkp_walk_pointer_assignments (node, rhs1, iter,
4199 chkp_copy_bounds_for_elem);
4200 else
4202 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
4203 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.  */
void
chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.
     The walk above left ITER past ASSIGN; scan backwards over the
     newly inserted statements.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple *stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  /* Only chkp builtin calls are expected here.  */
	  gcc_assert (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDSTX)
		      || chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDLDX)
		      || chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET));

	  /* New call inherits count/frequency from the inlined edge.  */
	  new_edge = edge->caller->create_edge (callee,
						as_a <gcall *> (stmt),
						edge->count,
						edge->frequency);
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      gsi_prev (&iter);
    }
}
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Statements after a BB-ending statement are misplaced and
	   must be moved to the fallthru edge.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* Move every trailing statement onto the fallthru edge.  */
	    while (!gsi_end_p (next))
	      {
		gimple *next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
4305 /* Walker callback for chkp_replace_function_pointers. Replaces
4306 function pointer in the specified operand with pointer to the
4307 instrumented function version. */
4308 static tree
4309 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4310 void *data ATTRIBUTE_UNUSED)
4312 if (TREE_CODE (*op) == FUNCTION_DECL
4313 && chkp_instrumentable_p (*op)
4314 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4315 /* For builtins we replace pointers only for selected
4316 function and functions having definitions. */
4317 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4318 && (chkp_instrument_normal_builtin (*op)
4319 || gimple_has_body_p (*op)))))
4321 struct cgraph_node *node = cgraph_node::get_create (*op);
4322 struct cgraph_node *clone = NULL;
4324 if (!node->instrumentation_clone)
4325 clone = chkp_maybe_create_clone (*op);
4327 if (clone)
4328 *op = clone->decl;
4329 *walk_subtrees = 0;
4332 return NULL;
4335 /* This function searches for function pointers in statement
4336 pointed by GSI and replaces them with pointers to instrumented
4337 function versions. */
4338 static void
4339 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4341 gimple *stmt = gsi_stmt (*gsi);
4342 /* For calls we want to walk call args only. */
4343 if (gimple_code (stmt) == GIMPLE_CALL)
4345 unsigned i;
4346 for (i = 0; i < gimple_call_num_args (stmt); i++)
4347 walk_tree (gimple_call_arg_ptr (stmt, i),
4348 chkp_replace_function_pointer, NULL, NULL);
4350 else
4351 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
/* This function instruments all statements working with memory,
   calls and rets.

   It also removes excess statements from static initializers.  */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* Inside a generated checker ctor all accesses are safe.  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      /* Remember the successor: instrumentation may split BB.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple *s = gsi_stmt (i);

	  /* Skip statement marked to not be instrumented.  */
	  if (chkp_marked_stmt_p (s))
	    {
	      gsi_next (&i);
	      continue;
	    }

	  chkp_replace_function_pointers (&i);

	  switch (gimple_code (s))
	    {
	    case GIMPLE_ASSIGN:
	      /* Instrument the store (lhs) and the load(s) (rhs).  */
	      chkp_process_stmt (&i, gimple_assign_lhs (s),
				 gimple_location (s), integer_one_node,
				 NULL_TREE, NULL_TREE, safe);
	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
				 gimple_location (s), integer_zero_node,
				 NULL_TREE, NULL_TREE, safe);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
				   gimple_location (s), integer_zero_node,
				   NULL_TREE, NULL_TREE, safe);
	      break;

	    case GIMPLE_RETURN:
	      {
		greturn *r = as_a <greturn *> (s);
		if (gimple_return_retval (r) != NULL_TREE)
		  {
		    chkp_process_stmt (&i, gimple_return_retval (r),
				       gimple_location (r),
				       integer_zero_node,
				       NULL_TREE, NULL_TREE, safe);

		    /* Additionally we need to add bounds
		       to return statement.  */
		    chkp_add_bounds_to_ret_stmt (&i);
		  }
	      }
	      break;

	    case GIMPLE_CALL:
	      chkp_add_bounds_to_call_stmt (&i);
	      break;

	    default:
	      ;
	    }

	  gsi_next (&i);

	  /* We do not need any actual pointer stores in checker
	     static initializer.  */
	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
	      && gimple_code (s) == GIMPLE_ASSIGN
	      && gimple_store_p (s))
	    {
	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
	      gsi_remove (&del_iter, true);
	      unlink_stmt_vdef (s);
	      release_defs(s);
	    }
	}
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
	{
	  if (BOUNDED_P (arg))
	    {
	      tree bounds = chkp_get_next_bounds_parm (arg);
	      tree def_ptr = ssa_default_def (cfun, arg);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      chkp_build_bndstx (chkp_build_addr_expr (arg),
				 def_ptr ? def_ptr : arg,
				 bounds, &iter);

	      /* Skip bounds arg.  */
	      arg = TREE_CHAIN (arg);
	    }
	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
	    {
	      /* Aggregate param: store bounds for each contained
		 pointer slot.  */
	      tree orig_arg = arg;
	      bitmap slots = BITMAP_ALLOC (NULL);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      bitmap_iterator bi;
	      unsigned bnd_no;

	      chkp_find_bound_slots (TREE_TYPE (arg), slots);

	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
		{
		  tree bounds = chkp_get_next_bounds_parm (arg);
		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
		  tree addr = chkp_build_addr_expr (orig_arg);
		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
				     build_int_cst (ptr_type_node, offs));
		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
				     bounds, &iter);

		  /* Each slot consumes one bounds parameter.  */
		  arg = DECL_CHAIN (arg);
		}
	      BITMAP_FREE (slots);
	    }
	}
}
4490 /* Find init/null/copy_ptr_bounds calls and replace them
4491 with assignments. It should allow better code
4492 optimization. */
4494 static void
4495 chkp_remove_useless_builtins ()
4497 basic_block bb;
4498 gimple_stmt_iterator gsi;
4500 FOR_EACH_BB_FN (bb, cfun)
4502 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4504 gimple *stmt = gsi_stmt (gsi);
4505 tree fndecl;
4506 enum built_in_function fcode;
4508 /* Find builtins returning first arg and replace
4509 them with assignments. */
4510 if (gimple_code (stmt) == GIMPLE_CALL
4511 && (fndecl = gimple_call_fndecl (stmt))
4512 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4513 && (fcode = DECL_FUNCTION_CODE (fndecl))
4514 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4515 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4516 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4517 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4519 tree res = gimple_call_arg (stmt, 0);
4520 update_call_from_tree (&gsi, res);
4521 stmt = gsi_stmt (gsi);
4522 update_stmt (stmt);
/* Initialize pass.  Resets all per-function Pointer Bounds Checker
   state before a function is instrumented.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear instrumentation marks on every statement.  Iteration starts
     at the successor of the entry block.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  /* (Re)create per-function containers.  The maps deleted here are
     intentionally not freed in chkp_fini — they survive until the next
     invocation of this pass, so the previous function's instances are
     reclaimed now.  */
  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  /* Reset cached per-function trees, blocks and temporaries.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  /* Unsigned integer type wide enough to hold a pointer.  */
  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
/* Finalize instrumentation pass.  Releases per-function state
   created by chkp_init.  */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  /* Free the per-function containers.  NOTE(review): chkp_reg_bounds,
     chkp_bound_vars and chkp_bounds_map are deliberately not deleted
     here; they are deleted and reallocated by the next chkp_init call
     — confirm this asymmetry is intended (e.g. for late queries).  */
  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);

  /* Drop cached references so nothing dangles between functions.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
}
/* Main instrumentation pass function.  Drives the full
   instrumentation of the current function and always
   returns 0 (no extra TODO flags).  */
static unsigned int
chkp_execute (void)
{
  /* Set up per-function state: bounds maps, dominance info, etc.  */
  chkp_init ();

  /* Insert bounds creation, propagation and check code.  */
  chkp_instrument_function ();

  /* Turn chkp builtins that just return their first argument into
     plain assignments so later passes can optimize them.  */
  chkp_remove_useless_builtins ();

  /* Mark the function as instrumented to avoid reprocessing.  */
  chkp_function_mark_instrumented (cfun->decl);

  chkp_fix_cfg ();

  /* Release per-function state.  */
  chkp_fini ();

  return 0;
}
4614 /* Instrumentation pass gate. */
4615 static bool
4616 chkp_gate (void)
4618 cgraph_node *node = cgraph_node::get (cfun->decl);
4619 return ((node != NULL
4620 && node->instrumentation_clone)
4621 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
namespace {

/* Pass descriptor for the chkp instrumentation pass.  Requires SSA
   and CFG; verifies the IL and updates SSA form afterwards.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* Thin opt_pass wrapper dispatching to chkp_gate/chkp_execute.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }
}; // class pass_chkp

} // anon namespace
/* Factory used by the pass manager to create an instance of the
   chkp pass.  Caller owns the returned pass object.  */
gimple_opt_pass *
make_pass_chkp (gcc::context *ctxt)
{
  return new pass_chkp (ctxt);
}
4673 #include "gt-tree-chkp.h"