1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2022 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "tree-pass.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-iterator.h"
33 #include "gimple-fold.h"
36 #include "stringpool.h"
39 #include "gimplify-me.h"
41 struct object_size_info
46 bitmap visited
, reexamine
, unknowns
;
48 unsigned int *stack
, *tos
;
51 struct GTY(()) object_size
53 /* Estimate of bytes till the end of the object. */
55 /* Estimate of the size of the whole object. */
59 static tree
compute_object_offset (const_tree
, const_tree
);
60 static bool addr_object_size (struct object_size_info
*,
61 const_tree
, int, tree
*, tree
*t
= NULL
);
62 static tree
alloc_object_size (const gcall
*, int);
63 static tree
pass_through_call (const gcall
*);
64 static void collect_object_sizes_for (struct object_size_info
*, tree
);
65 static void expr_object_size (struct object_size_info
*, tree
, tree
);
66 static bool merge_object_sizes (struct object_size_info
*, tree
, tree
);
67 static bool plus_stmt_object_size (struct object_size_info
*, tree
, gimple
*);
68 static bool cond_expr_object_size (struct object_size_info
*, tree
, gimple
*);
69 static void init_offset_limit (void);
70 static void check_for_plus_in_loops (struct object_size_info
*, tree
);
71 static void check_for_plus_in_loops_1 (struct object_size_info
*, tree
,
74 /* object_sizes[0] is upper bound for the object size and number of bytes till
75 the end of the object.
76 object_sizes[1] is upper bound for the object size and number of bytes till
77 the end of the subobject (innermost array or field with address taken).
78 object_sizes[2] is lower bound for the object size and number of bytes till
79 the end of the object and object_sizes[3] lower bound for subobject.
81 For static object sizes, the object size and the bytes till the end of the
82 object are both INTEGER_CST. In the dynamic case, they are finally either a
83 gimple variable or an INTEGER_CST. */
84 static vec
<object_size
> object_sizes
[OST_END
];
86 /* Bitmaps what object sizes have been computed already. */
87 static bitmap computed
[OST_END
];
89 /* Maximum value of offset we consider to be addition. */
90 static unsigned HOST_WIDE_INT offset_limit
;
92 /* Return true if VAL represents an initial size for OBJECT_SIZE_TYPE. */
95 size_initval_p (tree val
, int object_size_type
)
97 return ((object_size_type
& OST_MINIMUM
)
98 ? integer_all_onesp (val
) : integer_zerop (val
));
101 /* Return true if VAL represents an unknown size for OBJECT_SIZE_TYPE. */
104 size_unknown_p (tree val
, int object_size_type
)
106 return ((object_size_type
& OST_MINIMUM
)
107 ? integer_zerop (val
) : integer_all_onesp (val
));
110 /* Return true if VAL represents a valid size for OBJECT_SIZE_TYPE. */
113 size_valid_p (tree val
, int object_size_type
)
115 return ((object_size_type
& OST_DYNAMIC
) || TREE_CODE (val
) == INTEGER_CST
);
118 /* Return true if VAL is usable as an object size in the object_sizes
122 size_usable_p (tree val
)
124 return TREE_CODE (val
) == SSA_NAME
|| TREE_CODE (val
) == INTEGER_CST
;
127 /* Return a tree with initial value for OBJECT_SIZE_TYPE. */
130 size_initval (int object_size_type
)
132 return ((object_size_type
& OST_MINIMUM
)
133 ? TYPE_MAX_VALUE (sizetype
) : size_zero_node
);
136 /* Return a tree with unknown value for OBJECT_SIZE_TYPE. */
139 size_unknown (int object_size_type
)
141 return ((object_size_type
& OST_MINIMUM
)
142 ? size_zero_node
: TYPE_MAX_VALUE (sizetype
));
145 /* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names. */
148 object_sizes_grow (int object_size_type
)
150 if (num_ssa_names
> object_sizes
[object_size_type
].length ())
151 object_sizes
[object_size_type
].safe_grow (num_ssa_names
, true);
154 /* Release object_sizes[OBJECT_SIZE_TYPE]. */
157 object_sizes_release (int object_size_type
)
159 object_sizes
[object_size_type
].release ();
162 /* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown. */
165 object_sizes_unknown_p (int object_size_type
, unsigned varno
)
167 return size_unknown_p (object_sizes
[object_size_type
][varno
].size
,
171 /* Return the raw size expression for VARNO corresponding to OSI. This returns
172 the TREE_VEC as is and should only be used during gimplification. */
174 static inline object_size
175 object_sizes_get_raw (struct object_size_info
*osi
, unsigned varno
)
177 gcc_assert (osi
->pass
!= 0);
178 return object_sizes
[osi
->object_size_type
][varno
];
181 /* Return a size tree for VARNO corresponding to OSI. If WHOLE is true, return
182 the whole object size. Use this for building size expressions based on size
186 object_sizes_get (struct object_size_info
*osi
, unsigned varno
,
190 int object_size_type
= osi
->object_size_type
;
193 ret
= object_sizes
[object_size_type
][varno
].wholesize
;
195 ret
= object_sizes
[object_size_type
][varno
].size
;
197 if (object_size_type
& OST_DYNAMIC
)
199 if (TREE_CODE (ret
) == MODIFY_EXPR
)
200 return TREE_OPERAND (ret
, 0);
201 else if (TREE_CODE (ret
) == TREE_VEC
)
202 return TREE_VEC_ELT (ret
, TREE_VEC_LENGTH (ret
) - 1);
204 gcc_checking_assert (size_usable_p (ret
));
210 /* Set size for VARNO corresponding to OSI to VAL. */
213 object_sizes_initialize (struct object_size_info
*osi
, unsigned varno
,
214 tree val
, tree wholeval
)
216 int object_size_type
= osi
->object_size_type
;
218 object_sizes
[object_size_type
][varno
].size
= val
;
219 object_sizes
[object_size_type
][varno
].wholesize
= wholeval
;
222 /* Return a MODIFY_EXPR for cases where SSA and EXPR have the same type. The
223 TREE_VEC is returned only in case of PHI nodes. */
226 bundle_sizes (tree name
, tree expr
)
228 gcc_checking_assert (TREE_TYPE (name
) == sizetype
);
230 if (TREE_CODE (expr
) == TREE_VEC
)
232 TREE_VEC_ELT (expr
, TREE_VEC_LENGTH (expr
) - 1) = name
;
236 gcc_checking_assert (types_compatible_p (TREE_TYPE (expr
), sizetype
));
237 return build2 (MODIFY_EXPR
, sizetype
, name
, expr
);
240 /* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
241 maximum. For static sizes, each element of TREE_VEC is always INTEGER_CST
242 throughout the computation. For dynamic sizes, each element may either be a
243 gimple variable, a MODIFY_EXPR or a TREE_VEC. The MODIFY_EXPR is for
244 expressions that need to be gimplified. TREE_VECs are special, they're
245 emitted only for GIMPLE_PHI and the PHI result variable is the last element
249 object_sizes_set (struct object_size_info
*osi
, unsigned varno
, tree val
,
252 int object_size_type
= osi
->object_size_type
;
253 object_size osize
= object_sizes
[object_size_type
][varno
];
256 tree oldval
= osize
.size
;
257 tree old_wholeval
= osize
.wholesize
;
259 if (object_size_type
& OST_DYNAMIC
)
261 if (bitmap_bit_p (osi
->reexamine
, varno
))
263 if (size_unknown_p (val
, object_size_type
))
265 oldval
= object_sizes_get (osi
, varno
);
266 old_wholeval
= object_sizes_get (osi
, varno
, true);
267 bitmap_set_bit (osi
->unknowns
, SSA_NAME_VERSION (oldval
));
268 bitmap_set_bit (osi
->unknowns
, SSA_NAME_VERSION (old_wholeval
));
269 bitmap_clear_bit (osi
->reexamine
, varno
);
273 val
= bundle_sizes (oldval
, val
);
274 wholeval
= bundle_sizes (old_wholeval
, wholeval
);
279 gcc_checking_assert (size_initval_p (oldval
, object_size_type
));
280 gcc_checking_assert (size_initval_p (old_wholeval
,
282 /* For dynamic object sizes, all object sizes that are not gimple
283 variables will need to be gimplified. */
284 if (wholeval
!= val
&& !size_usable_p (wholeval
))
286 bitmap_set_bit (osi
->reexamine
, varno
);
287 wholeval
= bundle_sizes (make_ssa_name (sizetype
), wholeval
);
289 if (!size_usable_p (val
))
291 bitmap_set_bit (osi
->reexamine
, varno
);
292 tree newval
= bundle_sizes (make_ssa_name (sizetype
), val
);
297 /* If the new value is a temporary variable, mark it for
299 else if (TREE_CODE (val
) == SSA_NAME
&& !SSA_NAME_DEF_STMT (val
))
300 bitmap_set_bit (osi
->reexamine
, varno
);
305 enum tree_code code
= (object_size_type
& OST_MINIMUM
306 ? MIN_EXPR
: MAX_EXPR
);
308 val
= size_binop (code
, val
, oldval
);
309 wholeval
= size_binop (code
, wholeval
, old_wholeval
);
310 changed
= (tree_int_cst_compare (val
, oldval
) != 0
311 || tree_int_cst_compare (old_wholeval
, wholeval
) != 0);
314 object_sizes
[object_size_type
][varno
].size
= val
;
315 object_sizes
[object_size_type
][varno
].wholesize
= wholeval
;
320 /* Set temporary SSA names for object size and whole size to resolve dependency
321 loops in dynamic size computation. */
324 object_sizes_set_temp (struct object_size_info
*osi
, unsigned varno
)
326 tree val
= object_sizes_get (osi
, varno
);
328 if (size_initval_p (val
, osi
->object_size_type
))
329 object_sizes_set (osi
, varno
,
330 make_ssa_name (sizetype
),
331 make_ssa_name (sizetype
));
334 /* Initialize OFFSET_LIMIT variable. */
336 init_offset_limit (void)
338 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype
)))
339 offset_limit
= tree_to_uhwi (TYPE_MAX_VALUE (sizetype
));
345 /* Bytes at end of the object with SZ from offset OFFSET. If WHOLESIZE is not
346 NULL_TREE, use it to get the net offset of the pointer, which should always
347 be positive and hence, be within OFFSET_LIMIT for valid offsets. */
350 size_for_offset (tree sz
, tree offset
, tree wholesize
= NULL_TREE
)
352 gcc_checking_assert (types_compatible_p (TREE_TYPE (sz
), sizetype
));
354 /* For negative offsets, if we have a distinct WHOLESIZE, use it to get a net
355 offset from the whole object. */
356 if (wholesize
&& wholesize
!= sz
357 && (TREE_CODE (sz
) != INTEGER_CST
358 || TREE_CODE (wholesize
) != INTEGER_CST
359 || tree_int_cst_compare (sz
, wholesize
)))
361 gcc_checking_assert (types_compatible_p (TREE_TYPE (wholesize
),
364 /* Restructure SZ - OFFSET as
365 WHOLESIZE - (WHOLESIZE + OFFSET - SZ) so that the offset part, i.e.
366 WHOLESIZE + OFFSET - SZ is only allowed to be positive. */
367 tree tmp
= size_binop (MAX_EXPR
, wholesize
, sz
);
368 offset
= fold_build2 (PLUS_EXPR
, sizetype
, tmp
, offset
);
369 offset
= fold_build2 (MINUS_EXPR
, sizetype
, offset
, sz
);
373 /* Safe to convert now, since a valid net offset should be non-negative. */
374 if (!useless_type_conversion_p (sizetype
, TREE_TYPE (offset
)))
375 offset
= fold_convert (sizetype
, offset
);
377 if (TREE_CODE (offset
) == INTEGER_CST
)
379 if (integer_zerop (offset
))
382 /* Negative or too large offset even after adjustment, cannot be within
383 bounds of an object. */
384 if (compare_tree_int (offset
, offset_limit
) > 0)
385 return size_zero_node
;
388 return size_binop (MINUS_EXPR
, size_binop (MAX_EXPR
, sz
, offset
), offset
);
391 /* Compute offset of EXPR within VAR. Return error_mark_node
395 compute_object_offset (const_tree expr
, const_tree var
)
397 enum tree_code code
= PLUS_EXPR
;
401 return size_zero_node
;
403 switch (TREE_CODE (expr
))
406 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
407 if (base
== error_mark_node
)
410 t
= TREE_OPERAND (expr
, 1);
411 off
= size_binop (PLUS_EXPR
, DECL_FIELD_OFFSET (t
),
412 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t
))
418 case VIEW_CONVERT_EXPR
:
419 case NON_LVALUE_EXPR
:
420 return compute_object_offset (TREE_OPERAND (expr
, 0), var
);
423 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
424 if (base
== error_mark_node
)
427 off
= TYPE_SIZE_UNIT (TREE_TYPE (expr
));
431 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
432 if (base
== error_mark_node
)
435 t
= TREE_OPERAND (expr
, 1);
436 tree low_bound
, unit_size
;
437 low_bound
= array_ref_low_bound (CONST_CAST_TREE (expr
));
438 unit_size
= array_ref_element_size (CONST_CAST_TREE (expr
));
439 if (! integer_zerop (low_bound
))
440 t
= fold_build2 (MINUS_EXPR
, TREE_TYPE (t
), t
, low_bound
);
441 if (TREE_CODE (t
) == INTEGER_CST
&& tree_int_cst_sgn (t
) < 0)
444 t
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (t
), t
);
446 t
= fold_convert (sizetype
, t
);
447 off
= size_binop (MULT_EXPR
, unit_size
, t
);
451 gcc_assert (TREE_CODE (TREE_OPERAND (expr
, 0)) == ADDR_EXPR
);
452 return wide_int_to_tree (sizetype
, mem_ref_offset (expr
));
455 return error_mark_node
;
458 return size_binop (code
, base
, off
);
461 /* Returns the size of the object designated by DECL considering its
462 initializer if it either has one or if it would not affect its size,
463 otherwise the size of the object without the initializer when MIN
464 is true, else null. An object's initializer affects the object's
465 size if it's a struct type with a flexible array member. */
468 decl_init_size (tree decl
, bool min
)
470 tree size
= DECL_SIZE_UNIT (decl
);
471 tree type
= TREE_TYPE (decl
);
472 if (TREE_CODE (type
) != RECORD_TYPE
)
475 tree last
= last_field (type
);
479 tree last_type
= TREE_TYPE (last
);
480 if (TREE_CODE (last_type
) != ARRAY_TYPE
481 || TYPE_SIZE (last_type
))
484 /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
485 of the initializer and sometimes doesn't. */
486 size
= TYPE_SIZE_UNIT (type
);
487 tree ref
= build3 (COMPONENT_REF
, type
, decl
, last
, NULL_TREE
);
488 tree compsize
= component_ref_size (ref
);
490 return min
? size
: NULL_TREE
;
492 /* The size includes tail padding and initializer elements. */
493 tree pos
= byte_position (last
);
494 size
= fold_build2 (PLUS_EXPR
, TREE_TYPE (size
), pos
, compsize
);
498 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
499 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
500 If unknown, return size_unknown (object_size_type). */
503 addr_object_size (struct object_size_info
*osi
, const_tree ptr
,
504 int object_size_type
, tree
*psize
, tree
*pwholesize
)
506 tree pt_var
, pt_var_size
= NULL_TREE
, pt_var_wholesize
= NULL_TREE
;
507 tree var_size
, bytes
, wholebytes
;
509 gcc_assert (TREE_CODE (ptr
) == ADDR_EXPR
);
511 /* Set to unknown and overwrite just before returning if the size
512 could be determined. */
513 *psize
= size_unknown (object_size_type
);
515 *pwholesize
= size_unknown (object_size_type
);
517 pt_var
= TREE_OPERAND (ptr
, 0);
518 while (handled_component_p (pt_var
))
519 pt_var
= TREE_OPERAND (pt_var
, 0);
524 if (TREE_CODE (pt_var
) == MEM_REF
)
528 if (!osi
|| (object_size_type
& OST_SUBOBJECT
) != 0
529 || TREE_CODE (TREE_OPERAND (pt_var
, 0)) != SSA_NAME
)
531 compute_builtin_object_size (TREE_OPERAND (pt_var
, 0),
532 object_size_type
& ~OST_SUBOBJECT
, &sz
);
537 tree var
= TREE_OPERAND (pt_var
, 0);
539 collect_object_sizes_for (osi
, var
);
540 if (bitmap_bit_p (computed
[object_size_type
],
541 SSA_NAME_VERSION (var
)))
543 sz
= object_sizes_get (osi
, SSA_NAME_VERSION (var
));
544 wholesize
= object_sizes_get (osi
, SSA_NAME_VERSION (var
), true);
547 sz
= wholesize
= size_unknown (object_size_type
);
549 if (!size_unknown_p (sz
, object_size_type
))
550 sz
= size_for_offset (sz
, TREE_OPERAND (pt_var
, 1), wholesize
);
552 if (!size_unknown_p (sz
, object_size_type
)
553 && (TREE_CODE (sz
) != INTEGER_CST
554 || compare_tree_int (sz
, offset_limit
) < 0))
557 pt_var_wholesize
= wholesize
;
560 else if (DECL_P (pt_var
))
562 pt_var_size
= pt_var_wholesize
563 = decl_init_size (pt_var
, object_size_type
& OST_MINIMUM
);
567 else if (TREE_CODE (pt_var
) == STRING_CST
)
568 pt_var_size
= pt_var_wholesize
= TYPE_SIZE_UNIT (TREE_TYPE (pt_var
));
574 /* Validate the size determined above if it is a constant. */
575 if (TREE_CODE (pt_var_size
) == INTEGER_CST
576 && compare_tree_int (pt_var_size
, offset_limit
) >= 0)
580 if (pt_var
!= TREE_OPERAND (ptr
, 0))
584 if (object_size_type
& OST_SUBOBJECT
)
586 var
= TREE_OPERAND (ptr
, 0);
589 && TREE_CODE (var
) != BIT_FIELD_REF
590 && TREE_CODE (var
) != COMPONENT_REF
591 && TREE_CODE (var
) != ARRAY_REF
592 && TREE_CODE (var
) != ARRAY_RANGE_REF
593 && TREE_CODE (var
) != REALPART_EXPR
594 && TREE_CODE (var
) != IMAGPART_EXPR
)
595 var
= TREE_OPERAND (var
, 0);
596 if (var
!= pt_var
&& TREE_CODE (var
) == ARRAY_REF
)
597 var
= TREE_OPERAND (var
, 0);
598 if (! TYPE_SIZE_UNIT (TREE_TYPE (var
))
599 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var
)))
600 || (pt_var_size
&& TREE_CODE (pt_var_size
) == INTEGER_CST
601 && tree_int_cst_lt (pt_var_size
,
602 TYPE_SIZE_UNIT (TREE_TYPE (var
)))))
604 else if (var
!= pt_var
&& TREE_CODE (pt_var
) == MEM_REF
)
607 /* For &X->fld, compute object size if fld isn't a flexible array
609 bool is_flexible_array_mem_ref
= false;
610 while (v
&& v
!= pt_var
)
611 switch (TREE_CODE (v
))
614 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v
, 0))))
617 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v
, 0)));
618 if (domain
&& TYPE_MAX_VALUE (domain
))
624 v
= TREE_OPERAND (v
, 0);
631 if (TREE_CODE (TREE_TYPE (v
)) != ARRAY_TYPE
)
636 is_flexible_array_mem_ref
= array_at_struct_end_p (v
);
637 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
638 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
640 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
644 v
= TREE_OPERAND (v
, 0);
645 if (TREE_CODE (v
) == COMPONENT_REF
646 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
649 /* compute object size only if v is not a
650 flexible array member. */
651 if (!is_flexible_array_mem_ref
)
656 v
= TREE_OPERAND (v
, 0);
658 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
659 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
661 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
665 v
= TREE_OPERAND (v
, 0);
684 var_size
= TYPE_SIZE_UNIT (TREE_TYPE (var
));
685 if (!TREE_CONSTANT (var_size
))
686 var_size
= get_or_create_ssa_default_def (cfun
, var_size
);
690 else if (!pt_var_size
)
693 var_size
= pt_var_size
;
694 bytes
= compute_object_offset (TREE_OPERAND (ptr
, 0), var
);
695 if (bytes
!= error_mark_node
)
697 bytes
= size_for_offset (var_size
, bytes
);
698 if (var
!= pt_var
&& pt_var_size
&& TREE_CODE (pt_var
) == MEM_REF
)
700 tree bytes2
= compute_object_offset (TREE_OPERAND (ptr
, 0),
702 if (bytes2
!= error_mark_node
)
704 bytes2
= size_for_offset (pt_var_size
, bytes2
);
705 bytes
= size_binop (MIN_EXPR
, bytes
, bytes2
);
710 bytes
= size_unknown (object_size_type
);
713 = object_size_type
& OST_SUBOBJECT
? var_size
: pt_var_wholesize
;
715 else if (!pt_var_size
)
720 wholebytes
= pt_var_wholesize
;
723 if (!size_unknown_p (bytes
, object_size_type
)
724 && size_valid_p (bytes
, object_size_type
)
725 && !size_unknown_p (bytes
, object_size_type
)
726 && size_valid_p (wholebytes
, object_size_type
))
730 *pwholesize
= wholebytes
;
738 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
739 Handles calls to functions declared with attribute alloc_size.
740 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
741 If unknown, return size_unknown (object_size_type). */
744 alloc_object_size (const gcall
*call
, int object_size_type
)
746 gcc_assert (is_gimple_call (call
));
749 tree callfn
= gimple_call_fndecl (call
);
751 calltype
= TREE_TYPE (callfn
);
753 calltype
= gimple_call_fntype (call
);
756 return size_unknown (object_size_type
);
758 /* Set to positions of alloc_size arguments. */
759 int arg1
= -1, arg2
= -1;
760 tree alloc_size
= lookup_attribute ("alloc_size",
761 TYPE_ATTRIBUTES (calltype
));
762 if (alloc_size
&& TREE_VALUE (alloc_size
))
764 tree p
= TREE_VALUE (alloc_size
);
766 arg1
= TREE_INT_CST_LOW (TREE_VALUE (p
))-1;
768 arg2
= TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p
)))-1;
770 else if (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
)
771 && callfn
&& ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callfn
)))
774 /* Non-const arguments are OK here, let the caller handle constness. */
775 if (arg1
< 0 || arg1
>= (int) gimple_call_num_args (call
)
776 || arg2
>= (int) gimple_call_num_args (call
))
777 return size_unknown (object_size_type
);
779 tree bytes
= NULL_TREE
;
781 bytes
= size_binop (MULT_EXPR
,
782 fold_convert (sizetype
, gimple_call_arg (call
, arg1
)),
783 fold_convert (sizetype
, gimple_call_arg (call
, arg2
)));
785 bytes
= fold_convert (sizetype
, gimple_call_arg (call
, arg1
));
787 return bytes
? bytes
: size_unknown (object_size_type
);
791 /* If object size is propagated from one of function's arguments directly
792 to its return value, return that argument for GIMPLE_CALL statement CALL.
793 Otherwise return NULL. */
796 pass_through_call (const gcall
*call
)
798 unsigned rf
= gimple_call_return_flags (call
);
799 if (rf
& ERF_RETURNS_ARG
)
801 unsigned argnum
= rf
& ERF_RETURN_ARG_MASK
;
802 if (argnum
< gimple_call_num_args (call
))
803 return gimple_call_arg (call
, argnum
);
806 /* __builtin_assume_aligned is intentionally not marked RET1. */
807 if (gimple_call_builtin_p (call
, BUILT_IN_ASSUME_ALIGNED
))
808 return gimple_call_arg (call
, 0);
813 /* Emit PHI nodes for size expressions fo. */
816 emit_phi_nodes (gimple
*stmt
, tree size
, tree wholesize
)
819 gphi
*wholephi
= NULL
;
821 if (wholesize
!= size
)
823 phires
= TREE_VEC_ELT (wholesize
, TREE_VEC_LENGTH (wholesize
) - 1);
824 wholephi
= create_phi_node (phires
, gimple_bb (stmt
));
827 phires
= TREE_VEC_ELT (size
, TREE_VEC_LENGTH (size
) - 1);
828 gphi
*phi
= create_phi_node (phires
, gimple_bb (stmt
));
829 gphi
*obj_phi
= as_a
<gphi
*> (stmt
);
831 gcc_checking_assert (TREE_CODE (wholesize
) == TREE_VEC
);
832 gcc_checking_assert (TREE_CODE (size
) == TREE_VEC
);
834 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
836 gimple_seq seq
= NULL
;
837 tree wsz
= TREE_VEC_ELT (wholesize
, i
);
838 tree sz
= TREE_VEC_ELT (size
, i
);
840 /* If we built an expression, we will need to build statements
841 and insert them on the edge right away. */
842 if (TREE_CODE (wsz
) != SSA_NAME
)
843 wsz
= force_gimple_operand (wsz
, &seq
, true, NULL
);
844 if (TREE_CODE (sz
) != SSA_NAME
)
847 sz
= force_gimple_operand (sz
, &s
, true, NULL
);
848 gimple_seq_add_seq (&seq
, s
);
852 gsi_insert_seq_on_edge (gimple_phi_arg_edge (obj_phi
, i
), seq
);
855 add_phi_arg (wholephi
, wsz
,
856 gimple_phi_arg_edge (obj_phi
, i
),
857 gimple_phi_arg_location (obj_phi
, i
));
859 add_phi_arg (phi
, sz
,
860 gimple_phi_arg_edge (obj_phi
, i
),
861 gimple_phi_arg_location (obj_phi
, i
));
865 /* Descend through EXPR and return size_unknown if it uses any SSA variable
866 object_size_set or object_size_set_temp generated, which turned out to be
867 size_unknown, as noted in UNKNOWNS. */
870 propagate_unknowns (object_size_info
*osi
, tree expr
)
872 int object_size_type
= osi
->object_size_type
;
874 switch (TREE_CODE (expr
))
877 if (bitmap_bit_p (osi
->unknowns
, SSA_NAME_VERSION (expr
)))
878 return size_unknown (object_size_type
);
884 tree res
= propagate_unknowns (osi
, TREE_OPERAND (expr
, 0));
885 if (size_unknown_p (res
, object_size_type
))
888 res
= propagate_unknowns (osi
, TREE_OPERAND (expr
, 1));
889 if (size_unknown_p (res
, object_size_type
))
896 tree res
= propagate_unknowns (osi
, TREE_OPERAND (expr
, 1));
897 if (size_unknown_p (res
, object_size_type
))
902 for (int i
= 0; i
< TREE_VEC_LENGTH (expr
); i
++)
904 tree res
= propagate_unknowns (osi
, TREE_VEC_ELT (expr
, i
));
905 if (size_unknown_p (res
, object_size_type
))
912 tree res
= propagate_unknowns (osi
, TREE_OPERAND (expr
, 0));
913 if (size_unknown_p (res
, object_size_type
))
923 /* Walk through size expressions that need reexamination and generate
924 statements for them. */
927 gimplify_size_expressions (object_size_info
*osi
)
929 int object_size_type
= osi
->object_size_type
;
934 /* Step 1: Propagate unknowns into expressions. */
935 bitmap reexamine
= BITMAP_ALLOC (NULL
);
936 bitmap_copy (reexamine
, osi
->reexamine
);
940 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
942 object_size cur
= object_sizes_get_raw (osi
, i
);
944 if (size_unknown_p (propagate_unknowns (osi
, cur
.size
),
946 || size_unknown_p (propagate_unknowns (osi
, cur
.wholesize
),
949 object_sizes_set (osi
, i
,
950 size_unknown (object_size_type
),
951 size_unknown (object_size_type
));
955 bitmap_copy (reexamine
, osi
->reexamine
);
959 /* Release all unknowns. */
960 EXECUTE_IF_SET_IN_BITMAP (osi
->unknowns
, 0, i
, bi
)
961 release_ssa_name (ssa_name (i
));
963 /* Expand all size expressions to put their definitions close to the objects
964 for which size is being computed. */
965 EXECUTE_IF_SET_IN_BITMAP (osi
->reexamine
, 0, i
, bi
)
967 gimple_seq seq
= NULL
;
968 object_size osize
= object_sizes_get_raw (osi
, i
);
970 gimple
*stmt
= SSA_NAME_DEF_STMT (ssa_name (i
));
971 enum gimple_code code
= gimple_code (stmt
);
973 /* PHI nodes need special attention. */
974 if (code
== GIMPLE_PHI
)
975 emit_phi_nodes (stmt
, osize
.size
, osize
.wholesize
);
978 tree size_expr
= NULL_TREE
;
980 /* Bundle wholesize in with the size to gimplify if needed. */
981 if (osize
.wholesize
!= osize
.size
982 && !size_usable_p (osize
.wholesize
))
983 size_expr
= size_binop (COMPOUND_EXPR
,
986 else if (!size_usable_p (osize
.size
))
987 size_expr
= osize
.size
;
991 gimple_stmt_iterator gsi
;
992 if (code
== GIMPLE_NOP
)
993 gsi
= gsi_start_bb (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
995 gsi
= gsi_for_stmt (stmt
);
997 force_gimple_operand (size_expr
, &seq
, true, NULL
);
998 gsi_insert_seq_before (&gsi
, seq
, GSI_CONTINUE_LINKING
);
1002 /* We're done, so replace the MODIFY_EXPRs with the SSA names. */
1003 object_sizes_initialize (osi
, i
,
1004 object_sizes_get (osi
, i
),
1005 object_sizes_get (osi
, i
, true));
1009 /* Compute __builtin_object_size value for PTR and set *PSIZE to
1010 the resulting value. If the declared object is known and PDECL
1011 is nonnull, sets *PDECL to the object's DECL. OBJECT_SIZE_TYPE
1012 is the second argument to __builtin_object_size.
1013 Returns true on success and false when the object size could not
1017 compute_builtin_object_size (tree ptr
, int object_size_type
,
1020 gcc_assert (object_size_type
>= 0 && object_size_type
< OST_END
);
1022 /* Set to unknown and overwrite just before returning if the size
1023 could be determined. */
1024 *psize
= size_unknown (object_size_type
);
1027 init_offset_limit ();
1029 if (TREE_CODE (ptr
) == ADDR_EXPR
)
1030 return addr_object_size (NULL
, ptr
, object_size_type
, psize
);
1032 if (TREE_CODE (ptr
) != SSA_NAME
1033 || !POINTER_TYPE_P (TREE_TYPE (ptr
)))
1036 if (computed
[object_size_type
] == NULL
)
1038 if (optimize
|| object_size_type
& OST_SUBOBJECT
)
1041 /* When not optimizing, rather than failing, make a small effort
1042 to determine the object size without the full benefit of
1043 the (costly) computation below. */
1044 gimple
*def
= SSA_NAME_DEF_STMT (ptr
);
1045 if (gimple_code (def
) == GIMPLE_ASSIGN
)
1047 tree_code code
= gimple_assign_rhs_code (def
);
1048 if (code
== POINTER_PLUS_EXPR
)
1050 tree offset
= gimple_assign_rhs2 (def
);
1051 ptr
= gimple_assign_rhs1 (def
);
1053 if (((object_size_type
& OST_DYNAMIC
)
1054 || (tree_fits_shwi_p (offset
)
1055 && compare_tree_int (offset
, offset_limit
) <= 0))
1056 && compute_builtin_object_size (ptr
, object_size_type
,
1059 *psize
= size_for_offset (*psize
, offset
);
1067 struct object_size_info osi
;
1068 osi
.object_size_type
= object_size_type
;
1069 if (!bitmap_bit_p (computed
[object_size_type
], SSA_NAME_VERSION (ptr
)))
1074 object_sizes_grow (object_size_type
);
1077 fprintf (dump_file
, "Computing %s %s%sobject size for ",
1078 (object_size_type
& OST_MINIMUM
) ? "minimum" : "maximum",
1079 (object_size_type
& OST_DYNAMIC
) ? "dynamic " : "",
1080 (object_size_type
& OST_SUBOBJECT
) ? "sub" : "");
1081 print_generic_expr (dump_file
, ptr
, dump_flags
);
1082 fprintf (dump_file
, ":\n");
1085 osi
.visited
= BITMAP_ALLOC (NULL
);
1086 osi
.reexamine
= BITMAP_ALLOC (NULL
);
1088 if (object_size_type
& OST_DYNAMIC
)
1089 osi
.unknowns
= BITMAP_ALLOC (NULL
);
1097 /* First pass: walk UD chains, compute object sizes that
1098 can be computed. osi.reexamine bitmap at the end will
1099 contain what variables were found in dependency cycles
1100 and therefore need to be reexamined. */
1102 osi
.changed
= false;
1103 collect_object_sizes_for (&osi
, ptr
);
1105 if (object_size_type
& OST_DYNAMIC
)
1108 gimplify_size_expressions (&osi
);
1109 BITMAP_FREE (osi
.unknowns
);
1110 bitmap_clear (osi
.reexamine
);
1113 /* Second pass: keep recomputing object sizes of variables
1114 that need reexamination, until no object sizes are
1115 increased or all object sizes are computed. */
1116 if (! bitmap_empty_p (osi
.reexamine
))
1118 bitmap reexamine
= BITMAP_ALLOC (NULL
);
1120 /* If looking for minimum instead of maximum object size,
1121 detect cases where a pointer is increased in a loop.
1122 Although even without this detection pass 2 would eventually
1123 terminate, it could take a long time. If a pointer is
1124 increasing this way, we need to assume 0 object size.
1125 E.g. p = &buf[0]; while (cond) p = p + 4; */
1126 if (object_size_type
& OST_MINIMUM
)
1128 osi
.depths
= XCNEWVEC (unsigned int, num_ssa_names
);
1129 osi
.stack
= XNEWVEC (unsigned int, num_ssa_names
);
1130 osi
.tos
= osi
.stack
;
1132 /* collect_object_sizes_for is changing
1133 osi.reexamine bitmap, so iterate over a copy. */
1134 bitmap_copy (reexamine
, osi
.reexamine
);
1135 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
1136 if (bitmap_bit_p (osi
.reexamine
, i
))
1137 check_for_plus_in_loops (&osi
, ssa_name (i
));
1149 osi
.changed
= false;
1150 /* collect_object_sizes_for is changing
1151 osi.reexamine bitmap, so iterate over a copy. */
1152 bitmap_copy (reexamine
, osi
.reexamine
);
1153 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
1154 if (bitmap_bit_p (osi
.reexamine
, i
))
1156 collect_object_sizes_for (&osi
, ssa_name (i
));
1157 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1159 fprintf (dump_file
, "Reexamining ");
1160 print_generic_expr (dump_file
, ssa_name (i
),
1162 fprintf (dump_file
, "\n");
1166 while (osi
.changed
);
1168 BITMAP_FREE (reexamine
);
1170 EXECUTE_IF_SET_IN_BITMAP (osi
.reexamine
, 0, i
, bi
)
1171 bitmap_set_bit (computed
[object_size_type
], i
);
1173 /* Debugging dumps. */
1176 EXECUTE_IF_SET_IN_BITMAP (osi
.visited
, 0, i
, bi
)
1177 if (!object_sizes_unknown_p (object_size_type
, i
))
1179 print_generic_expr (dump_file
, ssa_name (i
),
1182 ": %s %s%sobject size ",
1183 ((object_size_type
& OST_MINIMUM
) ? "minimum"
1185 (object_size_type
& OST_DYNAMIC
) ? "dynamic " : "",
1186 (object_size_type
& OST_SUBOBJECT
) ? "sub" : "");
1187 print_generic_expr (dump_file
, object_sizes_get (&osi
, i
),
1189 fprintf (dump_file
, "\n");
1193 BITMAP_FREE (osi
.reexamine
);
1194 BITMAP_FREE (osi
.visited
);
1197 *psize
= object_sizes_get (&osi
, SSA_NAME_VERSION (ptr
));
1198 return !size_unknown_p (*psize
, object_size_type
);
/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.
   Only called on the first pass (osi->pass == 0); stores both the size and
   wholesize estimate for PTR via object_sizes_set.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  tree bytes, wholesize;

  gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
  gcc_assert (osi->pass == 0);

  /* Strip a WITH_SIZE_EXPR wrapper; the underlying object is operand 0.  */
  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
	      || !POINTER_TYPE_P (TREE_TYPE (value)));

  /* Only an ADDR_EXPR yields a usable estimate here; anything else is
     treated as an unknown size.  */
  if (TREE_CODE (value) == ADDR_EXPR)
    addr_object_size (osi, value, object_size_type, &bytes, &wholesize);
  else
    bytes = wholesize = size_unknown (object_size_type);

  object_sizes_set (osi, varno, bytes, wholesize);
}
/* Compute object_sizes for PTR, defined to the result of a call.
   For an allocation call the size and wholesize coincide (the pointer
   points at the start of the object), so the same value is stored for
   both.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);

  gcc_assert (is_gimple_call (call));

  gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
  gcc_assert (osi->pass == 0);
  tree bytes = alloc_object_size (call, object_size_type);

  /* Fall back to "unknown" if the allocation size expression is not
     representable for this object_size_type.  */
  if (!size_valid_p (bytes, object_size_type))
    bytes = size_unknown (object_size_type);

  object_sizes_set (osi, varno, bytes, bytes);
}
/* Compute object_sizes for PTR, defined to an unknown value.  Records the
   "unknown" sentinel for both size and wholesize.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);

  gcc_checking_assert (!object_sizes_unknown_p (object_size_type, varno));
  gcc_checking_assert (osi->pass == 0);
  tree bytes = size_unknown (object_size_type);

  object_sizes_set (osi, varno, bytes, bytes);
}
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later (i.e. ORIG itself is
   still marked in osi->reexamine).  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  tree orig_bytes, wholesize;

  /* Nothing to do once DEST is already known to be unknown.  */
  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  /* On the first pass, make sure ORIG's size has been computed.  */
  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes_get (osi, SSA_NAME_VERSION (orig));
  wholesize = object_sizes_get (osi, SSA_NAME_VERSION (orig), true);

  /* If DEST's recorded size changed, note it so the fixpoint loop in the
     caller iterates again.  */
  if (object_sizes_set (osi, varno, orig_bytes, wholesize))
    osi->changed = true;

  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  Also handles ADDR_EXPR of a MEM_REF, which
   is equivalent to base pointer + offset.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  tree bytes, wholesize;
  tree op0, op1;
  bool reexamine = false;

  /* Extract the base pointer (op0) and the offset (op1).  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      /* &MEM_REF[base, offset] is base + offset.  */
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  /* Handle PTR + OFFSET here.  */
  if (size_valid_p (op1, object_size_type)
      && (TREE_CODE (op0) == SSA_NAME || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, op0);

	  bytes = object_sizes_get (osi, SSA_NAME_VERSION (op0));
	  wholesize = object_sizes_get (osi, SSA_NAME_VERSION (op0), true);
	  reexamine = bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (op0));
	}
      else
	{
	  /* op0 will be ADDR_EXPR here.  We should never come here during
	     reexamination.  */
	  gcc_checking_assert (osi->pass == 0);
	  addr_object_size (osi, op0, object_size_type, &bytes, &wholesize);
	}

      /* size_for_offset doesn't make sense for -1 size, but it does for size 0
	 since the wholesize could be non-zero and a negative offset could give
	 a non-zero size.  */
      if (size_unknown_p (bytes, 0))
	;
      else if ((object_size_type & OST_DYNAMIC)
	       || compare_tree_int (op1, offset_limit) <= 0)
	bytes = size_for_offset (bytes, op1, wholesize);
      /* In the static case, with a negative offset, the best estimate for
	 minimum size is size_unknown but for maximum size, the wholesize is a
	 better estimate than size_unknown.  */
      else if (object_size_type & OST_MINIMUM)
	bytes = size_unknown (object_size_type);
      else
	bytes = wholesize;
    }
  else
    bytes = wholesize = size_unknown (object_size_type);

  if (!size_valid_p (bytes, object_size_type)
      || !size_valid_p (wholesize, object_size_type))
    bytes = wholesize = size_unknown (object_size_type);

  if (object_sizes_set (osi, varno, bytes, wholesize))
    osi->changed = true;
  return reexamine;
}
/* Compute the dynamic object size for VAR.  Return the result in SIZE and
   WHOLESIZE.  Handles SSA names (via the regular collection machinery) and
   ADDR_EXPRs; everything else yields the unknown sentinel.  */

static void
dynamic_object_size (struct object_size_info *osi, tree var,
		     tree *size, tree *wholesize)
{
  int object_size_type = osi->object_size_type;

  if (TREE_CODE (var) == SSA_NAME)
    {
      unsigned varno = SSA_NAME_VERSION (var);

      collect_object_sizes_for (osi, var);
      *size = object_sizes_get (osi, varno);
      *wholesize = object_sizes_get (osi, varno, true);
    }
  else if (TREE_CODE (var) == ADDR_EXPR)
    addr_object_size (osi, var, object_size_type, size, wholesize);
  else
    *size = *wholesize = size_unknown (object_size_type);
}
/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  In dynamic mode the two arm sizes are combined into a COND_EXPR
   of sizes; in static mode the arms are merged into a single estimate.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (object_size_type & OST_DYNAMIC)
    {
      tree then_size, then_wholesize, else_size, else_wholesize;

      dynamic_object_size (osi, then_, &then_size, &then_wholesize);
      /* Skip evaluating the else arm if the then arm is already unknown.  */
      if (!size_unknown_p (then_size, object_size_type))
	dynamic_object_size (osi, else_, &else_size, &else_wholesize);

      tree cond_size, cond_wholesize;
      if (size_unknown_p (then_size, object_size_type)
	  || size_unknown_p (else_size, object_size_type))
	cond_size = cond_wholesize = size_unknown (object_size_type);
      else
	{
	  /* Build size expressions selected by the same condition as the
	     pointer COND_EXPR itself.  */
	  cond_size = fold_build3 (COND_EXPR, sizetype,
				   gimple_assign_rhs1 (stmt),
				   then_size, else_size);
	  cond_wholesize = fold_build3 (COND_EXPR, sizetype,
					gimple_assign_rhs1 (stmt),
					then_wholesize, else_wholesize);
	}

      object_sizes_set (osi, varno, cond_size, cond_wholesize);

      return false;
    }

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_);
  else
    expr_object_size (osi, var, then_);

  if (object_sizes_unknown_p (object_size_type, varno))
    return reexamine;

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
/* Find size of an object passed as a parameter to the function.  In dynamic
   mode, a pointer parameter carrying an "access" attribute with a size
   argument yields a size expression derived from that size parameter;
   otherwise the parameter is handled like any other non-SSA expression.  */

static void
parm_object_size (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  tree parm = SSA_NAME_VAR (var);

  if (!(object_size_type & OST_DYNAMIC) || !POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      expr_object_size (osi, var, parm);
      return;
    }

  /* Look for access attribute.  */
  rdwr_map rdwr_idx;

  tree fndecl = cfun->decl;
  const attr_access *access = get_parm_access (rdwr_idx, parm, fndecl);
  tree typesize = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (parm)));
  tree sz = NULL_TREE;

  /* If we have an explicit access attribute with a usable size argument... */
  if (access && access->sizarg != UINT_MAX && !access->internal_p
      /* ... and either PARM is void * or has a type that is complete and has a
	 constant size... */
      && ((typesize && poly_int_tree_p (typesize))
	  || (!typesize && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))))
    {
      tree fnargs = DECL_ARGUMENTS (fndecl);
      tree arg = NULL_TREE;
      unsigned argpos = 0;

      /* ... then walk through the parameters to pick the size parameter and
	 safely scale it by the type size if needed.  */
      for (arg = fnargs; arg; arg = TREE_CHAIN (arg), ++argpos)
	if (argpos == access->sizarg && INTEGRAL_TYPE_P (TREE_TYPE (arg)))
	  {
	    sz = get_or_create_ssa_default_def (cfun, arg);
	    if (sz != NULL_TREE)
	      {
		sz = fold_convert (sizetype, sz);
		/* Scale element count by element size; a NULL typesize
		   means void *, where the count is already in bytes.  */
		if (typesize)
		  sz = size_binop (MULT_EXPR, sz, typesize);
	      }
	    break;
	  }
    }
  if (!sz)
    sz = size_unknown (object_size_type);

  object_sizes_set (osi, SSA_NAME_VERSION (var), sz, sz);
}
/* Compute an object size expression for VAR, which is the result of a PHI
   node.  Collects per-argument sizes into TREE_VECs; bails out to the
   unknown sentinel if any argument's size cannot be determined or an
   incoming edge is EDGE_COMPLEX.  */

static void
phi_dynamic_object_size (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned i, num_args = gimple_phi_num_args (stmt);
  bool wholesize_needed = false;

  /* The extra space is for the PHI result at the end, which object_sizes_set
     sets for us.  */
  tree sizes = make_tree_vec (num_args + 1);
  tree wholesizes = make_tree_vec (num_args + 1);

  /* Bail out if the size of any of the PHI arguments cannot be
     determined.  */
  for (i = 0; i < num_args; i++)
    {
      edge e = gimple_phi_arg_edge (as_a <gphi *> (stmt), i);
      if (e->flags & EDGE_COMPLEX)
	break;

      tree rhs = gimple_phi_arg_def (stmt, i);
      tree size, wholesize;

      dynamic_object_size (osi, rhs, &size, &wholesize);

      if (size_unknown_p (size, object_size_type))
	break;

      /* Track whether any argument has a wholesize distinct from its size,
	 so we can share one TREE_VEC when they all coincide.  */
      if (size != wholesize)
	wholesize_needed = true;

      TREE_VEC_ELT (sizes, i) = size;
      TREE_VEC_ELT (wholesizes, i) = wholesize;
    }

  /* An early break above means some argument was undetermined.  */
  if (i < num_args)
    {
      ggc_free (sizes);
      ggc_free (wholesizes);
      sizes = wholesizes = size_unknown (object_size_type);
    }

  /* Point to the same TREE_VEC so that we can avoid emitting two PHI
     nodes.  */
  else if (!wholesize_needed)
    {
      ggc_free (wholesizes);
      wholesizes = sizes;
    }

  object_sizes_set (osi, varno, sizes, wholesizes);
}
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is size_unknown (object_size_type).
   To differentiate addition from subtraction, ADDR_EXPR returns
   size_unknown (object_size_type) for all objects bigger than half of the
   address space, and constants less than half of the address space are
   considered addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  /* Already fully computed for this object_size_type: nothing to do.  */
  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* Initialize to 0 for maximum size and M1U for minimum size so that
	     it gets immediately overridden.  */
	  object_sizes_initialize (osi, varno,
				   size_initval (object_size_type),
				   size_initval (object_size_type));
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     reexamination.  */
	  if (object_size_type & OST_DYNAMIC)
	    object_sizes_set_temp (osi, varno);

	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  /* Dispatch on the kind of defining statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	/* A pass-through call (e.g. memcpy) returns one of its arguments.  */
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, call_stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      unknown_object_size (osi, var);
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	parm_object_size (osi, var);
      else
	/* Uninitialized SSA names point nowhere.  */
	unknown_object_size (osi, var);
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	if (object_size_type & OST_DYNAMIC)
	  {
	    phi_dynamic_object_size (osi, var);
	    break;
	  }

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (object_sizes_unknown_p (object_size_type, varno))
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Either the size is final now, or it must be revisited in a later
     fixpoint iteration.  */
  if (! reexamine || object_sizes_unknown_p (object_size_type, varno))
    {
      bitmap_set_bit (computed[object_size_type], varno);
      if (!(object_size_type & OST_DYNAMIC))
	bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
/* Helper function for check_for_plus_in_loops.  Called recursively
   to walk the use-def chain; DEPTH is incremented each time a non-zero
   constant pointer addition is crossed.  Uses osi->depths / osi->stack
   as an explicit visitation stack.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      /* Revisiting VARNO at a different depth means the cycle contains a
	 pointer increment.  */
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      /* Give all participating SSA names a minimum size of 0 and
		 mark them as finally computed.  */
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes_set (osi, *sp, size_zero_node,
				object_sizes_get (osi, *sp, true));
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* A non-zero constant addition bumps the depth.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Unwind: pop VARNO off the visitation stack.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      /* Skip non-positive offsets.  */
      if (integer_zerop (cst) || compare_tree_int (cst, offset_limit) > 0)
	return;

      /* Seed the stack with the base variable at depth 1, then search
	 from VAR at depth 2 for a cycle back to it.  */
      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}
1879 /* Initialize data structures for the object size computation. */
1882 init_object_sizes (void)
1884 int object_size_type
;
1889 for (object_size_type
= 0; object_size_type
< OST_END
; object_size_type
++)
1891 object_sizes_grow (object_size_type
);
1892 computed
[object_size_type
] = BITMAP_ALLOC (NULL
);
1895 init_offset_limit ();
1899 /* Destroy data structures after the object size computation. */
1902 fini_object_sizes (void)
1904 int object_size_type
;
1906 for (object_size_type
= 0; object_size_type
< OST_END
; object_size_type
++)
1908 object_sizes_release (object_size_type
);
1909 BITMAP_FREE (computed
[object_size_type
]);
/* Dummy valueize function, passed to gimple_fold_stmt_to_constant;
   returns its argument unchanged (no SSA value substitution).  */

static tree
do_valueize (tree t)
{
  return t;
}
/* Process a __builtin_object_size or __builtin_dynamic_object_size call in
   CALL early for subobjects before any object information is lost due to
   optimization.  Insert a MIN or MAX expression of the result and
   __builtin_object_size at I so that it may be processed in the second pass.
   __builtin_dynamic_object_size is treated like __builtin_object_size here
   since we're only looking for constant bounds.  */

static void
early_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
{
  tree ost = gimple_call_arg (call, 1);
  tree lhs = gimple_call_lhs (call);
  gcc_assert (lhs != NULL_TREE);

  if (!tree_fits_uhwi_p (ost))
    return;

  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
  tree ptr = gimple_call_arg (call, 0);

  /* Only the subobject variants (types 1 and 3) are handled early.  */
  if (object_size_type != 1 && object_size_type != 3)
    return;

  if (TREE_CODE (ptr) != ADDR_EXPR && TREE_CODE (ptr) != SSA_NAME)
    return;

  tree type = TREE_TYPE (lhs);
  tree bytes;
  if (!compute_builtin_object_size (ptr, object_size_type, &bytes)
      || !int_fits_type_p (bytes, type))
    return;

  /* Rewrite LHS = BOS (...) into TEM = BOS (...); LHS = MIN/MAX (TEM, CST)
     so the later pass can still refine the builtin's own result.  */
  tree tem = make_ssa_name (type);
  gimple_call_set_lhs (call, tem);
  enum tree_code code = object_size_type & OST_MINIMUM ? MAX_EXPR : MIN_EXPR;
  tree cst = fold_convert (type, bytes);
  gimple *g = gimple_build_assign (lhs, code, tem, cst);
  gsi_insert_after (i, g, GSI_NEW_STMT);
}
/* Attempt to fold one __builtin_dynamic_object_size call in CALL into an
   expression and insert it at I.  Return true if it succeeds.  */

static bool
dynamic_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
{
  gcc_assert (gimple_call_num_args (call) == 2);

  tree args[2];
  args[0] = gimple_call_arg (call, 0);
  args[1] = gimple_call_arg (call, 1);

  location_t loc = EXPR_LOC_OR_LOC (args[0], input_location);
  tree result_type = gimple_call_return_type (as_a <gcall *> (call));
  tree result = fold_builtin_call_array (loc, result_type,
					 gimple_call_fn (call), 2, args);

  if (!result)
    return false;

  /* fold_builtin_call_array may wrap the result inside a
     NOP_EXPR.  */
  STRIP_NOPS (result);
  gimplify_and_update_call_from_tree (i, result);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Simplified (dynamic)\n  ");
      print_gimple_stmt (dump_file, call, 0, dump_flags);
      fprintf (dump_file, "  to ");
      print_generic_expr (dump_file, result);
      fprintf (dump_file, "\n");
    }
  return true;
}
/* Main pass body shared by the early and late object-size passes.
   Walks every statement in FUN, folding __builtin_object_size and
   __builtin_dynamic_object_size calls.  When EARLY is true, only install
   MIN/MAX bounds (see early_object_sizes_execute_one) instead of folding
   the call away.  */

static unsigned int
object_sizes_execute (function *fun, bool early)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  bool dynamic = false;

	  gimple *call = gsi_stmt (i);
	  if (gimple_call_builtin_p (call, BUILT_IN_DYNAMIC_OBJECT_SIZE))
	    dynamic = true;
	  else if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  init_object_sizes ();

	  /* If early, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
	     and rather than folding the builtin to the constant if any,
	     create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
	     call result and the computed constant.  Do the same for
	     __builtin_dynamic_object_size too.  */
	  if (early)
	    {
	      early_object_sizes_execute_one (&i, call);
	      continue;
	    }

	  if (dynamic)
	    {
	      if (dynamic_object_sizes_execute_one (&i, call))
		continue;
	      else
		{
		  /* If we could not find a suitable size expression, lower to
		     __builtin_object_size so that we may at least get a
		     constant lower or higher estimate.  */
		  tree bosfn = builtin_decl_implicit (BUILT_IN_OBJECT_SIZE);
		  gimple_call_set_fndecl (call, bosfn);
		  update_stmt (call);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      print_generic_expr (dump_file, gimple_call_arg (call, 0),
					  dump_flags);
		      fprintf (dump_file,
			       ": Retrying as __builtin_object_size\n");
		    }
		}
	    }

	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      tree ost = gimple_call_arg (call, 1);

	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  /* Fall back to the documented defaults: 0 for minimum
		     variants, (size_t)-1 for maximum variants.  */
		  if (object_size_type & OST_MINIMUM)
		    result = build_zero_cst (size_type_node);
		  else if (object_size_type < OST_END)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		}

	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, "  to ");
	      print_generic_expr (dump_file, result);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    replace_uses_by (lhs, result);
	  else
	    replace_call_with_value (&i, result);
	}
    }

  fini_object_sizes ();
  return 0;
}
/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_objsz, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Late objsz pass: folds __builtin_object_size calls to constants
   (object_sizes_execute with early == false).  */

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_object_sizes (m_ctxt); }
  unsigned int execute (function *fun) final override
  {
    return object_sizes_execute (fun, false);
  }
}; // class pass_object_sizes
/* Factory for the late objsz pass, referenced from the pass manager.  */

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}
/* Early version of pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_early_object_sizes =
{
  GIMPLE_PASS, /* type */
  "early_objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Early objsz pass: installs MIN/MAX bounds for subobject size queries
   before optimizations lose object information (object_sizes_execute with
   early == true).  */

class pass_early_object_sizes : public gimple_opt_pass
{
public:
  pass_early_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *fun) final override
  {
    return object_sizes_execute (fun, true);
  }
}; // class pass_early_object_sizes
2174 make_pass_early_object_sizes (gcc::context
*ctxt
)
2176 return new pass_early_object_sizes (ctxt
);