1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2022 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "tree-pass.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-iterator.h"
33 #include "gimple-fold.h"
36 #include "stringpool.h"
39 #include "gimplify-me.h"
41 struct object_size_info
46 bitmap visited
, reexamine
, unknowns
;
48 unsigned int *stack
, *tos
;
51 struct GTY(()) object_size
53 /* Estimate of bytes till the end of the object. */
55 /* Estimate of the size of the whole object. */
59 static tree
compute_object_offset (const_tree
, const_tree
);
60 static bool addr_object_size (struct object_size_info
*,
61 const_tree
, int, tree
*, tree
*t
= NULL
);
62 static tree
alloc_object_size (const gcall
*, int);
63 static tree
pass_through_call (const gcall
*);
64 static void collect_object_sizes_for (struct object_size_info
*, tree
);
65 static void expr_object_size (struct object_size_info
*, tree
, tree
);
66 static bool merge_object_sizes (struct object_size_info
*, tree
, tree
);
67 static bool plus_stmt_object_size (struct object_size_info
*, tree
, gimple
*);
68 static bool cond_expr_object_size (struct object_size_info
*, tree
, gimple
*);
69 static void init_offset_limit (void);
70 static void check_for_plus_in_loops (struct object_size_info
*, tree
);
71 static void check_for_plus_in_loops_1 (struct object_size_info
*, tree
,
74 /* object_sizes[0] is upper bound for the object size and number of bytes till
75 the end of the object.
76 object_sizes[1] is upper bound for the object size and number of bytes till
77 the end of the subobject (innermost array or field with address taken).
78 object_sizes[2] is lower bound for the object size and number of bytes till
79 the end of the object and object_sizes[3] lower bound for subobject.
81 For static object sizes, the object size and the bytes till the end of the
82 object are both INTEGER_CST. In the dynamic case, they are finally either a
83 gimple variable or an INTEGER_CST. */
84 static vec
<object_size
> object_sizes
[OST_END
];
86 /* Bitmaps what object sizes have been computed already. */
87 static bitmap computed
[OST_END
];
89 /* Maximum value of offset we consider to be addition. */
90 static unsigned HOST_WIDE_INT offset_limit
;
/* Tell the generic SSA updater what kind of update is needed after the pass
   executes.  Assigned in strdup_object_size below.  */
static unsigned int todo;
96 /* Return true if VAL represents an initial size for OBJECT_SIZE_TYPE. */
99 size_initval_p (tree val
, int object_size_type
)
101 return ((object_size_type
& OST_MINIMUM
)
102 ? integer_all_onesp (val
) : integer_zerop (val
));
105 /* Return true if VAL represents an unknown size for OBJECT_SIZE_TYPE. */
108 size_unknown_p (tree val
, int object_size_type
)
110 return ((object_size_type
& OST_MINIMUM
)
111 ? integer_zerop (val
) : integer_all_onesp (val
));
114 /* Return true if VAL represents a valid size for OBJECT_SIZE_TYPE. */
117 size_valid_p (tree val
, int object_size_type
)
119 return ((object_size_type
& OST_DYNAMIC
) || TREE_CODE (val
) == INTEGER_CST
);
122 /* Return true if VAL is usable as an object size in the object_sizes
126 size_usable_p (tree val
)
128 return TREE_CODE (val
) == SSA_NAME
|| TREE_CODE (val
) == INTEGER_CST
;
131 /* Return a tree with initial value for OBJECT_SIZE_TYPE. */
134 size_initval (int object_size_type
)
136 return ((object_size_type
& OST_MINIMUM
)
137 ? TYPE_MAX_VALUE (sizetype
) : size_zero_node
);
140 /* Return a tree with unknown value for OBJECT_SIZE_TYPE. */
143 size_unknown (int object_size_type
)
145 return ((object_size_type
& OST_MINIMUM
)
146 ? size_zero_node
: TYPE_MAX_VALUE (sizetype
));
149 /* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names. */
152 object_sizes_grow (int object_size_type
)
154 if (num_ssa_names
> object_sizes
[object_size_type
].length ())
155 object_sizes
[object_size_type
].safe_grow (num_ssa_names
, true);
158 /* Release object_sizes[OBJECT_SIZE_TYPE]. */
161 object_sizes_release (int object_size_type
)
163 object_sizes
[object_size_type
].release ();
166 /* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown. */
169 object_sizes_unknown_p (int object_size_type
, unsigned varno
)
171 return size_unknown_p (object_sizes
[object_size_type
][varno
].size
,
175 /* Return the raw size expression for VARNO corresponding to OSI. This returns
176 the TREE_VEC as is and should only be used during gimplification. */
178 static inline object_size
179 object_sizes_get_raw (struct object_size_info
*osi
, unsigned varno
)
181 gcc_assert (osi
->pass
!= 0);
182 return object_sizes
[osi
->object_size_type
][varno
];
185 /* Return a size tree for VARNO corresponding to OSI. If WHOLE is true, return
186 the whole object size. Use this for building size expressions based on size
190 object_sizes_get (struct object_size_info
*osi
, unsigned varno
,
194 int object_size_type
= osi
->object_size_type
;
197 ret
= object_sizes
[object_size_type
][varno
].wholesize
;
199 ret
= object_sizes
[object_size_type
][varno
].size
;
201 if (object_size_type
& OST_DYNAMIC
)
203 if (TREE_CODE (ret
) == MODIFY_EXPR
)
204 return TREE_OPERAND (ret
, 0);
205 else if (TREE_CODE (ret
) == TREE_VEC
)
206 return TREE_VEC_ELT (ret
, TREE_VEC_LENGTH (ret
) - 1);
208 gcc_checking_assert (size_usable_p (ret
));
214 /* Set size for VARNO corresponding to OSI to VAL. */
217 object_sizes_initialize (struct object_size_info
*osi
, unsigned varno
,
218 tree val
, tree wholeval
)
220 int object_size_type
= osi
->object_size_type
;
222 object_sizes
[object_size_type
][varno
].size
= val
;
223 object_sizes
[object_size_type
][varno
].wholesize
= wholeval
;
226 /* Return a MODIFY_EXPR for cases where SSA and EXPR have the same type. The
227 TREE_VEC is returned only in case of PHI nodes. */
230 bundle_sizes (tree name
, tree expr
)
232 gcc_checking_assert (TREE_TYPE (name
) == sizetype
);
234 if (TREE_CODE (expr
) == TREE_VEC
)
236 TREE_VEC_ELT (expr
, TREE_VEC_LENGTH (expr
) - 1) = name
;
240 gcc_checking_assert (types_compatible_p (TREE_TYPE (expr
), sizetype
));
241 return build2 (MODIFY_EXPR
, sizetype
, name
, expr
);
244 /* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
245 maximum. For static sizes, each element of TREE_VEC is always INTEGER_CST
246 throughout the computation. For dynamic sizes, each element may either be a
247 gimple variable, a MODIFY_EXPR or a TREE_VEC. The MODIFY_EXPR is for
248 expressions that need to be gimplified. TREE_VECs are special, they're
249 emitted only for GIMPLE_PHI and the PHI result variable is the last element
253 object_sizes_set (struct object_size_info
*osi
, unsigned varno
, tree val
,
256 int object_size_type
= osi
->object_size_type
;
257 object_size osize
= object_sizes
[object_size_type
][varno
];
260 tree oldval
= osize
.size
;
261 tree old_wholeval
= osize
.wholesize
;
263 if (object_size_type
& OST_DYNAMIC
)
265 if (bitmap_bit_p (osi
->reexamine
, varno
))
267 if (size_unknown_p (val
, object_size_type
))
269 oldval
= object_sizes_get (osi
, varno
);
270 old_wholeval
= object_sizes_get (osi
, varno
, true);
271 bitmap_set_bit (osi
->unknowns
, SSA_NAME_VERSION (oldval
));
272 bitmap_set_bit (osi
->unknowns
, SSA_NAME_VERSION (old_wholeval
));
273 bitmap_clear_bit (osi
->reexamine
, varno
);
277 val
= bundle_sizes (oldval
, val
);
278 wholeval
= bundle_sizes (old_wholeval
, wholeval
);
283 gcc_checking_assert (size_initval_p (oldval
, object_size_type
));
284 gcc_checking_assert (size_initval_p (old_wholeval
,
286 /* For dynamic object sizes, all object sizes that are not gimple
287 variables will need to be gimplified. */
288 if (wholeval
!= val
&& !size_usable_p (wholeval
))
290 bitmap_set_bit (osi
->reexamine
, varno
);
291 wholeval
= bundle_sizes (make_ssa_name (sizetype
), wholeval
);
293 if (!size_usable_p (val
))
295 bitmap_set_bit (osi
->reexamine
, varno
);
296 tree newval
= bundle_sizes (make_ssa_name (sizetype
), val
);
301 /* If the new value is a temporary variable, mark it for
303 else if (TREE_CODE (val
) == SSA_NAME
&& !SSA_NAME_DEF_STMT (val
))
304 bitmap_set_bit (osi
->reexamine
, varno
);
309 enum tree_code code
= (object_size_type
& OST_MINIMUM
310 ? MIN_EXPR
: MAX_EXPR
);
312 val
= size_binop (code
, val
, oldval
);
313 wholeval
= size_binop (code
, wholeval
, old_wholeval
);
314 changed
= (tree_int_cst_compare (val
, oldval
) != 0
315 || tree_int_cst_compare (old_wholeval
, wholeval
) != 0);
318 object_sizes
[object_size_type
][varno
].size
= val
;
319 object_sizes
[object_size_type
][varno
].wholesize
= wholeval
;
324 /* Set temporary SSA names for object size and whole size to resolve dependency
325 loops in dynamic size computation. */
328 object_sizes_set_temp (struct object_size_info
*osi
, unsigned varno
)
330 tree val
= object_sizes_get (osi
, varno
);
332 if (size_initval_p (val
, osi
->object_size_type
))
333 object_sizes_set (osi
, varno
,
334 make_ssa_name (sizetype
),
335 make_ssa_name (sizetype
));
338 /* Initialize OFFSET_LIMIT variable. */
340 init_offset_limit (void)
342 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype
)))
343 offset_limit
= tree_to_uhwi (TYPE_MAX_VALUE (sizetype
));
349 /* Bytes at end of the object with SZ from offset OFFSET. If WHOLESIZE is not
350 NULL_TREE, use it to get the net offset of the pointer, which should always
351 be positive and hence, be within OFFSET_LIMIT for valid offsets. */
354 size_for_offset (tree sz
, tree offset
, tree wholesize
= NULL_TREE
)
356 gcc_checking_assert (types_compatible_p (TREE_TYPE (sz
), sizetype
));
358 /* For negative offsets, if we have a distinct WHOLESIZE, use it to get a net
359 offset from the whole object. */
360 if (wholesize
&& wholesize
!= sz
361 && (TREE_CODE (sz
) != INTEGER_CST
362 || TREE_CODE (wholesize
) != INTEGER_CST
363 || tree_int_cst_compare (sz
, wholesize
)))
365 gcc_checking_assert (types_compatible_p (TREE_TYPE (wholesize
),
368 /* Restructure SZ - OFFSET as
369 WHOLESIZE - (WHOLESIZE + OFFSET - SZ) so that the offset part, i.e.
370 WHOLESIZE + OFFSET - SZ is only allowed to be positive. */
371 tree tmp
= size_binop (MAX_EXPR
, wholesize
, sz
);
372 offset
= fold_build2 (PLUS_EXPR
, sizetype
, tmp
, offset
);
373 offset
= fold_build2 (MINUS_EXPR
, sizetype
, offset
, sz
);
377 /* Safe to convert now, since a valid net offset should be non-negative. */
378 if (!useless_type_conversion_p (sizetype
, TREE_TYPE (offset
)))
379 offset
= fold_convert (sizetype
, offset
);
381 if (TREE_CODE (offset
) == INTEGER_CST
)
383 if (integer_zerop (offset
))
386 /* Negative or too large offset even after adjustment, cannot be within
387 bounds of an object. */
388 if (compare_tree_int (offset
, offset_limit
) > 0)
389 return size_zero_node
;
392 return size_binop (MINUS_EXPR
, size_binop (MAX_EXPR
, sz
, offset
), offset
);
395 /* Compute offset of EXPR within VAR. Return error_mark_node
399 compute_object_offset (const_tree expr
, const_tree var
)
401 enum tree_code code
= PLUS_EXPR
;
405 return size_zero_node
;
407 switch (TREE_CODE (expr
))
410 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
411 if (base
== error_mark_node
)
414 t
= TREE_OPERAND (expr
, 1);
415 off
= size_binop (PLUS_EXPR
, DECL_FIELD_OFFSET (t
),
416 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t
))
422 case VIEW_CONVERT_EXPR
:
423 case NON_LVALUE_EXPR
:
424 return compute_object_offset (TREE_OPERAND (expr
, 0), var
);
427 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
428 if (base
== error_mark_node
)
431 off
= TYPE_SIZE_UNIT (TREE_TYPE (expr
));
435 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
436 if (base
== error_mark_node
)
439 t
= TREE_OPERAND (expr
, 1);
440 tree low_bound
, unit_size
;
441 low_bound
= array_ref_low_bound (CONST_CAST_TREE (expr
));
442 unit_size
= array_ref_element_size (CONST_CAST_TREE (expr
));
443 if (! integer_zerop (low_bound
))
444 t
= fold_build2 (MINUS_EXPR
, TREE_TYPE (t
), t
, low_bound
);
445 if (TREE_CODE (t
) == INTEGER_CST
&& tree_int_cst_sgn (t
) < 0)
448 t
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (t
), t
);
450 t
= fold_convert (sizetype
, t
);
451 off
= size_binop (MULT_EXPR
, unit_size
, t
);
455 gcc_assert (TREE_CODE (TREE_OPERAND (expr
, 0)) == ADDR_EXPR
);
456 return wide_int_to_tree (sizetype
, mem_ref_offset (expr
));
459 return error_mark_node
;
462 return size_binop (code
, base
, off
);
465 /* Returns the size of the object designated by DECL considering its
466 initializer if it either has one or if it would not affect its size,
467 otherwise the size of the object without the initializer when MIN
468 is true, else null. An object's initializer affects the object's
469 size if it's a struct type with a flexible array member. */
472 decl_init_size (tree decl
, bool min
)
474 tree size
= DECL_SIZE_UNIT (decl
);
475 tree type
= TREE_TYPE (decl
);
476 if (TREE_CODE (type
) != RECORD_TYPE
)
479 tree last
= last_field (type
);
483 tree last_type
= TREE_TYPE (last
);
484 if (TREE_CODE (last_type
) != ARRAY_TYPE
485 || TYPE_SIZE (last_type
))
488 /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
489 of the initializer and sometimes doesn't. */
490 size
= TYPE_SIZE_UNIT (type
);
491 tree ref
= build3 (COMPONENT_REF
, type
, decl
, last
, NULL_TREE
);
492 tree compsize
= component_ref_size (ref
);
494 return min
? size
: NULL_TREE
;
496 /* The size includes tail padding and initializer elements. */
497 tree pos
= byte_position (last
);
498 size
= fold_build2 (PLUS_EXPR
, TREE_TYPE (size
), pos
, compsize
);
502 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
503 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
504 If unknown, return size_unknown (object_size_type). */
507 addr_object_size (struct object_size_info
*osi
, const_tree ptr
,
508 int object_size_type
, tree
*psize
, tree
*pwholesize
)
510 tree pt_var
, pt_var_size
= NULL_TREE
, pt_var_wholesize
= NULL_TREE
;
511 tree var_size
, bytes
, wholebytes
;
513 gcc_assert (TREE_CODE (ptr
) == ADDR_EXPR
);
515 /* Set to unknown and overwrite just before returning if the size
516 could be determined. */
517 *psize
= size_unknown (object_size_type
);
519 *pwholesize
= size_unknown (object_size_type
);
521 pt_var
= TREE_OPERAND (ptr
, 0);
522 while (handled_component_p (pt_var
))
523 pt_var
= TREE_OPERAND (pt_var
, 0);
528 if (TREE_CODE (pt_var
) == MEM_REF
)
532 if (!osi
|| (object_size_type
& OST_SUBOBJECT
) != 0
533 || TREE_CODE (TREE_OPERAND (pt_var
, 0)) != SSA_NAME
)
535 compute_builtin_object_size (TREE_OPERAND (pt_var
, 0),
536 object_size_type
& ~OST_SUBOBJECT
, &sz
);
541 tree var
= TREE_OPERAND (pt_var
, 0);
543 collect_object_sizes_for (osi
, var
);
544 if (bitmap_bit_p (computed
[object_size_type
],
545 SSA_NAME_VERSION (var
)))
547 sz
= object_sizes_get (osi
, SSA_NAME_VERSION (var
));
548 wholesize
= object_sizes_get (osi
, SSA_NAME_VERSION (var
), true);
551 sz
= wholesize
= size_unknown (object_size_type
);
553 if (!size_unknown_p (sz
, object_size_type
))
554 sz
= size_for_offset (sz
, TREE_OPERAND (pt_var
, 1), wholesize
);
556 if (!size_unknown_p (sz
, object_size_type
)
557 && (TREE_CODE (sz
) != INTEGER_CST
558 || compare_tree_int (sz
, offset_limit
) < 0))
561 pt_var_wholesize
= wholesize
;
564 else if (DECL_P (pt_var
))
566 pt_var_size
= pt_var_wholesize
567 = decl_init_size (pt_var
, object_size_type
& OST_MINIMUM
);
571 else if (TREE_CODE (pt_var
) == STRING_CST
)
572 pt_var_size
= pt_var_wholesize
= TYPE_SIZE_UNIT (TREE_TYPE (pt_var
));
578 /* Validate the size determined above if it is a constant. */
579 if (TREE_CODE (pt_var_size
) == INTEGER_CST
580 && compare_tree_int (pt_var_size
, offset_limit
) >= 0)
584 if (pt_var
!= TREE_OPERAND (ptr
, 0))
588 if (object_size_type
& OST_SUBOBJECT
)
590 var
= TREE_OPERAND (ptr
, 0);
593 && TREE_CODE (var
) != BIT_FIELD_REF
594 && TREE_CODE (var
) != COMPONENT_REF
595 && TREE_CODE (var
) != ARRAY_REF
596 && TREE_CODE (var
) != ARRAY_RANGE_REF
597 && TREE_CODE (var
) != REALPART_EXPR
598 && TREE_CODE (var
) != IMAGPART_EXPR
)
599 var
= TREE_OPERAND (var
, 0);
600 if (var
!= pt_var
&& TREE_CODE (var
) == ARRAY_REF
)
601 var
= TREE_OPERAND (var
, 0);
602 if (! TYPE_SIZE_UNIT (TREE_TYPE (var
))
603 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var
)))
604 || (pt_var_size
&& TREE_CODE (pt_var_size
) == INTEGER_CST
605 && tree_int_cst_lt (pt_var_size
,
606 TYPE_SIZE_UNIT (TREE_TYPE (var
)))))
608 else if (var
!= pt_var
&& TREE_CODE (pt_var
) == MEM_REF
)
611 /* For &X->fld, compute object size if fld isn't a flexible array
613 bool is_flexible_array_mem_ref
= false;
614 while (v
&& v
!= pt_var
)
615 switch (TREE_CODE (v
))
618 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v
, 0))))
621 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v
, 0)));
622 if (domain
&& TYPE_MAX_VALUE (domain
))
628 v
= TREE_OPERAND (v
, 0);
635 if (TREE_CODE (TREE_TYPE (v
)) != ARRAY_TYPE
)
640 is_flexible_array_mem_ref
= array_ref_flexible_size_p (v
);
641 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
642 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
644 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
648 v
= TREE_OPERAND (v
, 0);
649 if (TREE_CODE (v
) == COMPONENT_REF
650 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
653 /* compute object size only if v is not a
654 flexible array member. */
655 if (!is_flexible_array_mem_ref
)
660 v
= TREE_OPERAND (v
, 0);
662 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
663 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
665 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
669 v
= TREE_OPERAND (v
, 0);
688 var_size
= TYPE_SIZE_UNIT (TREE_TYPE (var
));
689 if (!TREE_CONSTANT (var_size
))
690 var_size
= get_or_create_ssa_default_def (cfun
, var_size
);
694 else if (!pt_var_size
)
697 var_size
= pt_var_size
;
698 bytes
= compute_object_offset (TREE_OPERAND (ptr
, 0), var
);
699 if (bytes
!= error_mark_node
)
701 bytes
= size_for_offset (var_size
, bytes
);
702 if (var
!= pt_var
&& pt_var_size
&& TREE_CODE (pt_var
) == MEM_REF
)
704 tree bytes2
= compute_object_offset (TREE_OPERAND (ptr
, 0),
706 if (bytes2
!= error_mark_node
)
708 bytes2
= size_for_offset (pt_var_size
, bytes2
);
709 bytes
= size_binop (MIN_EXPR
, bytes
, bytes2
);
714 bytes
= size_unknown (object_size_type
);
717 = object_size_type
& OST_SUBOBJECT
? var_size
: pt_var_wholesize
;
719 else if (!pt_var_size
)
724 wholebytes
= pt_var_wholesize
;
727 if (!size_unknown_p (bytes
, object_size_type
)
728 && size_valid_p (bytes
, object_size_type
)
729 && !size_unknown_p (bytes
, object_size_type
)
730 && size_valid_p (wholebytes
, object_size_type
))
734 *pwholesize
= wholebytes
;
742 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
743 Handles calls to functions declared with attribute alloc_size.
744 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
745 If unknown, return size_unknown (object_size_type). */
748 alloc_object_size (const gcall
*call
, int object_size_type
)
750 gcc_assert (is_gimple_call (call
));
753 tree callfn
= gimple_call_fndecl (call
);
755 calltype
= TREE_TYPE (callfn
);
757 calltype
= gimple_call_fntype (call
);
760 return size_unknown (object_size_type
);
762 /* Set to positions of alloc_size arguments. */
763 int arg1
= -1, arg2
= -1;
764 tree alloc_size
= lookup_attribute ("alloc_size",
765 TYPE_ATTRIBUTES (calltype
));
766 if (alloc_size
&& TREE_VALUE (alloc_size
))
768 tree p
= TREE_VALUE (alloc_size
);
770 arg1
= TREE_INT_CST_LOW (TREE_VALUE (p
))-1;
772 arg2
= TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p
)))-1;
774 else if (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
)
775 && callfn
&& ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callfn
)))
778 /* Non-const arguments are OK here, let the caller handle constness. */
779 if (arg1
< 0 || arg1
>= (int) gimple_call_num_args (call
)
780 || arg2
>= (int) gimple_call_num_args (call
))
781 return size_unknown (object_size_type
);
783 tree bytes
= NULL_TREE
;
785 bytes
= size_binop (MULT_EXPR
,
786 fold_convert (sizetype
, gimple_call_arg (call
, arg1
)),
787 fold_convert (sizetype
, gimple_call_arg (call
, arg2
)));
789 bytes
= fold_convert (sizetype
, gimple_call_arg (call
, arg1
));
791 return bytes
? bytes
: size_unknown (object_size_type
);
794 /* Compute __builtin_object_size for CALL, which is a call to either
795 BUILT_IN_STRDUP or BUILT_IN_STRNDUP; IS_STRNDUP indicates which it is.
796 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
797 If unknown, return size_unknown (object_size_type). */
800 strdup_object_size (const gcall
*call
, int object_size_type
, bool is_strndup
)
802 tree src
= gimple_call_arg (call
, 0);
803 tree sz
= size_unknown (object_size_type
);
807 n
= fold_build2 (PLUS_EXPR
, sizetype
, size_one_node
,
808 gimple_call_arg (call
, 1));
809 /* For strdup, simply emit strlen (SRC) + 1 and let the optimizer fold it the
813 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
816 sz
= fold_build2 (PLUS_EXPR
, sizetype
, size_one_node
,
817 build_call_expr (strlen_fn
, 1, src
));
818 todo
= TODO_update_ssa_only_virtuals
;
822 /* In all other cases, return the size of SRC since the object size cannot
823 exceed that. We cannot do this for OST_MINIMUM unless SRC points into a
824 string constant since otherwise the object size could go all the way down
826 if (!size_valid_p (sz
, object_size_type
)
827 || size_unknown_p (sz
, object_size_type
))
829 tree wholesrc
= NULL_TREE
;
830 if (TREE_CODE (src
) == ADDR_EXPR
)
831 wholesrc
= get_base_address (TREE_OPERAND (src
, 0));
833 /* If the source points within a string constant, we try to get its
835 if (wholesrc
&& TREE_CODE (wholesrc
) == STRING_CST
)
837 tree len
= c_strlen (src
, 0);
839 sz
= fold_build2 (PLUS_EXPR
, sizetype
, size_one_node
, len
);
842 /* For maximum estimate, our next best guess is the object size of the
844 if (size_unknown_p (sz
, object_size_type
)
845 && !(object_size_type
& OST_MINIMUM
))
846 compute_builtin_object_size (src
, object_size_type
, &sz
);
849 /* String duplication allocates at least one byte, so we should never fail
851 if ((!size_valid_p (sz
, object_size_type
)
852 || size_unknown_p (sz
, object_size_type
))
853 && (object_size_type
& OST_MINIMUM
))
856 /* Factor in the N. */
857 return n
? fold_build2 (MIN_EXPR
, sizetype
, n
, sz
) : sz
;
860 /* If object size is propagated from one of function's arguments directly
861 to its return value, return that argument for GIMPLE_CALL statement CALL.
862 Otherwise return NULL. */
865 pass_through_call (const gcall
*call
)
867 unsigned rf
= gimple_call_return_flags (call
);
868 if (rf
& ERF_RETURNS_ARG
)
870 unsigned argnum
= rf
& ERF_RETURN_ARG_MASK
;
871 if (argnum
< gimple_call_num_args (call
))
872 return gimple_call_arg (call
, argnum
);
875 /* __builtin_assume_aligned is intentionally not marked RET1. */
876 if (gimple_call_builtin_p (call
, BUILT_IN_ASSUME_ALIGNED
))
877 return gimple_call_arg (call
, 0);
882 /* Emit PHI nodes for size expressions fo. */
885 emit_phi_nodes (gimple
*stmt
, tree size
, tree wholesize
)
888 gphi
*wholephi
= NULL
;
890 if (wholesize
!= size
)
892 phires
= TREE_VEC_ELT (wholesize
, TREE_VEC_LENGTH (wholesize
) - 1);
893 wholephi
= create_phi_node (phires
, gimple_bb (stmt
));
896 phires
= TREE_VEC_ELT (size
, TREE_VEC_LENGTH (size
) - 1);
897 gphi
*phi
= create_phi_node (phires
, gimple_bb (stmt
));
898 gphi
*obj_phi
= as_a
<gphi
*> (stmt
);
900 gcc_checking_assert (TREE_CODE (wholesize
) == TREE_VEC
);
901 gcc_checking_assert (TREE_CODE (size
) == TREE_VEC
);
903 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
905 gimple_seq seq
= NULL
;
906 tree wsz
= TREE_VEC_ELT (wholesize
, i
);
907 tree sz
= TREE_VEC_ELT (size
, i
);
909 /* If we built an expression, we will need to build statements
910 and insert them on the edge right away. */
911 if (TREE_CODE (wsz
) != SSA_NAME
)
912 wsz
= force_gimple_operand (wsz
, &seq
, true, NULL
);
913 if (TREE_CODE (sz
) != SSA_NAME
)
916 sz
= force_gimple_operand (sz
, &s
, true, NULL
);
917 gimple_seq_add_seq (&seq
, s
);
921 gsi_insert_seq_on_edge (gimple_phi_arg_edge (obj_phi
, i
), seq
);
924 add_phi_arg (wholephi
, wsz
,
925 gimple_phi_arg_edge (obj_phi
, i
),
926 gimple_phi_arg_location (obj_phi
, i
));
928 add_phi_arg (phi
, sz
,
929 gimple_phi_arg_edge (obj_phi
, i
),
930 gimple_phi_arg_location (obj_phi
, i
));
934 /* Descend through EXPR and return size_unknown if it uses any SSA variable
935 object_size_set or object_size_set_temp generated, which turned out to be
936 size_unknown, as noted in UNKNOWNS. */
939 propagate_unknowns (object_size_info
*osi
, tree expr
)
941 int object_size_type
= osi
->object_size_type
;
943 switch (TREE_CODE (expr
))
946 if (bitmap_bit_p (osi
->unknowns
, SSA_NAME_VERSION (expr
)))
947 return size_unknown (object_size_type
);
953 tree res
= propagate_unknowns (osi
, TREE_OPERAND (expr
, 0));
954 if (size_unknown_p (res
, object_size_type
))
957 res
= propagate_unknowns (osi
, TREE_OPERAND (expr
, 1));
958 if (size_unknown_p (res
, object_size_type
))
965 tree res
= propagate_unknowns (osi
, TREE_OPERAND (expr
, 1));
966 if (size_unknown_p (res
, object_size_type
))
971 for (int i
= 0; i
< TREE_VEC_LENGTH (expr
); i
++)
973 tree res
= propagate_unknowns (osi
, TREE_VEC_ELT (expr
, i
));
974 if (size_unknown_p (res
, object_size_type
))
981 tree res
= propagate_unknowns (osi
, TREE_OPERAND (expr
, 0));
982 if (size_unknown_p (res
, object_size_type
))
992 /* Walk through size expressions that need reexamination and generate
993 statements for them. */
996 gimplify_size_expressions (object_size_info
*osi
)
998 int object_size_type
= osi
->object_size_type
;
1003 /* Step 1: Propagate unknowns into expressions. */
1004 bitmap reexamine
= BITMAP_ALLOC (NULL
);
1005 bitmap_copy (reexamine
, osi
->reexamine
);
1009 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
1011 object_size cur
= object_sizes_get_raw (osi
, i
);
1013 if (size_unknown_p (propagate_unknowns (osi
, cur
.size
),
1015 || size_unknown_p (propagate_unknowns (osi
, cur
.wholesize
),
1018 object_sizes_set (osi
, i
,
1019 size_unknown (object_size_type
),
1020 size_unknown (object_size_type
));
1024 bitmap_copy (reexamine
, osi
->reexamine
);
1028 /* Release all unknowns. */
1029 EXECUTE_IF_SET_IN_BITMAP (osi
->unknowns
, 0, i
, bi
)
1030 release_ssa_name (ssa_name (i
));
1032 /* Expand all size expressions to put their definitions close to the objects
1033 for which size is being computed. */
1034 EXECUTE_IF_SET_IN_BITMAP (osi
->reexamine
, 0, i
, bi
)
1036 gimple_seq seq
= NULL
;
1037 object_size osize
= object_sizes_get_raw (osi
, i
);
1039 gimple
*stmt
= SSA_NAME_DEF_STMT (ssa_name (i
));
1040 enum gimple_code code
= gimple_code (stmt
);
1042 /* PHI nodes need special attention. */
1043 if (code
== GIMPLE_PHI
)
1044 emit_phi_nodes (stmt
, osize
.size
, osize
.wholesize
);
1047 tree size_expr
= NULL_TREE
;
1049 /* Bundle wholesize in with the size to gimplify if needed. */
1050 if (osize
.wholesize
!= osize
.size
1051 && !size_usable_p (osize
.wholesize
))
1052 size_expr
= size_binop (COMPOUND_EXPR
,
1055 else if (!size_usable_p (osize
.size
))
1056 size_expr
= osize
.size
;
1060 gimple_stmt_iterator gsi
;
1061 if (code
== GIMPLE_NOP
)
1062 gsi
= gsi_start_bb (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
1064 gsi
= gsi_for_stmt (stmt
);
1066 force_gimple_operand (size_expr
, &seq
, true, NULL
);
1067 gsi_insert_seq_before (&gsi
, seq
, GSI_CONTINUE_LINKING
);
1071 /* We're done, so replace the MODIFY_EXPRs with the SSA names. */
1072 object_sizes_initialize (osi
, i
,
1073 object_sizes_get (osi
, i
),
1074 object_sizes_get (osi
, i
, true));
1078 /* Compute __builtin_object_size value for PTR and set *PSIZE to
1079 the resulting value. If the declared object is known and PDECL
1080 is nonnull, sets *PDECL to the object's DECL. OBJECT_SIZE_TYPE
1081 is the second argument to __builtin_object_size.
1082 Returns true on success and false when the object size could not
1086 compute_builtin_object_size (tree ptr
, int object_size_type
,
1089 gcc_assert (object_size_type
>= 0 && object_size_type
< OST_END
);
1091 /* Set to unknown and overwrite just before returning if the size
1092 could be determined. */
1093 *psize
= size_unknown (object_size_type
);
1096 init_offset_limit ();
1098 if (TREE_CODE (ptr
) == ADDR_EXPR
)
1099 return addr_object_size (NULL
, ptr
, object_size_type
, psize
);
1101 if (TREE_CODE (ptr
) != SSA_NAME
1102 || !POINTER_TYPE_P (TREE_TYPE (ptr
)))
1105 if (computed
[object_size_type
] == NULL
)
1107 if (optimize
|| object_size_type
& OST_SUBOBJECT
)
1110 /* When not optimizing, rather than failing, make a small effort
1111 to determine the object size without the full benefit of
1112 the (costly) computation below. */
1113 gimple
*def
= SSA_NAME_DEF_STMT (ptr
);
1114 if (gimple_code (def
) == GIMPLE_ASSIGN
)
1116 tree_code code
= gimple_assign_rhs_code (def
);
1117 if (code
== POINTER_PLUS_EXPR
)
1119 tree offset
= gimple_assign_rhs2 (def
);
1120 ptr
= gimple_assign_rhs1 (def
);
1122 if (((object_size_type
& OST_DYNAMIC
)
1123 || (tree_fits_shwi_p (offset
)
1124 && compare_tree_int (offset
, offset_limit
) <= 0))
1125 && compute_builtin_object_size (ptr
, object_size_type
,
1128 *psize
= size_for_offset (*psize
, offset
);
1136 struct object_size_info osi
;
1137 osi
.object_size_type
= object_size_type
;
1138 if (!bitmap_bit_p (computed
[object_size_type
], SSA_NAME_VERSION (ptr
)))
1143 object_sizes_grow (object_size_type
);
1146 fprintf (dump_file
, "Computing %s %s%sobject size for ",
1147 (object_size_type
& OST_MINIMUM
) ? "minimum" : "maximum",
1148 (object_size_type
& OST_DYNAMIC
) ? "dynamic " : "",
1149 (object_size_type
& OST_SUBOBJECT
) ? "sub" : "");
1150 print_generic_expr (dump_file
, ptr
, dump_flags
);
1151 fprintf (dump_file
, ":\n");
1154 osi
.visited
= BITMAP_ALLOC (NULL
);
1155 osi
.reexamine
= BITMAP_ALLOC (NULL
);
1157 if (object_size_type
& OST_DYNAMIC
)
1158 osi
.unknowns
= BITMAP_ALLOC (NULL
);
1166 /* First pass: walk UD chains, compute object sizes that
1167 can be computed. osi.reexamine bitmap at the end will
1168 contain what variables were found in dependency cycles
1169 and therefore need to be reexamined. */
1171 osi
.changed
= false;
1172 collect_object_sizes_for (&osi
, ptr
);
1174 if (object_size_type
& OST_DYNAMIC
)
1177 gimplify_size_expressions (&osi
);
1178 BITMAP_FREE (osi
.unknowns
);
1179 bitmap_clear (osi
.reexamine
);
1182 /* Second pass: keep recomputing object sizes of variables
1183 that need reexamination, until no object sizes are
1184 increased or all object sizes are computed. */
1185 if (! bitmap_empty_p (osi
.reexamine
))
1187 bitmap reexamine
= BITMAP_ALLOC (NULL
);
1189 /* If looking for minimum instead of maximum object size,
1190 detect cases where a pointer is increased in a loop.
1191 Although even without this detection pass 2 would eventually
1192 terminate, it could take a long time. If a pointer is
1193 increasing this way, we need to assume 0 object size.
1194 E.g. p = &buf[0]; while (cond) p = p + 4; */
1195 if (object_size_type
& OST_MINIMUM
)
1197 osi
.depths
= XCNEWVEC (unsigned int, num_ssa_names
);
1198 osi
.stack
= XNEWVEC (unsigned int, num_ssa_names
);
1199 osi
.tos
= osi
.stack
;
1201 /* collect_object_sizes_for is changing
1202 osi.reexamine bitmap, so iterate over a copy. */
1203 bitmap_copy (reexamine
, osi
.reexamine
);
1204 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
1205 if (bitmap_bit_p (osi
.reexamine
, i
))
1206 check_for_plus_in_loops (&osi
, ssa_name (i
));
1218 osi
.changed
= false;
1219 /* collect_object_sizes_for is changing
1220 osi.reexamine bitmap, so iterate over a copy. */
1221 bitmap_copy (reexamine
, osi
.reexamine
);
1222 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
1223 if (bitmap_bit_p (osi
.reexamine
, i
))
1225 collect_object_sizes_for (&osi
, ssa_name (i
));
1226 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1228 fprintf (dump_file
, "Reexamining ");
1229 print_generic_expr (dump_file
, ssa_name (i
),
1231 fprintf (dump_file
, "\n");
1235 while (osi
.changed
);
1237 BITMAP_FREE (reexamine
);
1239 EXECUTE_IF_SET_IN_BITMAP (osi
.reexamine
, 0, i
, bi
)
1240 bitmap_set_bit (computed
[object_size_type
], i
);
1242 /* Debugging dumps. */
1245 EXECUTE_IF_SET_IN_BITMAP (osi
.visited
, 0, i
, bi
)
1246 if (!object_sizes_unknown_p (object_size_type
, i
))
1248 print_generic_expr (dump_file
, ssa_name (i
),
1251 ": %s %s%sobject size ",
1252 ((object_size_type
& OST_MINIMUM
) ? "minimum"
1254 (object_size_type
& OST_DYNAMIC
) ? "dynamic " : "",
1255 (object_size_type
& OST_SUBOBJECT
) ? "sub" : "");
1256 print_generic_expr (dump_file
, object_sizes_get (&osi
, i
),
1258 fprintf (dump_file
, "\n");
1262 BITMAP_FREE (osi
.reexamine
);
1263 BITMAP_FREE (osi
.visited
);
1266 *psize
= object_sizes_get (&osi
, SSA_NAME_VERSION (ptr
));
1267 return !size_unknown_p (*psize
, object_size_type
);
1270 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
1273 expr_object_size (struct object_size_info
*osi
, tree ptr
, tree value
)
1275 int object_size_type
= osi
->object_size_type
;
1276 unsigned int varno
= SSA_NAME_VERSION (ptr
);
1277 tree bytes
, wholesize
;
1279 gcc_assert (!object_sizes_unknown_p (object_size_type
, varno
));
1280 gcc_assert (osi
->pass
== 0);
1282 if (TREE_CODE (value
) == WITH_SIZE_EXPR
)
1283 value
= TREE_OPERAND (value
, 0);
1285 /* Pointer variables should have been handled by merge_object_sizes. */
1286 gcc_assert (TREE_CODE (value
) != SSA_NAME
1287 || !POINTER_TYPE_P (TREE_TYPE (value
)));
1289 if (TREE_CODE (value
) == ADDR_EXPR
)
1290 addr_object_size (osi
, value
, object_size_type
, &bytes
, &wholesize
);
1292 bytes
= wholesize
= size_unknown (object_size_type
);
1294 object_sizes_set (osi
, varno
, bytes
, wholesize
);
1298 /* Compute object_sizes for PTR, defined to the result of a call. */
1301 call_object_size (struct object_size_info
*osi
, tree ptr
, gcall
*call
)
1303 int object_size_type
= osi
->object_size_type
;
1304 unsigned int varno
= SSA_NAME_VERSION (ptr
);
1305 tree bytes
= NULL_TREE
;
1307 gcc_assert (is_gimple_call (call
));
1309 gcc_assert (!object_sizes_unknown_p (object_size_type
, varno
));
1310 gcc_assert (osi
->pass
== 0);
1312 bool is_strdup
= gimple_call_builtin_p (call
, BUILT_IN_STRDUP
);
1313 bool is_strndup
= gimple_call_builtin_p (call
, BUILT_IN_STRNDUP
);
1314 if (is_strdup
|| is_strndup
)
1315 bytes
= strdup_object_size (call
, object_size_type
, is_strndup
);
1317 bytes
= alloc_object_size (call
, object_size_type
);
1319 if (!size_valid_p (bytes
, object_size_type
))
1320 bytes
= size_unknown (object_size_type
);
1322 object_sizes_set (osi
, varno
, bytes
, bytes
);
1326 /* Compute object_sizes for PTR, defined to an unknown value. */
1329 unknown_object_size (struct object_size_info
*osi
, tree ptr
)
1331 int object_size_type
= osi
->object_size_type
;
1332 unsigned int varno
= SSA_NAME_VERSION (ptr
);
1334 gcc_checking_assert (!object_sizes_unknown_p (object_size_type
, varno
));
1335 gcc_checking_assert (osi
->pass
== 0);
1336 tree bytes
= size_unknown (object_size_type
);
1338 object_sizes_set (osi
, varno
, bytes
, bytes
);
1342 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
1343 the object size might need reexamination later. */
1346 merge_object_sizes (struct object_size_info
*osi
, tree dest
, tree orig
)
1348 int object_size_type
= osi
->object_size_type
;
1349 unsigned int varno
= SSA_NAME_VERSION (dest
);
1350 tree orig_bytes
, wholesize
;
1352 if (object_sizes_unknown_p (object_size_type
, varno
))
1356 collect_object_sizes_for (osi
, orig
);
1358 orig_bytes
= object_sizes_get (osi
, SSA_NAME_VERSION (orig
));
1359 wholesize
= object_sizes_get (osi
, SSA_NAME_VERSION (orig
), true);
1361 if (object_sizes_set (osi
, varno
, orig_bytes
, wholesize
))
1362 osi
->changed
= true;
1364 return bitmap_bit_p (osi
->reexamine
, SSA_NAME_VERSION (orig
));
1368 /* Compute object_sizes for VAR, defined to the result of an assignment
1369 with operator POINTER_PLUS_EXPR. Return true if the object size might
1370 need reexamination later. */
1373 plus_stmt_object_size (struct object_size_info
*osi
, tree var
, gimple
*stmt
)
1375 int object_size_type
= osi
->object_size_type
;
1376 unsigned int varno
= SSA_NAME_VERSION (var
);
1377 tree bytes
, wholesize
;
1379 bool reexamine
= false;
1381 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1383 op0
= gimple_assign_rhs1 (stmt
);
1384 op1
= gimple_assign_rhs2 (stmt
);
1386 else if (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
)
1388 tree rhs
= TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
1389 gcc_assert (TREE_CODE (rhs
) == MEM_REF
);
1390 op0
= TREE_OPERAND (rhs
, 0);
1391 op1
= TREE_OPERAND (rhs
, 1);
1396 if (object_sizes_unknown_p (object_size_type
, varno
))
1399 /* Handle PTR + OFFSET here. */
1400 if (size_valid_p (op1
, object_size_type
)
1401 && (TREE_CODE (op0
) == SSA_NAME
|| TREE_CODE (op0
) == ADDR_EXPR
))
1403 if (TREE_CODE (op0
) == SSA_NAME
)
1406 collect_object_sizes_for (osi
, op0
);
1408 bytes
= object_sizes_get (osi
, SSA_NAME_VERSION (op0
));
1409 wholesize
= object_sizes_get (osi
, SSA_NAME_VERSION (op0
), true);
1410 reexamine
= bitmap_bit_p (osi
->reexamine
, SSA_NAME_VERSION (op0
));
1414 /* op0 will be ADDR_EXPR here. We should never come here during
1416 gcc_checking_assert (osi
->pass
== 0);
1417 addr_object_size (osi
, op0
, object_size_type
, &bytes
, &wholesize
);
1420 /* size_for_offset doesn't make sense for -1 size, but it does for size 0
1421 since the wholesize could be non-zero and a negative offset could give
1423 if (size_unknown_p (bytes
, 0))
1425 else if ((object_size_type
& OST_DYNAMIC
)
1426 || compare_tree_int (op1
, offset_limit
) <= 0)
1427 bytes
= size_for_offset (bytes
, op1
, wholesize
);
1428 /* In the static case, with a negative offset, the best estimate for
1429 minimum size is size_unknown but for maximum size, the wholesize is a
1430 better estimate than size_unknown. */
1431 else if (object_size_type
& OST_MINIMUM
)
1432 bytes
= size_unknown (object_size_type
);
1437 bytes
= wholesize
= size_unknown (object_size_type
);
1439 if (!size_valid_p (bytes
, object_size_type
)
1440 || !size_valid_p (wholesize
, object_size_type
))
1441 bytes
= wholesize
= size_unknown (object_size_type
);
1443 if (object_sizes_set (osi
, varno
, bytes
, wholesize
))
1444 osi
->changed
= true;
1448 /* Compute the dynamic object size for VAR. Return the result in SIZE and
1452 dynamic_object_size (struct object_size_info
*osi
, tree var
,
1453 tree
*size
, tree
*wholesize
)
1455 int object_size_type
= osi
->object_size_type
;
1457 if (TREE_CODE (var
) == SSA_NAME
)
1459 unsigned varno
= SSA_NAME_VERSION (var
);
1461 collect_object_sizes_for (osi
, var
);
1462 *size
= object_sizes_get (osi
, varno
);
1463 *wholesize
= object_sizes_get (osi
, varno
, true);
1465 else if (TREE_CODE (var
) == ADDR_EXPR
)
1466 addr_object_size (osi
, var
, object_size_type
, size
, wholesize
);
1468 *size
= *wholesize
= size_unknown (object_size_type
);
1471 /* Compute object_sizes for VAR, defined at STMT, which is
1472 a COND_EXPR. Return true if the object size might need reexamination
1476 cond_expr_object_size (struct object_size_info
*osi
, tree var
, gimple
*stmt
)
1479 int object_size_type
= osi
->object_size_type
;
1480 unsigned int varno
= SSA_NAME_VERSION (var
);
1481 bool reexamine
= false;
1483 gcc_assert (gimple_assign_rhs_code (stmt
) == COND_EXPR
);
1485 if (object_sizes_unknown_p (object_size_type
, varno
))
1488 then_
= gimple_assign_rhs2 (stmt
);
1489 else_
= gimple_assign_rhs3 (stmt
);
1491 if (object_size_type
& OST_DYNAMIC
)
1493 tree then_size
, then_wholesize
, else_size
, else_wholesize
;
1495 dynamic_object_size (osi
, then_
, &then_size
, &then_wholesize
);
1496 if (!size_unknown_p (then_size
, object_size_type
))
1497 dynamic_object_size (osi
, else_
, &else_size
, &else_wholesize
);
1499 tree cond_size
, cond_wholesize
;
1500 if (size_unknown_p (then_size
, object_size_type
)
1501 || size_unknown_p (else_size
, object_size_type
))
1502 cond_size
= cond_wholesize
= size_unknown (object_size_type
);
1505 cond_size
= fold_build3 (COND_EXPR
, sizetype
,
1506 gimple_assign_rhs1 (stmt
),
1507 then_size
, else_size
);
1508 cond_wholesize
= fold_build3 (COND_EXPR
, sizetype
,
1509 gimple_assign_rhs1 (stmt
),
1510 then_wholesize
, else_wholesize
);
1513 object_sizes_set (osi
, varno
, cond_size
, cond_wholesize
);
1518 if (TREE_CODE (then_
) == SSA_NAME
)
1519 reexamine
|= merge_object_sizes (osi
, var
, then_
);
1521 expr_object_size (osi
, var
, then_
);
1523 if (object_sizes_unknown_p (object_size_type
, varno
))
1526 if (TREE_CODE (else_
) == SSA_NAME
)
1527 reexamine
|= merge_object_sizes (osi
, var
, else_
);
1529 expr_object_size (osi
, var
, else_
);
1534 /* Find size of an object passed as a parameter to the function. */
1537 parm_object_size (struct object_size_info
*osi
, tree var
)
1539 int object_size_type
= osi
->object_size_type
;
1540 tree parm
= SSA_NAME_VAR (var
);
1542 if (!(object_size_type
& OST_DYNAMIC
) || !POINTER_TYPE_P (TREE_TYPE (parm
)))
1544 expr_object_size (osi
, var
, parm
);
1548 /* Look for access attribute. */
1551 tree fndecl
= cfun
->decl
;
1552 const attr_access
*access
= get_parm_access (rdwr_idx
, parm
, fndecl
);
1553 tree typesize
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (parm
)));
1554 tree sz
= NULL_TREE
;
1556 /* If we have an explicit access attribute with a usable size argument... */
1557 if (access
&& access
->sizarg
!= UINT_MAX
&& !access
->internal_p
1558 /* ... and either PARM is void * or has a type that is complete and has a
1560 && ((typesize
&& poly_int_tree_p (typesize
))
1561 || (!typesize
&& VOID_TYPE_P (TREE_TYPE (TREE_TYPE (parm
))))))
1563 tree fnargs
= DECL_ARGUMENTS (fndecl
);
1564 tree arg
= NULL_TREE
;
1565 unsigned argpos
= 0;
1567 /* ... then walk through the parameters to pick the size parameter and
1568 safely scale it by the type size if needed. */
1569 for (arg
= fnargs
; arg
; arg
= TREE_CHAIN (arg
), ++argpos
)
1570 if (argpos
== access
->sizarg
&& INTEGRAL_TYPE_P (TREE_TYPE (arg
)))
1572 sz
= get_or_create_ssa_default_def (cfun
, arg
);
1573 if (sz
!= NULL_TREE
)
1575 sz
= fold_convert (sizetype
, sz
);
1577 sz
= size_binop (MULT_EXPR
, sz
, typesize
);
1583 sz
= size_unknown (object_size_type
);
1585 object_sizes_set (osi
, SSA_NAME_VERSION (var
), sz
, sz
);
1588 /* Compute an object size expression for VAR, which is the result of a PHI
1592 phi_dynamic_object_size (struct object_size_info
*osi
, tree var
)
1594 int object_size_type
= osi
->object_size_type
;
1595 unsigned int varno
= SSA_NAME_VERSION (var
);
1596 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
1597 unsigned i
, num_args
= gimple_phi_num_args (stmt
);
1598 bool wholesize_needed
= false;
1600 /* The extra space is for the PHI result at the end, which object_sizes_set
1602 tree sizes
= make_tree_vec (num_args
+ 1);
1603 tree wholesizes
= make_tree_vec (num_args
+ 1);
1605 /* Bail out if the size of any of the PHI arguments cannot be
1607 for (i
= 0; i
< num_args
; i
++)
1609 edge e
= gimple_phi_arg_edge (as_a
<gphi
*> (stmt
), i
);
1610 if (e
->flags
& EDGE_COMPLEX
)
1613 tree rhs
= gimple_phi_arg_def (stmt
, i
);
1614 tree size
, wholesize
;
1616 dynamic_object_size (osi
, rhs
, &size
, &wholesize
);
1618 if (size_unknown_p (size
, object_size_type
))
1621 if (size
!= wholesize
)
1622 wholesize_needed
= true;
1624 TREE_VEC_ELT (sizes
, i
) = size
;
1625 TREE_VEC_ELT (wholesizes
, i
) = wholesize
;
1631 ggc_free (wholesizes
);
1632 sizes
= wholesizes
= size_unknown (object_size_type
);
1635 /* Point to the same TREE_VEC so that we can avoid emitting two PHI
1637 else if (!wholesize_needed
)
1639 ggc_free (wholesizes
);
1643 object_sizes_set (osi
, varno
, sizes
, wholesizes
);
1646 /* Compute object sizes for VAR.
1647 For ADDR_EXPR an object size is the number of remaining bytes
1648 to the end of the object (where what is considered an object depends on
1649 OSI->object_size_type).
1650 For allocation GIMPLE_CALL like malloc or calloc object size is the size
1652 For POINTER_PLUS_EXPR where second operand is a constant integer,
1653 object size is object size of the first operand minus the constant.
1654 If the constant is bigger than the number of remaining bytes until the
1655 end of the object, object size is 0, but if it is instead a pointer
1656 subtraction, object size is size_unknown (object_size_type).
1657 To differentiate addition from subtraction, ADDR_EXPR returns
1658 size_unknown (object_size_type) for all objects bigger than half of the
1659 address space, and constants less than half of the address space are
1660 considered addition, while bigger constants subtraction.
1661 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
1662 object size is object size of that argument.
1663 Otherwise, object size is the maximum of object sizes of variables
1664 that it might be set to. */
1667 collect_object_sizes_for (struct object_size_info
*osi
, tree var
)
1669 int object_size_type
= osi
->object_size_type
;
1670 unsigned int varno
= SSA_NAME_VERSION (var
);
1674 if (bitmap_bit_p (computed
[object_size_type
], varno
))
1679 if (bitmap_set_bit (osi
->visited
, varno
))
1681 /* Initialize to 0 for maximum size and M1U for minimum size so that
1682 it gets immediately overridden. */
1683 object_sizes_initialize (osi
, varno
,
1684 size_initval (object_size_type
),
1685 size_initval (object_size_type
));
1689 /* Found a dependency loop. Mark the variable for later
1691 if (object_size_type
& OST_DYNAMIC
)
1692 object_sizes_set_temp (osi
, varno
);
1694 bitmap_set_bit (osi
->reexamine
, varno
);
1695 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1697 fprintf (dump_file
, "Found a dependency loop at ");
1698 print_generic_expr (dump_file
, var
, dump_flags
);
1699 fprintf (dump_file
, "\n");
1705 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1707 fprintf (dump_file
, "Visiting use-def links for ");
1708 print_generic_expr (dump_file
, var
, dump_flags
);
1709 fprintf (dump_file
, "\n");
1712 stmt
= SSA_NAME_DEF_STMT (var
);
1715 switch (gimple_code (stmt
))
1719 tree rhs
= gimple_assign_rhs1 (stmt
);
1720 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
1721 || (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
1722 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
))
1723 reexamine
= plus_stmt_object_size (osi
, var
, stmt
);
1724 else if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
1725 reexamine
= cond_expr_object_size (osi
, var
, stmt
);
1726 else if (gimple_assign_single_p (stmt
)
1727 || gimple_assign_unary_nop_p (stmt
))
1729 if (TREE_CODE (rhs
) == SSA_NAME
1730 && POINTER_TYPE_P (TREE_TYPE (rhs
)))
1731 reexamine
= merge_object_sizes (osi
, var
, rhs
);
1733 expr_object_size (osi
, var
, rhs
);
1736 unknown_object_size (osi
, var
);
1742 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
1743 tree arg
= pass_through_call (call_stmt
);
1746 if (TREE_CODE (arg
) == SSA_NAME
1747 && POINTER_TYPE_P (TREE_TYPE (arg
)))
1748 reexamine
= merge_object_sizes (osi
, var
, arg
);
1750 expr_object_size (osi
, var
, arg
);
1753 call_object_size (osi
, var
, call_stmt
);
1758 /* Pointers defined by __asm__ statements can point anywhere. */
1759 unknown_object_size (osi
, var
);
1763 if (SSA_NAME_VAR (var
)
1764 && TREE_CODE (SSA_NAME_VAR (var
)) == PARM_DECL
)
1765 parm_object_size (osi
, var
);
1767 /* Uninitialized SSA names point nowhere. */
1768 unknown_object_size (osi
, var
);
1775 if (object_size_type
& OST_DYNAMIC
)
1777 phi_dynamic_object_size (osi
, var
);
1781 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1783 tree rhs
= gimple_phi_arg (stmt
, i
)->def
;
1785 if (object_sizes_unknown_p (object_size_type
, varno
))
1788 if (TREE_CODE (rhs
) == SSA_NAME
)
1789 reexamine
|= merge_object_sizes (osi
, var
, rhs
);
1790 else if (osi
->pass
== 0)
1791 expr_object_size (osi
, var
, rhs
);
1800 if (! reexamine
|| object_sizes_unknown_p (object_size_type
, varno
))
1802 bitmap_set_bit (computed
[object_size_type
], varno
);
1803 if (!(object_size_type
& OST_DYNAMIC
))
1804 bitmap_clear_bit (osi
->reexamine
, varno
);
1808 bitmap_set_bit (osi
->reexamine
, varno
);
1809 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1811 fprintf (dump_file
, "Need to reexamine ");
1812 print_generic_expr (dump_file
, var
, dump_flags
);
1813 fprintf (dump_file
, "\n");
1819 /* Helper function for check_for_plus_in_loops. Called recursively
1823 check_for_plus_in_loops_1 (struct object_size_info
*osi
, tree var
,
1826 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
1827 unsigned int varno
= SSA_NAME_VERSION (var
);
1829 if (osi
->depths
[varno
])
1831 if (osi
->depths
[varno
] != depth
)
1835 /* Found a loop involving pointer addition. */
1836 for (sp
= osi
->tos
; sp
> osi
->stack
; )
1839 bitmap_clear_bit (osi
->reexamine
, *sp
);
1840 bitmap_set_bit (computed
[osi
->object_size_type
], *sp
);
1841 object_sizes_set (osi
, *sp
, size_zero_node
,
1842 object_sizes_get (osi
, *sp
, true));
1849 else if (! bitmap_bit_p (osi
->reexamine
, varno
))
1852 osi
->depths
[varno
] = depth
;
1853 *osi
->tos
++ = varno
;
1855 switch (gimple_code (stmt
))
1860 if ((gimple_assign_single_p (stmt
)
1861 || gimple_assign_unary_nop_p (stmt
))
1862 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
1864 tree rhs
= gimple_assign_rhs1 (stmt
);
1866 check_for_plus_in_loops_1 (osi
, rhs
, depth
);
1868 else if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1870 tree basevar
= gimple_assign_rhs1 (stmt
);
1871 tree cst
= gimple_assign_rhs2 (stmt
);
1873 gcc_assert (TREE_CODE (cst
) == INTEGER_CST
);
1875 check_for_plus_in_loops_1 (osi
, basevar
,
1876 depth
+ !integer_zerop (cst
));
1885 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
1886 tree arg
= pass_through_call (call_stmt
);
1889 if (TREE_CODE (arg
) == SSA_NAME
)
1890 check_for_plus_in_loops_1 (osi
, arg
, depth
);
1901 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1903 tree rhs
= gimple_phi_arg (stmt
, i
)->def
;
1905 if (TREE_CODE (rhs
) == SSA_NAME
)
1906 check_for_plus_in_loops_1 (osi
, rhs
, depth
);
1915 osi
->depths
[varno
] = 0;
1920 /* Check if some pointer we are computing object size of is being increased
1921 within a loop. If yes, assume all the SSA variables participating in
1922 that loop have minimum object sizes 0. */
1925 check_for_plus_in_loops (struct object_size_info
*osi
, tree var
)
1927 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
1929 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1930 and looked for a POINTER_PLUS_EXPR in the pass-through
1931 argument, if any. In GIMPLE, however, such an expression
1932 is not a valid call operand. */
1934 if (is_gimple_assign (stmt
)
1935 && gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1937 tree basevar
= gimple_assign_rhs1 (stmt
);
1938 tree cst
= gimple_assign_rhs2 (stmt
);
1940 gcc_assert (TREE_CODE (cst
) == INTEGER_CST
);
1942 /* Skip non-positive offsets. */
1943 if (integer_zerop (cst
) || compare_tree_int (cst
, offset_limit
) > 0)
1946 osi
->depths
[SSA_NAME_VERSION (basevar
)] = 1;
1947 *osi
->tos
++ = SSA_NAME_VERSION (basevar
);
1948 check_for_plus_in_loops_1 (osi
, var
, 2);
1949 osi
->depths
[SSA_NAME_VERSION (basevar
)] = 0;
1955 /* Initialize data structures for the object size computation. */
1958 init_object_sizes (void)
1960 int object_size_type
;
1965 for (object_size_type
= 0; object_size_type
< OST_END
; object_size_type
++)
1967 object_sizes_grow (object_size_type
);
1968 computed
[object_size_type
] = BITMAP_ALLOC (NULL
);
1971 init_offset_limit ();
1975 /* Destroy data structures after the object size computation. */
1978 fini_object_sizes (void)
1980 int object_size_type
;
1982 for (object_size_type
= 0; object_size_type
< OST_END
; object_size_type
++)
1984 object_sizes_release (object_size_type
);
1985 BITMAP_FREE (computed
[object_size_type
]);
1989 /* Dummy valueize function. */
1992 do_valueize (tree t
)
1997 /* Process a __builtin_object_size or __builtin_dynamic_object_size call in
1998 CALL early for subobjects before any object information is lost due to
1999 optimization. Insert a MIN or MAX expression of the result and
2000 __builtin_object_size at I so that it may be processed in the second pass.
2001 __builtin_dynamic_object_size is treated like __builtin_object_size here
2002 since we're only looking for constant bounds. */
2005 early_object_sizes_execute_one (gimple_stmt_iterator
*i
, gimple
*call
)
2007 tree ost
= gimple_call_arg (call
, 1);
2008 tree lhs
= gimple_call_lhs (call
);
2009 gcc_assert (lhs
!= NULL_TREE
);
2011 if (!tree_fits_uhwi_p (ost
))
2014 unsigned HOST_WIDE_INT object_size_type
= tree_to_uhwi (ost
);
2015 tree ptr
= gimple_call_arg (call
, 0);
2017 if (object_size_type
!= 1 && object_size_type
!= 3)
2020 if (TREE_CODE (ptr
) != ADDR_EXPR
&& TREE_CODE (ptr
) != SSA_NAME
)
2023 tree type
= TREE_TYPE (lhs
);
2025 if (!compute_builtin_object_size (ptr
, object_size_type
, &bytes
)
2026 || !int_fits_type_p (bytes
, type
))
2029 tree tem
= make_ssa_name (type
);
2030 gimple_call_set_lhs (call
, tem
);
2031 enum tree_code code
= object_size_type
& OST_MINIMUM
? MAX_EXPR
: MIN_EXPR
;
2032 tree cst
= fold_convert (type
, bytes
);
2033 gimple
*g
= gimple_build_assign (lhs
, code
, tem
, cst
);
2034 gsi_insert_after (i
, g
, GSI_NEW_STMT
);
2038 /* Attempt to fold one __builtin_dynamic_object_size call in CALL into an
2039 expression and insert it at I. Return true if it succeeds. */
2042 dynamic_object_sizes_execute_one (gimple_stmt_iterator
*i
, gimple
*call
)
2044 gcc_assert (gimple_call_num_args (call
) == 2);
2047 args
[0] = gimple_call_arg (call
, 0);
2048 args
[1] = gimple_call_arg (call
, 1);
2050 location_t loc
= EXPR_LOC_OR_LOC (args
[0], input_location
);
2051 tree result_type
= gimple_call_return_type (as_a
<gcall
*> (call
));
2052 tree result
= fold_builtin_call_array (loc
, result_type
,
2053 gimple_call_fn (call
), 2, args
);
2058 /* fold_builtin_call_array may wrap the result inside a
2060 STRIP_NOPS (result
);
2061 gimplify_and_update_call_from_tree (i
, result
);
2063 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2065 fprintf (dump_file
, "Simplified (dynamic)\n ");
2066 print_gimple_stmt (dump_file
, call
, 0, dump_flags
);
2067 fprintf (dump_file
, " to ");
2068 print_generic_expr (dump_file
, result
);
2069 fprintf (dump_file
, "\n");
2075 object_sizes_execute (function
*fun
, bool early
)
2080 FOR_EACH_BB_FN (bb
, fun
)
2082 gimple_stmt_iterator i
;
2083 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
2086 bool dynamic
= false;
2088 gimple
*call
= gsi_stmt (i
);
2089 if (gimple_call_builtin_p (call
, BUILT_IN_DYNAMIC_OBJECT_SIZE
))
2091 else if (!gimple_call_builtin_p (call
, BUILT_IN_OBJECT_SIZE
))
2094 tree lhs
= gimple_call_lhs (call
);
2098 init_object_sizes ();
2100 /* If early, only attempt to fold
2101 __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
2102 and rather than folding the builtin to the constant if any,
2103 create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
2104 call result and the computed constant. Do the same for
2105 __builtin_dynamic_object_size too. */
2108 early_object_sizes_execute_one (&i
, call
);
2114 if (dynamic_object_sizes_execute_one (&i
, call
))
2118 /* If we could not find a suitable size expression, lower to
2119 __builtin_object_size so that we may at least get a
2120 constant lower or higher estimate. */
2121 tree bosfn
= builtin_decl_implicit (BUILT_IN_OBJECT_SIZE
);
2122 gimple_call_set_fndecl (call
, bosfn
);
2125 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2127 print_generic_expr (dump_file
, gimple_call_arg (call
, 0),
2130 ": Retrying as __builtin_object_size\n");
2135 result
= gimple_fold_stmt_to_constant (call
, do_valueize
);
2138 tree ost
= gimple_call_arg (call
, 1);
2140 if (tree_fits_uhwi_p (ost
))
2142 unsigned HOST_WIDE_INT object_size_type
= tree_to_uhwi (ost
);
2144 if (object_size_type
& OST_MINIMUM
)
2145 result
= build_zero_cst (size_type_node
);
2146 else if (object_size_type
< OST_END
)
2147 result
= fold_convert (size_type_node
,
2148 integer_minus_one_node
);
2155 gcc_assert (TREE_CODE (result
) == INTEGER_CST
);
2157 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2159 fprintf (dump_file
, "Simplified\n ");
2160 print_gimple_stmt (dump_file
, call
, 0, dump_flags
);
2161 fprintf (dump_file
, " to ");
2162 print_generic_expr (dump_file
, result
);
2163 fprintf (dump_file
, "\n");
2166 /* Propagate into all uses and fold those stmts. */
2167 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
2168 replace_uses_by (lhs
, result
);
2170 replace_call_with_value (&i
, result
);
2174 fini_object_sizes ();
2178 /* Simple pass to optimize all __builtin_object_size () builtins. */
2182 const pass_data pass_data_object_sizes
=
2184 GIMPLE_PASS
, /* type */
2186 OPTGROUP_NONE
, /* optinfo_flags */
2187 TV_NONE
, /* tv_id */
2188 ( PROP_cfg
| PROP_ssa
), /* properties_required */
2189 PROP_objsz
, /* properties_provided */
2190 0, /* properties_destroyed */
2191 0, /* todo_flags_start */
2192 0, /* todo_flags_finish */
2195 class pass_object_sizes
: public gimple_opt_pass
2198 pass_object_sizes (gcc::context
*ctxt
)
2199 : gimple_opt_pass (pass_data_object_sizes
, ctxt
)
2202 /* opt_pass methods: */
2203 opt_pass
* clone () final override
{ return new pass_object_sizes (m_ctxt
); }
2204 unsigned int execute (function
*fun
) final override
2206 return object_sizes_execute (fun
, false);
2208 }; // class pass_object_sizes
2213 make_pass_object_sizes (gcc::context
*ctxt
)
2215 return new pass_object_sizes (ctxt
);
2218 /* Early version of pass to optimize all __builtin_object_size () builtins. */
2222 const pass_data pass_data_early_object_sizes
=
2224 GIMPLE_PASS
, /* type */
2225 "early_objsz", /* name */
2226 OPTGROUP_NONE
, /* optinfo_flags */
2227 TV_NONE
, /* tv_id */
2228 ( PROP_cfg
| PROP_ssa
), /* properties_required */
2229 0, /* properties_provided */
2230 0, /* properties_destroyed */
2231 0, /* todo_flags_start */
2232 0, /* todo_flags_finish */
2235 class pass_early_object_sizes
: public gimple_opt_pass
2238 pass_early_object_sizes (gcc::context
*ctxt
)
2239 : gimple_opt_pass (pass_data_early_object_sizes
, ctxt
)
2242 /* opt_pass methods: */
2243 unsigned int execute (function
*fun
) final override
2245 return object_sizes_execute (fun
, true);
2247 }; // class pass_object_sizes
2252 make_pass_early_object_sizes (gcc::context
*ctxt
)
2254 return new pass_early_object_sizes (ctxt
);