c++: Implement modules ABI for vtable emissions
[official-gcc.git] / gcc / tree-object-size.cc
blob018fbc30cbb660d48859d1c55f57fd13ede6f6a1
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2024 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-iterator.h"
33 #include "gimple-fold.h"
34 #include "tree-cfg.h"
35 #include "tree-dfa.h"
36 #include "stringpool.h"
37 #include "attribs.h"
38 #include "builtins.h"
39 #include "gimplify-me.h"
/* Per-walk bookkeeping shared by the object-size collection routines.  */
struct object_size_info
{
  /* OST_* flags (OST_MINIMUM, OST_SUBOBJECT, OST_DYNAMIC, ...) selecting
     which flavor of size is being computed.  */
  int object_size_type;
  /* Pass number; addr_object_size only collects sizes when pass == 0 and
     object_sizes_get_raw asserts pass != 0.  */
  unsigned char pass;
  /* Set when the last iteration changed some recorded size.  */
  bool changed;
  /* SSA name versions already visited / queued for reexamination.  */
  bitmap visited, reexamine;
  /* Per-SSA-name depths and a worklist stack with its top pointer;
     presumably used by the plus-in-loops checks declared above — their
     bodies are outside this chunk, so confirm there.  */
  unsigned int *depths;
  unsigned int *stack, *tos;
};
/* A pair of size estimates kept for each SSA name: the bytes remaining
   from the pointer to the end of the object, and the size of the whole
   containing object.  GTY(()) marks it for GCC's garbage collector since
   it holds tree references.  */
struct GTY(()) object_size
{
  /* Estimate of bytes till the end of the object.  */
  tree size;
  /* Estimate of the size of the whole object.  */
  tree wholesize;
};
59 static tree compute_object_offset (tree, const_tree);
60 static bool addr_object_size (struct object_size_info *,
61 const_tree, int, tree *, tree *t = NULL);
62 static tree alloc_object_size (const gcall *, int);
63 static tree pass_through_call (const gcall *);
64 static void collect_object_sizes_for (struct object_size_info *, tree);
65 static void expr_object_size (struct object_size_info *, tree, tree);
66 static bool merge_object_sizes (struct object_size_info *, tree, tree);
67 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
68 static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
69 static void init_offset_limit (void);
70 static void check_for_plus_in_loops (struct object_size_info *, tree);
71 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
72 unsigned int);
/* object_sizes[0] is upper bound for the object size and number of bytes till
   the end of the object.
   object_sizes[1] is upper bound for the object size and number of bytes till
   the end of the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for the object size and number of bytes till
   the end of the object and object_sizes[3] lower bound for subobject.

   For static object sizes, the object size and the bytes till the end of the
   object are both INTEGER_CST.  In the dynamic case, they are finally either a
   gimple variable or an INTEGER_CST.  */
static vec<object_size> object_sizes[OST_END];

/* Bitmaps what object sizes have been computed already.  */
static bitmap computed[OST_END];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;

/* Tell the generic SSA updater what kind of update is needed after the pass
   executes.  */
static unsigned todo;
96 /* Return true if VAL represents an initial size for OBJECT_SIZE_TYPE. */
98 static inline bool
99 size_initval_p (tree val, int object_size_type)
101 return ((object_size_type & OST_MINIMUM)
102 ? integer_all_onesp (val) : integer_zerop (val));
105 /* Return true if VAL represents an unknown size for OBJECT_SIZE_TYPE. */
107 static inline bool
108 size_unknown_p (tree val, int object_size_type)
110 return ((object_size_type & OST_MINIMUM)
111 ? integer_zerop (val) : integer_all_onesp (val));
114 /* Return true if VAL represents a valid size for OBJECT_SIZE_TYPE. */
116 static inline bool
117 size_valid_p (tree val, int object_size_type)
119 return ((object_size_type & OST_DYNAMIC) || TREE_CODE (val) == INTEGER_CST);
122 /* Return true if VAL is usable as an object size in the object_sizes
123 vectors. */
125 static inline bool
126 size_usable_p (tree val)
128 return TREE_CODE (val) == SSA_NAME || TREE_CODE (val) == INTEGER_CST;
131 /* Return a tree with initial value for OBJECT_SIZE_TYPE. */
133 static inline tree
134 size_initval (int object_size_type)
136 return ((object_size_type & OST_MINIMUM)
137 ? TYPE_MAX_VALUE (sizetype) : size_zero_node);
140 /* Return a tree with unknown value for OBJECT_SIZE_TYPE. */
142 static inline tree
143 size_unknown (int object_size_type)
145 return ((object_size_type & OST_MINIMUM)
146 ? size_zero_node : TYPE_MAX_VALUE (sizetype));
149 /* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names. */
151 static inline void
152 object_sizes_grow (int object_size_type)
154 if (num_ssa_names > object_sizes[object_size_type].length ())
155 object_sizes[object_size_type].safe_grow (num_ssa_names, true);
158 /* Release object_sizes[OBJECT_SIZE_TYPE]. */
160 static inline void
161 object_sizes_release (int object_size_type)
163 object_sizes[object_size_type].release ();
166 /* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown. */
168 static inline bool
169 object_sizes_unknown_p (int object_size_type, unsigned varno)
171 return size_unknown_p (object_sizes[object_size_type][varno].size,
172 object_size_type);
/* Return the raw size expression for VARNO corresponding to OSI.  This returns
   the TREE_VEC as is and should only be used during gimplification.  */

static inline object_size
object_sizes_get_raw (struct object_size_info *osi, unsigned varno)
{
  /* Raw entries (MODIFY_EXPRs / TREE_VECs) are only meaningful once the
     collection pass (pass 0) is over.  */
  gcc_assert (osi->pass != 0);
  return object_sizes[osi->object_size_type][varno];
}
185 /* Return a size tree for VARNO corresponding to OSI. If WHOLE is true, return
186 the whole object size. Use this for building size expressions based on size
187 of VARNO. */
189 static inline tree
190 object_sizes_get (struct object_size_info *osi, unsigned varno,
191 bool whole = false)
193 tree ret;
194 int object_size_type = osi->object_size_type;
196 if (whole)
197 ret = object_sizes[object_size_type][varno].wholesize;
198 else
199 ret = object_sizes[object_size_type][varno].size;
201 if (object_size_type & OST_DYNAMIC)
203 if (TREE_CODE (ret) == MODIFY_EXPR)
204 return TREE_OPERAND (ret, 0);
205 else if (TREE_CODE (ret) == TREE_VEC)
206 return TREE_VEC_ELT (ret, TREE_VEC_LENGTH (ret) - 1);
207 else
208 gcc_checking_assert (size_usable_p (ret));
211 return ret;
214 /* Set size for VARNO corresponding to OSI to VAL. */
216 static inline void
217 object_sizes_initialize (struct object_size_info *osi, unsigned varno,
218 tree val, tree wholeval)
220 int object_size_type = osi->object_size_type;
222 object_sizes[object_size_type][varno].size = val;
223 object_sizes[object_size_type][varno].wholesize = wholeval;
226 /* Return a MODIFY_EXPR for cases where SSA and EXPR have the same type. The
227 TREE_VEC is returned only in case of PHI nodes. */
229 static tree
230 bundle_sizes (tree name, tree expr)
232 gcc_checking_assert (TREE_TYPE (name) == sizetype);
234 if (TREE_CODE (expr) == TREE_VEC)
236 TREE_VEC_ELT (expr, TREE_VEC_LENGTH (expr) - 1) = name;
237 return expr;
240 gcc_checking_assert (types_compatible_p (TREE_TYPE (expr), sizetype));
241 return build2 (MODIFY_EXPR, sizetype, name, expr);
/* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
   maximum.  For static sizes, each element of TREE_VEC is always INTEGER_CST
   throughout the computation.  For dynamic sizes, each element may either be a
   gimple variable, a MODIFY_EXPR or a TREE_VEC.  The MODIFY_EXPR is for
   expressions that need to be gimplified.  TREE_VECs are special, they're
   emitted only for GIMPLE_PHI and the PHI result variable is the last element
   of the vector.  Returns true when the recorded size changed.  */

static bool
object_sizes_set (struct object_size_info *osi, unsigned varno, tree val,
		  tree wholeval)
{
  int object_size_type = osi->object_size_type;
  object_size osize = object_sizes[object_size_type][varno];
  bool changed = true;

  tree oldval = osize.size;
  tree old_wholeval = osize.wholesize;

  if (object_size_type & OST_DYNAMIC)
    {
      if (bitmap_bit_p (osi->reexamine, varno))
	{
	  /* Already queued for reexamination: reuse the previously created
	     SSA names as targets for the new expressions.  */
	  val = bundle_sizes (oldval, val);
	  wholeval = bundle_sizes (old_wholeval, wholeval);
	}
      else
	{
	  gcc_checking_assert (size_initval_p (oldval, object_size_type));
	  gcc_checking_assert (size_initval_p (old_wholeval,
					       object_size_type));
	  /* For dynamic object sizes, all object sizes that are not gimple
	     variables will need to be gimplified.  */
	  if (wholeval != val && !size_usable_p (wholeval))
	    {
	      bitmap_set_bit (osi->reexamine, varno);
	      wholeval = bundle_sizes (make_ssa_name (sizetype), wholeval);
	    }
	  if (!size_usable_p (val))
	    {
	      bitmap_set_bit (osi->reexamine, varno);
	      tree newval = bundle_sizes (make_ssa_name (sizetype), val);
	      /* Keep size and wholesize sharing one tree when they were
		 the same expression.  */
	      if (val == wholeval)
		wholeval = newval;
	      val = newval;
	    }
	  /* If the new value is a temporary variable, mark it for
	     reexamination.  */
	  else if (TREE_CODE (val) == SSA_NAME && !SSA_NAME_DEF_STMT (val))
	    bitmap_set_bit (osi->reexamine, varno);
	}
    }
  else
    {
      /* Static case: fold the new estimate into the running MIN/MAX and
	 detect whether anything actually moved.  */
      enum tree_code code = (object_size_type & OST_MINIMUM
			     ? MIN_EXPR : MAX_EXPR);

      val = size_binop (code, val, oldval);
      wholeval = size_binop (code, wholeval, old_wholeval);
      changed = (tree_int_cst_compare (val, oldval) != 0
		 || tree_int_cst_compare (old_wholeval, wholeval) != 0);
    }

  object_sizes[object_size_type][varno].size = val;
  object_sizes[object_size_type][varno].wholesize = wholeval;

  return changed;
}
313 /* Set temporary SSA names for object size and whole size to resolve dependency
314 loops in dynamic size computation. */
316 static inline void
317 object_sizes_set_temp (struct object_size_info *osi, unsigned varno)
319 tree val = object_sizes_get (osi, varno);
321 if (size_initval_p (val, osi->object_size_type))
322 object_sizes_set (osi, varno,
323 make_ssa_name (sizetype),
324 make_ssa_name (sizetype));
327 /* Initialize OFFSET_LIMIT variable. */
328 static void
329 init_offset_limit (void)
331 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
332 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
333 else
334 offset_limit = -1;
335 offset_limit /= 2;
/* Bytes at end of the object with SZ from offset OFFSET.  If WHOLESIZE is not
   NULL_TREE, use it to get the net offset of the pointer, which should always
   be positive and hence, be within OFFSET_LIMIT for valid offsets.  */

static tree
size_for_offset (tree sz, tree offset, tree wholesize = NULL_TREE)
{
  gcc_checking_assert (types_compatible_p (TREE_TYPE (sz), sizetype));

  /* For negative offsets, if we have a distinct WHOLESIZE, use it to get a net
     offset from the whole object.  */
  if (wholesize && wholesize != sz
      && (TREE_CODE (sz) != INTEGER_CST
	  || TREE_CODE (wholesize) != INTEGER_CST
	  || tree_int_cst_compare (sz, wholesize)))
    {
      gcc_checking_assert (types_compatible_p (TREE_TYPE (wholesize),
					       sizetype));

      /* Restructure SZ - OFFSET as
	 WHOLESIZE - (WHOLESIZE + OFFSET - SZ) so that the offset part, i.e.
	 WHOLESIZE + OFFSET - SZ is only allowed to be positive.  */
      tree tmp = size_binop (MAX_EXPR, wholesize, sz);
      offset = fold_build2 (PLUS_EXPR, sizetype, tmp, offset);
      offset = fold_build2 (MINUS_EXPR, sizetype, offset, sz);
      sz = tmp;
    }

  /* Safe to convert now, since a valid net offset should be non-negative.  */
  if (!useless_type_conversion_p (sizetype, TREE_TYPE (offset)))
    offset = fold_convert (sizetype, offset);

  if (TREE_CODE (offset) == INTEGER_CST)
    {
      if (integer_zerop (offset))
	return sz;

      /* Negative or too large offset even after adjustment, cannot be within
	 bounds of an object.  */
      if (compare_tree_int (offset, offset_limit) > 0)
	return size_zero_node;
    }

  /* MAX_EXPR clamps SZ up to OFFSET first so the subtraction cannot wrap
     below zero in unsigned sizetype arithmetic.  */
  return size_binop (MINUS_EXPR, size_binop (MAX_EXPR, sz, offset), offset);
}
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* Field offset is the byte offset plus the within-byte bit offset
	 scaled down to bytes.  */
      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR,
			component_ref_field_offset (expr),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These do not change the offset; recurse into the operand.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* The imaginary part sits one element past the real part.  */
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      /* Normalize the index relative to the array's lower bound.  */
      if (! integer_zerop (low_bound))
	t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      /* Negative constant index: negate and subtract instead of adding, to
	 stay in unsigned sizetype arithmetic.  */
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
/* Returns the size of the object designated by DECL considering its
   initializer if it either has one or if it would not affect its size,
   otherwise the size of the object without the initializer when MIN
   is true, else null.  An object's initializer affects the object's
   size if it's a struct type with a flexible array member.  */

tree
decl_init_size (tree decl, bool min)
{
  tree size = DECL_SIZE_UNIT (decl);
  tree type = TREE_TYPE (decl);
  /* Only a RECORD_TYPE can end in a flexible array member; anything else
     is fully described by DECL_SIZE_UNIT.  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return size;

  tree last = last_field (type);
  if (!last)
    return size;

  /* A flexible array member is a trailing array with no TYPE_SIZE.  */
  tree last_type = TREE_TYPE (last);
  if (TREE_CODE (last_type) != ARRAY_TYPE
      || TYPE_SIZE (last_type))
    return size;

  /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
     of the initializer and sometimes doesn't.  */
  size = TYPE_SIZE_UNIT (type);
  tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
  tree compsize = component_ref_size (ref);
  if (!compsize)
    return min ? size : NULL_TREE;

  /* The size includes tail padding and initializer elements.  */
  tree pos = byte_position (last);
  size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
  return size;
}
492 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
493 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
494 If unknown, return size_unknown (object_size_type). */
496 static bool
497 addr_object_size (struct object_size_info *osi, const_tree ptr,
498 int object_size_type, tree *psize, tree *pwholesize)
500 tree pt_var, pt_var_size = NULL_TREE, pt_var_wholesize = NULL_TREE;
501 tree var_size, bytes, wholebytes;
503 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
505 /* Set to unknown and overwrite just before returning if the size
506 could be determined. */
507 *psize = size_unknown (object_size_type);
508 if (pwholesize)
509 *pwholesize = size_unknown (object_size_type);
511 pt_var = TREE_OPERAND (ptr, 0);
512 while (handled_component_p (pt_var))
513 pt_var = TREE_OPERAND (pt_var, 0);
515 if (!pt_var)
516 return false;
518 if (TREE_CODE (pt_var) == MEM_REF)
520 tree sz, wholesize;
522 if (!osi || (object_size_type & OST_SUBOBJECT) != 0
523 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
525 compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
526 object_size_type & ~OST_SUBOBJECT, &sz);
527 wholesize = sz;
529 else
531 tree var = TREE_OPERAND (pt_var, 0);
532 if (osi->pass == 0)
533 collect_object_sizes_for (osi, var);
534 if (bitmap_bit_p (computed[object_size_type],
535 SSA_NAME_VERSION (var)))
537 sz = object_sizes_get (osi, SSA_NAME_VERSION (var));
538 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (var), true);
540 else
541 sz = wholesize = size_unknown (object_size_type);
543 if (!size_unknown_p (sz, object_size_type))
544 sz = size_for_offset (sz, TREE_OPERAND (pt_var, 1), wholesize);
546 if (!size_unknown_p (sz, object_size_type)
547 && (TREE_CODE (sz) != INTEGER_CST
548 || compare_tree_int (sz, offset_limit) < 0))
550 pt_var_size = sz;
551 pt_var_wholesize = wholesize;
554 else if (DECL_P (pt_var))
556 pt_var_size = pt_var_wholesize
557 = decl_init_size (pt_var, object_size_type & OST_MINIMUM);
558 if (!pt_var_size)
559 return false;
561 else if (TREE_CODE (pt_var) == STRING_CST)
562 pt_var_size = pt_var_wholesize = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
563 else
564 return false;
566 if (pt_var_size)
568 /* Validate the size determined above if it is a constant. */
569 if (TREE_CODE (pt_var_size) == INTEGER_CST
570 && compare_tree_int (pt_var_size, offset_limit) >= 0)
571 return false;
574 if (pt_var != TREE_OPERAND (ptr, 0))
576 tree var;
578 if (object_size_type & OST_SUBOBJECT)
580 var = TREE_OPERAND (ptr, 0);
582 while (var != pt_var
583 && TREE_CODE (var) != BIT_FIELD_REF
584 && TREE_CODE (var) != COMPONENT_REF
585 && TREE_CODE (var) != ARRAY_REF
586 && TREE_CODE (var) != ARRAY_RANGE_REF
587 && TREE_CODE (var) != REALPART_EXPR
588 && TREE_CODE (var) != IMAGPART_EXPR)
589 var = TREE_OPERAND (var, 0);
590 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
591 var = TREE_OPERAND (var, 0);
592 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
593 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
594 || (pt_var_size && TREE_CODE (pt_var_size) == INTEGER_CST
595 && tree_int_cst_lt (pt_var_size,
596 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
597 var = pt_var;
598 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
600 tree v = var;
601 /* For &X->fld, compute object size if fld isn't a flexible array
602 member. */
603 bool is_flexible_array_mem_ref = false;
604 while (v && v != pt_var)
605 switch (TREE_CODE (v))
607 case ARRAY_REF:
608 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0))))
610 tree domain
611 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
612 if (domain && TYPE_MAX_VALUE (domain))
614 v = NULL_TREE;
615 break;
618 v = TREE_OPERAND (v, 0);
619 break;
620 case REALPART_EXPR:
621 case IMAGPART_EXPR:
622 v = NULL_TREE;
623 break;
624 case COMPONENT_REF:
625 /* When the ref is not to an aggregate type, i.e, an array,
626 a record or a union, it will not have flexible size,
627 compute the object size directly. */
628 if (!AGGREGATE_TYPE_P (TREE_TYPE (v)))
630 v = NULL_TREE;
631 break;
633 /* if the ref is to a record or union type, but the type
634 does not include a flexible array recursively, compute
635 the object size directly. */
636 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (v)))
638 if (!TYPE_INCLUDES_FLEXARRAY (TREE_TYPE (v)))
640 v = NULL_TREE;
641 break;
643 else
645 v = TREE_OPERAND (v, 0);
646 break;
649 /* Now the ref is to an array type. */
650 gcc_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
651 is_flexible_array_mem_ref = array_ref_flexible_size_p (v);
652 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
653 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
654 != UNION_TYPE
655 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
656 != QUAL_UNION_TYPE)
657 break;
658 else
659 v = TREE_OPERAND (v, 0);
660 if (TREE_CODE (v) == COMPONENT_REF
661 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
662 == RECORD_TYPE)
664 /* compute object size only if v is not a
665 flexible array member. */
666 if (!is_flexible_array_mem_ref)
668 v = NULL_TREE;
669 break;
671 v = TREE_OPERAND (v, 0);
673 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
674 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
675 != UNION_TYPE
676 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
677 != QUAL_UNION_TYPE)
678 break;
679 else
680 v = TREE_OPERAND (v, 0);
681 if (v != pt_var)
682 v = NULL_TREE;
683 else
684 v = pt_var;
685 break;
686 default:
687 v = pt_var;
688 break;
690 if (v == pt_var)
691 var = pt_var;
694 else
695 var = pt_var;
697 if (var != pt_var)
699 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
700 if (!TREE_CONSTANT (var_size))
701 var_size = get_or_create_ssa_default_def (cfun, var_size);
702 if (!var_size)
703 return false;
705 else if (!pt_var_size)
706 return false;
707 else
708 var_size = pt_var_size;
709 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
710 if (bytes != error_mark_node)
712 bytes = size_for_offset (var_size, bytes);
713 if (var != pt_var && pt_var_size && TREE_CODE (pt_var) == MEM_REF)
715 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0),
716 pt_var);
717 if (bytes2 != error_mark_node)
719 bytes2 = size_for_offset (pt_var_size, bytes2);
720 bytes = size_binop (MIN_EXPR, bytes, bytes2);
724 else
725 bytes = size_unknown (object_size_type);
727 wholebytes
728 = object_size_type & OST_SUBOBJECT ? var_size : pt_var_wholesize;
730 else if (!pt_var_size)
731 return false;
732 else
734 bytes = pt_var_size;
735 wholebytes = pt_var_wholesize;
738 if (!size_unknown_p (bytes, object_size_type)
739 && size_valid_p (bytes, object_size_type)
740 && !size_unknown_p (bytes, object_size_type)
741 && size_valid_p (wholebytes, object_size_type))
743 *psize = bytes;
744 if (pwholesize)
745 *pwholesize = wholebytes;
746 return true;
749 return false;
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles calls to functions declared with attribute alloc_size.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return size_unknown (object_size_type).  */

static tree
alloc_object_size (const gcall *call, int object_size_type)
{
  gcc_assert (is_gimple_call (call));

  /* Prefer the fndecl's type (which carries the attribute for declared
     functions) and fall back to the call's fntype.  */
  tree calltype;
  tree callfn = gimple_call_fndecl (call);
  if (callfn)
    calltype = TREE_TYPE (callfn);
  else
    calltype = gimple_call_fntype (call);

  if (!calltype)
    return size_unknown (object_size_type);

  /* Set to positions of alloc_size arguments.  */
  int arg1 = -1, arg2 = -1;
  tree alloc_size = lookup_attribute ("alloc_size",
				      TYPE_ATTRIBUTES (calltype));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      /* Attribute values are 1-based argument positions; convert to
	 0-based indices.  */
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
	arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }
  else if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	   && callfn
	   && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callfn)))
    /* alloca-like builtins take the size as their first argument.  */
    arg1 = 0;

  /* Non-const arguments are OK here, let the caller handle constness.  */
  if (arg1 < 0
      || (unsigned) arg1 >= gimple_call_num_args (call)
      || (arg2 >= 0 && (unsigned) arg2 >= gimple_call_num_args (call)))
    return size_unknown (object_size_type);

  /* Reject arguments whose precision cannot be safely widened or
     truncated into sizetype.  */
  tree targ1 = gimple_call_arg (call, arg1);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (targ1))
      || TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (sizetype))
    return size_unknown (object_size_type);
  targ1 = fold_convert (sizetype, targ1);
  tree bytes = NULL_TREE;
  if (arg2 >= 0)
    {
      /* Two-argument form (e.g. calloc-style): size is ARG1 * ARG2.  */
      tree targ2 = gimple_call_arg (call, arg2);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (targ2))
	  || TYPE_PRECISION (TREE_TYPE (targ2)) > TYPE_PRECISION (sizetype))
	return size_unknown (object_size_type);
      targ2 = fold_convert (sizetype, targ2);
      bytes = size_binop (MULT_EXPR, targ1, targ2);
    }
  else
    bytes = targ1;

  return bytes ? bytes : size_unknown (object_size_type);
}
/* Compute __builtin_object_size for CALL, which is a call to either
   BUILT_IN_STRDUP or BUILT_IN_STRNDUP; IS_STRNDUP indicates which it is.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return size_unknown (object_size_type).  */

static tree
strdup_object_size (const gcall *call, int object_size_type, bool is_strndup)
{
  tree src = gimple_call_arg (call, 0);
  tree sz = size_unknown (object_size_type);
  tree n = NULL_TREE;

  /* strndup allocates at most N + 1 bytes (N limit plus the NUL).  */
  if (is_strndup)
    n = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
		     gimple_call_arg (call, 1));
  /* For strdup, simply emit strlen (SRC) + 1 and let the optimizer fold it the
     way it likes.  */
  else
    {
      tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
      if (strlen_fn)
	{
	  sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
			    build_call_expr (strlen_fn, 1, src));
	  /* Emitting a new call requires a virtual-SSA update.  */
	  todo = TODO_update_ssa_only_virtuals;
	}
    }

  /* In all other cases, return the size of SRC since the object size cannot
     exceed that.  We cannot do this for OST_MINIMUM unless SRC points into a
     string constant since otherwise the object size could go all the way down
     to zero.  */
  if (!size_valid_p (sz, object_size_type)
      || size_unknown_p (sz, object_size_type))
    {
      tree wholesrc = NULL_TREE;
      if (TREE_CODE (src) == ADDR_EXPR)
	wholesrc = get_base_address (TREE_OPERAND (src, 0));

      /* If the source points within a string constant, we try to get its
	 length.  */
      if (wholesrc && TREE_CODE (wholesrc) == STRING_CST)
	{
	  tree len = c_strlen (src, 0);
	  if (len)
	    sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node, len);
	}

      /* For maximum estimate, our next best guess is the object size of the
	 source.  */
      if (size_unknown_p (sz, object_size_type)
	  && !(object_size_type & OST_MINIMUM))
	compute_builtin_object_size (src, object_size_type, &sz);
    }

  /* String duplication allocates at least one byte, so we should never fail
     for OST_MINIMUM.  */
  if ((!size_valid_p (sz, object_size_type)
       || size_unknown_p (sz, object_size_type))
      && (object_size_type & OST_MINIMUM))
    sz = size_one_node;

  /* Factor in the N.  */
  return n ? fold_build2 (MIN_EXPR, sizetype, n, sz) : sz;
}
883 /* If object size is propagated from one of function's arguments directly
884 to its return value, return that argument for GIMPLE_CALL statement CALL.
885 Otherwise return NULL. */
887 static tree
888 pass_through_call (const gcall *call)
890 unsigned rf = gimple_call_return_flags (call);
891 if (rf & ERF_RETURNS_ARG)
893 unsigned argnum = rf & ERF_RETURN_ARG_MASK;
894 if (argnum < gimple_call_num_args (call))
895 return gimple_call_arg (call, argnum);
898 /* __builtin_assume_aligned is intentionally not marked RET1. */
899 if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
900 return gimple_call_arg (call, 0);
902 return NULL_TREE;
/* Emit PHI nodes for the size expressions of STMT, a GIMPLE_PHI.  SIZE and
   WHOLESIZE are the TREE_VEC bundles built during collection; their last
   element is the PHI result SSA name and the other elements are the
   per-edge size expressions.  */

static void
emit_phi_nodes (gimple *stmt, tree size, tree wholesize)
{
  tree phires;
  gphi *wholephi = NULL;

  /* A separate whole-size PHI is only needed when it differs from SIZE.  */
  if (wholesize != size)
    {
      phires = TREE_VEC_ELT (wholesize, TREE_VEC_LENGTH (wholesize) - 1);
      wholephi = create_phi_node (phires, gimple_bb (stmt));
    }

  phires = TREE_VEC_ELT (size, TREE_VEC_LENGTH (size) - 1);
  gphi *phi = create_phi_node (phires, gimple_bb (stmt));
  gphi *obj_phi = as_a <gphi *> (stmt);

  gcc_checking_assert (TREE_CODE (wholesize) == TREE_VEC);
  gcc_checking_assert (TREE_CODE (size) == TREE_VEC);

  for (unsigned i = 0; i < gimple_phi_num_args (stmt); i++)
    {
      gimple_seq seq = NULL;
      tree wsz = TREE_VEC_ELT (wholesize, i);
      tree sz = TREE_VEC_ELT (size, i);

      /* If we built an expression, we will need to build statements
	 and insert them on the edge right away.  */
      if (TREE_CODE (wsz) != SSA_NAME)
	wsz = force_gimple_operand (wsz, &seq, true, NULL);
      if (TREE_CODE (sz) != SSA_NAME)
	{
	  gimple_seq s;
	  sz = force_gimple_operand (sz, &s, true, NULL);
	  gimple_seq_add_seq (&seq, s);
	}

      if (seq)
	gsi_insert_seq_on_edge (gimple_phi_arg_edge (obj_phi, i), seq);

      /* Mirror the argument edges and locations of the object PHI.  */
      if (wholephi)
	add_phi_arg (wholephi, wsz,
		     gimple_phi_arg_edge (obj_phi, i),
		     gimple_phi_arg_location (obj_phi, i));

      add_phi_arg (phi, sz,
		   gimple_phi_arg_edge (obj_phi, i),
		   gimple_phi_arg_location (obj_phi, i));
    }
}
/* Descend through EXPR and return size_unknown if it uses any SSA variable
   object_size_set or object_size_set_temp generated, which turned out to be
   size_unknown, as noted in UNKNOWNS.  */

static tree
propagate_unknowns (object_size_info *osi, tree expr, bitmap unknowns)
{
  int object_size_type = osi->object_size_type;

  switch (TREE_CODE (expr))
    {
    case SSA_NAME:
      /* The leaf case: unknown iff the name's version is in UNKNOWNS.  */
      if (bitmap_bit_p (unknowns, SSA_NAME_VERSION (expr)))
	return size_unknown (object_size_type);
      return expr;

    case MIN_EXPR:
    case MAX_EXPR:
      {
	/* Both operands must be known.  */
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0),
				       unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;

	res = propagate_unknowns (osi, TREE_OPERAND (expr, 1), unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;

	return expr;
      }
    case MODIFY_EXPR:
      {
	/* Only the RHS (the expression being assigned) matters; the LHS
	   is the target SSA name itself.  */
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 1),
				       unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;
	return expr;
      }
    case TREE_VEC:
      /* PHI bundle: every element must be known.  */
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	{
	  tree res = propagate_unknowns (osi, TREE_VEC_ELT (expr, i),
					 unknowns);
	  if (size_unknown_p (res, object_size_type))
	    return res;
	}
      return expr;
    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	/* Only the first operand is checked here; the second is
	   presumably always a constant offset — confirm at call sites.  */
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0),
				       unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;

	return expr;
      }
    default:
      return expr;
    }
}
/* Walk through size expressions that need reexamination and generate
   statements for them.  */

static void
gimplify_size_expressions (object_size_info *osi)
{
  int object_size_type = osi->object_size_type;
  bitmap_iterator bi;
  unsigned int i;
  bool changed;

  /* Step 1: Propagate unknowns into expressions.  Iterate to a fixed point
     since marking one size unknown may invalidate expressions that use
     its SSA name.  */
  bitmap reexamine = BITMAP_ALLOC (NULL);
  bitmap_copy (reexamine, osi->reexamine);
  bitmap unknowns = BITMAP_ALLOC (NULL);
  do
    {
      changed = false;
      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
	{
	  object_size cur = object_sizes_get_raw (osi, i);

	  if (size_unknown_p (propagate_unknowns (osi, cur.size, unknowns),
			      object_size_type)
	      || size_unknown_p (propagate_unknowns (osi, cur.wholesize,
						     unknowns),
				 object_size_type))
	    {
	      /* Record the SSAs we're overwriting to propagate the
		 unknowns.  */
	      tree oldval = object_sizes_get (osi, i);
	      tree old_wholeval = object_sizes_get (osi, i, true);

	      bitmap_set_bit (unknowns, SSA_NAME_VERSION (oldval));
	      bitmap_set_bit (unknowns, SSA_NAME_VERSION (old_wholeval));
	      object_sizes_initialize (osi, i,
				       size_unknown (object_size_type),
				       size_unknown (object_size_type));
	      bitmap_clear_bit (osi->reexamine, i);
	      changed = true;
	    }
	}
      bitmap_copy (reexamine, osi->reexamine);
    }
  while (changed);

  /* Release all unknowns.  */
  EXECUTE_IF_SET_IN_BITMAP (unknowns, 0, i, bi)
    release_ssa_name (ssa_name (i));

  BITMAP_FREE (unknowns);
  BITMAP_FREE (reexamine);

  /* Expand all size expressions to put their definitions close to the objects
     for which size is being computed.  */
  EXECUTE_IF_SET_IN_BITMAP (osi->reexamine, 0, i, bi)
    {
      gimple_seq seq = NULL;
      object_size osize = object_sizes_get_raw (osi, i);

      gimple *stmt = SSA_NAME_DEF_STMT (ssa_name (i));
      enum gimple_code code = gimple_code (stmt);

      /* PHI nodes need special attention.  */
      if (code == GIMPLE_PHI)
	emit_phi_nodes (stmt, osize.size, osize.wholesize);
      else
	{
	  tree size_expr = NULL_TREE;

	  /* Bundle wholesize in with the size to gimplify if needed.  */
	  if (osize.wholesize != osize.size
	      && !size_usable_p (osize.wholesize))
	    size_expr = size_binop (COMPOUND_EXPR,
				    osize.wholesize,
				    osize.size);
	  else if (!size_usable_p (osize.size))
	    size_expr = osize.size;

	  if (size_expr)
	    {
	      gimple_stmt_iterator gsi;
	      /* GIMPLE_NOP means no defining statement (e.g. a default
		 def); insert at the start of the entry successor block.  */
	      if (code == GIMPLE_NOP)
		gsi = gsi_start_bb (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	      else
		gsi = gsi_for_stmt (stmt);

	      force_gimple_operand (size_expr, &seq, true, NULL);
	      gsi_insert_seq_before (&gsi, seq, GSI_CONTINUE_LINKING);
	    }
	}

      /* We're done, so replace the MODIFY_EXPRs with the SSA names.  */
      object_sizes_initialize (osi, i,
			       object_sizes_get (osi, i),
			       object_sizes_get (osi, i, true));
    }
}
1118 /* Compute __builtin_object_size value for PTR and set *PSIZE to
1119 the resulting value. If the declared object is known and PDECL
1120 is nonnull, sets *PDECL to the object's DECL. OBJECT_SIZE_TYPE
1121 is the second argument to __builtin_object_size.
1122 Returns true on success and false when the object size could not
1123 be determined. */
1125 bool
1126 compute_builtin_object_size (tree ptr, int object_size_type,
1127 tree *psize)
1129 gcc_assert (object_size_type >= 0 && object_size_type < OST_END);
1131 /* Set to unknown and overwrite just before returning if the size
1132 could be determined. */
1133 *psize = size_unknown (object_size_type);
1135 if (! offset_limit)
1136 init_offset_limit ();
     /* Taking the address of a decl/component: size is directly computable.  */
1138 if (TREE_CODE (ptr) == ADDR_EXPR)
1139 return addr_object_size (NULL, ptr, object_size_type, psize);
1141 if (TREE_CODE (ptr) != SSA_NAME
1142 || !POINTER_TYPE_P (TREE_TYPE (ptr)))
1143 return false;
     /* COMPUTED is only allocated by init_object_sizes; when it is missing we
	are outside the objsz passes.  */
1145 if (computed[object_size_type] == NULL)
1147 if (optimize || object_size_type & OST_SUBOBJECT)
1148 return false;
1150 /* When not optimizing, rather than failing, make a small effort
1151 to determine the object size without the full benefit of
1152 the (costly) computation below. */
1153 gimple *def = SSA_NAME_DEF_STMT (ptr);
1154 if (gimple_code (def) == GIMPLE_ASSIGN)
1156 tree_code code = gimple_assign_rhs_code (def);
1157 if (code == POINTER_PLUS_EXPR)
1159 tree offset = gimple_assign_rhs2 (def);
1160 ptr = gimple_assign_rhs1 (def);
     /* For static sizes, only accept offsets that fit a signed HWI and do
	not exceed the offset limit; dynamic sizes take any offset.  */
1162 if (((object_size_type & OST_DYNAMIC)
1163 || (tree_fits_shwi_p (offset)
1164 && compare_tree_int (offset, offset_limit) <= 0))
1165 && compute_builtin_object_size (ptr, object_size_type,
1166 psize))
1168 *psize = size_for_offset (*psize, offset);
1169 return true;
1173 return false;
1176 struct object_size_info osi;
1177 osi.object_size_type = object_size_type;
     /* Full use-def walk, done at most once per SSA name and OST kind.  */
1178 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
1180 bitmap_iterator bi;
1181 unsigned int i;
1183 object_sizes_grow (object_size_type);
1184 if (dump_file)
1186 fprintf (dump_file, "Computing %s %s%sobject size for ",
1187 (object_size_type & OST_MINIMUM) ? "minimum" : "maximum",
1188 (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
1189 (object_size_type & OST_SUBOBJECT) ? "sub" : "");
1190 print_generic_expr (dump_file, ptr, dump_flags);
1191 fprintf (dump_file, ":\n");
1194 osi.visited = BITMAP_ALLOC (NULL);
1195 osi.reexamine = BITMAP_ALLOC (NULL);
1197 if (!(object_size_type & OST_DYNAMIC))
1199 osi.depths = NULL;
1200 osi.stack = NULL;
1201 osi.tos = NULL;
1204 /* First pass: walk UD chains, compute object sizes that can be computed.
1205 osi.reexamine bitmap at the end will contain versions of SSA_NAMES
1206 that need to be reexamined. For both static and dynamic size
1207 computation, reexamination is for propagation across dependency loops.
1208 The dynamic case has the additional use case where the computed
1209 expression needs to be gimplified. */
1210 osi.pass = 0;
1211 osi.changed = false;
1212 collect_object_sizes_for (&osi, ptr);
1214 if (object_size_type & OST_DYNAMIC)
1216 osi.pass = 1;
1217 gimplify_size_expressions (&osi);
1218 bitmap_clear (osi.reexamine);
1221 /* Second pass: keep recomputing object sizes of variables
1222 that need reexamination, until no object sizes are
1223 increased or all object sizes are computed. */
1224 if (! bitmap_empty_p (osi.reexamine))
1226 bitmap reexamine = BITMAP_ALLOC (NULL);
1228 /* If looking for minimum instead of maximum object size,
1229 detect cases where a pointer is increased in a loop.
1230 Although even without this detection pass 2 would eventually
1231 terminate, it could take a long time. If a pointer is
1232 increasing this way, we need to assume 0 object size.
1233 E.g. p = &buf[0]; while (cond) p = p + 4; */
1234 if (object_size_type & OST_MINIMUM)
1236 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
1237 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
1238 osi.tos = osi.stack;
1239 osi.pass = 1;
1240 /* collect_object_sizes_for is changing
1241 osi.reexamine bitmap, so iterate over a copy. */
1242 bitmap_copy (reexamine, osi.reexamine);
1243 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
1244 if (bitmap_bit_p (osi.reexamine, i))
1245 check_for_plus_in_loops (&osi, ssa_name (i));
1247 free (osi.depths);
1248 osi.depths = NULL;
1249 free (osi.stack);
1250 osi.stack = NULL;
1251 osi.tos = NULL;
1256 osi.pass = 2;
1257 osi.changed = false;
1258 /* collect_object_sizes_for is changing
1259 osi.reexamine bitmap, so iterate over a copy. */
1260 bitmap_copy (reexamine, osi.reexamine);
1261 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
1262 if (bitmap_bit_p (osi.reexamine, i))
1264 collect_object_sizes_for (&osi, ssa_name (i));
1265 if (dump_file && (dump_flags & TDF_DETAILS))
1267 fprintf (dump_file, "Reexamining ");
1268 print_generic_expr (dump_file, ssa_name (i),
1269 dump_flags);
1270 fprintf (dump_file, "\n");
1274 while (osi.changed);
1276 BITMAP_FREE (reexamine);
     /* Whatever is still pending after pass 2 is now final.  */
1278 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
1279 bitmap_set_bit (computed[object_size_type], i);
1281 /* Debugging dumps. */
1282 if (dump_file)
1284 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
1285 if (!object_sizes_unknown_p (object_size_type, i))
1287 print_generic_expr (dump_file, ssa_name (i),
1288 dump_flags);
1289 fprintf (dump_file,
1290 ": %s %s%sobject size ",
1291 ((object_size_type & OST_MINIMUM) ? "minimum"
1292 : "maximum"),
1293 (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
1294 (object_size_type & OST_SUBOBJECT) ? "sub" : "");
1295 print_generic_expr (dump_file, object_sizes_get (&osi, i),
1296 dump_flags);
1297 fprintf (dump_file, "\n");
1301 BITMAP_FREE (osi.reexamine);
1302 BITMAP_FREE (osi.visited);
1305 *psize = object_sizes_get (&osi, SSA_NAME_VERSION (ptr));
1306 return !size_unknown_p (*psize, object_size_type);
1309 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
1311 static void
1312 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
1314 int object_size_type = osi->object_size_type;
1315 unsigned int varno = SSA_NAME_VERSION (ptr);
1316 tree bytes, wholesize;
1318 gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1319 gcc_assert (osi->pass == 0);
     /* Strip a WITH_SIZE_EXPR wrapper; operand 0 is the actual value.  */
1321 if (TREE_CODE (value) == WITH_SIZE_EXPR)
1322 value = TREE_OPERAND (value, 0);
1324 /* Pointer variables should have been handled by merge_object_sizes. */
1325 gcc_assert (TREE_CODE (value) != SSA_NAME
1326 || !POINTER_TYPE_P (TREE_TYPE (value)));
     /* Only an address of a known object yields a size here; anything else
	is treated as unknown.  */
1328 if (TREE_CODE (value) == ADDR_EXPR)
1329 addr_object_size (osi, value, object_size_type, &bytes, &wholesize);
1330 else
1331 bytes = wholesize = size_unknown (object_size_type);
1333 object_sizes_set (osi, varno, bytes, wholesize);
1337 /* Compute object_sizes for PTR, defined to the result of a call. */
1339 static void
1340 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
1342 int object_size_type = osi->object_size_type;
1343 unsigned int varno = SSA_NAME_VERSION (ptr);
1344 tree bytes = NULL_TREE;
1346 gcc_assert (is_gimple_call (call));
1348 gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1349 gcc_assert (osi->pass == 0);
     /* strdup/strndup sizes depend on the source string, not on an explicit
	allocation argument, so they get dedicated handling.  */
1351 bool is_strdup = gimple_call_builtin_p (call, BUILT_IN_STRDUP);
1352 bool is_strndup = gimple_call_builtin_p (call, BUILT_IN_STRNDUP);
1353 if (is_strdup || is_strndup)
1354 bytes = strdup_object_size (call, object_size_type, is_strndup);
1355 else
1356 bytes = alloc_object_size (call, object_size_type);
1358 if (!size_valid_p (bytes, object_size_type))
1359 bytes = size_unknown (object_size_type);
     /* An allocation is a whole object: size and wholesize coincide.  */
1361 object_sizes_set (osi, varno, bytes, bytes);
1365 /* Compute object_sizes for PTR, defined to an unknown value. */
1367 static void
1368 unknown_object_size (struct object_size_info *osi, tree ptr)
1370 int object_size_type = osi->object_size_type;
1371 unsigned int varno = SSA_NAME_VERSION (ptr);
1373 gcc_checking_assert (!object_sizes_unknown_p (object_size_type, varno));
1374 gcc_checking_assert (osi->pass == 0);
1375 tree bytes = size_unknown (object_size_type);
     /* Both the size and the wholesize are unknown.  */
1377 object_sizes_set (osi, varno, bytes, bytes);
1381 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
1382 the object size might need reexamination later. */
1384 static bool
1385 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig)
1387 int object_size_type = osi->object_size_type;
1388 unsigned int varno = SSA_NAME_VERSION (dest);
1389 tree orig_bytes, wholesize;
     /* Nothing to refine once DEST is already unknown.  */
1391 if (object_sizes_unknown_p (object_size_type, varno))
1392 return false;
     /* In the first pass, make sure ORIG's size has been computed before
	reading it.  */
1394 if (osi->pass == 0)
1395 collect_object_sizes_for (osi, orig);
1397 orig_bytes = object_sizes_get (osi, SSA_NAME_VERSION (orig));
1398 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (orig), true);
1400 if (object_sizes_set (osi, varno, orig_bytes, wholesize))
1401 osi->changed = true;
     /* DEST needs reexamination iff ORIG itself is still pending.  */
1403 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
1407 /* Compute object_sizes for VAR, defined to the result of an assignment
1408 with operator POINTER_PLUS_EXPR. Return true if the object size might
1409 need reexamination later. */
1411 static bool
1412 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
1414 int object_size_type = osi->object_size_type;
1415 unsigned int varno = SSA_NAME_VERSION (var);
1416 tree bytes, wholesize;
1417 tree op0, op1;
1418 bool reexamine = false;
     /* Extract base pointer OP0 and offset OP1 from either PTR + OFF or
	&MEM_REF[ptr + off].  */
1420 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1422 op0 = gimple_assign_rhs1 (stmt);
1423 op1 = gimple_assign_rhs2 (stmt);
1425 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
1427 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
1428 gcc_assert (TREE_CODE (rhs) == MEM_REF);
1429 op0 = TREE_OPERAND (rhs, 0);
1430 op1 = TREE_OPERAND (rhs, 1);
1432 else
1433 gcc_unreachable ();
1435 if (object_sizes_unknown_p (object_size_type, varno))
1436 return false;
1438 /* Handle PTR + OFFSET here. */
1439 if (size_valid_p (op1, object_size_type)
1440 && (TREE_CODE (op0) == SSA_NAME || TREE_CODE (op0) == ADDR_EXPR))
1442 if (TREE_CODE (op0) == SSA_NAME)
1444 if (osi->pass == 0)
1445 collect_object_sizes_for (osi, op0)
1447 bytes = object_sizes_get (osi, SSA_NAME_VERSION (op0));
1448 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (op0), true);
1449 reexamine = bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (op0));
1451 else
1453 /* op0 will be ADDR_EXPR here. We should never come here during
1454 reexamination. */
1455 gcc_checking_assert (osi->pass == 0);
1456 addr_object_size (osi, op0, object_size_type, &bytes, &wholesize);
1459 /* size_for_offset doesn't make sense for -1 size, but it does for size 0
1460 since the wholesize could be non-zero and a negative offset could give
1461 a non-zero size. */
1462 if (size_unknown_p (bytes, 0))
1464 else if ((object_size_type & OST_DYNAMIC)
1465 || compare_tree_int (op1, offset_limit) <= 0)
1466 bytes = size_for_offset (bytes, op1, wholesize);
1467 /* In the static case, with a negative offset, the best estimate for
1468 minimum size is size_unknown but for maximum size, the wholesize is a
1469 better estimate than size_unknown. */
1470 else if (object_size_type & OST_MINIMUM)
1471 bytes = size_unknown (object_size_type);
1472 else
1473 bytes = wholesize;
1475 else
1476 bytes = wholesize = size_unknown (object_size_type);
1478 if (!size_valid_p (bytes, object_size_type)
1479 || !size_valid_p (wholesize, object_size_type))
1480 bytes = wholesize = size_unknown (object_size_type);
1482 if (object_sizes_set (osi, varno, bytes, wholesize))
1483 osi->changed = true;
1484 return reexamine;
1487 /* Compute the dynamic object size for VAR. Return the result in SIZE and
1488 WHOLESIZE. */
1490 static void
1491 dynamic_object_size (struct object_size_info *osi, tree var,
1492 tree *size, tree *wholesize)
1494 int object_size_type = osi->object_size_type;
     /* SSA names are resolved through the regular collection machinery;
	addresses directly; anything else is unknown.  */
1496 if (TREE_CODE (var) == SSA_NAME)
1498 unsigned varno = SSA_NAME_VERSION (var);
1500 collect_object_sizes_for (osi, var);
1501 *size = object_sizes_get (osi, varno);
1502 *wholesize = object_sizes_get (osi, varno, true);
1504 else if (TREE_CODE (var) == ADDR_EXPR)
1505 addr_object_size (osi, var, object_size_type, size, wholesize);
1506 else
1507 *size = *wholesize = size_unknown (object_size_type);
1510 /* Compute object_sizes for VAR, defined at STMT, which is
1511 a COND_EXPR. Return true if the object size might need reexamination
1512 later. */
1514 static bool
1515 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
1517 tree then_, else_;
1518 int object_size_type = osi->object_size_type;
1519 unsigned int varno = SSA_NAME_VERSION (var);
1520 bool reexamine = false;
1522 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
1524 if (object_sizes_unknown_p (object_size_type, varno))
1525 return false;
1527 then_ = gimple_assign_rhs2 (stmt);
1528 else_ = gimple_assign_rhs3 (stmt);
     /* Dynamic case: build a COND_EXPR selecting between the two arm sizes,
	keyed on the same condition as the statement itself.  */
1530 if (object_size_type & OST_DYNAMIC)
1532 tree then_size, then_wholesize, else_size, else_wholesize;
1534 dynamic_object_size (osi, then_, &then_size, &then_wholesize);
     /* Skip evaluating the else arm if the then arm is already unknown.  */
1535 if (!size_unknown_p (then_size, object_size_type))
1536 dynamic_object_size (osi, else_, &else_size, &else_wholesize);
1538 tree cond_size, cond_wholesize;
1539 if (size_unknown_p (then_size, object_size_type)
1540 || size_unknown_p (else_size, object_size_type))
1541 cond_size = cond_wholesize = size_unknown (object_size_type);
1542 else
1544 cond_size = fold_build3 (COND_EXPR, sizetype,
1545 gimple_assign_rhs1 (stmt),
1546 then_size, else_size);
1547 cond_wholesize = fold_build3 (COND_EXPR, sizetype,
1548 gimple_assign_rhs1 (stmt),
1549 then_wholesize, else_wholesize);
1552 object_sizes_set (osi, varno, cond_size, cond_wholesize);
1554 return false;
     /* Static case: merge the two arms like PHI arguments, bailing out as
	soon as the size degenerates to unknown.  */
1557 if (TREE_CODE (then_) == SSA_NAME)
1558 reexamine |= merge_object_sizes (osi, var, then_);
1559 else
1560 expr_object_size (osi, var, then_);
1562 if (object_sizes_unknown_p (object_size_type, varno))
1563 return reexamine;
1565 if (TREE_CODE (else_) == SSA_NAME)
1566 reexamine |= merge_object_sizes (osi, var, else_);
1567 else
1568 expr_object_size (osi, var, else_);
1570 return reexamine;
1573 /* Find size of an object passed as a parameter to the function. */
1575 static void
1576 parm_object_size (struct object_size_info *osi, tree var)
1578 int object_size_type = osi->object_size_type;
1579 tree parm = SSA_NAME_VAR (var);
     /* The access-attribute trick below only applies to dynamic sizes of
	pointer parameters; everything else goes through expr_object_size.  */
1581 if (!(object_size_type & OST_DYNAMIC) || !POINTER_TYPE_P (TREE_TYPE (parm)))
1583 expr_object_size (osi, var, parm);
1584 return;
1587 /* Look for access attribute. */
1588 rdwr_map rdwr_idx;
1590 tree fndecl = cfun->decl;
1591 const attr_access *access = get_parm_access (rdwr_idx, parm, fndecl);
1592 tree typesize = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (parm)));
1593 tree sz = NULL_TREE;
1595 /* If we have an access attribute with a usable size argument... */
1596 if (access && access->sizarg != UINT_MAX
1597 /* ... and either PARM is void * or has a type that is complete and has a
1598 constant size... */
1599 && ((typesize && poly_int_tree_p (typesize))
1600 || (!typesize && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))))
1602 tree fnargs = DECL_ARGUMENTS (fndecl);
1603 tree arg = NULL_TREE;
1604 unsigned argpos = 0;
1606 /* ... then walk through the parameters to pick the size parameter and
1607 safely scale it by the type size if needed.
1609 TODO: we could also compute the size of VLAs where the size is
1610 given by a function parameter. */
1611 for (arg = fnargs; arg; arg = TREE_CHAIN (arg), ++argpos)
1612 if (argpos == access->sizarg)
1614 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (arg)));
     /* The size parameter's default-def SSA name is the value visible on
	function entry.  */
1615 sz = get_or_create_ssa_default_def (cfun, arg);
1616 if (sz != NULL_TREE)
1618 sz = fold_convert (sizetype, sz);
     /* Access attribute counts elements; scale to bytes when the pointee
	type has a size (i.e. is not void).  */
1619 if (typesize)
1620 sz = size_binop (MULT_EXPR, sz, typesize);
1622 break;
1625 if (!sz)
1626 sz = size_unknown (object_size_type);
1628 object_sizes_set (osi, SSA_NAME_VERSION (var), sz, sz);
1631 /* Compute an object size expression for VAR, which is the result of a PHI
1632 node. */
1634 static void
1635 phi_dynamic_object_size (struct object_size_info *osi, tree var)
1637 int object_size_type = osi->object_size_type;
1638 unsigned int varno = SSA_NAME_VERSION (var);
1639 gimple *stmt = SSA_NAME_DEF_STMT (var);
1640 unsigned i, num_args = gimple_phi_num_args (stmt);
1641 bool wholesize_needed = false;
1643 /* The extra space is for the PHI result at the end, which object_sizes_set
1644 sets for us. */
1645 tree sizes = make_tree_vec (num_args + 1);
1646 tree wholesizes = make_tree_vec (num_args + 1);
1648 /* Bail out if the size of any of the PHI arguments cannot be
1649 determined. */
1650 for (i = 0; i < num_args; i++)
     /* Sizes cannot be emitted on abnormal/complex edges; give up.  */
1652 edge e = gimple_phi_arg_edge (as_a <gphi *> (stmt), i);
1653 if (e->flags & EDGE_COMPLEX)
1654 break;
1656 tree rhs = gimple_phi_arg_def (stmt, i);
1657 tree size, wholesize;
1659 dynamic_object_size (osi, rhs, &size, &wholesize);
1661 if (size_unknown_p (size, object_size_type))
1662 break;
1664 if (size != wholesize)
1665 wholesize_needed = true;
1667 TREE_VEC_ELT (sizes, i) = size;
1668 TREE_VEC_ELT (wholesizes, i) = wholesize;
     /* An early exit above means some argument size was undeterminable.  */
1671 if (i < num_args)
1673 ggc_free (sizes);
1674 ggc_free (wholesizes);
1675 sizes = wholesizes = size_unknown (object_size_type);
1678 /* Point to the same TREE_VEC so that we can avoid emitting two PHI
1679 nodes. */
1680 else if (!wholesize_needed)
1682 ggc_free (wholesizes);
1683 wholesizes = sizes;
1686 object_sizes_set (osi, varno, sizes, wholesizes);
1689 /* Compute object sizes for VAR.
1690 For ADDR_EXPR an object size is the number of remaining bytes
1691 to the end of the object (where what is considered an object depends on
1692 OSI->object_size_type).
1693 For allocation GIMPLE_CALL like malloc or calloc object size is the size
1694 of the allocation.
1695 For POINTER_PLUS_EXPR where second operand is a constant integer,
1696 object size is object size of the first operand minus the constant.
1697 If the constant is bigger than the number of remaining bytes until the
1698 end of the object, object size is 0, but if it is instead a pointer
1699 subtraction, object size is size_unknown (object_size_type).
1700 To differentiate addition from subtraction, ADDR_EXPR returns
1701 size_unknown (object_size_type) for all objects bigger than half of the
1702 address space, and constants less than half of the address space are
1703 considered addition, while bigger constants subtraction.
1704 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
1705 object size is object size of that argument.
1706 Otherwise, object size is the maximum of object sizes of variables
1707 that it might be set to. */
1709 static void
1710 collect_object_sizes_for (struct object_size_info *osi, tree var)
1712 int object_size_type = osi->object_size_type;
1713 unsigned int varno = SSA_NAME_VERSION (var);
1714 gimple *stmt;
1715 bool reexamine;
     /* Already finalized in an earlier call — nothing to do.  */
1717 if (bitmap_bit_p (computed[object_size_type], varno))
1718 return;
1720 if (osi->pass == 0)
     /* bitmap_set_bit returns true on a fresh bit, i.e. first visit.  */
1722 if (bitmap_set_bit (osi->visited, varno))
1724 /* Initialize to 0 for maximum size and M1U for minimum size so that
1725 it gets immediately overridden. */
1726 object_sizes_initialize (osi, varno,
1727 size_initval (object_size_type),
1728 size_initval (object_size_type));
1730 else
1732 /* Found a dependency loop. Mark the variable for later
1733 re-examination. */
1734 if (object_size_type & OST_DYNAMIC)
1735 object_sizes_set_temp (osi, varno);
1737 bitmap_set_bit (osi->reexamine, varno);
1738 if (dump_file && (dump_flags & TDF_DETAILS))
1740 fprintf (dump_file, "Found a dependency loop at ");
1741 print_generic_expr (dump_file, var, dump_flags);
1742 fprintf (dump_file, "\n");
1744 return;
1748 if (dump_file && (dump_flags & TDF_DETAILS))
1750 fprintf (dump_file, "Visiting use-def links for ");
1751 print_generic_expr (dump_file, var, dump_flags);
1752 fprintf (dump_file, "\n");
     /* Dispatch on the defining statement of VAR.  */
1755 stmt = SSA_NAME_DEF_STMT (var);
1756 reexamine = false;
1758 switch (gimple_code (stmt))
1760 case GIMPLE_ASSIGN:
1762 tree rhs = gimple_assign_rhs1 (stmt);
1763 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1764 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1765 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
1766 reexamine = plus_stmt_object_size (osi, var, stmt);
1767 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
1768 reexamine = cond_expr_object_size (osi, var, stmt);
1769 else if (gimple_assign_single_p (stmt)
1770 || gimple_assign_unary_nop_p (stmt))
1772 if (TREE_CODE (rhs) == SSA_NAME
1773 && POINTER_TYPE_P (TREE_TYPE (rhs)))
1774 reexamine = merge_object_sizes (osi, var, rhs);
1775 else
1776 expr_object_size (osi, var, rhs);
1778 else
1779 unknown_object_size (osi, var);
1780 break;
1783 case GIMPLE_CALL:
1785 gcall *call_stmt = as_a <gcall *> (stmt);
     /* Calls like memcpy return one of their arguments; track that
	argument's size instead of treating it as an allocation.  */
1786 tree arg = pass_through_call (call_stmt);
1787 if (arg)
1789 if (TREE_CODE (arg) == SSA_NAME
1790 && POINTER_TYPE_P (TREE_TYPE (arg)))
1791 reexamine = merge_object_sizes (osi, var, arg);
1792 else
1793 expr_object_size (osi, var, arg);
1795 else
1796 call_object_size (osi, var, call_stmt);
1797 break;
1800 case GIMPLE_ASM:
1801 /* Pointers defined by __asm__ statements can point anywhere. */
1802 unknown_object_size (osi, var);
1803 break;
1805 case GIMPLE_NOP:
1806 if (SSA_NAME_VAR (var)
1807 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
1808 parm_object_size (osi, var);
1809 else
1810 /* Uninitialized SSA names point nowhere. */
1811 unknown_object_size (osi, var);
1812 break;
1814 case GIMPLE_PHI:
1816 unsigned i;
1818 if (object_size_type & OST_DYNAMIC)
1820 phi_dynamic_object_size (osi, var);
1821 break;
     /* Static case: fold all PHI arguments into VAR's size, stopping early
	once the size is unknown.  */
1824 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1826 tree rhs = gimple_phi_arg (stmt, i)->def;
1828 if (object_sizes_unknown_p (object_size_type, varno))
1829 break;
1831 if (TREE_CODE (rhs) == SSA_NAME)
1832 reexamine |= merge_object_sizes (osi, var, rhs);
1833 else if (osi->pass == 0)
1834 expr_object_size (osi, var, rhs);
1836 break;
1839 default:
1840 gcc_unreachable ();
1843 /* Dynamic sizes use placeholder temps to return an answer, so it is always
1844 safe to set COMPUTED for them. */
1845 if ((object_size_type & OST_DYNAMIC)
1846 || !reexamine || object_sizes_unknown_p (object_size_type, varno))
1848 bitmap_set_bit (computed[object_size_type], varno);
1849 if (!(object_size_type & OST_DYNAMIC))
1850 bitmap_clear_bit (osi->reexamine, varno);
1851 else if (reexamine)
1852 bitmap_set_bit (osi->reexamine, varno);
1854 else
1856 bitmap_set_bit (osi->reexamine, varno);
1857 if (dump_file && (dump_flags & TDF_DETAILS))
1859 fprintf (dump_file, "Need to reexamine ");
1860 print_generic_expr (dump_file, var, dump_flags);
1861 fprintf (dump_file, "\n");
1867 /* Helper function for check_for_plus_in_loops. Called recursively
1868 to detect loops. */
1870 static void
1871 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1872 unsigned int depth)
1874 gimple *stmt = SSA_NAME_DEF_STMT (var);
1875 unsigned int varno = SSA_NAME_VERSION (var);
     /* Nonzero depth means VAR is already on the current walk; a differing
	depth means the cycle includes a nonzero pointer increment.  */
1877 if (osi->depths[varno])
1879 if (osi->depths[varno] != depth)
1881 unsigned int *sp;
1883 /* Found a loop involving pointer addition. */
1884 for (sp = osi->tos; sp > osi->stack; )
1886 --sp;
     /* Force every name in the cycle to minimum size 0 and finalize it.  */
1887 bitmap_clear_bit (osi->reexamine, *sp);
1888 bitmap_set_bit (computed[osi->object_size_type], *sp);
1889 object_sizes_set (osi, *sp, size_zero_node,
1890 object_sizes_get (osi, *sp, true));
1891 if (*sp == varno)
1892 break;
1895 return;
1897 else if (! bitmap_bit_p (osi->reexamine, varno))
1898 return;
     /* Push VAR on the walk stack and recurse into its definition.  */
1900 osi->depths[varno] = depth;
1901 *osi->tos++ = varno;
1903 switch (gimple_code (stmt))
1906 case GIMPLE_ASSIGN:
1908 if ((gimple_assign_single_p (stmt)
1909 || gimple_assign_unary_nop_p (stmt))
1910 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1912 tree rhs = gimple_assign_rhs1 (stmt);
1914 check_for_plus_in_loops_1 (osi, rhs, depth);
1916 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1918 tree basevar = gimple_assign_rhs1 (stmt);
1919 tree cst = gimple_assign_rhs2 (stmt);
1921 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
     /* Only a nonzero increment bumps the depth, so that cycles through a
	zero offset are not flagged.  */
1923 check_for_plus_in_loops_1 (osi, basevar,
1924 depth + !integer_zerop (cst));
1926 else
1927 gcc_unreachable ();
1928 break;
1931 case GIMPLE_CALL:
1933 gcall *call_stmt = as_a <gcall *> (stmt);
1934 tree arg = pass_through_call (call_stmt);
1935 if (arg)
1937 if (TREE_CODE (arg) == SSA_NAME)
1938 check_for_plus_in_loops_1 (osi, arg, depth);
1939 else
1940 gcc_unreachable ();
1942 break;
1945 case GIMPLE_PHI:
1947 unsigned i;
1949 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1951 tree rhs = gimple_phi_arg (stmt, i)->def;
1953 if (TREE_CODE (rhs) == SSA_NAME)
1954 check_for_plus_in_loops_1 (osi, rhs, depth);
1956 break;
1959 default:
1960 gcc_unreachable ();
     /* Pop VAR off the walk stack.  */
1963 osi->depths[varno] = 0;
1964 osi->tos--;
1968 /* Check if some pointer we are computing object size of is being increased
1969 within a loop. If yes, assume all the SSA variables participating in
1970 that loop have minimum object sizes 0. */
1972 static void
1973 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1975 gimple *stmt = SSA_NAME_DEF_STMT (var);
1977 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1978 and looked for a POINTER_PLUS_EXPR in the pass-through
1979 argument, if any. In GIMPLE, however, such an expression
1980 is not a valid call operand. */
1982 if (is_gimple_assign (stmt)
1983 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1985 tree basevar = gimple_assign_rhs1 (stmt);
1986 tree cst = gimple_assign_rhs2 (stmt);
1988 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1990 /* Skip non-positive offsets. */
1991 if (integer_zerop (cst) || compare_tree_int (cst, offset_limit) > 0)
1992 return;
     /* Seed the depth-tracking walk at BASEVAR (depth 1) and look for a
	cycle back to VAR with a larger depth, i.e. a growing pointer.  */
1994 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1995 *osi->tos++ = SSA_NAME_VERSION (basevar);
1996 check_for_plus_in_loops_1 (osi, var, 2);
1997 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1998 osi->tos--;
2003 /* Initialize data structures for the object size computation. */
2005 void
2006 init_object_sizes (void)
2008 int object_size_type;
2010 if (computed[0])
2011 return;
2013 for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
2015 object_sizes_grow (object_size_type);
2016 computed[object_size_type] = BITMAP_ALLOC (NULL);
2019 init_offset_limit ();
2023 /* Destroy data structures after the object size computation. */
2025 void
2026 fini_object_sizes (void)
2028 int object_size_type;
2030 for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
2032 object_sizes_release (object_size_type);
2033 BITMAP_FREE (computed[object_size_type]);
2037 /* Dummy valueize function. */
     /* Identity valueization callback for gimple_fold_stmt_to_constant:
	returning T unchanged means no SSA value substitution is done.  */
2039 static tree
2040 do_valueize (tree t)
2042 return t;
2045 /* Process a __builtin_object_size or __builtin_dynamic_object_size call in
2046 CALL early for subobjects before any object information is lost due to
2047 optimization. Insert a MIN or MAX expression of the result and
2048 __builtin_object_size at I so that it may be processed in the second pass.
2049 __builtin_dynamic_object_size is treated like __builtin_object_size here
2050 since we're only looking for constant bounds. */
2052 static void
2053 early_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
2055 tree ost = gimple_call_arg (call, 1);
2056 tree lhs = gimple_call_lhs (call);
2057 gcc_assert (lhs != NULL_TREE);
2059 if (!tree_fits_uhwi_p (ost))
2060 return;
2062 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
2063 tree ptr = gimple_call_arg (call, 0);
     /* Only the subobject variants (types 1 and 3) are handled early; whole
	object sizes are left for the late pass.  */
2065 if (object_size_type != 1 && object_size_type != 3)
2066 return;
2068 if (TREE_CODE (ptr) != ADDR_EXPR && TREE_CODE (ptr) != SSA_NAME)
2069 return;
2071 tree type = TREE_TYPE (lhs);
2072 tree bytes;
2073 if (!compute_builtin_object_size (ptr, object_size_type, &bytes)
2074 || !int_fits_type_p (bytes, type))
2075 return;
     /* Rewrite LHS = CALL into TEM = CALL; LHS = MIN/MAX (TEM, BYTES) so the
	late pass can still refine the builtin result.  MAX for minimum-size
	queries, MIN for maximum-size ones.  */
2077 tree tem = make_ssa_name (type);
2078 gimple_call_set_lhs (call, tem);
2079 enum tree_code code = object_size_type & OST_MINIMUM ? MAX_EXPR : MIN_EXPR;
2080 tree cst = fold_convert (type, bytes);
2081 gimple *g = gimple_build_assign (lhs, code, tem, cst);
2082 gsi_insert_after (i, g, GSI_NEW_STMT);
2083 update_stmt (call);
2086 /* Attempt to fold one __builtin_dynamic_object_size call in CALL into an
2087 expression and insert it at I. Return true if it succeeds. */
2089 static bool
2090 dynamic_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
2092 gcc_assert (gimple_call_num_args (call) == 2);
2094 tree args[2];
2095 args[0] = gimple_call_arg (call, 0);
2096 args[1] = gimple_call_arg (call, 1);
2098 location_t loc = EXPR_LOC_OR_LOC (args[0], input_location);
2099 tree result_type = gimple_call_return_type (as_a <gcall *> (call));
     /* Let the generic builtin folder compute a (possibly non-constant)
	size expression for the call.  */
2100 tree result = fold_builtin_call_array (loc, result_type,
2101 gimple_call_fn (call), 2, args);
2103 if (!result)
2104 return false;
2106 /* fold_builtin_call_array may wrap the result inside a
2107 NOP_EXPR. */
2108 STRIP_NOPS (result);
     /* Replace the call with gimplified statements computing RESULT.  */
2109 gimplify_and_update_call_from_tree (i, result);
2111 if (dump_file && (dump_flags & TDF_DETAILS))
2113 fprintf (dump_file, "Simplified (dynamic)\n ");
2114 print_gimple_stmt (dump_file, call, 0, dump_flags);
2115 fprintf (dump_file, " to ");
2116 print_generic_expr (dump_file, result);
2117 fprintf (dump_file, "\n");
2119 return true;
     /* Main worker for both objsz passes: fold every __builtin_object_size
	and __builtin_dynamic_object_size call in FUN.  EARLY selects the
	conservative pre-optimization behavior.  */
2122 static unsigned int
2123 object_sizes_execute (function *fun, bool early)
     /* TODO_* flags returned to the pass manager.  NOTE(review): the
	declaration of `todo' is not visible in this view of the file.  */
2125 todo = 0;
2127 basic_block bb;
2128 FOR_EACH_BB_FN (bb, fun)
2130 gimple_stmt_iterator i;
2131 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
2133 tree result;
2134 bool dynamic = false;
2136 gimple *call = gsi_stmt (i);
2137 if (gimple_call_builtin_p (call, BUILT_IN_DYNAMIC_OBJECT_SIZE))
2138 dynamic = true;
2139 else if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
2140 continue;
     /* A call whose result is unused needs no folding.  */
2142 tree lhs = gimple_call_lhs (call);
2143 if (!lhs)
2144 continue;
2146 init_object_sizes ();
2148 /* If early, only attempt to fold
2149 __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
2150 and rather than folding the builtin to the constant if any,
2151 create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
2152 call result and the computed constant. Do the same for
2153 __builtin_dynamic_object_size too. */
2154 if (early)
2156 early_object_sizes_execute_one (&i, call);
2157 continue;
2160 if (dynamic)
2162 if (dynamic_object_sizes_execute_one (&i, call))
2163 continue;
2164 else
2166 /* If we could not find a suitable size expression, lower to
2167 __builtin_object_size so that we may at least get a
2168 constant lower or higher estimate. */
2169 tree bosfn = builtin_decl_implicit (BUILT_IN_OBJECT_SIZE);
2170 gimple_call_set_fndecl (call, bosfn);
2171 update_stmt (call);
2173 if (dump_file && (dump_flags & TDF_DETAILS))
2175 print_generic_expr (dump_file, gimple_call_arg (call, 0),
2176 dump_flags);
2177 fprintf (dump_file,
2178 ": Retrying as __builtin_object_size\n");
2183 result = gimple_fold_stmt_to_constant (call, do_valueize);
2184 if (!result)
     /* Folding failed: fall back to the defined failure values, 0 for
	minimum queries and (size_t)-1 for maximum queries.  */
2186 tree ost = gimple_call_arg (call, 1);
2188 if (tree_fits_uhwi_p (ost))
2190 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
2192 if (object_size_type & OST_MINIMUM)
2193 result = build_zero_cst (size_type_node);
2194 else if (object_size_type < OST_END)
2195 result = fold_convert (size_type_node,
2196 integer_minus_one_node);
2199 if (!result)
2200 continue;
2203 gcc_assert (TREE_CODE (result) == INTEGER_CST);
2205 if (dump_file && (dump_flags & TDF_DETAILS))
2207 fprintf (dump_file, "Simplified\n ");
2208 print_gimple_stmt (dump_file, call, 0, dump_flags);
2209 fprintf (dump_file, " to ");
2210 print_generic_expr (dump_file, result);
2211 fprintf (dump_file, "\n");
2214 /* Propagate into all uses and fold those stmts. */
2215 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2216 replace_uses_by (lhs, result);
2217 else
2218 replace_call_with_value (&i, result);
2222 fini_object_sizes ();
2223 return todo;
2226 /* Simple pass to optimize all __builtin_object_size () builtins. */
2228 namespace {
2230 const pass_data pass_data_object_sizes =
2232 GIMPLE_PASS, /* type */
2233 "objsz", /* name */
2234 OPTGROUP_NONE, /* optinfo_flags */
2235 TV_NONE, /* tv_id */
2236 ( PROP_cfg | PROP_ssa ), /* properties_required */
2237 PROP_objsz, /* properties_provided */
2238 0, /* properties_destroyed */
2239 0, /* todo_flags_start */
2240 0, /* todo_flags_finish */
2243 class pass_object_sizes : public gimple_opt_pass
2245 public:
2246 pass_object_sizes (gcc::context *ctxt)
2247 : gimple_opt_pass (pass_data_object_sizes, ctxt)
2250 /* opt_pass methods: */
2251 opt_pass * clone () final override { return new pass_object_sizes (m_ctxt); }
2252 unsigned int execute (function *fun) final override
2254 return object_sizes_execute (fun, false);
2256 }; // class pass_object_sizes
2258 } // anon namespace
2260 gimple_opt_pass *
2261 make_pass_object_sizes (gcc::context *ctxt)
2263 return new pass_object_sizes (ctxt);
2266 /* Early version of pass to optimize all __builtin_object_size () builtins. */
2268 namespace {
2270 const pass_data pass_data_early_object_sizes =
2272 GIMPLE_PASS, /* type */
2273 "early_objsz", /* name */
2274 OPTGROUP_NONE, /* optinfo_flags */
2275 TV_NONE, /* tv_id */
2276 ( PROP_cfg | PROP_ssa ), /* properties_required */
2277 0, /* properties_provided */
2278 0, /* properties_destroyed */
2279 0, /* todo_flags_start */
2280 0, /* todo_flags_finish */
2283 class pass_early_object_sizes : public gimple_opt_pass
2285 public:
2286 pass_early_object_sizes (gcc::context *ctxt)
2287 : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
2290 /* opt_pass methods: */
2291 unsigned int execute (function *fun) final override
2293 return object_sizes_execute (fun, true);
2295 }; // class pass_object_sizes
2297 } // anon namespace
2299 gimple_opt_pass *
2300 make_pass_early_object_sizes (gcc::context *ctxt)
2302 return new pass_early_object_sizes (ctxt);