Skip -fwhole-program when merging LTO options.
[official-gcc.git] / gcc / tree-object-size.cc
blobd9f25397c71c5499f0921aec975ba2c284777310
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2022 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-iterator.h"
33 #include "gimple-fold.h"
34 #include "tree-cfg.h"
35 #include "tree-dfa.h"
36 #include "stringpool.h"
37 #include "attribs.h"
38 #include "builtins.h"
39 #include "gimplify-me.h"
41 struct object_size_info
43 int object_size_type;
44 unsigned char pass;
45 bool changed;
46 bitmap visited, reexamine, unknowns;
47 unsigned int *depths;
48 unsigned int *stack, *tos;
51 struct GTY(()) object_size
53 /* Estimate of bytes till the end of the object. */
54 tree size;
55 /* Estimate of the size of the whole object. */
56 tree wholesize;
59 static tree compute_object_offset (const_tree, const_tree);
60 static bool addr_object_size (struct object_size_info *,
61 const_tree, int, tree *, tree *t = NULL);
62 static tree alloc_object_size (const gcall *, int);
63 static tree pass_through_call (const gcall *);
64 static void collect_object_sizes_for (struct object_size_info *, tree);
65 static void expr_object_size (struct object_size_info *, tree, tree);
66 static bool merge_object_sizes (struct object_size_info *, tree, tree);
67 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
68 static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
69 static void init_offset_limit (void);
70 static void check_for_plus_in_loops (struct object_size_info *, tree);
71 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
72 unsigned int);
74 /* object_sizes[0] is upper bound for the object size and number of bytes till
75 the end of the object.
76 object_sizes[1] is upper bound for the object size and number of bytes till
77 the end of the subobject (innermost array or field with address taken).
78 object_sizes[2] is lower bound for the object size and number of bytes till
79 the end of the object and object_sizes[3] lower bound for subobject.
81 For static object sizes, the object size and the bytes till the end of the
82 object are both INTEGER_CST. In the dynamic case, they are finally either a
83 gimple variable or an INTEGER_CST. */
84 static vec<object_size> object_sizes[OST_END];
86 /* Bitmaps what object sizes have been computed already. */
87 static bitmap computed[OST_END];
89 /* Maximum value of offset we consider to be addition. */
90 static unsigned HOST_WIDE_INT offset_limit;
92 /* Tell the generic SSA updater what kind of update is needed after the pass
93 executes. */
94 static unsigned todo;
96 /* Return true if VAL represents an initial size for OBJECT_SIZE_TYPE. */
98 static inline bool
99 size_initval_p (tree val, int object_size_type)
101 return ((object_size_type & OST_MINIMUM)
102 ? integer_all_onesp (val) : integer_zerop (val));
105 /* Return true if VAL represents an unknown size for OBJECT_SIZE_TYPE. */
107 static inline bool
108 size_unknown_p (tree val, int object_size_type)
110 return ((object_size_type & OST_MINIMUM)
111 ? integer_zerop (val) : integer_all_onesp (val));
114 /* Return true if VAL represents a valid size for OBJECT_SIZE_TYPE. */
116 static inline bool
117 size_valid_p (tree val, int object_size_type)
119 return ((object_size_type & OST_DYNAMIC) || TREE_CODE (val) == INTEGER_CST);
122 /* Return true if VAL is usable as an object size in the object_sizes
123 vectors. */
125 static inline bool
126 size_usable_p (tree val)
128 return TREE_CODE (val) == SSA_NAME || TREE_CODE (val) == INTEGER_CST;
131 /* Return a tree with initial value for OBJECT_SIZE_TYPE. */
133 static inline tree
134 size_initval (int object_size_type)
136 return ((object_size_type & OST_MINIMUM)
137 ? TYPE_MAX_VALUE (sizetype) : size_zero_node);
140 /* Return a tree with unknown value for OBJECT_SIZE_TYPE. */
142 static inline tree
143 size_unknown (int object_size_type)
145 return ((object_size_type & OST_MINIMUM)
146 ? size_zero_node : TYPE_MAX_VALUE (sizetype));
149 /* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names. */
151 static inline void
152 object_sizes_grow (int object_size_type)
154 if (num_ssa_names > object_sizes[object_size_type].length ())
155 object_sizes[object_size_type].safe_grow (num_ssa_names, true);
158 /* Release object_sizes[OBJECT_SIZE_TYPE]. */
160 static inline void
161 object_sizes_release (int object_size_type)
163 object_sizes[object_size_type].release ();
166 /* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown. */
168 static inline bool
169 object_sizes_unknown_p (int object_size_type, unsigned varno)
171 return size_unknown_p (object_sizes[object_size_type][varno].size,
172 object_size_type);
175 /* Return the raw size expression for VARNO corresponding to OSI. This returns
176 the TREE_VEC as is and should only be used during gimplification. */
178 static inline object_size
179 object_sizes_get_raw (struct object_size_info *osi, unsigned varno)
181 gcc_assert (osi->pass != 0);
182 return object_sizes[osi->object_size_type][varno];
185 /* Return a size tree for VARNO corresponding to OSI. If WHOLE is true, return
186 the whole object size. Use this for building size expressions based on size
187 of VARNO. */
189 static inline tree
190 object_sizes_get (struct object_size_info *osi, unsigned varno,
191 bool whole = false)
193 tree ret;
194 int object_size_type = osi->object_size_type;
196 if (whole)
197 ret = object_sizes[object_size_type][varno].wholesize;
198 else
199 ret = object_sizes[object_size_type][varno].size;
201 if (object_size_type & OST_DYNAMIC)
203 if (TREE_CODE (ret) == MODIFY_EXPR)
204 return TREE_OPERAND (ret, 0);
205 else if (TREE_CODE (ret) == TREE_VEC)
206 return TREE_VEC_ELT (ret, TREE_VEC_LENGTH (ret) - 1);
207 else
208 gcc_checking_assert (size_usable_p (ret));
211 return ret;
214 /* Set size for VARNO corresponding to OSI to VAL. */
216 static inline void
217 object_sizes_initialize (struct object_size_info *osi, unsigned varno,
218 tree val, tree wholeval)
220 int object_size_type = osi->object_size_type;
222 object_sizes[object_size_type][varno].size = val;
223 object_sizes[object_size_type][varno].wholesize = wholeval;
226 /* Return a MODIFY_EXPR for cases where SSA and EXPR have the same type. The
227 TREE_VEC is returned only in case of PHI nodes. */
229 static tree
230 bundle_sizes (tree name, tree expr)
232 gcc_checking_assert (TREE_TYPE (name) == sizetype);
234 if (TREE_CODE (expr) == TREE_VEC)
236 TREE_VEC_ELT (expr, TREE_VEC_LENGTH (expr) - 1) = name;
237 return expr;
240 gcc_checking_assert (types_compatible_p (TREE_TYPE (expr), sizetype));
241 return build2 (MODIFY_EXPR, sizetype, name, expr);
244 /* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
245 maximum. For static sizes, each element of TREE_VEC is always INTEGER_CST
246 throughout the computation. For dynamic sizes, each element may either be a
247 gimple variable, a MODIFY_EXPR or a TREE_VEC. The MODIFY_EXPR is for
248 expressions that need to be gimplified. TREE_VECs are special, they're
249 emitted only for GIMPLE_PHI and the PHI result variable is the last element
250 of the vector. */
252 static bool
253 object_sizes_set (struct object_size_info *osi, unsigned varno, tree val,
254 tree wholeval)
256 int object_size_type = osi->object_size_type;
257 object_size osize = object_sizes[object_size_type][varno];
258 bool changed = true;
260 tree oldval = osize.size;
261 tree old_wholeval = osize.wholesize;
263 if (object_size_type & OST_DYNAMIC)
265 if (bitmap_bit_p (osi->reexamine, varno))
267 if (size_unknown_p (val, object_size_type))
269 oldval = object_sizes_get (osi, varno);
270 old_wholeval = object_sizes_get (osi, varno, true);
271 bitmap_set_bit (osi->unknowns, SSA_NAME_VERSION (oldval));
272 bitmap_set_bit (osi->unknowns, SSA_NAME_VERSION (old_wholeval));
273 bitmap_clear_bit (osi->reexamine, varno);
275 else
277 val = bundle_sizes (oldval, val);
278 wholeval = bundle_sizes (old_wholeval, wholeval);
281 else
283 gcc_checking_assert (size_initval_p (oldval, object_size_type));
284 gcc_checking_assert (size_initval_p (old_wholeval,
285 object_size_type));
286 /* For dynamic object sizes, all object sizes that are not gimple
287 variables will need to be gimplified. */
288 if (wholeval != val && !size_usable_p (wholeval))
290 bitmap_set_bit (osi->reexamine, varno);
291 wholeval = bundle_sizes (make_ssa_name (sizetype), wholeval);
293 if (!size_usable_p (val))
295 bitmap_set_bit (osi->reexamine, varno);
296 tree newval = bundle_sizes (make_ssa_name (sizetype), val);
297 if (val == wholeval)
298 wholeval = newval;
299 val = newval;
301 /* If the new value is a temporary variable, mark it for
302 reexamination. */
303 else if (TREE_CODE (val) == SSA_NAME && !SSA_NAME_DEF_STMT (val))
304 bitmap_set_bit (osi->reexamine, varno);
307 else
309 enum tree_code code = (object_size_type & OST_MINIMUM
310 ? MIN_EXPR : MAX_EXPR);
312 val = size_binop (code, val, oldval);
313 wholeval = size_binop (code, wholeval, old_wholeval);
314 changed = (tree_int_cst_compare (val, oldval) != 0
315 || tree_int_cst_compare (old_wholeval, wholeval) != 0);
318 object_sizes[object_size_type][varno].size = val;
319 object_sizes[object_size_type][varno].wholesize = wholeval;
321 return changed;
324 /* Set temporary SSA names for object size and whole size to resolve dependency
325 loops in dynamic size computation. */
327 static inline void
328 object_sizes_set_temp (struct object_size_info *osi, unsigned varno)
330 tree val = object_sizes_get (osi, varno);
332 if (size_initval_p (val, osi->object_size_type))
333 object_sizes_set (osi, varno,
334 make_ssa_name (sizetype),
335 make_ssa_name (sizetype));
338 /* Initialize OFFSET_LIMIT variable. */
339 static void
340 init_offset_limit (void)
342 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
343 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
344 else
345 offset_limit = -1;
346 offset_limit /= 2;
349 /* Bytes at end of the object with SZ from offset OFFSET. If WHOLESIZE is not
350 NULL_TREE, use it to get the net offset of the pointer, which should always
351 be positive and hence, be within OFFSET_LIMIT for valid offsets. */
353 static tree
354 size_for_offset (tree sz, tree offset, tree wholesize = NULL_TREE)
356 gcc_checking_assert (types_compatible_p (TREE_TYPE (sz), sizetype));
358 /* For negative offsets, if we have a distinct WHOLESIZE, use it to get a net
359 offset from the whole object. */
360 if (wholesize && wholesize != sz
361 && (TREE_CODE (sz) != INTEGER_CST
362 || TREE_CODE (wholesize) != INTEGER_CST
363 || tree_int_cst_compare (sz, wholesize)))
365 gcc_checking_assert (types_compatible_p (TREE_TYPE (wholesize),
366 sizetype));
368 /* Restructure SZ - OFFSET as
369 WHOLESIZE - (WHOLESIZE + OFFSET - SZ) so that the offset part, i.e.
370 WHOLESIZE + OFFSET - SZ is only allowed to be positive. */
371 tree tmp = size_binop (MAX_EXPR, wholesize, sz);
372 offset = fold_build2 (PLUS_EXPR, sizetype, tmp, offset);
373 offset = fold_build2 (MINUS_EXPR, sizetype, offset, sz);
374 sz = tmp;
377 /* Safe to convert now, since a valid net offset should be non-negative. */
378 if (!useless_type_conversion_p (sizetype, TREE_TYPE (offset)))
379 offset = fold_convert (sizetype, offset);
381 if (TREE_CODE (offset) == INTEGER_CST)
383 if (integer_zerop (offset))
384 return sz;
386 /* Negative or too large offset even after adjustment, cannot be within
387 bounds of an object. */
388 if (compare_tree_int (offset, offset_limit) > 0)
389 return size_zero_node;
392 return size_binop (MINUS_EXPR, size_binop (MAX_EXPR, sz, offset), offset);
395 /* Compute offset of EXPR within VAR. Return error_mark_node
396 if unknown. */
398 static tree
399 compute_object_offset (const_tree expr, const_tree var)
401 enum tree_code code = PLUS_EXPR;
402 tree base, off, t;
404 if (expr == var)
405 return size_zero_node;
407 switch (TREE_CODE (expr))
409 case COMPONENT_REF:
410 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
411 if (base == error_mark_node)
412 return base;
414 t = TREE_OPERAND (expr, 1);
415 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
416 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
417 / BITS_PER_UNIT));
418 break;
420 case REALPART_EXPR:
421 CASE_CONVERT:
422 case VIEW_CONVERT_EXPR:
423 case NON_LVALUE_EXPR:
424 return compute_object_offset (TREE_OPERAND (expr, 0), var);
426 case IMAGPART_EXPR:
427 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
428 if (base == error_mark_node)
429 return base;
431 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
432 break;
434 case ARRAY_REF:
435 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
436 if (base == error_mark_node)
437 return base;
439 t = TREE_OPERAND (expr, 1);
440 tree low_bound, unit_size;
441 low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
442 unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
443 if (! integer_zerop (low_bound))
444 t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
445 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
447 code = MINUS_EXPR;
448 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
450 t = fold_convert (sizetype, t);
451 off = size_binop (MULT_EXPR, unit_size, t);
452 break;
454 case MEM_REF:
455 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
456 return wide_int_to_tree (sizetype, mem_ref_offset (expr));
458 default:
459 return error_mark_node;
462 return size_binop (code, base, off);
465 /* Returns the size of the object designated by DECL considering its
466 initializer if it either has one or if it would not affect its size,
467 otherwise the size of the object without the initializer when MIN
468 is true, else null. An object's initializer affects the object's
469 size if it's a struct type with a flexible array member. */
471 tree
472 decl_init_size (tree decl, bool min)
474 tree size = DECL_SIZE_UNIT (decl);
475 tree type = TREE_TYPE (decl);
476 if (TREE_CODE (type) != RECORD_TYPE)
477 return size;
479 tree last = last_field (type);
480 if (!last)
481 return size;
483 tree last_type = TREE_TYPE (last);
484 if (TREE_CODE (last_type) != ARRAY_TYPE
485 || TYPE_SIZE (last_type))
486 return size;
488 /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
489 of the initializer and sometimes doesn't. */
490 size = TYPE_SIZE_UNIT (type);
491 tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
492 tree compsize = component_ref_size (ref);
493 if (!compsize)
494 return min ? size : NULL_TREE;
496 /* The size includes tail padding and initializer elements. */
497 tree pos = byte_position (last);
498 size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
499 return size;
502 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
503 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
504 If unknown, return size_unknown (object_size_type). */
506 static bool
507 addr_object_size (struct object_size_info *osi, const_tree ptr,
508 int object_size_type, tree *psize, tree *pwholesize)
510 tree pt_var, pt_var_size = NULL_TREE, pt_var_wholesize = NULL_TREE;
511 tree var_size, bytes, wholebytes;
513 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
515 /* Set to unknown and overwrite just before returning if the size
516 could be determined. */
517 *psize = size_unknown (object_size_type);
518 if (pwholesize)
519 *pwholesize = size_unknown (object_size_type);
521 pt_var = TREE_OPERAND (ptr, 0);
522 while (handled_component_p (pt_var))
523 pt_var = TREE_OPERAND (pt_var, 0);
525 if (!pt_var)
526 return false;
528 if (TREE_CODE (pt_var) == MEM_REF)
530 tree sz, wholesize;
532 if (!osi || (object_size_type & OST_SUBOBJECT) != 0
533 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
535 compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
536 object_size_type & ~OST_SUBOBJECT, &sz);
537 wholesize = sz;
539 else
541 tree var = TREE_OPERAND (pt_var, 0);
542 if (osi->pass == 0)
543 collect_object_sizes_for (osi, var);
544 if (bitmap_bit_p (computed[object_size_type],
545 SSA_NAME_VERSION (var)))
547 sz = object_sizes_get (osi, SSA_NAME_VERSION (var));
548 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (var), true);
550 else
551 sz = wholesize = size_unknown (object_size_type);
553 if (!size_unknown_p (sz, object_size_type))
554 sz = size_for_offset (sz, TREE_OPERAND (pt_var, 1), wholesize);
556 if (!size_unknown_p (sz, object_size_type)
557 && (TREE_CODE (sz) != INTEGER_CST
558 || compare_tree_int (sz, offset_limit) < 0))
560 pt_var_size = sz;
561 pt_var_wholesize = wholesize;
564 else if (DECL_P (pt_var))
566 pt_var_size = pt_var_wholesize
567 = decl_init_size (pt_var, object_size_type & OST_MINIMUM);
568 if (!pt_var_size)
569 return false;
571 else if (TREE_CODE (pt_var) == STRING_CST)
572 pt_var_size = pt_var_wholesize = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
573 else
574 return false;
576 if (pt_var_size)
578 /* Validate the size determined above if it is a constant. */
579 if (TREE_CODE (pt_var_size) == INTEGER_CST
580 && compare_tree_int (pt_var_size, offset_limit) >= 0)
581 return false;
584 if (pt_var != TREE_OPERAND (ptr, 0))
586 tree var;
588 if (object_size_type & OST_SUBOBJECT)
590 var = TREE_OPERAND (ptr, 0);
592 while (var != pt_var
593 && TREE_CODE (var) != BIT_FIELD_REF
594 && TREE_CODE (var) != COMPONENT_REF
595 && TREE_CODE (var) != ARRAY_REF
596 && TREE_CODE (var) != ARRAY_RANGE_REF
597 && TREE_CODE (var) != REALPART_EXPR
598 && TREE_CODE (var) != IMAGPART_EXPR)
599 var = TREE_OPERAND (var, 0);
600 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
601 var = TREE_OPERAND (var, 0);
602 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
603 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
604 || (pt_var_size && TREE_CODE (pt_var_size) == INTEGER_CST
605 && tree_int_cst_lt (pt_var_size,
606 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
607 var = pt_var;
608 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
610 tree v = var;
611 /* For &X->fld, compute object size if fld isn't a flexible array
612 member. */
613 bool is_flexible_array_mem_ref = false;
614 while (v && v != pt_var)
615 switch (TREE_CODE (v))
617 case ARRAY_REF:
618 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0))))
620 tree domain
621 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
622 if (domain && TYPE_MAX_VALUE (domain))
624 v = NULL_TREE;
625 break;
628 v = TREE_OPERAND (v, 0);
629 break;
630 case REALPART_EXPR:
631 case IMAGPART_EXPR:
632 v = NULL_TREE;
633 break;
634 case COMPONENT_REF:
635 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
637 v = NULL_TREE;
638 break;
640 is_flexible_array_mem_ref = array_ref_flexible_size_p (v);
641 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
642 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
643 != UNION_TYPE
644 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
645 != QUAL_UNION_TYPE)
646 break;
647 else
648 v = TREE_OPERAND (v, 0);
649 if (TREE_CODE (v) == COMPONENT_REF
650 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
651 == RECORD_TYPE)
653 /* compute object size only if v is not a
654 flexible array member. */
655 if (!is_flexible_array_mem_ref)
657 v = NULL_TREE;
658 break;
660 v = TREE_OPERAND (v, 0);
662 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
663 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
664 != UNION_TYPE
665 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
666 != QUAL_UNION_TYPE)
667 break;
668 else
669 v = TREE_OPERAND (v, 0);
670 if (v != pt_var)
671 v = NULL_TREE;
672 else
673 v = pt_var;
674 break;
675 default:
676 v = pt_var;
677 break;
679 if (v == pt_var)
680 var = pt_var;
683 else
684 var = pt_var;
686 if (var != pt_var)
688 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
689 if (!TREE_CONSTANT (var_size))
690 var_size = get_or_create_ssa_default_def (cfun, var_size);
691 if (!var_size)
692 return false;
694 else if (!pt_var_size)
695 return false;
696 else
697 var_size = pt_var_size;
698 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
699 if (bytes != error_mark_node)
701 bytes = size_for_offset (var_size, bytes);
702 if (var != pt_var && pt_var_size && TREE_CODE (pt_var) == MEM_REF)
704 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0),
705 pt_var);
706 if (bytes2 != error_mark_node)
708 bytes2 = size_for_offset (pt_var_size, bytes2);
709 bytes = size_binop (MIN_EXPR, bytes, bytes2);
713 else
714 bytes = size_unknown (object_size_type);
716 wholebytes
717 = object_size_type & OST_SUBOBJECT ? var_size : pt_var_wholesize;
719 else if (!pt_var_size)
720 return false;
721 else
723 bytes = pt_var_size;
724 wholebytes = pt_var_wholesize;
727 if (!size_unknown_p (bytes, object_size_type)
728 && size_valid_p (bytes, object_size_type)
729 && !size_unknown_p (bytes, object_size_type)
730 && size_valid_p (wholebytes, object_size_type))
732 *psize = bytes;
733 if (pwholesize)
734 *pwholesize = wholebytes;
735 return true;
738 return false;
742 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
743 Handles calls to functions declared with attribute alloc_size.
744 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
745 If unknown, return size_unknown (object_size_type). */
747 static tree
748 alloc_object_size (const gcall *call, int object_size_type)
750 gcc_assert (is_gimple_call (call));
752 tree calltype;
753 tree callfn = gimple_call_fndecl (call);
754 if (callfn)
755 calltype = TREE_TYPE (callfn);
756 else
757 calltype = gimple_call_fntype (call);
759 if (!calltype)
760 return size_unknown (object_size_type);
762 /* Set to positions of alloc_size arguments. */
763 int arg1 = -1, arg2 = -1;
764 tree alloc_size = lookup_attribute ("alloc_size",
765 TYPE_ATTRIBUTES (calltype));
766 if (alloc_size && TREE_VALUE (alloc_size))
768 tree p = TREE_VALUE (alloc_size);
770 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
771 if (TREE_CHAIN (p))
772 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
774 else if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
775 && callfn && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callfn)))
776 arg1 = 0;
778 /* Non-const arguments are OK here, let the caller handle constness. */
779 if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
780 || arg2 >= (int) gimple_call_num_args (call))
781 return size_unknown (object_size_type);
783 tree bytes = NULL_TREE;
784 if (arg2 >= 0)
785 bytes = size_binop (MULT_EXPR,
786 fold_convert (sizetype, gimple_call_arg (call, arg1)),
787 fold_convert (sizetype, gimple_call_arg (call, arg2)));
788 else if (arg1 >= 0)
789 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
791 return bytes ? bytes : size_unknown (object_size_type);
794 /* Compute __builtin_object_size for CALL, which is a call to either
795 BUILT_IN_STRDUP or BUILT_IN_STRNDUP; IS_STRNDUP indicates which it is.
796 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
797 If unknown, return size_unknown (object_size_type). */
799 static tree
800 strdup_object_size (const gcall *call, int object_size_type, bool is_strndup)
802 tree src = gimple_call_arg (call, 0);
803 tree sz = size_unknown (object_size_type);
804 tree n = NULL_TREE;
806 if (is_strndup)
807 n = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
808 gimple_call_arg (call, 1));
809 /* For strdup, simply emit strlen (SRC) + 1 and let the optimizer fold it the
810 way it likes. */
811 else
813 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
814 if (strlen_fn)
816 sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
817 build_call_expr (strlen_fn, 1, src));
818 todo = TODO_update_ssa_only_virtuals;
822 /* In all other cases, return the size of SRC since the object size cannot
823 exceed that. We cannot do this for OST_MINIMUM unless SRC points into a
824 string constant since otherwise the object size could go all the way down
825 to zero. */
826 if (!size_valid_p (sz, object_size_type)
827 || size_unknown_p (sz, object_size_type))
829 tree wholesrc = NULL_TREE;
830 if (TREE_CODE (src) == ADDR_EXPR)
831 wholesrc = get_base_address (TREE_OPERAND (src, 0));
833 /* If the source points within a string constant, we try to get its
834 length. */
835 if (wholesrc && TREE_CODE (wholesrc) == STRING_CST)
837 tree len = c_strlen (src, 0);
838 if (len)
839 sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node, len);
842 /* For maximum estimate, our next best guess is the object size of the
843 source. */
844 if (size_unknown_p (sz, object_size_type)
845 && !(object_size_type & OST_MINIMUM))
846 compute_builtin_object_size (src, object_size_type, &sz);
849 /* String duplication allocates at least one byte, so we should never fail
850 for OST_MINIMUM. */
851 if ((!size_valid_p (sz, object_size_type)
852 || size_unknown_p (sz, object_size_type))
853 && (object_size_type & OST_MINIMUM))
854 sz = size_one_node;
856 /* Factor in the N. */
857 return n ? fold_build2 (MIN_EXPR, sizetype, n, sz) : sz;
860 /* If object size is propagated from one of function's arguments directly
861 to its return value, return that argument for GIMPLE_CALL statement CALL.
862 Otherwise return NULL. */
864 static tree
865 pass_through_call (const gcall *call)
867 unsigned rf = gimple_call_return_flags (call);
868 if (rf & ERF_RETURNS_ARG)
870 unsigned argnum = rf & ERF_RETURN_ARG_MASK;
871 if (argnum < gimple_call_num_args (call))
872 return gimple_call_arg (call, argnum);
875 /* __builtin_assume_aligned is intentionally not marked RET1. */
876 if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
877 return gimple_call_arg (call, 0);
879 return NULL_TREE;
882 /* Emit PHI nodes for size expressions fo. */
884 static void
885 emit_phi_nodes (gimple *stmt, tree size, tree wholesize)
887 tree phires;
888 gphi *wholephi = NULL;
890 if (wholesize != size)
892 phires = TREE_VEC_ELT (wholesize, TREE_VEC_LENGTH (wholesize) - 1);
893 wholephi = create_phi_node (phires, gimple_bb (stmt));
896 phires = TREE_VEC_ELT (size, TREE_VEC_LENGTH (size) - 1);
897 gphi *phi = create_phi_node (phires, gimple_bb (stmt));
898 gphi *obj_phi = as_a <gphi *> (stmt);
900 gcc_checking_assert (TREE_CODE (wholesize) == TREE_VEC);
901 gcc_checking_assert (TREE_CODE (size) == TREE_VEC);
903 for (unsigned i = 0; i < gimple_phi_num_args (stmt); i++)
905 gimple_seq seq = NULL;
906 tree wsz = TREE_VEC_ELT (wholesize, i);
907 tree sz = TREE_VEC_ELT (size, i);
909 /* If we built an expression, we will need to build statements
910 and insert them on the edge right away. */
911 if (TREE_CODE (wsz) != SSA_NAME)
912 wsz = force_gimple_operand (wsz, &seq, true, NULL);
913 if (TREE_CODE (sz) != SSA_NAME)
915 gimple_seq s;
916 sz = force_gimple_operand (sz, &s, true, NULL);
917 gimple_seq_add_seq (&seq, s);
920 if (seq)
921 gsi_insert_seq_on_edge (gimple_phi_arg_edge (obj_phi, i), seq);
923 if (wholephi)
924 add_phi_arg (wholephi, wsz,
925 gimple_phi_arg_edge (obj_phi, i),
926 gimple_phi_arg_location (obj_phi, i));
928 add_phi_arg (phi, sz,
929 gimple_phi_arg_edge (obj_phi, i),
930 gimple_phi_arg_location (obj_phi, i));
934 /* Descend through EXPR and return size_unknown if it uses any SSA variable
935 object_size_set or object_size_set_temp generated, which turned out to be
936 size_unknown, as noted in UNKNOWNS. */
938 static tree
939 propagate_unknowns (object_size_info *osi, tree expr)
941 int object_size_type = osi->object_size_type;
943 switch (TREE_CODE (expr))
945 case SSA_NAME:
946 if (bitmap_bit_p (osi->unknowns, SSA_NAME_VERSION (expr)))
947 return size_unknown (object_size_type);
948 return expr;
950 case MIN_EXPR:
951 case MAX_EXPR:
953 tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0));
954 if (size_unknown_p (res, object_size_type))
955 return res;
957 res = propagate_unknowns (osi, TREE_OPERAND (expr, 1));
958 if (size_unknown_p (res, object_size_type))
959 return res;
961 return expr;
963 case MODIFY_EXPR:
965 tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 1));
966 if (size_unknown_p (res, object_size_type))
967 return res;
968 return expr;
970 case TREE_VEC:
971 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
973 tree res = propagate_unknowns (osi, TREE_VEC_ELT (expr, i));
974 if (size_unknown_p (res, object_size_type))
975 return res;
977 return expr;
978 case PLUS_EXPR:
979 case MINUS_EXPR:
981 tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0));
982 if (size_unknown_p (res, object_size_type))
983 return res;
985 return expr;
987 default:
988 return expr;
992 /* Walk through size expressions that need reexamination and generate
993 statements for them. */
995 static void
996 gimplify_size_expressions (object_size_info *osi)
998 int object_size_type = osi->object_size_type;
999 bitmap_iterator bi;
1000 unsigned int i;
1001 bool changed;
1003 /* Step 1: Propagate unknowns into expressions. */
1004 bitmap reexamine = BITMAP_ALLOC (NULL);
1005 bitmap_copy (reexamine, osi->reexamine);
1008 changed = false;
1009 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
1011 object_size cur = object_sizes_get_raw (osi, i);
1013 if (size_unknown_p (propagate_unknowns (osi, cur.size),
1014 object_size_type)
1015 || size_unknown_p (propagate_unknowns (osi, cur.wholesize),
1016 object_size_type))
1018 object_sizes_set (osi, i,
1019 size_unknown (object_size_type),
1020 size_unknown (object_size_type));
1021 changed = true;
1024 bitmap_copy (reexamine, osi->reexamine);
1026 while (changed);
1028 /* Release all unknowns. */
1029 EXECUTE_IF_SET_IN_BITMAP (osi->unknowns, 0, i, bi)
1030 release_ssa_name (ssa_name (i));
1032 /* Expand all size expressions to put their definitions close to the objects
1033 for which size is being computed. */
1034 EXECUTE_IF_SET_IN_BITMAP (osi->reexamine, 0, i, bi)
1036 gimple_seq seq = NULL;
1037 object_size osize = object_sizes_get_raw (osi, i);
1039 gimple *stmt = SSA_NAME_DEF_STMT (ssa_name (i));
1040 enum gimple_code code = gimple_code (stmt);
1042 /* PHI nodes need special attention. */
1043 if (code == GIMPLE_PHI)
1044 emit_phi_nodes (stmt, osize.size, osize.wholesize);
1045 else
1047 tree size_expr = NULL_TREE;
1049 /* Bundle wholesize in with the size to gimplify if needed. */
1050 if (osize.wholesize != osize.size
1051 && !size_usable_p (osize.wholesize))
1052 size_expr = size_binop (COMPOUND_EXPR,
1053 osize.wholesize,
1054 osize.size);
1055 else if (!size_usable_p (osize.size))
1056 size_expr = osize.size;
1058 if (size_expr)
1060 gimple_stmt_iterator gsi;
1061 if (code == GIMPLE_NOP)
1062 gsi = gsi_start_bb (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1063 else
1064 gsi = gsi_for_stmt (stmt);
1066 force_gimple_operand (size_expr, &seq, true, NULL);
1067 gsi_insert_seq_before (&gsi, seq, GSI_CONTINUE_LINKING);
1071 /* We're done, so replace the MODIFY_EXPRs with the SSA names. */
1072 object_sizes_initialize (osi, i,
1073 object_sizes_get (osi, i),
1074 object_sizes_get (osi, i, true));
/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  If the declared object is known and PDECL
   is nonnull, sets *PDECL to the object's DECL.  OBJECT_SIZE_TYPE
   is the second argument to __builtin_object_size.
   Returns true on success and false when the object size could not
   be determined.

   The computation is a two-pass fixpoint over the SSA use-def web:
   pass 0 walks the chains and records sizes, leaving variables that
   participate in dependency cycles in osi.reexamine; pass 2 then
   iterates over those until no size changes.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
			     tree *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type < OST_END);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = size_unknown (object_size_type);

  if (! offset_limit)
    init_offset_limit ();

  /* A bare &object can be resolved without any SSA walking.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & OST_SUBOBJECT)
	return false;

      /* When not optimizing, rather than failing, make a small effort
	 to determine the object size without the full benefit of
	 the (costly) computation below.  Only a single level of
	 POINTER_PLUS_EXPR is peeled here.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      tree offset = gimple_assign_rhs2 (def);
	      ptr = gimple_assign_rhs1 (def);

	      if (((object_size_type & OST_DYNAMIC)
		   || (tree_fits_shwi_p (offset)
		       && compare_tree_int (offset, offset_limit) <= 0))
		  && compute_builtin_object_size (ptr, object_size_type,
						  psize))
		{
		  *psize = size_for_offset (*psize, offset);
		  return true;
		}
	    }
	}
      return false;
    }

  struct object_size_info osi;
  osi.object_size_type = object_size_type;
  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      bitmap_iterator bi;
      unsigned int i;

      object_sizes_grow (object_size_type);
      if (dump_file)
	{
	  fprintf (dump_file, "Computing %s %s%sobject size for ",
		   (object_size_type & OST_MINIMUM) ? "minimum" : "maximum",
		   (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
		   (object_size_type & OST_SUBOBJECT) ? "sub" : "");
	  print_generic_expr (dump_file, ptr, dump_flags);
	  fprintf (dump_file, ":\n");
	}

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);

      /* The unknowns bitmap is only used by the dynamic (size
	 expression) computation; the depths/stack machinery is only
	 used by the static loop detection below.  */
      if (object_size_type & OST_DYNAMIC)
	osi.unknowns = BITMAP_ALLOC (NULL);
      else
	{
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;
	}

      /* First pass: walk UD chains, compute object sizes that
	 can be computed.  osi.reexamine bitmap at the end will
	 contain what variables were found in dependency cycles
	 and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      if (object_size_type & OST_DYNAMIC)
	{
	  osi.pass = 1;
	  gimplify_size_expressions (&osi);
	  BITMAP_FREE (osi.unknowns);
	  bitmap_clear (osi.reexamine);
	}

      /* Second pass: keep recomputing object sizes of variables
	 that need reexamination, until no object sizes are
	 increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
	{
	  bitmap reexamine = BITMAP_ALLOC (NULL);

	  /* If looking for minimum instead of maximum object size,
	     detect cases where a pointer is increased in a loop.
	     Although even without this detection pass 2 would eventually
	     terminate, it could take a long time.  If a pointer is
	     increasing this way, we need to assume 0 object size.
	     E.g. p = &buf[0]; while (cond) p = p + 4;  */
	  if (object_size_type & OST_MINIMUM)
	    {
	      osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
	      osi.stack = XNEWVEC (unsigned int, num_ssa_names);
	      osi.tos = osi.stack;
	      osi.pass = 1;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  check_for_plus_in_loops (&osi, ssa_name (i));

	      free (osi.depths);
	      osi.depths = NULL;
	      free (osi.stack);
	      osi.stack = NULL;
	      osi.tos = NULL;
	    }

	  do
	    {
	      osi.pass = 2;
	      osi.changed = false;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  {
		    collect_object_sizes_for (&osi, ssa_name (i));
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Reexamining ");
			print_generic_expr (dump_file, ssa_name (i),
					    dump_flags);
			fprintf (dump_file, "\n");
		      }
		  }
	    }
	  while (osi.changed);

	  BITMAP_FREE (reexamine);
	}

      /* Anything still flagged for reexamination has converged; mark it
	 computed so it is not revisited on later queries.  */
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
	{
	  EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
	    if (!object_sizes_unknown_p (object_size_type, i))
	      {
		print_generic_expr (dump_file, ssa_name (i),
				    dump_flags);
		fprintf (dump_file,
			 ": %s %s%sobject size ",
			 ((object_size_type & OST_MINIMUM) ? "minimum"
			  : "maximum"),
			 (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
			 (object_size_type & OST_SUBOBJECT) ? "sub" : "");
		print_generic_expr (dump_file, object_sizes_get (&osi, i),
				    dump_flags);
		fprintf (dump_file, "\n");
	      }
	}

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes_get (&osi, SSA_NAME_VERSION (ptr));
  return !size_unknown_p (*psize, object_size_type);
}
/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.
   Only an ADDR_EXPR value yields a usable size; everything else is
   recorded as unknown.  Only valid during pass 0.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  tree bytes, wholesize;

  gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
  gcc_assert (osi->pass == 0);

  /* Strip a WITH_SIZE_EXPR wrapper; its first operand is the value.  */
  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
	      || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    addr_object_size (osi, value, object_size_type, &bytes, &wholesize);
  else
    bytes = wholesize = size_unknown (object_size_type);

  object_sizes_set (osi, varno, bytes, wholesize);
}
/* Compute object_sizes for PTR, defined to the result of a call.
   strdup/strndup get special treatment since their result size comes
   from the source string; all other calls go through the allocation
   attributes (alloc_size etc.) via alloc_object_size.  For a call
   result the size and wholesize are identical.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  tree bytes = NULL_TREE;

  gcc_assert (is_gimple_call (call));

  gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
  gcc_assert (osi->pass == 0);

  bool is_strdup = gimple_call_builtin_p (call, BUILT_IN_STRDUP);
  bool is_strndup = gimple_call_builtin_p (call, BUILT_IN_STRNDUP);
  if (is_strdup || is_strndup)
    bytes = strdup_object_size (call, object_size_type, is_strndup);
  else
    bytes = alloc_object_size (call, object_size_type);

  /* Reject sizes that are not representable for this computation mode
     (e.g. non-constant results in the static modes).  */
  if (!size_valid_p (bytes, object_size_type))
    bytes = size_unknown (object_size_type);

  object_sizes_set (osi, varno, bytes, bytes);
}
1326 /* Compute object_sizes for PTR, defined to an unknown value. */
1328 static void
1329 unknown_object_size (struct object_size_info *osi, tree ptr)
1331 int object_size_type = osi->object_size_type;
1332 unsigned int varno = SSA_NAME_VERSION (ptr);
1334 gcc_checking_assert (!object_sizes_unknown_p (object_size_type, varno));
1335 gcc_checking_assert (osi->pass == 0);
1336 tree bytes = size_unknown (object_size_type);
1338 object_sizes_set (osi, varno, bytes, bytes);
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later (i.e. ORIG itself is
   still flagged in osi->reexamine because it sits in a dependency
   cycle).  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  tree orig_bytes, wholesize;

  /* Once DEST is known to be unknown it can never improve.  */
  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  /* During the first pass, recurse to compute ORIG's size on demand.  */
  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes_get (osi, SSA_NAME_VERSION (orig));
  wholesize = object_sizes_get (osi, SSA_NAME_VERSION (orig), true);

  /* Flag the fixpoint iteration if DEST's recorded size changed.  */
  if (object_sizes_set (osi, varno, orig_bytes, wholesize))
    osi->changed = true;

  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR (or an equivalent &MEM_REF[ptr + off]
   ADDR_EXPR).  Return true if the object size might need reexamination
   later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  tree bytes, wholesize;
  tree op0, op1;
  bool reexamine = false;

  /* Extract the base pointer and the byte offset from either form.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      /* &MEM_REF[ptr + off] is equivalent to ptr p+ off.  */
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  /* Handle PTR + OFFSET here.  */
  if (size_valid_p (op1, object_size_type)
      && (TREE_CODE (op0) == SSA_NAME || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, op0);

	  bytes = object_sizes_get (osi, SSA_NAME_VERSION (op0));
	  wholesize = object_sizes_get (osi, SSA_NAME_VERSION (op0), true);
	  reexamine = bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (op0));
	}
      else
	{
	  /* op0 will be ADDR_EXPR here.  We should never come here during
	     reexamination.  */
	  gcc_checking_assert (osi->pass == 0);
	  addr_object_size (osi, op0, object_size_type, &bytes, &wholesize);
	}

      /* size_for_offset doesn't make sense for -1 size, but it does for size 0
	 since the wholesize could be non-zero and a negative offset could give
	 a non-zero size.  */
      if (size_unknown_p (bytes, 0))
	;
      else if ((object_size_type & OST_DYNAMIC)
	       || compare_tree_int (op1, offset_limit) <= 0)
	bytes = size_for_offset (bytes, op1, wholesize);
      /* In the static case, with a negative offset, the best estimate for
	 minimum size is size_unknown but for maximum size, the wholesize is a
	 better estimate than size_unknown.  */
      else if (object_size_type & OST_MINIMUM)
	bytes = size_unknown (object_size_type);
      else
	bytes = wholesize;
    }
  else
    bytes = wholesize = size_unknown (object_size_type);

  if (!size_valid_p (bytes, object_size_type)
      || !size_valid_p (wholesize, object_size_type))
    bytes = wholesize = size_unknown (object_size_type);

  if (object_sizes_set (osi, varno, bytes, wholesize))
    osi->changed = true;
  return reexamine;
}
1448 /* Compute the dynamic object size for VAR. Return the result in SIZE and
1449 WHOLESIZE. */
1451 static void
1452 dynamic_object_size (struct object_size_info *osi, tree var,
1453 tree *size, tree *wholesize)
1455 int object_size_type = osi->object_size_type;
1457 if (TREE_CODE (var) == SSA_NAME)
1459 unsigned varno = SSA_NAME_VERSION (var);
1461 collect_object_sizes_for (osi, var);
1462 *size = object_sizes_get (osi, varno);
1463 *wholesize = object_sizes_get (osi, varno, true);
1465 else if (TREE_CODE (var) == ADDR_EXPR)
1466 addr_object_size (osi, var, object_size_type, size, wholesize);
1467 else
1468 *size = *wholesize = size_unknown (object_size_type);
/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (object_size_type & OST_DYNAMIC)
    {
      tree then_size, then_wholesize, else_size, else_wholesize;

      dynamic_object_size (osi, then_, &then_size, &then_wholesize);
      /* ELSE_SIZE/ELSE_WHOLESIZE stay uninitialized when THEN_SIZE is
	 unknown; the short-circuit || below guarantees they are never
	 read in that case.  */
      if (!size_unknown_p (then_size, object_size_type))
	dynamic_object_size (osi, else_, &else_size, &else_wholesize);

      tree cond_size, cond_wholesize;
      if (size_unknown_p (then_size, object_size_type)
	  || size_unknown_p (else_size, object_size_type))
	cond_size = cond_wholesize = size_unknown (object_size_type);
      else
	{
	  /* Build COND ? then_size : else_size so the result tracks the
	     branch taken at runtime.  */
	  cond_size = fold_build3 (COND_EXPR, sizetype,
				   gimple_assign_rhs1 (stmt),
				   then_size, else_size);
	  cond_wholesize = fold_build3 (COND_EXPR, sizetype,
					gimple_assign_rhs1 (stmt),
					then_wholesize, else_wholesize);
	}

      object_sizes_set (osi, varno, cond_size, cond_wholesize);

      return false;
    }

  /* Static case: merge both arms, bailing out early if the first arm
     already made the size unknown.  */
  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_);
  else
    expr_object_size (osi, var, then_);

  if (object_sizes_unknown_p (object_size_type, varno))
    return reexamine;

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
/* Find size of an object passed as a parameter to the function.
   In the dynamic mode, a pointer parameter annotated with the "access"
   attribute gets its size from the named size parameter (scaled by the
   pointee type size when that is a constant); otherwise the static
   path via expr_object_size is used.  */

static void
parm_object_size (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  tree parm = SSA_NAME_VAR (var);

  if (!(object_size_type & OST_DYNAMIC) || !POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      expr_object_size (osi, var, parm);
      return;
    }

  /* Look for access attribute.  */
  rdwr_map rdwr_idx;

  tree fndecl = cfun->decl;
  const attr_access *access = get_parm_access (rdwr_idx, parm, fndecl);
  tree typesize = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (parm)));
  tree sz = NULL_TREE;

  /* If we have an explicit access attribute with a usable size argument... */
  if (access && access->sizarg != UINT_MAX && !access->internal_p
      /* ... and either PARM is void * or has a type that is complete and has a
	 constant size... */
      && ((typesize && poly_int_tree_p (typesize))
	  || (!typesize && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))))
    {
      tree fnargs = DECL_ARGUMENTS (fndecl);
      tree arg = NULL_TREE;
      unsigned argpos = 0;

      /* ... then walk through the parameters to pick the size parameter and
	 safely scale it by the type size if needed.  */
      for (arg = fnargs; arg; arg = TREE_CHAIN (arg), ++argpos)
	if (argpos == access->sizarg && INTEGRAL_TYPE_P (TREE_TYPE (arg)))
	  {
	    sz = get_or_create_ssa_default_def (cfun, arg);
	    if (sz != NULL_TREE)
	      {
		sz = fold_convert (sizetype, sz);
		if (typesize)
		  sz = size_binop (MULT_EXPR, sz, typesize);
	      }
	    break;
	  }
    }
  if (!sz)
    sz = size_unknown (object_size_type);

  object_sizes_set (osi, SSA_NAME_VERSION (var), sz, sz);
}
/* Compute an object size expression for VAR, which is the result of a PHI
   node.  Collects per-argument size/wholesize into TREE_VECs that are
   later expanded into PHI nodes by emit_phi_nodes; bails out to unknown
   if any argument's size cannot be determined or an incoming edge is
   abnormal.  */

static void
phi_dynamic_object_size (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned i, num_args = gimple_phi_num_args (stmt);
  bool wholesize_needed = false;

  /* The extra space is for the PHI result at the end, which object_sizes_set
     sets for us.  */
  tree sizes = make_tree_vec (num_args + 1);
  tree wholesizes = make_tree_vec (num_args + 1);

  /* Bail out if the size of any of the PHI arguments cannot be
     determined.  */
  for (i = 0; i < num_args; i++)
    {
      edge e = gimple_phi_arg_edge (as_a <gphi *> (stmt), i);
      if (e->flags & EDGE_COMPLEX)
	break;

      tree rhs = gimple_phi_arg_def (stmt, i);
      tree size, wholesize;

      dynamic_object_size (osi, rhs, &size, &wholesize);

      if (size_unknown_p (size, object_size_type))
	break;

      if (size != wholesize)
	wholesize_needed = true;

      TREE_VEC_ELT (sizes, i) = size;
      TREE_VEC_ELT (wholesizes, i) = wholesize;
    }

  /* I < NUM_ARGS means the loop above broke out early: some argument
     was unusable, so discard both vectors.  */
  if (i < num_args)
    {
      ggc_free (sizes);
      ggc_free (wholesizes);
      sizes = wholesizes = size_unknown (object_size_type);
    }
  /* Point to the same TREE_VEC so that we can avoid emitting two PHI
     nodes.  */
  else if (!wholesize_needed)
    {
      ggc_free (wholesizes);
      wholesizes = sizes;
    }

  object_sizes_set (osi, varno, sizes, wholesizes);
}
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is size_unknown (object_size_type).
   To differentiate addition from subtraction, ADDR_EXPR returns
   size_unknown (object_size_type) for all objects bigger than half of the
   address space, and constants less than half of the address space are
   considered addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  /* Already fully computed on a previous query or iteration.  */
  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* Initialize to 0 for maximum size and M1U for minimum size so that
	     it gets immediately overridden.  */
	  object_sizes_initialize (osi, varno,
				   size_initval (object_size_type),
				   size_initval (object_size_type));
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  if (object_size_type & OST_DYNAMIC)
	    object_sizes_set_temp (osi, varno);

	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  /* Dispatch on the kind of defining statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	/* memcpy-like calls return one of their arguments; track it.  */
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, call_stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      unknown_object_size (osi, var);
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	parm_object_size (osi, var);
      else
	/* Uninitialized SSA names point nowhere.  */
	unknown_object_size (osi, var);
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	if (object_size_type & OST_DYNAMIC)
	  {
	    phi_dynamic_object_size (osi, var);
	    break;
	  }

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (object_sizes_unknown_p (object_size_type, varno))
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* A final (or unknown) size never needs revisiting; otherwise keep the
     variable in the reexamine set for the fixpoint iteration.  */
  if (! reexamine || object_sizes_unknown_p (object_size_type, varno))
    {
      bitmap_set_bit (computed[object_size_type], varno);
      if (!(object_size_type & OST_DYNAMIC))
	bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  DEPTH is incremented each time a nonzero constant
   is added to the pointer, so revisiting VARNO at a different depth
   means the pointer grows around a cycle; everything on the walk stack
   down to VARNO then gets minimum object size 0.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  Unwind the stack
	     down to VARNO, forcing size 0 for each participant.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes_set (osi, *sp, size_zero_node,
				object_sizes_get (osi, *sp, true));
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  /* Push VAR on the walk stack before recursing.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* Only a nonzero addend advances the depth.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR off the walk stack.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      /* Skip non-positive offsets.  */
      if (integer_zerop (cst) || compare_tree_int (cst, offset_limit) > 0)
	return;

      /* Seed the walk: base at depth 1, then look for VAR reappearing
	 at a different depth along its use-def chains.  */
      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}
1955 /* Initialize data structures for the object size computation. */
1957 void
1958 init_object_sizes (void)
1960 int object_size_type;
1962 if (computed[0])
1963 return;
1965 for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
1967 object_sizes_grow (object_size_type);
1968 computed[object_size_type] = BITMAP_ALLOC (NULL);
1971 init_offset_limit ();
1975 /* Destroy data structures after the object size computation. */
1977 void
1978 fini_object_sizes (void)
1980 int object_size_type;
1982 for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
1984 object_sizes_release (object_size_type);
1985 BITMAP_FREE (computed[object_size_type]);
1989 /* Dummy valueize function. */
1991 static tree
1992 do_valueize (tree t)
1994 return t;
/* Process a __builtin_object_size or __builtin_dynamic_object_size call in
   CALL early for subobjects before any object information is lost due to
   optimization.  Insert a MIN or MAX expression of the result and
   __builtin_object_size at I so that it may be processed in the second pass.
   __builtin_dynamic_object_size is treated like __builtin_object_size here
   since we're only looking for constant bounds.  */

static void
early_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
{
  tree ost = gimple_call_arg (call, 1);
  tree lhs = gimple_call_lhs (call);
  gcc_assert (lhs != NULL_TREE);

  if (!tree_fits_uhwi_p (ost))
    return;

  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
  tree ptr = gimple_call_arg (call, 0);

  /* Only the subobject variants (types 1 and 3) benefit from running
     before optimization.  */
  if (object_size_type != 1 && object_size_type != 3)
    return;

  if (TREE_CODE (ptr) != ADDR_EXPR && TREE_CODE (ptr) != SSA_NAME)
    return;

  tree type = TREE_TYPE (lhs);
  tree bytes;
  if (!compute_builtin_object_size (ptr, object_size_type, &bytes)
      || !int_fits_type_p (bytes, type))
    return;

  /* Give the call a fresh result and combine it with the early constant:
     MAX for minimum-size queries, MIN for maximum-size ones, so the
     late pass can only tighten the bound.  */
  tree tem = make_ssa_name (type);
  gimple_call_set_lhs (call, tem);
  enum tree_code code = object_size_type & OST_MINIMUM ? MAX_EXPR : MIN_EXPR;
  tree cst = fold_convert (type, bytes);
  gimple *g = gimple_build_assign (lhs, code, tem, cst);
  gsi_insert_after (i, g, GSI_NEW_STMT);
  update_stmt (call);
}
/* Attempt to fold one __builtin_dynamic_object_size call in CALL into an
   expression and insert it at I.  Return true if it succeeds.  */

static bool
dynamic_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
{
  gcc_assert (gimple_call_num_args (call) == 2);

  tree args[2];
  args[0] = gimple_call_arg (call, 0);
  args[1] = gimple_call_arg (call, 1);

  location_t loc = EXPR_LOC_OR_LOC (args[0], input_location);
  tree result_type = gimple_call_return_type (as_a <gcall *> (call));
  tree result = fold_builtin_call_array (loc, result_type,
					 gimple_call_fn (call), 2, args);

  if (!result)
    return false;

  /* fold_builtin_call_array may wrap the result inside a
     NOP_EXPR.  */
  STRIP_NOPS (result);
  gimplify_and_update_call_from_tree (i, result);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Simplified (dynamic)\n  ");
      print_gimple_stmt (dump_file, call, 0, dump_flags);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, result);
      fprintf (dump_file, "\n");
    }

  return true;
}
/* Main driver shared by the early and late objsz passes over FUN.
   Walks every statement, folding __builtin_object_size and
   __builtin_dynamic_object_size calls; EARLY selects the conservative
   MIN/MAX lowering done before optimization.  Returns TODO flags.  */

static unsigned int
object_sizes_execute (function *fun, bool early)
{
  unsigned todo = 0;

  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  bool dynamic = false;

	  gimple *call = gsi_stmt (i);
	  if (gimple_call_builtin_p (call, BUILT_IN_DYNAMIC_OBJECT_SIZE))
	    dynamic = true;
	  else if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  /* A call whose result is unused has nothing to fold.  */
	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  init_object_sizes ();

	  /* If early, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
	     and rather than folding the builtin to the constant if any,
	     create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
	     call result and the computed constant.  Do the same for
	     __builtin_dynamic_object_size too.  */
	  if (early)
	    {
	      early_object_sizes_execute_one (&i, call);
	      continue;
	    }

	  if (dynamic)
	    {
	      if (dynamic_object_sizes_execute_one (&i, call))
		continue;
	      else
		{
		  /* If we could not find a suitable size expression, lower to
		     __builtin_object_size so that we may at least get a
		     constant lower or higher estimate.  */
		  tree bosfn = builtin_decl_implicit (BUILT_IN_OBJECT_SIZE);
		  gimple_call_set_fndecl (call, bosfn);
		  update_stmt (call);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      print_generic_expr (dump_file, gimple_call_arg (call, 0),
					  dump_flags);
		      fprintf (dump_file,
			       ": Retrying as __builtin_object_size\n");
		    }
		}
	    }

	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      tree ost = gimple_call_arg (call, 1);

	      /* Folding failed: fall back to the documented defaults,
		 0 for minimum-size queries and (size_t)-1 otherwise.  */
	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  if (object_size_type & OST_MINIMUM)
		    result = build_zero_cst (size_type_node);
		  else if (object_size_type < OST_END)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		}

	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    replace_uses_by (lhs, result);
	  else
	    replace_call_with_value (&i, result);
	}
    }

  fini_object_sizes ();
  return todo;
}
/* Simple pass to optimize all __builtin_object_size () builtins.  Runs
   late (after optimization) and folds the builtins to constants.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_objsz, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_object_sizes (m_ctxt); }
  unsigned int execute (function *fun) final override
  {
    /* EARLY == false: fold to constants rather than MIN/MAX bounds.  */
    return object_sizes_execute (fun, false);
  }
}; // class pass_object_sizes

} // anon namespace
/* Factory for the late objsz pass, called by the pass manager.  */

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}
/* Early version of pass to optimize all __builtin_object_size () builtins.
   Runs before optimization and only installs conservative MIN/MAX bounds
   for the subobject variants (see early_object_sizes_execute_one).  */

namespace {

const pass_data pass_data_early_object_sizes =
{
  GIMPLE_PASS, /* type */
  "early_objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_early_object_sizes : public gimple_opt_pass
{
public:
  pass_early_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *fun) final override
  {
    /* EARLY == true: install MIN/MAX bounds instead of folding.  */
    return object_sizes_execute (fun, true);
  }
}; // class pass_early_object_sizes

} // anon namespace
/* Factory for the early objsz pass, called by the pass manager.  */

gimple_opt_pass *
make_pass_early_object_sizes (gcc::context *ctxt)
{
  return new pass_early_object_sizes (ctxt);
}