/* gcc/tree-object-size.cc — implementation of the __builtin_object_size
   (and __builtin_dynamic_object_size) computation.  */
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2022 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-iterator.h"
33 #include "gimple-fold.h"
34 #include "tree-cfg.h"
35 #include "tree-dfa.h"
36 #include "stringpool.h"
37 #include "attribs.h"
38 #include "builtins.h"
39 #include "gimplify-me.h"
/* State shared by one object-size computation.  */

struct object_size_info
{
  /* Requested kind of size: a combination of the OST_MINIMUM,
     OST_DYNAMIC and OST_SUBOBJECT bits.  */
  int object_size_type;
  /* Pass number: 0 is the initial UD-chain walk; later passes
     reexamine names found in dependency cycles.  */
  unsigned char pass;
  /* Set when a reexamination changed some size.  */
  bool changed;
  /* visited: SSA names already processed.
     reexamine: names that were part of a dependency cycle and need
     another pass.
     unknowns: (dynamic sizes only) temporary SSA names whose size
     turned out to be unknown.  */
  bitmap visited, reexamine, unknowns;
  /* Per-SSA-name depth, used by the plus-in-loop detection
     (OST_MINIMUM only).  */
  unsigned int *depths;
  /* Work stack and its top pointer for the loop detection.  */
  unsigned int *stack, *tos;
};
/* Size estimate pair kept for each SSA name: bytes remaining from the
   pointer to the end of the object, and the size of the whole object.  */

struct GTY(()) object_size
{
  /* Estimate of bytes till the end of the object.  */
  tree size;
  /* Estimate of the size of the whole object.  */
  tree wholesize;
};
/* Forward declarations for the local helpers defined below.  */
static tree compute_object_offset (const_tree, const_tree);
static bool addr_object_size (struct object_size_info *,
			      const_tree, int, tree *, tree *t = NULL);
static tree alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);

/* object_sizes[0] is upper bound for the object size and number of bytes till
   the end of the object.
   object_sizes[1] is upper bound for the object size and number of bytes till
   the end of the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for the object size and number of bytes till
   the end of the object and object_sizes[3] lower bound for subobject.

   For static object sizes, the object size and the bytes till the end of the
   object are both INTEGER_CST.  In the dynamic case, they are finally either a
   gimple variable or an INTEGER_CST.  */
static vec<object_size> object_sizes[OST_END];

/* Bitmaps what object sizes have been computed already.  */
static bitmap computed[OST_END];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;
92 /* Return true if VAL represents an initial size for OBJECT_SIZE_TYPE. */
94 static inline bool
95 size_initval_p (tree val, int object_size_type)
97 return ((object_size_type & OST_MINIMUM)
98 ? integer_all_onesp (val) : integer_zerop (val));
101 /* Return true if VAL represents an unknown size for OBJECT_SIZE_TYPE. */
103 static inline bool
104 size_unknown_p (tree val, int object_size_type)
106 return ((object_size_type & OST_MINIMUM)
107 ? integer_zerop (val) : integer_all_onesp (val));
110 /* Return true if VAL represents a valid size for OBJECT_SIZE_TYPE. */
112 static inline bool
113 size_valid_p (tree val, int object_size_type)
115 return ((object_size_type & OST_DYNAMIC) || TREE_CODE (val) == INTEGER_CST);
118 /* Return true if VAL is usable as an object size in the object_sizes
119 vectors. */
121 static inline bool
122 size_usable_p (tree val)
124 return TREE_CODE (val) == SSA_NAME || TREE_CODE (val) == INTEGER_CST;
127 /* Return a tree with initial value for OBJECT_SIZE_TYPE. */
129 static inline tree
130 size_initval (int object_size_type)
132 return ((object_size_type & OST_MINIMUM)
133 ? TYPE_MAX_VALUE (sizetype) : size_zero_node);
136 /* Return a tree with unknown value for OBJECT_SIZE_TYPE. */
138 static inline tree
139 size_unknown (int object_size_type)
141 return ((object_size_type & OST_MINIMUM)
142 ? size_zero_node : TYPE_MAX_VALUE (sizetype));
145 /* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names. */
147 static inline void
148 object_sizes_grow (int object_size_type)
150 if (num_ssa_names > object_sizes[object_size_type].length ())
151 object_sizes[object_size_type].safe_grow (num_ssa_names, true);
/* Release the memory held by object_sizes[OBJECT_SIZE_TYPE].  */

static inline void
object_sizes_release (int object_size_type)
{
  object_sizes[object_size_type].release ();
}
162 /* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown. */
164 static inline bool
165 object_sizes_unknown_p (int object_size_type, unsigned varno)
167 return size_unknown_p (object_sizes[object_size_type][varno].size,
168 object_size_type);
/* Return the raw size expression for VARNO corresponding to OSI.  This returns
   the TREE_VEC as is and should only be used during gimplification, i.e. not
   in pass 0 where raw entries may still be under construction.  */

static inline object_size
object_sizes_get_raw (struct object_size_info *osi, unsigned varno)
{
  gcc_assert (osi->pass != 0);
  return object_sizes[osi->object_size_type][varno];
}
181 /* Return a size tree for VARNO corresponding to OSI. If WHOLE is true, return
182 the whole object size. Use this for building size expressions based on size
183 of VARNO. */
185 static inline tree
186 object_sizes_get (struct object_size_info *osi, unsigned varno,
187 bool whole = false)
189 tree ret;
190 int object_size_type = osi->object_size_type;
192 if (whole)
193 ret = object_sizes[object_size_type][varno].wholesize;
194 else
195 ret = object_sizes[object_size_type][varno].size;
197 if (object_size_type & OST_DYNAMIC)
199 if (TREE_CODE (ret) == MODIFY_EXPR)
200 return TREE_OPERAND (ret, 0);
201 else if (TREE_CODE (ret) == TREE_VEC)
202 return TREE_VEC_ELT (ret, TREE_VEC_LENGTH (ret) - 1);
203 else
204 gcc_checking_assert (size_usable_p (ret));
207 return ret;
210 /* Set size for VARNO corresponding to OSI to VAL. */
212 static inline void
213 object_sizes_initialize (struct object_size_info *osi, unsigned varno,
214 tree val, tree wholeval)
216 int object_size_type = osi->object_size_type;
218 object_sizes[object_size_type][varno].size = val;
219 object_sizes[object_size_type][varno].wholesize = wholeval;
222 /* Return a MODIFY_EXPR for cases where SSA and EXPR have the same type. The
223 TREE_VEC is returned only in case of PHI nodes. */
225 static tree
226 bundle_sizes (tree name, tree expr)
228 gcc_checking_assert (TREE_TYPE (name) == sizetype);
230 if (TREE_CODE (expr) == TREE_VEC)
232 TREE_VEC_ELT (expr, TREE_VEC_LENGTH (expr) - 1) = name;
233 return expr;
236 gcc_checking_assert (types_compatible_p (TREE_TYPE (expr), sizetype));
237 return build2 (MODIFY_EXPR, sizetype, name, expr);
/* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
   maximum.  For static sizes, each element of TREE_VEC is always INTEGER_CST
   throughout the computation.  For dynamic sizes, each element may either be a
   gimple variable, a MODIFY_EXPR or a TREE_VEC.  The MODIFY_EXPR is for
   expressions that need to be gimplified.  TREE_VECs are special, they're
   emitted only for GIMPLE_PHI and the PHI result variable is the last element
   of the vector.  Returns true when the recorded size changed.  */

static bool
object_sizes_set (struct object_size_info *osi, unsigned varno, tree val,
		  tree wholeval)
{
  int object_size_type = osi->object_size_type;
  object_size osize = object_sizes[object_size_type][varno];
  bool changed = true;

  tree oldval = osize.size;
  tree old_wholeval = osize.wholesize;

  if (object_size_type & OST_DYNAMIC)
    {
      if (bitmap_bit_p (osi->reexamine, varno))
	{
	  /* VARNO was in a dependency cycle and already has temporary
	     SSA names attached.  */
	  if (size_unknown_p (val, object_size_type))
	    {
	      /* The size resolved to unknown: remember the temporaries
	         so propagate_unknowns can poison their uses, and stop
	         reexamining VARNO.  */
	      oldval = object_sizes_get (osi, varno);
	      old_wholeval = object_sizes_get (osi, varno, true);
	      bitmap_set_bit (osi->unknowns, SSA_NAME_VERSION (oldval));
	      bitmap_set_bit (osi->unknowns, SSA_NAME_VERSION (old_wholeval));
	      bitmap_clear_bit (osi->reexamine, varno);
	    }
	  else
	    {
	      /* Re-bind the new expressions to the existing temporary
	         SSA names.  */
	      val = bundle_sizes (oldval, val);
	      wholeval = bundle_sizes (old_wholeval, wholeval);
	    }
	}
      else
	{
	  /* First time VARNO is set: it must still hold the initval.  */
	  gcc_checking_assert (size_initval_p (oldval, object_size_type));
	  gcc_checking_assert (size_initval_p (old_wholeval,
					       object_size_type));
	  /* For dynamic object sizes, all object sizes that are not gimple
	     variables will need to be gimplified.  */
	  if (wholeval != val && !size_usable_p (wholeval))
	    {
	      bitmap_set_bit (osi->reexamine, varno);
	      wholeval = bundle_sizes (make_ssa_name (sizetype), wholeval);
	    }
	  if (!size_usable_p (val))
	    {
	      bitmap_set_bit (osi->reexamine, varno);
	      tree newval = bundle_sizes (make_ssa_name (sizetype), val);
	      /* Keep size and wholesize sharing one bundle when they
	         were the same expression.  */
	      if (val == wholeval)
		wholeval = newval;
	      val = newval;
	    }
	  /* If the new value is a temporary variable, mark it for
	     reexamination.  */
	  else if (TREE_CODE (val) == SSA_NAME && !SSA_NAME_DEF_STMT (val))
	    bitmap_set_bit (osi->reexamine, varno);
	}
    }
  else
    {
      /* Static sizes: fold into the running MIN (for OST_MINIMUM) or
         MAX and report whether anything moved.  */
      enum tree_code code = (object_size_type & OST_MINIMUM
			     ? MIN_EXPR : MAX_EXPR);

      val = size_binop (code, val, oldval);
      wholeval = size_binop (code, wholeval, old_wholeval);
      changed = (tree_int_cst_compare (val, oldval) != 0
		 || tree_int_cst_compare (old_wholeval, wholeval) != 0);
    }

  object_sizes[object_size_type][varno].size = val;
  object_sizes[object_size_type][varno].wholesize = wholeval;

  return changed;
}
320 /* Set temporary SSA names for object size and whole size to resolve dependency
321 loops in dynamic size computation. */
323 static inline void
324 object_sizes_set_temp (struct object_size_info *osi, unsigned varno)
326 tree val = object_sizes_get (osi, varno);
328 if (size_initval_p (val, osi->object_size_type))
329 object_sizes_set (osi, varno,
330 make_ssa_name (sizetype),
331 make_ssa_name (sizetype));
334 /* Initialize OFFSET_LIMIT variable. */
335 static void
336 init_offset_limit (void)
338 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
339 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
340 else
341 offset_limit = -1;
342 offset_limit /= 2;
/* Bytes at end of the object with SZ from offset OFFSET.  If WHOLESIZE is not
   NULL_TREE, use it to get the net offset of the pointer, which should always
   be positive and hence, be within OFFSET_LIMIT for valid offsets.  */

static tree
size_for_offset (tree sz, tree offset, tree wholesize = NULL_TREE)
{
  gcc_checking_assert (types_compatible_p (TREE_TYPE (sz), sizetype));

  /* For negative offsets, if we have a distinct WHOLESIZE, use it to get a net
     offset from the whole object.  */
  if (wholesize && wholesize != sz
      && (TREE_CODE (sz) != INTEGER_CST
	  || TREE_CODE (wholesize) != INTEGER_CST
	  || tree_int_cst_compare (sz, wholesize)))
    {
      gcc_checking_assert (types_compatible_p (TREE_TYPE (wholesize),
					       sizetype));

      /* Restructure SZ - OFFSET as
	 WHOLESIZE - (WHOLESIZE + OFFSET - SZ) so that the offset part, i.e.
	 WHOLESIZE + OFFSET - SZ is only allowed to be positive.  */
      tree tmp = size_binop (MAX_EXPR, wholesize, sz);
      offset = fold_build2 (PLUS_EXPR, sizetype, tmp, offset);
      offset = fold_build2 (MINUS_EXPR, sizetype, offset, sz);
      sz = tmp;
    }

  /* Safe to convert now, since a valid net offset should be non-negative.  */
  if (!useless_type_conversion_p (sizetype, TREE_TYPE (offset)))
    offset = fold_convert (sizetype, offset);

  if (TREE_CODE (offset) == INTEGER_CST)
    {
      if (integer_zerop (offset))
	return sz;

      /* Negative or too large offset even after adjustment, cannot be within
	 bounds of an object.  */
      if (compare_tree_int (offset, offset_limit) > 0)
	return size_zero_node;
    }

  /* The MAX_EXPR clamps SZ - OFFSET at zero instead of wrapping.  */
  return size_binop (MINUS_EXPR, size_binop (MAX_EXPR, sz, offset), offset);
}
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* Field offset is byte offset plus the bit offset scaled down.  */
      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These do not change the offset; look through them.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* The imaginary part follows the real part.  */
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      if (! integer_zerop (low_bound))
	t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  /* Negate a constant negative index and subtract instead, to
	     stay in unsigned sizetype arithmetic.  */
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
461 /* Returns the size of the object designated by DECL considering its
462 initializer if it either has one or if it would not affect its size,
463 otherwise the size of the object without the initializer when MIN
464 is true, else null. An object's initializer affects the object's
465 size if it's a struct type with a flexible array member. */
467 tree
468 decl_init_size (tree decl, bool min)
470 tree size = DECL_SIZE_UNIT (decl);
471 tree type = TREE_TYPE (decl);
472 if (TREE_CODE (type) != RECORD_TYPE)
473 return size;
475 tree last = last_field (type);
476 if (!last)
477 return size;
479 tree last_type = TREE_TYPE (last);
480 if (TREE_CODE (last_type) != ARRAY_TYPE
481 || TYPE_SIZE (last_type))
482 return size;
484 /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
485 of the initializer and sometimes doesn't. */
486 size = TYPE_SIZE_UNIT (type);
487 tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
488 tree compsize = component_ref_size (ref);
489 if (!compsize)
490 return min ? size : NULL_TREE;
492 /* The size includes tail padding and initializer elements. */
493 tree pos = byte_position (last);
494 size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
495 return size;
498 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
499 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
500 If unknown, return size_unknown (object_size_type). */
502 static bool
503 addr_object_size (struct object_size_info *osi, const_tree ptr,
504 int object_size_type, tree *psize, tree *pwholesize)
506 tree pt_var, pt_var_size = NULL_TREE, pt_var_wholesize = NULL_TREE;
507 tree var_size, bytes, wholebytes;
509 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
511 /* Set to unknown and overwrite just before returning if the size
512 could be determined. */
513 *psize = size_unknown (object_size_type);
514 if (pwholesize)
515 *pwholesize = size_unknown (object_size_type);
517 pt_var = TREE_OPERAND (ptr, 0);
518 while (handled_component_p (pt_var))
519 pt_var = TREE_OPERAND (pt_var, 0);
521 if (!pt_var)
522 return false;
524 if (TREE_CODE (pt_var) == MEM_REF)
526 tree sz, wholesize;
528 if (!osi || (object_size_type & OST_SUBOBJECT) != 0
529 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
531 compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
532 object_size_type & ~OST_SUBOBJECT, &sz);
533 wholesize = sz;
535 else
537 tree var = TREE_OPERAND (pt_var, 0);
538 if (osi->pass == 0)
539 collect_object_sizes_for (osi, var);
540 if (bitmap_bit_p (computed[object_size_type],
541 SSA_NAME_VERSION (var)))
543 sz = object_sizes_get (osi, SSA_NAME_VERSION (var));
544 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (var), true);
546 else
547 sz = wholesize = size_unknown (object_size_type);
549 if (!size_unknown_p (sz, object_size_type))
550 sz = size_for_offset (sz, TREE_OPERAND (pt_var, 1), wholesize);
552 if (!size_unknown_p (sz, object_size_type)
553 && (TREE_CODE (sz) != INTEGER_CST
554 || compare_tree_int (sz, offset_limit) < 0))
556 pt_var_size = sz;
557 pt_var_wholesize = wholesize;
560 else if (DECL_P (pt_var))
562 pt_var_size = pt_var_wholesize
563 = decl_init_size (pt_var, object_size_type & OST_MINIMUM);
564 if (!pt_var_size)
565 return false;
567 else if (TREE_CODE (pt_var) == STRING_CST)
568 pt_var_size = pt_var_wholesize = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
569 else
570 return false;
572 if (pt_var_size)
574 /* Validate the size determined above if it is a constant. */
575 if (TREE_CODE (pt_var_size) == INTEGER_CST
576 && compare_tree_int (pt_var_size, offset_limit) >= 0)
577 return false;
580 if (pt_var != TREE_OPERAND (ptr, 0))
582 tree var;
584 if (object_size_type & OST_SUBOBJECT)
586 var = TREE_OPERAND (ptr, 0);
588 while (var != pt_var
589 && TREE_CODE (var) != BIT_FIELD_REF
590 && TREE_CODE (var) != COMPONENT_REF
591 && TREE_CODE (var) != ARRAY_REF
592 && TREE_CODE (var) != ARRAY_RANGE_REF
593 && TREE_CODE (var) != REALPART_EXPR
594 && TREE_CODE (var) != IMAGPART_EXPR)
595 var = TREE_OPERAND (var, 0);
596 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
597 var = TREE_OPERAND (var, 0);
598 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
599 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
600 || (pt_var_size && TREE_CODE (pt_var_size) == INTEGER_CST
601 && tree_int_cst_lt (pt_var_size,
602 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
603 var = pt_var;
604 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
606 tree v = var;
607 /* For &X->fld, compute object size only if fld isn't the last
608 field, as struct { int i; char c[1]; } is often used instead
609 of flexible array member. */
610 while (v && v != pt_var)
611 switch (TREE_CODE (v))
613 case ARRAY_REF:
614 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0))))
616 tree domain
617 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
618 if (domain && TYPE_MAX_VALUE (domain))
620 v = NULL_TREE;
621 break;
624 v = TREE_OPERAND (v, 0);
625 break;
626 case REALPART_EXPR:
627 case IMAGPART_EXPR:
628 v = NULL_TREE;
629 break;
630 case COMPONENT_REF:
631 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
633 v = NULL_TREE;
634 break;
636 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
637 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
638 != UNION_TYPE
639 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
640 != QUAL_UNION_TYPE)
641 break;
642 else
643 v = TREE_OPERAND (v, 0);
644 if (TREE_CODE (v) == COMPONENT_REF
645 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
646 == RECORD_TYPE)
648 tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
649 for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
650 if (TREE_CODE (fld_chain) == FIELD_DECL)
651 break;
653 if (fld_chain)
655 v = NULL_TREE;
656 break;
658 v = TREE_OPERAND (v, 0);
660 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
661 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
662 != UNION_TYPE
663 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
664 != QUAL_UNION_TYPE)
665 break;
666 else
667 v = TREE_OPERAND (v, 0);
668 if (v != pt_var)
669 v = NULL_TREE;
670 else
671 v = pt_var;
672 break;
673 default:
674 v = pt_var;
675 break;
677 if (v == pt_var)
678 var = pt_var;
681 else
682 var = pt_var;
684 if (var != pt_var)
686 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
687 if (!TREE_CONSTANT (var_size))
688 var_size = get_or_create_ssa_default_def (cfun, var_size);
689 if (!var_size)
690 return false;
692 else if (!pt_var_size)
693 return false;
694 else
695 var_size = pt_var_size;
696 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
697 if (bytes != error_mark_node)
699 bytes = size_for_offset (var_size, bytes);
700 if (var != pt_var && pt_var_size && TREE_CODE (pt_var) == MEM_REF)
702 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0),
703 pt_var);
704 if (bytes2 != error_mark_node)
706 bytes2 = size_for_offset (pt_var_size, bytes2);
707 bytes = size_binop (MIN_EXPR, bytes, bytes2);
711 else
712 bytes = size_unknown (object_size_type);
714 wholebytes
715 = object_size_type & OST_SUBOBJECT ? var_size : pt_var_wholesize;
717 else if (!pt_var_size)
718 return false;
719 else
721 bytes = pt_var_size;
722 wholebytes = pt_var_wholesize;
725 if (!size_unknown_p (bytes, object_size_type)
726 && size_valid_p (bytes, object_size_type)
727 && !size_unknown_p (bytes, object_size_type)
728 && size_valid_p (wholebytes, object_size_type))
730 *psize = bytes;
731 if (pwholesize)
732 *pwholesize = wholebytes;
733 return true;
736 return false;
740 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
741 Handles calls to functions declared with attribute alloc_size.
742 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
743 If unknown, return size_unknown (object_size_type). */
745 static tree
746 alloc_object_size (const gcall *call, int object_size_type)
748 gcc_assert (is_gimple_call (call));
750 tree calltype;
751 tree callfn = gimple_call_fndecl (call);
752 if (callfn)
753 calltype = TREE_TYPE (callfn);
754 else
755 calltype = gimple_call_fntype (call);
757 if (!calltype)
758 return size_unknown (object_size_type);
760 /* Set to positions of alloc_size arguments. */
761 int arg1 = -1, arg2 = -1;
762 tree alloc_size = lookup_attribute ("alloc_size",
763 TYPE_ATTRIBUTES (calltype));
764 if (alloc_size && TREE_VALUE (alloc_size))
766 tree p = TREE_VALUE (alloc_size);
768 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
769 if (TREE_CHAIN (p))
770 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
772 else if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
773 && callfn && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callfn)))
774 arg1 = 0;
776 /* Non-const arguments are OK here, let the caller handle constness. */
777 if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
778 || arg2 >= (int) gimple_call_num_args (call))
779 return size_unknown (object_size_type);
781 tree bytes = NULL_TREE;
782 if (arg2 >= 0)
783 bytes = size_binop (MULT_EXPR,
784 fold_convert (sizetype, gimple_call_arg (call, arg1)),
785 fold_convert (sizetype, gimple_call_arg (call, arg2)));
786 else if (arg1 >= 0)
787 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
789 return bytes ? bytes : size_unknown (object_size_type);
793 /* If object size is propagated from one of function's arguments directly
794 to its return value, return that argument for GIMPLE_CALL statement CALL.
795 Otherwise return NULL. */
797 static tree
798 pass_through_call (const gcall *call)
800 unsigned rf = gimple_call_return_flags (call);
801 if (rf & ERF_RETURNS_ARG)
803 unsigned argnum = rf & ERF_RETURN_ARG_MASK;
804 if (argnum < gimple_call_num_args (call))
805 return gimple_call_arg (call, argnum);
808 /* __builtin_assume_aligned is intentionally not marked RET1. */
809 if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
810 return gimple_call_arg (call, 0);
812 return NULL_TREE;
/* Emit PHI nodes for the size (and, when distinct, whole size)
   expressions of STMT, a GIMPLE_PHI.  SIZE and WHOLESIZE are TREE_VEC
   bundles whose last element is the PHI result variable and whose
   other elements are the per-edge size expressions.  */

static void
emit_phi_nodes (gimple *stmt, tree size, tree wholesize)
{
  tree phires;
  gphi *wholephi = NULL;

  /* Only create a separate whole-size PHI when it differs from SIZE.  */
  if (wholesize != size)
    {
      phires = TREE_VEC_ELT (wholesize, TREE_VEC_LENGTH (wholesize) - 1);
      wholephi = create_phi_node (phires, gimple_bb (stmt));
    }

  phires = TREE_VEC_ELT (size, TREE_VEC_LENGTH (size) - 1);
  gphi *phi = create_phi_node (phires, gimple_bb (stmt));
  gphi *obj_phi = as_a <gphi *> (stmt);

  gcc_checking_assert (TREE_CODE (wholesize) == TREE_VEC);
  gcc_checking_assert (TREE_CODE (size) == TREE_VEC);

  for (unsigned i = 0; i < gimple_phi_num_args (stmt); i++)
    {
      gimple_seq seq = NULL;
      tree wsz = TREE_VEC_ELT (wholesize, i);
      tree sz = TREE_VEC_ELT (size, i);

      /* If we built an expression, we will need to build statements
	 and insert them on the edge right away.  */
      if (TREE_CODE (wsz) != SSA_NAME)
	wsz = force_gimple_operand (wsz, &seq, true, NULL);
      if (TREE_CODE (sz) != SSA_NAME)
	{
	  gimple_seq s;
	  sz = force_gimple_operand (sz, &s, true, NULL);
	  gimple_seq_add_seq (&seq, s);
	}

      if (seq)
	gsi_insert_seq_on_edge (gimple_phi_arg_edge (obj_phi, i), seq);

      if (wholephi)
	add_phi_arg (wholephi, wsz,
		     gimple_phi_arg_edge (obj_phi, i),
		     gimple_phi_arg_location (obj_phi, i));

      add_phi_arg (phi, sz,
		   gimple_phi_arg_edge (obj_phi, i),
		   gimple_phi_arg_location (obj_phi, i));
    }
}
/* Descend through EXPR and return size_unknown if it uses any SSA variable
   object_size_set or object_size_set_temp generated, which turned out to be
   size_unknown, as noted in UNKNOWNS.  Otherwise return EXPR unchanged.  */

static tree
propagate_unknowns (object_size_info *osi, tree expr)
{
  int object_size_type = osi->object_size_type;

  switch (TREE_CODE (expr))
    {
    case SSA_NAME:
      if (bitmap_bit_p (osi->unknowns, SSA_NAME_VERSION (expr)))
	return size_unknown (object_size_type);
      return expr;

    case MIN_EXPR:
    case MAX_EXPR:
      {
	/* Both operands may contain size temporaries; check each.  */
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0));
	if (size_unknown_p (res, object_size_type))
	  return res;

	res = propagate_unknowns (osi, TREE_OPERAND (expr, 1));
	if (size_unknown_p (res, object_size_type))
	  return res;

	return expr;
      }
    case MODIFY_EXPR:
      {
	/* Only the RHS can reference other size temporaries.  */
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 1));
	if (size_unknown_p (res, object_size_type))
	  return res;
	return expr;
      }
    case TREE_VEC:
      /* PHI bundle: any unknown element poisons the whole PHI.  */
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	{
	  tree res = propagate_unknowns (osi, TREE_VEC_ELT (expr, i));
	  if (size_unknown_p (res, object_size_type))
	    return res;
	}
      return expr;
    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	/* Only operand 0 can be a size temporary here; operand 1 is an
	   offset.  */
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0));
	if (size_unknown_p (res, object_size_type))
	  return res;

	return expr;
      }
    default:
      return expr;
    }
}
/* Walk through size expressions that need reexamination and generate
   statements for them.  */

static void
gimplify_size_expressions (object_size_info *osi)
{
  int object_size_type = osi->object_size_type;
  bitmap_iterator bi;
  unsigned int i;
  bool changed;

  /* Step 1: Propagate unknowns into expressions, iterating to a fixed
     point since poisoning one size may make another unknown.  */
  bitmap reexamine = BITMAP_ALLOC (NULL);
  bitmap_copy (reexamine, osi->reexamine);
  do
    {
      changed = false;
      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
	{
	  object_size cur = object_sizes_get_raw (osi, i);

	  if (size_unknown_p (propagate_unknowns (osi, cur.size),
			      object_size_type)
	      || size_unknown_p (propagate_unknowns (osi, cur.wholesize),
				 object_size_type))
	    {
	      object_sizes_set (osi, i,
				size_unknown (object_size_type),
				size_unknown (object_size_type));
	      changed = true;
	    }
	}
      /* object_sizes_set may clear bits in osi->reexamine; refresh our
	 iteration copy.  */
      bitmap_copy (reexamine, osi->reexamine);
    }
  while (changed);
  /* NOTE(review): REEXAMINE does not appear to be freed here —
     possible bitmap leak; confirm against upstream.  */

  /* Release all unknowns.  */
  EXECUTE_IF_SET_IN_BITMAP (osi->unknowns, 0, i, bi)
    release_ssa_name (ssa_name (i));

  /* Expand all size expressions to put their definitions close to the objects
     for which size is being computed.  */
  EXECUTE_IF_SET_IN_BITMAP (osi->reexamine, 0, i, bi)
    {
      gimple_seq seq = NULL;
      object_size osize = object_sizes_get_raw (osi, i);

      gimple *stmt = SSA_NAME_DEF_STMT (ssa_name (i));
      enum gimple_code code = gimple_code (stmt);

      /* PHI nodes need special attention.  */
      if (code == GIMPLE_PHI)
	emit_phi_nodes (stmt, osize.size, osize.wholesize);
      else
	{
	  tree size_expr = NULL_TREE;

	  /* Bundle wholesize in with the size to gimplify if needed.  */
	  if (osize.wholesize != osize.size
	      && !size_usable_p (osize.wholesize))
	    size_expr = size_binop (COMPOUND_EXPR,
				    osize.wholesize,
				    osize.size);
	  else if (!size_usable_p (osize.size))
	    size_expr = osize.size;

	  if (size_expr)
	    {
	      gimple_stmt_iterator gsi;
	      /* For names with no defining statement (e.g. default
		 defs), insert at the start of the function.  */
	      if (code == GIMPLE_NOP)
		gsi = gsi_start_bb (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	      else
		gsi = gsi_for_stmt (stmt);

	      force_gimple_operand (size_expr, &seq, true, NULL);
	      gsi_insert_seq_before (&gsi, seq, GSI_CONTINUE_LINKING);
	    }
	}

      /* We're done, so replace the MODIFY_EXPRs with the SSA names.  */
      object_sizes_initialize (osi, i,
			       object_sizes_get (osi, i),
			       object_sizes_get (osi, i, true));
    }
}
1011 /* Compute __builtin_object_size value for PTR and set *PSIZE to
1012 the resulting value. If the declared object is known and PDECL
1013 is nonnull, sets *PDECL to the object's DECL. OBJECT_SIZE_TYPE
1014 is the second argument to __builtin_object_size.
1015 Returns true on success and false when the object size could not
1016 be determined. */
1018 bool
1019 compute_builtin_object_size (tree ptr, int object_size_type,
1020 tree *psize)
1022 gcc_assert (object_size_type >= 0 && object_size_type < OST_END);
1024 /* Set to unknown and overwrite just before returning if the size
1025 could be determined. */
1026 *psize = size_unknown (object_size_type);
1028 if (! offset_limit)
1029 init_offset_limit ();
1031 if (TREE_CODE (ptr) == ADDR_EXPR)
1032 return addr_object_size (NULL, ptr, object_size_type, psize);
1034 if (TREE_CODE (ptr) != SSA_NAME
1035 || !POINTER_TYPE_P (TREE_TYPE (ptr)))
1036 return false;
1038 if (computed[object_size_type] == NULL)
1040 if (optimize || object_size_type & OST_SUBOBJECT)
1041 return false;
1043 /* When not optimizing, rather than failing, make a small effort
1044 to determine the object size without the full benefit of
1045 the (costly) computation below. */
1046 gimple *def = SSA_NAME_DEF_STMT (ptr);
1047 if (gimple_code (def) == GIMPLE_ASSIGN)
1049 tree_code code = gimple_assign_rhs_code (def);
1050 if (code == POINTER_PLUS_EXPR)
1052 tree offset = gimple_assign_rhs2 (def);
1053 ptr = gimple_assign_rhs1 (def);
1055 if (((object_size_type & OST_DYNAMIC)
1056 || (tree_fits_shwi_p (offset)
1057 && compare_tree_int (offset, offset_limit) <= 0))
1058 && compute_builtin_object_size (ptr, object_size_type,
1059 psize))
1061 *psize = size_for_offset (*psize, offset);
1062 return true;
1066 return false;
1069 struct object_size_info osi;
1070 osi.object_size_type = object_size_type;
1071 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
1073 bitmap_iterator bi;
1074 unsigned int i;
1076 object_sizes_grow (object_size_type);
1077 if (dump_file)
1079 fprintf (dump_file, "Computing %s %s%sobject size for ",
1080 (object_size_type & OST_MINIMUM) ? "minimum" : "maximum",
1081 (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
1082 (object_size_type & OST_SUBOBJECT) ? "sub" : "");
1083 print_generic_expr (dump_file, ptr, dump_flags);
1084 fprintf (dump_file, ":\n");
1087 osi.visited = BITMAP_ALLOC (NULL);
1088 osi.reexamine = BITMAP_ALLOC (NULL);
1090 if (object_size_type & OST_DYNAMIC)
1091 osi.unknowns = BITMAP_ALLOC (NULL);
1092 else
1094 osi.depths = NULL;
1095 osi.stack = NULL;
1096 osi.tos = NULL;
1099 /* First pass: walk UD chains, compute object sizes that
1100 can be computed. osi.reexamine bitmap at the end will
1101 contain what variables were found in dependency cycles
1102 and therefore need to be reexamined. */
1103 osi.pass = 0;
1104 osi.changed = false;
1105 collect_object_sizes_for (&osi, ptr);
1107 if (object_size_type & OST_DYNAMIC)
1109 osi.pass = 1;
1110 gimplify_size_expressions (&osi);
1111 BITMAP_FREE (osi.unknowns);
1112 bitmap_clear (osi.reexamine);
1115 /* Second pass: keep recomputing object sizes of variables
1116 that need reexamination, until no object sizes are
1117 increased or all object sizes are computed. */
1118 if (! bitmap_empty_p (osi.reexamine))
1120 bitmap reexamine = BITMAP_ALLOC (NULL);
1122 /* If looking for minimum instead of maximum object size,
1123 detect cases where a pointer is increased in a loop.
1124 Although even without this detection pass 2 would eventually
1125 terminate, it could take a long time. If a pointer is
1126 increasing this way, we need to assume 0 object size.
1127 E.g. p = &buf[0]; while (cond) p = p + 4; */
1128 if (object_size_type & OST_MINIMUM)
1130 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
1131 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
1132 osi.tos = osi.stack;
1133 osi.pass = 1;
1134 /* collect_object_sizes_for is changing
1135 osi.reexamine bitmap, so iterate over a copy. */
1136 bitmap_copy (reexamine, osi.reexamine);
1137 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
1138 if (bitmap_bit_p (osi.reexamine, i))
1139 check_for_plus_in_loops (&osi, ssa_name (i));
1141 free (osi.depths);
1142 osi.depths = NULL;
1143 free (osi.stack);
1144 osi.stack = NULL;
1145 osi.tos = NULL;
1150 osi.pass = 2;
1151 osi.changed = false;
1152 /* collect_object_sizes_for is changing
1153 osi.reexamine bitmap, so iterate over a copy. */
1154 bitmap_copy (reexamine, osi.reexamine);
1155 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
1156 if (bitmap_bit_p (osi.reexamine, i))
1158 collect_object_sizes_for (&osi, ssa_name (i));
1159 if (dump_file && (dump_flags & TDF_DETAILS))
1161 fprintf (dump_file, "Reexamining ");
1162 print_generic_expr (dump_file, ssa_name (i),
1163 dump_flags);
1164 fprintf (dump_file, "\n");
1168 while (osi.changed);
1170 BITMAP_FREE (reexamine);
1172 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
1173 bitmap_set_bit (computed[object_size_type], i);
1175 /* Debugging dumps. */
1176 if (dump_file)
1178 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
1179 if (!object_sizes_unknown_p (object_size_type, i))
1181 print_generic_expr (dump_file, ssa_name (i),
1182 dump_flags);
1183 fprintf (dump_file,
1184 ": %s %s%sobject size ",
1185 ((object_size_type & OST_MINIMUM) ? "minimum"
1186 : "maximum"),
1187 (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
1188 (object_size_type & OST_SUBOBJECT) ? "sub" : "");
1189 print_generic_expr (dump_file, object_sizes_get (&osi, i),
1190 dump_flags);
1191 fprintf (dump_file, "\n");
1195 BITMAP_FREE (osi.reexamine);
1196 BITMAP_FREE (osi.visited);
1199 *psize = object_sizes_get (&osi, SSA_NAME_VERSION (ptr));
1200 return !size_unknown_p (*psize, object_size_type);
1203 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
1205 static void
1206 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
1208 int object_size_type = osi->object_size_type;
1209 unsigned int varno = SSA_NAME_VERSION (ptr);
1210 tree bytes, wholesize;
1212 gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1213 gcc_assert (osi->pass == 0);
1215 if (TREE_CODE (value) == WITH_SIZE_EXPR)
1216 value = TREE_OPERAND (value, 0);
1218 /* Pointer variables should have been handled by merge_object_sizes. */
1219 gcc_assert (TREE_CODE (value) != SSA_NAME
1220 || !POINTER_TYPE_P (TREE_TYPE (value)));
1222 if (TREE_CODE (value) == ADDR_EXPR)
1223 addr_object_size (osi, value, object_size_type, &bytes, &wholesize);
1224 else
1225 bytes = wholesize = size_unknown (object_size_type);
1227 object_sizes_set (osi, varno, bytes, wholesize);
1231 /* Compute object_sizes for PTR, defined to the result of a call. */
1233 static void
1234 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
1236 int object_size_type = osi->object_size_type;
1237 unsigned int varno = SSA_NAME_VERSION (ptr);
1239 gcc_assert (is_gimple_call (call));
1241 gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1242 gcc_assert (osi->pass == 0);
1243 tree bytes = alloc_object_size (call, object_size_type);
1245 if (!size_valid_p (bytes, object_size_type))
1246 bytes = size_unknown (object_size_type);
1248 object_sizes_set (osi, varno, bytes, bytes);
1252 /* Compute object_sizes for PTR, defined to an unknown value. */
1254 static void
1255 unknown_object_size (struct object_size_info *osi, tree ptr)
1257 int object_size_type = osi->object_size_type;
1258 unsigned int varno = SSA_NAME_VERSION (ptr);
1260 gcc_checking_assert (!object_sizes_unknown_p (object_size_type, varno));
1261 gcc_checking_assert (osi->pass == 0);
1262 tree bytes = size_unknown (object_size_type);
1264 object_sizes_set (osi, varno, bytes, bytes);
1268 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
1269 the object size might need reexamination later. */
1271 static bool
1272 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig)
1274 int object_size_type = osi->object_size_type;
1275 unsigned int varno = SSA_NAME_VERSION (dest);
1276 tree orig_bytes, wholesize;
1278 if (object_sizes_unknown_p (object_size_type, varno))
1279 return false;
1281 if (osi->pass == 0)
1282 collect_object_sizes_for (osi, orig);
1284 orig_bytes = object_sizes_get (osi, SSA_NAME_VERSION (orig));
1285 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (orig), true);
1287 if (object_sizes_set (osi, varno, orig_bytes, wholesize))
1288 osi->changed = true;
1290 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
1294 /* Compute object_sizes for VAR, defined to the result of an assignment
1295 with operator POINTER_PLUS_EXPR. Return true if the object size might
1296 need reexamination later. */
1298 static bool
1299 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
1301 int object_size_type = osi->object_size_type;
1302 unsigned int varno = SSA_NAME_VERSION (var);
1303 tree bytes, wholesize;
1304 tree op0, op1;
1305 bool reexamine = false;
1307 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1309 op0 = gimple_assign_rhs1 (stmt);
1310 op1 = gimple_assign_rhs2 (stmt);
1312 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
1314 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
1315 gcc_assert (TREE_CODE (rhs) == MEM_REF);
1316 op0 = TREE_OPERAND (rhs, 0);
1317 op1 = TREE_OPERAND (rhs, 1);
1319 else
1320 gcc_unreachable ();
1322 if (object_sizes_unknown_p (object_size_type, varno))
1323 return false;
1325 /* Handle PTR + OFFSET here. */
1326 if (size_valid_p (op1, object_size_type)
1327 && (TREE_CODE (op0) == SSA_NAME || TREE_CODE (op0) == ADDR_EXPR))
1329 if (TREE_CODE (op0) == SSA_NAME)
1331 if (osi->pass == 0)
1332 collect_object_sizes_for (osi, op0);
1334 bytes = object_sizes_get (osi, SSA_NAME_VERSION (op0));
1335 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (op0), true);
1336 reexamine = bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (op0));
1338 else
1340 /* op0 will be ADDR_EXPR here. We should never come here during
1341 reexamination. */
1342 gcc_checking_assert (osi->pass == 0);
1343 addr_object_size (osi, op0, object_size_type, &bytes, &wholesize);
1346 /* size_for_offset doesn't make sense for -1 size, but it does for size 0
1347 since the wholesize could be non-zero and a negative offset could give
1348 a non-zero size. */
1349 if (size_unknown_p (bytes, 0))
1351 else if ((object_size_type & OST_DYNAMIC)
1352 || compare_tree_int (op1, offset_limit) <= 0)
1353 bytes = size_for_offset (bytes, op1, wholesize);
1354 /* In the static case, with a negative offset, the best estimate for
1355 minimum size is size_unknown but for maximum size, the wholesize is a
1356 better estimate than size_unknown. */
1357 else if (object_size_type & OST_MINIMUM)
1358 bytes = size_unknown (object_size_type);
1359 else
1360 bytes = wholesize;
1362 else
1363 bytes = wholesize = size_unknown (object_size_type);
1365 if (!size_valid_p (bytes, object_size_type)
1366 || !size_valid_p (wholesize, object_size_type))
1367 bytes = wholesize = size_unknown (object_size_type);
1369 if (object_sizes_set (osi, varno, bytes, wholesize))
1370 osi->changed = true;
1371 return reexamine;
1374 /* Compute the dynamic object size for VAR. Return the result in SIZE and
1375 WHOLESIZE. */
1377 static void
1378 dynamic_object_size (struct object_size_info *osi, tree var,
1379 tree *size, tree *wholesize)
1381 int object_size_type = osi->object_size_type;
1383 if (TREE_CODE (var) == SSA_NAME)
1385 unsigned varno = SSA_NAME_VERSION (var);
1387 collect_object_sizes_for (osi, var);
1388 *size = object_sizes_get (osi, varno);
1389 *wholesize = object_sizes_get (osi, varno, true);
1391 else if (TREE_CODE (var) == ADDR_EXPR)
1392 addr_object_size (osi, var, object_size_type, size, wholesize);
1393 else
1394 *size = *wholesize = size_unknown (object_size_type);
1397 /* Compute object_sizes for VAR, defined at STMT, which is
1398 a COND_EXPR. Return true if the object size might need reexamination
1399 later. */
1401 static bool
1402 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
1404 tree then_, else_;
1405 int object_size_type = osi->object_size_type;
1406 unsigned int varno = SSA_NAME_VERSION (var);
1407 bool reexamine = false;
1409 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
1411 if (object_sizes_unknown_p (object_size_type, varno))
1412 return false;
1414 then_ = gimple_assign_rhs2 (stmt);
1415 else_ = gimple_assign_rhs3 (stmt);
1417 if (object_size_type & OST_DYNAMIC)
1419 tree then_size, then_wholesize, else_size, else_wholesize;
1421 dynamic_object_size (osi, then_, &then_size, &then_wholesize);
1422 if (!size_unknown_p (then_size, object_size_type))
1423 dynamic_object_size (osi, else_, &else_size, &else_wholesize);
1425 tree cond_size, cond_wholesize;
1426 if (size_unknown_p (then_size, object_size_type)
1427 || size_unknown_p (else_size, object_size_type))
1428 cond_size = cond_wholesize = size_unknown (object_size_type);
1429 else
1431 cond_size = fold_build3 (COND_EXPR, sizetype,
1432 gimple_assign_rhs1 (stmt),
1433 then_size, else_size);
1434 cond_wholesize = fold_build3 (COND_EXPR, sizetype,
1435 gimple_assign_rhs1 (stmt),
1436 then_wholesize, else_wholesize);
1439 object_sizes_set (osi, varno, cond_size, cond_wholesize);
1441 return false;
1444 if (TREE_CODE (then_) == SSA_NAME)
1445 reexamine |= merge_object_sizes (osi, var, then_);
1446 else
1447 expr_object_size (osi, var, then_);
1449 if (object_sizes_unknown_p (object_size_type, varno))
1450 return reexamine;
1452 if (TREE_CODE (else_) == SSA_NAME)
1453 reexamine |= merge_object_sizes (osi, var, else_);
1454 else
1455 expr_object_size (osi, var, else_);
1457 return reexamine;
1460 /* Find size of an object passed as a parameter to the function. */
1462 static void
1463 parm_object_size (struct object_size_info *osi, tree var)
1465 int object_size_type = osi->object_size_type;
1466 tree parm = SSA_NAME_VAR (var);
1468 if (!(object_size_type & OST_DYNAMIC) || !POINTER_TYPE_P (TREE_TYPE (parm)))
1470 expr_object_size (osi, var, parm);
1471 return;
1474 /* Look for access attribute. */
1475 rdwr_map rdwr_idx;
1477 tree fndecl = cfun->decl;
1478 const attr_access *access = get_parm_access (rdwr_idx, parm, fndecl);
1479 tree typesize = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (parm)));
1480 tree sz = NULL_TREE;
1482 /* If we have an explicit access attribute with a usable size argument... */
1483 if (access && access->sizarg != UINT_MAX && !access->internal_p
1484 /* ... and either PARM is void * or has a type that is complete and has a
1485 constant size... */
1486 && ((typesize && poly_int_tree_p (typesize))
1487 || (!typesize && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))))
1489 tree fnargs = DECL_ARGUMENTS (fndecl);
1490 tree arg = NULL_TREE;
1491 unsigned argpos = 0;
1493 /* ... then walk through the parameters to pick the size parameter and
1494 safely scale it by the type size if needed. */
1495 for (arg = fnargs; arg; arg = TREE_CHAIN (arg), ++argpos)
1496 if (argpos == access->sizarg && INTEGRAL_TYPE_P (TREE_TYPE (arg)))
1498 sz = get_or_create_ssa_default_def (cfun, arg);
1499 if (sz != NULL_TREE)
1501 sz = fold_convert (sizetype, sz);
1502 if (typesize)
1503 sz = size_binop (MULT_EXPR, sz, typesize);
1505 break;
1508 if (!sz)
1509 sz = size_unknown (object_size_type);
1511 object_sizes_set (osi, SSA_NAME_VERSION (var), sz, sz);
1514 /* Compute an object size expression for VAR, which is the result of a PHI
1515 node. */
1517 static void
1518 phi_dynamic_object_size (struct object_size_info *osi, tree var)
1520 int object_size_type = osi->object_size_type;
1521 unsigned int varno = SSA_NAME_VERSION (var);
1522 gimple *stmt = SSA_NAME_DEF_STMT (var);
1523 unsigned i, num_args = gimple_phi_num_args (stmt);
1524 bool wholesize_needed = false;
1526 /* The extra space is for the PHI result at the end, which object_sizes_set
1527 sets for us. */
1528 tree sizes = make_tree_vec (num_args + 1);
1529 tree wholesizes = make_tree_vec (num_args + 1);
1531 /* Bail out if the size of any of the PHI arguments cannot be
1532 determined. */
1533 for (i = 0; i < num_args; i++)
1535 edge e = gimple_phi_arg_edge (as_a <gphi *> (stmt), i);
1536 if (e->flags & EDGE_COMPLEX)
1537 break;
1539 tree rhs = gimple_phi_arg_def (stmt, i);
1540 tree size, wholesize;
1542 dynamic_object_size (osi, rhs, &size, &wholesize);
1544 if (size_unknown_p (size, object_size_type))
1545 break;
1547 if (size != wholesize)
1548 wholesize_needed = true;
1550 TREE_VEC_ELT (sizes, i) = size;
1551 TREE_VEC_ELT (wholesizes, i) = wholesize;
1554 if (i < num_args)
1556 ggc_free (sizes);
1557 ggc_free (wholesizes);
1558 sizes = wholesizes = size_unknown (object_size_type);
1561 /* Point to the same TREE_VEC so that we can avoid emitting two PHI
1562 nodes. */
1563 else if (!wholesize_needed)
1565 ggc_free (wholesizes);
1566 wholesizes = sizes;
1569 object_sizes_set (osi, varno, sizes, wholesizes);
1572 /* Compute object sizes for VAR.
1573 For ADDR_EXPR an object size is the number of remaining bytes
1574 to the end of the object (where what is considered an object depends on
1575 OSI->object_size_type).
1576 For allocation GIMPLE_CALL like malloc or calloc object size is the size
1577 of the allocation.
1578 For POINTER_PLUS_EXPR where second operand is a constant integer,
1579 object size is object size of the first operand minus the constant.
1580 If the constant is bigger than the number of remaining bytes until the
1581 end of the object, object size is 0, but if it is instead a pointer
1582 subtraction, object size is size_unknown (object_size_type).
1583 To differentiate addition from subtraction, ADDR_EXPR returns
1584 size_unknown (object_size_type) for all objects bigger than half of the
1585 address space, and constants less than half of the address space are
1586 considered addition, while bigger constants subtraction.
1587 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
1588 object size is object size of that argument.
1589 Otherwise, object size is the maximum of object sizes of variables
1590 that it might be set to. */
1592 static void
1593 collect_object_sizes_for (struct object_size_info *osi, tree var)
1595 int object_size_type = osi->object_size_type;
1596 unsigned int varno = SSA_NAME_VERSION (var);
1597 gimple *stmt;
1598 bool reexamine;
1600 if (bitmap_bit_p (computed[object_size_type], varno))
1601 return;
1603 if (osi->pass == 0)
1605 if (bitmap_set_bit (osi->visited, varno))
1607 /* Initialize to 0 for maximum size and M1U for minimum size so that
1608 it gets immediately overridden. */
1609 object_sizes_initialize (osi, varno,
1610 size_initval (object_size_type),
1611 size_initval (object_size_type));
1613 else
1615 /* Found a dependency loop. Mark the variable for later
1616 re-examination. */
1617 if (object_size_type & OST_DYNAMIC)
1618 object_sizes_set_temp (osi, varno);
1620 bitmap_set_bit (osi->reexamine, varno);
1621 if (dump_file && (dump_flags & TDF_DETAILS))
1623 fprintf (dump_file, "Found a dependency loop at ");
1624 print_generic_expr (dump_file, var, dump_flags);
1625 fprintf (dump_file, "\n");
1627 return;
1631 if (dump_file && (dump_flags & TDF_DETAILS))
1633 fprintf (dump_file, "Visiting use-def links for ");
1634 print_generic_expr (dump_file, var, dump_flags);
1635 fprintf (dump_file, "\n");
1638 stmt = SSA_NAME_DEF_STMT (var);
1639 reexamine = false;
1641 switch (gimple_code (stmt))
1643 case GIMPLE_ASSIGN:
1645 tree rhs = gimple_assign_rhs1 (stmt);
1646 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1647 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1648 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
1649 reexamine = plus_stmt_object_size (osi, var, stmt);
1650 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
1651 reexamine = cond_expr_object_size (osi, var, stmt);
1652 else if (gimple_assign_single_p (stmt)
1653 || gimple_assign_unary_nop_p (stmt))
1655 if (TREE_CODE (rhs) == SSA_NAME
1656 && POINTER_TYPE_P (TREE_TYPE (rhs)))
1657 reexamine = merge_object_sizes (osi, var, rhs);
1658 else
1659 expr_object_size (osi, var, rhs);
1661 else
1662 unknown_object_size (osi, var);
1663 break;
1666 case GIMPLE_CALL:
1668 gcall *call_stmt = as_a <gcall *> (stmt);
1669 tree arg = pass_through_call (call_stmt);
1670 if (arg)
1672 if (TREE_CODE (arg) == SSA_NAME
1673 && POINTER_TYPE_P (TREE_TYPE (arg)))
1674 reexamine = merge_object_sizes (osi, var, arg);
1675 else
1676 expr_object_size (osi, var, arg);
1678 else
1679 call_object_size (osi, var, call_stmt);
1680 break;
1683 case GIMPLE_ASM:
1684 /* Pointers defined by __asm__ statements can point anywhere. */
1685 unknown_object_size (osi, var);
1686 break;
1688 case GIMPLE_NOP:
1689 if (SSA_NAME_VAR (var)
1690 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
1691 parm_object_size (osi, var);
1692 else
1693 /* Uninitialized SSA names point nowhere. */
1694 unknown_object_size (osi, var);
1695 break;
1697 case GIMPLE_PHI:
1699 unsigned i;
1701 if (object_size_type & OST_DYNAMIC)
1703 phi_dynamic_object_size (osi, var);
1704 break;
1707 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1709 tree rhs = gimple_phi_arg (stmt, i)->def;
1711 if (object_sizes_unknown_p (object_size_type, varno))
1712 break;
1714 if (TREE_CODE (rhs) == SSA_NAME)
1715 reexamine |= merge_object_sizes (osi, var, rhs);
1716 else if (osi->pass == 0)
1717 expr_object_size (osi, var, rhs);
1719 break;
1722 default:
1723 gcc_unreachable ();
1726 if (! reexamine || object_sizes_unknown_p (object_size_type, varno))
1728 bitmap_set_bit (computed[object_size_type], varno);
1729 if (!(object_size_type & OST_DYNAMIC))
1730 bitmap_clear_bit (osi->reexamine, varno);
1732 else
1734 bitmap_set_bit (osi->reexamine, varno);
1735 if (dump_file && (dump_flags & TDF_DETAILS))
1737 fprintf (dump_file, "Need to reexamine ");
1738 print_generic_expr (dump_file, var, dump_flags);
1739 fprintf (dump_file, "\n");
1745 /* Helper function for check_for_plus_in_loops. Called recursively
1746 to detect loops. */
1748 static void
1749 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1750 unsigned int depth)
1752 gimple *stmt = SSA_NAME_DEF_STMT (var);
1753 unsigned int varno = SSA_NAME_VERSION (var);
1755 if (osi->depths[varno])
1757 if (osi->depths[varno] != depth)
1759 unsigned int *sp;
1761 /* Found a loop involving pointer addition. */
1762 for (sp = osi->tos; sp > osi->stack; )
1764 --sp;
1765 bitmap_clear_bit (osi->reexamine, *sp);
1766 bitmap_set_bit (computed[osi->object_size_type], *sp);
1767 object_sizes_set (osi, *sp, size_zero_node,
1768 object_sizes_get (osi, *sp, true));
1769 if (*sp == varno)
1770 break;
1773 return;
1775 else if (! bitmap_bit_p (osi->reexamine, varno))
1776 return;
1778 osi->depths[varno] = depth;
1779 *osi->tos++ = varno;
1781 switch (gimple_code (stmt))
1784 case GIMPLE_ASSIGN:
1786 if ((gimple_assign_single_p (stmt)
1787 || gimple_assign_unary_nop_p (stmt))
1788 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1790 tree rhs = gimple_assign_rhs1 (stmt);
1792 check_for_plus_in_loops_1 (osi, rhs, depth);
1794 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1796 tree basevar = gimple_assign_rhs1 (stmt);
1797 tree cst = gimple_assign_rhs2 (stmt);
1799 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1801 check_for_plus_in_loops_1 (osi, basevar,
1802 depth + !integer_zerop (cst));
1804 else
1805 gcc_unreachable ();
1806 break;
1809 case GIMPLE_CALL:
1811 gcall *call_stmt = as_a <gcall *> (stmt);
1812 tree arg = pass_through_call (call_stmt);
1813 if (arg)
1815 if (TREE_CODE (arg) == SSA_NAME)
1816 check_for_plus_in_loops_1 (osi, arg, depth);
1817 else
1818 gcc_unreachable ();
1820 break;
1823 case GIMPLE_PHI:
1825 unsigned i;
1827 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1829 tree rhs = gimple_phi_arg (stmt, i)->def;
1831 if (TREE_CODE (rhs) == SSA_NAME)
1832 check_for_plus_in_loops_1 (osi, rhs, depth);
1834 break;
1837 default:
1838 gcc_unreachable ();
1841 osi->depths[varno] = 0;
1842 osi->tos--;
1846 /* Check if some pointer we are computing object size of is being increased
1847 within a loop. If yes, assume all the SSA variables participating in
1848 that loop have minimum object sizes 0. */
1850 static void
1851 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1853 gimple *stmt = SSA_NAME_DEF_STMT (var);
1855 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1856 and looked for a POINTER_PLUS_EXPR in the pass-through
1857 argument, if any. In GIMPLE, however, such an expression
1858 is not a valid call operand. */
1860 if (is_gimple_assign (stmt)
1861 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1863 tree basevar = gimple_assign_rhs1 (stmt);
1864 tree cst = gimple_assign_rhs2 (stmt);
1866 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1868 /* Skip non-positive offsets. */
1869 if (integer_zerop (cst) || compare_tree_int (cst, offset_limit) > 0)
1870 return;
1872 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1873 *osi->tos++ = SSA_NAME_VERSION (basevar);
1874 check_for_plus_in_loops_1 (osi, var, 2);
1875 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1876 osi->tos--;
1881 /* Initialize data structures for the object size computation. */
1883 void
1884 init_object_sizes (void)
1886 int object_size_type;
1888 if (computed[0])
1889 return;
1891 for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
1893 object_sizes_grow (object_size_type);
1894 computed[object_size_type] = BITMAP_ALLOC (NULL);
1897 init_offset_limit ();
1901 /* Destroy data structures after the object size computation. */
1903 void
1904 fini_object_sizes (void)
1906 int object_size_type;
1908 for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
1910 object_sizes_release (object_size_type);
1911 BITMAP_FREE (computed[object_size_type]);
1915 /* Dummy valueize function. */
1917 static tree
1918 do_valueize (tree t)
1920 return t;
1923 /* Process a __builtin_object_size or __builtin_dynamic_object_size call in
1924 CALL early for subobjects before any object information is lost due to
1925 optimization. Insert a MIN or MAX expression of the result and
1926 __builtin_object_size at I so that it may be processed in the second pass.
1927 __builtin_dynamic_object_size is treated like __builtin_object_size here
1928 since we're only looking for constant bounds. */
1930 static void
1931 early_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
1933 tree ost = gimple_call_arg (call, 1);
1934 tree lhs = gimple_call_lhs (call);
1935 gcc_assert (lhs != NULL_TREE);
1937 if (!tree_fits_uhwi_p (ost))
1938 return;
1940 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
1941 tree ptr = gimple_call_arg (call, 0);
1943 if (object_size_type != 1 && object_size_type != 3)
1944 return;
1946 if (TREE_CODE (ptr) != ADDR_EXPR && TREE_CODE (ptr) != SSA_NAME)
1947 return;
1949 tree type = TREE_TYPE (lhs);
1950 tree bytes;
1951 if (!compute_builtin_object_size (ptr, object_size_type, &bytes)
1952 || !int_fits_type_p (bytes, type))
1953 return;
1955 tree tem = make_ssa_name (type);
1956 gimple_call_set_lhs (call, tem);
1957 enum tree_code code = object_size_type & OST_MINIMUM ? MAX_EXPR : MIN_EXPR;
1958 tree cst = fold_convert (type, bytes);
1959 gimple *g = gimple_build_assign (lhs, code, tem, cst);
1960 gsi_insert_after (i, g, GSI_NEW_STMT);
1961 update_stmt (call);
1964 /* Attempt to fold one __builtin_dynamic_object_size call in CALL into an
1965 expression and insert it at I. Return true if it succeeds. */
1967 static bool
1968 dynamic_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
1970 gcc_assert (gimple_call_num_args (call) == 2);
1972 tree args[2];
1973 args[0] = gimple_call_arg (call, 0);
1974 args[1] = gimple_call_arg (call, 1);
1976 location_t loc = EXPR_LOC_OR_LOC (args[0], input_location);
1977 tree result_type = gimple_call_return_type (as_a <gcall *> (call));
1978 tree result = fold_builtin_call_array (loc, result_type,
1979 gimple_call_fn (call), 2, args);
1981 if (!result)
1982 return false;
1984 /* fold_builtin_call_array may wrap the result inside a
1985 NOP_EXPR. */
1986 STRIP_NOPS (result);
1987 gimplify_and_update_call_from_tree (i, result);
1989 if (dump_file && (dump_flags & TDF_DETAILS))
1991 fprintf (dump_file, "Simplified (dynamic)\n ");
1992 print_gimple_stmt (dump_file, call, 0, dump_flags);
1993 fprintf (dump_file, " to ");
1994 print_generic_expr (dump_file, result);
1995 fprintf (dump_file, "\n");
1997 return true;
2000 static unsigned int
2001 object_sizes_execute (function *fun, bool early)
2003 basic_block bb;
2004 FOR_EACH_BB_FN (bb, fun)
2006 gimple_stmt_iterator i;
2007 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
2009 tree result;
2010 bool dynamic = false;
2012 gimple *call = gsi_stmt (i);
2013 if (gimple_call_builtin_p (call, BUILT_IN_DYNAMIC_OBJECT_SIZE))
2014 dynamic = true;
2015 else if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
2016 continue;
2018 tree lhs = gimple_call_lhs (call);
2019 if (!lhs)
2020 continue;
2022 init_object_sizes ();
2024 /* If early, only attempt to fold
2025 __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
2026 and rather than folding the builtin to the constant if any,
2027 create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
2028 call result and the computed constant. Do the same for
2029 __builtin_dynamic_object_size too. */
2030 if (early)
2032 early_object_sizes_execute_one (&i, call);
2033 continue;
2036 if (dynamic)
2038 if (dynamic_object_sizes_execute_one (&i, call))
2039 continue;
2040 else
2042 /* If we could not find a suitable size expression, lower to
2043 __builtin_object_size so that we may at least get a
2044 constant lower or higher estimate. */
2045 tree bosfn = builtin_decl_implicit (BUILT_IN_OBJECT_SIZE);
2046 gimple_call_set_fndecl (call, bosfn);
2047 update_stmt (call);
2049 if (dump_file && (dump_flags & TDF_DETAILS))
2051 print_generic_expr (dump_file, gimple_call_arg (call, 0),
2052 dump_flags);
2053 fprintf (dump_file,
2054 ": Retrying as __builtin_object_size\n");
2059 result = gimple_fold_stmt_to_constant (call, do_valueize);
2060 if (!result)
2062 tree ost = gimple_call_arg (call, 1);
2064 if (tree_fits_uhwi_p (ost))
2066 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
2068 if (object_size_type & OST_MINIMUM)
2069 result = build_zero_cst (size_type_node);
2070 else if (object_size_type < OST_END)
2071 result = fold_convert (size_type_node,
2072 integer_minus_one_node);
2075 if (!result)
2076 continue;
2079 gcc_assert (TREE_CODE (result) == INTEGER_CST);
2081 if (dump_file && (dump_flags & TDF_DETAILS))
2083 fprintf (dump_file, "Simplified\n ");
2084 print_gimple_stmt (dump_file, call, 0, dump_flags);
2085 fprintf (dump_file, " to ");
2086 print_generic_expr (dump_file, result);
2087 fprintf (dump_file, "\n");
2090 /* Propagate into all uses and fold those stmts. */
2091 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2092 replace_uses_by (lhs, result);
2093 else
2094 replace_call_with_value (&i, result);
2098 fini_object_sizes ();
2099 return 0;
2102 /* Simple pass to optimize all __builtin_object_size () builtins. */
2104 namespace {
2106 const pass_data pass_data_object_sizes =
2108 GIMPLE_PASS, /* type */
2109 "objsz", /* name */
2110 OPTGROUP_NONE, /* optinfo_flags */
2111 TV_NONE, /* tv_id */
2112 ( PROP_cfg | PROP_ssa ), /* properties_required */
2113 PROP_objsz, /* properties_provided */
2114 0, /* properties_destroyed */
2115 0, /* todo_flags_start */
2116 0, /* todo_flags_finish */
2119 class pass_object_sizes : public gimple_opt_pass
2121 public:
2122 pass_object_sizes (gcc::context *ctxt)
2123 : gimple_opt_pass (pass_data_object_sizes, ctxt)
2126 /* opt_pass methods: */
2127 opt_pass * clone () final override { return new pass_object_sizes (m_ctxt); }
2128 unsigned int execute (function *fun) final override
2130 return object_sizes_execute (fun, false);
2132 }; // class pass_object_sizes
2134 } // anon namespace
2136 gimple_opt_pass *
2137 make_pass_object_sizes (gcc::context *ctxt)
2139 return new pass_object_sizes (ctxt);
2142 /* Early version of pass to optimize all __builtin_object_size () builtins. */
2144 namespace {
2146 const pass_data pass_data_early_object_sizes =
2148 GIMPLE_PASS, /* type */
2149 "early_objsz", /* name */
2150 OPTGROUP_NONE, /* optinfo_flags */
2151 TV_NONE, /* tv_id */
2152 ( PROP_cfg | PROP_ssa ), /* properties_required */
2153 0, /* properties_provided */
2154 0, /* properties_destroyed */
2155 0, /* todo_flags_start */
2156 0, /* todo_flags_finish */
2159 class pass_early_object_sizes : public gimple_opt_pass
2161 public:
2162 pass_early_object_sizes (gcc::context *ctxt)
2163 : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
2166 /* opt_pass methods: */
2167 unsigned int execute (function *fun) final override
2169 return object_sizes_execute (fun, true);
2171 }; // class pass_object_sizes
2173 } // anon namespace
2175 gimple_opt_pass *
2176 make_pass_early_object_sizes (gcc::context *ctxt)
2178 return new pass_early_object_sizes (ctxt);