gcc/tree-object-size.c

/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2021 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "tree-object-size.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "stringpool.h"
#include "attribs.h"

struct object_size_info
{
  int object_size_type;
  unsigned char pass;
  bool changed;
  bitmap visited, reexamine;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

enum
{
  OST_SUBOBJECT = 1,
  OST_MINIMUM = 2,
  OST_END = 4,
};

static tree compute_object_offset (const_tree, const_tree);
static bool addr_object_size (struct object_size_info *,
			      const_tree, int, unsigned HOST_WIDE_INT *);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);

/* object_sizes[0] is the upper bound for the number of bytes till the end
   of the object.
   object_sizes[1] is the upper bound for the number of bytes till the end
   of the subobject (innermost array or field with address taken).
   object_sizes[2] is the lower bound for the number of bytes till the end
   of the object and object_sizes[3] the lower bound for the subobject.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[OST_END];

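/* For illustration (an editor's sketch, not part of the original source),
   assuming a 4-byte int and no padding:

     struct V { int i; char buf[8]; int j; } v;    (sizeof (v) == 16)
     char *p = &v.buf[2];

   __builtin_object_size (p, 0) == 10 (bytes till the end of v),
   __builtin_object_size (p, 1) == 6 (bytes till the end of v.buf),
   and modes 2 and 3 are the corresponding lower bounds, which differ from
   the upper bounds only when p might point into one of several objects,
   e.g. after a PHI node merging two pointers.  */
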
/* Bitmaps recording which object sizes have been computed already.  */
static bitmap computed[OST_END];

/* Maximum value of offset we consider to be an addition.  */
static unsigned HOST_WIDE_INT offset_limit;

/* Return the value representing an unknown size: SIZE_MAX (all ones) when
   computing a maximum object size and 0 when computing a minimum, i.e. the
   weakest possible answer in each direction.  */

static inline unsigned HOST_WIDE_INT
unknown (int object_size_type)
{
  /* Bit 1 of OBJECT_SIZE_TYPE is OST_MINIMUM; map it to 0 for minimum
     and to -1 (SIZE_MAX) for maximum.  */
  return ((unsigned HOST_WIDE_INT) -((object_size_type >> 1) ^ 1));
}

/* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names.  */

static inline void
object_sizes_grow (int object_size_type)
{
  if (num_ssa_names > object_sizes[object_size_type].length ())
    object_sizes[object_size_type].safe_grow (num_ssa_names, true);
}

/* Release object_sizes[OBJECT_SIZE_TYPE].  */

static inline void
object_sizes_release (int object_size_type)
{
  object_sizes[object_size_type].release ();
}

/* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown.  */

static inline bool
object_sizes_unknown_p (int object_size_type, unsigned varno)
{
  return (object_sizes[object_size_type][varno]
	  == unknown (object_size_type));
}

/* Return size for VARNO corresponding to OSI.  */

static inline unsigned HOST_WIDE_INT
object_sizes_get (struct object_size_info *osi, unsigned varno)
{
  return object_sizes[osi->object_size_type][varno];
}

/* Set size for VARNO corresponding to OSI to VAL.  */

static inline bool
object_sizes_set_force (struct object_size_info *osi, unsigned varno,
			unsigned HOST_WIDE_INT val)
{
  object_sizes[osi->object_size_type][varno] = val;
  return true;
}

/* Set size for VARNO corresponding to OSI to VAL if it is the new minimum
   or maximum.  */

static inline bool
object_sizes_set (struct object_size_info *osi, unsigned varno,
		  unsigned HOST_WIDE_INT val)
{
  int object_size_type = osi->object_size_type;
  if ((object_size_type & OST_MINIMUM) == 0)
    {
      if (object_sizes[object_size_type][varno] < val)
	return object_sizes_set_force (osi, varno, val);
    }
  else
    {
      if (object_sizes[object_size_type][varno] > val)
	return object_sizes_set_force (osi, varno, val);
    }
  return false;
}

/* Initialize OFFSET_LIMIT variable.  */

static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}

/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      if (! integer_zerop (low_bound))
	t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}

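/* For illustration (an editor's sketch, not part of the original source):
   given

     struct A { int x; int y[4]; } a;

   compute_object_offset on the tree for a.y[2] relative to a recurses
   through the ARRAY_REF into the COMPONENT_REF and folds the offset to
   4 + 2 * 4 == 12 bytes, assuming a 4-byte int.  */
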
/* Returns the size of the object designated by DECL considering its
   initializer if it either has one or if it would not affect its size,
   otherwise the size of the object without the initializer when MIN
   is true, else null.  An object's initializer affects the object's
   size if it's a struct type with a flexible array member.  */

tree
decl_init_size (tree decl, bool min)
{
  tree size = DECL_SIZE_UNIT (decl);
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    return size;

  tree last = last_field (type);
  if (!last)
    return size;

  tree last_type = TREE_TYPE (last);
  if (TREE_CODE (last_type) != ARRAY_TYPE
      || TYPE_SIZE (last_type))
    return size;

  /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
     of the initializer and sometimes doesn't.  */
  size = TYPE_SIZE_UNIT (type);
  tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
  tree compsize = component_ref_size (ref);
  if (!compsize)
    return min ? size : NULL_TREE;

  /* The size includes tail padding and initializer elements.  */
  tree pos = byte_position (last);
  size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
  return size;
}

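/* For illustration (an editor's sketch, not part of the original source):

     struct F { int n; char data[]; };
     static struct F f = { 3, "ab" };

   DECL_SIZE_UNIT may or may not account for the "ab" initializer of the
   flexible array member, so the size is recomputed as byte_position (data)
   plus whatever component_ref_size derives from the initializer.  */
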
/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown (object_size_type).  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type, unsigned HOST_WIDE_INT *psize)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown (object_size_type);

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (!pt_var)
    return false;

  if (TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & OST_SUBOBJECT) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
				       object_size_type & ~OST_SUBOBJECT, &sz);
	}
      else
	{
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes_get (osi, SSA_NAME_VERSION (var));
	  else
	    sz = unknown (object_size_type);
	}
      if (sz != unknown (object_size_type))
	{
	  offset_int mem_offset;
	  if (mem_ref_offset (pt_var).is_constant (&mem_offset))
	    {
	      offset_int dsz = wi::sub (sz, mem_offset);
	      if (wi::neg_p (dsz))
		sz = 0;
	      else if (wi::fits_uhwi_p (dsz))
		sz = dsz.to_uhwi ();
	      else
		sz = unknown (object_size_type);
	    }
	  else
	    sz = unknown (object_size_type);
	}

      if (sz != unknown (object_size_type) && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (DECL_P (pt_var))
    {
      pt_var_size = decl_init_size (pt_var, object_size_type & OST_MINIMUM);
      if (!pt_var_size)
	return false;
    }
  else if (TREE_CODE (pt_var) == STRING_CST)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var_size)
    {
      /* Validate the size determined above.  */
      if (!tree_fits_uhwi_p (pt_var_size)
	  || tree_to_uhwi (pt_var_size) >= offset_limit)
	return false;
    }

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & OST_SUBOBJECT)
	{
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return false;
      else
	var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return false;
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    {
      *psize = tree_to_uhwi (bytes);
      return true;
    }

  return false;
}

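/* For illustration (an editor's sketch, not part of the original source):
   with

     struct S { int i; char c[1]; };
     struct S *p = (struct S *) malloc (32);

   __builtin_object_size (p->c, 1) is not capped at 1 byte: c is the last
   field, so the trailing-array idiom above is honored and the bound comes
   from the whole allocation, whereas a field that is not last would get
   its declared subobject size.  */
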
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles calls to functions declared with attribute alloc_size.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown (object_size_type).  */

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  gcc_assert (is_gimple_call (call));

  tree calltype;
  if (tree callfn = gimple_call_fndecl (call))
    calltype = TREE_TYPE (callfn);
  else
    calltype = gimple_call_fntype (call);

  if (!calltype)
    return unknown (object_size_type);

  /* Set to positions of alloc_size arguments.  */
  int arg1 = -1, arg2 = -1;
  tree alloc_size = lookup_attribute ("alloc_size",
				      TYPE_ATTRIBUTES (calltype));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
	arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int) gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown (object_size_type);

  tree bytes = NULL_TREE;
  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
			fold_convert (sizetype, gimple_call_arg (call, arg1)),
			fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown (object_size_type);
}

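/* For illustration (an editor's sketch, not part of the original source):

     void *my_calloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

     void *p = my_calloc (4, 8);

   The attribute positions are 1-based, so arg1 == 0 and arg2 == 1 here,
   and the computed allocation size is 4 * 8 == 32 bytes.  */
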
/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const gcall *call)
{
  unsigned rf = gimple_call_return_flags (call);
  if (rf & ERF_RETURNS_ARG)
    {
      unsigned argnum = rf & ERF_RETURN_ARG_MASK;
      if (argnum < gimple_call_num_args (call))
	return gimple_call_arg (call, argnum);
    }

  /* __builtin_assume_aligned is intentionally not marked RET1.  */
  if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
    return gimple_call_arg (call, 0);

  return NULL_TREE;
}

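/* For illustration (an editor's sketch, not part of the original source):
   memcpy is known to return its first argument, so for

     char buf[16];
     char *q = (char *) memcpy (buf, src, 8);

   the object size of q is taken from buf (16 bytes) rather than from the
   call itself.  */
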
/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  OBJECT_SIZE_TYPE is the second argument
   to __builtin_object_size.
   Returns true on success and false when the object size could not
   be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
			     unsigned HOST_WIDE_INT *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type < OST_END);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown (object_size_type);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & OST_SUBOBJECT)
	return false;

      /* When not optimizing, rather than failing, make a small effort
	 to determine the object size without the full benefit of
	 the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      tree offset = gimple_assign_rhs2 (def);
	      ptr = gimple_assign_rhs1 (def);

	      if (tree_fits_shwi_p (offset)
		  && compute_builtin_object_size (ptr, object_size_type,
						  psize))
		{
		  /* Return zero when the offset is out of bounds.  */
		  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
		  *psize = off < *psize ? *psize - off : 0;
		  return true;
		}
	    }
	}
      return false;
    }

  struct object_size_info osi;
  osi.object_size_type = object_size_type;
  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      bitmap_iterator bi;
      unsigned int i;

      object_sizes_grow (object_size_type);
      if (dump_file)
	{
	  fprintf (dump_file, "Computing %s %sobject size for ",
		   (object_size_type & OST_MINIMUM) ? "minimum" : "maximum",
		   (object_size_type & OST_SUBOBJECT) ? "sub" : "");
	  print_generic_expr (dump_file, ptr, dump_flags);
	  fprintf (dump_file, ":\n");
	}

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that
	 can be computed.  osi.reexamine bitmap at the end will
	 contain what variables were found in dependency cycles
	 and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
	 that need reexamination, until no object sizes are
	 increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
	{
	  bitmap reexamine = BITMAP_ALLOC (NULL);

	  /* If looking for minimum instead of maximum object size,
	     detect cases where a pointer is increased in a loop.
	     Although even without this detection pass 2 would eventually
	     terminate, it could take a long time.  If a pointer is
	     increasing this way, we need to assume 0 object size.
	     E.g. p = &buf[0]; while (cond) p = p + 4;  */
	  if (object_size_type & OST_MINIMUM)
	    {
	      osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
	      osi.stack = XNEWVEC (unsigned int, num_ssa_names);
	      osi.tos = osi.stack;
	      osi.pass = 1;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  check_for_plus_in_loops (&osi, ssa_name (i));

	      free (osi.depths);
	      osi.depths = NULL;
	      free (osi.stack);
	      osi.stack = NULL;
	      osi.tos = NULL;
	    }

	  do
	    {
	      osi.pass = 2;
	      osi.changed = false;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  {
		    collect_object_sizes_for (&osi, ssa_name (i));
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Reexamining ");
			print_generic_expr (dump_file, ssa_name (i),
					    dump_flags);
			fprintf (dump_file, "\n");
		      }
		  }
	    }
	  while (osi.changed);

	  BITMAP_FREE (reexamine);
	}
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
	{
	  EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
	    if (!object_sizes_unknown_p (object_size_type, i))
	      {
		print_generic_expr (dump_file, ssa_name (i),
				    dump_flags);
		fprintf (dump_file,
			 ": %s %sobject size "
			 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			 ((object_size_type & OST_MINIMUM) ? "minimum"
			  : "maximum"),
			 (object_size_type & OST_SUBOBJECT) ? "sub" : "",
			 object_sizes_get (&osi, i));
	      }
	}

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes_get (&osi, SSA_NAME_VERSION (ptr));
  return *psize != unknown (object_size_type);
}

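/* For illustration (an editor's sketch, not part of the original source):
   a dependency cycle requiring reexamination arises from code such as

     char buf[64];
     char *p = buf;
     while (cond)
       p = p + 4;

   The PHI node for p depends on the POINTER_PLUS_EXPR result and vice
   versa: pass 0 seeds the sizes, pass 2 iterates them to a fixpoint, and
   for minimum sizes pass 1 (check_for_plus_in_loops) forces pointers
   increased in such loops to size 0.  */
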
/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
	      || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    addr_object_size (osi, value, object_size_type, &bytes);
  else
    bytes = unknown (object_size_type);

  object_sizes_set (osi, varno, bytes);
}

/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  object_sizes_set (osi, varno, bytes);
}

/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);

  gcc_checking_assert (!object_sizes_unknown_p (object_size_type, varno));
  gcc_checking_assert (osi->pass == 0);

  object_sizes_set (osi, varno, unknown (object_size_type));
}

/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;
  if (offset >= offset_limit)
    {
      object_sizes_set (osi, varno, unknown (object_size_type));
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes_get (osi, SSA_NAME_VERSION (orig));
  if (orig_bytes != unknown (object_size_type))
    orig_bytes = (offset > orig_bytes)
		 ? HOST_WIDE_INT_0U : orig_bytes - offset;

  if (object_sizes_set (osi, varno, orig_bytes))
    osi->changed = true;

  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}

/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
	bytes = unknown (object_size_type);
      else if (TREE_CODE (op0) == SSA_NAME)
	return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

	  /* op0 will be ADDR_EXPR here.  */
	  addr_object_size (osi, op0, object_size_type, &bytes);
	  if (bytes == unknown (object_size_type))
	    ;
	  else if (off > offset_limit)
	    bytes = unknown (object_size_type);
	  else if (off > bytes)
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown (object_size_type);

  object_sizes_set (osi, varno, bytes);
  return false;
}

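/* For illustration (an editor's sketch, not part of the original source):

     char buf[16];
     char *p = &buf[0] + 4;    (bytes == 16 - 4 == 12)
     char *q = p + 20;         (20 > 12, so bytes == 0)

   A constant offset of more than half the address space is instead
   treated as a pointer subtraction and yields an unknown size.  */
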
/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (object_sizes_unknown_p (object_size_type, varno))
    return reexamine;

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

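/* For illustration (an editor's sketch, not part of the original source):
   for

     char a[8], b[32];
     char *p = cond ? a : b;

   the merge keeps the extreme value appropriate for OBJECT_SIZE_TYPE:
   the maximum object size of p is 32 and the minimum is 8.  */
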
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc, the object size is
   the size of the allocation.
   For POINTER_PLUS_EXPR where the second operand is a constant integer,
   the object size is the object size of the first operand minus the
   constant.  If the constant is bigger than the number of remaining bytes
   until the end of the object, the object size is 0, but if it is instead
   a pointer subtraction, the object size is unknown (object_size_type).
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown (object_size_type) for all objects bigger than half of the
   address space; constants less than half of the address space are
   considered addition, while bigger constants are treated as subtraction.
   For a memcpy-like GIMPLE_CALL that always returns one of its arguments,
   the object size is the object size of that argument.
   Otherwise, the object size is the maximum of the object sizes of the
   variables that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* Initialize to 0 for maximum size and M1U for minimum size so
	     that it gets immediately overridden.  */
	  object_sizes_set_force (osi, varno,
				  unknown (object_size_type ^ OST_MINIMUM));
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs, 0);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg, 0);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, call_stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes_set (osi, varno, unknown (object_size_type));
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
	/* Uninitialized SSA names point nowhere.  */
	object_sizes_set (osi, varno, unknown (object_size_type));
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (object_sizes_unknown_p (object_size_type, varno))
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine || object_sizes_unknown_p (object_size_type, varno))
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}

/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes_set_force (osi, *sp, 0);
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}

/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
	return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}

/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
    {
      object_sizes_grow (object_size_type);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}

/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
    {
      object_sizes_release (object_size_type);
      BITMAP_FREE (computed[object_size_type]);
    }
}

/* Dummy valueize function.  */

static tree
do_valueize (tree t)
{
  return t;
}

/* Fold calls to __builtin_object_size in function FUN to constants where
   possible.  */

static unsigned int
object_sizes_execute (function *fun, bool insert_min_max_p)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  gimple *call = gsi_stmt (i);
	  if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  init_object_sizes ();

	  /* If insert_min_max_p, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
	     and rather than folding the builtin to the constant if any,
	     create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
	     call result and the computed constant.  */
	  if (insert_min_max_p)
	    {
	      tree ost = gimple_call_arg (call, 1);
	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
		  tree ptr = gimple_call_arg (call, 0);
		  if ((object_size_type & OST_SUBOBJECT)
		      && (TREE_CODE (ptr) == ADDR_EXPR
			  || TREE_CODE (ptr) == SSA_NAME))
		    {
		      tree type = TREE_TYPE (lhs);
		      unsigned HOST_WIDE_INT bytes;
		      if (compute_builtin_object_size (ptr, object_size_type,
						       &bytes)
			  && wi::fits_to_tree_p (bytes, type))
			{
			  tree tem = make_ssa_name (type);
			  gimple_call_set_lhs (call, tem);
			  enum tree_code code
			    = (object_size_type & OST_MINIMUM
			       ? MAX_EXPR : MIN_EXPR);
			  tree cst = build_int_cstu (type, bytes);
			  gimple *g
			    = gimple_build_assign (lhs, code, tem, cst);
			  gsi_insert_after (&i, g, GSI_NEW_STMT);
			  update_stmt (call);
			}
		    }
		}
	      continue;
	    }

	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      tree ost = gimple_call_arg (call, 1);

	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  if (object_size_type & OST_MINIMUM)
		    result = build_zero_cst (size_type_node);
		  else if (object_size_type < OST_END)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		}

	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    replace_uses_by (lhs, result);
	  else
	    replace_call_with_value (&i, result);
	}
    }

  fini_object_sizes ();
  return 0;
}

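/* For illustration (an editor's sketch of the insert_min_max_p mode, not
   part of the original source): early_objsz rewrites

     sz_1 = __builtin_object_size (&var.field, 1);

   into

     tem_2 = __builtin_object_size (&var.field, 1);
     sz_1 = MIN_EXPR <tem_2, CST>;

   recording the subobject bound CST while it is still visible in the IL,
   yet leaving the call for the later objsz pass to refine.  */
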
/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_objsz, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
 public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  virtual unsigned int execute (function *fun)
  {
    return object_sizes_execute (fun, false);
  }
}; // class pass_object_sizes

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}

/* Early version of pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_early_object_sizes =
{
  GIMPLE_PASS, /* type */
  "early_objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_early_object_sizes : public gimple_opt_pass
{
 public:
  pass_early_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *fun)
  {
    return object_sizes_execute (fun, true);
  }
}; // class pass_early_object_sizes

} // anon namespace

gimple_opt_pass *
make_pass_early_object_sizes (gcc::context *ctxt)
{
  return new pass_early_object_sizes (ctxt);
}