gcc/tree-object-size.c
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2021 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35 #include "stringpool.h"
36 #include "attribs.h"
37 #include "builtins.h"
39 struct object_size_info
41 int object_size_type;
42 unsigned char pass;
43 bool changed;
44 bitmap visited, reexamine;
45 unsigned int *depths;
46 unsigned int *stack, *tos;
49 struct GTY(()) object_size
51 /* Estimate of bytes till the end of the object. */
52 tree size;
53 /* Estimate of the size of the whole object. */
54 tree wholesize;
57 static tree compute_object_offset (const_tree, const_tree);
58 static bool addr_object_size (struct object_size_info *,
59 const_tree, int, tree *, tree *t = NULL);
60 static tree alloc_object_size (const gcall *, int);
61 static tree pass_through_call (const gcall *);
62 static void collect_object_sizes_for (struct object_size_info *, tree);
63 static void expr_object_size (struct object_size_info *, tree, tree);
64 static bool merge_object_sizes (struct object_size_info *, tree, tree);
65 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
66 static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
67 static void init_offset_limit (void);
68 static void check_for_plus_in_loops (struct object_size_info *, tree);
69 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
70 unsigned int);
72 /* object_sizes[0] is upper bound for the object size and number of bytes till
73 the end of the object.
74 object_sizes[1] is upper bound for the object size and number of bytes till
75 the end of the subobject (innermost array or field with address taken).
76 object_sizes[2] is lower bound for the object size and number of bytes till
77 the end of the object and object_sizes[3] lower bound for subobject. */
78 static vec<object_size> object_sizes[OST_END];
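/* Illustrative sketch (editorial addition, not part of this file): how the
   four variants differ for a user-side query, assuming 4-byte int and no
   padding in the hypothetical struct below.

     struct S { int i; char buf[8]; int j; } s;
     char *p = &s.buf[2];

     __builtin_object_size (p, 0)   -> 10   maximum, bytes to end of S
     __builtin_object_size (p, 1)   ->  6   maximum, bytes to end of BUF
     __builtin_object_size (p, 2)   -> 10   minimum, whole object
     __builtin_object_size (p, 3)   ->  6   minimum, subobject

   object_sizes[0..3] cache exactly these four kinds of results per SSA
   name.  */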
 80 /* Bitmaps recording which object sizes have been computed already. */
81 static bitmap computed[OST_END];
83 /* Maximum value of offset we consider to be addition. */
84 static unsigned HOST_WIDE_INT offset_limit;
 86 /* Return true if VAL represents an unknown size for OBJECT_SIZE_TYPE. */
88 static inline bool
89 size_unknown_p (tree val, int object_size_type)
91 return ((object_size_type & OST_MINIMUM)
92 ? integer_zerop (val) : integer_all_onesp (val));
95 /* Return a tree with initial value for OBJECT_SIZE_TYPE. */
97 static inline tree
98 size_initval (int object_size_type)
100 return ((object_size_type & OST_MINIMUM)
101 ? TYPE_MAX_VALUE (sizetype) : size_zero_node);
104 /* Return a tree with unknown value for OBJECT_SIZE_TYPE. */
106 static inline tree
107 size_unknown (int object_size_type)
109 return ((object_size_type & OST_MINIMUM)
110 ? size_zero_node : TYPE_MAX_VALUE (sizetype));
113 /* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names. */
115 static inline void
116 object_sizes_grow (int object_size_type)
118 if (num_ssa_names > object_sizes[object_size_type].length ())
119 object_sizes[object_size_type].safe_grow (num_ssa_names, true);
122 /* Release object_sizes[OBJECT_SIZE_TYPE]. */
124 static inline void
125 object_sizes_release (int object_size_type)
127 object_sizes[object_size_type].release ();
130 /* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown. */
132 static inline bool
133 object_sizes_unknown_p (int object_size_type, unsigned varno)
135 return size_unknown_p (object_sizes[object_size_type][varno].size,
136 object_size_type);
139 /* Return size for VARNO corresponding to OSI. If WHOLE is true, return the
140 whole object size. */
142 static inline tree
143 object_sizes_get (struct object_size_info *osi, unsigned varno,
144 bool whole = false)
146 if (whole)
147 return object_sizes[osi->object_size_type][varno].wholesize;
148 else
149 return object_sizes[osi->object_size_type][varno].size;
 152 /* Initialize the size for VARNO corresponding to OSI to VAL and its whole size to WHOLEVAL. */
154 static inline void
155 object_sizes_initialize (struct object_size_info *osi, unsigned varno,
156 tree val, tree wholeval)
158 int object_size_type = osi->object_size_type;
160 object_sizes[object_size_type][varno].size = val;
161 object_sizes[object_size_type][varno].wholesize = wholeval;
164 /* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
165 maximum. */
167 static inline bool
168 object_sizes_set (struct object_size_info *osi, unsigned varno, tree val,
169 tree wholeval)
171 int object_size_type = osi->object_size_type;
172 object_size osize = object_sizes[object_size_type][varno];
174 tree oldval = osize.size;
175 tree old_wholeval = osize.wholesize;
177 enum tree_code code = object_size_type & OST_MINIMUM ? MIN_EXPR : MAX_EXPR;
179 val = size_binop (code, val, oldval);
180 wholeval = size_binop (code, wholeval, old_wholeval);
182 object_sizes[object_size_type][varno].size = val;
183 object_sizes[object_size_type][varno].wholesize = wholeval;
184 return (tree_int_cst_compare (oldval, val) != 0
185 || tree_int_cst_compare (old_wholeval, wholeval) != 0);
188 /* Initialize OFFSET_LIMIT variable. */
189 static void
190 init_offset_limit (void)
192 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
193 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
194 else
195 offset_limit = -1;
196 offset_limit /= 2;
 199 /* Return the bytes remaining at the end of an object of size SZ after a pointer
 200 has been advanced by OFFSET.  If WHOLESIZE is not NULL_TREE, use it to get the net
 201 offset of the pointer, which should always be positive and hence be within OFFSET_LIMIT for valid offsets. */
203 static tree
204 size_for_offset (tree sz, tree offset, tree wholesize = NULL_TREE)
206 gcc_checking_assert (TREE_CODE (offset) == INTEGER_CST);
207 gcc_checking_assert (TREE_CODE (sz) == INTEGER_CST);
208 gcc_checking_assert (types_compatible_p (TREE_TYPE (sz), sizetype));
210 /* For negative offsets, if we have a distinct WHOLESIZE, use it to get a net
211 offset from the whole object. */
212 if (wholesize && tree_int_cst_compare (sz, wholesize))
214 gcc_checking_assert (TREE_CODE (wholesize) == INTEGER_CST);
215 gcc_checking_assert (types_compatible_p (TREE_TYPE (wholesize),
216 sizetype));
218 /* Restructure SZ - OFFSET as
219 WHOLESIZE - (WHOLESIZE + OFFSET - SZ) so that the offset part, i.e.
220 WHOLESIZE + OFFSET - SZ is only allowed to be positive. */
221 tree tmp = size_binop (MAX_EXPR, wholesize, sz);
222 offset = fold_build2 (PLUS_EXPR, sizetype, tmp, offset);
223 offset = fold_build2 (MINUS_EXPR, sizetype, offset, sz);
224 sz = tmp;
227 /* Safe to convert now, since a valid net offset should be non-negative. */
228 if (!types_compatible_p (TREE_TYPE (offset), sizetype))
 229 offset = fold_convert (sizetype, offset);
231 if (integer_zerop (offset))
232 return sz;
234 /* Negative or too large offset even after adjustment, cannot be within
235 bounds of an object. */
236 if (compare_tree_int (offset, offset_limit) > 0)
237 return size_zero_node;
239 return size_binop (MINUS_EXPR, size_binop (MAX_EXPR, sz, offset), offset);
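/* Worked example (editorial sketch, hypothetical values): with SZ = 6,
   WHOLESIZE = 16 and OFFSET = -4 (the pointer moves back towards the start
   of the object), the restructuring above computes, in sizetype modular
   arithmetic:

     tmp    = MAX (16, 6)      = 16
     offset = 16 + (-4) - 6    =  6   net offset from the whole object
     result = MAX (16, 6) - 6  = 10

   i.e. 10 bytes remain between the adjusted pointer and the end of the
   object, whereas a plain SZ - OFFSET would have had to reason about a
   negative offset in unsigned arithmetic.  */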
242 /* Compute offset of EXPR within VAR. Return error_mark_node
243 if unknown. */
245 static tree
246 compute_object_offset (const_tree expr, const_tree var)
248 enum tree_code code = PLUS_EXPR;
249 tree base, off, t;
251 if (expr == var)
252 return size_zero_node;
254 switch (TREE_CODE (expr))
256 case COMPONENT_REF:
257 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
258 if (base == error_mark_node)
259 return base;
261 t = TREE_OPERAND (expr, 1);
262 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
263 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
264 / BITS_PER_UNIT));
265 break;
267 case REALPART_EXPR:
268 CASE_CONVERT:
269 case VIEW_CONVERT_EXPR:
270 case NON_LVALUE_EXPR:
271 return compute_object_offset (TREE_OPERAND (expr, 0), var);
273 case IMAGPART_EXPR:
274 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
275 if (base == error_mark_node)
276 return base;
278 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
279 break;
281 case ARRAY_REF:
282 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
283 if (base == error_mark_node)
284 return base;
286 t = TREE_OPERAND (expr, 1);
287 tree low_bound, unit_size;
288 low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
289 unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
290 if (! integer_zerop (low_bound))
291 t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
292 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
294 code = MINUS_EXPR;
295 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
297 t = fold_convert (sizetype, t);
298 off = size_binop (MULT_EXPR, unit_size, t);
299 break;
301 case MEM_REF:
302 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
303 return wide_int_to_tree (sizetype, mem_ref_offset (expr));
305 default:
306 return error_mark_node;
309 return size_binop (code, base, off);
 312 /* Return the size of the object designated by DECL, taking its initializer
 313 into account if it has one or if the initializer would not affect the size.
 314 Otherwise return the size of the object without the initializer when MIN
 315 is true, else null. An object's initializer affects the object's
 316 size if it's a struct type with a flexible array member. */
318 tree
319 decl_init_size (tree decl, bool min)
321 tree size = DECL_SIZE_UNIT (decl);
322 tree type = TREE_TYPE (decl);
323 if (TREE_CODE (type) != RECORD_TYPE)
324 return size;
326 tree last = last_field (type);
327 if (!last)
328 return size;
330 tree last_type = TREE_TYPE (last);
331 if (TREE_CODE (last_type) != ARRAY_TYPE
332 || TYPE_SIZE (last_type))
333 return size;
335 /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
336 of the initializer and sometimes doesn't. */
337 size = TYPE_SIZE_UNIT (type);
338 tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
339 tree compsize = component_ref_size (ref);
340 if (!compsize)
341 return min ? size : NULL_TREE;
343 /* The size includes tail padding and initializer elements. */
344 tree pos = byte_position (last);
345 size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
346 return size;
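/* Illustrative example (editorial sketch): the initializer matters when the
   last field is a flexible array member.  Using the GNU extension that
   allows initializing such a member:

     struct flex { int n; char data[]; };
     struct flex f = { 2, { 'a', 'b' } };

   TYPE_SIZE of DATA is null, so the size of F is recomputed above as the
   byte position of DATA plus component_ref_size of the reference, which
   accounts for the two initialized bytes (plus any tail padding).  */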
 349 /* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
350 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
351 If unknown, return size_unknown (object_size_type). */
353 static bool
354 addr_object_size (struct object_size_info *osi, const_tree ptr,
355 int object_size_type, tree *psize, tree *pwholesize)
357 tree pt_var, pt_var_size = NULL_TREE, pt_var_wholesize = NULL_TREE;
358 tree var_size, bytes, wholebytes;
360 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
362 /* Set to unknown and overwrite just before returning if the size
363 could be determined. */
364 *psize = size_unknown (object_size_type);
365 if (pwholesize)
366 *pwholesize = size_unknown (object_size_type);
368 pt_var = TREE_OPERAND (ptr, 0);
369 while (handled_component_p (pt_var))
370 pt_var = TREE_OPERAND (pt_var, 0);
372 if (!pt_var)
373 return false;
375 if (TREE_CODE (pt_var) == MEM_REF)
377 tree sz, wholesize;
379 if (!osi || (object_size_type & OST_SUBOBJECT) != 0
380 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
382 compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
383 object_size_type & ~OST_SUBOBJECT, &sz);
384 wholesize = sz;
386 else
388 tree var = TREE_OPERAND (pt_var, 0);
389 if (osi->pass == 0)
390 collect_object_sizes_for (osi, var);
391 if (bitmap_bit_p (computed[object_size_type],
392 SSA_NAME_VERSION (var)))
394 sz = object_sizes_get (osi, SSA_NAME_VERSION (var));
395 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (var), true);
397 else
398 sz = wholesize = size_unknown (object_size_type);
400 if (!size_unknown_p (sz, object_size_type))
402 tree offset = TREE_OPERAND (pt_var, 1);
403 if (TREE_CODE (offset) != INTEGER_CST
404 || TREE_CODE (sz) != INTEGER_CST)
405 sz = wholesize = size_unknown (object_size_type);
406 else
407 sz = size_for_offset (sz, offset, wholesize);
410 if (!size_unknown_p (sz, object_size_type)
411 && TREE_CODE (sz) == INTEGER_CST
412 && compare_tree_int (sz, offset_limit) < 0)
414 pt_var_size = sz;
415 pt_var_wholesize = wholesize;
418 else if (DECL_P (pt_var))
420 pt_var_size = pt_var_wholesize
421 = decl_init_size (pt_var, object_size_type & OST_MINIMUM);
422 if (!pt_var_size)
423 return false;
425 else if (TREE_CODE (pt_var) == STRING_CST)
426 pt_var_size = pt_var_wholesize = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
427 else
428 return false;
430 if (pt_var_size)
432 /* Validate the size determined above. */
433 if (compare_tree_int (pt_var_size, offset_limit) >= 0)
434 return false;
437 if (pt_var != TREE_OPERAND (ptr, 0))
439 tree var;
441 if (object_size_type & OST_SUBOBJECT)
443 var = TREE_OPERAND (ptr, 0);
445 while (var != pt_var
446 && TREE_CODE (var) != BIT_FIELD_REF
447 && TREE_CODE (var) != COMPONENT_REF
448 && TREE_CODE (var) != ARRAY_REF
449 && TREE_CODE (var) != ARRAY_RANGE_REF
450 && TREE_CODE (var) != REALPART_EXPR
451 && TREE_CODE (var) != IMAGPART_EXPR)
452 var = TREE_OPERAND (var, 0);
453 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
454 var = TREE_OPERAND (var, 0);
455 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
456 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
457 || (pt_var_size
458 && tree_int_cst_lt (pt_var_size,
459 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
460 var = pt_var;
461 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
463 tree v = var;
464 /* For &X->fld, compute object size only if fld isn't the last
465 field, as struct { int i; char c[1]; } is often used instead
466 of flexible array member. */
467 while (v && v != pt_var)
468 switch (TREE_CODE (v))
470 case ARRAY_REF:
471 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
472 && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
474 tree domain
475 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
476 if (domain
477 && TYPE_MAX_VALUE (domain)
478 && TREE_CODE (TYPE_MAX_VALUE (domain))
479 == INTEGER_CST
480 && tree_int_cst_lt (TREE_OPERAND (v, 1),
481 TYPE_MAX_VALUE (domain)))
483 v = NULL_TREE;
484 break;
487 v = TREE_OPERAND (v, 0);
488 break;
489 case REALPART_EXPR:
490 case IMAGPART_EXPR:
491 v = NULL_TREE;
492 break;
493 case COMPONENT_REF:
494 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
496 v = NULL_TREE;
497 break;
499 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
500 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
501 != UNION_TYPE
502 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
503 != QUAL_UNION_TYPE)
504 break;
505 else
506 v = TREE_OPERAND (v, 0);
507 if (TREE_CODE (v) == COMPONENT_REF
508 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
509 == RECORD_TYPE)
511 tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
512 for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
513 if (TREE_CODE (fld_chain) == FIELD_DECL)
514 break;
516 if (fld_chain)
518 v = NULL_TREE;
519 break;
521 v = TREE_OPERAND (v, 0);
523 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
524 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
525 != UNION_TYPE
526 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
527 != QUAL_UNION_TYPE)
528 break;
529 else
530 v = TREE_OPERAND (v, 0);
531 if (v != pt_var)
532 v = NULL_TREE;
533 else
534 v = pt_var;
535 break;
536 default:
537 v = pt_var;
538 break;
540 if (v == pt_var)
541 var = pt_var;
544 else
545 var = pt_var;
547 if (var != pt_var)
548 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
549 else if (!pt_var_size)
550 return false;
551 else
552 var_size = pt_var_size;
553 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
554 if (bytes != error_mark_node)
556 if (TREE_CODE (bytes) == INTEGER_CST
557 && tree_int_cst_lt (var_size, bytes))
558 bytes = size_zero_node;
559 else
560 bytes = size_binop (MINUS_EXPR, var_size, bytes);
562 if (var != pt_var
563 && pt_var_size
564 && TREE_CODE (pt_var) == MEM_REF
565 && bytes != error_mark_node)
567 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
568 if (bytes2 != error_mark_node)
570 if (TREE_CODE (bytes2) == INTEGER_CST
571 && tree_int_cst_lt (pt_var_size, bytes2))
572 bytes2 = size_zero_node;
573 else
574 bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
575 bytes = size_binop (MIN_EXPR, bytes, bytes2);
579 wholebytes
580 = object_size_type & OST_SUBOBJECT ? var_size : pt_var_wholesize;
582 else if (!pt_var_size)
583 return false;
584 else
586 bytes = pt_var_size;
587 wholebytes = pt_var_wholesize;
590 if (TREE_CODE (bytes) != INTEGER_CST
591 || TREE_CODE (wholebytes) != INTEGER_CST)
592 return false;
594 *psize = bytes;
595 if (pwholesize)
596 *pwholesize = wholebytes;
597 return true;
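/* Illustrative example (editorial sketch, hypothetical names) of the
   last-field special case above: for

     struct str { int len; char data[1]; };
     struct str *p = malloc (sizeof (struct str) + 100);
     char *q = &p->data[0];

   DATA is the last field of STR, so the subobject size of Q is deliberately
   not clamped to 1 byte; the one-element trailing array idiom predates
   flexible array members and is still widespread.  A pointer into a
   non-last field would instead be limited to that field's type size.  */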
601 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
602 Handles calls to functions declared with attribute alloc_size.
603 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
604 If unknown, return size_unknown (object_size_type). */
606 static tree
607 alloc_object_size (const gcall *call, int object_size_type)
609 gcc_assert (is_gimple_call (call));
611 tree calltype;
612 if (tree callfn = gimple_call_fndecl (call))
613 calltype = TREE_TYPE (callfn);
614 else
615 calltype = gimple_call_fntype (call);
617 if (!calltype)
618 return size_unknown (object_size_type);
620 /* Set to positions of alloc_size arguments. */
621 int arg1 = -1, arg2 = -1;
622 tree alloc_size = lookup_attribute ("alloc_size",
623 TYPE_ATTRIBUTES (calltype));
624 if (alloc_size && TREE_VALUE (alloc_size))
626 tree p = TREE_VALUE (alloc_size);
628 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
629 if (TREE_CHAIN (p))
630 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
633 if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
634 || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
635 || (arg2 >= 0
636 && (arg2 >= (int)gimple_call_num_args (call)
637 || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
638 return size_unknown (object_size_type);
640 tree bytes = NULL_TREE;
641 if (arg2 >= 0)
642 bytes = size_binop (MULT_EXPR,
643 fold_convert (sizetype, gimple_call_arg (call, arg1)),
644 fold_convert (sizetype, gimple_call_arg (call, arg2)));
645 else if (arg1 >= 0)
646 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
648 return bytes;
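/* Illustrative example (editorial sketch, hypothetical declarations): the
   alloc_size attribute is what drives the computation above.

     void *my_alloc (size_t n) __attribute__ ((alloc_size (1)));
     void *my_calloc (size_t n, size_t m) __attribute__ ((alloc_size (1, 2)));

     char *p = my_alloc (32);       __builtin_object_size (p, 0) == 32
     char *q = my_calloc (4, 8);    __builtin_object_size (q, 0) == 32

   Only INTEGER_CST arguments are accepted here, so a variable argument
   leaves the size unknown for the static variants.  */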
652 /* If object size is propagated from one of function's arguments directly
653 to its return value, return that argument for GIMPLE_CALL statement CALL.
654 Otherwise return NULL. */
656 static tree
657 pass_through_call (const gcall *call)
659 unsigned rf = gimple_call_return_flags (call);
660 if (rf & ERF_RETURNS_ARG)
662 unsigned argnum = rf & ERF_RETURN_ARG_MASK;
663 if (argnum < gimple_call_num_args (call))
664 return gimple_call_arg (call, argnum);
667 /* __builtin_assume_aligned is intentionally not marked RET1. */
668 if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
669 return gimple_call_arg (call, 0);
671 return NULL_TREE;
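/* Illustrative example (editorial sketch): calls that return one of their
   arguments simply forward that argument's object size, e.g.

     char src[8], buf[16];
     char *p = memcpy (buf, src, sizeof src);
     __builtin_object_size (p, 0) == 16

   because memcpy is known (via ERF_RETURNS_ARG) to return its first
   argument.  __builtin_assume_aligned is handled the same way even though
   it is not marked RET1.  */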
675 /* Compute __builtin_object_size value for PTR and set *PSIZE to
676 the resulting value. If the declared object is known and PDECL
677 is nonnull, sets *PDECL to the object's DECL. OBJECT_SIZE_TYPE
678 is the second argument to __builtin_object_size.
679 Returns true on success and false when the object size could not
680 be determined. */
682 bool
683 compute_builtin_object_size (tree ptr, int object_size_type,
684 tree *psize)
686 gcc_assert (object_size_type >= 0 && object_size_type < OST_END);
688 /* Set to unknown and overwrite just before returning if the size
689 could be determined. */
690 *psize = size_unknown (object_size_type);
692 if (! offset_limit)
693 init_offset_limit ();
695 if (TREE_CODE (ptr) == ADDR_EXPR)
696 return addr_object_size (NULL, ptr, object_size_type, psize);
698 if (TREE_CODE (ptr) != SSA_NAME
699 || !POINTER_TYPE_P (TREE_TYPE (ptr)))
700 return false;
702 if (computed[object_size_type] == NULL)
704 if (optimize || object_size_type & OST_SUBOBJECT)
705 return false;
707 /* When not optimizing, rather than failing, make a small effort
708 to determine the object size without the full benefit of
709 the (costly) computation below. */
710 gimple *def = SSA_NAME_DEF_STMT (ptr);
711 if (gimple_code (def) == GIMPLE_ASSIGN)
713 tree_code code = gimple_assign_rhs_code (def);
714 if (code == POINTER_PLUS_EXPR)
716 tree offset = gimple_assign_rhs2 (def);
717 ptr = gimple_assign_rhs1 (def);
719 if (tree_fits_shwi_p (offset)
720 && compute_builtin_object_size (ptr, object_size_type,
721 psize))
723 /* Return zero when the offset is out of bounds. */
724 *psize = size_for_offset (*psize, offset);
725 return true;
729 return false;
732 struct object_size_info osi;
733 osi.object_size_type = object_size_type;
734 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
736 bitmap_iterator bi;
737 unsigned int i;
739 object_sizes_grow (object_size_type);
740 if (dump_file)
742 fprintf (dump_file, "Computing %s %s%sobject size for ",
743 (object_size_type & OST_MINIMUM) ? "minimum" : "maximum",
744 (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
745 (object_size_type & OST_SUBOBJECT) ? "sub" : "");
746 print_generic_expr (dump_file, ptr, dump_flags);
747 fprintf (dump_file, ":\n");
750 osi.visited = BITMAP_ALLOC (NULL);
751 osi.reexamine = BITMAP_ALLOC (NULL);
752 osi.depths = NULL;
753 osi.stack = NULL;
754 osi.tos = NULL;
756 /* First pass: walk UD chains, compute object sizes that
757 can be computed. osi.reexamine bitmap at the end will
758 contain what variables were found in dependency cycles
759 and therefore need to be reexamined. */
760 osi.pass = 0;
761 osi.changed = false;
762 collect_object_sizes_for (&osi, ptr);
764 /* Second pass: keep recomputing object sizes of variables
765 that need reexamination, until no object sizes are
766 increased or all object sizes are computed. */
767 if (! bitmap_empty_p (osi.reexamine))
769 bitmap reexamine = BITMAP_ALLOC (NULL);
771 /* If looking for minimum instead of maximum object size,
772 detect cases where a pointer is increased in a loop.
773 Although even without this detection pass 2 would eventually
774 terminate, it could take a long time. If a pointer is
775 increasing this way, we need to assume 0 object size.
776 E.g. p = &buf[0]; while (cond) p = p + 4; */
777 if (object_size_type & OST_MINIMUM)
779 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
780 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
781 osi.tos = osi.stack;
782 osi.pass = 1;
783 /* collect_object_sizes_for is changing
784 osi.reexamine bitmap, so iterate over a copy. */
785 bitmap_copy (reexamine, osi.reexamine);
786 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
787 if (bitmap_bit_p (osi.reexamine, i))
788 check_for_plus_in_loops (&osi, ssa_name (i));
790 free (osi.depths);
791 osi.depths = NULL;
792 free (osi.stack);
793 osi.stack = NULL;
794 osi.tos = NULL;
799 osi.pass = 2;
800 osi.changed = false;
801 /* collect_object_sizes_for is changing
802 osi.reexamine bitmap, so iterate over a copy. */
803 bitmap_copy (reexamine, osi.reexamine);
804 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
805 if (bitmap_bit_p (osi.reexamine, i))
807 collect_object_sizes_for (&osi, ssa_name (i));
808 if (dump_file && (dump_flags & TDF_DETAILS))
810 fprintf (dump_file, "Reexamining ");
811 print_generic_expr (dump_file, ssa_name (i),
812 dump_flags);
813 fprintf (dump_file, "\n");
817 while (osi.changed);
819 BITMAP_FREE (reexamine);
821 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
822 bitmap_set_bit (computed[object_size_type], i);
824 /* Debugging dumps. */
825 if (dump_file)
827 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
828 if (!object_sizes_unknown_p (object_size_type, i))
830 print_generic_expr (dump_file, ssa_name (i),
831 dump_flags);
832 fprintf (dump_file,
833 ": %s %s%sobject size ",
834 ((object_size_type & OST_MINIMUM) ? "minimum"
835 : "maximum"),
836 (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
837 (object_size_type & OST_SUBOBJECT) ? "sub" : "");
838 print_generic_expr (dump_file, object_sizes_get (&osi, i),
839 dump_flags);
840 fprintf (dump_file, "\n");
844 BITMAP_FREE (osi.reexamine);
845 BITMAP_FREE (osi.visited);
848 *psize = object_sizes_get (&osi, SSA_NAME_VERSION (ptr));
849 return !size_unknown_p (*psize, object_size_type);
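/* Illustrative example (editorial sketch) of the no-optimization shortcut
   above: even without the SSA propagation machinery, one POINTER_PLUS_EXPR
   step can be peeled off.  Assuming the front end emits P as BUF's address
   plus a constant offset:

     char buf[32];
     char *p = buf + 10;
     __builtin_object_size (p, 0)    can still evaluate to 22 at -O0

   while anything requiring the full use-def walk gives up and returns the
   unknown value.  */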
852 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
854 static void
855 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
857 int object_size_type = osi->object_size_type;
858 unsigned int varno = SSA_NAME_VERSION (ptr);
859 tree bytes, wholesize;
861 gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
862 gcc_assert (osi->pass == 0);
864 if (TREE_CODE (value) == WITH_SIZE_EXPR)
865 value = TREE_OPERAND (value, 0);
867 /* Pointer variables should have been handled by merge_object_sizes. */
868 gcc_assert (TREE_CODE (value) != SSA_NAME
869 || !POINTER_TYPE_P (TREE_TYPE (value)));
871 if (TREE_CODE (value) == ADDR_EXPR)
872 addr_object_size (osi, value, object_size_type, &bytes, &wholesize);
873 else
874 bytes = wholesize = size_unknown (object_size_type);
876 object_sizes_set (osi, varno, bytes, wholesize);
880 /* Compute object_sizes for PTR, defined to the result of a call. */
882 static void
883 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
885 int object_size_type = osi->object_size_type;
886 unsigned int varno = SSA_NAME_VERSION (ptr);
888 gcc_assert (is_gimple_call (call));
890 gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
891 gcc_assert (osi->pass == 0);
892 tree bytes = alloc_object_size (call, object_size_type);
894 object_sizes_set (osi, varno, bytes, bytes);
898 /* Compute object_sizes for PTR, defined to an unknown value. */
900 static void
901 unknown_object_size (struct object_size_info *osi, tree ptr)
903 int object_size_type = osi->object_size_type;
904 unsigned int varno = SSA_NAME_VERSION (ptr);
906 gcc_checking_assert (!object_sizes_unknown_p (object_size_type, varno));
907 gcc_checking_assert (osi->pass == 0);
908 tree bytes = size_unknown (object_size_type);
910 object_sizes_set (osi, varno, bytes, bytes);
914 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
915 the object size might need reexamination later. */
917 static bool
918 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig)
920 int object_size_type = osi->object_size_type;
921 unsigned int varno = SSA_NAME_VERSION (dest);
922 tree orig_bytes, wholesize;
924 if (object_sizes_unknown_p (object_size_type, varno))
925 return false;
927 if (osi->pass == 0)
928 collect_object_sizes_for (osi, orig);
930 orig_bytes = object_sizes_get (osi, SSA_NAME_VERSION (orig));
931 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (orig), true);
933 if (object_sizes_set (osi, varno, orig_bytes, wholesize))
934 osi->changed = true;
936 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
940 /* Compute object_sizes for VAR, defined to the result of an assignment
941 with operator POINTER_PLUS_EXPR. Return true if the object size might
942 need reexamination later. */
944 static bool
945 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
947 int object_size_type = osi->object_size_type;
948 unsigned int varno = SSA_NAME_VERSION (var);
949 tree bytes, wholesize;
950 tree op0, op1;
951 bool reexamine = false;
953 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
955 op0 = gimple_assign_rhs1 (stmt);
956 op1 = gimple_assign_rhs2 (stmt);
958 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
960 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
961 gcc_assert (TREE_CODE (rhs) == MEM_REF);
962 op0 = TREE_OPERAND (rhs, 0);
963 op1 = TREE_OPERAND (rhs, 1);
965 else
966 gcc_unreachable ();
968 if (object_sizes_unknown_p (object_size_type, varno))
969 return false;
971 /* Handle PTR + OFFSET here. */
972 if (TREE_CODE (op1) == INTEGER_CST
973 && (TREE_CODE (op0) == SSA_NAME
974 || TREE_CODE (op0) == ADDR_EXPR))
976 if (TREE_CODE (op0) == SSA_NAME)
978 if (osi->pass == 0)
979 collect_object_sizes_for (osi, op0);
981 bytes = object_sizes_get (osi, SSA_NAME_VERSION (op0));
982 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (op0), true);
983 reexamine = bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (op0));
985 else
987 /* op0 will be ADDR_EXPR here. We should never come here during
988 reexamination. */
989 gcc_checking_assert (osi->pass == 0);
990 addr_object_size (osi, op0, object_size_type, &bytes, &wholesize);
993 /* In the first pass, do not compute size for offset if either the
994 maximum size is unknown or the minimum size is not initialized yet;
995 the latter indicates a dependency loop and will be resolved in
996 subsequent passes. We attempt to compute offset for 0 minimum size
997 too because a negative offset could be within bounds of WHOLESIZE,
998 giving a non-zero result for VAR. */
999 if (osi->pass != 0 || !size_unknown_p (bytes, 0))
1000 bytes = size_for_offset (bytes, op1, wholesize);
1002 else
1003 bytes = wholesize = size_unknown (object_size_type);
1005 if (object_sizes_set (osi, varno, bytes, wholesize))
1006 osi->changed = true;
1007 return reexamine;
1011 /* Compute object_sizes for VAR, defined at STMT, which is
1012 a COND_EXPR. Return true if the object size might need reexamination
1013 later. */
1015 static bool
1016 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
1018 tree then_, else_;
1019 int object_size_type = osi->object_size_type;
1020 unsigned int varno = SSA_NAME_VERSION (var);
1021 bool reexamine = false;
1023 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
1025 if (object_sizes_unknown_p (object_size_type, varno))
1026 return false;
1028 then_ = gimple_assign_rhs2 (stmt);
1029 else_ = gimple_assign_rhs3 (stmt);
1031 if (TREE_CODE (then_) == SSA_NAME)
1032 reexamine |= merge_object_sizes (osi, var, then_);
1033 else
1034 expr_object_size (osi, var, then_);
1036 if (object_sizes_unknown_p (object_size_type, varno))
1037 return reexamine;
1039 if (TREE_CODE (else_) == SSA_NAME)
1040 reexamine |= merge_object_sizes (osi, var, else_);
1041 else
1042 expr_object_size (osi, var, else_);
1044 return reexamine;
1047 /* Compute object sizes for VAR.
1048 For ADDR_EXPR an object size is the number of remaining bytes
1049 to the end of the object (where what is considered an object depends on
1050 OSI->object_size_type).
1051 For allocation GIMPLE_CALL like malloc or calloc object size is the size
1052 of the allocation.
1053 For POINTER_PLUS_EXPR where second operand is a constant integer,
1054 object size is object size of the first operand minus the constant.
1055 If the constant is bigger than the number of remaining bytes until the
1056 end of the object, object size is 0, but if it is instead a pointer
1057 subtraction, object size is size_unknown (object_size_type).
1058 To differentiate addition from subtraction, ADDR_EXPR returns
1059 size_unknown (object_size_type) for all objects bigger than half of the
1060 address space, and constants less than half of the address space are
1061 considered addition, while bigger constants subtraction.
1062 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
1063 object size is object size of that argument.
1064 Otherwise, object size is the maximum of object sizes of variables
1065 that it might be set to. */
1067 static void
1068 collect_object_sizes_for (struct object_size_info *osi, tree var)
1070 int object_size_type = osi->object_size_type;
1071 unsigned int varno = SSA_NAME_VERSION (var);
1072 gimple *stmt;
1073 bool reexamine;
1075 if (bitmap_bit_p (computed[object_size_type], varno))
1076 return;
1078 if (osi->pass == 0)
1080 if (bitmap_set_bit (osi->visited, varno))
1082 /* Initialize to 0 for maximum size and M1U for minimum size so that
1083 it gets immediately overridden. */
1084 object_sizes_initialize (osi, varno,
1085 size_initval (object_size_type),
1086 size_initval (object_size_type));
1088 else
1090 /* Found a dependency loop. Mark the variable for later
1091 re-examination. */
1092 bitmap_set_bit (osi->reexamine, varno);
1093 if (dump_file && (dump_flags & TDF_DETAILS))
1095 fprintf (dump_file, "Found a dependency loop at ");
1096 print_generic_expr (dump_file, var, dump_flags);
1097 fprintf (dump_file, "\n");
1099 return;
1103 if (dump_file && (dump_flags & TDF_DETAILS))
1105 fprintf (dump_file, "Visiting use-def links for ");
1106 print_generic_expr (dump_file, var, dump_flags);
1107 fprintf (dump_file, "\n");
1110 stmt = SSA_NAME_DEF_STMT (var);
1111 reexamine = false;
1113 switch (gimple_code (stmt))
1115 case GIMPLE_ASSIGN:
1117 tree rhs = gimple_assign_rhs1 (stmt);
1118 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1119 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1120 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
1121 reexamine = plus_stmt_object_size (osi, var, stmt);
1122 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
1123 reexamine = cond_expr_object_size (osi, var, stmt);
1124 else if (gimple_assign_single_p (stmt)
1125 || gimple_assign_unary_nop_p (stmt))
1127 if (TREE_CODE (rhs) == SSA_NAME
1128 && POINTER_TYPE_P (TREE_TYPE (rhs)))
1129 reexamine = merge_object_sizes (osi, var, rhs);
1130 else
1131 expr_object_size (osi, var, rhs);
1133 else
1134 unknown_object_size (osi, var);
1135 break;
1138 case GIMPLE_CALL:
1140 gcall *call_stmt = as_a <gcall *> (stmt);
1141 tree arg = pass_through_call (call_stmt);
1142 if (arg)
1144 if (TREE_CODE (arg) == SSA_NAME
1145 && POINTER_TYPE_P (TREE_TYPE (arg)))
1146 reexamine = merge_object_sizes (osi, var, arg);
1147 else
1148 expr_object_size (osi, var, arg);
1150 else
1151 call_object_size (osi, var, call_stmt);
1152 break;
1155 case GIMPLE_ASM:
1156 /* Pointers defined by __asm__ statements can point anywhere. */
1157 unknown_object_size (osi, var);
1158 break;
1160 case GIMPLE_NOP:
1161 if (SSA_NAME_VAR (var)
1162 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
1163 expr_object_size (osi, var, SSA_NAME_VAR (var));
1164 else
1165 /* Uninitialized SSA names point nowhere. */
1166 unknown_object_size (osi, var);
1167 break;
1169 case GIMPLE_PHI:
1171 unsigned i;
1173 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1175 tree rhs = gimple_phi_arg (stmt, i)->def;
1177 if (object_sizes_unknown_p (object_size_type, varno))
1178 break;
1180 if (TREE_CODE (rhs) == SSA_NAME)
1181 reexamine |= merge_object_sizes (osi, var, rhs);
1182 else if (osi->pass == 0)
1183 expr_object_size (osi, var, rhs);
1185 break;
1188 default:
1189 gcc_unreachable ();
1192 if (! reexamine || object_sizes_unknown_p (object_size_type, varno))
1194 bitmap_set_bit (computed[object_size_type], varno);
1195 bitmap_clear_bit (osi->reexamine, varno);
1197 else
1199 bitmap_set_bit (osi->reexamine, varno);
1200 if (dump_file && (dump_flags & TDF_DETAILS))
1202 fprintf (dump_file, "Need to reexamine ");
1203 print_generic_expr (dump_file, var, dump_flags);
1204 fprintf (dump_file, "\n");
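/* Illustrative example (editorial sketch) of the addition/subtraction
   heuristic described above collect_object_sizes_for.  Offsets live in
   unsigned sizetype, so:

     char buf[16];
     char *p = buf + 4;    offset 4 is below offset_limit: an addition,
                           so __builtin_object_size (p, 0) == 12
     char *q = p - 4;      offset -4 wraps to a value above offset_limit

   For Q the raw offset is not a valid forward offset, but size_for_offset
   can still recover the full 16 bytes by rebasing the offset against the
   tracked whole-object size.  */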
1210 /* Helper function for check_for_plus_in_loops. Called recursively
1211 to detect loops. */
1213 static void
1214 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1215 unsigned int depth)
1217 gimple *stmt = SSA_NAME_DEF_STMT (var);
1218 unsigned int varno = SSA_NAME_VERSION (var);
1220 if (osi->depths[varno])
1222 if (osi->depths[varno] != depth)
1224 unsigned int *sp;
1226 /* Found a loop involving pointer addition. */
1227 for (sp = osi->tos; sp > osi->stack; )
1229 --sp;
1230 bitmap_clear_bit (osi->reexamine, *sp);
1231 bitmap_set_bit (computed[osi->object_size_type], *sp);
1232 object_sizes_set (osi, *sp, size_zero_node,
1233 object_sizes_get (osi, *sp, true));
1234 if (*sp == varno)
1235 break;
1238 return;
1240 else if (! bitmap_bit_p (osi->reexamine, varno))
1241 return;
1243 osi->depths[varno] = depth;
1244 *osi->tos++ = varno;
1246 switch (gimple_code (stmt))
1249 case GIMPLE_ASSIGN:
1251 if ((gimple_assign_single_p (stmt)
1252 || gimple_assign_unary_nop_p (stmt))
1253 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1255 tree rhs = gimple_assign_rhs1 (stmt);
1257 check_for_plus_in_loops_1 (osi, rhs, depth);
1259 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1261 tree basevar = gimple_assign_rhs1 (stmt);
1262 tree cst = gimple_assign_rhs2 (stmt);
1264 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1266 check_for_plus_in_loops_1 (osi, basevar,
1267 depth + !integer_zerop (cst));
1269 else
1270 gcc_unreachable ();
1271 break;
1274 case GIMPLE_CALL:
1276 gcall *call_stmt = as_a <gcall *> (stmt);
1277 tree arg = pass_through_call (call_stmt);
1278 if (arg)
1280 if (TREE_CODE (arg) == SSA_NAME)
1281 check_for_plus_in_loops_1 (osi, arg, depth);
1282 else
1283 gcc_unreachable ();
1285 break;
1288 case GIMPLE_PHI:
1290 unsigned i;
1292 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1294 tree rhs = gimple_phi_arg (stmt, i)->def;
1296 if (TREE_CODE (rhs) == SSA_NAME)
1297 check_for_plus_in_loops_1 (osi, rhs, depth);
1299 break;
1302 default:
1303 gcc_unreachable ();
1306 osi->depths[varno] = 0;
1307 osi->tos--;
1311 /* Check if some pointer we are computing object size of is being increased
1312 within a loop. If yes, assume all the SSA variables participating in
1313 that loop have minimum object sizes 0. */
1315 static void
1316 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1318 gimple *stmt = SSA_NAME_DEF_STMT (var);
1320 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1321 and looked for a POINTER_PLUS_EXPR in the pass-through
1322 argument, if any. In GIMPLE, however, such an expression
1323 is not a valid call operand. */
1325 if (is_gimple_assign (stmt)
1326 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1328 tree basevar = gimple_assign_rhs1 (stmt);
1329 tree cst = gimple_assign_rhs2 (stmt);
1331 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1333 /* Skip non-positive offsets. */
1334 if (integer_zerop (cst) || compare_tree_int (cst, offset_limit) > 0)
1335 return;
1337 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1338 *osi->tos++ = SSA_NAME_VERSION (basevar);
1339 check_for_plus_in_loops_1 (osi, var, 2);
1340 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1341 osi->tos--;
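/* Illustrative example (editorial sketch) of the situation handled above,
   with COND standing for some loop condition:

     char buf[64];
     char *p = &buf[0];
     while (cond)
       p += 4;
     __builtin_object_size (p, 2) == 0

   P is defined in a cycle that advances it by a non-zero constant, so every
   SSA name participating in that cycle is given a minimum object size of 0
   rather than iterating the propagation to a fixed point.  */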
1346 /* Initialize data structures for the object size computation. */
1348 void
1349 init_object_sizes (void)
1351 int object_size_type;
1353 if (computed[0])
1354 return;
1356 for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
1358 object_sizes_grow (object_size_type);
1359 computed[object_size_type] = BITMAP_ALLOC (NULL);
1362 init_offset_limit ();
1366 /* Destroy data structures after the object size computation. */
1368 void
1369 fini_object_sizes (void)
1371 int object_size_type;
1373 for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
1375 object_sizes_release (object_size_type);
1376 BITMAP_FREE (computed[object_size_type]);
1380 /* Dummy valueize function. */
1382 static tree
1383 do_valueize (tree t)
1385 return t;
1388 /* Process a __builtin_object_size or __builtin_dynamic_object_size call in
1389 CALL early for subobjects before any object information is lost due to
1390 optimization. Insert a MIN or MAX expression of the result and
1391 __builtin_object_size at I so that it may be processed in the second pass.
1392 __builtin_dynamic_object_size is treated like __builtin_object_size here
1393 since we're only looking for constant bounds. */
1395 static void
1396 early_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
1398 tree ost = gimple_call_arg (call, 1);
1399 tree lhs = gimple_call_lhs (call);
1400 gcc_assert (lhs != NULL_TREE);
1402 if (!tree_fits_uhwi_p (ost))
1403 return;
1405 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
1406 tree ptr = gimple_call_arg (call, 0);
1408 if (object_size_type != 1 && object_size_type != 3)
1409 return;
1411 if (TREE_CODE (ptr) != ADDR_EXPR && TREE_CODE (ptr) != SSA_NAME)
1412 return;
1414 tree type = TREE_TYPE (lhs);
1415 tree bytes;
1416 if (!compute_builtin_object_size (ptr, object_size_type, &bytes)
1417 || !int_fits_type_p (bytes, type))
1418 return;
1420 tree tem = make_ssa_name (type);
1421 gimple_call_set_lhs (call, tem);
1422 enum tree_code code = object_size_type & OST_MINIMUM ? MAX_EXPR : MIN_EXPR;
1423 tree cst = fold_convert (type, bytes);
1424 gimple *g = gimple_build_assign (lhs, code, tem, cst);
1425 gsi_insert_after (i, g, GSI_NEW_STMT);
1426 update_stmt (call);
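/* Illustrative example (editorial sketch, hypothetical struct): for a
   subobject query such as

     struct S { int i; char buf[8]; } s;
     size_t n = __builtin_object_size (&s.buf[2], 1);

   this early pass rewrites the statement roughly as

     tmp = __builtin_object_size (&s.buf[2], 1);
     n = MIN (tmp, 6);

   (MAX_EXPR for the minimum variants), so the precise subobject bound
   computed while field structure is still visible constrains whatever the
   late pass later proves.  */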
1429 /* Attempt to fold one __builtin_dynamic_object_size call in CALL into an
1430 expression and insert it at I. Return true if it succeeds. */
1432 static bool
1433 dynamic_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
1435 gcc_assert (gimple_call_num_args (call) == 2);
1437 tree args[2];
1438 args[0] = gimple_call_arg (call, 0);
1439 args[1] = gimple_call_arg (call, 1);
1441 location_t loc = EXPR_LOC_OR_LOC (args[0], input_location);
1442 tree result_type = gimple_call_return_type (as_a <gcall *> (call));
1443 tree result = fold_builtin_call_array (loc, result_type,
1444 gimple_call_fn (call), 2, args);
1446 if (!result)
1447 return false;
1449 /* fold_builtin_call_array may wrap the result inside a
1450 NOP_EXPR. */
1451 STRIP_NOPS (result);
1452 gimplify_and_update_call_from_tree (i, result);
1454 if (dump_file && (dump_flags & TDF_DETAILS))
1456 fprintf (dump_file, "Simplified (dynamic)\n ");
1457 print_gimple_stmt (dump_file, call, 0, dump_flags);
1458 fprintf (dump_file, " to ");
1459 print_generic_expr (dump_file, result);
1460 fprintf (dump_file, "\n");
1462 return true;
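/* Illustrative example (editorial sketch): for

     void *p = malloc (n);
     size_t sz = __builtin_dynamic_object_size (p, 0);

   the folder above may simplify the call directly; if it cannot, the caller
   below retries the statement as a plain __builtin_object_size so that at
   least a constant lower or upper estimate is produced.  Whether a
   non-constant result such as N is actually produced depends on the folding
   support available in this snapshot, so treat this as a sketch only.  */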
1465 static unsigned int
1466 object_sizes_execute (function *fun, bool early)
1468 basic_block bb;
1469 FOR_EACH_BB_FN (bb, fun)
1471 gimple_stmt_iterator i;
1472 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1474 tree result;
1475 bool dynamic = false;
1477 gimple *call = gsi_stmt (i);
1478 if (gimple_call_builtin_p (call, BUILT_IN_DYNAMIC_OBJECT_SIZE))
1479 dynamic = true;
1480 else if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
1481 continue;
1483 tree lhs = gimple_call_lhs (call);
1484 if (!lhs)
1485 continue;
1487 init_object_sizes ();
1489 /* If early, only attempt to fold
1490 __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
1491 and rather than folding the builtin to the constant if any,
1492 create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
1493 call result and the computed constant. Do the same for
1494 __builtin_dynamic_object_size too. */
1495 if (early)
1497 early_object_sizes_execute_one (&i, call);
1498 continue;
1501 if (dynamic)
1503 if (dynamic_object_sizes_execute_one (&i, call))
1504 continue;
1505 else
1507 /* If we could not find a suitable size expression, lower to
1508 __builtin_object_size so that we may at least get a
1509 constant lower or higher estimate. */
1510 tree bosfn = builtin_decl_implicit (BUILT_IN_OBJECT_SIZE);
1511 gimple_call_set_fndecl (call, bosfn);
1512 update_stmt (call);
1514 if (dump_file && (dump_flags & TDF_DETAILS))
1516 print_generic_expr (dump_file, gimple_call_arg (call, 0),
1517 dump_flags);
1518 fprintf (dump_file,
1519 ": Retrying as __builtin_object_size\n");
1524 result = gimple_fold_stmt_to_constant (call, do_valueize);
1525 if (!result)
1527 tree ost = gimple_call_arg (call, 1);
1529 if (tree_fits_uhwi_p (ost))
1531 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
1533 if (object_size_type & OST_MINIMUM)
1534 result = build_zero_cst (size_type_node);
1535 else if (object_size_type < OST_END)
1536 result = fold_convert (size_type_node,
1537 integer_minus_one_node);
1540 if (!result)
1541 continue;
1544 gcc_assert (TREE_CODE (result) == INTEGER_CST);
1546 if (dump_file && (dump_flags & TDF_DETAILS))
1548 fprintf (dump_file, "Simplified\n ");
1549 print_gimple_stmt (dump_file, call, 0, dump_flags);
1550 fprintf (dump_file, " to ");
1551 print_generic_expr (dump_file, result);
1552 fprintf (dump_file, "\n");
1555 /* Propagate into all uses and fold those stmts. */
1556 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1557 replace_uses_by (lhs, result);
1558 else
1559 replace_call_with_value (&i, result);
1563 fini_object_sizes ();
1564 return 0;
1567 /* Simple pass to optimize all __builtin_object_size () builtins. */
1569 namespace {
1571 const pass_data pass_data_object_sizes =
1573 GIMPLE_PASS, /* type */
1574 "objsz", /* name */
1575 OPTGROUP_NONE, /* optinfo_flags */
1576 TV_NONE, /* tv_id */
1577 ( PROP_cfg | PROP_ssa ), /* properties_required */
1578 PROP_objsz, /* properties_provided */
1579 0, /* properties_destroyed */
1580 0, /* todo_flags_start */
1581 0, /* todo_flags_finish */
1584 class pass_object_sizes : public gimple_opt_pass
1586 public:
1587 pass_object_sizes (gcc::context *ctxt)
1588 : gimple_opt_pass (pass_data_object_sizes, ctxt)
1591 /* opt_pass methods: */
1592 opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
1593 virtual unsigned int execute (function *fun)
1595 return object_sizes_execute (fun, false);
1597 }; // class pass_object_sizes
1599 } // anon namespace
1601 gimple_opt_pass *
1602 make_pass_object_sizes (gcc::context *ctxt)
1604 return new pass_object_sizes (ctxt);
1607 /* Early version of pass to optimize all __builtin_object_size () builtins. */
1609 namespace {
1611 const pass_data pass_data_early_object_sizes =
1613 GIMPLE_PASS, /* type */
1614 "early_objsz", /* name */
1615 OPTGROUP_NONE, /* optinfo_flags */
1616 TV_NONE, /* tv_id */
1617 ( PROP_cfg | PROP_ssa ), /* properties_required */
1618 0, /* properties_provided */
1619 0, /* properties_destroyed */
1620 0, /* todo_flags_start */
1621 0, /* todo_flags_finish */
1624 class pass_early_object_sizes : public gimple_opt_pass
1626 public:
1627 pass_early_object_sizes (gcc::context *ctxt)
1628 : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
1631 /* opt_pass methods: */
1632 virtual unsigned int execute (function *fun)
1634 return object_sizes_execute (fun, true);
 1636 }; // class pass_early_object_sizes
1638 } // anon namespace
1640 gimple_opt_pass *
1641 make_pass_early_object_sizes (gcc::context *ctxt)
1643 return new pass_early_object_sizes (ctxt);