/* gcc/tree-object-size.c — from the official GCC repository.
   (Web-view extraction artifact; the associated commit message was
   "Add testcase of PR c++/92542, already fixed."; blob
   116413c2d8f611c9bee3757e6900e8eb0591a4df.)  */
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2020 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35 #include "stringpool.h"
36 #include "attribs.h"
38 struct object_size_info
40 int object_size_type;
41 unsigned char pass;
42 bool changed;
43 bitmap visited, reexamine;
44 unsigned int *depths;
45 unsigned int *stack, *tos;
48 static const unsigned HOST_WIDE_INT unknown[4] = {
49 HOST_WIDE_INT_M1U,
50 HOST_WIDE_INT_M1U,
55 static tree compute_object_offset (const_tree, const_tree);
56 static bool addr_object_size (struct object_size_info *,
57 const_tree, int, unsigned HOST_WIDE_INT *,
58 tree * = NULL, tree * = NULL);
59 static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
60 static tree pass_through_call (const gcall *);
61 static void collect_object_sizes_for (struct object_size_info *, tree);
62 static void expr_object_size (struct object_size_info *, tree, tree);
63 static bool merge_object_sizes (struct object_size_info *, tree, tree,
64 unsigned HOST_WIDE_INT);
65 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
66 static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
67 static void init_offset_limit (void);
68 static void check_for_plus_in_loops (struct object_size_info *, tree);
69 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
70 unsigned int);
72 /* object_sizes[0] is upper bound for number of bytes till the end of
73 the object.
74 object_sizes[1] is upper bound for number of bytes till the end of
75 the subobject (innermost array or field with address taken).
76 object_sizes[2] is lower bound for number of bytes till the end of
77 the object and object_sizes[3] lower bound for subobject. */
78 static vec<unsigned HOST_WIDE_INT> object_sizes[4];
80 /* Bitmaps what object sizes have been computed already. */
81 static bitmap computed[4];
83 /* Maximum value of offset we consider to be addition. */
84 static unsigned HOST_WIDE_INT offset_limit;
87 /* Initialize OFFSET_LIMIT variable. */
88 static void
89 init_offset_limit (void)
91 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
92 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
93 else
94 offset_limit = -1;
95 offset_limit /= 2;
99 /* Compute offset of EXPR within VAR. Return error_mark_node
100 if unknown. */
102 static tree
103 compute_object_offset (const_tree expr, const_tree var)
105 enum tree_code code = PLUS_EXPR;
106 tree base, off, t;
108 if (expr == var)
109 return size_zero_node;
111 switch (TREE_CODE (expr))
113 case COMPONENT_REF:
114 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
115 if (base == error_mark_node)
116 return base;
118 t = TREE_OPERAND (expr, 1);
119 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
120 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
121 / BITS_PER_UNIT));
122 break;
124 case REALPART_EXPR:
125 CASE_CONVERT:
126 case VIEW_CONVERT_EXPR:
127 case NON_LVALUE_EXPR:
128 return compute_object_offset (TREE_OPERAND (expr, 0), var);
130 case IMAGPART_EXPR:
131 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
132 if (base == error_mark_node)
133 return base;
135 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
136 break;
138 case ARRAY_REF:
139 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
140 if (base == error_mark_node)
141 return base;
143 t = TREE_OPERAND (expr, 1);
144 tree low_bound, unit_size;
145 low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
146 unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
147 if (! integer_zerop (low_bound))
148 t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
149 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
151 code = MINUS_EXPR;
152 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
154 t = fold_convert (sizetype, t);
155 off = size_binop (MULT_EXPR, unit_size, t);
156 break;
158 case MEM_REF:
159 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
160 return wide_int_to_tree (sizetype, mem_ref_offset (expr));
162 default:
163 return error_mark_node;
166 return size_binop (code, base, off);
170 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
171 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
172 If unknown, return unknown[object_size_type]. */
174 static bool
175 addr_object_size (struct object_size_info *osi, const_tree ptr,
176 int object_size_type, unsigned HOST_WIDE_INT *psize,
177 tree *pdecl /* = NULL */, tree *poff /* = NULL */)
179 tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;
181 tree dummy_decl, dummy_off = size_zero_node;
182 if (!pdecl)
183 pdecl = &dummy_decl;
184 if (!poff)
185 poff = &dummy_off;
187 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
189 /* Set to unknown and overwrite just before returning if the size
190 could be determined. */
191 *psize = unknown[object_size_type];
193 pt_var = TREE_OPERAND (ptr, 0);
194 while (handled_component_p (pt_var))
195 pt_var = TREE_OPERAND (pt_var, 0);
197 if (pt_var
198 && TREE_CODE (pt_var) == MEM_REF)
200 unsigned HOST_WIDE_INT sz;
202 if (!osi || (object_size_type & 1) != 0
203 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
205 compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
206 object_size_type & ~1, &sz, pdecl, poff);
208 else
210 tree var = TREE_OPERAND (pt_var, 0);
211 if (osi->pass == 0)
212 collect_object_sizes_for (osi, var);
213 if (bitmap_bit_p (computed[object_size_type],
214 SSA_NAME_VERSION (var)))
215 sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
216 else
217 sz = unknown[object_size_type];
219 if (sz != unknown[object_size_type])
221 offset_int mem_offset;
222 if (mem_ref_offset (pt_var).is_constant (&mem_offset))
224 offset_int dsz = wi::sub (sz, mem_offset);
225 if (wi::neg_p (dsz))
226 sz = 0;
227 else if (wi::fits_uhwi_p (dsz))
228 sz = dsz.to_uhwi ();
229 else
230 sz = unknown[object_size_type];
232 else
233 sz = unknown[object_size_type];
236 if (sz != unknown[object_size_type] && sz < offset_limit)
237 pt_var_size = size_int (sz);
239 else if (pt_var
240 && DECL_P (pt_var)
241 && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
242 && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
244 *pdecl = pt_var;
245 pt_var_size = DECL_SIZE_UNIT (pt_var);
247 else if (pt_var
248 && TREE_CODE (pt_var) == STRING_CST
249 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
250 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
251 && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
252 < offset_limit)
253 pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
254 else
255 return false;
257 if (pt_var != TREE_OPERAND (ptr, 0))
259 tree var;
261 if (object_size_type & 1)
263 var = TREE_OPERAND (ptr, 0);
265 while (var != pt_var
266 && TREE_CODE (var) != BIT_FIELD_REF
267 && TREE_CODE (var) != COMPONENT_REF
268 && TREE_CODE (var) != ARRAY_REF
269 && TREE_CODE (var) != ARRAY_RANGE_REF
270 && TREE_CODE (var) != REALPART_EXPR
271 && TREE_CODE (var) != IMAGPART_EXPR)
272 var = TREE_OPERAND (var, 0);
273 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
274 var = TREE_OPERAND (var, 0);
275 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
276 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
277 || (pt_var_size
278 && tree_int_cst_lt (pt_var_size,
279 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
280 var = pt_var;
281 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
283 tree v = var;
284 /* For &X->fld, compute object size only if fld isn't the last
285 field, as struct { int i; char c[1]; } is often used instead
286 of flexible array member. */
287 while (v && v != pt_var)
288 switch (TREE_CODE (v))
290 case ARRAY_REF:
291 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
292 && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
294 tree domain
295 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
296 if (domain
297 && TYPE_MAX_VALUE (domain)
298 && TREE_CODE (TYPE_MAX_VALUE (domain))
299 == INTEGER_CST
300 && tree_int_cst_lt (TREE_OPERAND (v, 1),
301 TYPE_MAX_VALUE (domain)))
303 v = NULL_TREE;
304 break;
307 v = TREE_OPERAND (v, 0);
308 break;
309 case REALPART_EXPR:
310 case IMAGPART_EXPR:
311 v = NULL_TREE;
312 break;
313 case COMPONENT_REF:
314 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
316 v = NULL_TREE;
317 break;
319 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
320 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
321 != UNION_TYPE
322 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
323 != QUAL_UNION_TYPE)
324 break;
325 else
326 v = TREE_OPERAND (v, 0);
327 if (TREE_CODE (v) == COMPONENT_REF
328 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
329 == RECORD_TYPE)
331 tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
332 for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
333 if (TREE_CODE (fld_chain) == FIELD_DECL)
334 break;
336 if (fld_chain)
338 v = NULL_TREE;
339 break;
341 v = TREE_OPERAND (v, 0);
343 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
344 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
345 != UNION_TYPE
346 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
347 != QUAL_UNION_TYPE)
348 break;
349 else
350 v = TREE_OPERAND (v, 0);
351 if (v != pt_var)
352 v = NULL_TREE;
353 else
354 v = pt_var;
355 break;
356 default:
357 v = pt_var;
358 break;
360 if (v == pt_var)
361 var = pt_var;
364 else
365 var = pt_var;
367 if (var != pt_var)
368 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
369 else if (!pt_var_size)
370 return false;
371 else
372 var_size = pt_var_size;
373 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
374 if (bytes != error_mark_node)
376 if (TREE_CODE (bytes) == INTEGER_CST
377 && tree_int_cst_lt (var_size, bytes))
378 bytes = size_zero_node;
379 else
380 bytes = size_binop (MINUS_EXPR, var_size, bytes);
381 *poff = bytes;
383 if (var != pt_var
384 && pt_var_size
385 && TREE_CODE (pt_var) == MEM_REF
386 && bytes != error_mark_node)
388 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
389 if (bytes2 != error_mark_node)
391 if (TREE_CODE (bytes2) == INTEGER_CST
392 && tree_int_cst_lt (pt_var_size, bytes2))
393 bytes2 = size_zero_node;
394 else
395 bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
396 *poff = size_binop (PLUS_EXPR, *poff, bytes2);
397 bytes = size_binop (MIN_EXPR, bytes, bytes2);
401 else if (!pt_var_size)
402 return false;
403 else
404 bytes = pt_var_size;
406 if (tree_fits_uhwi_p (bytes))
408 *psize = tree_to_uhwi (bytes);
409 return true;
412 return false;
416 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
417 Handles calls to functions declared with attribute alloc_size.
418 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
419 If unknown, return unknown[object_size_type]. */
421 static unsigned HOST_WIDE_INT
422 alloc_object_size (const gcall *call, int object_size_type)
424 gcc_assert (is_gimple_call (call));
426 tree calltype;
427 if (tree callfn = gimple_call_fndecl (call))
428 calltype = TREE_TYPE (callfn);
429 else
430 calltype = gimple_call_fntype (call);
432 if (!calltype)
433 return unknown[object_size_type];
435 /* Set to positions of alloc_size arguments. */
436 int arg1 = -1, arg2 = -1;
437 tree alloc_size = lookup_attribute ("alloc_size",
438 TYPE_ATTRIBUTES (calltype));
439 if (alloc_size && TREE_VALUE (alloc_size))
441 tree p = TREE_VALUE (alloc_size);
443 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
444 if (TREE_CHAIN (p))
445 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
448 if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
449 || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
450 || (arg2 >= 0
451 && (arg2 >= (int)gimple_call_num_args (call)
452 || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
453 return unknown[object_size_type];
455 tree bytes = NULL_TREE;
456 if (arg2 >= 0)
457 bytes = size_binop (MULT_EXPR,
458 fold_convert (sizetype, gimple_call_arg (call, arg1)),
459 fold_convert (sizetype, gimple_call_arg (call, arg2)));
460 else if (arg1 >= 0)
461 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
463 if (bytes && tree_fits_uhwi_p (bytes))
464 return tree_to_uhwi (bytes);
466 return unknown[object_size_type];
470 /* If object size is propagated from one of function's arguments directly
471 to its return value, return that argument for GIMPLE_CALL statement CALL.
472 Otherwise return NULL. */
474 static tree
475 pass_through_call (const gcall *call)
477 unsigned rf = gimple_call_return_flags (call);
478 if (rf & ERF_RETURNS_ARG)
480 unsigned argnum = rf & ERF_RETURN_ARG_MASK;
481 if (argnum < gimple_call_num_args (call))
482 return gimple_call_arg (call, argnum);
485 /* __builtin_assume_aligned is intentionally not marked RET1. */
486 if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
487 return gimple_call_arg (call, 0);
489 return NULL_TREE;
493 /* Compute __builtin_object_size value for PTR and set *PSIZE to
494 the resulting value. If the declared object is known and PDECL
495 is nonnull, sets *PDECL to the object's DECL. OBJECT_SIZE_TYPE
496 is the second argument to __builtin_object_size.
497 Returns true on success and false when the object size could not
498 be determined. */
500 bool
501 compute_builtin_object_size (tree ptr, int object_size_type,
502 unsigned HOST_WIDE_INT *psize,
503 tree *pdecl /* = NULL */, tree *poff /* = NULL */)
505 gcc_assert (object_size_type >= 0 && object_size_type <= 3);
507 tree dummy_decl, dummy_off = size_zero_node;
508 if (!pdecl)
509 pdecl = &dummy_decl;
510 if (!poff)
511 poff = &dummy_off;
513 /* Set to unknown and overwrite just before returning if the size
514 could be determined. */
515 *psize = unknown[object_size_type];
517 if (! offset_limit)
518 init_offset_limit ();
520 if (TREE_CODE (ptr) == ADDR_EXPR)
521 return addr_object_size (NULL, ptr, object_size_type, psize, pdecl, poff);
523 if (TREE_CODE (ptr) != SSA_NAME
524 || !POINTER_TYPE_P (TREE_TYPE (ptr)))
525 return false;
527 if (computed[object_size_type] == NULL)
529 if (optimize || object_size_type & 1)
530 return false;
532 /* When not optimizing, rather than failing, make a small effort
533 to determine the object size without the full benefit of
534 the (costly) computation below. */
535 gimple *def = SSA_NAME_DEF_STMT (ptr);
536 if (gimple_code (def) == GIMPLE_ASSIGN)
538 tree_code code = gimple_assign_rhs_code (def);
539 if (code == POINTER_PLUS_EXPR)
541 tree offset = gimple_assign_rhs2 (def);
542 ptr = gimple_assign_rhs1 (def);
544 if (tree_fits_shwi_p (offset)
545 && compute_builtin_object_size (ptr, object_size_type,
546 psize, pdecl, poff))
548 /* Return zero when the offset is out of bounds. */
549 unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
550 *psize = off < *psize ? *psize - off : 0;
551 *poff = offset;
552 return true;
556 return false;
559 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
561 struct object_size_info osi;
562 bitmap_iterator bi;
563 unsigned int i;
565 if (num_ssa_names > object_sizes[object_size_type].length ())
566 object_sizes[object_size_type].safe_grow (num_ssa_names);
567 if (dump_file)
569 fprintf (dump_file, "Computing %s %sobject size for ",
570 (object_size_type & 2) ? "minimum" : "maximum",
571 (object_size_type & 1) ? "sub" : "");
572 print_generic_expr (dump_file, ptr, dump_flags);
573 fprintf (dump_file, ":\n");
576 osi.visited = BITMAP_ALLOC (NULL);
577 osi.reexamine = BITMAP_ALLOC (NULL);
578 osi.object_size_type = object_size_type;
579 osi.depths = NULL;
580 osi.stack = NULL;
581 osi.tos = NULL;
583 /* First pass: walk UD chains, compute object sizes that
584 can be computed. osi.reexamine bitmap at the end will
585 contain what variables were found in dependency cycles
586 and therefore need to be reexamined. */
587 osi.pass = 0;
588 osi.changed = false;
589 collect_object_sizes_for (&osi, ptr);
591 /* Second pass: keep recomputing object sizes of variables
592 that need reexamination, until no object sizes are
593 increased or all object sizes are computed. */
594 if (! bitmap_empty_p (osi.reexamine))
596 bitmap reexamine = BITMAP_ALLOC (NULL);
598 /* If looking for minimum instead of maximum object size,
599 detect cases where a pointer is increased in a loop.
600 Although even without this detection pass 2 would eventually
601 terminate, it could take a long time. If a pointer is
602 increasing this way, we need to assume 0 object size.
603 E.g. p = &buf[0]; while (cond) p = p + 4; */
604 if (object_size_type & 2)
606 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
607 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
608 osi.tos = osi.stack;
609 osi.pass = 1;
610 /* collect_object_sizes_for is changing
611 osi.reexamine bitmap, so iterate over a copy. */
612 bitmap_copy (reexamine, osi.reexamine);
613 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
614 if (bitmap_bit_p (osi.reexamine, i))
615 check_for_plus_in_loops (&osi, ssa_name (i));
617 free (osi.depths);
618 osi.depths = NULL;
619 free (osi.stack);
620 osi.stack = NULL;
621 osi.tos = NULL;
626 osi.pass = 2;
627 osi.changed = false;
628 /* collect_object_sizes_for is changing
629 osi.reexamine bitmap, so iterate over a copy. */
630 bitmap_copy (reexamine, osi.reexamine);
631 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
632 if (bitmap_bit_p (osi.reexamine, i))
634 collect_object_sizes_for (&osi, ssa_name (i));
635 if (dump_file && (dump_flags & TDF_DETAILS))
637 fprintf (dump_file, "Reexamining ");
638 print_generic_expr (dump_file, ssa_name (i),
639 dump_flags);
640 fprintf (dump_file, "\n");
644 while (osi.changed);
646 BITMAP_FREE (reexamine);
648 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
649 bitmap_set_bit (computed[object_size_type], i);
651 /* Debugging dumps. */
652 if (dump_file)
654 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
655 if (object_sizes[object_size_type][i]
656 != unknown[object_size_type])
658 print_generic_expr (dump_file, ssa_name (i),
659 dump_flags);
660 fprintf (dump_file,
661 ": %s %sobject size "
662 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
663 (object_size_type & 2) ? "minimum" : "maximum",
664 (object_size_type & 1) ? "sub" : "",
665 object_sizes[object_size_type][i]);
669 BITMAP_FREE (osi.reexamine);
670 BITMAP_FREE (osi.visited);
673 *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
674 return *psize != unknown[object_size_type];
677 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
679 static void
680 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
682 int object_size_type = osi->object_size_type;
683 unsigned int varno = SSA_NAME_VERSION (ptr);
684 unsigned HOST_WIDE_INT bytes;
686 gcc_assert (object_sizes[object_size_type][varno]
687 != unknown[object_size_type]);
688 gcc_assert (osi->pass == 0);
690 if (TREE_CODE (value) == WITH_SIZE_EXPR)
691 value = TREE_OPERAND (value, 0);
693 /* Pointer variables should have been handled by merge_object_sizes. */
694 gcc_assert (TREE_CODE (value) != SSA_NAME
695 || !POINTER_TYPE_P (TREE_TYPE (value)));
697 if (TREE_CODE (value) == ADDR_EXPR)
698 addr_object_size (osi, value, object_size_type, &bytes);
699 else
700 bytes = unknown[object_size_type];
702 if ((object_size_type & 2) == 0)
704 if (object_sizes[object_size_type][varno] < bytes)
705 object_sizes[object_size_type][varno] = bytes;
707 else
709 if (object_sizes[object_size_type][varno] > bytes)
710 object_sizes[object_size_type][varno] = bytes;
715 /* Compute object_sizes for PTR, defined to the result of a call. */
717 static void
718 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
720 int object_size_type = osi->object_size_type;
721 unsigned int varno = SSA_NAME_VERSION (ptr);
722 unsigned HOST_WIDE_INT bytes;
724 gcc_assert (is_gimple_call (call));
726 gcc_assert (object_sizes[object_size_type][varno]
727 != unknown[object_size_type]);
728 gcc_assert (osi->pass == 0);
730 bytes = alloc_object_size (call, object_size_type);
732 if ((object_size_type & 2) == 0)
734 if (object_sizes[object_size_type][varno] < bytes)
735 object_sizes[object_size_type][varno] = bytes;
737 else
739 if (object_sizes[object_size_type][varno] > bytes)
740 object_sizes[object_size_type][varno] = bytes;
745 /* Compute object_sizes for PTR, defined to an unknown value. */
747 static void
748 unknown_object_size (struct object_size_info *osi, tree ptr)
750 int object_size_type = osi->object_size_type;
751 unsigned int varno = SSA_NAME_VERSION (ptr);
752 unsigned HOST_WIDE_INT bytes;
754 gcc_assert (object_sizes[object_size_type][varno]
755 != unknown[object_size_type]);
756 gcc_assert (osi->pass == 0);
758 bytes = unknown[object_size_type];
760 if ((object_size_type & 2) == 0)
762 if (object_sizes[object_size_type][varno] < bytes)
763 object_sizes[object_size_type][varno] = bytes;
765 else
767 if (object_sizes[object_size_type][varno] > bytes)
768 object_sizes[object_size_type][varno] = bytes;
773 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
774 the object size might need reexamination later. */
776 static bool
777 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
778 unsigned HOST_WIDE_INT offset)
780 int object_size_type = osi->object_size_type;
781 unsigned int varno = SSA_NAME_VERSION (dest);
782 unsigned HOST_WIDE_INT orig_bytes;
784 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
785 return false;
786 if (offset >= offset_limit)
788 object_sizes[object_size_type][varno] = unknown[object_size_type];
789 return false;
792 if (osi->pass == 0)
793 collect_object_sizes_for (osi, orig);
795 orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
796 if (orig_bytes != unknown[object_size_type])
797 orig_bytes = (offset > orig_bytes)
798 ? HOST_WIDE_INT_0U : orig_bytes - offset;
800 if ((object_size_type & 2) == 0)
802 if (object_sizes[object_size_type][varno] < orig_bytes)
804 object_sizes[object_size_type][varno] = orig_bytes;
805 osi->changed = true;
808 else
810 if (object_sizes[object_size_type][varno] > orig_bytes)
812 object_sizes[object_size_type][varno] = orig_bytes;
813 osi->changed = true;
816 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
820 /* Compute object_sizes for VAR, defined to the result of an assignment
821 with operator POINTER_PLUS_EXPR. Return true if the object size might
822 need reexamination later. */
824 static bool
825 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
827 int object_size_type = osi->object_size_type;
828 unsigned int varno = SSA_NAME_VERSION (var);
829 unsigned HOST_WIDE_INT bytes;
830 tree op0, op1;
832 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
834 op0 = gimple_assign_rhs1 (stmt);
835 op1 = gimple_assign_rhs2 (stmt);
837 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
839 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
840 gcc_assert (TREE_CODE (rhs) == MEM_REF);
841 op0 = TREE_OPERAND (rhs, 0);
842 op1 = TREE_OPERAND (rhs, 1);
844 else
845 gcc_unreachable ();
847 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
848 return false;
850 /* Handle PTR + OFFSET here. */
851 if (TREE_CODE (op1) == INTEGER_CST
852 && (TREE_CODE (op0) == SSA_NAME
853 || TREE_CODE (op0) == ADDR_EXPR))
855 if (! tree_fits_uhwi_p (op1))
856 bytes = unknown[object_size_type];
857 else if (TREE_CODE (op0) == SSA_NAME)
858 return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
859 else
861 unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);
863 /* op0 will be ADDR_EXPR here. */
864 addr_object_size (osi, op0, object_size_type, &bytes);
865 if (bytes == unknown[object_size_type])
867 else if (off > offset_limit)
868 bytes = unknown[object_size_type];
869 else if (off > bytes)
870 bytes = 0;
871 else
872 bytes -= off;
875 else
876 bytes = unknown[object_size_type];
878 if ((object_size_type & 2) == 0)
880 if (object_sizes[object_size_type][varno] < bytes)
881 object_sizes[object_size_type][varno] = bytes;
883 else
885 if (object_sizes[object_size_type][varno] > bytes)
886 object_sizes[object_size_type][varno] = bytes;
888 return false;
892 /* Compute object_sizes for VAR, defined at STMT, which is
893 a COND_EXPR. Return true if the object size might need reexamination
894 later. */
896 static bool
897 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
899 tree then_, else_;
900 int object_size_type = osi->object_size_type;
901 unsigned int varno = SSA_NAME_VERSION (var);
902 bool reexamine = false;
904 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
906 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
907 return false;
909 then_ = gimple_assign_rhs2 (stmt);
910 else_ = gimple_assign_rhs3 (stmt);
912 if (TREE_CODE (then_) == SSA_NAME)
913 reexamine |= merge_object_sizes (osi, var, then_, 0);
914 else
915 expr_object_size (osi, var, then_);
917 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
918 return reexamine;
920 if (TREE_CODE (else_) == SSA_NAME)
921 reexamine |= merge_object_sizes (osi, var, else_, 0);
922 else
923 expr_object_size (osi, var, else_);
925 return reexamine;
928 /* Compute object sizes for VAR.
929 For ADDR_EXPR an object size is the number of remaining bytes
930 to the end of the object (where what is considered an object depends on
931 OSI->object_size_type).
932 For allocation GIMPLE_CALL like malloc or calloc object size is the size
933 of the allocation.
934 For POINTER_PLUS_EXPR where second operand is a constant integer,
935 object size is object size of the first operand minus the constant.
936 If the constant is bigger than the number of remaining bytes until the
937 end of the object, object size is 0, but if it is instead a pointer
938 subtraction, object size is unknown[object_size_type].
939 To differentiate addition from subtraction, ADDR_EXPR returns
940 unknown[object_size_type] for all objects bigger than half of the address
941 space, and constants less than half of the address space are considered
942 addition, while bigger constants subtraction.
943 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
944 object size is object size of that argument.
945 Otherwise, object size is the maximum of object sizes of variables
946 that it might be set to. */
948 static void
949 collect_object_sizes_for (struct object_size_info *osi, tree var)
951 int object_size_type = osi->object_size_type;
952 unsigned int varno = SSA_NAME_VERSION (var);
953 gimple *stmt;
954 bool reexamine;
956 if (bitmap_bit_p (computed[object_size_type], varno))
957 return;
959 if (osi->pass == 0)
961 if (bitmap_set_bit (osi->visited, varno))
963 object_sizes[object_size_type][varno]
964 = (object_size_type & 2) ? -1 : 0;
966 else
968 /* Found a dependency loop. Mark the variable for later
969 re-examination. */
970 bitmap_set_bit (osi->reexamine, varno);
971 if (dump_file && (dump_flags & TDF_DETAILS))
973 fprintf (dump_file, "Found a dependency loop at ");
974 print_generic_expr (dump_file, var, dump_flags);
975 fprintf (dump_file, "\n");
977 return;
981 if (dump_file && (dump_flags & TDF_DETAILS))
983 fprintf (dump_file, "Visiting use-def links for ");
984 print_generic_expr (dump_file, var, dump_flags);
985 fprintf (dump_file, "\n");
988 stmt = SSA_NAME_DEF_STMT (var);
989 reexamine = false;
991 switch (gimple_code (stmt))
993 case GIMPLE_ASSIGN:
995 tree rhs = gimple_assign_rhs1 (stmt);
996 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
997 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
998 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
999 reexamine = plus_stmt_object_size (osi, var, stmt);
1000 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
1001 reexamine = cond_expr_object_size (osi, var, stmt);
1002 else if (gimple_assign_single_p (stmt)
1003 || gimple_assign_unary_nop_p (stmt))
1005 if (TREE_CODE (rhs) == SSA_NAME
1006 && POINTER_TYPE_P (TREE_TYPE (rhs)))
1007 reexamine = merge_object_sizes (osi, var, rhs, 0);
1008 else
1009 expr_object_size (osi, var, rhs);
1011 else
1012 unknown_object_size (osi, var);
1013 break;
1016 case GIMPLE_CALL:
1018 gcall *call_stmt = as_a <gcall *> (stmt);
1019 tree arg = pass_through_call (call_stmt);
1020 if (arg)
1022 if (TREE_CODE (arg) == SSA_NAME
1023 && POINTER_TYPE_P (TREE_TYPE (arg)))
1024 reexamine = merge_object_sizes (osi, var, arg, 0);
1025 else
1026 expr_object_size (osi, var, arg);
1028 else
1029 call_object_size (osi, var, call_stmt);
1030 break;
1033 case GIMPLE_ASM:
1034 /* Pointers defined by __asm__ statements can point anywhere. */
1035 object_sizes[object_size_type][varno] = unknown[object_size_type];
1036 break;
1038 case GIMPLE_NOP:
1039 if (SSA_NAME_VAR (var)
1040 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
1041 expr_object_size (osi, var, SSA_NAME_VAR (var));
1042 else
1043 /* Uninitialized SSA names point nowhere. */
1044 object_sizes[object_size_type][varno] = unknown[object_size_type];
1045 break;
1047 case GIMPLE_PHI:
1049 unsigned i;
1051 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1053 tree rhs = gimple_phi_arg (stmt, i)->def;
1055 if (object_sizes[object_size_type][varno]
1056 == unknown[object_size_type])
1057 break;
1059 if (TREE_CODE (rhs) == SSA_NAME)
1060 reexamine |= merge_object_sizes (osi, var, rhs, 0);
1061 else if (osi->pass == 0)
1062 expr_object_size (osi, var, rhs);
1064 break;
1067 default:
1068 gcc_unreachable ();
1071 if (! reexamine
1072 || object_sizes[object_size_type][varno] == unknown[object_size_type])
1074 bitmap_set_bit (computed[object_size_type], varno);
1075 bitmap_clear_bit (osi->reexamine, varno);
1077 else
1079 bitmap_set_bit (osi->reexamine, varno);
1080 if (dump_file && (dump_flags & TDF_DETAILS))
1082 fprintf (dump_file, "Need to reexamine ");
1083 print_generic_expr (dump_file, var, dump_flags);
1084 fprintf (dump_file, "\n");
1090 /* Helper function for check_for_plus_in_loops. Called recursively
1091 to detect loops. */
1093 static void
1094 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1095 unsigned int depth)
1097 gimple *stmt = SSA_NAME_DEF_STMT (var);
1098 unsigned int varno = SSA_NAME_VERSION (var);
1100 if (osi->depths[varno])
1102 if (osi->depths[varno] != depth)
1104 unsigned int *sp;
1106 /* Found a loop involving pointer addition. */
1107 for (sp = osi->tos; sp > osi->stack; )
1109 --sp;
1110 bitmap_clear_bit (osi->reexamine, *sp);
1111 bitmap_set_bit (computed[osi->object_size_type], *sp);
1112 object_sizes[osi->object_size_type][*sp] = 0;
1113 if (*sp == varno)
1114 break;
1117 return;
1119 else if (! bitmap_bit_p (osi->reexamine, varno))
1120 return;
1122 osi->depths[varno] = depth;
1123 *osi->tos++ = varno;
1125 switch (gimple_code (stmt))
1128 case GIMPLE_ASSIGN:
1130 if ((gimple_assign_single_p (stmt)
1131 || gimple_assign_unary_nop_p (stmt))
1132 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1134 tree rhs = gimple_assign_rhs1 (stmt);
1136 check_for_plus_in_loops_1 (osi, rhs, depth);
1138 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1140 tree basevar = gimple_assign_rhs1 (stmt);
1141 tree cst = gimple_assign_rhs2 (stmt);
1143 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1145 check_for_plus_in_loops_1 (osi, basevar,
1146 depth + !integer_zerop (cst));
1148 else
1149 gcc_unreachable ();
1150 break;
1153 case GIMPLE_CALL:
1155 gcall *call_stmt = as_a <gcall *> (stmt);
1156 tree arg = pass_through_call (call_stmt);
1157 if (arg)
1159 if (TREE_CODE (arg) == SSA_NAME)
1160 check_for_plus_in_loops_1 (osi, arg, depth);
1161 else
1162 gcc_unreachable ();
1164 break;
1167 case GIMPLE_PHI:
1169 unsigned i;
1171 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1173 tree rhs = gimple_phi_arg (stmt, i)->def;
1175 if (TREE_CODE (rhs) == SSA_NAME)
1176 check_for_plus_in_loops_1 (osi, rhs, depth);
1178 break;
1181 default:
1182 gcc_unreachable ();
1185 osi->depths[varno] = 0;
1186 osi->tos--;
1190 /* Check if some pointer we are computing object size of is being increased
1191 within a loop. If yes, assume all the SSA variables participating in
1192 that loop have minimum object sizes 0. */
1194 static void
1195 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1197 gimple *stmt = SSA_NAME_DEF_STMT (var);
1199 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1200 and looked for a POINTER_PLUS_EXPR in the pass-through
1201 argument, if any. In GIMPLE, however, such an expression
1202 is not a valid call operand. */
1204 if (is_gimple_assign (stmt)
1205 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1207 tree basevar = gimple_assign_rhs1 (stmt);
1208 tree cst = gimple_assign_rhs2 (stmt);
1210 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1212 if (integer_zerop (cst))
1213 return;
1215 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1216 *osi->tos++ = SSA_NAME_VERSION (basevar);
1217 check_for_plus_in_loops_1 (osi, var, 2);
1218 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1219 osi->tos--;
1224 /* Initialize data structures for the object size computation. */
1226 void
1227 init_object_sizes (void)
1229 int object_size_type;
1231 if (computed[0])
1232 return;
1234 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1236 object_sizes[object_size_type].safe_grow (num_ssa_names);
1237 computed[object_size_type] = BITMAP_ALLOC (NULL);
1240 init_offset_limit ();
1244 /* Destroy data structures after the object size computation. */
1246 void
1247 fini_object_sizes (void)
1249 int object_size_type;
1251 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1253 object_sizes[object_size_type].release ();
1254 BITMAP_FREE (computed[object_size_type]);
1259 /* Simple pass to optimize all __builtin_object_size () builtins. */
1261 namespace {
1263 const pass_data pass_data_object_sizes =
1265 GIMPLE_PASS, /* type */
1266 "objsz", /* name */
1267 OPTGROUP_NONE, /* optinfo_flags */
1268 TV_NONE, /* tv_id */
1269 ( PROP_cfg | PROP_ssa ), /* properties_required */
1270 0, /* properties_provided */
1271 0, /* properties_destroyed */
1272 0, /* todo_flags_start */
1273 0, /* todo_flags_finish */
1276 class pass_object_sizes : public gimple_opt_pass
1278 public:
1279 pass_object_sizes (gcc::context *ctxt)
1280 : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
1283 /* opt_pass methods: */
1284 opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
1285 void set_pass_param (unsigned int n, bool param)
1287 gcc_assert (n == 0);
1288 insert_min_max_p = param;
1290 virtual unsigned int execute (function *);
1292 private:
1293 /* Determines whether the pass instance creates MIN/MAX_EXPRs. */
1294 bool insert_min_max_p;
1295 }; // class pass_object_sizes
1297 /* Dummy valueize function. */
1299 static tree
1300 do_valueize (tree t)
1302 return t;
1305 unsigned int
1306 pass_object_sizes::execute (function *fun)
1308 basic_block bb;
1309 FOR_EACH_BB_FN (bb, fun)
1311 gimple_stmt_iterator i;
1312 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1314 tree result;
1315 gimple *call = gsi_stmt (i);
1316 if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
1317 continue;
1319 init_object_sizes ();
1321 /* If insert_min_max_p, only attempt to fold
1322 __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
1323 and rather than folding the builtin to the constant if any,
1324 create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
1325 call result and the computed constant. */
1326 if (insert_min_max_p)
1328 tree ost = gimple_call_arg (call, 1);
1329 if (tree_fits_uhwi_p (ost))
1331 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
1332 tree ptr = gimple_call_arg (call, 0);
1333 tree lhs = gimple_call_lhs (call);
1334 if ((object_size_type == 1 || object_size_type == 3)
1335 && (TREE_CODE (ptr) == ADDR_EXPR
1336 || TREE_CODE (ptr) == SSA_NAME)
1337 && lhs)
1339 tree type = TREE_TYPE (lhs);
1340 unsigned HOST_WIDE_INT bytes;
1341 if (compute_builtin_object_size (ptr, object_size_type,
1342 &bytes)
1343 && wi::fits_to_tree_p (bytes, type))
1345 tree tem = make_ssa_name (type);
1346 gimple_call_set_lhs (call, tem);
1347 enum tree_code code
1348 = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
1349 tree cst = build_int_cstu (type, bytes);
1350 gimple *g
1351 = gimple_build_assign (lhs, code, tem, cst);
1352 gsi_insert_after (&i, g, GSI_NEW_STMT);
1353 update_stmt (call);
1357 continue;
1360 tree lhs = gimple_call_lhs (call);
1361 if (!lhs)
1362 continue;
1364 result = gimple_fold_stmt_to_constant (call, do_valueize);
1365 if (!result)
1367 tree ost = gimple_call_arg (call, 1);
1369 if (tree_fits_uhwi_p (ost))
1371 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
1373 if (object_size_type < 2)
1374 result = fold_convert (size_type_node,
1375 integer_minus_one_node);
1376 else if (object_size_type < 4)
1377 result = build_zero_cst (size_type_node);
1380 if (!result)
1381 continue;
1384 gcc_assert (TREE_CODE (result) == INTEGER_CST);
1386 if (dump_file && (dump_flags & TDF_DETAILS))
1388 fprintf (dump_file, "Simplified\n ");
1389 print_gimple_stmt (dump_file, call, 0, dump_flags);
1390 fprintf (dump_file, " to ");
1391 print_generic_expr (dump_file, result);
1392 fprintf (dump_file, "\n");
1395 /* Propagate into all uses and fold those stmts. */
1396 replace_uses_by (lhs, result);
1400 fini_object_sizes ();
1401 return 0;
1404 } // anon namespace
1406 gimple_opt_pass *
1407 make_pass_object_sizes (gcc::context *ctxt)
1409 return new pass_object_sizes (ctxt);