Daily bump.
[official-gcc.git] / gcc / tree-object-size.c
blob4334e05ef70b77112f38dd0e75be56beab8e4154
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2021 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35 #include "stringpool.h"
36 #include "attribs.h"
/* State shared by the object-size computation routines while walking
   SSA use-def chains for one query.  */
struct object_size_info
{
  /* The second argument of __builtin_object_size: bit 0 selects the
     subobject variant, bit 1 selects minimum instead of maximum.  */
  int object_size_type;
  /* Which pass is running: 0 (initial UD walk), 1 (loop detection) or
     2 (reexamination until a fixed point).  */
  unsigned char pass;
  /* Set when a reexamination pass changed some object size; drives the
     fixed-point loop in compute_builtin_object_size.  */
  bool changed;
  /* SSA name versions already visited, and those found in dependency
     cycles that need to be reexamined.  */
  bitmap visited, reexamine;
  /* Per-SSA-name depth used by check_for_plus_in_loops_1 (pass 1 only).  */
  unsigned int *depths;
  /* Explicit stack of SSA name versions being examined, and its top.  */
  unsigned int *stack, *tos;
};
48 static tree compute_object_offset (const_tree, const_tree);
49 static bool addr_object_size (struct object_size_info *,
50 const_tree, int, unsigned HOST_WIDE_INT *);
51 static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
52 static tree pass_through_call (const gcall *);
53 static void collect_object_sizes_for (struct object_size_info *, tree);
54 static void expr_object_size (struct object_size_info *, tree, tree);
55 static bool merge_object_sizes (struct object_size_info *, tree, tree,
56 unsigned HOST_WIDE_INT);
57 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
58 static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
59 static void init_offset_limit (void);
60 static void check_for_plus_in_loops (struct object_size_info *, tree);
61 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
62 unsigned int);
64 /* object_sizes[0] is upper bound for number of bytes till the end of
65 the object.
66 object_sizes[1] is upper bound for number of bytes till the end of
67 the subobject (innermost array or field with address taken).
68 object_sizes[2] is lower bound for number of bytes till the end of
69 the object and object_sizes[3] lower bound for subobject. */
70 static vec<unsigned HOST_WIDE_INT> object_sizes[4];
72 /* Bitmaps what object sizes have been computed already. */
73 static bitmap computed[4];
75 /* Maximum value of offset we consider to be addition. */
76 static unsigned HOST_WIDE_INT offset_limit;
78 static inline unsigned HOST_WIDE_INT
79 unknown (int object_size_type)
81 return ((unsigned HOST_WIDE_INT) -((object_size_type >> 1) ^ 1));
84 /* Initialize OFFSET_LIMIT variable. */
85 static void
86 init_offset_limit (void)
88 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
89 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
90 else
91 offset_limit = -1;
92 offset_limit /= 2;
/* Compute the byte offset of EXPR within VAR, where EXPR is a handled
   component reference (COMPONENT_REF, ARRAY_REF, ...) rooted at VAR.
   Return a sizetype tree, or error_mark_node if the offset cannot be
   determined.  Recurses structurally on the reference chain.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* Field offset is the byte offset plus the bit offset scaled
	 down to bytes.  */
      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These do not change the offset; look through them.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* The imaginary part sits one element past the real part.  */
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      if (! integer_zerop (low_bound))
	t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  /* A negative index is folded as subtraction of its negation
	     so the arithmetic stays in unsigned sizetype.  */
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
/* Returns the size of the object designated by DECL considering its
   initializer if it either has one or if it would not affect its size,
   otherwise the size of the object without the initializer when MIN
   is true, else null.  An object's initializer affects the object's
   size if it's a struct type with a flexible array member.  */

tree
decl_init_size (tree decl, bool min)
{
  tree size = DECL_SIZE_UNIT (decl);
  tree type = TREE_TYPE (decl);
  /* Only a RECORD_TYPE can end in a flexible array member; everything
     else is fully described by DECL_SIZE_UNIT.  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return size;

  tree last = last_field (type);
  if (!last)
    return size;

  /* A flexible array member is a trailing array with no TYPE_SIZE.  */
  tree last_type = TREE_TYPE (last);
  if (TREE_CODE (last_type) != ARRAY_TYPE
      || TYPE_SIZE (last_type))
    return size;

  /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
     of the initializer and sometimes doesn't.  */
  size = TYPE_SIZE_UNIT (type);
  tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
  tree compsize = component_ref_size (ref);
  if (!compsize)
    return min ? size : NULL_TREE;

  /* The size includes tail padding and initializer elements.  */
  tree pos = byte_position (last);
  size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
  return size;
}
/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   On success store the size in *PSIZE and return true; otherwise leave
   *PSIZE set to unknown (object_size_type) and return false.
   OSI may be null when called outside the SSA propagation in
   compute_builtin_object_size.  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type, unsigned HOST_WIDE_INT *psize)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown (object_size_type);

  /* Strip component references to find the base object.  */
  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (!pt_var)
    return false;

  if (TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      /* For *p-style bases, the size of the object p points to must be
	 computed first; reuse in-progress SSA results via OSI when
	 possible, otherwise fall back to a fresh (maximum-variant)
	 computation.  */
      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
				       object_size_type & ~1, &sz);
	}
      else
	{
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown (object_size_type);
	}
      if (sz != unknown (object_size_type))
	{
	  /* Subtract the constant MEM_REF offset from the size, saturating
	     at zero for negative remainders.  */
	  offset_int mem_offset;
	  if (mem_ref_offset (pt_var).is_constant (&mem_offset))
	    {
	      offset_int dsz = wi::sub (sz, mem_offset);
	      if (wi::neg_p (dsz))
		sz = 0;
	      else if (wi::fits_uhwi_p (dsz))
		sz = dsz.to_uhwi ();
	      else
		sz = unknown (object_size_type);
	    }
	  else
	    sz = unknown (object_size_type);
	}

      if (sz != unknown (object_size_type) && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (DECL_P (pt_var))
    {
      pt_var_size = decl_init_size (pt_var, object_size_type & 2);
      if (!pt_var_size)
	return false;
    }
  else if (TREE_CODE (pt_var) == STRING_CST)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var_size)
    {
      /* Validate the size determined above.  */
      if (!tree_fits_uhwi_p (pt_var_size)
	  || tree_to_uhwi (pt_var_size) >= offset_limit)
	return false;
    }

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  /* Subobject variant: find the innermost accessed array or
	     field whose size should bound the result.  */
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    /* Index is below the array's upper bound,
			       so this is not a trailing-array access.  */
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    /* Look through unions to the enclosing record.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			/* Is there another FIELD_DECL after this one?  If
			   so the array is not the trailing member.  */
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return false;
      else
	var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  /* Also bound the result by the remaining size of the whole
	     pointed-to object; take the smaller of the two.  */
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return false;
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    {
      *psize = tree_to_uhwi (bytes);
      return true;
    }

  return false;
}
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles calls to functions declared with attribute alloc_size.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown (object_size_type).  */

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  gcc_assert (is_gimple_call (call));

  /* Prefer the decl's type so the attribute on the definition wins
     over whatever the call site's fntype says.  */
  tree calltype;
  if (tree callfn = gimple_call_fndecl (call))
    calltype = TREE_TYPE (callfn);
  else
    calltype = gimple_call_fntype (call);

  if (!calltype)
    return unknown (object_size_type);

  /* Set to positions of alloc_size arguments.  */
  int arg1 = -1, arg2 = -1;
  tree alloc_size = lookup_attribute ("alloc_size",
				      TYPE_ATTRIBUTES (calltype));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      /* alloc_size attribute arguments are 1-based; convert to
	 0-based call argument indices.  */
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
	arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  /* Only constant size arguments within range are usable.  */
  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int)gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown (object_size_type);

  /* Two arguments mean count * element-size (calloc-style).  */
  tree bytes = NULL_TREE;
  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
			fold_convert (sizetype, gimple_call_arg (call, arg1)),
			fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown (object_size_type);
}
497 /* If object size is propagated from one of function's arguments directly
498 to its return value, return that argument for GIMPLE_CALL statement CALL.
499 Otherwise return NULL. */
501 static tree
502 pass_through_call (const gcall *call)
504 unsigned rf = gimple_call_return_flags (call);
505 if (rf & ERF_RETURNS_ARG)
507 unsigned argnum = rf & ERF_RETURN_ARG_MASK;
508 if (argnum < gimple_call_num_args (call))
509 return gimple_call_arg (call, argnum);
512 /* __builtin_assume_aligned is intentionally not marked RET1. */
513 if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
514 return gimple_call_arg (call, 0);
516 return NULL_TREE;
/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  OBJECT_SIZE_TYPE is the second argument
   to __builtin_object_size.
   Returns true on success and false when the object size could not
   be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
			     unsigned HOST_WIDE_INT *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown (object_size_type);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & 1)
	return false;

      /* When not optimizing, rather than failing, make a small effort
	 to determine the object size without the full benefit of
	 the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      /* Handle the single-level PTR = BASE + OFFSET case only.  */
	      tree offset = gimple_assign_rhs2 (def);
	      ptr = gimple_assign_rhs1 (def);

	      if (tree_fits_shwi_p (offset)
		  && compute_builtin_object_size (ptr, object_size_type,
						  psize))
		{
		  /* Return zero when the offset is out of bounds.  */
		  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
		  *psize = off < *psize ? *psize - off : 0;
		  return true;
		}
	    }
	}
      return false;
    }

  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      struct object_size_info osi;
      bitmap_iterator bi;
      unsigned int i;

      if (num_ssa_names > object_sizes[object_size_type].length ())
	object_sizes[object_size_type].safe_grow (num_ssa_names, true);
      if (dump_file)
	{
	  fprintf (dump_file, "Computing %s %sobject size for ",
		   (object_size_type & 2) ? "minimum" : "maximum",
		   (object_size_type & 1) ? "sub" : "");
	  print_generic_expr (dump_file, ptr, dump_flags);
	  fprintf (dump_file, ":\n");
	}

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.object_size_type = object_size_type;
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that
	 can be computed.  osi.reexamine bitmap at the end will
	 contain what variables were found in dependency cycles
	 and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
	 that need reexamination, until no object sizes are
	 increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
	{
	  bitmap reexamine = BITMAP_ALLOC (NULL);

	  /* If looking for minimum instead of maximum object size,
	     detect cases where a pointer is increased in a loop.
	     Although even without this detection pass 2 would eventually
	     terminate, it could take a long time.  If a pointer is
	     increasing this way, we need to assume 0 object size.
	     E.g. p = &buf[0]; while (cond) p = p + 4;  */
	  if (object_size_type & 2)
	    {
	      osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
	      osi.stack = XNEWVEC (unsigned int, num_ssa_names);
	      osi.tos = osi.stack;
	      osi.pass = 1;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  check_for_plus_in_loops (&osi, ssa_name (i));

	      free (osi.depths);
	      osi.depths = NULL;
	      free (osi.stack);
	      osi.stack = NULL;
	      osi.tos = NULL;
	    }

	  do
	    {
	      osi.pass = 2;
	      osi.changed = false;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  {
		    collect_object_sizes_for (&osi, ssa_name (i));
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Reexamining ");
			print_generic_expr (dump_file, ssa_name (i),
					    dump_flags);
			fprintf (dump_file, "\n");
		      }
		  }
	    }
	  while (osi.changed);

	  BITMAP_FREE (reexamine);
	}
      /* Whatever is still marked for reexamination has converged;
	 record it as computed.  */
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
	{
	  EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
	    if (object_sizes[object_size_type][i]
		!= unknown (object_size_type))
	      {
		print_generic_expr (dump_file, ssa_name (i),
				    dump_flags);
		fprintf (dump_file,
			 ": %s %sobject size "
			 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			 (object_size_type & 2) ? "minimum" : "maximum",
			 (object_size_type & 1) ? "sub" : "",
			 object_sizes[object_size_type][i]);
	      }
	}

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
  return *psize != unknown (object_size_type);
}
696 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
698 static void
699 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
701 int object_size_type = osi->object_size_type;
702 unsigned int varno = SSA_NAME_VERSION (ptr);
703 unsigned HOST_WIDE_INT bytes;
705 gcc_assert (object_sizes[object_size_type][varno]
706 != unknown (object_size_type));
707 gcc_assert (osi->pass == 0);
709 if (TREE_CODE (value) == WITH_SIZE_EXPR)
710 value = TREE_OPERAND (value, 0);
712 /* Pointer variables should have been handled by merge_object_sizes. */
713 gcc_assert (TREE_CODE (value) != SSA_NAME
714 || !POINTER_TYPE_P (TREE_TYPE (value)));
716 if (TREE_CODE (value) == ADDR_EXPR)
717 addr_object_size (osi, value, object_size_type, &bytes);
718 else
719 bytes = unknown (object_size_type);
721 if ((object_size_type & 2) == 0)
723 if (object_sizes[object_size_type][varno] < bytes)
724 object_sizes[object_size_type][varno] = bytes;
726 else
728 if (object_sizes[object_size_type][varno] > bytes)
729 object_sizes[object_size_type][varno] = bytes;
734 /* Compute object_sizes for PTR, defined to the result of a call. */
736 static void
737 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
739 int object_size_type = osi->object_size_type;
740 unsigned int varno = SSA_NAME_VERSION (ptr);
741 unsigned HOST_WIDE_INT bytes;
743 gcc_assert (is_gimple_call (call));
745 gcc_assert (object_sizes[object_size_type][varno]
746 != unknown (object_size_type));
747 gcc_assert (osi->pass == 0);
749 bytes = alloc_object_size (call, object_size_type);
751 if ((object_size_type & 2) == 0)
753 if (object_sizes[object_size_type][varno] < bytes)
754 object_sizes[object_size_type][varno] = bytes;
756 else
758 if (object_sizes[object_size_type][varno] > bytes)
759 object_sizes[object_size_type][varno] = bytes;
764 /* Compute object_sizes for PTR, defined to an unknown value. */
766 static void
767 unknown_object_size (struct object_size_info *osi, tree ptr)
769 int object_size_type = osi->object_size_type;
770 unsigned int varno = SSA_NAME_VERSION (ptr);
771 unsigned HOST_WIDE_INT bytes = unknown (object_size_type);
773 gcc_checking_assert (object_sizes[object_size_type][varno] != bytes);
774 gcc_checking_assert (osi->pass == 0);
776 object_sizes[object_size_type][varno] = bytes;
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later (i.e. ORIG is still
   in OSI's reexamine set).  Sets OSI->changed when DEST's recorded
   size actually improves, which drives the pass-2 fixed point.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  /* Already pinned at unknown; nothing can improve it.  */
  if (object_sizes[object_size_type][varno] == unknown (object_size_type))
    return false;
  /* Offsets at or past the limit are treated as pointer subtraction,
     making the size unknown.  */
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown (object_size_type);
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown (object_size_type))
    orig_bytes = (offset > orig_bytes)
		 ? HOST_WIDE_INT_0U : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR (or an equivalent &MEM_REF ADDR_EXPR).
   Return true if the object size might need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      /* &MEM_REF[ptr + off] is equivalent to ptr + off.  */
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown (object_size_type))
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
	bytes = unknown (object_size_type);
      else if (TREE_CODE (op0) == SSA_NAME)
	return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

	  /* op0 will be ADDR_EXPR here.  */
	  addr_object_size (osi, op0, object_size_type, &bytes);
	  if (bytes == unknown (object_size_type))
	    ;
	  else if (off > offset_limit)
	    bytes = unknown (object_size_type);
	  else if (off > bytes)
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown (object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
899 /* Compute object_sizes for VAR, defined at STMT, which is
900 a COND_EXPR. Return true if the object size might need reexamination
901 later. */
903 static bool
904 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
906 tree then_, else_;
907 int object_size_type = osi->object_size_type;
908 unsigned int varno = SSA_NAME_VERSION (var);
909 bool reexamine = false;
911 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
913 if (object_sizes[object_size_type][varno] == unknown (object_size_type))
914 return false;
916 then_ = gimple_assign_rhs2 (stmt);
917 else_ = gimple_assign_rhs3 (stmt);
919 if (TREE_CODE (then_) == SSA_NAME)
920 reexamine |= merge_object_sizes (osi, var, then_, 0);
921 else
922 expr_object_size (osi, var, then_);
924 if (object_sizes[object_size_type][varno] == unknown (object_size_type))
925 return reexamine;
927 if (TREE_CODE (else_) == SSA_NAME)
928 reexamine |= merge_object_sizes (osi, var, else_, 0);
929 else
930 expr_object_size (osi, var, else_);
932 return reexamine;
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown (object_size_type).
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown (object_size_type) for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* First visit: seed with the identity element of the merge
	     (all-ones for minimum, zero for maximum).  */
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  /* Dispatch on the kind of defining statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs, 0);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg, 0);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, call_stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown (object_size_type);
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
	/* Uninitialized SSA names point nowhere.  */
	object_sizes[object_size_type][varno] = unknown (object_size_type);
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (object_sizes[object_size_type][varno]
		== unknown (object_size_type))
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown (object_size_type))
    {
      /* The size is final (or pinned at unknown); no further
	 reexamination is needed.  */
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  DEPTH counts how many nonzero pointer additions
   have been seen on the current path; finding VAR again at a different
   depth means the pointer grows around the cycle, so all names on the
   cycle get object size 0.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  Pop the stack
	     down to VAR, forcing each popped name to size 0 and
	     marking it computed.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* Only a nonzero addition increases the depth.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Unwind the explicit stack entry for VAR.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
1197 /* Check if some pointer we are computing object size of is being increased
1198 within a loop. If yes, assume all the SSA variables participating in
1199 that loop have minimum object sizes 0. */
1201 static void
1202 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1204 gimple *stmt = SSA_NAME_DEF_STMT (var);
1206 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1207 and looked for a POINTER_PLUS_EXPR in the pass-through
1208 argument, if any. In GIMPLE, however, such an expression
1209 is not a valid call operand. */
1211 if (is_gimple_assign (stmt)
1212 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1214 tree basevar = gimple_assign_rhs1 (stmt);
1215 tree cst = gimple_assign_rhs2 (stmt);
1217 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1219 if (integer_zerop (cst))
1220 return;
1222 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1223 *osi->tos++ = SSA_NAME_VERSION (basevar);
1224 check_for_plus_in_loops_1 (osi, var, 2);
1225 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1226 osi->tos--;
1231 /* Initialize data structures for the object size computation. */
1233 void
1234 init_object_sizes (void)
1236 int object_size_type;
1238 if (computed[0])
1239 return;
1241 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1243 object_sizes[object_size_type].safe_grow (num_ssa_names, true);
1244 computed[object_size_type] = BITMAP_ALLOC (NULL);
1247 init_offset_limit ();
1251 /* Destroy data structures after the object size computation. */
1253 void
1254 fini_object_sizes (void)
1256 int object_size_type;
1258 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1260 object_sizes[object_size_type].release ();
1261 BITMAP_FREE (computed[object_size_type]);
1265 /* Dummy valueize function. */
1267 static tree
1268 do_valueize (tree t)
1270 return t;
1273 static unsigned int
1274 object_sizes_execute (function *fun, bool insert_min_max_p)
1276 basic_block bb;
1277 FOR_EACH_BB_FN (bb, fun)
1279 gimple_stmt_iterator i;
1280 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1282 tree result;
1283 gimple *call = gsi_stmt (i);
1284 if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
1285 continue;
1287 tree lhs = gimple_call_lhs (call);
1288 if (!lhs)
1289 continue;
1291 init_object_sizes ();
1293 /* If insert_min_max_p, only attempt to fold
1294 __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
1295 and rather than folding the builtin to the constant if any,
1296 create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
1297 call result and the computed constant. */
1298 if (insert_min_max_p)
1300 tree ost = gimple_call_arg (call, 1);
1301 if (tree_fits_uhwi_p (ost))
1303 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
1304 tree ptr = gimple_call_arg (call, 0);
1305 if ((object_size_type == 1 || object_size_type == 3)
1306 && (TREE_CODE (ptr) == ADDR_EXPR
1307 || TREE_CODE (ptr) == SSA_NAME))
1309 tree type = TREE_TYPE (lhs);
1310 unsigned HOST_WIDE_INT bytes;
1311 if (compute_builtin_object_size (ptr, object_size_type,
1312 &bytes)
1313 && wi::fits_to_tree_p (bytes, type))
1315 tree tem = make_ssa_name (type);
1316 gimple_call_set_lhs (call, tem);
1317 enum tree_code code
1318 = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
1319 tree cst = build_int_cstu (type, bytes);
1320 gimple *g
1321 = gimple_build_assign (lhs, code, tem, cst);
1322 gsi_insert_after (&i, g, GSI_NEW_STMT);
1323 update_stmt (call);
1327 continue;
1330 result = gimple_fold_stmt_to_constant (call, do_valueize);
1331 if (!result)
1333 tree ost = gimple_call_arg (call, 1);
1335 if (tree_fits_uhwi_p (ost))
1337 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
1339 if (object_size_type < 2)
1340 result = fold_convert (size_type_node,
1341 integer_minus_one_node);
1342 else if (object_size_type < 4)
1343 result = build_zero_cst (size_type_node);
1346 if (!result)
1347 continue;
1350 gcc_assert (TREE_CODE (result) == INTEGER_CST);
1352 if (dump_file && (dump_flags & TDF_DETAILS))
1354 fprintf (dump_file, "Simplified\n ");
1355 print_gimple_stmt (dump_file, call, 0, dump_flags);
1356 fprintf (dump_file, " to ");
1357 print_generic_expr (dump_file, result);
1358 fprintf (dump_file, "\n");
1361 /* Propagate into all uses and fold those stmts. */
1362 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1363 replace_uses_by (lhs, result);
1364 else
1365 replace_call_with_value (&i, result);
1369 fini_object_sizes ();
1370 return 0;
1373 /* Simple pass to optimize all __builtin_object_size () builtins. */
1375 namespace {
1377 const pass_data pass_data_object_sizes =
1379 GIMPLE_PASS, /* type */
1380 "objsz", /* name */
1381 OPTGROUP_NONE, /* optinfo_flags */
1382 TV_NONE, /* tv_id */
1383 ( PROP_cfg | PROP_ssa ), /* properties_required */
1384 PROP_objsz, /* properties_provided */
1385 0, /* properties_destroyed */
1386 0, /* todo_flags_start */
1387 0, /* todo_flags_finish */
1390 class pass_object_sizes : public gimple_opt_pass
1392 public:
1393 pass_object_sizes (gcc::context *ctxt)
1394 : gimple_opt_pass (pass_data_object_sizes, ctxt)
1397 /* opt_pass methods: */
1398 opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
1399 virtual unsigned int execute (function *fun)
1401 return object_sizes_execute (fun, false);
1403 }; // class pass_object_sizes
1405 } // anon namespace
1407 gimple_opt_pass *
1408 make_pass_object_sizes (gcc::context *ctxt)
1410 return new pass_object_sizes (ctxt);
1413 /* Early version of pass to optimize all __builtin_object_size () builtins. */
1415 namespace {
1417 const pass_data pass_data_early_object_sizes =
1419 GIMPLE_PASS, /* type */
1420 "early_objsz", /* name */
1421 OPTGROUP_NONE, /* optinfo_flags */
1422 TV_NONE, /* tv_id */
1423 ( PROP_cfg | PROP_ssa ), /* properties_required */
1424 0, /* properties_provided */
1425 0, /* properties_destroyed */
1426 0, /* todo_flags_start */
1427 0, /* todo_flags_finish */
1430 class pass_early_object_sizes : public gimple_opt_pass
1432 public:
1433 pass_early_object_sizes (gcc::context *ctxt)
1434 : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
1437 /* opt_pass methods: */
1438 virtual unsigned int execute (function *fun)
1440 return object_sizes_execute (fun, true);
1442 }; // class pass_object_sizes
1444 } // anon namespace
1446 gimple_opt_pass *
1447 make_pass_early_object_sizes (gcc::context *ctxt)
1449 return new pass_early_object_sizes (ctxt);