* c-warn.c (warn_if_unused_value): Remove WITH_CLEANUP_EXPR handling.
[official-gcc.git] / gcc / tree-object-size.c
blob723a5283da60232beffdd0d91f6dc8b9a0b63d6d
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2017 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
36 struct object_size_info
38 int object_size_type;
39 unsigned char pass;
40 bool changed;
41 bitmap visited, reexamine;
42 unsigned int *depths;
43 unsigned int *stack, *tos;
46 static const unsigned HOST_WIDE_INT unknown[4] = {
47 HOST_WIDE_INT_M1U,
48 HOST_WIDE_INT_M1U,
53 static tree compute_object_offset (const_tree, const_tree);
54 static bool addr_object_size (struct object_size_info *,
55 const_tree, int, unsigned HOST_WIDE_INT *);
56 static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
57 static tree pass_through_call (const gcall *);
58 static void collect_object_sizes_for (struct object_size_info *, tree);
59 static void expr_object_size (struct object_size_info *, tree, tree);
60 static bool merge_object_sizes (struct object_size_info *, tree, tree,
61 unsigned HOST_WIDE_INT);
62 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
63 static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
64 static void init_offset_limit (void);
65 static void check_for_plus_in_loops (struct object_size_info *, tree);
66 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
67 unsigned int);
69 /* object_sizes[0] is upper bound for number of bytes till the end of
70 the object.
71 object_sizes[1] is upper bound for number of bytes till the end of
72 the subobject (innermost array or field with address taken).
73 object_sizes[2] is lower bound for number of bytes till the end of
74 the object and object_sizes[3] lower bound for subobject. */
75 static vec<unsigned HOST_WIDE_INT> object_sizes[4];
77 /* Bitmaps what object sizes have been computed already. */
78 static bitmap computed[4];
80 /* Maximum value of offset we consider to be addition. */
81 static unsigned HOST_WIDE_INT offset_limit;
84 /* Initialize OFFSET_LIMIT variable. */
85 static void
86 init_offset_limit (void)
88 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
89 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
90 else
91 offset_limit = -1;
92 offset_limit /= 2;
96 /* Compute offset of EXPR within VAR. Return error_mark_node
97 if unknown. */
99 static tree
100 compute_object_offset (const_tree expr, const_tree var)
102 enum tree_code code = PLUS_EXPR;
103 tree base, off, t;
105 if (expr == var)
106 return size_zero_node;
108 switch (TREE_CODE (expr))
110 case COMPONENT_REF:
111 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
112 if (base == error_mark_node)
113 return base;
115 t = TREE_OPERAND (expr, 1);
116 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
117 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
118 / BITS_PER_UNIT));
119 break;
121 case REALPART_EXPR:
122 CASE_CONVERT:
123 case VIEW_CONVERT_EXPR:
124 case NON_LVALUE_EXPR:
125 return compute_object_offset (TREE_OPERAND (expr, 0), var);
127 case IMAGPART_EXPR:
128 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
129 if (base == error_mark_node)
130 return base;
132 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
133 break;
135 case ARRAY_REF:
136 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
137 if (base == error_mark_node)
138 return base;
140 t = TREE_OPERAND (expr, 1);
141 tree low_bound, unit_size;
142 low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
143 unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
144 if (! integer_zerop (low_bound))
145 t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
146 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
148 code = MINUS_EXPR;
149 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
151 t = fold_convert (sizetype, t);
152 off = size_binop (MULT_EXPR, unit_size, t);
153 break;
155 case MEM_REF:
156 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
157 return wide_int_to_tree (sizetype, mem_ref_offset (expr));
159 default:
160 return error_mark_node;
163 return size_binop (code, base, off);
167 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
168 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
169 If unknown, return unknown[object_size_type]. */
171 static bool
172 addr_object_size (struct object_size_info *osi, const_tree ptr,
173 int object_size_type, unsigned HOST_WIDE_INT *psize)
175 tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;
177 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
179 /* Set to unknown and overwrite just before returning if the size
180 could be determined. */
181 *psize = unknown[object_size_type];
183 pt_var = TREE_OPERAND (ptr, 0);
184 while (handled_component_p (pt_var))
185 pt_var = TREE_OPERAND (pt_var, 0);
187 if (pt_var
188 && TREE_CODE (pt_var) == MEM_REF)
190 unsigned HOST_WIDE_INT sz;
192 if (!osi || (object_size_type & 1) != 0
193 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
195 compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
196 object_size_type & ~1, &sz);
198 else
200 tree var = TREE_OPERAND (pt_var, 0);
201 if (osi->pass == 0)
202 collect_object_sizes_for (osi, var);
203 if (bitmap_bit_p (computed[object_size_type],
204 SSA_NAME_VERSION (var)))
205 sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
206 else
207 sz = unknown[object_size_type];
209 if (sz != unknown[object_size_type])
211 offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
212 if (wi::neg_p (dsz))
213 sz = 0;
214 else if (wi::fits_uhwi_p (dsz))
215 sz = dsz.to_uhwi ();
216 else
217 sz = unknown[object_size_type];
220 if (sz != unknown[object_size_type] && sz < offset_limit)
221 pt_var_size = size_int (sz);
223 else if (pt_var
224 && DECL_P (pt_var)
225 && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
226 && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
227 pt_var_size = DECL_SIZE_UNIT (pt_var);
228 else if (pt_var
229 && TREE_CODE (pt_var) == STRING_CST
230 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
231 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
232 && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
233 < offset_limit)
234 pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
235 else
236 return false;
238 if (pt_var != TREE_OPERAND (ptr, 0))
240 tree var;
242 if (object_size_type & 1)
244 var = TREE_OPERAND (ptr, 0);
246 while (var != pt_var
247 && TREE_CODE (var) != BIT_FIELD_REF
248 && TREE_CODE (var) != COMPONENT_REF
249 && TREE_CODE (var) != ARRAY_REF
250 && TREE_CODE (var) != ARRAY_RANGE_REF
251 && TREE_CODE (var) != REALPART_EXPR
252 && TREE_CODE (var) != IMAGPART_EXPR)
253 var = TREE_OPERAND (var, 0);
254 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
255 var = TREE_OPERAND (var, 0);
256 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
257 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
258 || (pt_var_size
259 && tree_int_cst_lt (pt_var_size,
260 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
261 var = pt_var;
262 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
264 tree v = var;
265 /* For &X->fld, compute object size only if fld isn't the last
266 field, as struct { int i; char c[1]; } is often used instead
267 of flexible array member. */
268 while (v && v != pt_var)
269 switch (TREE_CODE (v))
271 case ARRAY_REF:
272 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
273 && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
275 tree domain
276 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
277 if (domain
278 && TYPE_MAX_VALUE (domain)
279 && TREE_CODE (TYPE_MAX_VALUE (domain))
280 == INTEGER_CST
281 && tree_int_cst_lt (TREE_OPERAND (v, 1),
282 TYPE_MAX_VALUE (domain)))
284 v = NULL_TREE;
285 break;
288 v = TREE_OPERAND (v, 0);
289 break;
290 case REALPART_EXPR:
291 case IMAGPART_EXPR:
292 v = NULL_TREE;
293 break;
294 case COMPONENT_REF:
295 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
297 v = NULL_TREE;
298 break;
300 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
301 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
302 != UNION_TYPE
303 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
304 != QUAL_UNION_TYPE)
305 break;
306 else
307 v = TREE_OPERAND (v, 0);
308 if (TREE_CODE (v) == COMPONENT_REF
309 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
310 == RECORD_TYPE)
312 tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
313 for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
314 if (TREE_CODE (fld_chain) == FIELD_DECL)
315 break;
317 if (fld_chain)
319 v = NULL_TREE;
320 break;
322 v = TREE_OPERAND (v, 0);
324 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
325 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
326 != UNION_TYPE
327 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
328 != QUAL_UNION_TYPE)
329 break;
330 else
331 v = TREE_OPERAND (v, 0);
332 if (v != pt_var)
333 v = NULL_TREE;
334 else
335 v = pt_var;
336 break;
337 default:
338 v = pt_var;
339 break;
341 if (v == pt_var)
342 var = pt_var;
345 else
346 var = pt_var;
348 if (var != pt_var)
349 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
350 else if (!pt_var_size)
351 return false;
352 else
353 var_size = pt_var_size;
354 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
355 if (bytes != error_mark_node)
357 if (TREE_CODE (bytes) == INTEGER_CST
358 && tree_int_cst_lt (var_size, bytes))
359 bytes = size_zero_node;
360 else
361 bytes = size_binop (MINUS_EXPR, var_size, bytes);
363 if (var != pt_var
364 && pt_var_size
365 && TREE_CODE (pt_var) == MEM_REF
366 && bytes != error_mark_node)
368 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
369 if (bytes2 != error_mark_node)
371 if (TREE_CODE (bytes2) == INTEGER_CST
372 && tree_int_cst_lt (pt_var_size, bytes2))
373 bytes2 = size_zero_node;
374 else
375 bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
376 bytes = size_binop (MIN_EXPR, bytes, bytes2);
380 else if (!pt_var_size)
381 return false;
382 else
383 bytes = pt_var_size;
385 if (tree_fits_uhwi_p (bytes))
387 *psize = tree_to_uhwi (bytes);
388 return true;
391 return false;
395 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
396 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
397 argument from __builtin_object_size. If unknown, return
398 unknown[object_size_type]. */
400 static unsigned HOST_WIDE_INT
401 alloc_object_size (const gcall *call, int object_size_type)
403 tree callee, bytes = NULL_TREE;
404 tree alloc_size;
405 int arg1 = -1, arg2 = -1;
407 gcc_assert (is_gimple_call (call));
409 callee = gimple_call_fndecl (call);
410 if (!callee)
411 return unknown[object_size_type];
413 alloc_size = lookup_attribute ("alloc_size",
414 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
415 if (alloc_size && TREE_VALUE (alloc_size))
417 tree p = TREE_VALUE (alloc_size);
419 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
420 if (TREE_CHAIN (p))
421 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
424 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
425 switch (DECL_FUNCTION_CODE (callee))
427 case BUILT_IN_CALLOC:
428 arg2 = 1;
429 /* fall through */
430 case BUILT_IN_MALLOC:
431 case BUILT_IN_ALLOCA:
432 case BUILT_IN_ALLOCA_WITH_ALIGN:
433 arg1 = 0;
434 default:
435 break;
438 if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
439 || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
440 || (arg2 >= 0
441 && (arg2 >= (int)gimple_call_num_args (call)
442 || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
443 return unknown[object_size_type];
445 if (arg2 >= 0)
446 bytes = size_binop (MULT_EXPR,
447 fold_convert (sizetype, gimple_call_arg (call, arg1)),
448 fold_convert (sizetype, gimple_call_arg (call, arg2)));
449 else if (arg1 >= 0)
450 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
452 if (bytes && tree_fits_uhwi_p (bytes))
453 return tree_to_uhwi (bytes);
455 return unknown[object_size_type];
459 /* If object size is propagated from one of function's arguments directly
460 to its return value, return that argument for GIMPLE_CALL statement CALL.
461 Otherwise return NULL. */
463 static tree
464 pass_through_call (const gcall *call)
466 tree callee = gimple_call_fndecl (call);
468 if (callee
469 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
470 switch (DECL_FUNCTION_CODE (callee))
472 case BUILT_IN_MEMCPY:
473 case BUILT_IN_MEMMOVE:
474 case BUILT_IN_MEMSET:
475 case BUILT_IN_STRCPY:
476 case BUILT_IN_STRNCPY:
477 case BUILT_IN_STRCAT:
478 case BUILT_IN_STRNCAT:
479 case BUILT_IN_MEMCPY_CHK:
480 case BUILT_IN_MEMMOVE_CHK:
481 case BUILT_IN_MEMSET_CHK:
482 case BUILT_IN_STRCPY_CHK:
483 case BUILT_IN_STRNCPY_CHK:
484 case BUILT_IN_STPNCPY_CHK:
485 case BUILT_IN_STRCAT_CHK:
486 case BUILT_IN_STRNCAT_CHK:
487 case BUILT_IN_ASSUME_ALIGNED:
488 if (gimple_call_num_args (call) >= 1)
489 return gimple_call_arg (call, 0);
490 break;
491 default:
492 break;
495 return NULL_TREE;
499 /* Compute __builtin_object_size value for PTR and set *PSIZE to
500 the resulting value. OBJECT_SIZE_TYPE is the second argument
501 to __builtin_object_size. Return true on success and false
502 when the object size could not be determined. */
504 bool
505 compute_builtin_object_size (tree ptr, int object_size_type,
506 unsigned HOST_WIDE_INT *psize)
508 gcc_assert (object_size_type >= 0 && object_size_type <= 3);
510 /* Set to unknown and overwrite just before returning if the size
511 could be determined. */
512 *psize = unknown[object_size_type];
514 if (! offset_limit)
515 init_offset_limit ();
517 if (TREE_CODE (ptr) == ADDR_EXPR)
518 return addr_object_size (NULL, ptr, object_size_type, psize);
520 if (TREE_CODE (ptr) != SSA_NAME
521 || !POINTER_TYPE_P (TREE_TYPE (ptr)))
522 return false;
524 if (computed[object_size_type] == NULL)
526 if (optimize || object_size_type & 1)
527 return false;
529 /* When not optimizing, rather than failing, make a small effort
530 to determine the object size without the full benefit of
531 the (costly) computation below. */
532 gimple *def = SSA_NAME_DEF_STMT (ptr);
533 if (gimple_code (def) == GIMPLE_ASSIGN)
535 tree_code code = gimple_assign_rhs_code (def);
536 if (code == POINTER_PLUS_EXPR)
538 tree offset = gimple_assign_rhs2 (def);
539 ptr = gimple_assign_rhs1 (def);
541 if (tree_fits_shwi_p (offset)
542 && compute_builtin_object_size (ptr, object_size_type, psize))
544 /* Return zero when the offset is out of bounds. */
545 unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
546 *psize = off < *psize ? *psize - off : 0;
547 return true;
551 return false;
554 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
556 struct object_size_info osi;
557 bitmap_iterator bi;
558 unsigned int i;
560 if (num_ssa_names > object_sizes[object_size_type].length ())
561 object_sizes[object_size_type].safe_grow (num_ssa_names);
562 if (dump_file)
564 fprintf (dump_file, "Computing %s %sobject size for ",
565 (object_size_type & 2) ? "minimum" : "maximum",
566 (object_size_type & 1) ? "sub" : "");
567 print_generic_expr (dump_file, ptr, dump_flags);
568 fprintf (dump_file, ":\n");
571 osi.visited = BITMAP_ALLOC (NULL);
572 osi.reexamine = BITMAP_ALLOC (NULL);
573 osi.object_size_type = object_size_type;
574 osi.depths = NULL;
575 osi.stack = NULL;
576 osi.tos = NULL;
578 /* First pass: walk UD chains, compute object sizes that
579 can be computed. osi.reexamine bitmap at the end will
580 contain what variables were found in dependency cycles
581 and therefore need to be reexamined. */
582 osi.pass = 0;
583 osi.changed = false;
584 collect_object_sizes_for (&osi, ptr);
586 /* Second pass: keep recomputing object sizes of variables
587 that need reexamination, until no object sizes are
588 increased or all object sizes are computed. */
589 if (! bitmap_empty_p (osi.reexamine))
591 bitmap reexamine = BITMAP_ALLOC (NULL);
593 /* If looking for minimum instead of maximum object size,
594 detect cases where a pointer is increased in a loop.
595 Although even without this detection pass 2 would eventually
596 terminate, it could take a long time. If a pointer is
597 increasing this way, we need to assume 0 object size.
598 E.g. p = &buf[0]; while (cond) p = p + 4; */
599 if (object_size_type & 2)
601 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
602 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
603 osi.tos = osi.stack;
604 osi.pass = 1;
605 /* collect_object_sizes_for is changing
606 osi.reexamine bitmap, so iterate over a copy. */
607 bitmap_copy (reexamine, osi.reexamine);
608 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
609 if (bitmap_bit_p (osi.reexamine, i))
610 check_for_plus_in_loops (&osi, ssa_name (i));
612 free (osi.depths);
613 osi.depths = NULL;
614 free (osi.stack);
615 osi.stack = NULL;
616 osi.tos = NULL;
621 osi.pass = 2;
622 osi.changed = false;
623 /* collect_object_sizes_for is changing
624 osi.reexamine bitmap, so iterate over a copy. */
625 bitmap_copy (reexamine, osi.reexamine);
626 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
627 if (bitmap_bit_p (osi.reexamine, i))
629 collect_object_sizes_for (&osi, ssa_name (i));
630 if (dump_file && (dump_flags & TDF_DETAILS))
632 fprintf (dump_file, "Reexamining ");
633 print_generic_expr (dump_file, ssa_name (i),
634 dump_flags);
635 fprintf (dump_file, "\n");
639 while (osi.changed);
641 BITMAP_FREE (reexamine);
643 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
644 bitmap_set_bit (computed[object_size_type], i);
646 /* Debugging dumps. */
647 if (dump_file)
649 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
650 if (object_sizes[object_size_type][i]
651 != unknown[object_size_type])
653 print_generic_expr (dump_file, ssa_name (i),
654 dump_flags);
655 fprintf (dump_file,
656 ": %s %sobject size "
657 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
658 (object_size_type & 2) ? "minimum" : "maximum",
659 (object_size_type & 1) ? "sub" : "",
660 object_sizes[object_size_type][i]);
664 BITMAP_FREE (osi.reexamine);
665 BITMAP_FREE (osi.visited);
668 *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
669 return *psize != unknown[object_size_type];
672 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
674 static void
675 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
677 int object_size_type = osi->object_size_type;
678 unsigned int varno = SSA_NAME_VERSION (ptr);
679 unsigned HOST_WIDE_INT bytes;
681 gcc_assert (object_sizes[object_size_type][varno]
682 != unknown[object_size_type]);
683 gcc_assert (osi->pass == 0);
685 if (TREE_CODE (value) == WITH_SIZE_EXPR)
686 value = TREE_OPERAND (value, 0);
688 /* Pointer variables should have been handled by merge_object_sizes. */
689 gcc_assert (TREE_CODE (value) != SSA_NAME
690 || !POINTER_TYPE_P (TREE_TYPE (value)));
692 if (TREE_CODE (value) == ADDR_EXPR)
693 addr_object_size (osi, value, object_size_type, &bytes);
694 else
695 bytes = unknown[object_size_type];
697 if ((object_size_type & 2) == 0)
699 if (object_sizes[object_size_type][varno] < bytes)
700 object_sizes[object_size_type][varno] = bytes;
702 else
704 if (object_sizes[object_size_type][varno] > bytes)
705 object_sizes[object_size_type][varno] = bytes;
710 /* Compute object_sizes for PTR, defined to the result of a call. */
712 static void
713 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
715 int object_size_type = osi->object_size_type;
716 unsigned int varno = SSA_NAME_VERSION (ptr);
717 unsigned HOST_WIDE_INT bytes;
719 gcc_assert (is_gimple_call (call));
721 gcc_assert (object_sizes[object_size_type][varno]
722 != unknown[object_size_type]);
723 gcc_assert (osi->pass == 0);
725 bytes = alloc_object_size (call, object_size_type);
727 if ((object_size_type & 2) == 0)
729 if (object_sizes[object_size_type][varno] < bytes)
730 object_sizes[object_size_type][varno] = bytes;
732 else
734 if (object_sizes[object_size_type][varno] > bytes)
735 object_sizes[object_size_type][varno] = bytes;
740 /* Compute object_sizes for PTR, defined to an unknown value. */
742 static void
743 unknown_object_size (struct object_size_info *osi, tree ptr)
745 int object_size_type = osi->object_size_type;
746 unsigned int varno = SSA_NAME_VERSION (ptr);
747 unsigned HOST_WIDE_INT bytes;
749 gcc_assert (object_sizes[object_size_type][varno]
750 != unknown[object_size_type]);
751 gcc_assert (osi->pass == 0);
753 bytes = unknown[object_size_type];
755 if ((object_size_type & 2) == 0)
757 if (object_sizes[object_size_type][varno] < bytes)
758 object_sizes[object_size_type][varno] = bytes;
760 else
762 if (object_sizes[object_size_type][varno] > bytes)
763 object_sizes[object_size_type][varno] = bytes;
768 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
769 the object size might need reexamination later. */
771 static bool
772 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
773 unsigned HOST_WIDE_INT offset)
775 int object_size_type = osi->object_size_type;
776 unsigned int varno = SSA_NAME_VERSION (dest);
777 unsigned HOST_WIDE_INT orig_bytes;
779 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
780 return false;
781 if (offset >= offset_limit)
783 object_sizes[object_size_type][varno] = unknown[object_size_type];
784 return false;
787 if (osi->pass == 0)
788 collect_object_sizes_for (osi, orig);
790 orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
791 if (orig_bytes != unknown[object_size_type])
792 orig_bytes = (offset > orig_bytes)
793 ? HOST_WIDE_INT_0U : orig_bytes - offset;
795 if ((object_size_type & 2) == 0)
797 if (object_sizes[object_size_type][varno] < orig_bytes)
799 object_sizes[object_size_type][varno] = orig_bytes;
800 osi->changed = true;
803 else
805 if (object_sizes[object_size_type][varno] > orig_bytes)
807 object_sizes[object_size_type][varno] = orig_bytes;
808 osi->changed = true;
811 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
815 /* Compute object_sizes for VAR, defined to the result of an assignment
816 with operator POINTER_PLUS_EXPR. Return true if the object size might
817 need reexamination later. */
819 static bool
820 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
822 int object_size_type = osi->object_size_type;
823 unsigned int varno = SSA_NAME_VERSION (var);
824 unsigned HOST_WIDE_INT bytes;
825 tree op0, op1;
827 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
829 op0 = gimple_assign_rhs1 (stmt);
830 op1 = gimple_assign_rhs2 (stmt);
832 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
834 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
835 gcc_assert (TREE_CODE (rhs) == MEM_REF);
836 op0 = TREE_OPERAND (rhs, 0);
837 op1 = TREE_OPERAND (rhs, 1);
839 else
840 gcc_unreachable ();
842 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
843 return false;
845 /* Handle PTR + OFFSET here. */
846 if (TREE_CODE (op1) == INTEGER_CST
847 && (TREE_CODE (op0) == SSA_NAME
848 || TREE_CODE (op0) == ADDR_EXPR))
850 if (! tree_fits_uhwi_p (op1))
851 bytes = unknown[object_size_type];
852 else if (TREE_CODE (op0) == SSA_NAME)
853 return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
854 else
856 unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);
858 /* op0 will be ADDR_EXPR here. */
859 addr_object_size (osi, op0, object_size_type, &bytes);
860 if (bytes == unknown[object_size_type])
862 else if (off > offset_limit)
863 bytes = unknown[object_size_type];
864 else if (off > bytes)
865 bytes = 0;
866 else
867 bytes -= off;
870 else
871 bytes = unknown[object_size_type];
873 if ((object_size_type & 2) == 0)
875 if (object_sizes[object_size_type][varno] < bytes)
876 object_sizes[object_size_type][varno] = bytes;
878 else
880 if (object_sizes[object_size_type][varno] > bytes)
881 object_sizes[object_size_type][varno] = bytes;
883 return false;
887 /* Compute object_sizes for VAR, defined at STMT, which is
888 a COND_EXPR. Return true if the object size might need reexamination
889 later. */
891 static bool
892 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
894 tree then_, else_;
895 int object_size_type = osi->object_size_type;
896 unsigned int varno = SSA_NAME_VERSION (var);
897 bool reexamine = false;
899 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
901 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
902 return false;
904 then_ = gimple_assign_rhs2 (stmt);
905 else_ = gimple_assign_rhs3 (stmt);
907 if (TREE_CODE (then_) == SSA_NAME)
908 reexamine |= merge_object_sizes (osi, var, then_, 0);
909 else
910 expr_object_size (osi, var, then_);
912 if (TREE_CODE (else_) == SSA_NAME)
913 reexamine |= merge_object_sizes (osi, var, else_, 0);
914 else
915 expr_object_size (osi, var, else_);
917 return reexamine;
920 /* Compute object sizes for VAR.
921 For ADDR_EXPR an object size is the number of remaining bytes
922 to the end of the object (where what is considered an object depends on
923 OSI->object_size_type).
924 For allocation GIMPLE_CALL like malloc or calloc object size is the size
925 of the allocation.
926 For POINTER_PLUS_EXPR where second operand is a constant integer,
927 object size is object size of the first operand minus the constant.
928 If the constant is bigger than the number of remaining bytes until the
929 end of the object, object size is 0, but if it is instead a pointer
930 subtraction, object size is unknown[object_size_type].
931 To differentiate addition from subtraction, ADDR_EXPR returns
932 unknown[object_size_type] for all objects bigger than half of the address
933 space, and constants less than half of the address space are considered
934 addition, while bigger constants subtraction.
935 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
936 object size is object size of that argument.
937 Otherwise, object size is the maximum of object sizes of variables
938 that it might be set to. */
940 static void
941 collect_object_sizes_for (struct object_size_info *osi, tree var)
943 int object_size_type = osi->object_size_type;
944 unsigned int varno = SSA_NAME_VERSION (var);
945 gimple *stmt;
946 bool reexamine;
948 if (bitmap_bit_p (computed[object_size_type], varno))
949 return;
951 if (osi->pass == 0)
953 if (bitmap_set_bit (osi->visited, varno))
955 object_sizes[object_size_type][varno]
956 = (object_size_type & 2) ? -1 : 0;
958 else
960 /* Found a dependency loop. Mark the variable for later
961 re-examination. */
962 bitmap_set_bit (osi->reexamine, varno);
963 if (dump_file && (dump_flags & TDF_DETAILS))
965 fprintf (dump_file, "Found a dependency loop at ");
966 print_generic_expr (dump_file, var, dump_flags);
967 fprintf (dump_file, "\n");
969 return;
973 if (dump_file && (dump_flags & TDF_DETAILS))
975 fprintf (dump_file, "Visiting use-def links for ");
976 print_generic_expr (dump_file, var, dump_flags);
977 fprintf (dump_file, "\n");
980 stmt = SSA_NAME_DEF_STMT (var);
981 reexamine = false;
983 switch (gimple_code (stmt))
985 case GIMPLE_ASSIGN:
987 tree rhs = gimple_assign_rhs1 (stmt);
988 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
989 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
990 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
991 reexamine = plus_stmt_object_size (osi, var, stmt);
992 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
993 reexamine = cond_expr_object_size (osi, var, stmt);
994 else if (gimple_assign_single_p (stmt)
995 || gimple_assign_unary_nop_p (stmt))
997 if (TREE_CODE (rhs) == SSA_NAME
998 && POINTER_TYPE_P (TREE_TYPE (rhs)))
999 reexamine = merge_object_sizes (osi, var, rhs, 0);
1000 else
1001 expr_object_size (osi, var, rhs);
1003 else
1004 unknown_object_size (osi, var);
1005 break;
1008 case GIMPLE_CALL:
1010 gcall *call_stmt = as_a <gcall *> (stmt);
1011 tree arg = pass_through_call (call_stmt);
1012 if (arg)
1014 if (TREE_CODE (arg) == SSA_NAME
1015 && POINTER_TYPE_P (TREE_TYPE (arg)))
1016 reexamine = merge_object_sizes (osi, var, arg, 0);
1017 else
1018 expr_object_size (osi, var, arg);
1020 else
1021 call_object_size (osi, var, call_stmt);
1022 break;
1025 case GIMPLE_ASM:
1026 /* Pointers defined by __asm__ statements can point anywhere. */
1027 object_sizes[object_size_type][varno] = unknown[object_size_type];
1028 break;
1030 case GIMPLE_NOP:
1031 if (SSA_NAME_VAR (var)
1032 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
1033 expr_object_size (osi, var, SSA_NAME_VAR (var));
1034 else
1035 /* Uninitialized SSA names point nowhere. */
1036 object_sizes[object_size_type][varno] = unknown[object_size_type];
1037 break;
1039 case GIMPLE_PHI:
1041 unsigned i;
1043 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1045 tree rhs = gimple_phi_arg (stmt, i)->def;
1047 if (object_sizes[object_size_type][varno]
1048 == unknown[object_size_type])
1049 break;
1051 if (TREE_CODE (rhs) == SSA_NAME)
1052 reexamine |= merge_object_sizes (osi, var, rhs, 0);
1053 else if (osi->pass == 0)
1054 expr_object_size (osi, var, rhs);
1056 break;
1059 default:
1060 gcc_unreachable ();
1063 if (! reexamine
1064 || object_sizes[object_size_type][varno] == unknown[object_size_type])
1066 bitmap_set_bit (computed[object_size_type], varno);
1067 bitmap_clear_bit (osi->reexamine, varno);
1069 else
1071 bitmap_set_bit (osi->reexamine, varno);
1072 if (dump_file && (dump_flags & TDF_DETAILS))
1074 fprintf (dump_file, "Need to reexamine ");
1075 print_generic_expr (dump_file, var, dump_flags);
1076 fprintf (dump_file, "\n");
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.

   Walks backwards through the defining statements of VAR.  DEPTH counts
   how many non-zero POINTER_PLUS_EXPR additions have been seen on the
   current walk; osi->depths[] records the depth at which each SSA name
   was first reached and osi->stack/osi->tos hold the names on the
   current recursion path.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      /* VAR is already on the current path.  If it was first reached at
         a different depth, some pointer on the cycle is incremented by a
         non-zero amount on each trip around the loop.  */
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  Pop the stack back
             down to VAR, forcing every SSA name on the cycle to object
             size 0 and marking it as finally computed.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    /* Only names queued for reexamination can participate in a
       problematic cycle.  */
    return;

  /* Push VAR onto the current path.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            /* Plain copy or no-op conversion: continue at the same
               depth.  */
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            /* Only a non-zero addition increases the depth.  */
            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        /* Follow every PHI argument that is an SSA name.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR from the current path.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      /* A zero addition cannot create a growing cycle.  */
      if (integer_zerop (cst))
        return;

      /* Seed the recursive walk: BASEVAR enters at depth 1, and the
         walk from VAR starts at depth 2 because this statement already
         contributed a non-zero offset.  Undo the seeding afterwards so
         OSI is left unchanged.  */
      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}
1216 /* Initialize data structures for the object size computation. */
1218 void
1219 init_object_sizes (void)
1221 int object_size_type;
1223 if (computed[0])
1224 return;
1226 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1228 object_sizes[object_size_type].safe_grow (num_ssa_names);
1229 computed[object_size_type] = BITMAP_ALLOC (NULL);
1232 init_offset_limit ();
1236 /* Destroy data structures after the object size computation. */
1238 void
1239 fini_object_sizes (void)
1241 int object_size_type;
1243 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1245 object_sizes[object_size_type].release ();
1246 BITMAP_FREE (computed[object_size_type]);
/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

/* Static metadata for the objsz pass; the pass requires the CFG and
   SSA form and sets no TODO flags of its own.  */
const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* The objsz pass.  An instance may be parameterized via set_pass_param
   to insert MIN/MAX_EXPRs of the call result and the computed constant
   instead of folding __builtin_object_size to a constant (see
   execute).  */
class pass_object_sizes : public gimple_opt_pass
{
 public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }

  /* Receive the single pass parameter: whether this instance creates
     MIN/MAX_EXPRs rather than folding to constants.  */
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }

  virtual unsigned int execute (function *);

 private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes
/* Dummy valueize function, used as the valueization callback for
   gimple_fold_stmt_to_constant; returns T unchanged.  */

static tree
do_valueize (tree t)
{
  return t;
}
/* Main pass body: visit every statement in FUN and fold each
   __builtin_object_size () call — either to a constant, or, when
   insert_min_max_p, to a MIN_EXPR/MAX_EXPR of the original call result
   and the computed constant.  Always returns 0 (no extra TODO
   flags).  */

unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree result;
          gimple *call = gsi_stmt (i);
          if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
            continue;

          /* Lazily build the per-SSA-name size tables only once a
             relevant call is actually seen.  */
          init_object_sizes ();

          /* If insert_min_max_p, only attempt to fold
             __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
             and rather than folding the builtin to the constant if any,
             create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
             call result and the computed constant.  */
          if (insert_min_max_p)
            {
              tree ost = gimple_call_arg (call, 1);
              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
                  tree ptr = gimple_call_arg (call, 0);
                  tree lhs = gimple_call_lhs (call);
                  if ((object_size_type == 1 || object_size_type == 3)
                      && (TREE_CODE (ptr) == ADDR_EXPR
                          || TREE_CODE (ptr) == SSA_NAME)
                      && lhs)
                    {
                      tree type = TREE_TYPE (lhs);
                      unsigned HOST_WIDE_INT bytes;
                      if (compute_builtin_object_size (ptr, object_size_type,
                                                       &bytes)
                          && wi::fits_to_tree_p (bytes, type))
                        {
                          /* Redirect the call's result into a fresh SSA
                             name and assign the old lhs
                             MIN/MAX (call result, constant): MIN for
                             type 1 (maximum size), MAX for type 3
                             (minimum size).  */
                          tree tem = make_ssa_name (type);
                          gimple_call_set_lhs (call, tem);
                          enum tree_code code
                            = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
                          tree cst = build_int_cstu (type, bytes);
                          gimple *g
                            = gimple_build_assign (lhs, code, tem, cst);
                          gsi_insert_after (&i, g, GSI_NEW_STMT);
                          update_stmt (call);
                        }
                    }
                }
              continue;
            }

          tree lhs = gimple_call_lhs (call);
          if (!lhs)
            continue;

          result = gimple_fold_stmt_to_constant (call, do_valueize);
          if (!result)
            {
              tree ost = gimple_call_arg (call, 1);

              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

                  /* Folding failed: fall back to the documented
                     "unknown" results — (size_t) -1 for types 0 and 1,
                     0 for types 2 and 3.  */
                  if (object_size_type < 2)
                    result = fold_convert (size_type_node,
                                           integer_minus_one_node);
                  else if (object_size_type < 4)
                    result = build_zero_cst (size_type_node);
                }

              if (!result)
                continue;
            }

          gcc_assert (TREE_CODE (result) == INTEGER_CST);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, result);
              fprintf (dump_file, "\n");
            }

          /* Propagate into all uses and fold those stmts.  */
          replace_uses_by (lhs, result);
        }
    }

  fini_object_sizes ();
  return 0;
}
} // anon namespace

/* Create a new instance of the objsz pass for the pass manager.  */
gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}