gcc/tree-object-size.c
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "hash-set.h"
26 #include "machmode.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "tree-object-size.h"
37 #include "diagnostic-core.h"
38 #include "gimple-pretty-print.h"
39 #include "bitmap.h"
40 #include "predict.h"
41 #include "hard-reg-set.h"
42 #include "input.h"
43 #include "function.h"
44 #include "dominance.h"
45 #include "cfg.h"
46 #include "basic-block.h"
47 #include "tree-ssa-alias.h"
48 #include "internal-fn.h"
49 #include "gimple-fold.h"
50 #include "gimple-expr.h"
51 #include "is-a.h"
52 #include "gimple.h"
53 #include "gimple-iterator.h"
54 #include "gimple-ssa.h"
55 #include "stringpool.h"
56 #include "tree-ssanames.h"
57 #include "tree-pass.h"
58 #include "tree-ssa-propagate.h"
59 #include "tree-phinodes.h"
60 #include "ssa-iterators.h"
61 #include "builtins.h"
63 struct object_size_info
65 int object_size_type;
66 bitmap visited, reexamine;
67 int pass;
68 bool changed;
69 unsigned int *depths;
70 unsigned int *stack, *tos;
73 static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
75 static tree compute_object_offset (const_tree, const_tree);
76 static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
77 const_tree, int);
78 static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
79 static tree pass_through_call (const gcall *);
80 static void collect_object_sizes_for (struct object_size_info *, tree);
81 static void expr_object_size (struct object_size_info *, tree, tree);
82 static bool merge_object_sizes (struct object_size_info *, tree, tree,
83 unsigned HOST_WIDE_INT);
84 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
85 static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
86 static void init_offset_limit (void);
87 static void check_for_plus_in_loops (struct object_size_info *, tree);
88 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
89 unsigned int);
91 /* object_sizes[0] is the upper bound for the number of bytes till the end
92    of the object.
93    object_sizes[1] is the upper bound for the number of bytes till the end
94    of the subobject (innermost array or field with address taken).
95    object_sizes[2] is the lower bound for the number of bytes till the end
96    of the object and object_sizes[3] the lower bound for the subobject.  */
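/* Illustrative example, not part of the original sources: given

     struct S { char buf[4]; int i; } s;   (assume sizeof (s) == 8)
     char *p = &s.buf[1];

   object_sizes[0] for p would be 7 (bytes left in the whole object s),
   object_sizes[1] would be 3 (bytes left in the subobject s.buf), and
   object_sizes[2] and object_sizes[3] are the corresponding lower
   bounds, which coincide with the upper bounds here because p is
   known exactly.  */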
97 static vec<unsigned HOST_WIDE_INT> object_sizes[4];
99 /* Bitmaps recording which object sizes have been computed already.  */
100 static bitmap computed[4];
102 /* Maximum value of offset we consider to be an addition.  */
103 static unsigned HOST_WIDE_INT offset_limit;
106 /* Initialize OFFSET_LIMIT variable. */
107 static void
108 init_offset_limit (void)
110 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
111 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
112 else
113 offset_limit = -1;
114 offset_limit /= 2;
118 /* Compute offset of EXPR within VAR. Return error_mark_node
119 if unknown. */
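/* Illustration added for exposition (not from the original sources):
   with

     struct S { int i; char buf[8]; } s;

   compute_object_offset for EXPR == s.buf[3] and VAR == s builds the
   offset from the COMPONENT_REF and ARRAY_REF cases below and yields
   offsetof (struct S, buf) + 3, i.e. 7 on a typical ABI.  */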
121 static tree
122 compute_object_offset (const_tree expr, const_tree var)
124 enum tree_code code = PLUS_EXPR;
125 tree base, off, t;
127 if (expr == var)
128 return size_zero_node;
130 switch (TREE_CODE (expr))
132 case COMPONENT_REF:
133 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
134 if (base == error_mark_node)
135 return base;
137 t = TREE_OPERAND (expr, 1);
138 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
139 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
140 / BITS_PER_UNIT));
141 break;
143 case REALPART_EXPR:
144 CASE_CONVERT:
145 case VIEW_CONVERT_EXPR:
146 case NON_LVALUE_EXPR:
147 return compute_object_offset (TREE_OPERAND (expr, 0), var);
149 case IMAGPART_EXPR:
150 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
151 if (base == error_mark_node)
152 return base;
154 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
155 break;
157 case ARRAY_REF:
158 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
159 if (base == error_mark_node)
160 return base;
162 t = TREE_OPERAND (expr, 1);
163 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
165 code = MINUS_EXPR;
166 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
168 t = fold_convert (sizetype, t);
169 off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
170 break;
172 case MEM_REF:
173 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
174 return wide_int_to_tree (sizetype, mem_ref_offset (expr));
176 default:
177 return error_mark_node;
180 return size_binop (code, base, off);
184 /* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
185 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
186 If unknown, return unknown[object_size_type]. */
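/* Example added for exposition (not part of the original file): with

     char a[10];

   addr_object_size for &a[3] returns 7 for both the object and the
   subobject variants, since 3 of the 10 bytes are already skipped;
   for an out-of-bounds &a[12] it returns 0 instead of wrapping
   around.  */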
188 static unsigned HOST_WIDE_INT
189 addr_object_size (struct object_size_info *osi, const_tree ptr,
190 int object_size_type)
192 tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;
194 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
196 pt_var = TREE_OPERAND (ptr, 0);
197 while (handled_component_p (pt_var))
198 pt_var = TREE_OPERAND (pt_var, 0);
200 if (pt_var
201 && TREE_CODE (pt_var) == MEM_REF)
203 unsigned HOST_WIDE_INT sz;
205 if (!osi || (object_size_type & 1) != 0
206 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
208 sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
209 object_size_type & ~1);
211 else
213 tree var = TREE_OPERAND (pt_var, 0);
214 if (osi->pass == 0)
215 collect_object_sizes_for (osi, var);
216 if (bitmap_bit_p (computed[object_size_type],
217 SSA_NAME_VERSION (var)))
218 sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
219 else
220 sz = unknown[object_size_type];
222 if (sz != unknown[object_size_type])
224 offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
225 if (wi::neg_p (dsz))
226 sz = 0;
227 else if (wi::fits_uhwi_p (dsz))
228 sz = dsz.to_uhwi ();
229 else
230 sz = unknown[object_size_type];
233 if (sz != unknown[object_size_type] && sz < offset_limit)
234 pt_var_size = size_int (sz);
236 else if (pt_var
237 && DECL_P (pt_var)
238 && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
239 && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
240 pt_var_size = DECL_SIZE_UNIT (pt_var);
241 else if (pt_var
242 && TREE_CODE (pt_var) == STRING_CST
243 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
244 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
245 && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
246 < offset_limit)
247 pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
248 else
249 return unknown[object_size_type];
251 if (pt_var != TREE_OPERAND (ptr, 0))
253 tree var;
255 if (object_size_type & 1)
257 var = TREE_OPERAND (ptr, 0);
259 while (var != pt_var
260 && TREE_CODE (var) != BIT_FIELD_REF
261 && TREE_CODE (var) != COMPONENT_REF
262 && TREE_CODE (var) != ARRAY_REF
263 && TREE_CODE (var) != ARRAY_RANGE_REF
264 && TREE_CODE (var) != REALPART_EXPR
265 && TREE_CODE (var) != IMAGPART_EXPR)
266 var = TREE_OPERAND (var, 0);
267 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
268 var = TREE_OPERAND (var, 0);
269 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
270 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
271 || (pt_var_size
272 && tree_int_cst_lt (pt_var_size,
273 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
274 var = pt_var;
275 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
277 tree v = var;
278 /* For &X->fld, compute object size only if fld isn't the last
279    field, as struct { int i; char c[1]; } is often used instead
280    of a flexible array member.  */
281 while (v && v != pt_var)
282 switch (TREE_CODE (v))
284 case ARRAY_REF:
285 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
286 && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
288 tree domain
289 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
290 if (domain
291 && TYPE_MAX_VALUE (domain)
292 && TREE_CODE (TYPE_MAX_VALUE (domain))
293 == INTEGER_CST
294 && tree_int_cst_lt (TREE_OPERAND (v, 1),
295 TYPE_MAX_VALUE (domain)))
297 v = NULL_TREE;
298 break;
301 v = TREE_OPERAND (v, 0);
302 break;
303 case REALPART_EXPR:
304 case IMAGPART_EXPR:
305 v = NULL_TREE;
306 break;
307 case COMPONENT_REF:
308 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
310 v = NULL_TREE;
311 break;
313 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
314 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
315 != UNION_TYPE
316 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
317 != QUAL_UNION_TYPE)
318 break;
319 else
320 v = TREE_OPERAND (v, 0);
321 if (TREE_CODE (v) == COMPONENT_REF
322 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
323 == RECORD_TYPE)
325 tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
326 for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
327 if (TREE_CODE (fld_chain) == FIELD_DECL)
328 break;
330 if (fld_chain)
332 v = NULL_TREE;
333 break;
335 v = TREE_OPERAND (v, 0);
337 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
338 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
339 != UNION_TYPE
340 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
341 != QUAL_UNION_TYPE)
342 break;
343 else
344 v = TREE_OPERAND (v, 0);
345 if (v != pt_var)
346 v = NULL_TREE;
347 else
348 v = pt_var;
349 break;
350 default:
351 v = pt_var;
352 break;
354 if (v == pt_var)
355 var = pt_var;
358 else
359 var = pt_var;
361 if (var != pt_var)
362 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
363 else if (!pt_var_size)
364 return unknown[object_size_type];
365 else
366 var_size = pt_var_size;
367 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
368 if (bytes != error_mark_node)
370 if (TREE_CODE (bytes) == INTEGER_CST
371 && tree_int_cst_lt (var_size, bytes))
372 bytes = size_zero_node;
373 else
374 bytes = size_binop (MINUS_EXPR, var_size, bytes);
376 if (var != pt_var
377 && pt_var_size
378 && TREE_CODE (pt_var) == MEM_REF
379 && bytes != error_mark_node)
381 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
382 if (bytes2 != error_mark_node)
384 if (TREE_CODE (bytes2) == INTEGER_CST
385 && tree_int_cst_lt (pt_var_size, bytes2))
386 bytes2 = size_zero_node;
387 else
388 bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
389 bytes = size_binop (MIN_EXPR, bytes, bytes2);
393 else if (!pt_var_size)
394 return unknown[object_size_type];
395 else
396 bytes = pt_var_size;
398 if (tree_fits_uhwi_p (bytes))
399 return tree_to_uhwi (bytes);
401 return unknown[object_size_type];
405 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
406 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
407 argument from __builtin_object_size. If unknown, return
408 unknown[object_size_type]. */
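/* For illustration only (example not in the original sources):

     p = malloc (100);       -> object size of p is 100
     q = calloc (20, 5);     -> object size of q is 20 * 5 = 100
     r = my_alloc (x, n);    -> with the hypothetical declaration
                                void *my_alloc (int, size_t)
                                  __attribute__ ((alloc_size (2))),
                                the size comes from the second argument
                                and is only known when n is a constant.  */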
410 static unsigned HOST_WIDE_INT
411 alloc_object_size (const gcall *call, int object_size_type)
413 tree callee, bytes = NULL_TREE;
414 tree alloc_size;
415 int arg1 = -1, arg2 = -1;
417 gcc_assert (is_gimple_call (call));
419 callee = gimple_call_fndecl (call);
420 if (!callee)
421 return unknown[object_size_type];
423 alloc_size = lookup_attribute ("alloc_size",
424 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
425 if (alloc_size && TREE_VALUE (alloc_size))
427 tree p = TREE_VALUE (alloc_size);
429 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
430 if (TREE_CHAIN (p))
431 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
434 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
435 switch (DECL_FUNCTION_CODE (callee))
437 case BUILT_IN_CALLOC:
438 arg2 = 1;
439 /* fall through */
440 case BUILT_IN_MALLOC:
441 case BUILT_IN_ALLOCA:
442 case BUILT_IN_ALLOCA_WITH_ALIGN:
443 arg1 = 0;
444 default:
445 break;
448 if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
449 || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
450 || (arg2 >= 0
451 && (arg2 >= (int)gimple_call_num_args (call)
452 || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
453 return unknown[object_size_type];
455 if (arg2 >= 0)
456 bytes = size_binop (MULT_EXPR,
457 fold_convert (sizetype, gimple_call_arg (call, arg1)),
458 fold_convert (sizetype, gimple_call_arg (call, arg2)));
459 else if (arg1 >= 0)
460 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
462 if (bytes && tree_fits_uhwi_p (bytes))
463 return tree_to_uhwi (bytes);
465 return unknown[object_size_type];
469 /* If the object size is propagated from one of the function's arguments
470    directly to its return value, return that argument for GIMPLE_CALL
471    statement CALL.  Otherwise return NULL.  */
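/* Example added for clarity (not part of the original sources):

     q = memcpy (p, src, n);

   memcpy returns its first argument, so pass_through_call returns p
   here and q simply inherits p's object size instead of being treated
   as an unknown allocation.  */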
473 static tree
474 pass_through_call (const gcall *call)
476 tree callee = gimple_call_fndecl (call);
478 if (callee
479 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
480 switch (DECL_FUNCTION_CODE (callee))
482 case BUILT_IN_MEMCPY:
483 case BUILT_IN_MEMMOVE:
484 case BUILT_IN_MEMSET:
485 case BUILT_IN_STRCPY:
486 case BUILT_IN_STRNCPY:
487 case BUILT_IN_STRCAT:
488 case BUILT_IN_STRNCAT:
489 case BUILT_IN_MEMCPY_CHK:
490 case BUILT_IN_MEMMOVE_CHK:
491 case BUILT_IN_MEMSET_CHK:
492 case BUILT_IN_STRCPY_CHK:
493 case BUILT_IN_STRNCPY_CHK:
494 case BUILT_IN_STPNCPY_CHK:
495 case BUILT_IN_STRCAT_CHK:
496 case BUILT_IN_STRNCAT_CHK:
497 case BUILT_IN_ASSUME_ALIGNED:
498 if (gimple_call_num_args (call) >= 1)
499 return gimple_call_arg (call, 0);
500 break;
501 default:
502 break;
505 return NULL_TREE;
509 /* Compute __builtin_object_size value for PTR. OBJECT_SIZE_TYPE is the
510 second argument from __builtin_object_size. */
512 unsigned HOST_WIDE_INT
513 compute_builtin_object_size (tree ptr, int object_size_type)
515 gcc_assert (object_size_type >= 0 && object_size_type <= 3);
517 if (! offset_limit)
518 init_offset_limit ();
520 if (TREE_CODE (ptr) == ADDR_EXPR)
521 return addr_object_size (NULL, ptr, object_size_type);
523 if (TREE_CODE (ptr) == SSA_NAME
524 && POINTER_TYPE_P (TREE_TYPE (ptr))
525 && computed[object_size_type] != NULL)
527 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
529 struct object_size_info osi;
530 bitmap_iterator bi;
531 unsigned int i;
533 if (num_ssa_names > object_sizes[object_size_type].length ())
534 object_sizes[object_size_type].safe_grow (num_ssa_names);
535 if (dump_file)
537 fprintf (dump_file, "Computing %s %sobject size for ",
538 (object_size_type & 2) ? "minimum" : "maximum",
539 (object_size_type & 1) ? "sub" : "");
540 print_generic_expr (dump_file, ptr, dump_flags);
541 fprintf (dump_file, ":\n");
544 osi.visited = BITMAP_ALLOC (NULL);
545 osi.reexamine = BITMAP_ALLOC (NULL);
546 osi.object_size_type = object_size_type;
547 osi.depths = NULL;
548 osi.stack = NULL;
549 osi.tos = NULL;
551 /* First pass: walk UD chains, compute object sizes that
552    can be computed.  At the end, the osi.reexamine bitmap will
553    record which variables were found in dependency cycles
554 and therefore need to be reexamined. */
555 osi.pass = 0;
556 osi.changed = false;
557 collect_object_sizes_for (&osi, ptr);
559 /* Second pass: keep recomputing object sizes of variables
560 that need reexamination, until no object sizes are
561 increased or all object sizes are computed. */
562 if (! bitmap_empty_p (osi.reexamine))
564 bitmap reexamine = BITMAP_ALLOC (NULL);
566 /* If looking for minimum instead of maximum object size,
567 detect cases where a pointer is increased in a loop.
568 Although even without this detection pass 2 would eventually
569 terminate, it could take a long time. If a pointer is
570 increasing this way, we need to assume 0 object size.
571 E.g. p = &buf[0]; while (cond) p = p + 4; */
572 if (object_size_type & 2)
574 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
575 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
576 osi.tos = osi.stack;
577 osi.pass = 1;
578 /* collect_object_sizes_for is changing
579 osi.reexamine bitmap, so iterate over a copy. */
580 bitmap_copy (reexamine, osi.reexamine);
581 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
582 if (bitmap_bit_p (osi.reexamine, i))
583 check_for_plus_in_loops (&osi, ssa_name (i));
585 free (osi.depths);
586 osi.depths = NULL;
587 free (osi.stack);
588 osi.stack = NULL;
589 osi.tos = NULL;
594 osi.pass = 2;
595 osi.changed = false;
596 /* collect_object_sizes_for is changing
597 osi.reexamine bitmap, so iterate over a copy. */
598 bitmap_copy (reexamine, osi.reexamine);
599 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
600 if (bitmap_bit_p (osi.reexamine, i))
602 collect_object_sizes_for (&osi, ssa_name (i));
603 if (dump_file && (dump_flags & TDF_DETAILS))
605 fprintf (dump_file, "Reexamining ");
606 print_generic_expr (dump_file, ssa_name (i),
607 dump_flags);
608 fprintf (dump_file, "\n");
612 while (osi.changed);
614 BITMAP_FREE (reexamine);
616 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
617 bitmap_set_bit (computed[object_size_type], i);
619 /* Debugging dumps. */
620 if (dump_file)
622 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
623 if (object_sizes[object_size_type][i]
624 != unknown[object_size_type])
626 print_generic_expr (dump_file, ssa_name (i),
627 dump_flags);
628 fprintf (dump_file,
629 ": %s %sobject size "
630 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
631 (object_size_type & 2) ? "minimum" : "maximum",
632 (object_size_type & 1) ? "sub" : "",
633 object_sizes[object_size_type][i]);
637 BITMAP_FREE (osi.reexamine);
638 BITMAP_FREE (osi.visited);
641 return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
644 return unknown[object_size_type];
647 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
649 static void
650 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
652 int object_size_type = osi->object_size_type;
653 unsigned int varno = SSA_NAME_VERSION (ptr);
654 unsigned HOST_WIDE_INT bytes;
656 gcc_assert (object_sizes[object_size_type][varno]
657 != unknown[object_size_type]);
658 gcc_assert (osi->pass == 0);
660 if (TREE_CODE (value) == WITH_SIZE_EXPR)
661 value = TREE_OPERAND (value, 0);
663 /* Pointer variables should have been handled by merge_object_sizes. */
664 gcc_assert (TREE_CODE (value) != SSA_NAME
665 || !POINTER_TYPE_P (TREE_TYPE (value)));
667 if (TREE_CODE (value) == ADDR_EXPR)
668 bytes = addr_object_size (osi, value, object_size_type);
669 else
670 bytes = unknown[object_size_type];
672 if ((object_size_type & 2) == 0)
674 if (object_sizes[object_size_type][varno] < bytes)
675 object_sizes[object_size_type][varno] = bytes;
677 else
679 if (object_sizes[object_size_type][varno] > bytes)
680 object_sizes[object_size_type][varno] = bytes;
685 /* Compute object_sizes for PTR, defined to the result of a call. */
687 static void
688 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
690 int object_size_type = osi->object_size_type;
691 unsigned int varno = SSA_NAME_VERSION (ptr);
692 unsigned HOST_WIDE_INT bytes;
694 gcc_assert (is_gimple_call (call));
696 gcc_assert (object_sizes[object_size_type][varno]
697 != unknown[object_size_type]);
698 gcc_assert (osi->pass == 0);
700 bytes = alloc_object_size (call, object_size_type);
702 if ((object_size_type & 2) == 0)
704 if (object_sizes[object_size_type][varno] < bytes)
705 object_sizes[object_size_type][varno] = bytes;
707 else
709 if (object_sizes[object_size_type][varno] > bytes)
710 object_sizes[object_size_type][varno] = bytes;
715 /* Compute object_sizes for PTR, defined to an unknown value. */
717 static void
718 unknown_object_size (struct object_size_info *osi, tree ptr)
720 int object_size_type = osi->object_size_type;
721 unsigned int varno = SSA_NAME_VERSION (ptr);
722 unsigned HOST_WIDE_INT bytes;
724 gcc_assert (object_sizes[object_size_type][varno]
725 != unknown[object_size_type]);
726 gcc_assert (osi->pass == 0);
728 bytes = unknown[object_size_type];
730 if ((object_size_type & 2) == 0)
732 if (object_sizes[object_size_type][varno] < bytes)
733 object_sizes[object_size_type][varno] = bytes;
735 else
737 if (object_sizes[object_size_type][varno] > bytes)
738 object_sizes[object_size_type][varno] = bytes;
743 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
744 the object size might need reexamination later. */
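/* Small worked example, added for exposition: if DEST is defined as
   ORIG + 8 and the maximum object size of ORIG is 32, DEST gets
   32 - 8 = 24; an OFFSET at or above offset_limit instead forces
   DEST to the unknown value.  */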
746 static bool
747 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
748 unsigned HOST_WIDE_INT offset)
750 int object_size_type = osi->object_size_type;
751 unsigned int varno = SSA_NAME_VERSION (dest);
752 unsigned HOST_WIDE_INT orig_bytes;
754 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
755 return false;
756 if (offset >= offset_limit)
758 object_sizes[object_size_type][varno] = unknown[object_size_type];
759 return false;
762 if (osi->pass == 0)
763 collect_object_sizes_for (osi, orig);
765 orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
766 if (orig_bytes != unknown[object_size_type])
767 orig_bytes = (offset > orig_bytes)
768 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;
770 if ((object_size_type & 2) == 0)
772 if (object_sizes[object_size_type][varno] < orig_bytes)
774 object_sizes[object_size_type][varno] = orig_bytes;
775 osi->changed = true;
778 else
780 if (object_sizes[object_size_type][varno] > orig_bytes)
782 object_sizes[object_size_type][varno] = orig_bytes;
783 osi->changed = true;
786 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
790 /* Compute object_sizes for VAR, defined to the result of an assignment
791 with operator POINTER_PLUS_EXPR. Return true if the object size might
792 need reexamination later. */
794 static bool
795 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
797 int object_size_type = osi->object_size_type;
798 unsigned int varno = SSA_NAME_VERSION (var);
799 unsigned HOST_WIDE_INT bytes;
800 tree op0, op1;
802 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
804 op0 = gimple_assign_rhs1 (stmt);
805 op1 = gimple_assign_rhs2 (stmt);
807 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
809 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
810 gcc_assert (TREE_CODE (rhs) == MEM_REF);
811 op0 = TREE_OPERAND (rhs, 0);
812 op1 = TREE_OPERAND (rhs, 1);
814 else
815 gcc_unreachable ();
817 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
818 return false;
820 /* Handle PTR + OFFSET here. */
821 if (TREE_CODE (op1) == INTEGER_CST
822 && (TREE_CODE (op0) == SSA_NAME
823 || TREE_CODE (op0) == ADDR_EXPR))
825 if (! tree_fits_uhwi_p (op1))
826 bytes = unknown[object_size_type];
827 else if (TREE_CODE (op0) == SSA_NAME)
828 return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
829 else
831 unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);
833 /* op0 will be ADDR_EXPR here. */
834 bytes = addr_object_size (osi, op0, object_size_type);
835 if (bytes == unknown[object_size_type])
837 else if (off > offset_limit)
838 bytes = unknown[object_size_type];
839 else if (off > bytes)
840 bytes = 0;
841 else
842 bytes -= off;
845 else
846 bytes = unknown[object_size_type];
848 if ((object_size_type & 2) == 0)
850 if (object_sizes[object_size_type][varno] < bytes)
851 object_sizes[object_size_type][varno] = bytes;
853 else
855 if (object_sizes[object_size_type][varno] > bytes)
856 object_sizes[object_size_type][varno] = bytes;
858 return false;
862 /* Compute object_sizes for VAR, defined at STMT, which is
863 a COND_EXPR. Return true if the object size might need reexamination
864 later. */
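/* For exposition (not in the original sources): for

     p_3 = cond_5 ? q_1 : &buf[2];

   the maximum object size of p_3 is the larger of q_1's object size
   and the bytes remaining after &buf[2], while the minimum object
   size is the smaller of the two.  */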
866 static bool
867 cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
869 tree then_, else_;
870 int object_size_type = osi->object_size_type;
871 unsigned int varno = SSA_NAME_VERSION (var);
872 bool reexamine = false;
874 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
876 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
877 return false;
879 then_ = gimple_assign_rhs2 (stmt);
880 else_ = gimple_assign_rhs3 (stmt);
882 if (TREE_CODE (then_) == SSA_NAME)
883 reexamine |= merge_object_sizes (osi, var, then_, 0);
884 else
885 expr_object_size (osi, var, then_);
887 if (TREE_CODE (else_) == SSA_NAME)
888 reexamine |= merge_object_sizes (osi, var, else_, 0);
889 else
890 expr_object_size (osi, var, else_);
892 return reexamine;
895 /* Compute object sizes for VAR.
896    For ADDR_EXPR an object size is the number of remaining bytes
897    to the end of the object (where what is considered an object depends on
898    OSI->object_size_type).
899    For an allocation GIMPLE_CALL like malloc or calloc, the object size is
900    the size of the allocation.
901    For a POINTER_PLUS_EXPR whose second operand is a constant integer,
902    the object size is the object size of the first operand minus the
903    constant.  If the constant is bigger than the number of remaining bytes
904    until the end of the object, the object size is 0, but if it is instead
905    a pointer subtraction, the object size is unknown[object_size_type].
906    To differentiate addition from subtraction, ADDR_EXPR returns
907    unknown[object_size_type] for all objects bigger than half of the address
908    space, constants less than half of the address space are considered an
909    addition, and bigger constants a subtraction.
910    For a memcpy-like GIMPLE_CALL that always returns one of its arguments,
911    the object size is the object size of that argument.
912    Otherwise, the object size is the maximum of the object sizes of the
913    variables that it might be set to.  */
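/* Concrete illustration of the address-space heuristic above, added
   for exposition: on a 64-bit target, p - 1 is represented as
   p + 0xffffffffffffffff; that constant exceeds offset_limit (half of
   the address space), so it is treated as a subtraction and the
   object size becomes unknown rather than a huge bogus value.  */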
915 static void
916 collect_object_sizes_for (struct object_size_info *osi, tree var)
918 int object_size_type = osi->object_size_type;
919 unsigned int varno = SSA_NAME_VERSION (var);
920 gimple stmt;
921 bool reexamine;
923 if (bitmap_bit_p (computed[object_size_type], varno))
924 return;
926 if (osi->pass == 0)
928 if (bitmap_set_bit (osi->visited, varno))
930 object_sizes[object_size_type][varno]
931 = (object_size_type & 2) ? -1 : 0;
933 else
935 /* Found a dependency loop. Mark the variable for later
936 re-examination. */
937 bitmap_set_bit (osi->reexamine, varno);
938 if (dump_file && (dump_flags & TDF_DETAILS))
940 fprintf (dump_file, "Found a dependency loop at ");
941 print_generic_expr (dump_file, var, dump_flags);
942 fprintf (dump_file, "\n");
944 return;
948 if (dump_file && (dump_flags & TDF_DETAILS))
950 fprintf (dump_file, "Visiting use-def links for ");
951 print_generic_expr (dump_file, var, dump_flags);
952 fprintf (dump_file, "\n");
955 stmt = SSA_NAME_DEF_STMT (var);
956 reexamine = false;
958 switch (gimple_code (stmt))
960 case GIMPLE_ASSIGN:
962 tree rhs = gimple_assign_rhs1 (stmt);
963 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
964 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
965 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
966 reexamine = plus_stmt_object_size (osi, var, stmt);
967 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
968 reexamine = cond_expr_object_size (osi, var, stmt);
969 else if (gimple_assign_single_p (stmt)
970 || gimple_assign_unary_nop_p (stmt))
972 if (TREE_CODE (rhs) == SSA_NAME
973 && POINTER_TYPE_P (TREE_TYPE (rhs)))
974 reexamine = merge_object_sizes (osi, var, rhs, 0);
975 else
976 expr_object_size (osi, var, rhs);
978 else
979 unknown_object_size (osi, var);
980 break;
983 case GIMPLE_CALL:
985 gcall *call_stmt = as_a <gcall *> (stmt);
986 tree arg = pass_through_call (call_stmt);
987 if (arg)
989 if (TREE_CODE (arg) == SSA_NAME
990 && POINTER_TYPE_P (TREE_TYPE (arg)))
991 reexamine = merge_object_sizes (osi, var, arg, 0);
992 else
993 expr_object_size (osi, var, arg);
995 else
996 call_object_size (osi, var, call_stmt);
997 break;
1000 case GIMPLE_ASM:
1001 /* Pointers defined by __asm__ statements can point anywhere. */
1002 object_sizes[object_size_type][varno] = unknown[object_size_type];
1003 break;
1005 case GIMPLE_NOP:
1006 if (SSA_NAME_VAR (var)
1007 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
1008 expr_object_size (osi, var, SSA_NAME_VAR (var));
1009 else
1010 /* Uninitialized SSA names point nowhere. */
1011 object_sizes[object_size_type][varno] = unknown[object_size_type];
1012 break;
1014 case GIMPLE_PHI:
1016 unsigned i;
1018 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1020 tree rhs = gimple_phi_arg (stmt, i)->def;
1022 if (object_sizes[object_size_type][varno]
1023 == unknown[object_size_type])
1024 break;
1026 if (TREE_CODE (rhs) == SSA_NAME)
1027 reexamine |= merge_object_sizes (osi, var, rhs, 0);
1028 else if (osi->pass == 0)
1029 expr_object_size (osi, var, rhs);
1031 break;
1034 default:
1035 gcc_unreachable ();
1038 if (! reexamine
1039 || object_sizes[object_size_type][varno] == unknown[object_size_type])
1041 bitmap_set_bit (computed[object_size_type], varno);
1042 bitmap_clear_bit (osi->reexamine, varno);
1044 else
1046 bitmap_set_bit (osi->reexamine, varno);
1047 if (dump_file && (dump_flags & TDF_DETAILS))
1049 fprintf (dump_file, "Need to reexamine ");
1050 print_generic_expr (dump_file, var, dump_flags);
1051 fprintf (dump_file, "\n");
1057 /* Helper function for check_for_plus_in_loops. Called recursively
1058 to detect loops. */
1060 static void
1061 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1062 unsigned int depth)
1064 gimple stmt = SSA_NAME_DEF_STMT (var);
1065 unsigned int varno = SSA_NAME_VERSION (var);
1067 if (osi->depths[varno])
1069 if (osi->depths[varno] != depth)
1071 unsigned int *sp;
1073 /* Found a loop involving pointer addition. */
1074 for (sp = osi->tos; sp > osi->stack; )
1076 --sp;
1077 bitmap_clear_bit (osi->reexamine, *sp);
1078 bitmap_set_bit (computed[osi->object_size_type], *sp);
1079 object_sizes[osi->object_size_type][*sp] = 0;
1080 if (*sp == varno)
1081 break;
1084 return;
1086 else if (! bitmap_bit_p (osi->reexamine, varno))
1087 return;
1089 osi->depths[varno] = depth;
1090 *osi->tos++ = varno;
1092 switch (gimple_code (stmt))
1095 case GIMPLE_ASSIGN:
1097 if ((gimple_assign_single_p (stmt)
1098 || gimple_assign_unary_nop_p (stmt))
1099 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1101 tree rhs = gimple_assign_rhs1 (stmt);
1103 check_for_plus_in_loops_1 (osi, rhs, depth);
1105 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1107 tree basevar = gimple_assign_rhs1 (stmt);
1108 tree cst = gimple_assign_rhs2 (stmt);
1110 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1112 check_for_plus_in_loops_1 (osi, basevar,
1113 depth + !integer_zerop (cst));
1115 else
1116 gcc_unreachable ();
1117 break;
1120 case GIMPLE_CALL:
1122 gcall *call_stmt = as_a <gcall *> (stmt);
1123 tree arg = pass_through_call (call_stmt);
1124 if (arg)
1126 if (TREE_CODE (arg) == SSA_NAME)
1127 check_for_plus_in_loops_1 (osi, arg, depth);
1128 else
1129 gcc_unreachable ();
1131 break;
1134 case GIMPLE_PHI:
1136 unsigned i;
1138 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1140 tree rhs = gimple_phi_arg (stmt, i)->def;
1142 if (TREE_CODE (rhs) == SSA_NAME)
1143 check_for_plus_in_loops_1 (osi, rhs, depth);
1145 break;
1148 default:
1149 gcc_unreachable ();
1152 osi->depths[varno] = 0;
1153 osi->tos--;
1157 /* Check if some pointer whose object size we are computing is being
1158    increased within a loop.  If so, assume all the SSA variables
1159    participating in that loop have minimum object size 0.  */
1161 static void
1162 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1164 gimple stmt = SSA_NAME_DEF_STMT (var);
1166 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1167 and looked for a POINTER_PLUS_EXPR in the pass-through
1168 argument, if any. In GIMPLE, however, such an expression
1169 is not a valid call operand. */
1171 if (is_gimple_assign (stmt)
1172 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1174 tree basevar = gimple_assign_rhs1 (stmt);
1175 tree cst = gimple_assign_rhs2 (stmt);
1177 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1179 if (integer_zerop (cst))
1180 return;
1182 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1183 *osi->tos++ = SSA_NAME_VERSION (basevar);
1184 check_for_plus_in_loops_1 (osi, var, 2);
1185 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1186 osi->tos--;
1191 /* Initialize data structures for the object size computation. */
1193 void
1194 init_object_sizes (void)
1196 int object_size_type;
1198 if (computed[0])
1199 return;
1201 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1203 object_sizes[object_size_type].safe_grow (num_ssa_names);
1204 computed[object_size_type] = BITMAP_ALLOC (NULL);
1207 init_offset_limit ();
1211 /* Destroy data structures after the object size computation. */
1213 static void
1214 fini_object_sizes (void)
1216 int object_size_type;
1218 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1220 object_sizes[object_size_type].release ();
1221 BITMAP_FREE (computed[object_size_type]);
1226 /* Simple pass to optimize all __builtin_object_size () builtins. */
1228 namespace {
1230 const pass_data pass_data_object_sizes =
1232 GIMPLE_PASS, /* type */
1233 "objsz", /* name */
1234 OPTGROUP_NONE, /* optinfo_flags */
1235 TV_NONE, /* tv_id */
1236 ( PROP_cfg | PROP_ssa ), /* properties_required */
1237 0, /* properties_provided */
1238 0, /* properties_destroyed */
1239 0, /* todo_flags_start */
1240 0, /* todo_flags_finish */
1243 class pass_object_sizes : public gimple_opt_pass
1245 public:
1246 pass_object_sizes (gcc::context *ctxt)
1247 : gimple_opt_pass (pass_data_object_sizes, ctxt)
1250 /* opt_pass methods: */
1251 opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
1252 virtual unsigned int execute (function *);
1254 }; // class pass_object_sizes
1256 unsigned int
1257 pass_object_sizes::execute (function *fun)
1259 basic_block bb;
1260 FOR_EACH_BB_FN (bb, fun)
1262 gimple_stmt_iterator i;
1263 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1265 tree result;
1266 gimple call = gsi_stmt (i);
1267 if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
1268 continue;
1270 init_object_sizes ();
1272 /* In the first pass instance, only attempt to fold
1273 __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
1274 and rather than folding the builtin to the constant if any,
1275 create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
1276 call result and the computed constant. */
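	 /* An illustrative sketch of that transformation (added for
	    exposition, not taken from the sources): a statement

	      sz_3 = __builtin_object_size (p_1, 1);

	    whose computed constant is 64 is rewritten as

	      tmp_7 = __builtin_object_size (p_1, 1);
	      sz_3 = MIN_EXPR <tmp_7, 64>;

	    so a later instance of this pass can still tighten it.  */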
1277 if (first_pass_instance)
1279 tree ost = gimple_call_arg (call, 1);
1280 if (tree_fits_uhwi_p (ost))
1282 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
1283 tree ptr = gimple_call_arg (call, 0);
1284 tree lhs = gimple_call_lhs (call);
1285 if ((object_size_type == 1 || object_size_type == 3)
1286 && (TREE_CODE (ptr) == ADDR_EXPR
1287 || TREE_CODE (ptr) == SSA_NAME)
1288 && lhs)
1290 tree type = TREE_TYPE (lhs);
1291 unsigned HOST_WIDE_INT bytes
1292 = compute_builtin_object_size (ptr, object_size_type);
1293 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type == 1
1294 ? -1 : 0)
1295 && wi::fits_to_tree_p (bytes, type))
1297 tree tem = make_ssa_name (type);
1298 gimple_call_set_lhs (call, tem);
1299 enum tree_code code
1300 = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
1301 tree cst = build_int_cstu (type, bytes);
1302 gimple g = gimple_build_assign (lhs, code, tem, cst);
1303 gsi_insert_after (&i, g, GSI_NEW_STMT);
1304 update_stmt (call);
1308 continue;
1311 result = fold_call_stmt (as_a <gcall *> (call), false);
1312 if (!result)
1314 tree ost = gimple_call_arg (call, 1);
1316 if (tree_fits_uhwi_p (ost))
1318 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
1320 if (object_size_type < 2)
1321 result = fold_convert (size_type_node,
1322 integer_minus_one_node);
1323 else if (object_size_type < 4)
1324 result = build_zero_cst (size_type_node);
1327 if (!result)
1328 continue;
1331 gcc_assert (TREE_CODE (result) == INTEGER_CST);
1333 if (dump_file && (dump_flags & TDF_DETAILS))
1335 fprintf (dump_file, "Simplified\n ");
1336 print_gimple_stmt (dump_file, call, 0, dump_flags);
1337 fprintf (dump_file, " to ");
1338 print_generic_expr (dump_file, result, 0);
1339 fprintf (dump_file, "\n");
1342 tree lhs = gimple_call_lhs (call);
1343 if (!lhs)
1344 continue;
1346 /* Propagate into all uses and fold those stmts. */
1347 gimple use_stmt;
1348 imm_use_iterator iter;
1349 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
1351 use_operand_p use_p;
1352 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1353 SET_USE (use_p, result);
1354 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1355 fold_stmt (&gsi);
1356 update_stmt (gsi_stmt (gsi));
1361 fini_object_sizes ();
1362 return 0;
1365 } // anon namespace
1367 gimple_opt_pass *
1368 make_pass_object_sizes (gcc::context *ctxt)
1370 return new pass_object_sizes (ctxt);