svn merge -r 217500:218679 svn+ssh://gcc.gnu.org/svn/gcc/trunk
[official-gcc.git] / gcc / tree-object-size.c
blob20b99567eef7100ca6d879507405a14435a171f3
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tree-object-size.h"
27 #include "diagnostic-core.h"
28 #include "gimple-pretty-print.h"
29 #include "bitmap.h"
30 #include "predict.h"
31 #include "vec.h"
32 #include "hashtab.h"
33 #include "hash-set.h"
34 #include "machmode.h"
35 #include "hard-reg-set.h"
36 #include "input.h"
37 #include "function.h"
38 #include "dominance.h"
39 #include "cfg.h"
40 #include "basic-block.h"
41 #include "tree-ssa-alias.h"
42 #include "internal-fn.h"
43 #include "gimple-fold.h"
44 #include "gimple-expr.h"
45 #include "is-a.h"
46 #include "gimple.h"
47 #include "gimple-iterator.h"
48 #include "gimple-ssa.h"
49 #include "stringpool.h"
50 #include "tree-ssanames.h"
51 #include "tree-pass.h"
52 #include "tree-ssa-propagate.h"
53 #include "tree-phinodes.h"
54 #include "ssa-iterators.h"
55 #include "builtins.h"
57 struct object_size_info
59 int object_size_type;
60 bitmap visited, reexamine;
61 int pass;
62 bool changed;
63 unsigned int *depths;
64 unsigned int *stack, *tos;
67 static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
69 static tree compute_object_offset (const_tree, const_tree);
70 static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
71 const_tree, int);
72 static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
73 static tree pass_through_call (const gcall *);
74 static void collect_object_sizes_for (struct object_size_info *, tree);
75 static void expr_object_size (struct object_size_info *, tree, tree);
76 static bool merge_object_sizes (struct object_size_info *, tree, tree,
77 unsigned HOST_WIDE_INT);
78 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
79 static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
80 static void init_offset_limit (void);
81 static void check_for_plus_in_loops (struct object_size_info *, tree);
82 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
83 unsigned int);
85 /* object_sizes[0] is upper bound for number of bytes till the end of
86 the object.
87 object_sizes[1] is upper bound for number of bytes till the end of
88 the subobject (innermost array or field with address taken).
89 object_sizes[2] is lower bound for number of bytes till the end of
90 the object and object_sizes[3] lower bound for subobject. */
91 static vec<unsigned HOST_WIDE_INT> object_sizes[4];
93 /* Bitmaps what object sizes have been computed already. */
94 static bitmap computed[4];
96 /* Maximum value of offset we consider to be addition. */
97 static unsigned HOST_WIDE_INT offset_limit;
100 /* Initialize OFFSET_LIMIT variable. */
101 static void
102 init_offset_limit (void)
104 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
105 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
106 else
107 offset_limit = -1;
108 offset_limit /= 2;
112 /* Compute offset of EXPR within VAR. Return error_mark_node
113 if unknown. */
115 static tree
116 compute_object_offset (const_tree expr, const_tree var)
118 enum tree_code code = PLUS_EXPR;
119 tree base, off, t;
121 if (expr == var)
122 return size_zero_node;
124 switch (TREE_CODE (expr))
126 case COMPONENT_REF:
127 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
128 if (base == error_mark_node)
129 return base;
131 t = TREE_OPERAND (expr, 1);
132 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
133 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
134 / BITS_PER_UNIT));
135 break;
137 case REALPART_EXPR:
138 CASE_CONVERT:
139 case VIEW_CONVERT_EXPR:
140 case NON_LVALUE_EXPR:
141 return compute_object_offset (TREE_OPERAND (expr, 0), var);
143 case IMAGPART_EXPR:
144 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
145 if (base == error_mark_node)
146 return base;
148 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
149 break;
151 case ARRAY_REF:
152 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
153 if (base == error_mark_node)
154 return base;
156 t = TREE_OPERAND (expr, 1);
157 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
159 code = MINUS_EXPR;
160 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
162 t = fold_convert (sizetype, t);
163 off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
164 break;
166 case MEM_REF:
167 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
168 return wide_int_to_tree (sizetype, mem_ref_offset (expr));
170 default:
171 return error_mark_node;
174 return size_binop (code, base, off);
178 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
179 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
180 If unknown, return unknown[object_size_type]. */
182 static unsigned HOST_WIDE_INT
183 addr_object_size (struct object_size_info *osi, const_tree ptr,
184 int object_size_type)
186 tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;
188 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
190 pt_var = TREE_OPERAND (ptr, 0);
191 while (handled_component_p (pt_var))
192 pt_var = TREE_OPERAND (pt_var, 0);
194 if (pt_var
195 && TREE_CODE (pt_var) == MEM_REF)
197 unsigned HOST_WIDE_INT sz;
199 if (!osi || (object_size_type & 1) != 0
200 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
202 sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
203 object_size_type & ~1);
205 else
207 tree var = TREE_OPERAND (pt_var, 0);
208 if (osi->pass == 0)
209 collect_object_sizes_for (osi, var);
210 if (bitmap_bit_p (computed[object_size_type],
211 SSA_NAME_VERSION (var)))
212 sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
213 else
214 sz = unknown[object_size_type];
216 if (sz != unknown[object_size_type])
218 offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
219 if (wi::neg_p (dsz))
220 sz = 0;
221 else if (wi::fits_uhwi_p (dsz))
222 sz = dsz.to_uhwi ();
223 else
224 sz = unknown[object_size_type];
227 if (sz != unknown[object_size_type] && sz < offset_limit)
228 pt_var_size = size_int (sz);
230 else if (pt_var
231 && DECL_P (pt_var)
232 && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
233 && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
234 pt_var_size = DECL_SIZE_UNIT (pt_var);
235 else if (pt_var
236 && TREE_CODE (pt_var) == STRING_CST
237 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
238 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
239 && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
240 < offset_limit)
241 pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
242 else
243 return unknown[object_size_type];
245 if (pt_var != TREE_OPERAND (ptr, 0))
247 tree var;
249 if (object_size_type & 1)
251 var = TREE_OPERAND (ptr, 0);
253 while (var != pt_var
254 && TREE_CODE (var) != BIT_FIELD_REF
255 && TREE_CODE (var) != COMPONENT_REF
256 && TREE_CODE (var) != ARRAY_REF
257 && TREE_CODE (var) != ARRAY_RANGE_REF
258 && TREE_CODE (var) != REALPART_EXPR
259 && TREE_CODE (var) != IMAGPART_EXPR)
260 var = TREE_OPERAND (var, 0);
261 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
262 var = TREE_OPERAND (var, 0);
263 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
264 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
265 || (pt_var_size
266 && tree_int_cst_lt (pt_var_size,
267 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
268 var = pt_var;
269 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
271 tree v = var;
272 /* For &X->fld, compute object size only if fld isn't the last
273 field, as struct { int i; char c[1]; } is often used instead
274 of flexible array member. */
275 while (v && v != pt_var)
276 switch (TREE_CODE (v))
278 case ARRAY_REF:
279 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
280 && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
282 tree domain
283 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
284 if (domain
285 && TYPE_MAX_VALUE (domain)
286 && TREE_CODE (TYPE_MAX_VALUE (domain))
287 == INTEGER_CST
288 && tree_int_cst_lt (TREE_OPERAND (v, 1),
289 TYPE_MAX_VALUE (domain)))
291 v = NULL_TREE;
292 break;
295 v = TREE_OPERAND (v, 0);
296 break;
297 case REALPART_EXPR:
298 case IMAGPART_EXPR:
299 v = NULL_TREE;
300 break;
301 case COMPONENT_REF:
302 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
304 v = NULL_TREE;
305 break;
307 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
308 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
309 != UNION_TYPE
310 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
311 != QUAL_UNION_TYPE)
312 break;
313 else
314 v = TREE_OPERAND (v, 0);
315 if (TREE_CODE (v) == COMPONENT_REF
316 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
317 == RECORD_TYPE)
319 tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
320 for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
321 if (TREE_CODE (fld_chain) == FIELD_DECL)
322 break;
324 if (fld_chain)
326 v = NULL_TREE;
327 break;
329 v = TREE_OPERAND (v, 0);
331 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
332 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
333 != UNION_TYPE
334 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
335 != QUAL_UNION_TYPE)
336 break;
337 else
338 v = TREE_OPERAND (v, 0);
339 if (v != pt_var)
340 v = NULL_TREE;
341 else
342 v = pt_var;
343 break;
344 default:
345 v = pt_var;
346 break;
348 if (v == pt_var)
349 var = pt_var;
352 else
353 var = pt_var;
355 if (var != pt_var)
356 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
357 else if (!pt_var_size)
358 return unknown[object_size_type];
359 else
360 var_size = pt_var_size;
361 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
362 if (bytes != error_mark_node)
364 if (TREE_CODE (bytes) == INTEGER_CST
365 && tree_int_cst_lt (var_size, bytes))
366 bytes = size_zero_node;
367 else
368 bytes = size_binop (MINUS_EXPR, var_size, bytes);
370 if (var != pt_var
371 && pt_var_size
372 && TREE_CODE (pt_var) == MEM_REF
373 && bytes != error_mark_node)
375 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
376 if (bytes2 != error_mark_node)
378 if (TREE_CODE (bytes2) == INTEGER_CST
379 && tree_int_cst_lt (pt_var_size, bytes2))
380 bytes2 = size_zero_node;
381 else
382 bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
383 bytes = size_binop (MIN_EXPR, bytes, bytes2);
387 else if (!pt_var_size)
388 return unknown[object_size_type];
389 else
390 bytes = pt_var_size;
392 if (tree_fits_uhwi_p (bytes))
393 return tree_to_uhwi (bytes);
395 return unknown[object_size_type];
399 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
400 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
401 argument from __builtin_object_size. If unknown, return
402 unknown[object_size_type]. */
404 static unsigned HOST_WIDE_INT
405 alloc_object_size (const gcall *call, int object_size_type)
407 tree callee, bytes = NULL_TREE;
408 tree alloc_size;
409 int arg1 = -1, arg2 = -1;
411 gcc_assert (is_gimple_call (call));
413 callee = gimple_call_fndecl (call);
414 if (!callee)
415 return unknown[object_size_type];
417 alloc_size = lookup_attribute ("alloc_size",
418 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
419 if (alloc_size && TREE_VALUE (alloc_size))
421 tree p = TREE_VALUE (alloc_size);
423 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
424 if (TREE_CHAIN (p))
425 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
428 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
429 switch (DECL_FUNCTION_CODE (callee))
431 case BUILT_IN_CALLOC:
432 arg2 = 1;
433 /* fall through */
434 case BUILT_IN_MALLOC:
435 case BUILT_IN_ALLOCA:
436 case BUILT_IN_ALLOCA_WITH_ALIGN:
437 arg1 = 0;
438 default:
439 break;
442 if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
443 || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
444 || (arg2 >= 0
445 && (arg2 >= (int)gimple_call_num_args (call)
446 || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
447 return unknown[object_size_type];
449 if (arg2 >= 0)
450 bytes = size_binop (MULT_EXPR,
451 fold_convert (sizetype, gimple_call_arg (call, arg1)),
452 fold_convert (sizetype, gimple_call_arg (call, arg2)));
453 else if (arg1 >= 0)
454 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
456 if (bytes && tree_fits_uhwi_p (bytes))
457 return tree_to_uhwi (bytes);
459 return unknown[object_size_type];
463 /* If object size is propagated from one of function's arguments directly
464 to its return value, return that argument for GIMPLE_CALL statement CALL.
465 Otherwise return NULL. */
467 static tree
468 pass_through_call (const gcall *call)
470 tree callee = gimple_call_fndecl (call);
472 if (callee
473 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
474 switch (DECL_FUNCTION_CODE (callee))
476 case BUILT_IN_MEMCPY:
477 case BUILT_IN_MEMMOVE:
478 case BUILT_IN_MEMSET:
479 case BUILT_IN_STRCPY:
480 case BUILT_IN_STRNCPY:
481 case BUILT_IN_STRCAT:
482 case BUILT_IN_STRNCAT:
483 case BUILT_IN_MEMCPY_CHK:
484 case BUILT_IN_MEMMOVE_CHK:
485 case BUILT_IN_MEMSET_CHK:
486 case BUILT_IN_STRCPY_CHK:
487 case BUILT_IN_STRNCPY_CHK:
488 case BUILT_IN_STPNCPY_CHK:
489 case BUILT_IN_STRCAT_CHK:
490 case BUILT_IN_STRNCAT_CHK:
491 case BUILT_IN_ASSUME_ALIGNED:
492 if (gimple_call_num_args (call) >= 1)
493 return gimple_call_arg (call, 0);
494 break;
495 default:
496 break;
499 return NULL_TREE;
503 /* Compute __builtin_object_size value for PTR. OBJECT_SIZE_TYPE is the
504 second argument from __builtin_object_size. */
506 unsigned HOST_WIDE_INT
507 compute_builtin_object_size (tree ptr, int object_size_type)
509 gcc_assert (object_size_type >= 0 && object_size_type <= 3);
511 if (! offset_limit)
512 init_offset_limit ();
514 if (TREE_CODE (ptr) == ADDR_EXPR)
515 return addr_object_size (NULL, ptr, object_size_type);
517 if (TREE_CODE (ptr) == SSA_NAME
518 && POINTER_TYPE_P (TREE_TYPE (ptr))
519 && computed[object_size_type] != NULL)
521 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
523 struct object_size_info osi;
524 bitmap_iterator bi;
525 unsigned int i;
527 if (num_ssa_names > object_sizes[object_size_type].length ())
528 object_sizes[object_size_type].safe_grow (num_ssa_names);
529 if (dump_file)
531 fprintf (dump_file, "Computing %s %sobject size for ",
532 (object_size_type & 2) ? "minimum" : "maximum",
533 (object_size_type & 1) ? "sub" : "");
534 print_generic_expr (dump_file, ptr, dump_flags);
535 fprintf (dump_file, ":\n");
538 osi.visited = BITMAP_ALLOC (NULL);
539 osi.reexamine = BITMAP_ALLOC (NULL);
540 osi.object_size_type = object_size_type;
541 osi.depths = NULL;
542 osi.stack = NULL;
543 osi.tos = NULL;
545 /* First pass: walk UD chains, compute object sizes that
546 can be computed. osi.reexamine bitmap at the end will
547 contain what variables were found in dependency cycles
548 and therefore need to be reexamined. */
549 osi.pass = 0;
550 osi.changed = false;
551 collect_object_sizes_for (&osi, ptr);
553 /* Second pass: keep recomputing object sizes of variables
554 that need reexamination, until no object sizes are
555 increased or all object sizes are computed. */
556 if (! bitmap_empty_p (osi.reexamine))
558 bitmap reexamine = BITMAP_ALLOC (NULL);
560 /* If looking for minimum instead of maximum object size,
561 detect cases where a pointer is increased in a loop.
562 Although even without this detection pass 2 would eventually
563 terminate, it could take a long time. If a pointer is
564 increasing this way, we need to assume 0 object size.
565 E.g. p = &buf[0]; while (cond) p = p + 4; */
566 if (object_size_type & 2)
568 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
569 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
570 osi.tos = osi.stack;
571 osi.pass = 1;
572 /* collect_object_sizes_for is changing
573 osi.reexamine bitmap, so iterate over a copy. */
574 bitmap_copy (reexamine, osi.reexamine);
575 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
576 if (bitmap_bit_p (osi.reexamine, i))
577 check_for_plus_in_loops (&osi, ssa_name (i));
579 free (osi.depths);
580 osi.depths = NULL;
581 free (osi.stack);
582 osi.stack = NULL;
583 osi.tos = NULL;
588 osi.pass = 2;
589 osi.changed = false;
590 /* collect_object_sizes_for is changing
591 osi.reexamine bitmap, so iterate over a copy. */
592 bitmap_copy (reexamine, osi.reexamine);
593 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
594 if (bitmap_bit_p (osi.reexamine, i))
596 collect_object_sizes_for (&osi, ssa_name (i));
597 if (dump_file && (dump_flags & TDF_DETAILS))
599 fprintf (dump_file, "Reexamining ");
600 print_generic_expr (dump_file, ssa_name (i),
601 dump_flags);
602 fprintf (dump_file, "\n");
606 while (osi.changed);
608 BITMAP_FREE (reexamine);
610 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
611 bitmap_set_bit (computed[object_size_type], i);
613 /* Debugging dumps. */
614 if (dump_file)
616 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
617 if (object_sizes[object_size_type][i]
618 != unknown[object_size_type])
620 print_generic_expr (dump_file, ssa_name (i),
621 dump_flags);
622 fprintf (dump_file,
623 ": %s %sobject size "
624 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
625 (object_size_type & 2) ? "minimum" : "maximum",
626 (object_size_type & 1) ? "sub" : "",
627 object_sizes[object_size_type][i]);
631 BITMAP_FREE (osi.reexamine);
632 BITMAP_FREE (osi.visited);
635 return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
638 return unknown[object_size_type];
641 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
643 static void
644 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
646 int object_size_type = osi->object_size_type;
647 unsigned int varno = SSA_NAME_VERSION (ptr);
648 unsigned HOST_WIDE_INT bytes;
650 gcc_assert (object_sizes[object_size_type][varno]
651 != unknown[object_size_type]);
652 gcc_assert (osi->pass == 0);
654 if (TREE_CODE (value) == WITH_SIZE_EXPR)
655 value = TREE_OPERAND (value, 0);
657 /* Pointer variables should have been handled by merge_object_sizes. */
658 gcc_assert (TREE_CODE (value) != SSA_NAME
659 || !POINTER_TYPE_P (TREE_TYPE (value)));
661 if (TREE_CODE (value) == ADDR_EXPR)
662 bytes = addr_object_size (osi, value, object_size_type);
663 else
664 bytes = unknown[object_size_type];
666 if ((object_size_type & 2) == 0)
668 if (object_sizes[object_size_type][varno] < bytes)
669 object_sizes[object_size_type][varno] = bytes;
671 else
673 if (object_sizes[object_size_type][varno] > bytes)
674 object_sizes[object_size_type][varno] = bytes;
679 /* Compute object_sizes for PTR, defined to the result of a call. */
681 static void
682 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
684 int object_size_type = osi->object_size_type;
685 unsigned int varno = SSA_NAME_VERSION (ptr);
686 unsigned HOST_WIDE_INT bytes;
688 gcc_assert (is_gimple_call (call));
690 gcc_assert (object_sizes[object_size_type][varno]
691 != unknown[object_size_type]);
692 gcc_assert (osi->pass == 0);
694 bytes = alloc_object_size (call, object_size_type);
696 if ((object_size_type & 2) == 0)
698 if (object_sizes[object_size_type][varno] < bytes)
699 object_sizes[object_size_type][varno] = bytes;
701 else
703 if (object_sizes[object_size_type][varno] > bytes)
704 object_sizes[object_size_type][varno] = bytes;
709 /* Compute object_sizes for PTR, defined to an unknown value. */
711 static void
712 unknown_object_size (struct object_size_info *osi, tree ptr)
714 int object_size_type = osi->object_size_type;
715 unsigned int varno = SSA_NAME_VERSION (ptr);
716 unsigned HOST_WIDE_INT bytes;
718 gcc_assert (object_sizes[object_size_type][varno]
719 != unknown[object_size_type]);
720 gcc_assert (osi->pass == 0);
722 bytes = unknown[object_size_type];
724 if ((object_size_type & 2) == 0)
726 if (object_sizes[object_size_type][varno] < bytes)
727 object_sizes[object_size_type][varno] = bytes;
729 else
731 if (object_sizes[object_size_type][varno] > bytes)
732 object_sizes[object_size_type][varno] = bytes;
737 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
738 the object size might need reexamination later. */
740 static bool
741 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
742 unsigned HOST_WIDE_INT offset)
744 int object_size_type = osi->object_size_type;
745 unsigned int varno = SSA_NAME_VERSION (dest);
746 unsigned HOST_WIDE_INT orig_bytes;
748 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
749 return false;
750 if (offset >= offset_limit)
752 object_sizes[object_size_type][varno] = unknown[object_size_type];
753 return false;
756 if (osi->pass == 0)
757 collect_object_sizes_for (osi, orig);
759 orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
760 if (orig_bytes != unknown[object_size_type])
761 orig_bytes = (offset > orig_bytes)
762 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;
764 if ((object_size_type & 2) == 0)
766 if (object_sizes[object_size_type][varno] < orig_bytes)
768 object_sizes[object_size_type][varno] = orig_bytes;
769 osi->changed = true;
772 else
774 if (object_sizes[object_size_type][varno] > orig_bytes)
776 object_sizes[object_size_type][varno] = orig_bytes;
777 osi->changed = true;
780 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
784 /* Compute object_sizes for VAR, defined to the result of an assignment
785 with operator POINTER_PLUS_EXPR. Return true if the object size might
786 need reexamination later. */
788 static bool
789 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
791 int object_size_type = osi->object_size_type;
792 unsigned int varno = SSA_NAME_VERSION (var);
793 unsigned HOST_WIDE_INT bytes;
794 tree op0, op1;
796 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
798 op0 = gimple_assign_rhs1 (stmt);
799 op1 = gimple_assign_rhs2 (stmt);
801 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
803 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
804 gcc_assert (TREE_CODE (rhs) == MEM_REF);
805 op0 = TREE_OPERAND (rhs, 0);
806 op1 = TREE_OPERAND (rhs, 1);
808 else
809 gcc_unreachable ();
811 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
812 return false;
814 /* Handle PTR + OFFSET here. */
815 if (TREE_CODE (op1) == INTEGER_CST
816 && (TREE_CODE (op0) == SSA_NAME
817 || TREE_CODE (op0) == ADDR_EXPR))
819 if (! tree_fits_uhwi_p (op1))
820 bytes = unknown[object_size_type];
821 else if (TREE_CODE (op0) == SSA_NAME)
822 return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
823 else
825 unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);
827 /* op0 will be ADDR_EXPR here. */
828 bytes = addr_object_size (osi, op0, object_size_type);
829 if (bytes == unknown[object_size_type])
831 else if (off > offset_limit)
832 bytes = unknown[object_size_type];
833 else if (off > bytes)
834 bytes = 0;
835 else
836 bytes -= off;
839 else
840 bytes = unknown[object_size_type];
842 if ((object_size_type & 2) == 0)
844 if (object_sizes[object_size_type][varno] < bytes)
845 object_sizes[object_size_type][varno] = bytes;
847 else
849 if (object_sizes[object_size_type][varno] > bytes)
850 object_sizes[object_size_type][varno] = bytes;
852 return false;
856 /* Compute object_sizes for VAR, defined at STMT, which is
857 a COND_EXPR. Return true if the object size might need reexamination
858 later. */
860 static bool
861 cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
863 tree then_, else_;
864 int object_size_type = osi->object_size_type;
865 unsigned int varno = SSA_NAME_VERSION (var);
866 bool reexamine = false;
868 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
870 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
871 return false;
873 then_ = gimple_assign_rhs2 (stmt);
874 else_ = gimple_assign_rhs3 (stmt);
876 if (TREE_CODE (then_) == SSA_NAME)
877 reexamine |= merge_object_sizes (osi, var, then_, 0);
878 else
879 expr_object_size (osi, var, then_);
881 if (TREE_CODE (else_) == SSA_NAME)
882 reexamine |= merge_object_sizes (osi, var, else_, 0);
883 else
884 expr_object_size (osi, var, else_);
886 return reexamine;
889 /* Compute object sizes for VAR.
890 For ADDR_EXPR an object size is the number of remaining bytes
891 to the end of the object (where what is considered an object depends on
892 OSI->object_size_type).
893 For allocation GIMPLE_CALL like malloc or calloc object size is the size
894 of the allocation.
895 For POINTER_PLUS_EXPR where second operand is a constant integer,
896 object size is object size of the first operand minus the constant.
897 If the constant is bigger than the number of remaining bytes until the
898 end of the object, object size is 0, but if it is instead a pointer
899 subtraction, object size is unknown[object_size_type].
900 To differentiate addition from subtraction, ADDR_EXPR returns
901 unknown[object_size_type] for all objects bigger than half of the address
902 space, and constants less than half of the address space are considered
903 addition, while bigger constants subtraction.
904 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
905 object size is object size of that argument.
906 Otherwise, object size is the maximum of object sizes of variables
907 that it might be set to. */
909 static void
910 collect_object_sizes_for (struct object_size_info *osi, tree var)
912 int object_size_type = osi->object_size_type;
913 unsigned int varno = SSA_NAME_VERSION (var);
914 gimple stmt;
915 bool reexamine;
917 if (bitmap_bit_p (computed[object_size_type], varno))
918 return;
920 if (osi->pass == 0)
922 if (bitmap_set_bit (osi->visited, varno))
924 object_sizes[object_size_type][varno]
925 = (object_size_type & 2) ? -1 : 0;
927 else
929 /* Found a dependency loop. Mark the variable for later
930 re-examination. */
931 bitmap_set_bit (osi->reexamine, varno);
932 if (dump_file && (dump_flags & TDF_DETAILS))
934 fprintf (dump_file, "Found a dependency loop at ");
935 print_generic_expr (dump_file, var, dump_flags);
936 fprintf (dump_file, "\n");
938 return;
942 if (dump_file && (dump_flags & TDF_DETAILS))
944 fprintf (dump_file, "Visiting use-def links for ");
945 print_generic_expr (dump_file, var, dump_flags);
946 fprintf (dump_file, "\n");
949 stmt = SSA_NAME_DEF_STMT (var);
950 reexamine = false;
952 switch (gimple_code (stmt))
954 case GIMPLE_ASSIGN:
956 tree rhs = gimple_assign_rhs1 (stmt);
957 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
958 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
959 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
960 reexamine = plus_stmt_object_size (osi, var, stmt);
961 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
962 reexamine = cond_expr_object_size (osi, var, stmt);
963 else if (gimple_assign_single_p (stmt)
964 || gimple_assign_unary_nop_p (stmt))
966 if (TREE_CODE (rhs) == SSA_NAME
967 && POINTER_TYPE_P (TREE_TYPE (rhs)))
968 reexamine = merge_object_sizes (osi, var, rhs, 0);
969 else
970 expr_object_size (osi, var, rhs);
972 else
973 unknown_object_size (osi, var);
974 break;
977 case GIMPLE_CALL:
979 gcall *call_stmt = as_a <gcall *> (stmt);
980 tree arg = pass_through_call (call_stmt);
981 if (arg)
983 if (TREE_CODE (arg) == SSA_NAME
984 && POINTER_TYPE_P (TREE_TYPE (arg)))
985 reexamine = merge_object_sizes (osi, var, arg, 0);
986 else
987 expr_object_size (osi, var, arg);
989 else
990 call_object_size (osi, var, call_stmt);
991 break;
994 case GIMPLE_ASM:
995 /* Pointers defined by __asm__ statements can point anywhere. */
996 object_sizes[object_size_type][varno] = unknown[object_size_type];
997 break;
999 case GIMPLE_NOP:
1000 if (SSA_NAME_VAR (var)
1001 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
1002 expr_object_size (osi, var, SSA_NAME_VAR (var));
1003 else
1004 /* Uninitialized SSA names point nowhere. */
1005 object_sizes[object_size_type][varno] = unknown[object_size_type];
1006 break;
1008 case GIMPLE_PHI:
1010 unsigned i;
1012 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1014 tree rhs = gimple_phi_arg (stmt, i)->def;
1016 if (object_sizes[object_size_type][varno]
1017 == unknown[object_size_type])
1018 break;
1020 if (TREE_CODE (rhs) == SSA_NAME)
1021 reexamine |= merge_object_sizes (osi, var, rhs, 0);
1022 else if (osi->pass == 0)
1023 expr_object_size (osi, var, rhs);
1025 break;
1028 default:
1029 gcc_unreachable ();
1032 if (! reexamine
1033 || object_sizes[object_size_type][varno] == unknown[object_size_type])
1035 bitmap_set_bit (computed[object_size_type], varno);
1036 bitmap_clear_bit (osi->reexamine, varno);
1038 else
1040 bitmap_set_bit (osi->reexamine, varno);
1041 if (dump_file && (dump_flags & TDF_DETAILS))
1043 fprintf (dump_file, "Need to reexamine ");
1044 print_generic_expr (dump_file, var, dump_flags);
1045 fprintf (dump_file, "\n");
1051 /* Helper function for check_for_plus_in_loops. Called recursively
1052 to detect loops. */
1054 static void
1055 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1056 unsigned int depth)
1058 gimple stmt = SSA_NAME_DEF_STMT (var);
1059 unsigned int varno = SSA_NAME_VERSION (var);
1061 if (osi->depths[varno])
1063 if (osi->depths[varno] != depth)
1065 unsigned int *sp;
1067 /* Found a loop involving pointer addition. */
1068 for (sp = osi->tos; sp > osi->stack; )
1070 --sp;
1071 bitmap_clear_bit (osi->reexamine, *sp);
1072 bitmap_set_bit (computed[osi->object_size_type], *sp);
1073 object_sizes[osi->object_size_type][*sp] = 0;
1074 if (*sp == varno)
1075 break;
1078 return;
1080 else if (! bitmap_bit_p (osi->reexamine, varno))
1081 return;
1083 osi->depths[varno] = depth;
1084 *osi->tos++ = varno;
1086 switch (gimple_code (stmt))
1089 case GIMPLE_ASSIGN:
1091 if ((gimple_assign_single_p (stmt)
1092 || gimple_assign_unary_nop_p (stmt))
1093 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1095 tree rhs = gimple_assign_rhs1 (stmt);
1097 check_for_plus_in_loops_1 (osi, rhs, depth);
1099 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1101 tree basevar = gimple_assign_rhs1 (stmt);
1102 tree cst = gimple_assign_rhs2 (stmt);
1104 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1106 check_for_plus_in_loops_1 (osi, basevar,
1107 depth + !integer_zerop (cst));
1109 else
1110 gcc_unreachable ();
1111 break;
1114 case GIMPLE_CALL:
1116 gcall *call_stmt = as_a <gcall *> (stmt);
1117 tree arg = pass_through_call (call_stmt);
1118 if (arg)
1120 if (TREE_CODE (arg) == SSA_NAME)
1121 check_for_plus_in_loops_1 (osi, arg, depth);
1122 else
1123 gcc_unreachable ();
1125 break;
1128 case GIMPLE_PHI:
1130 unsigned i;
1132 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1134 tree rhs = gimple_phi_arg (stmt, i)->def;
1136 if (TREE_CODE (rhs) == SSA_NAME)
1137 check_for_plus_in_loops_1 (osi, rhs, depth);
1139 break;
1142 default:
1143 gcc_unreachable ();
1146 osi->depths[varno] = 0;
1147 osi->tos--;
1151 /* Check if some pointer we are computing object size of is being increased
1152 within a loop. If yes, assume all the SSA variables participating in
1153 that loop have minimum object sizes 0. */
1155 static void
1156 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1158 gimple stmt = SSA_NAME_DEF_STMT (var);
1160 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1161 and looked for a POINTER_PLUS_EXPR in the pass-through
1162 argument, if any. In GIMPLE, however, such an expression
1163 is not a valid call operand. */
1165 if (is_gimple_assign (stmt)
1166 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1168 tree basevar = gimple_assign_rhs1 (stmt);
1169 tree cst = gimple_assign_rhs2 (stmt);
1171 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1173 if (integer_zerop (cst))
1174 return;
1176 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1177 *osi->tos++ = SSA_NAME_VERSION (basevar);
1178 check_for_plus_in_loops_1 (osi, var, 2);
1179 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1180 osi->tos--;
1185 /* Initialize data structures for the object size computation. */
1187 void
1188 init_object_sizes (void)
1190 int object_size_type;
1192 if (computed[0])
1193 return;
1195 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1197 object_sizes[object_size_type].safe_grow (num_ssa_names);
1198 computed[object_size_type] = BITMAP_ALLOC (NULL);
1201 init_offset_limit ();
1205 /* Destroy data structures after the object size computation. */
1207 static void
1208 fini_object_sizes (void)
1210 int object_size_type;
1212 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1214 object_sizes[object_size_type].release ();
1215 BITMAP_FREE (computed[object_size_type]);
1220 /* Simple pass to optimize all __builtin_object_size () builtins. */
1222 namespace {
/* Pass-manager metadata for the objsz pass: a GIMPLE pass that requires
   CFG and SSA form, provides/destroys nothing, and sets no TODO flags.  */
1224 const pass_data pass_data_object_sizes =
1226 GIMPLE_PASS, /* type */
1227 "objsz", /* name */
1228 OPTGROUP_NONE, /* optinfo_flags */
1229 TV_NONE, /* tv_id */
1230 ( PROP_cfg | PROP_ssa ), /* properties_required */
1231 0, /* properties_provided */
1232 0, /* properties_destroyed */
1233 0, /* todo_flags_start */
1234 0, /* todo_flags_finish */
/* The objsz pass object.  clone () allows the pass manager to schedule
   the pass more than once; the real work is in execute () below.  */
1237 class pass_object_sizes : public gimple_opt_pass
1239 public:
1240 pass_object_sizes (gcc::context *ctxt)
1241 : gimple_opt_pass (pass_data_object_sizes, ctxt)
1244 /* opt_pass methods: */
1245 opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
1246 virtual unsigned int execute (function *);
1248 }; // class pass_object_sizes
/* Main pass body: walk every statement of FUN, fold each call to
   __builtin_object_size into an INTEGER_CST, and propagate the constant
   into all immediate uses of the call's result.  Returns 0 (no extra
   TODO flags).  */
1250 unsigned int
1251 pass_object_sizes::execute (function *fun)
1253 basic_block bb;
1254 FOR_EACH_BB_FN (bb, fun)
1256 gimple_stmt_iterator i;
1257 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1259 tree result;
1260 gimple call = gsi_stmt (i);
1261 if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
1262 continue;
/* The object-size tables are built lazily, only once the first
   __builtin_object_size call is actually encountered.  */
1264 init_object_sizes ();
1265 result = fold_call_stmt (as_a <gcall *> (call), false);
1266 if (!result)
/* Folding failed; for a well-formed two-argument call with a pointer
   first argument, fall back to the documented "unknown" values.  */
1268 if (gimple_call_num_args (call) == 2
1269 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
1271 tree ost = gimple_call_arg (call, 1);
1273 if (tree_fits_uhwi_p (ost))
1275 unsigned HOST_WIDE_INT object_size_type
1276 = tree_to_uhwi (ost);
/* Types 0 and 1 ask for a maximum size: unknown is (size_t)-1.
   Types 2 and 3 ask for a minimum size: unknown is 0.  */
1278 if (object_size_type < 2)
1279 result = fold_convert (size_type_node,
1280 integer_minus_one_node)
1281 else if (object_size_type < 4)
1282 result = build_zero_cst (size_type_node);
/* Still no result (malformed call or out-of-range type): leave the
   statement alone.  */
1286 if (!result)
1287 continue;
1290 gcc_assert (TREE_CODE (result) == INTEGER_CST);
1292 if (dump_file && (dump_flags & TDF_DETAILS))
1294 fprintf (dump_file, "Simplified\n ");
1295 print_gimple_stmt (dump_file, call, 0, dump_flags);
1296 fprintf (dump_file, " to ");
1297 print_generic_expr (dump_file, result, 0);
1298 fprintf (dump_file, "\n");
/* A call whose result is unused needs no propagation; the dead call
   itself is left for later passes to remove.  */
1301 tree lhs = gimple_call_lhs (call);
1302 if (!lhs)
1303 continue;
1305 /* Propagate into all uses and fold those stmts. */
1306 gimple use_stmt;
1307 imm_use_iterator iter;
1308 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
1310 use_operand_p use_p;
1311 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1312 SET_USE (use_p, result);
/* Re-fold each using statement so the new constant simplifies it
   immediately, then keep its operand caches up to date.  */
1313 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1314 fold_stmt (&gsi);
1315 update_stmt (gsi_stmt (gsi));
1320 fini_object_sizes ();
1321 return 0;
1324 } // anon namespace
/* Factory function called by the pass manager to create a fresh objsz
   pass instance for context CTXT.  */
1326 gimple_opt_pass *
1327 make_pass_object_sizes (gcc::context *ctxt)
1329 return new pass_object_sizes (ctxt);