2010-07-27 Paolo Carlini <paolo.carlini@oracle.com>
[official-gcc/alias-decl.git] / gcc / tree-object-size.c
blob0ea5538640f657450ec8f81ad6d664bde3ea24d2
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jakub Jelinek <jakub@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "diagnostic-core.h"
28 #include "toplev.h"
29 #include "tree-pretty-print.h"
30 #include "gimple-pretty-print.h"
31 #include "tree-flow.h"
32 #include "tree-pass.h"
33 #include "tree-ssa-propagate.h"
35 struct object_size_info
37 int object_size_type;
38 bitmap visited, reexamine;
39 int pass;
40 bool changed;
41 unsigned int *depths;
42 unsigned int *stack, *tos;
45 static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
47 static tree compute_object_offset (const_tree, const_tree);
48 static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
49 const_tree, int);
50 static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
51 static tree pass_through_call (const_gimple);
52 static void collect_object_sizes_for (struct object_size_info *, tree);
53 static void expr_object_size (struct object_size_info *, tree, tree);
54 static bool merge_object_sizes (struct object_size_info *, tree, tree,
55 unsigned HOST_WIDE_INT);
56 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
57 static bool cond_expr_object_size (struct object_size_info *, tree, tree);
58 static unsigned int compute_object_sizes (void);
59 static void init_offset_limit (void);
60 static void check_for_plus_in_loops (struct object_size_info *, tree);
61 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
62 unsigned int);
64 /* object_sizes[0] is upper bound for number of bytes till the end of
65 the object.
66 object_sizes[1] is upper bound for number of bytes till the end of
67 the subobject (innermost array or field with address taken).
68 object_sizes[2] is lower bound for number of bytes till the end of
69 the object and object_sizes[3] lower bound for subobject. */
70 static unsigned HOST_WIDE_INT *object_sizes[4];
72 /* Bitmaps what object sizes have been computed already. */
73 static bitmap computed[4];
75 /* Maximum value of offset we consider to be addition. */
76 static unsigned HOST_WIDE_INT offset_limit;
79 /* Initialize OFFSET_LIMIT variable. */
80 static void
81 init_offset_limit (void)
83 if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
84 offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
85 else
86 offset_limit = -1;
87 offset_limit /= 2;
91 /* Compute offset of EXPR within VAR. Return error_mark_node
92 if unknown. */
94 static tree
95 compute_object_offset (const_tree expr, const_tree var)
97 enum tree_code code = PLUS_EXPR;
98 tree base, off, t;
100 if (expr == var)
101 return size_zero_node;
103 switch (TREE_CODE (expr))
105 case COMPONENT_REF:
106 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
107 if (base == error_mark_node)
108 return base;
110 t = TREE_OPERAND (expr, 1);
111 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
112 size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
113 / BITS_PER_UNIT));
114 break;
116 case REALPART_EXPR:
117 CASE_CONVERT:
118 case VIEW_CONVERT_EXPR:
119 case NON_LVALUE_EXPR:
120 return compute_object_offset (TREE_OPERAND (expr, 0), var);
122 case IMAGPART_EXPR:
123 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
124 if (base == error_mark_node)
125 return base;
127 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
128 break;
130 case ARRAY_REF:
131 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
132 if (base == error_mark_node)
133 return base;
135 t = TREE_OPERAND (expr, 1);
136 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
138 code = MINUS_EXPR;
139 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
141 t = fold_convert (sizetype, t);
142 off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
143 break;
145 case MEM_REF:
146 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
147 return TREE_OPERAND (expr, 1);
149 default:
150 return error_mark_node;
153 return size_binop (code, base, off);
157 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
158 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
159 If unknown, return unknown[object_size_type]. */
161 static unsigned HOST_WIDE_INT
162 addr_object_size (struct object_size_info *osi, const_tree ptr,
163 int object_size_type)
165 tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;
167 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
169 pt_var = TREE_OPERAND (ptr, 0);
170 if (REFERENCE_CLASS_P (pt_var))
171 pt_var = get_base_address (pt_var);
/* Case 1: the base is a MEM_REF of an SSA pointer: derive the size
   from that pointer's whole-object size minus the constant offset.
   NOTE(review): brace-only lines were dropped from this listing;
   nesting follows the original source.  */
173 if (pt_var
174 && TREE_CODE (pt_var) == MEM_REF
175 && TREE_CODE (TREE_OPERAND (pt_var, 0)) == SSA_NAME
176 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (pt_var, 0))))
178 unsigned HOST_WIDE_INT sz;
/* Without an OSI context (or for subobject queries), fall back to a
   fresh whole-object computation for the base pointer.  */
180 if (!osi || (object_size_type & 1) != 0)
182 sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
183 object_size_type & ~1);
184 if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
185 sz -= TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
186 else
187 sz = offset_limit;
189 else
/* Within a propagation walk, reuse (or trigger) the cached size of
   the base SSA name.  */
191 tree var = TREE_OPERAND (pt_var, 0);
192 if (osi->pass == 0)
193 collect_object_sizes_for (osi, var);
194 if (bitmap_bit_p (computed[object_size_type],
195 SSA_NAME_VERSION (var)))
196 sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
197 else
198 sz = unknown[object_size_type];
199 if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
200 sz -= TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
201 else
202 sz = offset_limit;
205 if (sz != unknown[object_size_type] && sz < offset_limit)
206 pt_var_size = size_int (sz);
/* Case 2: the base is a declaration or string literal whose type size
   is a usable constant below offset_limit.  */
208 else if (pt_var
209 && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
210 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
211 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
212 && (unsigned HOST_WIDE_INT)
213 tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
214 < offset_limit)
215 pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
216 else
217 return unknown[object_size_type];
/* PTR points into the interior of the base object.  For subobject
   queries (object_size_type & 1) locate the innermost enclosing
   array or field whose address was taken.  */
219 if (pt_var != TREE_OPERAND (ptr, 0))
221 tree var;
223 if (object_size_type & 1)
225 var = TREE_OPERAND (ptr, 0);
227 while (var != pt_var
228 && TREE_CODE (var) != BIT_FIELD_REF
229 && TREE_CODE (var) != COMPONENT_REF
230 && TREE_CODE (var) != ARRAY_REF
231 && TREE_CODE (var) != ARRAY_RANGE_REF
232 && TREE_CODE (var) != REALPART_EXPR
233 && TREE_CODE (var) != IMAGPART_EXPR)
234 var = TREE_OPERAND (var, 0);
235 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
236 var = TREE_OPERAND (var, 0);
237 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
238 || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
239 || (pt_var_size
240 && tree_int_cst_lt (pt_var_size,
241 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
242 var = pt_var;
243 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
245 tree v = var;
246 /* For &X->fld, compute object size only if fld isn't the last
247 field, as struct { int i; char c[1]; } is often used instead
248 of flexible array member. */
249 while (v && v != pt_var)
250 switch (TREE_CODE (v))
252 case ARRAY_REF:
253 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
254 && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
256 tree domain
257 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
/* An in-bounds constant index means this is not a trailing
   pseudo-flexible array; stop the walk.  */
258 if (domain
259 && TYPE_MAX_VALUE (domain)
260 && TREE_CODE (TYPE_MAX_VALUE (domain))
261 == INTEGER_CST
262 && tree_int_cst_lt (TREE_OPERAND (v, 1),
263 TYPE_MAX_VALUE (domain)))
265 v = NULL_TREE;
266 break;
269 v = TREE_OPERAND (v, 0);
270 break;
271 case REALPART_EXPR:
272 case IMAGPART_EXPR:
273 v = NULL_TREE;
274 break;
275 case COMPONENT_REF:
276 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
278 v = NULL_TREE;
279 break;
/* Skip through union members, which do not pin a position.  */
281 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
282 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
283 != UNION_TYPE
284 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
285 != QUAL_UNION_TYPE)
286 break;
287 else
288 v = TREE_OPERAND (v, 0);
/* If a later FIELD_DECL follows this member, the array is not the
   struct's last field, so the subobject size is usable.  */
289 if (TREE_CODE (v) == COMPONENT_REF
290 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
291 == RECORD_TYPE)
293 tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
294 for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
295 if (TREE_CODE (fld_chain) == FIELD_DECL)
296 break;
298 if (fld_chain)
300 v = NULL_TREE;
301 break;
303 v = TREE_OPERAND (v, 0);
305 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
306 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
307 != UNION_TYPE
308 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
309 != QUAL_UNION_TYPE)
310 break;
311 else
312 v = TREE_OPERAND (v, 0);
313 if (v != pt_var)
314 v = NULL_TREE;
315 else
316 v = pt_var;
317 break;
318 default:
319 v = pt_var;
320 break;
/* V == PT_VAR means the reference chain reached the base without a
   usable interior subobject; fall back to the whole object.  */
322 if (v == pt_var)
323 var = pt_var;
326 else
327 var = pt_var;
/* Remaining bytes = size of chosen (sub)object minus the offset of
   PTR within it, clamped at zero.  */
329 if (var != pt_var)
330 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
331 else if (!pt_var_size)
332 return unknown[object_size_type];
333 else
334 var_size = pt_var_size;
335 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
336 if (bytes != error_mark_node)
338 if (TREE_CODE (bytes) == INTEGER_CST
339 && tree_int_cst_lt (var_size, bytes))
340 bytes = size_zero_node;
341 else
342 bytes = size_binop (MINUS_EXPR, var_size, bytes);
/* For a MEM_REF base also bound by the bytes remaining in the whole
   object and take the minimum.  */
344 if (var != pt_var
345 && pt_var_size
346 && TREE_CODE (pt_var) == MEM_REF
347 && bytes != error_mark_node)
349 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
350 if (bytes2 != error_mark_node)
352 bytes2 = size_binop (PLUS_EXPR, bytes2,
353 TREE_OPERAND (pt_var, 1));
354 if (TREE_CODE (bytes2) == INTEGER_CST
355 && tree_int_cst_lt (pt_var_size, bytes2))
356 bytes2 = size_zero_node;
357 else
358 bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
359 bytes = size_binop (MIN_EXPR, bytes, bytes2);
363 else if (!pt_var_size)
364 return unknown[object_size_type];
365 else
366 bytes = pt_var_size;
368 if (host_integerp (bytes, 1))
369 return tree_low_cst (bytes, 1);
371 return unknown[object_size_type];
375 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
376 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
377 argument from __builtin_object_size. If unknown, return
378 unknown[object_size_type]. */
380 static unsigned HOST_WIDE_INT
381 alloc_object_size (const_gimple call, int object_size_type)
383 tree callee, bytes = NULL_TREE;
384 tree alloc_size;
385 int arg1 = -1, arg2 = -1;
387 gcc_assert (is_gimple_call (call));
389 callee = gimple_call_fndecl (call);
390 if (!callee)
391 return unknown[object_size_type];
393 alloc_size = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (TREE_TYPE(callee)));
394 if (alloc_size && TREE_VALUE (alloc_size))
396 tree p = TREE_VALUE (alloc_size);
398 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
399 if (TREE_CHAIN (p))
400 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
403 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
404 switch (DECL_FUNCTION_CODE (callee))
406 case BUILT_IN_CALLOC:
407 arg2 = 1;
408 /* fall through */
409 case BUILT_IN_MALLOC:
410 case BUILT_IN_ALLOCA:
411 arg1 = 0;
412 default:
413 break;
416 if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
417 || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
418 || (arg2 >= 0
419 && (arg2 >= (int)gimple_call_num_args (call)
420 || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
421 return unknown[object_size_type];
423 if (arg2 >= 0)
424 bytes = size_binop (MULT_EXPR,
425 fold_convert (sizetype, gimple_call_arg (call, arg1)),
426 fold_convert (sizetype, gimple_call_arg (call, arg2)));
427 else if (arg1 >= 0)
428 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
430 if (bytes && host_integerp (bytes, 1))
431 return tree_low_cst (bytes, 1);
433 return unknown[object_size_type];
437 /* If object size is propagated from one of function's arguments directly
438 to its return value, return that argument for GIMPLE_CALL statement CALL.
439 Otherwise return NULL. */
441 static tree
442 pass_through_call (const_gimple call)
444 tree callee = gimple_call_fndecl (call);
446 if (callee
447 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
448 switch (DECL_FUNCTION_CODE (callee))
450 case BUILT_IN_MEMCPY:
451 case BUILT_IN_MEMMOVE:
452 case BUILT_IN_MEMSET:
453 case BUILT_IN_STRCPY:
454 case BUILT_IN_STRNCPY:
455 case BUILT_IN_STRCAT:
456 case BUILT_IN_STRNCAT:
457 case BUILT_IN_MEMCPY_CHK:
458 case BUILT_IN_MEMMOVE_CHK:
459 case BUILT_IN_MEMSET_CHK:
460 case BUILT_IN_STRCPY_CHK:
461 case BUILT_IN_STRNCPY_CHK:
462 case BUILT_IN_STRCAT_CHK:
463 case BUILT_IN_STRNCAT_CHK:
464 if (gimple_call_num_args (call) >= 1)
465 return gimple_call_arg (call, 0);
466 break;
467 default:
468 break;
471 return NULL_TREE;
475 /* Compute __builtin_object_size value for PTR. OBJECT_SIZE_TYPE is the
476 second argument from __builtin_object_size. */
478 unsigned HOST_WIDE_INT
479 compute_builtin_object_size (tree ptr, int object_size_type)
481 gcc_assert (object_size_type >= 0 && object_size_type <= 3);
483 if (! offset_limit)
484 init_offset_limit ();
/* A direct &object needs no SSA propagation.  */
486 if (TREE_CODE (ptr) == ADDR_EXPR)
487 return addr_object_size (NULL, ptr, object_size_type);
489 if (TREE_CODE (ptr) == SSA_NAME
490 && POINTER_TYPE_P (TREE_TYPE (ptr))
491 && object_sizes[object_size_type] != NULL)
493 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
495 struct object_size_info osi;
496 bitmap_iterator bi;
497 unsigned int i;
499 if (dump_file)
501 fprintf (dump_file, "Computing %s %sobject size for ",
502 (object_size_type & 2) ? "minimum" : "maximum",
503 (object_size_type & 1) ? "sub" : "");
504 print_generic_expr (dump_file, ptr, dump_flags);
505 fprintf (dump_file, ":\n");
508 osi.visited = BITMAP_ALLOC (NULL);
509 osi.reexamine = BITMAP_ALLOC (NULL);
510 osi.object_size_type = object_size_type;
511 osi.depths = NULL;
512 osi.stack = NULL;
513 osi.tos = NULL;
515 /* First pass: walk UD chains, compute object sizes that
516 can be computed. osi.reexamine bitmap at the end will
517 contain what variables were found in dependency cycles
518 and therefore need to be reexamined. */
519 osi.pass = 0;
520 osi.changed = false;
521 collect_object_sizes_for (&osi, ptr);
523 /* Second pass: keep recomputing object sizes of variables
524 that need reexamination, until no object sizes are
525 increased or all object sizes are computed. */
526 if (! bitmap_empty_p (osi.reexamine))
528 bitmap reexamine = BITMAP_ALLOC (NULL);
530 /* If looking for minimum instead of maximum object size,
531 detect cases where a pointer is increased in a loop.
532 Although even without this detection pass 2 would eventually
533 terminate, it could take a long time. If a pointer is
534 increasing this way, we need to assume 0 object size.
535 E.g. p = &buf[0]; while (cond) p = p + 4; */
536 if (object_size_type & 2)
538 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
539 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
540 osi.tos = osi.stack;
541 osi.pass = 1;
542 /* collect_object_sizes_for is changing
543 osi.reexamine bitmap, so iterate over a copy. */
544 bitmap_copy (reexamine, osi.reexamine);
545 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
546 if (bitmap_bit_p (osi.reexamine, i))
547 check_for_plus_in_loops (&osi, ssa_name (i));
549 free (osi.depths);
550 osi.depths = NULL;
551 free (osi.stack);
552 osi.stack = NULL;
553 osi.tos = NULL;
/* NOTE(review): pass 2 below is the body of a do/while (osi.changed)
   loop; the "do" line is not visible in this extract but the closing
   "while" at original line 576 implies it.  */
558 osi.pass = 2;
559 osi.changed = false;
560 /* collect_object_sizes_for is changing
561 osi.reexamine bitmap, so iterate over a copy. */
562 bitmap_copy (reexamine, osi.reexamine);
563 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
564 if (bitmap_bit_p (osi.reexamine, i))
566 collect_object_sizes_for (&osi, ssa_name (i));
567 if (dump_file && (dump_flags & TDF_DETAILS))
569 fprintf (dump_file, "Reexamining ");
570 print_generic_expr (dump_file, ssa_name (i),
571 dump_flags);
572 fprintf (dump_file, "\n");
576 while (osi.changed);
578 BITMAP_FREE (reexamine);
/* Anything still marked for reexamination has converged; record it
   as computed.  */
580 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
581 bitmap_set_bit (computed[object_size_type], i);
583 /* Debugging dumps. */
584 if (dump_file)
586 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
587 if (object_sizes[object_size_type][i]
588 != unknown[object_size_type])
590 print_generic_expr (dump_file, ssa_name (i),
591 dump_flags);
592 fprintf (dump_file,
593 ": %s %sobject size "
594 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
595 (object_size_type & 2) ? "minimum" : "maximum",
596 (object_size_type & 1) ? "sub" : "",
597 object_sizes[object_size_type][i]);
601 BITMAP_FREE (osi.reexamine);
602 BITMAP_FREE (osi.visited);
605 return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
608 return unknown[object_size_type];
611 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
613 static void
614 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
616 int object_size_type = osi->object_size_type;
617 unsigned int varno = SSA_NAME_VERSION (ptr);
618 unsigned HOST_WIDE_INT bytes;
620 gcc_assert (object_sizes[object_size_type][varno]
621 != unknown[object_size_type]);
622 gcc_assert (osi->pass == 0);
624 if (TREE_CODE (value) == WITH_SIZE_EXPR)
625 value = TREE_OPERAND (value, 0);
627 /* Pointer variables should have been handled by merge_object_sizes. */
628 gcc_assert (TREE_CODE (value) != SSA_NAME
629 || !POINTER_TYPE_P (TREE_TYPE (value)));
631 if (TREE_CODE (value) == ADDR_EXPR)
632 bytes = addr_object_size (osi, value, object_size_type);
633 else
634 bytes = unknown[object_size_type];
636 if ((object_size_type & 2) == 0)
638 if (object_sizes[object_size_type][varno] < bytes)
639 object_sizes[object_size_type][varno] = bytes;
641 else
643 if (object_sizes[object_size_type][varno] > bytes)
644 object_sizes[object_size_type][varno] = bytes;
649 /* Compute object_sizes for PTR, defined to the result of a call. */
651 static void
652 call_object_size (struct object_size_info *osi, tree ptr, gimple call)
654 int object_size_type = osi->object_size_type;
655 unsigned int varno = SSA_NAME_VERSION (ptr);
656 unsigned HOST_WIDE_INT bytes;
658 gcc_assert (is_gimple_call (call));
660 gcc_assert (object_sizes[object_size_type][varno]
661 != unknown[object_size_type]);
662 gcc_assert (osi->pass == 0);
664 bytes = alloc_object_size (call, object_size_type);
666 if ((object_size_type & 2) == 0)
668 if (object_sizes[object_size_type][varno] < bytes)
669 object_sizes[object_size_type][varno] = bytes;
671 else
673 if (object_sizes[object_size_type][varno] > bytes)
674 object_sizes[object_size_type][varno] = bytes;
679 /* Compute object_sizes for PTR, defined to an unknown value. */
681 static void
682 unknown_object_size (struct object_size_info *osi, tree ptr)
684 int object_size_type = osi->object_size_type;
685 unsigned int varno = SSA_NAME_VERSION (ptr);
686 unsigned HOST_WIDE_INT bytes;
688 gcc_assert (object_sizes[object_size_type][varno]
689 != unknown[object_size_type]);
690 gcc_assert (osi->pass == 0);
692 bytes = unknown[object_size_type];
694 if ((object_size_type & 2) == 0)
696 if (object_sizes[object_size_type][varno] < bytes)
697 object_sizes[object_size_type][varno] = bytes;
699 else
701 if (object_sizes[object_size_type][varno] > bytes)
702 object_sizes[object_size_type][varno] = bytes;
707 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
708 the object size might need reexamination later. */
710 static bool
711 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
712 unsigned HOST_WIDE_INT offset)
714 int object_size_type = osi->object_size_type;
715 unsigned int varno = SSA_NAME_VERSION (dest);
716 unsigned HOST_WIDE_INT orig_bytes;
718 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
719 return false;
720 if (offset >= offset_limit)
722 object_sizes[object_size_type][varno] = unknown[object_size_type];
723 return false;
726 if (osi->pass == 0)
727 collect_object_sizes_for (osi, orig);
729 orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
730 if (orig_bytes != unknown[object_size_type])
731 orig_bytes = (offset > orig_bytes)
732 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;
734 if ((object_size_type & 2) == 0)
736 if (object_sizes[object_size_type][varno] < orig_bytes)
738 object_sizes[object_size_type][varno] = orig_bytes;
739 osi->changed = true;
742 else
744 if (object_sizes[object_size_type][varno] > orig_bytes)
746 object_sizes[object_size_type][varno] = orig_bytes;
747 osi->changed = true;
750 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
754 /* Compute object_sizes for VAR, defined to the result of an assignment
755 with operator POINTER_PLUS_EXPR. Return true if the object size might
756 need reexamination later. */
758 static bool
759 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
761 int object_size_type = osi->object_size_type;
762 unsigned int varno = SSA_NAME_VERSION (var);
763 unsigned HOST_WIDE_INT bytes;
764 tree op0, op1;
766 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
768 op0 = gimple_assign_rhs1 (stmt);
769 op1 = gimple_assign_rhs2 (stmt);
771 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
773 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
774 gcc_assert (TREE_CODE (rhs) == MEM_REF);
775 op0 = TREE_OPERAND (rhs, 0);
776 op1 = TREE_OPERAND (rhs, 1);
778 else
779 gcc_unreachable ();
781 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
782 return false;
784 /* Handle PTR + OFFSET here. */
785 if (TREE_CODE (op1) == INTEGER_CST
786 && (TREE_CODE (op0) == SSA_NAME
787 || TREE_CODE (op0) == ADDR_EXPR))
789 if (! host_integerp (op1, 1))
790 bytes = unknown[object_size_type];
791 else if (TREE_CODE (op0) == SSA_NAME)
792 return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
793 else
795 unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);
797 /* op0 will be ADDR_EXPR here. */
798 bytes = addr_object_size (osi, op0, object_size_type);
799 if (bytes == unknown[object_size_type])
801 else if (off > offset_limit)
802 bytes = unknown[object_size_type];
803 else if (off > bytes)
804 bytes = 0;
805 else
806 bytes -= off;
809 else
810 bytes = unknown[object_size_type];
812 if ((object_size_type & 2) == 0)
814 if (object_sizes[object_size_type][varno] < bytes)
815 object_sizes[object_size_type][varno] = bytes;
817 else
819 if (object_sizes[object_size_type][varno] > bytes)
820 object_sizes[object_size_type][varno] = bytes;
822 return false;
826 /* Compute object_sizes for VAR, defined to VALUE, which is
827 a COND_EXPR. Return true if the object size might need reexamination
828 later. */
830 static bool
831 cond_expr_object_size (struct object_size_info *osi, tree var, tree value)
833 tree then_, else_;
834 int object_size_type = osi->object_size_type;
835 unsigned int varno = SSA_NAME_VERSION (var);
836 bool reexamine = false;
838 gcc_assert (TREE_CODE (value) == COND_EXPR);
840 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
841 return false;
843 then_ = COND_EXPR_THEN (value);
844 else_ = COND_EXPR_ELSE (value);
846 if (TREE_CODE (then_) == SSA_NAME)
847 reexamine |= merge_object_sizes (osi, var, then_, 0);
848 else
849 expr_object_size (osi, var, then_);
851 if (TREE_CODE (else_) == SSA_NAME)
852 reexamine |= merge_object_sizes (osi, var, else_, 0);
853 else
854 expr_object_size (osi, var, else_);
856 return reexamine;
859 /* Compute object sizes for VAR.
860 For ADDR_EXPR an object size is the number of remaining bytes
861 to the end of the object (where what is considered an object depends on
862 OSI->object_size_type).
863 For allocation GIMPLE_CALL like malloc or calloc object size is the size
864 of the allocation.
865 For POINTER_PLUS_EXPR where second operand is a constant integer,
866 object size is object size of the first operand minus the constant.
867 If the constant is bigger than the number of remaining bytes until the
868 end of the object, object size is 0, but if it is instead a pointer
869 subtraction, object size is unknown[object_size_type].
870 To differentiate addition from subtraction, ADDR_EXPR returns
871 unknown[object_size_type] for all objects bigger than half of the address
872 space, and constants less than half of the address space are considered
873 addition, while bigger constants subtraction.
874 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
875 object size is object size of that argument.
876 Otherwise, object size is the maximum of object sizes of variables
877 that it might be set to. */
879 static void
880 collect_object_sizes_for (struct object_size_info *osi, tree var)
882 int object_size_type = osi->object_size_type;
883 unsigned int varno = SSA_NAME_VERSION (var);
884 gimple stmt;
885 bool reexamine;
887 if (bitmap_bit_p (computed[object_size_type], varno))
888 return;
890 if (osi->pass == 0)
/* First visit: seed with the identity of the merge (-1 for minimum
   modes, 0 for maximum modes).  */
892 if (! bitmap_bit_p (osi->visited, varno))
894 bitmap_set_bit (osi->visited, varno);
895 object_sizes[object_size_type][varno]
896 = (object_size_type & 2) ? -1 : 0;
898 else
900 /* Found a dependency loop. Mark the variable for later
901 re-examination. */
902 bitmap_set_bit (osi->reexamine, varno);
903 if (dump_file && (dump_flags & TDF_DETAILS))
905 fprintf (dump_file, "Found a dependency loop at ");
906 print_generic_expr (dump_file, var, dump_flags);
907 fprintf (dump_file, "\n");
909 return;
913 if (dump_file && (dump_flags & TDF_DETAILS))
915 fprintf (dump_file, "Visiting use-def links for ");
916 print_generic_expr (dump_file, var, dump_flags);
917 fprintf (dump_file, "\n");
/* Dispatch on the kind of defining statement.  */
920 stmt = SSA_NAME_DEF_STMT (var);
921 reexamine = false;
923 switch (gimple_code (stmt))
925 case GIMPLE_ASSIGN:
927 tree rhs = gimple_assign_rhs1 (stmt);
928 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
929 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
930 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
931 reexamine = plus_stmt_object_size (osi, var, stmt);
932 else if (gimple_assign_single_p (stmt)
933 || gimple_assign_unary_nop_p (stmt))
935 if (TREE_CODE (rhs) == SSA_NAME
936 && POINTER_TYPE_P (TREE_TYPE (rhs)))
937 reexamine = merge_object_sizes (osi, var, rhs, 0);
938 else if (TREE_CODE (rhs) == COND_EXPR)
939 reexamine = cond_expr_object_size (osi, var, rhs);
940 else
941 expr_object_size (osi, var, rhs);
943 else
944 unknown_object_size (osi, var);
945 break;
948 case GIMPLE_CALL:
/* A pass-through builtin (memcpy etc.) propagates the size of its
   first argument; anything else is treated as an allocation.  */
950 tree arg = pass_through_call (stmt);
951 if (arg)
953 if (TREE_CODE (arg) == SSA_NAME
954 && POINTER_TYPE_P (TREE_TYPE (arg)))
955 reexamine = merge_object_sizes (osi, var, arg, 0);
956 else if (TREE_CODE (arg) == COND_EXPR)
957 reexamine = cond_expr_object_size (osi, var, arg);
958 else
959 expr_object_size (osi, var, arg);
961 else
962 call_object_size (osi, var, stmt);
963 break;
966 case GIMPLE_ASM:
967 /* Pointers defined by __asm__ statements can point anywhere. */
968 object_sizes[object_size_type][varno] = unknown[object_size_type];
969 break;
971 case GIMPLE_NOP:
/* Default definition: a parameter or a variable's initial value.  */
973 tree decl = SSA_NAME_VAR (var);
975 if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
976 expr_object_size (osi, var, DECL_INITIAL (decl));
977 else
978 expr_object_size (osi, var, decl);
980 break;
982 case GIMPLE_PHI:
/* Merge all PHI arguments; stop early once the value degrades
   to unknown.  */
984 unsigned i;
986 for (i = 0; i < gimple_phi_num_args (stmt); i++)
988 tree rhs = gimple_phi_arg (stmt, i)->def;
990 if (object_sizes[object_size_type][varno]
991 == unknown[object_size_type]
992 break;
994 if (TREE_CODE (rhs) == SSA_NAME
995 reexamine |= merge_object_sizes (osi, var, rhs, 0);
996 else if (osi->pass == 0)
997 expr_object_size (osi, var, rhs);
999 break;
1002 default:
1003 gcc_unreachable ();
/* Either the size is final (record it as computed) or it depends on
   a cycle and must be revisited in pass 2.  */
1006 if (! reexamine
1007 || object_sizes[object_size_type][varno] == unknown[object_size_type])
1009 bitmap_set_bit (computed[object_size_type], varno);
1010 bitmap_clear_bit (osi->reexamine, varno);
1012 else
1014 bitmap_set_bit (osi->reexamine, varno);
1015 if (dump_file && (dump_flags & TDF_DETAILS))
1017 fprintf (dump_file, "Need to reexamine ");
1018 print_generic_expr (dump_file, var, dump_flags);
1019 fprintf (dump_file, "\n");
1025 /* Helper function for check_for_plus_in_loops. Called recursively
1026 to detect loops. */
1028 static void
1029 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1030 unsigned int depth)
1032 gimple stmt = SSA_NAME_DEF_STMT (var);
1033 unsigned int varno = SSA_NAME_VERSION (var);
/* Reaching an already-visited name at a different depth means at
   least one nonzero pointer increment lies on the cycle.  */
1035 if (osi->depths[varno])
1037 if (osi->depths[varno] != depth)
1039 unsigned int *sp;
1041 /* Found a loop involving pointer addition. */
/* Unwind the DFS stack down to VARNO, forcing every name on the
   cycle to a (minimum) object size of 0.  */
1042 for (sp = osi->tos; sp > osi->stack; )
1044 --sp;
1045 bitmap_clear_bit (osi->reexamine, *sp);
1046 bitmap_set_bit (computed[osi->object_size_type], *sp);
1047 object_sizes[osi->object_size_type][*sp] = 0;
1048 if (*sp == varno)
1049 break;
1052 return;
1054 else if (! bitmap_bit_p (osi->reexamine, varno))
1055 return;
/* Push VARNO and recurse into its defining statement.  */
1057 osi->depths[varno] = depth;
1058 *osi->tos++ = varno;
1060 switch (gimple_code (stmt))
1063 case GIMPLE_ASSIGN:
1065 if ((gimple_assign_single_p (stmt)
1066 || gimple_assign_unary_nop_p (stmt))
1067 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1069 tree rhs = gimple_assign_rhs1 (stmt);
1071 check_for_plus_in_loops_1 (osi, rhs, depth);
1073 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1075 tree basevar = gimple_assign_rhs1 (stmt);
1076 tree cst = gimple_assign_rhs2 (stmt);
1078 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
/* A nonzero increment bumps the depth, which is what makes a
   growing cycle detectable above.  */
1080 check_for_plus_in_loops_1 (osi, basevar,
1081 depth + !integer_zerop (cst));
1083 else
1084 gcc_unreachable ();
1085 break;
1088 case GIMPLE_CALL:
1090 tree arg = pass_through_call (stmt);
1091 if (arg)
1093 if (TREE_CODE (arg) == SSA_NAME)
1094 check_for_plus_in_loops_1 (osi, arg, depth);
1095 else
1096 gcc_unreachable ();
1098 break;
1101 case GIMPLE_PHI:
1103 unsigned i;
1105 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1107 tree rhs = gimple_phi_arg (stmt, i)->def;
1109 if (TREE_CODE (rhs) == SSA_NAME)
1110 check_for_plus_in_loops_1 (osi, rhs, depth);
1112 break;
1115 default:
1116 gcc_unreachable ();
/* Pop VARNO before returning to the caller.  */
1119 osi->depths[varno] = 0;
1120 osi->tos--;
1124 /* Check if some pointer we are computing object size of is being increased
1125 within a loop. If yes, assume all the SSA variables participating in
1126 that loop have minimum object sizes 0. */
1128 static void
1129 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1131 gimple stmt = SSA_NAME_DEF_STMT (var);
1133 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1134 and looked for a POINTER_PLUS_EXPR in the pass-through
1135 argument, if any. In GIMPLE, however, such an expression
1136 is not a valid call operand. */
1138 if (is_gimple_assign (stmt)
1139 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1141 tree basevar = gimple_assign_rhs1 (stmt);
1142 tree cst = gimple_assign_rhs2 (stmt);
1144 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1146 if (integer_zerop (cst))
1147 return;
1149 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1150 *osi->tos++ = SSA_NAME_VERSION (basevar);
1151 check_for_plus_in_loops_1 (osi, var, 2);
1152 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1153 osi->tos--;
1158 /* Initialize data structures for the object size computation. */
1160 void
1161 init_object_sizes (void)
1163 int object_size_type;
1165 if (object_sizes[0])
1166 return;
1168 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1170 object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
1171 computed[object_size_type] = BITMAP_ALLOC (NULL);
1174 init_offset_limit ();
1178 /* Destroy data structures after the object size computation. */
1180 void
1181 fini_object_sizes (void)
1183 int object_size_type;
1185 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1187 free (object_sizes[object_size_type]);
1188 BITMAP_FREE (computed[object_size_type]);
1189 object_sizes[object_size_type] = NULL;
1194 /* Simple pass to optimize all __builtin_object_size () builtins. */
1196 static unsigned int
1197 compute_object_sizes (void)
1199 basic_block bb;
1200 FOR_EACH_BB (bb)
1202 gimple_stmt_iterator i;
1203 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1205 tree callee, result;
1206 gimple call = gsi_stmt (i);
/* Only __builtin_object_size calls are interesting.  */
1208 if (gimple_code (call) != GIMPLE_CALL)
1209 continue;
1211 callee = gimple_call_fndecl (call);
1212 if (!callee
1213 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
1214 || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
1215 continue;
1217 init_object_sizes ();
1218 result = fold_call_stmt (call, false);
1219 if (!result)
/* Folding failed: substitute the documented failure values,
   (size_t) -1 for modes 0/1 and 0 for modes 2/3.  */
1221 if (gimple_call_num_args (call) == 2
1222 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
1224 tree ost = gimple_call_arg (call, 1);
1226 if (host_integerp (ost, 1))
1228 unsigned HOST_WIDE_INT object_size_type
1229 = tree_low_cst (ost, 1);
1231 if (object_size_type < 2)
1232 result = fold_convert (size_type_node,
1233 integer_minus_one_node);
1234 else if (object_size_type < 4)
1235 result = fold_convert (size_type_node,
1236 integer_zero_node);
1240 if (!result)
1241 continue;
1244 if (dump_file && (dump_flags & TDF_DETAILS))
1246 fprintf (dump_file, "Simplified\n ");
1247 print_gimple_stmt (dump_file, call, 0, dump_flags);
/* Replace the call with the computed constant.  */
1250 if (!update_call_from_tree (&i, result))
1251 gcc_unreachable ();
1253 /* NOTE: In the pre-tuples code, we called update_stmt here. This is
1254 now handled by gsi_replace, called from update_call_from_tree. */
1256 if (dump_file && (dump_flags & TDF_DETAILS))
1258 fprintf (dump_file, "to\n ");
1259 print_gimple_stmt (dump_file, call, 0, dump_flags);
1260 fprintf (dump_file, "\n");
1265 fini_object_sizes ();
1266 return 0;
1269 struct gimple_opt_pass pass_object_sizes =
1272 GIMPLE_PASS,
1273 "objsz", /* name */
1274 NULL, /* gate */
1275 compute_object_sizes, /* execute */
1276 NULL, /* sub */
1277 NULL, /* next */
1278 0, /* static_pass_number */
1279 TV_NONE, /* tv_id */
1280 PROP_cfg | PROP_ssa, /* properties_required */
1281 0, /* properties_provided */
1282 0, /* properties_destroyed */
1283 0, /* todo_flags_start */
1284 TODO_dump_func | TODO_verify_ssa /* todo_flags_finish */