gcc/tree-object-size.c
/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "toplev.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
						const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, tree);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);
/* object_sizes[0] is the upper bound for the number of bytes till the end
   of the object.
   object_sizes[1] is the upper bound for the number of bytes till the end
   of the subobject (innermost array or field with address taken).
   object_sizes[2] is the lower bound for the number of bytes till the end
   of the object and object_sizes[3] the lower bound for the subobject.  */
static unsigned HOST_WIDE_INT *object_sizes[4];
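/* As an illustration of the four variants (assuming a struct layout with
   no padding between the two fields):

     struct S { char a[4]; char b[12]; } s;
     char *p = &s.a[1];

   __builtin_object_size (p, 0) is 15 (bytes to the end of the whole
   object s), __builtin_object_size (p, 1) is 3 (bytes to the end of the
   subobject s.a), and types 2 and 3 are the corresponding lower bounds,
   which for a pointer fully known at compile time coincide with the
   upper bounds.  */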
/* Bitmaps of which object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be an addition.  */
static unsigned HOST_WIDE_INT offset_limit;
/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
    offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
  else
    offset_limit = -1;
  offset_limit /= 2;
}
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */
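/* For illustration (assuming a 4-byte int and no padding): with

     struct A { int i; char c[8]; } a;

   applying this to the reference a.c[2] with VAR == a yields
   4 + 2 == 6, while an expression that is not built from component,
   array or similar references of VAR yields error_mark_node.  */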
static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */
static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  if (REFERENCE_CLASS_P (pt_var))
    pt_var = get_base_address (pt_var);

  if (pt_var
      && TREE_CODE (pt_var) == INDIRECT_REF
      && TREE_CODE (TREE_OPERAND (pt_var, 0)) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (pt_var, 0))))
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0)
	sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
					  object_size_type & ~1);
      else
	{
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
	   && (unsigned HOST_WIDE_INT)
	      tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == INDIRECT_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			tree fld_chain = TREE_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = TREE_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return unknown[object_size_type];
      else
	var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == INDIRECT_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */
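/* For illustration (my_alloc is a hypothetical function, not from this
   file): with

     void *my_alloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

     p = my_alloc (10, 4);    the computed size is 40
     q = malloc (32);         the computed size is 32
     r = calloc (8, 8);       the computed size is 64

   provided the relevant arguments are INTEGER_CSTs; otherwise the result
   is unknown[object_size_type].  */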
static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
				 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
	arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
	arg2 = 1;
	/* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
	arg1 = 0;
      default:
	break;
      }

  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int)gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
			fold_convert (sizetype, gimple_call_arg (call, arg1)),
			fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
/* If the object size is propagated from one of the function's arguments
   directly to its return value, return that argument for GIMPLE_CALL
   statement CALL.  Otherwise return NULL.  */
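/* For example, in

     q_2 = memcpy (p_1, src_3, 32);

   memcpy returns its first argument, so the object size of q_2 is taken
   from p_1 rather than from the call itself.  */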
static tree
pass_through_call (const_gimple call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
	if (gimple_call_num_args (call) >= 1)
	  return gimple_call_arg (call, 0);
	break;
      default:
	break;
      }

  return NULL_TREE;
}
/* Compute the __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */
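/* A caller outside this file would typically use it as (illustrative
   sketch only):

     unsigned HOST_WIDE_INT sz = compute_builtin_object_size (ptr, 1);
     if (sz != (unsigned HOST_WIDE_INT) -1)
       ... use sz ...

   where the "unknown" return value is (unsigned HOST_WIDE_INT) -1 for
   types 0 and 1, and 0 for types 2 and 3, matching the unknown[] table
   above.  */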
unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
	{
	  struct object_size_info osi;
	  bitmap_iterator bi;
	  unsigned int i;

	  if (dump_file)
	    {
	      fprintf (dump_file, "Computing %s %sobject size for ",
		       (object_size_type & 2) ? "minimum" : "maximum",
		       (object_size_type & 1) ? "sub" : "");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, ":\n");
	    }

	  osi.visited = BITMAP_ALLOC (NULL);
	  osi.reexamine = BITMAP_ALLOC (NULL);
	  osi.object_size_type = object_size_type;
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;

	  /* First pass: walk UD chains, compute object sizes that
	     can be computed.  osi.reexamine bitmap at the end will
	     contain what variables were found in dependency cycles
	     and therefore need to be reexamined.  */
	  osi.pass = 0;
	  osi.changed = false;
	  collect_object_sizes_for (&osi, ptr);

	  /* Second pass: keep recomputing object sizes of variables
	     that need reexamination, until no object sizes are
	     increased or all object sizes are computed.  */
	  if (! bitmap_empty_p (osi.reexamine))
	    {
	      bitmap reexamine = BITMAP_ALLOC (NULL);

	      /* If looking for minimum instead of maximum object size,
		 detect cases where a pointer is increased in a loop.
		 Although even without this detection pass 2 would eventually
		 terminate, it could take a long time.  If a pointer is
		 increasing this way, we need to assume 0 object size.
		 E.g. p = &buf[0]; while (cond) p = p + 4;  */
	      if (object_size_type & 2)
		{
		  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
		  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
		  osi.tos = osi.stack;
		  osi.pass = 1;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      check_for_plus_in_loops (&osi, ssa_name (i));

		  free (osi.depths);
		  osi.depths = NULL;
		  free (osi.stack);
		  osi.stack = NULL;
		  osi.tos = NULL;
		}

	      do
		{
		  osi.pass = 2;
		  osi.changed = false;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      {
			collect_object_sizes_for (&osi, ssa_name (i));
			if (dump_file && (dump_flags & TDF_DETAILS))
			  {
			    fprintf (dump_file, "Reexamining ");
			    print_generic_expr (dump_file, ssa_name (i),
						dump_flags);
			    fprintf (dump_file, "\n");
			  }
		      }
		}
	      while (osi.changed);

	      BITMAP_FREE (reexamine);
	    }

	  EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	    bitmap_set_bit (computed[object_size_type], i);

	  /* Debugging dumps.  */
	  if (dump_file)
	    {
	      EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
		if (object_sizes[object_size_type][i]
		    != unknown[object_size_type])
		  {
		    print_generic_expr (dump_file, ssa_name (i),
					dump_flags);
		    fprintf (dump_file,
			     ": %s %sobject size "
			     HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			     (object_size_type & 2) ? "minimum" : "maximum",
			     (object_size_type & 1) ? "sub" : "",
			     object_sizes[object_size_type][i]);
		  }
	    }

	  BITMAP_FREE (osi.reexamine);
	  BITMAP_FREE (osi.visited);
	}

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
	      || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (osi, value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}
/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gimple call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}
/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
		 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */
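/* For example, for the statement

     q_2 = p_1 + 4;

   the maximum object size of q_2 is the maximum object size of p_1
   minus 4 (or 0 if fewer than 4 bytes remain past p_1), and likewise
   for the other object size types.  */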
static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  gcc_assert (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! host_integerp (op1, 1))
	bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
	return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);

	  /* op0 will be ADDR_EXPR here.  */
	  bytes = addr_object_size (osi, op0, object_size_type);
	  if (bytes == unknown[object_size_type])
	    ;
	  else if (off > offset_limit)
	    bytes = unknown[object_size_type];
	  else if (off > bytes)
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
/* Compute object_sizes for VAR, defined to VALUE, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */
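/* For example, with

     char a[8], b[4];
     p = cond ? &a[0] : &b[0];

   the maximum object size of p (types 0 and 1) is 8 and the minimum
   object size (types 2 and 3) is 4, because both arms are merged into
   the running maximum resp. minimum.  */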
static bool
cond_expr_object_size (struct object_size_info *osi, tree var, tree value)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (TREE_CODE (value) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = COND_EXPR_THEN (value);
  else_ = COND_EXPR_ELSE (value);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
/* Compute object sizes for VAR.
   For ADDR_EXPR the object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc the object size is
   the size of the allocation.
   For POINTER_PLUS_EXPR where the second operand is a constant integer,
   the object size is the object size of the first operand minus the
   constant.  If the constant is bigger than the number of remaining bytes
   until the end of the object, the object size is 0, but if it is instead
   a pointer subtraction, the object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   an addition, while bigger constants a subtraction.
   For a memcpy-like GIMPLE_CALL that always returns one of its arguments,
   the object size is the object size of that argument.
   Otherwise, the object size is the maximum of the object sizes of the
   variables that it might be set to.  */
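/* For example, on a 32-bit target a pointer decrement

     q_2 = p_1 + 0xfffffffc;   (i.e. p_1 - 4 written as POINTER_PLUS_EXPR)

   uses a constant larger than half of the address space, so it is treated
   as a subtraction and the object size of q_2 becomes
   unknown[object_size_type] instead of being clamped to 0.  */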
static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (! bitmap_bit_p (osi->visited, varno))
	{
	  bitmap_set_bit (osi->visited, varno);
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs, 0);
	    else if (TREE_CODE (rhs) == COND_EXPR)
	      reexamine = cond_expr_object_size (osi, var, rhs);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	tree arg = pass_through_call (stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg, 0);
	    else if (TREE_CODE (arg) == COND_EXPR)
	      reexamine = cond_expr_object_size (osi, var, arg);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      {
	tree decl = SSA_NAME_VAR (var);

	if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
	  expr_object_size (osi, var, DECL_INITIAL (decl));
	else
	  expr_object_size (osi, var, decl);
      }
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (object_sizes[object_size_type][varno]
		== unknown[object_size_type])
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	tree arg = pass_through_call (stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}
/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
	return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}
/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (object_sizes[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT,
						num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}
/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      free (object_sizes[object_size_type]);
      BITMAP_FREE (computed[object_size_type]);
      object_sizes[object_size_type] = NULL;
    }
}
/* Simple pass to optimize all __builtin_object_size () builtins.  */
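/* For instance, with _FORTIFY_SOURCE-style code such as

     char buf[64];
     __builtin___memcpy_chk (buf, src, n, __builtin_object_size (buf, 0));

   any __builtin_object_size call that was not already folded earlier is
   replaced here by a constant (64 in this sketch); calls that remain
   unknown are folded to (size_t) -1 for types 0 and 1 and to 0 for
   types 2 and 3, as the code below shows.  */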
static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree callee, result;
	  gimple call = gsi_stmt (i);

	  if (gimple_code (call) != GIMPLE_CALL)
	    continue;

	  callee = gimple_call_fndecl (call);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
	    continue;

	  init_object_sizes ();
	  result = fold_call_stmt (call, false);
	  if (!result)
	    {
	      if (gimple_call_num_args (call) == 2
		  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
		{
		  tree ost = gimple_call_arg (call, 1);

		  if (host_integerp (ost, 1))
		    {
		      unsigned HOST_WIDE_INT object_size_type
			= tree_low_cst (ost, 1);

		      if (object_size_type < 2)
			result = fold_convert (size_type_node,
					       integer_minus_one_node);
		      else if (object_size_type < 4)
			result = fold_convert (size_type_node,
					       integer_zero_node);
		    }
		}

	      if (!result)
		continue;
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	    }

	  if (!update_call_from_tree (&i, result))
	    gcc_unreachable ();

	  /* NOTE: In the pre-tuples code, we called update_stmt here.  This is
	     now handled by gsi_replace, called from update_call_from_tree.  */

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  fini_object_sizes ();
  return 0;
}
struct gimple_opt_pass pass_object_sizes =
{
 {
  GIMPLE_PASS,
  "objsz",				/* name */
  NULL,					/* gate */
  compute_object_sizes,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa	/* todo_flags_finish */
 }
};