/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"

struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};
static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
                                                const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.  */
static unsigned HOST_WIDE_INT *object_sizes[4];
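
/* Illustrative example of the four object_size_type values (an editorial
   sketch, not part of the original sources; assumes a 4-byte int and no
   padding):

     struct S { char buf[12]; int i; } s;
     char *p = &s.buf[4];

   Here __builtin_object_size (p, 0) and (p, 2) are 12, the bytes remaining
   to the end of the whole object s, while (p, 1) and (p, 3) are 8, the
   bytes remaining in the addressed subobject s.buf.  The upper and lower
   bounds coincide because the pointer is fully known at compile time.  */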
/* Bitmaps what object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
    offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
  else
    offset_limit = -1;
  offset_limit /= 2;
}
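
/* Sketch of the intent of offset_limit (an editorial illustration, not from
   the original sources): on a target with a 32-bit sizetype it ends up as
   0x7fffffff, half of the address space.  For

     char buf[64];
     char *p = buf + 16;
     char *q = p - 8;

   the subtraction reaches this pass as a POINTER_PLUS_EXPR with the huge
   unsigned constant 0xfffffff8; because that exceeds offset_limit it is
   treated as a subtraction rather than a gigantic addition, and the size
   tracked for q becomes unknown rather than being clamped to 0.  */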

/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return double_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
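
/* Worked example (editorial, not from the original sources; assumes a
   4-byte int and no padding): for the C source

     struct S { int i; char buf[12]; } s;
     char *p = &s.buf[4];

   compute_object_offset on the ARRAY_REF tree for s.buf[4] with VAR == s
   recurses into the COMPONENT_REF (field offset of buf, 4 bytes) and adds
   the scaled index (4 * 1 byte), returning size_int (8).  Any reference
   kind not handled in the switch yields error_mark_node.  */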

/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  if (REFERENCE_CLASS_P (pt_var))
    pt_var = get_base_address (pt_var);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF
      && TREE_CODE (TREE_OPERAND (pt_var, 0)) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (pt_var, 0))))
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0)
        {
          sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                            object_size_type & ~1);
          if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
            sz -= TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
          else
            sz = offset_limit;
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
          if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
            sz -= TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
          else
            sz = offset_limit;
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && host_integerp (DECL_SIZE_UNIT (pt_var), 1)
           && (unsigned HOST_WIDE_INT)
              tree_low_cst (DECL_SIZE_UNIT (pt_var), 1) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
           && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
           && (unsigned HOST_WIDE_INT)
              tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return unknown[object_size_type];
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
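
/* Example of the last-field heuristic above (editorial, not from the
   original sources):

     struct T { int len; char data[1]; };
     struct T *t = malloc (sizeof (struct T) + 100);
     ... __builtin_object_size (t->data, 1) ...

   Because data is the last field of T, and one-element trailing arrays are
   commonly used in place of a C99 flexible array member, the subobject size
   is not clamped to the declared single byte; it is instead derived from
   the size tracked for the whole allocation pointed to by t.  */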

/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
                                 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
        arg1 = 0;
      default:
        break;
      }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int) gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
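
/* Example calls this function understands (editorial sketch, not from the
   original sources; my_alloc is a hypothetical function):

     void *p = malloc (100);        size 100, from argument 0
     void *q = calloc (10, 20);     size 10 * 20 = 200
     void *r = my_alloc (3, 40);    declared as
       void *my_alloc (int n, int sz) __attribute__ ((alloc_size (1, 2)));
                                    so size 3 * 40 = 120

   Only INTEGER_CST arguments are usable; anything else results in
   unknown[object_size_type].  */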

/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const_gimple call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_ASSUME_ALIGNED:
        if (gimple_call_num_args (call) >= 1)
          return gimple_call_arg (call, 0);
        break;
      default:
        break;
      }

  return NULL_TREE;
}
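
/* Example of a pass-through call (editorial, not from the original
   sources):

     char buf[40];
     char *p = memcpy (buf, src, 10);

   memcpy returns its first argument, so the object size tracked for p is
   simply the object size of buf (40 bytes), propagated via the argument
   returned here.  */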

/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }

          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
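
/* Example of the loop case mentioned above (editorial, not from the
   original sources):

     char buf[64];
     char *p = &buf[0];
     while (cond)
       p = p + 4;

   When the minimum object size is requested (object_size_type & 2), the
   SSA names forming this cycle are forced to size 0 by
   check_for_plus_in_loops, since p may have been advanced arbitrarily far;
   the maximum object size of p remains bounded by the 64 bytes of buf.  */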

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (osi, value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gimple call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}

/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}

/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! host_integerp (op1, 1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);

          /* op0 will be ADDR_EXPR here.  */
          bytes = addr_object_size (osi, op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
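
/* Example (editorial, not from the original sources):

     char buf[32];
     char *p = &buf[8];     object size of p is 24
     char *q = p + 10;      POINTER_PLUS_EXPR: 24 - 10 = 14
     char *r = p + 100;     constant exceeds the remaining 24 bytes, so 0

   A non-constant offset, or one at or above offset_limit, yields
   unknown[object_size_type] instead.  */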

/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      {
        tree decl = SSA_NAME_VAR (var);

        if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
          expr_object_size (osi, var, DECL_INITIAL (decl));
        else
          expr_object_size (osi, var, decl);
      }
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}

/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}

/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (object_sizes[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type]
        = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      free (object_sizes[object_size_type]);
      BITMAP_FREE (computed[object_size_type]);
      object_sizes[object_size_type] = NULL;
    }
}

/* Simple pass to optimize all __builtin_object_size () builtins.  */

static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree callee, result;
          gimple call = gsi_stmt (i);

          if (gimple_code (call) != GIMPLE_CALL)
            continue;

          callee = gimple_call_fndecl (call);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
            continue;

          init_object_sizes ();
          result = fold_call_stmt (call, false);
          if (!result)
            {
              if (gimple_call_num_args (call) == 2
                  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
                {
                  tree ost = gimple_call_arg (call, 1);

                  if (host_integerp (ost, 1))
                    {
                      unsigned HOST_WIDE_INT object_size_type
                        = tree_low_cst (ost, 1);

                      if (object_size_type < 2)
                        result = fold_convert (size_type_node,
                                               integer_minus_one_node);
                      else if (object_size_type < 4)
                        result = build_zero_cst (size_type_node);
                    }
                }

              if (!result)
                continue;
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
            }

          if (!update_call_from_tree (&i, result))
            gcc_unreachable ();

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, gsi_stmt (i), 0, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
    }

  fini_object_sizes ();
  return 0;
}
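
/* Example of what this pass does (editorial, not from the original
   sources): for

     char buf[64];
     size_t n = __builtin_object_size (&buf[16], 0);

   the call is folded to the constant 48.  If nothing can be determined,
   the fallback above substitutes (size_t) -1 for size types 0 and 1 and
   0 for size types 2 and 3.  */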

struct gimple_opt_pass pass_object_sizes =
{
 {
  GIMPLE_PASS,
  "objsz",				/* name */
  NULL,					/* gate */
  compute_object_sizes,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa			/* todo_flags_finish */
 }
};