gcc/tree-object-size.c

/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"

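/* Per-query state used while computing __builtin_object_size: the kind of
   size being computed, which SSA names have been visited, which ones sit in
   dependency cycles and need reexamination, the current pass number, and
   scratch space for the pointer-increment loop detection.  */
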
struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

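/* Sentinel stored and returned when a size cannot be determined: all ones
   (i.e. "no limit") for the maximum modes 0 and 1, and 0 for the minimum
   modes 2 and 3.  */
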
static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
                                                const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, tree);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.  */
static unsigned HOST_WIDE_INT *object_sizes[4];

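/* For example, given

     struct S { int i; char buf[6]; } s;
     char *p = &s.buf[2];

   object_sizes[0] for the SSA name of P counts all bytes from P to the end
   of S (including any tail padding), while object_sizes[1] only counts up
   to the end of the BUF member, i.e. 4 bytes.  Entries 2 and 3 hold the
   corresponding lower bounds.  */
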
/* Bitmaps recording which object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
    offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
  else
    offset_limit = -1;
  offset_limit /= 2;
}

/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return double_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}

/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

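/* E.g. for &buf[6] with "char buf[10]" in scope the result is 4.  */
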
static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  if (REFERENCE_CLASS_P (pt_var))
    pt_var = get_base_address (pt_var);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF
      && TREE_CODE (TREE_OPERAND (pt_var, 0)) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (pt_var, 0))))
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0)
        {
          sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                            object_size_type & ~1);
          if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
            sz -= TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
          else
            sz = offset_limit;
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
          if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
            sz -= TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
          else
            sz = offset_limit;
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && host_integerp (DECL_SIZE_UNIT (pt_var), 1)
           && (unsigned HOST_WIDE_INT)
              tree_low_cst (DECL_SIZE_UNIT (pt_var), 1) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
           && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
           && (unsigned HOST_WIDE_INT)
              tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return unknown[object_size_type];
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}

/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

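/* Both the alloc_size attribute and the known allocation built-ins are
   recognized, so e.g. for a hypothetical declaration

     void *my_alloc (int flags, size_t n) __attribute__ ((alloc_size (2)));

   the size of the object returned by my_alloc (f, 32) is taken to be 32,
   and for calloc (n, sz) it is n * sz.  */
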
static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
                                 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
        arg1 = 0;
      default:
        break;
      }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int) gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}

/* If object size is propagated from one of the function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

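/* E.g. memcpy (dst, src, len) returns DST, so the object size of the call's
   result is simply the object size of its first argument.  */
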
static tree
pass_through_call (const_gimple call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
        if (gimple_call_num_args (call) >= 1)
          return gimple_call_arg (call, 0);
        break;
      default:
        break;
      }

  return NULL_TREE;
}

/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }
          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (osi, value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}

/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gimple call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}

/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}

/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

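/* OFFSET is the constant added to ORIG's pointer; the remaining size
   saturates at zero, and an OFFSET of offset_limit or more forces DEST's
   size to unknown.  */
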
static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}

/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! host_integerp (op1, 1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);

          /* op0 will be ADDR_EXPR here.  */
          bytes = addr_object_size (osi, op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}

/* Compute object_sizes for VAR, defined to VALUE, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

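/* The size recorded for VAR ends up being the maximum (for modes 0 and 1)
   or the minimum (for modes 2 and 3) of the sizes of the two arms.  */
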
static bool
cond_expr_object_size (struct object_size_info *osi, tree var, tree value)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (TREE_CODE (value) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = COND_EXPR_THEN (value);
  else_ = COND_EXPR_ELSE (value);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc the object size is
   the size of the allocation.
   For a POINTER_PLUS_EXPR whose second operand is a constant integer,
   the object size is the object size of the first operand minus the
   constant.  If the constant is bigger than the number of remaining bytes
   until the end of the object, the object size is 0, but if it is instead
   a pointer subtraction, the object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy-like GIMPLE_CALL that always returns one of its arguments,
   the object size is the object size of that argument.
   Otherwise, the object size is the maximum of the object sizes of the
   variables that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else if (TREE_CODE (rhs) == COND_EXPR)
              reexamine = cond_expr_object_size (osi, var, rhs);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else if (TREE_CODE (arg) == COND_EXPR)
              reexamine = cond_expr_object_size (osi, var, arg);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      {
        tree decl = SSA_NAME_VAR (var);

        if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
          expr_object_size (osi, var, DECL_INITIAL (decl));
        else
          expr_object_size (osi, var, decl);
      }
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}

/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}

/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}

/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (object_sizes[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}

/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      free (object_sizes[object_size_type]);
      BITMAP_FREE (computed[object_size_type]);
      object_sizes[object_size_type] = NULL;
    }
}

/* Simple pass to optimize all __builtin_object_size () builtins.  */

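/* E.g. with "char buf[64]" in scope,

     size_t n = __builtin_object_size (&buf[16], 0);

   folds to n = 48, while calls whose result cannot be determined are
   replaced by the documented fallbacks: (size_t) -1 for modes 0 and 1 and
   0 for modes 2 and 3.  */
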
static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree callee, result;
          gimple call = gsi_stmt (i);

          if (gimple_code (call) != GIMPLE_CALL)
            continue;

          callee = gimple_call_fndecl (call);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
            continue;

          init_object_sizes ();
          result = fold_call_stmt (call, false);
          if (!result)
            {
              if (gimple_call_num_args (call) == 2
                  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
                {
                  tree ost = gimple_call_arg (call, 1);

                  if (host_integerp (ost, 1))
                    {
                      unsigned HOST_WIDE_INT object_size_type
                        = tree_low_cst (ost, 1);

                      if (object_size_type < 2)
                        result = fold_convert (size_type_node,
                                               integer_minus_one_node);
                      else if (object_size_type < 4)
                        result = build_zero_cst (size_type_node);
                    }
                }

              if (!result)
                continue;
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
            }

          if (!update_call_from_tree (&i, result))
            gcc_unreachable ();

          /* NOTE: In the pre-tuples code, we called update_stmt here.  This is
             now handled by gsi_replace, called from update_call_from_tree.  */

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
    }

  fini_object_sizes ();
  return 0;
}

struct gimple_opt_pass pass_object_sizes =
{
 {
  GIMPLE_PASS,
  "objsz",                              /* name */
  NULL,                                 /* gate */
  compute_object_sizes,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_ssa                       /* todo_flags_finish */
 }
};