/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};
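/* The value returned for each object_size_type when the size cannot be
   determined: (size_t) -1 for the maximum queries (types 0 and 1) and 0
   for the minimum queries (types 2 and 3), matching the documented
   fallback values of __builtin_object_size.  */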
static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
                                                const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);
/* object_sizes[0] is the upper bound for the number of bytes till the end
   of the object.
   object_sizes[1] is the upper bound for the number of bytes till the end
   of the subobject (innermost array or field with address taken).
   object_sizes[2] is the lower bound for the number of bytes till the end
   of the object and object_sizes[3] the lower bound for the subobject.  */
static unsigned HOST_WIDE_INT *object_sizes[4];

/* Bitmaps recording which object sizes have already been computed.  */
static bitmap computed[4];

/* Maximum value of an offset we consider to be an addition.  */
static unsigned HOST_WIDE_INT offset_limit;
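/* As an illustrative example (assuming a 4-byte int and no extra padding):

     struct A { char buf[12]; int i; } a;   with sizeof (a) == 16
     char *p = &a.buf[4];

   __builtin_object_size (p, 0) is 12 (bytes till the end of a),
   __builtin_object_size (p, 1) is 8 (bytes till the end of a.buf), and
   the minimum variants 2 and 3 give the same values here, since p can
   only point into this one object; they differ from 0 and 1 only when
   the pointer may refer to several objects of different sizes.  */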
/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
    offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
  else
    offset_limit = -1;
  offset_limit /= 2;
}
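/* The limit is halved so that any offset in the upper half of sizetype,
   which would really represent a negative pointer adjustment, is never
   treated as a forward addition; see the comment above
   collect_object_sizes_for.  */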
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return double_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
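/* For example, for EXPR == a.buf[4] and VAR == a this returns the byte
   offset of the field buf plus 4 times the element size; a negative
   ARRAY_REF index is folded with MINUS_EXPR instead of PLUS_EXPR.  */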
/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                            object_size_type & ~1);
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      if (sz != unknown[object_size_type])
        {
          double_int dsz = double_int_sub (uhwi_to_double_int (sz),
                                           mem_ref_offset (pt_var));
          if (double_int_negative_p (dsz))
            sz = 0;
          else if (double_int_fits_in_uhwi_p (dsz))
            sz = double_int_to_uhwi (dsz);
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && host_integerp (DECL_SIZE_UNIT (pt_var), 1)
           && (unsigned HOST_WIDE_INT)
              tree_low_cst (DECL_SIZE_UNIT (pt_var), 1) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
           && TREE_CODE (pt_var) == STRING_CST
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
           && (unsigned HOST_WIDE_INT)
              tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return unknown[object_size_type];
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
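/* Illustrative results: with static char buf[64], the size recorded for
   &buf[10] is 54 for all four object_size_type values, while for the
   string literal "abc" it is 4, since TYPE_SIZE_UNIT of the STRING_CST's
   type includes the terminating NUL.  */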
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
                                 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
        arg1 = 0;
      default:
        break;
      }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int) gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
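/* Illustrative declarations recognized through the alloc_size attribute
   (the names are hypothetical), provided the size arguments are constants
   at the call site:

     void *xmalloc (size_t) __attribute__ ((alloc_size (1)));
     void *xcalloc (size_t, size_t) __attribute__ ((alloc_size (1, 2)));

   xmalloc (32) then has object size 32 and xcalloc (4, 8) has 4 * 8 == 32,
   just like the built-in malloc, calloc, alloca and
   __builtin_alloca_with_align cases handled above.  */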
/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const_gimple call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_ASSUME_ALIGNED:
        if (gimple_call_num_args (call) >= 1)
          return gimple_call_arg (call, 0);
        break;
      default:
        break;
      }

  return NULL_TREE;
}
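/* E.g. for q = memcpy (p, src, n) the first argument p is returned, so the
   caller merges q's object size from p instead of treating the call as an
   allocation.  */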
/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }

          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
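/* This is the exported entry point; the objsz pass below reaches it
   (indirectly) via fold_call_stmt when folding __builtin_object_size
   calls into constants.  */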
/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (osi, value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}
/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gimple call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}
/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
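/* For example, merging ORIG with object size 10 into DEST with OFFSET 4
   yields a candidate size of 6 for DEST (clamped to 0 if OFFSET exceeds the
   remaining bytes), which is then combined by the maximum/minimum rule
   above; an OFFSET of offset_limit or more makes DEST unknown instead.  */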
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! host_integerp (op1, 1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);

          /* op0 will be ADDR_EXPR here.  */
          bytes = addr_object_size (osi, op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
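/* E.g. for p_3 = cond ? &buf1[0] : &buf2[0] the maximum variants record
   MAX (sizeof buf1, sizeof buf2) and the minimum variants the MIN, through
   the merging rules of merge_object_sizes and expr_object_size.  */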
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */
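/* A small worked example of the rules above, for the maximum object size
   (object_size_type 0):

     char buf[10];
     p_1 = &buf[2];            object size 8
     p_2 = p_1 + 3;            object size 5
     p_3 = PHI <p_1, p_2>;     object size 8, the maximum of 8 and 5

   The minimum variant (object_size_type 2) would record 5 for p_3.  */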
static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      {
        tree decl = SSA_NAME_VAR (var);

        if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
          expr_object_size (osi, var, DECL_INITIAL (decl));
        else
          expr_object_size (osi, var, decl);
      }
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}
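/* Note that the default definition of a PARM_DECL reaches the GIMPLE_NOP
   case above and falls through to expr_object_size on the decl itself,
   which yields unknown[object_size_type] because the decl is not an
   ADDR_EXPR.  */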
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}
/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}
/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (object_sizes[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type]
        = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}
/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      free (object_sizes[object_size_type]);
      BITMAP_FREE (computed[object_size_type]);
      object_sizes[object_size_type] = NULL;
    }
}
/* Simple pass to optimize all __builtin_object_size () builtins.  */

static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree callee, result;
          gimple call = gsi_stmt (i);

          if (gimple_code (call) != GIMPLE_CALL)
            continue;

          callee = gimple_call_fndecl (call);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
            continue;

          init_object_sizes ();
          result = fold_call_stmt (call, false);
          if (!result)
            {
              if (gimple_call_num_args (call) == 2
                  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
                {
                  tree ost = gimple_call_arg (call, 1);

                  if (host_integerp (ost, 1))
                    {
                      unsigned HOST_WIDE_INT object_size_type
                        = tree_low_cst (ost, 1);

                      if (object_size_type < 2)
                        result = fold_convert (size_type_node,
                                               integer_minus_one_node);
                      else if (object_size_type < 4)
                        result = build_zero_cst (size_type_node);
                    }
                }

              if (!result)
                continue;
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
            }

          if (!update_call_from_tree (&i, result))
            gcc_unreachable ();

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, gsi_stmt (i), 0, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
    }

  fini_object_sizes ();
  return 0;
}
struct gimple_opt_pass pass_object_sizes =
{
 {
  GIMPLE_PASS,
  "objsz",				/* name */
  NULL,					/* gate */
  compute_object_sizes,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa			/* todo_flags_finish */
 }
};