gcc/tree-object-size.c

/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"

struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
                                                const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, tree);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.  */
static unsigned HOST_WIDE_INT *object_sizes[4];
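
/* As an illustration of the four object_size_type values (the struct
   and pointer below are hypothetical, not taken from this file):

     struct A { char buf1[9]; char buf2[1]; } a;
     char *p = &a.buf1[1];

   __builtin_object_size (p, 0) is 9 (bytes up to the end of the whole
   object A), __builtin_object_size (p, 1) is 8 (bytes up to the end of
   the subobject buf1), and the lower-bound variants 2 and 3 give the
   same 9 and 8 here because p is known exactly.  */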

/* Bitmaps what object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;

/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
    offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
  else
    offset_limit = -1;
  offset_limit /= 2;
}
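
/* E.g. with a 64-bit sizetype, offset_limit ends up as half of the
   address space; constant offsets of that magnitude or larger are
   treated as pointer subtraction rather than addition (see the comment
   above collect_object_sizes_for).  */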

/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return TREE_OPERAND (expr, 1);

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
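
/* As an illustrative sketch (the struct below is hypothetical): for

     struct S { int i; char buf[8]; } s;

   compute_object_offset on EXPR == s.buf[3] with VAR == s adds the
   field offset of buf from the COMPONENT_REF to index * element size
   from the ARRAY_REF, i.e. 4 + 3 = 7 on a target with 4-byte int and
   no padding.  */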

/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  if (REFERENCE_CLASS_P (pt_var))
    pt_var = get_base_address (pt_var);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF
      && TREE_CODE (TREE_OPERAND (pt_var, 0)) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (pt_var, 0))))
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0)
        {
          sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                            object_size_type & ~1);
          if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
            sz -= TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
          else
            sz = offset_limit;
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
          if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
            sz -= TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
          else
            sz = offset_limit;
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
           && (unsigned HOST_WIDE_INT)
              tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return unknown[object_size_type];
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              bytes2 = size_binop (PLUS_EXPR, bytes2,
                                   TREE_OPERAND (pt_var, 1));
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
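
/* As an illustration of the last-field heuristic above (the struct,
   the allocation size and the 4-byte int layout are assumptions made
   only for this example):

     struct T { int i; char c[1]; } *t = malloc (10);

   __builtin_object_size (&t->c[0], 1) is not limited to sizeof (t->c);
   because c is the last field, the result is based on the enclosing
   allocation, here 10 - 4 = 6 bytes.  */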

/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
                                 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
        arg1 = 0;
      default:
        break;
      }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int) gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
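
/* E.g. (my_alloc is a hypothetical function shown only to illustrate
   the alloc_size attribute handling above):

     void *p = malloc (30);          object size 30 (argument 0)
     void *q = calloc (10, 4);       object size 40 (arg 0 * arg 1)
     void *my_alloc (int n) __attribute__ ((alloc_size (1)));
     void *r = my_alloc (16);        object size 16

   If the relevant arguments are not INTEGER_CSTs, the result is
   unknown[object_size_type].  */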

/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const_gimple call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
        if (gimple_call_num_args (call) >= 1)
          return gimple_call_arg (call, 0);
        break;
      default:
        break;
      }

  return NULL_TREE;
}
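
/* E.g. for (buf and s are hypothetical)

     char buf[10];
     char *p = strcpy (buf, s);

   the call returns its first argument, so the object size of p is the
   object size of buf, i.e. 10.  */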

/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }

          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
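
/* A sketch of the two-pass scheme above (the variables and conditions
   are hypothetical):

     char buf[10], *p, *q;
     ...
     p = c1 ? &buf[4] : q;
     q = c2 ? p + 2 : &buf[6];

   p and q depend on each other, so the first pass records what it can
   and marks both SSA names in osi.reexamine; the do-while loop then
   recomputes their sizes until nothing changes any more.  */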

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (osi, value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}

/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gimple call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}

/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}

/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}

/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! host_integerp (op1, 1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);

          /* op0 will be ADDR_EXPR here.  */
          bytes = addr_object_size (osi, op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
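
/* E.g. for (buf is hypothetical)

     char buf[10];
     char *p = &buf[2];
     char *q = p + 3;

   the maximum object size of q is 10 - 2 - 3 = 5; had the constant
   offset exceeded the remaining 8 bytes, the size would have been 0.  */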

/* Compute object_sizes for VAR, defined to VALUE, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, tree value)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (TREE_CODE (value) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = COND_EXPR_THEN (value);
  else_ = COND_EXPR_ELSE (value);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
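
/* E.g. for (buf and cond are hypothetical)

     char buf[10];
     char *p = cond ? &buf[4] : &buf[6];

   the maximum object size of p (__builtin_object_size type 0) is 6 and
   the minimum (type 2) is 4.  */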

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else if (TREE_CODE (rhs) == COND_EXPR)
              reexamine = cond_expr_object_size (osi, var, rhs);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else if (TREE_CODE (arg) == COND_EXPR)
              reexamine = cond_expr_object_size (osi, var, arg);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      {
        tree decl = SSA_NAME_VAR (var);

        if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
          expr_object_size (osi, var, DECL_INITIAL (decl));
        else
          expr_object_size (osi, var, decl);
      }
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}
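
/* As an illustration of the addition vs. subtraction rule described
   above collect_object_sizes_for (buf is hypothetical): for

     char buf[10];
     char *p = &buf[8];
     char *q = p + (size_t) -4;     (in effect p - 4)

   the constant is larger than half of the address space, so it is
   treated as a subtraction and the object size of q becomes
   unknown[object_size_type] rather than 0.  */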

/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}

/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}

/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (object_sizes[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type]
        = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}

/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      free (object_sizes[object_size_type]);
      BITMAP_FREE (computed[object_size_type]);
      object_sizes[object_size_type] = NULL;
    }
}

/* Simple pass to optimize all __builtin_object_size () builtins.  */

static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree callee, result;
          gimple call = gsi_stmt (i);

          if (gimple_code (call) != GIMPLE_CALL)
            continue;

          callee = gimple_call_fndecl (call);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
            continue;

          init_object_sizes ();
          result = fold_call_stmt (call, false);
          if (!result)
            {
              if (gimple_call_num_args (call) == 2
                  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
                {
                  tree ost = gimple_call_arg (call, 1);

                  if (host_integerp (ost, 1))
                    {
                      unsigned HOST_WIDE_INT object_size_type
                        = tree_low_cst (ost, 1);

                      if (object_size_type < 2)
                        result = fold_convert (size_type_node,
                                               integer_minus_one_node);
                      else if (object_size_type < 4)
                        result = build_zero_cst (size_type_node);
                    }
                }

              if (!result)
                continue;
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
            }

          if (!update_call_from_tree (&i, result))
            gcc_unreachable ();

          /* NOTE: In the pre-tuples code, we called update_stmt here.  This is
             now handled by gsi_replace, called from update_call_from_tree.  */

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
    }

  fini_object_sizes ();
  return 0;
}

struct gimple_opt_pass pass_object_sizes =
{
 {
  GIMPLE_PASS,
  "objsz",                              /* name */
  NULL,                                 /* gate */
  compute_object_sizes,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */
 }
};