/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};
static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
                                                const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, tree);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);
/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.  */
static unsigned HOST_WIDE_INT *object_sizes[4];
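
/* Illustrative example (editorial sketch, not part of the pass itself):
   given

       struct S { int i; char buf[10]; } s;
       char *p = &s.buf[4];

   __builtin_object_size (p, 0) counts the bytes remaining in the whole
   object s, i.e. sizeof (s) - offsetof (struct S, buf) - 4, while
   __builtin_object_size (p, 1) counts only the bytes remaining in the
   addressed subobject s.buf, i.e. sizeof (s.buf) - 4.  Size types 2 and 3
   are the corresponding lower bounds, which coincide with the upper
   bounds here because everything is statically known.  */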
/* Bitmaps recording which object sizes have already been computed.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;
/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
    offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
  else
    offset_limit = -1;
  offset_limit /= 2;
}
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return double_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
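
/* Illustrative example (editorial sketch): for

       struct A { int i; int j[4]; } a;

   compute_object_offset on the reference a.j[2] relative to a yields
   offsetof (struct A, j) + 2 * sizeof (int), built from one COMPONENT_REF
   step and one ARRAY_REF step.  An expression that cannot be decomposed
   down to VAR results in error_mark_node.  */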
/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */
static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  if (REFERENCE_CLASS_P (pt_var))
    pt_var = get_base_address (pt_var);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF
      && TREE_CODE (TREE_OPERAND (pt_var, 0)) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (pt_var, 0))))
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0)
        {
          sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                            object_size_type & ~1);
          if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
            sz -= TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
          else
            sz = offset_limit;
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
          if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
            sz -= TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
          else
            sz = offset_limit;
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
           && (unsigned HOST_WIDE_INT)
              tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return unknown[object_size_type];
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
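
/* Illustrative example (editorial sketch) of the last-field handling above:

       struct T { int len; char data[1]; };
       struct T *t = malloc (sizeof (struct T) + 100);

   Because data is the last field of struct T, the subobject size of
   &t->data is not clamped to sizeof (t->data); the bound is instead
   derived from the enclosing MEM_REF, i.e. from the size of the malloc
   allocation, which keeps the pre-C99 "char c[1] as flexible array
   member" idiom working.  */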
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
                                 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
        arg1 = 0;
      default:
        break;
      }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int) gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
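
/* Illustrative example (editorial sketch) of what alloc_object_size sees.
   wrap_alloc and wrap_calloc are hypothetical names used only for this
   example; malloc, calloc and alloca are recognized even without the
   attribute:

       void *wrap_alloc (size_t n) __attribute__ ((alloc_size (1)));
       void *wrap_calloc (size_t n, size_t m)
            __attribute__ ((alloc_size (1, 2)));

       void *p = wrap_alloc (32);      returns object size 32
       void *q = wrap_calloc (4, 16);  returns object size 4 * 16 = 64
       void *r = malloc (n);           non-constant argument: unknown  */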
/* If object size is propagated from one of the function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const_gimple call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
        if (gimple_call_num_args (call) >= 1)
          return gimple_call_arg (call, 0);
        break;
      default:
        break;
      }

  return NULL_TREE;
}
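
/* Illustrative example (editorial sketch): because memcpy returns its
   first argument, in

       char buf[64];
       char *p = memcpy (buf, src, 10);

   the object size of p is simply the object size of buf, i.e.
   __builtin_object_size (p, 0) == 64.  */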
/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }
          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
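
/* Illustrative caller sketch (editorial, not taken from the rest of GCC):
   a client asking for the maximum subobject bound of PTR would do
   something like

       unsigned HOST_WIDE_INT sz = compute_builtin_object_size (ptr, 1);
       if (sz != (unsigned HOST_WIDE_INT) -1)
         ... sz is a usable upper bound ...

   since the "unknown" value is (unsigned HOST_WIDE_INT) -1 for the maximum
   size types and 0 for the minimum ones, per the unknown[] table above.  */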
/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (osi, value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}
/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gimple call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}
/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! host_integerp (op1, 1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);

          /* op0 will be ADDR_EXPR here.  */
          bytes = addr_object_size (osi, op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
/* Compute object_sizes for VAR, defined to VALUE, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, tree value)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (TREE_CODE (value) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = COND_EXPR_THEN (value);
  else_ = COND_EXPR_ELSE (value);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */
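
/* Illustrative example (editorial sketch) of the addition/subtraction
   distinction described above.  Given

       char buf[16];
       char *p = &buf[4];
       char *q = p + 8;
       char *r = p - 2;

   p has maximum object size 12 and q has 12 - 8 = 4, because the constant
   8 is below offset_limit.  r is represented as p + (sizetype) -2; that
   constant is at or above offset_limit, so it is treated as a subtraction
   and the size of r becomes unknown[object_size_type].  */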
static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else if (TREE_CODE (rhs) == COND_EXPR)
              reexamine = cond_expr_object_size (osi, var, rhs);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else if (TREE_CODE (arg) == COND_EXPR)
              reexamine = cond_expr_object_size (osi, var, arg);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      {
        tree decl = SSA_NAME_VAR (var);

        if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
          expr_object_size (osi, var, DECL_INITIAL (decl));
        else
          expr_object_size (osi, var, decl);
      }
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}
/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */
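
/* Illustrative loop shape (editorial sketch) that this detection targets,
   in GIMPLE-like form:

       p_1 = &buf[0];
     <bb>:
       p_2 = PHI <p_1, p_3>;
       p_3 = p_2 + 4;
       if (cond) goto <bb>;

   p_2 and p_3 form a cycle whose depth keeps growing at every non-zero
   addition, so every SSA name on the cycle is forced to minimum object
   size 0 instead of being iterated on at length in pass 2.  */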
static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}
/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (object_sizes[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT,
                                                num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}
/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      free (object_sizes[object_size_type]);
      BITMAP_FREE (computed[object_size_type]);
      object_sizes[object_size_type] = NULL;
    }
}
/* Simple pass to optimize all __builtin_object_size () builtins.  */
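
/* Illustrative example (editorial sketch) of what the pass folds: with

       char buf[64];
       size_t n = __builtin_object_size (&buf[16], 0);

   the builtin call is replaced by the constant 48.  Calls whose result
   cannot be determined fold to (size_t) -1 for size types 0 and 1 and to
   0 for size types 2 and 3, matching the fallback code below.  */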
static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree callee, result;
          gimple call = gsi_stmt (i);

          if (gimple_code (call) != GIMPLE_CALL)
            continue;

          callee = gimple_call_fndecl (call);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
            continue;

          init_object_sizes ();
          result = fold_call_stmt (call, false);
          if (!result)
            {
              if (gimple_call_num_args (call) == 2
                  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
                {
                  tree ost = gimple_call_arg (call, 1);

                  if (host_integerp (ost, 1))
                    {
                      unsigned HOST_WIDE_INT object_size_type
                        = tree_low_cst (ost, 1);

                      if (object_size_type < 2)
                        result = fold_convert (size_type_node,
                                               integer_minus_one_node);
                      else if (object_size_type < 4)
                        result = build_zero_cst (size_type_node);
                    }
                }

              if (!result)
                continue;
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
            }

          if (!update_call_from_tree (&i, result))
            gcc_unreachable ();

          /* NOTE: In the pre-tuples code, we called update_stmt here.  This is
             now handled by gsi_replace, called from update_call_from_tree.  */

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
    }

  fini_object_sizes ();
  return 0;
}
struct gimple_opt_pass pass_object_sizes =
{
 {
  GIMPLE_PASS,
  "objsz",                              /* name */
  NULL,                                 /* gate */
  compute_object_sizes,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */
 }
};