/* gcc/tree-object-size.c — GCC source snapshot (2014-04-24).  */
/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2014 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tree-object-size.h"
27 #include "diagnostic-core.h"
28 #include "gimple-pretty-print.h"
29 #include "bitmap.h"
30 #include "basic-block.h"
31 #include "tree-ssa-alias.h"
32 #include "internal-fn.h"
33 #include "gimple-fold.h"
34 #include "gimple-expr.h"
35 #include "is-a.h"
36 #include "gimple.h"
37 #include "gimple-iterator.h"
38 #include "gimple-ssa.h"
39 #include "stringpool.h"
40 #include "tree-ssanames.h"
41 #include "tree-pass.h"
42 #include "tree-ssa-propagate.h"
43 #include "tree-phinodes.h"
44 #include "ssa-iterators.h"
46 struct object_size_info
48 int object_size_type;
49 bitmap visited, reexamine;
50 int pass;
51 bool changed;
52 unsigned int *depths;
53 unsigned int *stack, *tos;
56 static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
58 static tree compute_object_offset (const_tree, const_tree);
59 static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
60 const_tree, int);
61 static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
62 static tree pass_through_call (const_gimple);
63 static void collect_object_sizes_for (struct object_size_info *, tree);
64 static void expr_object_size (struct object_size_info *, tree, tree);
65 static bool merge_object_sizes (struct object_size_info *, tree, tree,
66 unsigned HOST_WIDE_INT);
67 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
68 static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
69 static void init_offset_limit (void);
70 static void check_for_plus_in_loops (struct object_size_info *, tree);
71 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
72 unsigned int);
74 /* object_sizes[0] is upper bound for number of bytes till the end of
75 the object.
76 object_sizes[1] is upper bound for number of bytes till the end of
77 the subobject (innermost array or field with address taken).
78 object_sizes[2] is lower bound for number of bytes till the end of
79 the object and object_sizes[3] lower bound for subobject. */
80 static vec<unsigned HOST_WIDE_INT> object_sizes[4];
82 /* Bitmaps what object sizes have been computed already. */
83 static bitmap computed[4];
85 /* Maximum value of offset we consider to be addition. */
86 static unsigned HOST_WIDE_INT offset_limit;
89 /* Initialize OFFSET_LIMIT variable. */
90 static void
91 init_offset_limit (void)
93 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
94 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
95 else
96 offset_limit = -1;
97 offset_limit /= 2;
101 /* Compute offset of EXPR within VAR. Return error_mark_node
102 if unknown. */
104 static tree
105 compute_object_offset (const_tree expr, const_tree var)
107 enum tree_code code = PLUS_EXPR;
108 tree base, off, t;
110 if (expr == var)
111 return size_zero_node;
113 switch (TREE_CODE (expr))
115 case COMPONENT_REF:
116 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
117 if (base == error_mark_node)
118 return base;
120 t = TREE_OPERAND (expr, 1);
121 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
122 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
123 / BITS_PER_UNIT));
124 break;
126 case REALPART_EXPR:
127 CASE_CONVERT:
128 case VIEW_CONVERT_EXPR:
129 case NON_LVALUE_EXPR:
130 return compute_object_offset (TREE_OPERAND (expr, 0), var);
132 case IMAGPART_EXPR:
133 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
134 if (base == error_mark_node)
135 return base;
137 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
138 break;
140 case ARRAY_REF:
141 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
142 if (base == error_mark_node)
143 return base;
145 t = TREE_OPERAND (expr, 1);
146 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
148 code = MINUS_EXPR;
149 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
151 t = fold_convert (sizetype, t);
152 off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
153 break;
155 case MEM_REF:
156 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
157 return double_int_to_tree (sizetype, mem_ref_offset (expr));
159 default:
160 return error_mark_node;
163 return size_binop (code, base, off);
167 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
168 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
169 If unknown, return unknown[object_size_type]. */
171 static unsigned HOST_WIDE_INT
172 addr_object_size (struct object_size_info *osi, const_tree ptr,
173 int object_size_type)
175 tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;
177 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
179 pt_var = TREE_OPERAND (ptr, 0);
180 while (handled_component_p (pt_var))
181 pt_var = TREE_OPERAND (pt_var, 0);
183 if (pt_var
184 && TREE_CODE (pt_var) == MEM_REF)
186 unsigned HOST_WIDE_INT sz;
188 if (!osi || (object_size_type & 1) != 0
189 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
191 sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
192 object_size_type & ~1);
194 else
196 tree var = TREE_OPERAND (pt_var, 0);
197 if (osi->pass == 0)
198 collect_object_sizes_for (osi, var);
199 if (bitmap_bit_p (computed[object_size_type],
200 SSA_NAME_VERSION (var)))
201 sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
202 else
203 sz = unknown[object_size_type];
205 if (sz != unknown[object_size_type])
207 double_int dsz = double_int::from_uhwi (sz) - mem_ref_offset (pt_var);
208 if (dsz.is_negative ())
209 sz = 0;
210 else if (dsz.fits_uhwi ())
211 sz = dsz.to_uhwi ();
212 else
213 sz = unknown[object_size_type];
216 if (sz != unknown[object_size_type] && sz < offset_limit)
217 pt_var_size = size_int (sz);
219 else if (pt_var
220 && DECL_P (pt_var)
221 && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
222 && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
223 pt_var_size = DECL_SIZE_UNIT (pt_var);
224 else if (pt_var
225 && TREE_CODE (pt_var) == STRING_CST
226 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
227 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
228 && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
229 < offset_limit)
230 pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
231 else
232 return unknown[object_size_type];
234 if (pt_var != TREE_OPERAND (ptr, 0))
236 tree var;
238 if (object_size_type & 1)
240 var = TREE_OPERAND (ptr, 0);
242 while (var != pt_var
243 && TREE_CODE (var) != BIT_FIELD_REF
244 && TREE_CODE (var) != COMPONENT_REF
245 && TREE_CODE (var) != ARRAY_REF
246 && TREE_CODE (var) != ARRAY_RANGE_REF
247 && TREE_CODE (var) != REALPART_EXPR
248 && TREE_CODE (var) != IMAGPART_EXPR)
249 var = TREE_OPERAND (var, 0);
250 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
251 var = TREE_OPERAND (var, 0);
252 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
253 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
254 || (pt_var_size
255 && tree_int_cst_lt (pt_var_size,
256 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
257 var = pt_var;
258 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
260 tree v = var;
261 /* For &X->fld, compute object size only if fld isn't the last
262 field, as struct { int i; char c[1]; } is often used instead
263 of flexible array member. */
264 while (v && v != pt_var)
265 switch (TREE_CODE (v))
267 case ARRAY_REF:
268 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
269 && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
271 tree domain
272 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
273 if (domain
274 && TYPE_MAX_VALUE (domain)
275 && TREE_CODE (TYPE_MAX_VALUE (domain))
276 == INTEGER_CST
277 && tree_int_cst_lt (TREE_OPERAND (v, 1),
278 TYPE_MAX_VALUE (domain)))
280 v = NULL_TREE;
281 break;
284 v = TREE_OPERAND (v, 0);
285 break;
286 case REALPART_EXPR:
287 case IMAGPART_EXPR:
288 v = NULL_TREE;
289 break;
290 case COMPONENT_REF:
291 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
293 v = NULL_TREE;
294 break;
296 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
297 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
298 != UNION_TYPE
299 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
300 != QUAL_UNION_TYPE)
301 break;
302 else
303 v = TREE_OPERAND (v, 0);
304 if (TREE_CODE (v) == COMPONENT_REF
305 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
306 == RECORD_TYPE)
308 tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
309 for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
310 if (TREE_CODE (fld_chain) == FIELD_DECL)
311 break;
313 if (fld_chain)
315 v = NULL_TREE;
316 break;
318 v = TREE_OPERAND (v, 0);
320 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
321 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
322 != UNION_TYPE
323 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
324 != QUAL_UNION_TYPE)
325 break;
326 else
327 v = TREE_OPERAND (v, 0);
328 if (v != pt_var)
329 v = NULL_TREE;
330 else
331 v = pt_var;
332 break;
333 default:
334 v = pt_var;
335 break;
337 if (v == pt_var)
338 var = pt_var;
341 else
342 var = pt_var;
344 if (var != pt_var)
345 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
346 else if (!pt_var_size)
347 return unknown[object_size_type];
348 else
349 var_size = pt_var_size;
350 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
351 if (bytes != error_mark_node)
353 if (TREE_CODE (bytes) == INTEGER_CST
354 && tree_int_cst_lt (var_size, bytes))
355 bytes = size_zero_node;
356 else
357 bytes = size_binop (MINUS_EXPR, var_size, bytes);
359 if (var != pt_var
360 && pt_var_size
361 && TREE_CODE (pt_var) == MEM_REF
362 && bytes != error_mark_node)
364 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
365 if (bytes2 != error_mark_node)
367 if (TREE_CODE (bytes2) == INTEGER_CST
368 && tree_int_cst_lt (pt_var_size, bytes2))
369 bytes2 = size_zero_node;
370 else
371 bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
372 bytes = size_binop (MIN_EXPR, bytes, bytes2);
376 else if (!pt_var_size)
377 return unknown[object_size_type];
378 else
379 bytes = pt_var_size;
381 if (tree_fits_uhwi_p (bytes))
382 return tree_to_uhwi (bytes);
384 return unknown[object_size_type];
388 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
389 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
390 argument from __builtin_object_size. If unknown, return
391 unknown[object_size_type]. */
393 static unsigned HOST_WIDE_INT
394 alloc_object_size (const_gimple call, int object_size_type)
396 tree callee, bytes = NULL_TREE;
397 tree alloc_size;
398 int arg1 = -1, arg2 = -1;
400 gcc_assert (is_gimple_call (call));
402 callee = gimple_call_fndecl (call);
403 if (!callee)
404 return unknown[object_size_type];
406 alloc_size = lookup_attribute ("alloc_size",
407 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
408 if (alloc_size && TREE_VALUE (alloc_size))
410 tree p = TREE_VALUE (alloc_size);
412 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
413 if (TREE_CHAIN (p))
414 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
417 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
418 switch (DECL_FUNCTION_CODE (callee))
420 case BUILT_IN_CALLOC:
421 arg2 = 1;
422 /* fall through */
423 case BUILT_IN_MALLOC:
424 case BUILT_IN_ALLOCA:
425 case BUILT_IN_ALLOCA_WITH_ALIGN:
426 arg1 = 0;
427 default:
428 break;
431 if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
432 || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
433 || (arg2 >= 0
434 && (arg2 >= (int)gimple_call_num_args (call)
435 || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
436 return unknown[object_size_type];
438 if (arg2 >= 0)
439 bytes = size_binop (MULT_EXPR,
440 fold_convert (sizetype, gimple_call_arg (call, arg1)),
441 fold_convert (sizetype, gimple_call_arg (call, arg2)));
442 else if (arg1 >= 0)
443 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
445 if (bytes && tree_fits_uhwi_p (bytes))
446 return tree_to_uhwi (bytes);
448 return unknown[object_size_type];
452 /* If object size is propagated from one of function's arguments directly
453 to its return value, return that argument for GIMPLE_CALL statement CALL.
454 Otherwise return NULL. */
456 static tree
457 pass_through_call (const_gimple call)
459 tree callee = gimple_call_fndecl (call);
461 if (callee
462 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
463 switch (DECL_FUNCTION_CODE (callee))
465 case BUILT_IN_MEMCPY:
466 case BUILT_IN_MEMMOVE:
467 case BUILT_IN_MEMSET:
468 case BUILT_IN_STRCPY:
469 case BUILT_IN_STRNCPY:
470 case BUILT_IN_STRCAT:
471 case BUILT_IN_STRNCAT:
472 case BUILT_IN_MEMCPY_CHK:
473 case BUILT_IN_MEMMOVE_CHK:
474 case BUILT_IN_MEMSET_CHK:
475 case BUILT_IN_STRCPY_CHK:
476 case BUILT_IN_STRNCPY_CHK:
477 case BUILT_IN_STPNCPY_CHK:
478 case BUILT_IN_STRCAT_CHK:
479 case BUILT_IN_STRNCAT_CHK:
480 case BUILT_IN_ASSUME_ALIGNED:
481 if (gimple_call_num_args (call) >= 1)
482 return gimple_call_arg (call, 0);
483 break;
484 default:
485 break;
488 return NULL_TREE;
492 /* Compute __builtin_object_size value for PTR. OBJECT_SIZE_TYPE is the
493 second argument from __builtin_object_size. */
495 unsigned HOST_WIDE_INT
496 compute_builtin_object_size (tree ptr, int object_size_type)
498 gcc_assert (object_size_type >= 0 && object_size_type <= 3);
500 if (! offset_limit)
501 init_offset_limit ();
503 if (TREE_CODE (ptr) == ADDR_EXPR)
504 return addr_object_size (NULL, ptr, object_size_type);
506 if (TREE_CODE (ptr) == SSA_NAME
507 && POINTER_TYPE_P (TREE_TYPE (ptr))
508 && computed[object_size_type] != NULL)
510 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
512 struct object_size_info osi;
513 bitmap_iterator bi;
514 unsigned int i;
516 if (num_ssa_names > object_sizes[object_size_type].length ())
517 object_sizes[object_size_type].safe_grow (num_ssa_names);
518 if (dump_file)
520 fprintf (dump_file, "Computing %s %sobject size for ",
521 (object_size_type & 2) ? "minimum" : "maximum",
522 (object_size_type & 1) ? "sub" : "");
523 print_generic_expr (dump_file, ptr, dump_flags);
524 fprintf (dump_file, ":\n");
527 osi.visited = BITMAP_ALLOC (NULL);
528 osi.reexamine = BITMAP_ALLOC (NULL);
529 osi.object_size_type = object_size_type;
530 osi.depths = NULL;
531 osi.stack = NULL;
532 osi.tos = NULL;
534 /* First pass: walk UD chains, compute object sizes that
535 can be computed. osi.reexamine bitmap at the end will
536 contain what variables were found in dependency cycles
537 and therefore need to be reexamined. */
538 osi.pass = 0;
539 osi.changed = false;
540 collect_object_sizes_for (&osi, ptr);
542 /* Second pass: keep recomputing object sizes of variables
543 that need reexamination, until no object sizes are
544 increased or all object sizes are computed. */
545 if (! bitmap_empty_p (osi.reexamine))
547 bitmap reexamine = BITMAP_ALLOC (NULL);
549 /* If looking for minimum instead of maximum object size,
550 detect cases where a pointer is increased in a loop.
551 Although even without this detection pass 2 would eventually
552 terminate, it could take a long time. If a pointer is
553 increasing this way, we need to assume 0 object size.
554 E.g. p = &buf[0]; while (cond) p = p + 4; */
555 if (object_size_type & 2)
557 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
558 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
559 osi.tos = osi.stack;
560 osi.pass = 1;
561 /* collect_object_sizes_for is changing
562 osi.reexamine bitmap, so iterate over a copy. */
563 bitmap_copy (reexamine, osi.reexamine);
564 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
565 if (bitmap_bit_p (osi.reexamine, i))
566 check_for_plus_in_loops (&osi, ssa_name (i));
568 free (osi.depths);
569 osi.depths = NULL;
570 free (osi.stack);
571 osi.stack = NULL;
572 osi.tos = NULL;
577 osi.pass = 2;
578 osi.changed = false;
579 /* collect_object_sizes_for is changing
580 osi.reexamine bitmap, so iterate over a copy. */
581 bitmap_copy (reexamine, osi.reexamine);
582 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
583 if (bitmap_bit_p (osi.reexamine, i))
585 collect_object_sizes_for (&osi, ssa_name (i));
586 if (dump_file && (dump_flags & TDF_DETAILS))
588 fprintf (dump_file, "Reexamining ");
589 print_generic_expr (dump_file, ssa_name (i),
590 dump_flags);
591 fprintf (dump_file, "\n");
595 while (osi.changed);
597 BITMAP_FREE (reexamine);
599 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
600 bitmap_set_bit (computed[object_size_type], i);
602 /* Debugging dumps. */
603 if (dump_file)
605 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
606 if (object_sizes[object_size_type][i]
607 != unknown[object_size_type])
609 print_generic_expr (dump_file, ssa_name (i),
610 dump_flags);
611 fprintf (dump_file,
612 ": %s %sobject size "
613 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
614 (object_size_type & 2) ? "minimum" : "maximum",
615 (object_size_type & 1) ? "sub" : "",
616 object_sizes[object_size_type][i]);
620 BITMAP_FREE (osi.reexamine);
621 BITMAP_FREE (osi.visited);
624 return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
627 return unknown[object_size_type];
630 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
632 static void
633 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
635 int object_size_type = osi->object_size_type;
636 unsigned int varno = SSA_NAME_VERSION (ptr);
637 unsigned HOST_WIDE_INT bytes;
639 gcc_assert (object_sizes[object_size_type][varno]
640 != unknown[object_size_type]);
641 gcc_assert (osi->pass == 0);
643 if (TREE_CODE (value) == WITH_SIZE_EXPR)
644 value = TREE_OPERAND (value, 0);
646 /* Pointer variables should have been handled by merge_object_sizes. */
647 gcc_assert (TREE_CODE (value) != SSA_NAME
648 || !POINTER_TYPE_P (TREE_TYPE (value)));
650 if (TREE_CODE (value) == ADDR_EXPR)
651 bytes = addr_object_size (osi, value, object_size_type);
652 else
653 bytes = unknown[object_size_type];
655 if ((object_size_type & 2) == 0)
657 if (object_sizes[object_size_type][varno] < bytes)
658 object_sizes[object_size_type][varno] = bytes;
660 else
662 if (object_sizes[object_size_type][varno] > bytes)
663 object_sizes[object_size_type][varno] = bytes;
668 /* Compute object_sizes for PTR, defined to the result of a call. */
670 static void
671 call_object_size (struct object_size_info *osi, tree ptr, gimple call)
673 int object_size_type = osi->object_size_type;
674 unsigned int varno = SSA_NAME_VERSION (ptr);
675 unsigned HOST_WIDE_INT bytes;
677 gcc_assert (is_gimple_call (call));
679 gcc_assert (object_sizes[object_size_type][varno]
680 != unknown[object_size_type]);
681 gcc_assert (osi->pass == 0);
683 bytes = alloc_object_size (call, object_size_type);
685 if ((object_size_type & 2) == 0)
687 if (object_sizes[object_size_type][varno] < bytes)
688 object_sizes[object_size_type][varno] = bytes;
690 else
692 if (object_sizes[object_size_type][varno] > bytes)
693 object_sizes[object_size_type][varno] = bytes;
698 /* Compute object_sizes for PTR, defined to an unknown value. */
700 static void
701 unknown_object_size (struct object_size_info *osi, tree ptr)
703 int object_size_type = osi->object_size_type;
704 unsigned int varno = SSA_NAME_VERSION (ptr);
705 unsigned HOST_WIDE_INT bytes;
707 gcc_assert (object_sizes[object_size_type][varno]
708 != unknown[object_size_type]);
709 gcc_assert (osi->pass == 0);
711 bytes = unknown[object_size_type];
713 if ((object_size_type & 2) == 0)
715 if (object_sizes[object_size_type][varno] < bytes)
716 object_sizes[object_size_type][varno] = bytes;
718 else
720 if (object_sizes[object_size_type][varno] > bytes)
721 object_sizes[object_size_type][varno] = bytes;
726 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
727 the object size might need reexamination later. */
729 static bool
730 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
731 unsigned HOST_WIDE_INT offset)
733 int object_size_type = osi->object_size_type;
734 unsigned int varno = SSA_NAME_VERSION (dest);
735 unsigned HOST_WIDE_INT orig_bytes;
737 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
738 return false;
739 if (offset >= offset_limit)
741 object_sizes[object_size_type][varno] = unknown[object_size_type];
742 return false;
745 if (osi->pass == 0)
746 collect_object_sizes_for (osi, orig);
748 orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
749 if (orig_bytes != unknown[object_size_type])
750 orig_bytes = (offset > orig_bytes)
751 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;
753 if ((object_size_type & 2) == 0)
755 if (object_sizes[object_size_type][varno] < orig_bytes)
757 object_sizes[object_size_type][varno] = orig_bytes;
758 osi->changed = true;
761 else
763 if (object_sizes[object_size_type][varno] > orig_bytes)
765 object_sizes[object_size_type][varno] = orig_bytes;
766 osi->changed = true;
769 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
773 /* Compute object_sizes for VAR, defined to the result of an assignment
774 with operator POINTER_PLUS_EXPR. Return true if the object size might
775 need reexamination later. */
777 static bool
778 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
780 int object_size_type = osi->object_size_type;
781 unsigned int varno = SSA_NAME_VERSION (var);
782 unsigned HOST_WIDE_INT bytes;
783 tree op0, op1;
785 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
787 op0 = gimple_assign_rhs1 (stmt);
788 op1 = gimple_assign_rhs2 (stmt);
790 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
792 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
793 gcc_assert (TREE_CODE (rhs) == MEM_REF);
794 op0 = TREE_OPERAND (rhs, 0);
795 op1 = TREE_OPERAND (rhs, 1);
797 else
798 gcc_unreachable ();
800 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
801 return false;
803 /* Handle PTR + OFFSET here. */
804 if (TREE_CODE (op1) == INTEGER_CST
805 && (TREE_CODE (op0) == SSA_NAME
806 || TREE_CODE (op0) == ADDR_EXPR))
808 if (! tree_fits_uhwi_p (op1))
809 bytes = unknown[object_size_type];
810 else if (TREE_CODE (op0) == SSA_NAME)
811 return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
812 else
814 unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);
816 /* op0 will be ADDR_EXPR here. */
817 bytes = addr_object_size (osi, op0, object_size_type);
818 if (bytes == unknown[object_size_type])
820 else if (off > offset_limit)
821 bytes = unknown[object_size_type];
822 else if (off > bytes)
823 bytes = 0;
824 else
825 bytes -= off;
828 else
829 bytes = unknown[object_size_type];
831 if ((object_size_type & 2) == 0)
833 if (object_sizes[object_size_type][varno] < bytes)
834 object_sizes[object_size_type][varno] = bytes;
836 else
838 if (object_sizes[object_size_type][varno] > bytes)
839 object_sizes[object_size_type][varno] = bytes;
841 return false;
845 /* Compute object_sizes for VAR, defined at STMT, which is
846 a COND_EXPR. Return true if the object size might need reexamination
847 later. */
849 static bool
850 cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
852 tree then_, else_;
853 int object_size_type = osi->object_size_type;
854 unsigned int varno = SSA_NAME_VERSION (var);
855 bool reexamine = false;
857 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
859 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
860 return false;
862 then_ = gimple_assign_rhs2 (stmt);
863 else_ = gimple_assign_rhs3 (stmt);
865 if (TREE_CODE (then_) == SSA_NAME)
866 reexamine |= merge_object_sizes (osi, var, then_, 0);
867 else
868 expr_object_size (osi, var, then_);
870 if (TREE_CODE (else_) == SSA_NAME)
871 reexamine |= merge_object_sizes (osi, var, else_, 0);
872 else
873 expr_object_size (osi, var, else_);
875 return reexamine;
878 /* Compute object sizes for VAR.
879 For ADDR_EXPR an object size is the number of remaining bytes
880 to the end of the object (where what is considered an object depends on
881 OSI->object_size_type).
882 For allocation GIMPLE_CALL like malloc or calloc object size is the size
883 of the allocation.
884 For POINTER_PLUS_EXPR where second operand is a constant integer,
885 object size is object size of the first operand minus the constant.
886 If the constant is bigger than the number of remaining bytes until the
887 end of the object, object size is 0, but if it is instead a pointer
888 subtraction, object size is unknown[object_size_type].
889 To differentiate addition from subtraction, ADDR_EXPR returns
890 unknown[object_size_type] for all objects bigger than half of the address
891 space, and constants less than half of the address space are considered
892 addition, while bigger constants subtraction.
893 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
894 object size is object size of that argument.
895 Otherwise, object size is the maximum of object sizes of variables
896 that it might be set to. */
898 static void
899 collect_object_sizes_for (struct object_size_info *osi, tree var)
901 int object_size_type = osi->object_size_type;
902 unsigned int varno = SSA_NAME_VERSION (var);
903 gimple stmt;
904 bool reexamine;
906 if (bitmap_bit_p (computed[object_size_type], varno))
907 return;
909 if (osi->pass == 0)
911 if (bitmap_set_bit (osi->visited, varno))
913 object_sizes[object_size_type][varno]
914 = (object_size_type & 2) ? -1 : 0;
916 else
918 /* Found a dependency loop. Mark the variable for later
919 re-examination. */
920 bitmap_set_bit (osi->reexamine, varno);
921 if (dump_file && (dump_flags & TDF_DETAILS))
923 fprintf (dump_file, "Found a dependency loop at ");
924 print_generic_expr (dump_file, var, dump_flags);
925 fprintf (dump_file, "\n");
927 return;
931 if (dump_file && (dump_flags & TDF_DETAILS))
933 fprintf (dump_file, "Visiting use-def links for ");
934 print_generic_expr (dump_file, var, dump_flags);
935 fprintf (dump_file, "\n");
938 stmt = SSA_NAME_DEF_STMT (var);
939 reexamine = false;
941 switch (gimple_code (stmt))
943 case GIMPLE_ASSIGN:
945 tree rhs = gimple_assign_rhs1 (stmt);
946 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
947 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
948 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
949 reexamine = plus_stmt_object_size (osi, var, stmt);
950 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
951 reexamine = cond_expr_object_size (osi, var, stmt);
952 else if (gimple_assign_single_p (stmt)
953 || gimple_assign_unary_nop_p (stmt))
955 if (TREE_CODE (rhs) == SSA_NAME
956 && POINTER_TYPE_P (TREE_TYPE (rhs)))
957 reexamine = merge_object_sizes (osi, var, rhs, 0);
958 else
959 expr_object_size (osi, var, rhs);
961 else
962 unknown_object_size (osi, var);
963 break;
966 case GIMPLE_CALL:
968 tree arg = pass_through_call (stmt);
969 if (arg)
971 if (TREE_CODE (arg) == SSA_NAME
972 && POINTER_TYPE_P (TREE_TYPE (arg)))
973 reexamine = merge_object_sizes (osi, var, arg, 0);
974 else
975 expr_object_size (osi, var, arg);
977 else
978 call_object_size (osi, var, stmt);
979 break;
982 case GIMPLE_ASM:
983 /* Pointers defined by __asm__ statements can point anywhere. */
984 object_sizes[object_size_type][varno] = unknown[object_size_type];
985 break;
987 case GIMPLE_NOP:
988 if (SSA_NAME_VAR (var)
989 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
990 expr_object_size (osi, var, SSA_NAME_VAR (var));
991 else
992 /* Uninitialized SSA names point nowhere. */
993 object_sizes[object_size_type][varno] = unknown[object_size_type];
994 break;
996 case GIMPLE_PHI:
998 unsigned i;
1000 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1002 tree rhs = gimple_phi_arg (stmt, i)->def;
1004 if (object_sizes[object_size_type][varno]
1005 == unknown[object_size_type])
1006 break;
1008 if (TREE_CODE (rhs) == SSA_NAME)
1009 reexamine |= merge_object_sizes (osi, var, rhs, 0);
1010 else if (osi->pass == 0)
1011 expr_object_size (osi, var, rhs);
1013 break;
1016 default:
1017 gcc_unreachable ();
1020 if (! reexamine
1021 || object_sizes[object_size_type][varno] == unknown[object_size_type])
1023 bitmap_set_bit (computed[object_size_type], varno);
1024 bitmap_clear_bit (osi->reexamine, varno);
1026 else
1028 bitmap_set_bit (osi->reexamine, varno);
1029 if (dump_file && (dump_flags & TDF_DETAILS))
1031 fprintf (dump_file, "Need to reexamine ");
1032 print_generic_expr (dump_file, var, dump_flags);
1033 fprintf (dump_file, "\n");
1039 /* Helper function for check_for_plus_in_loops. Called recursively
1040 to detect loops. */
1042 static void
1043 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1044 unsigned int depth)
1046 gimple stmt = SSA_NAME_DEF_STMT (var);
1047 unsigned int varno = SSA_NAME_VERSION (var);
1049 if (osi->depths[varno])
1051 if (osi->depths[varno] != depth)
1053 unsigned int *sp;
1055 /* Found a loop involving pointer addition. */
1056 for (sp = osi->tos; sp > osi->stack; )
1058 --sp;
1059 bitmap_clear_bit (osi->reexamine, *sp);
1060 bitmap_set_bit (computed[osi->object_size_type], *sp);
1061 object_sizes[osi->object_size_type][*sp] = 0;
1062 if (*sp == varno)
1063 break;
1066 return;
1068 else if (! bitmap_bit_p (osi->reexamine, varno))
1069 return;
1071 osi->depths[varno] = depth;
1072 *osi->tos++ = varno;
1074 switch (gimple_code (stmt))
1077 case GIMPLE_ASSIGN:
1079 if ((gimple_assign_single_p (stmt)
1080 || gimple_assign_unary_nop_p (stmt))
1081 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1083 tree rhs = gimple_assign_rhs1 (stmt);
1085 check_for_plus_in_loops_1 (osi, rhs, depth);
1087 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1089 tree basevar = gimple_assign_rhs1 (stmt);
1090 tree cst = gimple_assign_rhs2 (stmt);
1092 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1094 check_for_plus_in_loops_1 (osi, basevar,
1095 depth + !integer_zerop (cst));
1097 else
1098 gcc_unreachable ();
1099 break;
1102 case GIMPLE_CALL:
1104 tree arg = pass_through_call (stmt);
1105 if (arg)
1107 if (TREE_CODE (arg) == SSA_NAME)
1108 check_for_plus_in_loops_1 (osi, arg, depth);
1109 else
1110 gcc_unreachable ();
1112 break;
1115 case GIMPLE_PHI:
1117 unsigned i;
1119 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1121 tree rhs = gimple_phi_arg (stmt, i)->def;
1123 if (TREE_CODE (rhs) == SSA_NAME)
1124 check_for_plus_in_loops_1 (osi, rhs, depth);
1126 break;
1129 default:
1130 gcc_unreachable ();
1133 osi->depths[varno] = 0;
1134 osi->tos--;
1138 /* Check if some pointer we are computing object size of is being increased
1139 within a loop. If yes, assume all the SSA variables participating in
1140 that loop have minimum object sizes 0. */
1142 static void
1143 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1145 gimple stmt = SSA_NAME_DEF_STMT (var);
1147 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1148 and looked for a POINTER_PLUS_EXPR in the pass-through
1149 argument, if any. In GIMPLE, however, such an expression
1150 is not a valid call operand. */
1152 if (is_gimple_assign (stmt)
1153 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1155 tree basevar = gimple_assign_rhs1 (stmt);
1156 tree cst = gimple_assign_rhs2 (stmt);
1158 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1160 if (integer_zerop (cst))
1161 return;
1163 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1164 *osi->tos++ = SSA_NAME_VERSION (basevar);
1165 check_for_plus_in_loops_1 (osi, var, 2);
1166 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1167 osi->tos--;
1172 /* Initialize data structures for the object size computation. */
1174 void
1175 init_object_sizes (void)
1177 int object_size_type;
1179 if (computed[0])
1180 return;
1182 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1184 object_sizes[object_size_type].safe_grow (num_ssa_names);
1185 computed[object_size_type] = BITMAP_ALLOC (NULL);
1188 init_offset_limit ();
1192 /* Destroy data structures after the object size computation. */
1194 static void
1195 fini_object_sizes (void)
1197 int object_size_type;
1199 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1201 object_sizes[object_size_type].release ();
1202 BITMAP_FREE (computed[object_size_type]);
1207 /* Simple pass to optimize all __builtin_object_size () builtins. */
1209 namespace {
1211 const pass_data pass_data_object_sizes =
1213 GIMPLE_PASS, /* type */
1214 "objsz", /* name */
1215 OPTGROUP_NONE, /* optinfo_flags */
1216 true, /* has_execute */
1217 TV_NONE, /* tv_id */
1218 ( PROP_cfg | PROP_ssa ), /* properties_required */
1219 0, /* properties_provided */
1220 0, /* properties_destroyed */
1221 0, /* todo_flags_start */
1222 TODO_verify_ssa, /* todo_flags_finish */
1225 class pass_object_sizes : public gimple_opt_pass
1227 public:
1228 pass_object_sizes (gcc::context *ctxt)
1229 : gimple_opt_pass (pass_data_object_sizes, ctxt)
1232 /* opt_pass methods: */
1233 opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
1234 virtual unsigned int execute (function *);
1236 }; // class pass_object_sizes
1238 unsigned int
1239 pass_object_sizes::execute (function *fun)
1241 basic_block bb;
1242 FOR_EACH_BB_FN (bb, fun)
1244 gimple_stmt_iterator i;
1245 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1247 tree result;
1248 gimple call = gsi_stmt (i);
1249 if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
1250 continue;
1252 init_object_sizes ();
1253 result = fold_call_stmt (call, false);
1254 if (!result)
1256 if (gimple_call_num_args (call) == 2
1257 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
1259 tree ost = gimple_call_arg (call, 1);
1261 if (tree_fits_uhwi_p (ost))
1263 unsigned HOST_WIDE_INT object_size_type
1264 = tree_to_uhwi (ost);
1266 if (object_size_type < 2)
1267 result = fold_convert (size_type_node,
1268 integer_minus_one_node);
1269 else if (object_size_type < 4)
1270 result = build_zero_cst (size_type_node);
1274 if (!result)
1275 continue;
1278 gcc_assert (TREE_CODE (result) == INTEGER_CST);
1280 if (dump_file && (dump_flags & TDF_DETAILS))
1282 fprintf (dump_file, "Simplified\n ");
1283 print_gimple_stmt (dump_file, call, 0, dump_flags);
1284 fprintf (dump_file, " to ");
1285 print_generic_expr (dump_file, result, 0);
1286 fprintf (dump_file, "\n");
1289 tree lhs = gimple_call_lhs (call);
1290 if (!lhs)
1291 continue;
1293 /* Propagate into all uses and fold those stmts. */
1294 gimple use_stmt;
1295 imm_use_iterator iter;
1296 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
1298 use_operand_p use_p;
1299 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1300 SET_USE (use_p, result);
1301 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1302 fold_stmt (&gsi);
1303 update_stmt (gsi_stmt (gsi));
1308 fini_object_sizes ();
1309 return 0;
1312 } // anon namespace
1314 gimple_opt_pass *
1315 make_pass_object_sizes (gcc::context *ctxt)
1317 return new pass_object_sizes (ctxt);