PR tree-optimization/66718
[official-gcc.git] / gcc / tree-object-size.c
blob e48ee6445798dc578d8223922d7d4ad023e00260
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "alias.h"
26 #include "symtab.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "tree-object-size.h"
30 #include "diagnostic-core.h"
31 #include "gimple-pretty-print.h"
32 #include "bitmap.h"
33 #include "predict.h"
34 #include "hard-reg-set.h"
35 #include "function.h"
36 #include "dominance.h"
37 #include "cfg.h"
38 #include "basic-block.h"
39 #include "tree-ssa-alias.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "gimple-expr.h"
43 #include "gimple.h"
44 #include "gimple-iterator.h"
45 #include "gimple-ssa.h"
46 #include "stringpool.h"
47 #include "tree-ssanames.h"
48 #include "tree-pass.h"
49 #include "tree-ssa-propagate.h"
50 #include "tree-phinodes.h"
51 #include "ssa-iterators.h"
52 #include "builtins.h"
54 struct object_size_info
56 int object_size_type;
57 bitmap visited, reexamine;
58 int pass;
59 bool changed;
60 unsigned int *depths;
61 unsigned int *stack, *tos;
64 static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
66 static tree compute_object_offset (const_tree, const_tree);
67 static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
68 const_tree, int);
69 static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
70 static tree pass_through_call (const gcall *);
71 static void collect_object_sizes_for (struct object_size_info *, tree);
72 static void expr_object_size (struct object_size_info *, tree, tree);
73 static bool merge_object_sizes (struct object_size_info *, tree, tree,
74 unsigned HOST_WIDE_INT);
75 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
76 static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
77 static void init_offset_limit (void);
78 static void check_for_plus_in_loops (struct object_size_info *, tree);
79 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
80 unsigned int);
82 /* object_sizes[0] is upper bound for number of bytes till the end of
83 the object.
84 object_sizes[1] is upper bound for number of bytes till the end of
85 the subobject (innermost array or field with address taken).
86 object_sizes[2] is lower bound for number of bytes till the end of
87 the object and object_sizes[3] lower bound for subobject. */
88 static vec<unsigned HOST_WIDE_INT> object_sizes[4];
90 /* Bitmaps what object sizes have been computed already. */
91 static bitmap computed[4];
93 /* Maximum value of offset we consider to be addition. */
94 static unsigned HOST_WIDE_INT offset_limit;
97 /* Initialize OFFSET_LIMIT variable. */
98 static void
99 init_offset_limit (void)
101 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
102 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
103 else
104 offset_limit = -1;
105 offset_limit /= 2;
109 /* Compute offset of EXPR within VAR. Return error_mark_node
110 if unknown. */
112 static tree
113 compute_object_offset (const_tree expr, const_tree var)
115 enum tree_code code = PLUS_EXPR;
116 tree base, off, t;
118 if (expr == var)
119 return size_zero_node;
121 switch (TREE_CODE (expr))
123 case COMPONENT_REF:
124 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
125 if (base == error_mark_node)
126 return base;
128 t = TREE_OPERAND (expr, 1);
129 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
130 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
131 / BITS_PER_UNIT));
132 break;
134 case REALPART_EXPR:
135 CASE_CONVERT:
136 case VIEW_CONVERT_EXPR:
137 case NON_LVALUE_EXPR:
138 return compute_object_offset (TREE_OPERAND (expr, 0), var);
140 case IMAGPART_EXPR:
141 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
142 if (base == error_mark_node)
143 return base;
145 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
146 break;
148 case ARRAY_REF:
149 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
150 if (base == error_mark_node)
151 return base;
153 t = TREE_OPERAND (expr, 1);
154 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
156 code = MINUS_EXPR;
157 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
159 t = fold_convert (sizetype, t);
160 off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
161 break;
163 case MEM_REF:
164 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
165 return wide_int_to_tree (sizetype, mem_ref_offset (expr));
167 default:
168 return error_mark_node;
171 return size_binop (code, base, off);
175 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
176 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
177 If unknown, return unknown[object_size_type]. */
179 static unsigned HOST_WIDE_INT
180 addr_object_size (struct object_size_info *osi, const_tree ptr,
181 int object_size_type)
183 tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;
185 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
187 pt_var = TREE_OPERAND (ptr, 0);
188 while (handled_component_p (pt_var))
189 pt_var = TREE_OPERAND (pt_var, 0);
191 if (pt_var
192 && TREE_CODE (pt_var) == MEM_REF)
194 unsigned HOST_WIDE_INT sz;
196 if (!osi || (object_size_type & 1) != 0
197 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
199 sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
200 object_size_type & ~1);
202 else
204 tree var = TREE_OPERAND (pt_var, 0);
205 if (osi->pass == 0)
206 collect_object_sizes_for (osi, var);
207 if (bitmap_bit_p (computed[object_size_type],
208 SSA_NAME_VERSION (var)))
209 sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
210 else
211 sz = unknown[object_size_type];
213 if (sz != unknown[object_size_type])
215 offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
216 if (wi::neg_p (dsz))
217 sz = 0;
218 else if (wi::fits_uhwi_p (dsz))
219 sz = dsz.to_uhwi ();
220 else
221 sz = unknown[object_size_type];
224 if (sz != unknown[object_size_type] && sz < offset_limit)
225 pt_var_size = size_int (sz);
227 else if (pt_var
228 && DECL_P (pt_var)
229 && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
230 && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
231 pt_var_size = DECL_SIZE_UNIT (pt_var);
232 else if (pt_var
233 && TREE_CODE (pt_var) == STRING_CST
234 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
235 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
236 && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
237 < offset_limit)
238 pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
239 else
240 return unknown[object_size_type];
242 if (pt_var != TREE_OPERAND (ptr, 0))
244 tree var;
246 if (object_size_type & 1)
248 var = TREE_OPERAND (ptr, 0);
250 while (var != pt_var
251 && TREE_CODE (var) != BIT_FIELD_REF
252 && TREE_CODE (var) != COMPONENT_REF
253 && TREE_CODE (var) != ARRAY_REF
254 && TREE_CODE (var) != ARRAY_RANGE_REF
255 && TREE_CODE (var) != REALPART_EXPR
256 && TREE_CODE (var) != IMAGPART_EXPR)
257 var = TREE_OPERAND (var, 0);
258 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
259 var = TREE_OPERAND (var, 0);
260 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
261 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
262 || (pt_var_size
263 && tree_int_cst_lt (pt_var_size,
264 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
265 var = pt_var;
266 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
268 tree v = var;
269 /* For &X->fld, compute object size only if fld isn't the last
270 field, as struct { int i; char c[1]; } is often used instead
271 of flexible array member. */
272 while (v && v != pt_var)
273 switch (TREE_CODE (v))
275 case ARRAY_REF:
276 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
277 && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
279 tree domain
280 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
281 if (domain
282 && TYPE_MAX_VALUE (domain)
283 && TREE_CODE (TYPE_MAX_VALUE (domain))
284 == INTEGER_CST
285 && tree_int_cst_lt (TREE_OPERAND (v, 1),
286 TYPE_MAX_VALUE (domain)))
288 v = NULL_TREE;
289 break;
292 v = TREE_OPERAND (v, 0);
293 break;
294 case REALPART_EXPR:
295 case IMAGPART_EXPR:
296 v = NULL_TREE;
297 break;
298 case COMPONENT_REF:
299 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
301 v = NULL_TREE;
302 break;
304 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
305 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
306 != UNION_TYPE
307 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
308 != QUAL_UNION_TYPE)
309 break;
310 else
311 v = TREE_OPERAND (v, 0);
312 if (TREE_CODE (v) == COMPONENT_REF
313 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
314 == RECORD_TYPE)
316 tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
317 for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
318 if (TREE_CODE (fld_chain) == FIELD_DECL)
319 break;
321 if (fld_chain)
323 v = NULL_TREE;
324 break;
326 v = TREE_OPERAND (v, 0);
328 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
329 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
330 != UNION_TYPE
331 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
332 != QUAL_UNION_TYPE)
333 break;
334 else
335 v = TREE_OPERAND (v, 0);
336 if (v != pt_var)
337 v = NULL_TREE;
338 else
339 v = pt_var;
340 break;
341 default:
342 v = pt_var;
343 break;
345 if (v == pt_var)
346 var = pt_var;
349 else
350 var = pt_var;
352 if (var != pt_var)
353 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
354 else if (!pt_var_size)
355 return unknown[object_size_type];
356 else
357 var_size = pt_var_size;
358 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
359 if (bytes != error_mark_node)
361 if (TREE_CODE (bytes) == INTEGER_CST
362 && tree_int_cst_lt (var_size, bytes))
363 bytes = size_zero_node;
364 else
365 bytes = size_binop (MINUS_EXPR, var_size, bytes);
367 if (var != pt_var
368 && pt_var_size
369 && TREE_CODE (pt_var) == MEM_REF
370 && bytes != error_mark_node)
372 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
373 if (bytes2 != error_mark_node)
375 if (TREE_CODE (bytes2) == INTEGER_CST
376 && tree_int_cst_lt (pt_var_size, bytes2))
377 bytes2 = size_zero_node;
378 else
379 bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
380 bytes = size_binop (MIN_EXPR, bytes, bytes2);
384 else if (!pt_var_size)
385 return unknown[object_size_type];
386 else
387 bytes = pt_var_size;
389 if (tree_fits_uhwi_p (bytes))
390 return tree_to_uhwi (bytes);
392 return unknown[object_size_type];
396 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
397 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
398 argument from __builtin_object_size. If unknown, return
399 unknown[object_size_type]. */
401 static unsigned HOST_WIDE_INT
402 alloc_object_size (const gcall *call, int object_size_type)
404 tree callee, bytes = NULL_TREE;
405 tree alloc_size;
406 int arg1 = -1, arg2 = -1;
408 gcc_assert (is_gimple_call (call));
410 callee = gimple_call_fndecl (call);
411 if (!callee)
412 return unknown[object_size_type];
414 alloc_size = lookup_attribute ("alloc_size",
415 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
416 if (alloc_size && TREE_VALUE (alloc_size))
418 tree p = TREE_VALUE (alloc_size);
420 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
421 if (TREE_CHAIN (p))
422 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
425 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
426 switch (DECL_FUNCTION_CODE (callee))
428 case BUILT_IN_CALLOC:
429 arg2 = 1;
430 /* fall through */
431 case BUILT_IN_MALLOC:
432 case BUILT_IN_ALLOCA:
433 case BUILT_IN_ALLOCA_WITH_ALIGN:
434 arg1 = 0;
435 default:
436 break;
439 if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
440 || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
441 || (arg2 >= 0
442 && (arg2 >= (int)gimple_call_num_args (call)
443 || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
444 return unknown[object_size_type];
446 if (arg2 >= 0)
447 bytes = size_binop (MULT_EXPR,
448 fold_convert (sizetype, gimple_call_arg (call, arg1)),
449 fold_convert (sizetype, gimple_call_arg (call, arg2)));
450 else if (arg1 >= 0)
451 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
453 if (bytes && tree_fits_uhwi_p (bytes))
454 return tree_to_uhwi (bytes);
456 return unknown[object_size_type];
460 /* If object size is propagated from one of function's arguments directly
461 to its return value, return that argument for GIMPLE_CALL statement CALL.
462 Otherwise return NULL. */
464 static tree
465 pass_through_call (const gcall *call)
467 tree callee = gimple_call_fndecl (call);
469 if (callee
470 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
471 switch (DECL_FUNCTION_CODE (callee))
473 case BUILT_IN_MEMCPY:
474 case BUILT_IN_MEMMOVE:
475 case BUILT_IN_MEMSET:
476 case BUILT_IN_STRCPY:
477 case BUILT_IN_STRNCPY:
478 case BUILT_IN_STRCAT:
479 case BUILT_IN_STRNCAT:
480 case BUILT_IN_MEMCPY_CHK:
481 case BUILT_IN_MEMMOVE_CHK:
482 case BUILT_IN_MEMSET_CHK:
483 case BUILT_IN_STRCPY_CHK:
484 case BUILT_IN_STRNCPY_CHK:
485 case BUILT_IN_STPNCPY_CHK:
486 case BUILT_IN_STRCAT_CHK:
487 case BUILT_IN_STRNCAT_CHK:
488 case BUILT_IN_ASSUME_ALIGNED:
489 if (gimple_call_num_args (call) >= 1)
490 return gimple_call_arg (call, 0);
491 break;
492 default:
493 break;
496 return NULL_TREE;
500 /* Compute __builtin_object_size value for PTR. OBJECT_SIZE_TYPE is the
501 second argument from __builtin_object_size. */
503 unsigned HOST_WIDE_INT
504 compute_builtin_object_size (tree ptr, int object_size_type)
506 gcc_assert (object_size_type >= 0 && object_size_type <= 3);
508 if (! offset_limit)
509 init_offset_limit ();
511 if (TREE_CODE (ptr) == ADDR_EXPR)
512 return addr_object_size (NULL, ptr, object_size_type);
514 if (TREE_CODE (ptr) == SSA_NAME
515 && POINTER_TYPE_P (TREE_TYPE (ptr))
516 && computed[object_size_type] != NULL)
518 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
520 struct object_size_info osi;
521 bitmap_iterator bi;
522 unsigned int i;
524 if (num_ssa_names > object_sizes[object_size_type].length ())
525 object_sizes[object_size_type].safe_grow (num_ssa_names);
526 if (dump_file)
528 fprintf (dump_file, "Computing %s %sobject size for ",
529 (object_size_type & 2) ? "minimum" : "maximum",
530 (object_size_type & 1) ? "sub" : "");
531 print_generic_expr (dump_file, ptr, dump_flags);
532 fprintf (dump_file, ":\n");
535 osi.visited = BITMAP_ALLOC (NULL);
536 osi.reexamine = BITMAP_ALLOC (NULL);
537 osi.object_size_type = object_size_type;
538 osi.depths = NULL;
539 osi.stack = NULL;
540 osi.tos = NULL;
542 /* First pass: walk UD chains, compute object sizes that
543 can be computed. osi.reexamine bitmap at the end will
544 contain what variables were found in dependency cycles
545 and therefore need to be reexamined. */
546 osi.pass = 0;
547 osi.changed = false;
548 collect_object_sizes_for (&osi, ptr);
550 /* Second pass: keep recomputing object sizes of variables
551 that need reexamination, until no object sizes are
552 increased or all object sizes are computed. */
553 if (! bitmap_empty_p (osi.reexamine))
555 bitmap reexamine = BITMAP_ALLOC (NULL);
557 /* If looking for minimum instead of maximum object size,
558 detect cases where a pointer is increased in a loop.
559 Although even without this detection pass 2 would eventually
560 terminate, it could take a long time. If a pointer is
561 increasing this way, we need to assume 0 object size.
562 E.g. p = &buf[0]; while (cond) p = p + 4; */
563 if (object_size_type & 2)
565 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
566 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
567 osi.tos = osi.stack;
568 osi.pass = 1;
569 /* collect_object_sizes_for is changing
570 osi.reexamine bitmap, so iterate over a copy. */
571 bitmap_copy (reexamine, osi.reexamine);
572 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
573 if (bitmap_bit_p (osi.reexamine, i))
574 check_for_plus_in_loops (&osi, ssa_name (i));
576 free (osi.depths);
577 osi.depths = NULL;
578 free (osi.stack);
579 osi.stack = NULL;
580 osi.tos = NULL;
585 osi.pass = 2;
586 osi.changed = false;
587 /* collect_object_sizes_for is changing
588 osi.reexamine bitmap, so iterate over a copy. */
589 bitmap_copy (reexamine, osi.reexamine);
590 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
591 if (bitmap_bit_p (osi.reexamine, i))
593 collect_object_sizes_for (&osi, ssa_name (i));
594 if (dump_file && (dump_flags & TDF_DETAILS))
596 fprintf (dump_file, "Reexamining ");
597 print_generic_expr (dump_file, ssa_name (i),
598 dump_flags);
599 fprintf (dump_file, "\n");
603 while (osi.changed);
605 BITMAP_FREE (reexamine);
607 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
608 bitmap_set_bit (computed[object_size_type], i);
610 /* Debugging dumps. */
611 if (dump_file)
613 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
614 if (object_sizes[object_size_type][i]
615 != unknown[object_size_type])
617 print_generic_expr (dump_file, ssa_name (i),
618 dump_flags);
619 fprintf (dump_file,
620 ": %s %sobject size "
621 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
622 (object_size_type & 2) ? "minimum" : "maximum",
623 (object_size_type & 1) ? "sub" : "",
624 object_sizes[object_size_type][i]);
628 BITMAP_FREE (osi.reexamine);
629 BITMAP_FREE (osi.visited);
632 return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
635 return unknown[object_size_type];
638 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
640 static void
641 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
643 int object_size_type = osi->object_size_type;
644 unsigned int varno = SSA_NAME_VERSION (ptr);
645 unsigned HOST_WIDE_INT bytes;
647 gcc_assert (object_sizes[object_size_type][varno]
648 != unknown[object_size_type]);
649 gcc_assert (osi->pass == 0);
651 if (TREE_CODE (value) == WITH_SIZE_EXPR)
652 value = TREE_OPERAND (value, 0);
654 /* Pointer variables should have been handled by merge_object_sizes. */
655 gcc_assert (TREE_CODE (value) != SSA_NAME
656 || !POINTER_TYPE_P (TREE_TYPE (value)));
658 if (TREE_CODE (value) == ADDR_EXPR)
659 bytes = addr_object_size (osi, value, object_size_type);
660 else
661 bytes = unknown[object_size_type];
663 if ((object_size_type & 2) == 0)
665 if (object_sizes[object_size_type][varno] < bytes)
666 object_sizes[object_size_type][varno] = bytes;
668 else
670 if (object_sizes[object_size_type][varno] > bytes)
671 object_sizes[object_size_type][varno] = bytes;
676 /* Compute object_sizes for PTR, defined to the result of a call. */
678 static void
679 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
681 int object_size_type = osi->object_size_type;
682 unsigned int varno = SSA_NAME_VERSION (ptr);
683 unsigned HOST_WIDE_INT bytes;
685 gcc_assert (is_gimple_call (call));
687 gcc_assert (object_sizes[object_size_type][varno]
688 != unknown[object_size_type]);
689 gcc_assert (osi->pass == 0);
691 bytes = alloc_object_size (call, object_size_type);
693 if ((object_size_type & 2) == 0)
695 if (object_sizes[object_size_type][varno] < bytes)
696 object_sizes[object_size_type][varno] = bytes;
698 else
700 if (object_sizes[object_size_type][varno] > bytes)
701 object_sizes[object_size_type][varno] = bytes;
706 /* Compute object_sizes for PTR, defined to an unknown value. */
708 static void
709 unknown_object_size (struct object_size_info *osi, tree ptr)
711 int object_size_type = osi->object_size_type;
712 unsigned int varno = SSA_NAME_VERSION (ptr);
713 unsigned HOST_WIDE_INT bytes;
715 gcc_assert (object_sizes[object_size_type][varno]
716 != unknown[object_size_type]);
717 gcc_assert (osi->pass == 0);
719 bytes = unknown[object_size_type];
721 if ((object_size_type & 2) == 0)
723 if (object_sizes[object_size_type][varno] < bytes)
724 object_sizes[object_size_type][varno] = bytes;
726 else
728 if (object_sizes[object_size_type][varno] > bytes)
729 object_sizes[object_size_type][varno] = bytes;
734 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
735 the object size might need reexamination later. */
737 static bool
738 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
739 unsigned HOST_WIDE_INT offset)
741 int object_size_type = osi->object_size_type;
742 unsigned int varno = SSA_NAME_VERSION (dest);
743 unsigned HOST_WIDE_INT orig_bytes;
745 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
746 return false;
747 if (offset >= offset_limit)
749 object_sizes[object_size_type][varno] = unknown[object_size_type];
750 return false;
753 if (osi->pass == 0)
754 collect_object_sizes_for (osi, orig);
756 orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
757 if (orig_bytes != unknown[object_size_type])
758 orig_bytes = (offset > orig_bytes)
759 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;
761 if ((object_size_type & 2) == 0)
763 if (object_sizes[object_size_type][varno] < orig_bytes)
765 object_sizes[object_size_type][varno] = orig_bytes;
766 osi->changed = true;
769 else
771 if (object_sizes[object_size_type][varno] > orig_bytes)
773 object_sizes[object_size_type][varno] = orig_bytes;
774 osi->changed = true;
777 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
781 /* Compute object_sizes for VAR, defined to the result of an assignment
782 with operator POINTER_PLUS_EXPR. Return true if the object size might
783 need reexamination later. */
785 static bool
786 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
788 int object_size_type = osi->object_size_type;
789 unsigned int varno = SSA_NAME_VERSION (var);
790 unsigned HOST_WIDE_INT bytes;
791 tree op0, op1;
793 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
795 op0 = gimple_assign_rhs1 (stmt);
796 op1 = gimple_assign_rhs2 (stmt);
798 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
800 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
801 gcc_assert (TREE_CODE (rhs) == MEM_REF);
802 op0 = TREE_OPERAND (rhs, 0);
803 op1 = TREE_OPERAND (rhs, 1);
805 else
806 gcc_unreachable ();
808 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
809 return false;
811 /* Handle PTR + OFFSET here. */
812 if (TREE_CODE (op1) == INTEGER_CST
813 && (TREE_CODE (op0) == SSA_NAME
814 || TREE_CODE (op0) == ADDR_EXPR))
816 if (! tree_fits_uhwi_p (op1))
817 bytes = unknown[object_size_type];
818 else if (TREE_CODE (op0) == SSA_NAME)
819 return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
820 else
822 unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);
824 /* op0 will be ADDR_EXPR here. */
825 bytes = addr_object_size (osi, op0, object_size_type);
826 if (bytes == unknown[object_size_type])
828 else if (off > offset_limit)
829 bytes = unknown[object_size_type];
830 else if (off > bytes)
831 bytes = 0;
832 else
833 bytes -= off;
836 else
837 bytes = unknown[object_size_type];
839 if ((object_size_type & 2) == 0)
841 if (object_sizes[object_size_type][varno] < bytes)
842 object_sizes[object_size_type][varno] = bytes;
844 else
846 if (object_sizes[object_size_type][varno] > bytes)
847 object_sizes[object_size_type][varno] = bytes;
849 return false;
853 /* Compute object_sizes for VAR, defined at STMT, which is
854 a COND_EXPR. Return true if the object size might need reexamination
855 later. */
857 static bool
858 cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
860 tree then_, else_;
861 int object_size_type = osi->object_size_type;
862 unsigned int varno = SSA_NAME_VERSION (var);
863 bool reexamine = false;
865 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
867 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
868 return false;
870 then_ = gimple_assign_rhs2 (stmt);
871 else_ = gimple_assign_rhs3 (stmt);
873 if (TREE_CODE (then_) == SSA_NAME)
874 reexamine |= merge_object_sizes (osi, var, then_, 0);
875 else
876 expr_object_size (osi, var, then_);
878 if (TREE_CODE (else_) == SSA_NAME)
879 reexamine |= merge_object_sizes (osi, var, else_, 0);
880 else
881 expr_object_size (osi, var, else_);
883 return reexamine;
886 /* Compute object sizes for VAR.
887 For ADDR_EXPR an object size is the number of remaining bytes
888 to the end of the object (where what is considered an object depends on
889 OSI->object_size_type).
890 For allocation GIMPLE_CALL like malloc or calloc object size is the size
891 of the allocation.
892 For POINTER_PLUS_EXPR where second operand is a constant integer,
893 object size is object size of the first operand minus the constant.
894 If the constant is bigger than the number of remaining bytes until the
895 end of the object, object size is 0, but if it is instead a pointer
896 subtraction, object size is unknown[object_size_type].
897 To differentiate addition from subtraction, ADDR_EXPR returns
898 unknown[object_size_type] for all objects bigger than half of the address
899 space, and constants less than half of the address space are considered
900 addition, while bigger constants subtraction.
901 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
902 object size is object size of that argument.
903 Otherwise, object size is the maximum of object sizes of variables
904 that it might be set to. */
906 static void
907 collect_object_sizes_for (struct object_size_info *osi, tree var)
909 int object_size_type = osi->object_size_type;
910 unsigned int varno = SSA_NAME_VERSION (var);
911 gimple stmt;
912 bool reexamine;
914 if (bitmap_bit_p (computed[object_size_type], varno))
915 return;
917 if (osi->pass == 0)
919 if (bitmap_set_bit (osi->visited, varno))
921 object_sizes[object_size_type][varno]
922 = (object_size_type & 2) ? -1 : 0;
924 else
926 /* Found a dependency loop. Mark the variable for later
927 re-examination. */
928 bitmap_set_bit (osi->reexamine, varno);
929 if (dump_file && (dump_flags & TDF_DETAILS))
931 fprintf (dump_file, "Found a dependency loop at ");
932 print_generic_expr (dump_file, var, dump_flags);
933 fprintf (dump_file, "\n");
935 return;
939 if (dump_file && (dump_flags & TDF_DETAILS))
941 fprintf (dump_file, "Visiting use-def links for ");
942 print_generic_expr (dump_file, var, dump_flags);
943 fprintf (dump_file, "\n");
946 stmt = SSA_NAME_DEF_STMT (var);
947 reexamine = false;
949 switch (gimple_code (stmt))
951 case GIMPLE_ASSIGN:
953 tree rhs = gimple_assign_rhs1 (stmt);
954 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
955 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
956 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
957 reexamine = plus_stmt_object_size (osi, var, stmt);
958 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
959 reexamine = cond_expr_object_size (osi, var, stmt);
960 else if (gimple_assign_single_p (stmt)
961 || gimple_assign_unary_nop_p (stmt))
963 if (TREE_CODE (rhs) == SSA_NAME
964 && POINTER_TYPE_P (TREE_TYPE (rhs)))
965 reexamine = merge_object_sizes (osi, var, rhs, 0);
966 else
967 expr_object_size (osi, var, rhs);
969 else
970 unknown_object_size (osi, var);
971 break;
974 case GIMPLE_CALL:
976 gcall *call_stmt = as_a <gcall *> (stmt);
977 tree arg = pass_through_call (call_stmt);
978 if (arg)
980 if (TREE_CODE (arg) == SSA_NAME
981 && POINTER_TYPE_P (TREE_TYPE (arg)))
982 reexamine = merge_object_sizes (osi, var, arg, 0);
983 else
984 expr_object_size (osi, var, arg);
986 else
987 call_object_size (osi, var, call_stmt);
988 break;
991 case GIMPLE_ASM:
992 /* Pointers defined by __asm__ statements can point anywhere. */
993 object_sizes[object_size_type][varno] = unknown[object_size_type];
994 break;
996 case GIMPLE_NOP:
997 if (SSA_NAME_VAR (var)
998 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
999 expr_object_size (osi, var, SSA_NAME_VAR (var));
1000 else
1001 /* Uninitialized SSA names point nowhere. */
1002 object_sizes[object_size_type][varno] = unknown[object_size_type];
1003 break;
1005 case GIMPLE_PHI:
1007 unsigned i;
1009 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1011 tree rhs = gimple_phi_arg (stmt, i)->def;
1013 if (object_sizes[object_size_type][varno]
1014 == unknown[object_size_type])
1015 break;
1017 if (TREE_CODE (rhs) == SSA_NAME)
1018 reexamine |= merge_object_sizes (osi, var, rhs, 0);
1019 else if (osi->pass == 0)
1020 expr_object_size (osi, var, rhs);
1022 break;
1025 default:
1026 gcc_unreachable ();
1029 if (! reexamine
1030 || object_sizes[object_size_type][varno] == unknown[object_size_type])
1032 bitmap_set_bit (computed[object_size_type], varno);
1033 bitmap_clear_bit (osi->reexamine, varno);
1035 else
1037 bitmap_set_bit (osi->reexamine, varno);
1038 if (dump_file && (dump_flags & TDF_DETAILS))
1040 fprintf (dump_file, "Need to reexamine ");
1041 print_generic_expr (dump_file, var, dump_flags);
1042 fprintf (dump_file, "\n");
1048 /* Helper function for check_for_plus_in_loops. Called recursively
1049 to detect loops. */
1051 static void
1052 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1053 unsigned int depth)
1055 gimple stmt = SSA_NAME_DEF_STMT (var);
1056 unsigned int varno = SSA_NAME_VERSION (var);
1058 if (osi->depths[varno])
1060 if (osi->depths[varno] != depth)
1062 unsigned int *sp;
1064 /* Found a loop involving pointer addition. */
1065 for (sp = osi->tos; sp > osi->stack; )
1067 --sp;
1068 bitmap_clear_bit (osi->reexamine, *sp);
1069 bitmap_set_bit (computed[osi->object_size_type], *sp);
1070 object_sizes[osi->object_size_type][*sp] = 0;
1071 if (*sp == varno)
1072 break;
1075 return;
1077 else if (! bitmap_bit_p (osi->reexamine, varno))
1078 return;
1080 osi->depths[varno] = depth;
1081 *osi->tos++ = varno;
1083 switch (gimple_code (stmt))
1086 case GIMPLE_ASSIGN:
1088 if ((gimple_assign_single_p (stmt)
1089 || gimple_assign_unary_nop_p (stmt))
1090 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1092 tree rhs = gimple_assign_rhs1 (stmt);
1094 check_for_plus_in_loops_1 (osi, rhs, depth);
1096 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1098 tree basevar = gimple_assign_rhs1 (stmt);
1099 tree cst = gimple_assign_rhs2 (stmt);
1101 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1103 check_for_plus_in_loops_1 (osi, basevar,
1104 depth + !integer_zerop (cst));
1106 else
1107 gcc_unreachable ();
1108 break;
1111 case GIMPLE_CALL:
1113 gcall *call_stmt = as_a <gcall *> (stmt);
1114 tree arg = pass_through_call (call_stmt);
1115 if (arg)
1117 if (TREE_CODE (arg) == SSA_NAME)
1118 check_for_plus_in_loops_1 (osi, arg, depth);
1119 else
1120 gcc_unreachable ();
1122 break;
1125 case GIMPLE_PHI:
1127 unsigned i;
1129 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1131 tree rhs = gimple_phi_arg (stmt, i)->def;
1133 if (TREE_CODE (rhs) == SSA_NAME)
1134 check_for_plus_in_loops_1 (osi, rhs, depth);
1136 break;
1139 default:
1140 gcc_unreachable ();
1143 osi->depths[varno] = 0;
1144 osi->tos--;
1148 /* Check if some pointer we are computing object size of is being increased
1149 within a loop. If yes, assume all the SSA variables participating in
1150 that loop have minimum object sizes 0. */
1152 static void
1153 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1155 gimple stmt = SSA_NAME_DEF_STMT (var);
1157 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1158 and looked for a POINTER_PLUS_EXPR in the pass-through
1159 argument, if any. In GIMPLE, however, such an expression
1160 is not a valid call operand. */
1162 if (is_gimple_assign (stmt)
1163 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1165 tree basevar = gimple_assign_rhs1 (stmt);
1166 tree cst = gimple_assign_rhs2 (stmt);
1168 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1170 if (integer_zerop (cst))
1171 return;
1173 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1174 *osi->tos++ = SSA_NAME_VERSION (basevar);
1175 check_for_plus_in_loops_1 (osi, var, 2);
1176 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1177 osi->tos--;
1182 /* Initialize data structures for the object size computation. */
1184 void
1185 init_object_sizes (void)
1187 int object_size_type;
1189 if (computed[0])
1190 return;
1192 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1194 object_sizes[object_size_type].safe_grow (num_ssa_names);
1195 computed[object_size_type] = BITMAP_ALLOC (NULL);
1198 init_offset_limit ();
1202 /* Destroy data structures after the object size computation. */
1204 static void
1205 fini_object_sizes (void)
1207 int object_size_type;
1209 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1211 object_sizes[object_size_type].release ();
1212 BITMAP_FREE (computed[object_size_type]);
1217 /* Simple pass to optimize all __builtin_object_size () builtins. */
1219 namespace {
1221 const pass_data pass_data_object_sizes =
1223 GIMPLE_PASS, /* type */
1224 "objsz", /* name */
1225 OPTGROUP_NONE, /* optinfo_flags */
1226 TV_NONE, /* tv_id */
1227 ( PROP_cfg | PROP_ssa ), /* properties_required */
1228 0, /* properties_provided */
1229 0, /* properties_destroyed */
1230 0, /* todo_flags_start */
1231 0, /* todo_flags_finish */
1234 class pass_object_sizes : public gimple_opt_pass
1236 public:
1237 pass_object_sizes (gcc::context *ctxt)
1238 : gimple_opt_pass (pass_data_object_sizes, ctxt)
1241 /* opt_pass methods: */
1242 opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
1243 virtual unsigned int execute (function *);
1245 }; // class pass_object_sizes
1247 unsigned int
1248 pass_object_sizes::execute (function *fun)
1250 basic_block bb;
1251 FOR_EACH_BB_FN (bb, fun)
1253 gimple_stmt_iterator i;
1254 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1256 tree result;
1257 gimple call = gsi_stmt (i);
1258 if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
1259 continue;
1261 init_object_sizes ();
1263 /* In the first pass instance, only attempt to fold
1264 __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
1265 and rather than folding the builtin to the constant if any,
1266 create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
1267 call result and the computed constant. */
1268 if (first_pass_instance)
1270 tree ost = gimple_call_arg (call, 1);
1271 if (tree_fits_uhwi_p (ost))
1273 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
1274 tree ptr = gimple_call_arg (call, 0);
1275 tree lhs = gimple_call_lhs (call);
1276 if ((object_size_type == 1 || object_size_type == 3)
1277 && (TREE_CODE (ptr) == ADDR_EXPR
1278 || TREE_CODE (ptr) == SSA_NAME)
1279 && lhs)
1281 tree type = TREE_TYPE (lhs);
1282 unsigned HOST_WIDE_INT bytes
1283 = compute_builtin_object_size (ptr, object_size_type);
1284 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type == 1
1285 ? -1 : 0)
1286 && wi::fits_to_tree_p (bytes, type))
1288 tree tem = make_ssa_name (type);
1289 gimple_call_set_lhs (call, tem);
1290 enum tree_code code
1291 = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
1292 tree cst = build_int_cstu (type, bytes);
1293 gimple g = gimple_build_assign (lhs, code, tem, cst);
1294 gsi_insert_after (&i, g, GSI_NEW_STMT);
1295 update_stmt (call);
1299 continue;
1302 result = fold_call_stmt (as_a <gcall *> (call), false);
1303 if (!result)
1305 tree ost = gimple_call_arg (call, 1);
1307 if (tree_fits_uhwi_p (ost))
1309 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
1311 if (object_size_type < 2)
1312 result = fold_convert (size_type_node,
1313 integer_minus_one_node);
1314 else if (object_size_type < 4)
1315 result = build_zero_cst (size_type_node);
1318 if (!result)
1319 continue;
1322 gcc_assert (TREE_CODE (result) == INTEGER_CST);
1324 if (dump_file && (dump_flags & TDF_DETAILS))
1326 fprintf (dump_file, "Simplified\n ");
1327 print_gimple_stmt (dump_file, call, 0, dump_flags);
1328 fprintf (dump_file, " to ");
1329 print_generic_expr (dump_file, result, 0);
1330 fprintf (dump_file, "\n");
1333 tree lhs = gimple_call_lhs (call);
1334 if (!lhs)
1335 continue;
1337 /* Propagate into all uses and fold those stmts. */
1338 gimple use_stmt;
1339 imm_use_iterator iter;
1340 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
1342 use_operand_p use_p;
1343 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1344 SET_USE (use_p, result);
1345 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1346 fold_stmt (&gsi);
1347 update_stmt (gsi_stmt (gsi));
1352 fini_object_sizes ();
1353 return 0;
1356 } // anon namespace
1358 gimple_opt_pass *
1359 make_pass_object_sizes (gcc::context *ctxt)
1361 return new pass_object_sizes (ctxt);