pa.h (BIGGEST_ALIGNMENT): Adjust comment.
[official-gcc.git] / gcc / tree-object-size.c
blob1317ad7ee9a768d296042806ae7afb37fccd2635
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2016 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
/* State shared by one object-size computation (one call to
   compute_builtin_object_size and its recursive helpers).  */

struct object_size_info
{
  /* The __builtin_object_size second argument (0-3): bit 1 set means
     minimum instead of maximum size, bit 0 set means subobject size.  */
  int object_size_type;
  /* SSA names seen so far / SSA names found in dependency cycles that
     must be revisited in later passes.  */
  bitmap visited, reexamine;
  /* Current pass: 0 = initial UD-chain walk, 1 = loop detection,
     2 = iterate until fixpoint.  */
  int pass;
  /* Set when a pass-2 iteration changed some size, forcing another
     iteration.  */
  bool changed;
  /* Scratch state for check_for_plus_in_loops: per-SSA-name recursion
     depth, and an explicit stack of SSA name versions with its top.  */
  unsigned int *depths;
  unsigned int *stack, *tos;
};
46 static const unsigned HOST_WIDE_INT unknown[4] = {
47 HOST_WIDE_INT_M1U,
48 HOST_WIDE_INT_M1U,
/* Forward declarations of the local helpers defined below.  */
static tree compute_object_offset (const_tree, const_tree);
static bool addr_object_size (struct object_size_info *,
			      const_tree, int, unsigned HOST_WIDE_INT *);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);
/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.
   Each vector is indexed by SSA name version.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];

/* Bitmaps of which object sizes have been computed already,
   one bitmap per object size type, indexed by SSA name version.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;
84 /* Initialize OFFSET_LIMIT variable. */
85 static void
86 init_offset_limit (void)
88 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
89 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
90 else
91 offset_limit = -1;
92 offset_limit /= 2;
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  EXPR is expected to be a handled_component_p chain
   (or MEM_REF) whose innermost base is VAR.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* Field offset = byte offset of the field plus its bit offset
	 converted to bytes.  */
      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These do not change the offset; recurse into the operand.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* The imaginary part sits one component size past the real part.  */
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  /* Negative constant index: subtract |index| * elt_size.  */
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].
   On success store the size into *PSIZE and return true; otherwise
   *PSIZE is left as unknown[object_size_type] and false is returned.  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type, unsigned HOST_WIDE_INT *psize)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  /* Strip component references to find the base object.  */
  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  /* No recursion state available (or subobject size asked for):
	     compute the base pointer's whole-object size directly.  */
	  compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
				       object_size_type & ~1, &sz);
	}
      else
	{
	  /* Reuse (or compute on demand in pass 0) the already collected
	     size of the SSA name the MEM_REF dereferences.  */
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}
      if (sz != unknown[object_size_type])
	{
	  /* Subtract the MEM_REF offset; clamp at zero, and give up if
	     the difference does not fit a uhwi.  */
	  offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
	  if (wi::neg_p (dsz))
	    sz = 0;
	  else if (wi::fits_uhwi_p (dsz))
	    sz = dsz.to_uhwi ();
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && DECL_P (pt_var)
	   && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
	   && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
	   && TREE_CODE (pt_var) == STRING_CST
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	   && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  /* Subobject size requested: find the innermost accessed
	     array/field whose size should bound the result.  */
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    /* Index is provably interior: not a trailing
			       array access, stop looking.  */
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    /* Skip through union members; they don't determine
		       whether the field is last.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			/* Is there any FIELD_DECL after this one?  If so
			   the field is not last and its size can be
			   trusted.  */
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      /* v == pt_var means the reference ended in a trailing field,
		 so fall back to the whole object.  */
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return false;
      else
	var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  /* Also bound the result by the remaining bytes of the whole
	     pointed-to object.  */
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return false;
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    {
      *psize = tree_to_uhwi (bytes);
      return true;
    }

  return false;
}
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  /* 0-based indexes of the size argument(s); -1 means not present.
     When both are set, the result is arg1 * arg2 (calloc-style).  */
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  /* Honor a user-supplied alloc_size attribute on the callee's type.
     The attribute stores 1-based argument positions.  */
  alloc_size = lookup_attribute ("alloc_size",
				 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
	arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  /* Known allocation builtins override/provide argument positions.  */
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
	arg2 = 1;
	/* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	arg1 = 0;
      default:
	break;
      }

  /* Both indexes must be in range and refer to constant arguments.  */
  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int)gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
			fold_convert (sizetype, gimple_call_arg (call, arg1)),
			fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const gcall *call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_ASSUME_ALIGNED:
	/* All of these return their first argument.  */
	if (gimple_call_num_args (call) >= 1)
	  return gimple_call_arg (call, 0);
	break;
      default:
	break;
      }

  return NULL_TREE;
}
/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  OBJECT_SIZE_TYPE is the second argument
   to __builtin_object_size.  Return true on success and false
   when the object size could not be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
			     unsigned HOST_WIDE_INT *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  if (! offset_limit)
    init_offset_limit ();

  /* A bare address of a decl/string can be resolved directly.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  /* computed[] is only allocated by init_object_sizes; when it is
     missing the full propagation machinery is unavailable.  */
  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & 1)
	return false;

      /* When not optimizing, rather than failing, make a small effort
	 to determine the object size without the full benefit of
	 the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      tree offset = gimple_assign_rhs2 (def);
	      ptr = gimple_assign_rhs1 (def);

	      if (cst_and_fits_in_hwi (offset)
		  && compute_builtin_object_size (ptr, object_size_type,
						  psize))
		{
		  /* Return zero when the offset is out of bounds.  */
		  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
		  *psize = off < *psize ? *psize - off : 0;
		  return true;
		}
	    }
	}
      return false;
    }

  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      struct object_size_info osi;
      bitmap_iterator bi;
      unsigned int i;

      if (num_ssa_names > object_sizes[object_size_type].length ())
	object_sizes[object_size_type].safe_grow (num_ssa_names);
      if (dump_file)
	{
	  fprintf (dump_file, "Computing %s %sobject size for ",
		   (object_size_type & 2) ? "minimum" : "maximum",
		   (object_size_type & 1) ? "sub" : "");
	  print_generic_expr (dump_file, ptr, dump_flags);
	  fprintf (dump_file, ":\n");
	}

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.object_size_type = object_size_type;
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that
	 can be computed.  osi.reexamine bitmap at the end will
	 contain what variables were found in dependency cycles
	 and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
	 that need reexamination, until no object sizes are
	 increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
	{
	  bitmap reexamine = BITMAP_ALLOC (NULL);

	  /* If looking for minimum instead of maximum object size,
	     detect cases where a pointer is increased in a loop.
	     Although even without this detection pass 2 would eventually
	     terminate, it could take a long time.  If a pointer is
	     increasing this way, we need to assume 0 object size.
	     E.g. p = &buf[0]; while (cond) p = p + 4;  */
	  if (object_size_type & 2)
	    {
	      osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
	      osi.stack = XNEWVEC (unsigned int, num_ssa_names);
	      osi.tos = osi.stack;
	      osi.pass = 1;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  check_for_plus_in_loops (&osi, ssa_name (i));

	      free (osi.depths);
	      osi.depths = NULL;
	      free (osi.stack);
	      osi.stack = NULL;
	      osi.tos = NULL;
	    }

	  do
	    {
	      osi.pass = 2;
	      osi.changed = false;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  {
		    collect_object_sizes_for (&osi, ssa_name (i));
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Reexamining ");
			print_generic_expr (dump_file, ssa_name (i),
					    dump_flags);
			fprintf (dump_file, "\n");
		      }
		  }
	    }
	  while (osi.changed);

	  BITMAP_FREE (reexamine);
	}
      /* Whatever is still marked for reexamination has reached its
	 fixpoint; record it as computed.  */
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
	{
	  EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
	    if (object_sizes[object_size_type][i]
		!= unknown[object_size_type])
	      {
		print_generic_expr (dump_file, ssa_name (i),
				    dump_flags);
		fprintf (dump_file,
			 ": %s %sobject size "
			 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			 (object_size_type & 2) ? "minimum" : "maximum",
			 (object_size_type & 1) ? "sub" : "",
			 object_sizes[object_size_type][i]);
	      }
	}

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
  return *psize != unknown[object_size_type];
}
/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
	      || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    addr_object_size (osi, value, object_size_type, &bytes);
  else
    bytes = unknown[object_size_type];

  /* Merge: maximum-size types (bit 2 clear) take the larger value,
     minimum-size types take the smaller.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}
/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  /* The allocation size (or unknown) determined from the call.  */
  bytes = alloc_object_size (call, object_size_type);

  /* Merge: maximum-size types take the larger value, minimum-size
     types the smaller.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}
/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  /* Merge the "unknown" value in with the same min/max rule used by
     the other *_object_size helpers.  */
  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  /* Offsets past the limit are treated as pointer subtraction, for
     which no size can be claimed.  */
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    /* Clamp at zero when the offset exceeds the remaining bytes.  */
    orig_bytes = (offset > orig_bytes)
		 ? HOST_WIDE_INT_0U : orig_bytes - offset;

  /* Merge with the min/max rule; note osi->changed so pass 2 knows to
     iterate again.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      /* &MEM_REF[op0 + op1] is treated like op0 p+ op1.  */
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
	bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
	return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  */
	  addr_object_size (osi, op0, object_size_type, &bytes);
	  if (bytes == unknown[object_size_type])
	    ;
	  else if (off > offset_limit)
	    bytes = unknown[object_size_type];
	  else if (off > bytes)
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown[object_size_type];

  /* Merge with the min/max rule.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
882 /* Compute object_sizes for VAR, defined at STMT, which is
883 a COND_EXPR. Return true if the object size might need reexamination
884 later. */
886 static bool
887 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
889 tree then_, else_;
890 int object_size_type = osi->object_size_type;
891 unsigned int varno = SSA_NAME_VERSION (var);
892 bool reexamine = false;
894 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
896 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
897 return false;
899 then_ = gimple_assign_rhs2 (stmt);
900 else_ = gimple_assign_rhs3 (stmt);
902 if (TREE_CODE (then_) == SSA_NAME)
903 reexamine |= merge_object_sizes (osi, var, then_, 0);
904 else
905 expr_object_size (osi, var, then_);
907 if (TREE_CODE (else_) == SSA_NAME)
908 reexamine |= merge_object_sizes (osi, var, else_, 0);
909 else
910 expr_object_size (osi, var, else_);
912 return reexamine;
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* First visit: seed with the most optimistic value (all ones
	     for minimum sizes, zero for maximum sizes); merging only
	     moves it towards unknown.  */
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  /* Dispatch on the kind of defining statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs, 0);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg, 0);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, call_stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
	/* Uninitialized SSA names point nowhere.  */
	object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    /* Once unknown, further arguments cannot change anything.  */
	    if (object_sizes[object_size_type][varno]
		== unknown[object_size_type])
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      /* Final answer: record it and drop any reexamination request.  */
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  DEPTH counts the pointer additions seen on the
   current use-def path; reaching VAR again at a different depth means
   the pointer grows around the loop.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  Everything on the
	     stack down to VAR is part of the cycle: force its minimum
	     object size to 0 and mark it computed.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  /* Push VAR and recurse into its defining statement's operands.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* Only a nonzero addition advances the pointer and hence
	       increases the depth.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR on the way out.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
1177 /* Check if some pointer we are computing object size of is being increased
1178 within a loop. If yes, assume all the SSA variables participating in
1179 that loop have minimum object sizes 0. */
1181 static void
1182 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1184 gimple *stmt = SSA_NAME_DEF_STMT (var);
1186 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1187 and looked for a POINTER_PLUS_EXPR in the pass-through
1188 argument, if any. In GIMPLE, however, such an expression
1189 is not a valid call operand. */
1191 if (is_gimple_assign (stmt)
1192 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1194 tree basevar = gimple_assign_rhs1 (stmt);
1195 tree cst = gimple_assign_rhs2 (stmt);
1197 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1199 if (integer_zerop (cst))
1200 return;
1202 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1203 *osi->tos++ = SSA_NAME_VERSION (basevar);
1204 check_for_plus_in_loops_1 (osi, var, 2);
1205 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1206 osi->tos--;
1211 /* Initialize data structures for the object size computation. */
1213 void
1214 init_object_sizes (void)
1216 int object_size_type;
1218 if (computed[0])
1219 return;
1221 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1223 object_sizes[object_size_type].safe_grow (num_ssa_names);
1224 computed[object_size_type] = BITMAP_ALLOC (NULL);
1227 init_offset_limit ();
1231 /* Destroy data structures after the object size computation. */
1233 static void
1234 fini_object_sizes (void)
1236 int object_size_type;
1238 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1240 object_sizes[object_size_type].release ();
1241 BITMAP_FREE (computed[object_size_type]);
/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

/* Static metadata for the objsz pass: a GIMPLE pass requiring CFG and
   SSA form, providing/destroying no properties and scheduling no
   TODOs.  */
const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Pass implementation.  The pass runs in two flavors selected via
   set_pass_param: the early instance (insert_min_max_p set) bounds
   __builtin_object_size results with MIN/MAX_EXPRs instead of folding
   them outright; the late instance folds the calls to constants.  */
class pass_object_sizes : public gimple_opt_pass
{
 public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  /* Single boolean pass parameter (N must be 0) selecting the
     MIN/MAX-inserting flavor.  */
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }
  virtual unsigned int execute (function *);

 private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes
1284 /* Dummy valueize function. */
1286 static tree
1287 do_valueize (tree t)
1289 return t;
/* Main pass entry point: walk every statement of FUN, and for each
   __builtin_object_size call either bound its result with a
   MIN/MAX_EXPR (early instance, insert_min_max_p) or fold it to a
   constant and propagate that into all uses (late instance).
   Always returns 0 (no extra TODO flags).  */

unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  gimple *call = gsi_stmt (i);
	  if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  /* Lazily allocated; freed once at the end of the pass.  */
	  init_object_sizes ();

	  /* If insert_min_max_p, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
	     and rather than folding the builtin to the constant if any,
	     create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
	     call result and the computed constant.  */
	  if (insert_min_max_p)
	    {
	      tree ost = gimple_call_arg (call, 1);
	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
		  tree ptr = gimple_call_arg (call, 0);
		  tree lhs = gimple_call_lhs (call);
		  /* Types 1 and 3 are the "minimum" variants; only they
		     can be safely tightened with MIN/MAX here.  */
		  if ((object_size_type == 1 || object_size_type == 3)
		      && (TREE_CODE (ptr) == ADDR_EXPR
			  || TREE_CODE (ptr) == SSA_NAME)
		      && lhs)
		    {
		      tree type = TREE_TYPE (lhs);
		      unsigned HOST_WIDE_INT bytes;
		      if (compute_builtin_object_size (ptr, object_size_type,
						       &bytes)
			  && wi::fits_to_tree_p (bytes, type))
			{
			  /* Redirect the call's result into a fresh SSA
			     name and assign the original LHS from
			     MIN/MAX (call result, computed constant).  */
			  tree tem = make_ssa_name (type);
			  gimple_call_set_lhs (call, tem);
			  enum tree_code code
			    = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
			  tree cst = build_int_cstu (type, bytes);
			  gimple *g
			    = gimple_build_assign (lhs, code, tem, cst);
			  gsi_insert_after (&i, g, GSI_NEW_STMT);
			  update_stmt (call);
			}
		    }
		}
	      continue;
	    }

	  /* Late instance: fold the call away entirely.  A call whose
	     result is unused needs no folding.  */
	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      tree ost = gimple_call_arg (call, 1);

	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  /* Unknown size: types 0/1 ("maximum") fold to
		     (size_t)-1, types 2/3 ("minimum") fold to 0.  */
		  if (object_size_type < 2)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		  else if (object_size_type < 4)
		    result = build_zero_cst (size_type_node);
		}

	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  replace_uses_by (lhs, result);
	}
    }

  fini_object_sizes ();
  return 0;
}
1391 } // anon namespace
/* Factory function used by the pass manager to instantiate the objsz
   pass in context CTXT; caller owns the returned pass object.  */

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}