S/390: Testsuite: Add asm scan patterns for -m31.
[official-gcc.git] / gcc / tree-object-size.c
blob 459e65d40a6cb73c5a94b5993674b1d3529e96b7
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
36 struct object_size_info
38 int object_size_type;
39 bitmap visited, reexamine;
40 int pass;
41 bool changed;
42 unsigned int *depths;
43 unsigned int *stack, *tos;
46 static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
48 static tree compute_object_offset (const_tree, const_tree);
49 static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
50 const_tree, int);
51 static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
52 static tree pass_through_call (const gcall *);
53 static void collect_object_sizes_for (struct object_size_info *, tree);
54 static void expr_object_size (struct object_size_info *, tree, tree);
55 static bool merge_object_sizes (struct object_size_info *, tree, tree,
56 unsigned HOST_WIDE_INT);
57 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
58 static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
59 static void init_offset_limit (void);
60 static void check_for_plus_in_loops (struct object_size_info *, tree);
61 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
62 unsigned int);
64 /* object_sizes[0] is upper bound for number of bytes till the end of
65 the object.
66 object_sizes[1] is upper bound for number of bytes till the end of
67 the subobject (innermost array or field with address taken).
68 object_sizes[2] is lower bound for number of bytes till the end of
69 the object and object_sizes[3] lower bound for subobject. */
70 static vec<unsigned HOST_WIDE_INT> object_sizes[4];
72 /* Bitmaps what object sizes have been computed already. */
73 static bitmap computed[4];
75 /* Maximum value of offset we consider to be addition. */
76 static unsigned HOST_WIDE_INT offset_limit;
79 /* Initialize OFFSET_LIMIT variable. */
80 static void
81 init_offset_limit (void)
83 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
84 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
85 else
86 offset_limit = -1;
87 offset_limit /= 2;
91 /* Compute offset of EXPR within VAR. Return error_mark_node
92 if unknown. */
94 static tree
95 compute_object_offset (const_tree expr, const_tree var)
97 enum tree_code code = PLUS_EXPR;
98 tree base, off, t;
100 if (expr == var)
101 return size_zero_node;
103 switch (TREE_CODE (expr))
105 case COMPONENT_REF:
106 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
107 if (base == error_mark_node)
108 return base;
110 t = TREE_OPERAND (expr, 1);
111 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
112 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
113 / BITS_PER_UNIT));
114 break;
116 case REALPART_EXPR:
117 CASE_CONVERT:
118 case VIEW_CONVERT_EXPR:
119 case NON_LVALUE_EXPR:
120 return compute_object_offset (TREE_OPERAND (expr, 0), var);
122 case IMAGPART_EXPR:
123 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
124 if (base == error_mark_node)
125 return base;
127 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
128 break;
130 case ARRAY_REF:
131 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
132 if (base == error_mark_node)
133 return base;
135 t = TREE_OPERAND (expr, 1);
136 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
138 code = MINUS_EXPR;
139 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
141 t = fold_convert (sizetype, t);
142 off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
143 break;
145 case MEM_REF:
146 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
147 return wide_int_to_tree (sizetype, mem_ref_offset (expr));
149 default:
150 return error_mark_node;
153 return size_binop (code, base, off);
157 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
158 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
159 If unknown, return unknown[object_size_type]. */
161 static unsigned HOST_WIDE_INT
162 addr_object_size (struct object_size_info *osi, const_tree ptr,
163 int object_size_type)
165 tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;
167 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
169 pt_var = TREE_OPERAND (ptr, 0);
170 while (handled_component_p (pt_var))
171 pt_var = TREE_OPERAND (pt_var, 0);
173 if (pt_var
174 && TREE_CODE (pt_var) == MEM_REF)
176 unsigned HOST_WIDE_INT sz;
178 if (!osi || (object_size_type & 1) != 0
179 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
181 sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
182 object_size_type & ~1);
184 else
186 tree var = TREE_OPERAND (pt_var, 0);
187 if (osi->pass == 0)
188 collect_object_sizes_for (osi, var);
189 if (bitmap_bit_p (computed[object_size_type],
190 SSA_NAME_VERSION (var)))
191 sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
192 else
193 sz = unknown[object_size_type];
195 if (sz != unknown[object_size_type])
197 offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
198 if (wi::neg_p (dsz))
199 sz = 0;
200 else if (wi::fits_uhwi_p (dsz))
201 sz = dsz.to_uhwi ();
202 else
203 sz = unknown[object_size_type];
206 if (sz != unknown[object_size_type] && sz < offset_limit)
207 pt_var_size = size_int (sz);
209 else if (pt_var
210 && DECL_P (pt_var)
211 && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
212 && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
213 pt_var_size = DECL_SIZE_UNIT (pt_var);
214 else if (pt_var
215 && TREE_CODE (pt_var) == STRING_CST
216 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
217 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
218 && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
219 < offset_limit)
220 pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
221 else
222 return unknown[object_size_type];
224 if (pt_var != TREE_OPERAND (ptr, 0))
226 tree var;
228 if (object_size_type & 1)
230 var = TREE_OPERAND (ptr, 0);
232 while (var != pt_var
233 && TREE_CODE (var) != BIT_FIELD_REF
234 && TREE_CODE (var) != COMPONENT_REF
235 && TREE_CODE (var) != ARRAY_REF
236 && TREE_CODE (var) != ARRAY_RANGE_REF
237 && TREE_CODE (var) != REALPART_EXPR
238 && TREE_CODE (var) != IMAGPART_EXPR)
239 var = TREE_OPERAND (var, 0);
240 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
241 var = TREE_OPERAND (var, 0);
242 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
243 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
244 || (pt_var_size
245 && tree_int_cst_lt (pt_var_size,
246 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
247 var = pt_var;
248 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
250 tree v = var;
251 /* For &X->fld, compute object size only if fld isn't the last
252 field, as struct { int i; char c[1]; } is often used instead
253 of flexible array member. */
254 while (v && v != pt_var)
255 switch (TREE_CODE (v))
257 case ARRAY_REF:
258 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
259 && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
261 tree domain
262 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
263 if (domain
264 && TYPE_MAX_VALUE (domain)
265 && TREE_CODE (TYPE_MAX_VALUE (domain))
266 == INTEGER_CST
267 && tree_int_cst_lt (TREE_OPERAND (v, 1),
268 TYPE_MAX_VALUE (domain)))
270 v = NULL_TREE;
271 break;
274 v = TREE_OPERAND (v, 0);
275 break;
276 case REALPART_EXPR:
277 case IMAGPART_EXPR:
278 v = NULL_TREE;
279 break;
280 case COMPONENT_REF:
281 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
283 v = NULL_TREE;
284 break;
286 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
287 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
288 != UNION_TYPE
289 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
290 != QUAL_UNION_TYPE)
291 break;
292 else
293 v = TREE_OPERAND (v, 0);
294 if (TREE_CODE (v) == COMPONENT_REF
295 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
296 == RECORD_TYPE)
298 tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
299 for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
300 if (TREE_CODE (fld_chain) == FIELD_DECL)
301 break;
303 if (fld_chain)
305 v = NULL_TREE;
306 break;
308 v = TREE_OPERAND (v, 0);
310 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
311 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
312 != UNION_TYPE
313 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
314 != QUAL_UNION_TYPE)
315 break;
316 else
317 v = TREE_OPERAND (v, 0);
318 if (v != pt_var)
319 v = NULL_TREE;
320 else
321 v = pt_var;
322 break;
323 default:
324 v = pt_var;
325 break;
327 if (v == pt_var)
328 var = pt_var;
331 else
332 var = pt_var;
334 if (var != pt_var)
335 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
336 else if (!pt_var_size)
337 return unknown[object_size_type];
338 else
339 var_size = pt_var_size;
340 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
341 if (bytes != error_mark_node)
343 if (TREE_CODE (bytes) == INTEGER_CST
344 && tree_int_cst_lt (var_size, bytes))
345 bytes = size_zero_node;
346 else
347 bytes = size_binop (MINUS_EXPR, var_size, bytes);
349 if (var != pt_var
350 && pt_var_size
351 && TREE_CODE (pt_var) == MEM_REF
352 && bytes != error_mark_node)
354 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
355 if (bytes2 != error_mark_node)
357 if (TREE_CODE (bytes2) == INTEGER_CST
358 && tree_int_cst_lt (pt_var_size, bytes2))
359 bytes2 = size_zero_node;
360 else
361 bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
362 bytes = size_binop (MIN_EXPR, bytes, bytes2);
366 else if (!pt_var_size)
367 return unknown[object_size_type];
368 else
369 bytes = pt_var_size;
371 if (tree_fits_uhwi_p (bytes))
372 return tree_to_uhwi (bytes);
374 return unknown[object_size_type];
378 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
379 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
380 argument from __builtin_object_size. If unknown, return
381 unknown[object_size_type]. */
383 static unsigned HOST_WIDE_INT
384 alloc_object_size (const gcall *call, int object_size_type)
386 tree callee, bytes = NULL_TREE;
387 tree alloc_size;
388 int arg1 = -1, arg2 = -1;
390 gcc_assert (is_gimple_call (call));
392 callee = gimple_call_fndecl (call);
393 if (!callee)
394 return unknown[object_size_type];
396 alloc_size = lookup_attribute ("alloc_size",
397 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
398 if (alloc_size && TREE_VALUE (alloc_size))
400 tree p = TREE_VALUE (alloc_size);
402 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
403 if (TREE_CHAIN (p))
404 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
407 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
408 switch (DECL_FUNCTION_CODE (callee))
410 case BUILT_IN_CALLOC:
411 arg2 = 1;
412 /* fall through */
413 case BUILT_IN_MALLOC:
414 case BUILT_IN_ALLOCA:
415 case BUILT_IN_ALLOCA_WITH_ALIGN:
416 arg1 = 0;
417 default:
418 break;
421 if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
422 || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
423 || (arg2 >= 0
424 && (arg2 >= (int)gimple_call_num_args (call)
425 || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
426 return unknown[object_size_type];
428 if (arg2 >= 0)
429 bytes = size_binop (MULT_EXPR,
430 fold_convert (sizetype, gimple_call_arg (call, arg1)),
431 fold_convert (sizetype, gimple_call_arg (call, arg2)));
432 else if (arg1 >= 0)
433 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
435 if (bytes && tree_fits_uhwi_p (bytes))
436 return tree_to_uhwi (bytes);
438 return unknown[object_size_type];
442 /* If object size is propagated from one of function's arguments directly
443 to its return value, return that argument for GIMPLE_CALL statement CALL.
444 Otherwise return NULL. */
446 static tree
447 pass_through_call (const gcall *call)
449 tree callee = gimple_call_fndecl (call);
451 if (callee
452 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
453 switch (DECL_FUNCTION_CODE (callee))
455 case BUILT_IN_MEMCPY:
456 case BUILT_IN_MEMMOVE:
457 case BUILT_IN_MEMSET:
458 case BUILT_IN_STRCPY:
459 case BUILT_IN_STRNCPY:
460 case BUILT_IN_STRCAT:
461 case BUILT_IN_STRNCAT:
462 case BUILT_IN_MEMCPY_CHK:
463 case BUILT_IN_MEMMOVE_CHK:
464 case BUILT_IN_MEMSET_CHK:
465 case BUILT_IN_STRCPY_CHK:
466 case BUILT_IN_STRNCPY_CHK:
467 case BUILT_IN_STPNCPY_CHK:
468 case BUILT_IN_STRCAT_CHK:
469 case BUILT_IN_STRNCAT_CHK:
470 case BUILT_IN_ASSUME_ALIGNED:
471 if (gimple_call_num_args (call) >= 1)
472 return gimple_call_arg (call, 0);
473 break;
474 default:
475 break;
478 return NULL_TREE;
482 /* Compute __builtin_object_size value for PTR. OBJECT_SIZE_TYPE is the
483 second argument from __builtin_object_size. */
485 unsigned HOST_WIDE_INT
486 compute_builtin_object_size (tree ptr, int object_size_type)
488 gcc_assert (object_size_type >= 0 && object_size_type <= 3);
490 if (! offset_limit)
491 init_offset_limit ();
493 if (TREE_CODE (ptr) == ADDR_EXPR)
494 return addr_object_size (NULL, ptr, object_size_type);
496 if (TREE_CODE (ptr) == SSA_NAME
497 && POINTER_TYPE_P (TREE_TYPE (ptr))
498 && computed[object_size_type] != NULL)
500 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
502 struct object_size_info osi;
503 bitmap_iterator bi;
504 unsigned int i;
506 if (num_ssa_names > object_sizes[object_size_type].length ())
507 object_sizes[object_size_type].safe_grow (num_ssa_names);
508 if (dump_file)
510 fprintf (dump_file, "Computing %s %sobject size for ",
511 (object_size_type & 2) ? "minimum" : "maximum",
512 (object_size_type & 1) ? "sub" : "");
513 print_generic_expr (dump_file, ptr, dump_flags);
514 fprintf (dump_file, ":\n");
517 osi.visited = BITMAP_ALLOC (NULL);
518 osi.reexamine = BITMAP_ALLOC (NULL);
519 osi.object_size_type = object_size_type;
520 osi.depths = NULL;
521 osi.stack = NULL;
522 osi.tos = NULL;
524 /* First pass: walk UD chains, compute object sizes that
525 can be computed. osi.reexamine bitmap at the end will
526 contain what variables were found in dependency cycles
527 and therefore need to be reexamined. */
528 osi.pass = 0;
529 osi.changed = false;
530 collect_object_sizes_for (&osi, ptr);
532 /* Second pass: keep recomputing object sizes of variables
533 that need reexamination, until no object sizes are
534 increased or all object sizes are computed. */
535 if (! bitmap_empty_p (osi.reexamine))
537 bitmap reexamine = BITMAP_ALLOC (NULL);
539 /* If looking for minimum instead of maximum object size,
540 detect cases where a pointer is increased in a loop.
541 Although even without this detection pass 2 would eventually
542 terminate, it could take a long time. If a pointer is
543 increasing this way, we need to assume 0 object size.
544 E.g. p = &buf[0]; while (cond) p = p + 4; */
545 if (object_size_type & 2)
547 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
548 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
549 osi.tos = osi.stack;
550 osi.pass = 1;
551 /* collect_object_sizes_for is changing
552 osi.reexamine bitmap, so iterate over a copy. */
553 bitmap_copy (reexamine, osi.reexamine);
554 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
555 if (bitmap_bit_p (osi.reexamine, i))
556 check_for_plus_in_loops (&osi, ssa_name (i));
558 free (osi.depths);
559 osi.depths = NULL;
560 free (osi.stack);
561 osi.stack = NULL;
562 osi.tos = NULL;
567 osi.pass = 2;
568 osi.changed = false;
569 /* collect_object_sizes_for is changing
570 osi.reexamine bitmap, so iterate over a copy. */
571 bitmap_copy (reexamine, osi.reexamine);
572 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
573 if (bitmap_bit_p (osi.reexamine, i))
575 collect_object_sizes_for (&osi, ssa_name (i));
576 if (dump_file && (dump_flags & TDF_DETAILS))
578 fprintf (dump_file, "Reexamining ");
579 print_generic_expr (dump_file, ssa_name (i),
580 dump_flags);
581 fprintf (dump_file, "\n");
585 while (osi.changed);
587 BITMAP_FREE (reexamine);
589 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
590 bitmap_set_bit (computed[object_size_type], i);
592 /* Debugging dumps. */
593 if (dump_file)
595 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
596 if (object_sizes[object_size_type][i]
597 != unknown[object_size_type])
599 print_generic_expr (dump_file, ssa_name (i),
600 dump_flags);
601 fprintf (dump_file,
602 ": %s %sobject size "
603 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
604 (object_size_type & 2) ? "minimum" : "maximum",
605 (object_size_type & 1) ? "sub" : "",
606 object_sizes[object_size_type][i]);
610 BITMAP_FREE (osi.reexamine);
611 BITMAP_FREE (osi.visited);
614 return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
617 return unknown[object_size_type];
620 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
622 static void
623 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
625 int object_size_type = osi->object_size_type;
626 unsigned int varno = SSA_NAME_VERSION (ptr);
627 unsigned HOST_WIDE_INT bytes;
629 gcc_assert (object_sizes[object_size_type][varno]
630 != unknown[object_size_type]);
631 gcc_assert (osi->pass == 0);
633 if (TREE_CODE (value) == WITH_SIZE_EXPR)
634 value = TREE_OPERAND (value, 0);
636 /* Pointer variables should have been handled by merge_object_sizes. */
637 gcc_assert (TREE_CODE (value) != SSA_NAME
638 || !POINTER_TYPE_P (TREE_TYPE (value)));
640 if (TREE_CODE (value) == ADDR_EXPR)
641 bytes = addr_object_size (osi, value, object_size_type);
642 else
643 bytes = unknown[object_size_type];
645 if ((object_size_type & 2) == 0)
647 if (object_sizes[object_size_type][varno] < bytes)
648 object_sizes[object_size_type][varno] = bytes;
650 else
652 if (object_sizes[object_size_type][varno] > bytes)
653 object_sizes[object_size_type][varno] = bytes;
658 /* Compute object_sizes for PTR, defined to the result of a call. */
660 static void
661 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
663 int object_size_type = osi->object_size_type;
664 unsigned int varno = SSA_NAME_VERSION (ptr);
665 unsigned HOST_WIDE_INT bytes;
667 gcc_assert (is_gimple_call (call));
669 gcc_assert (object_sizes[object_size_type][varno]
670 != unknown[object_size_type]);
671 gcc_assert (osi->pass == 0);
673 bytes = alloc_object_size (call, object_size_type);
675 if ((object_size_type & 2) == 0)
677 if (object_sizes[object_size_type][varno] < bytes)
678 object_sizes[object_size_type][varno] = bytes;
680 else
682 if (object_sizes[object_size_type][varno] > bytes)
683 object_sizes[object_size_type][varno] = bytes;
688 /* Compute object_sizes for PTR, defined to an unknown value. */
690 static void
691 unknown_object_size (struct object_size_info *osi, tree ptr)
693 int object_size_type = osi->object_size_type;
694 unsigned int varno = SSA_NAME_VERSION (ptr);
695 unsigned HOST_WIDE_INT bytes;
697 gcc_assert (object_sizes[object_size_type][varno]
698 != unknown[object_size_type]);
699 gcc_assert (osi->pass == 0);
701 bytes = unknown[object_size_type];
703 if ((object_size_type & 2) == 0)
705 if (object_sizes[object_size_type][varno] < bytes)
706 object_sizes[object_size_type][varno] = bytes;
708 else
710 if (object_sizes[object_size_type][varno] > bytes)
711 object_sizes[object_size_type][varno] = bytes;
716 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
717 the object size might need reexamination later. */
719 static bool
720 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
721 unsigned HOST_WIDE_INT offset)
723 int object_size_type = osi->object_size_type;
724 unsigned int varno = SSA_NAME_VERSION (dest);
725 unsigned HOST_WIDE_INT orig_bytes;
727 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
728 return false;
729 if (offset >= offset_limit)
731 object_sizes[object_size_type][varno] = unknown[object_size_type];
732 return false;
735 if (osi->pass == 0)
736 collect_object_sizes_for (osi, orig);
738 orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
739 if (orig_bytes != unknown[object_size_type])
740 orig_bytes = (offset > orig_bytes)
741 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;
743 if ((object_size_type & 2) == 0)
745 if (object_sizes[object_size_type][varno] < orig_bytes)
747 object_sizes[object_size_type][varno] = orig_bytes;
748 osi->changed = true;
751 else
753 if (object_sizes[object_size_type][varno] > orig_bytes)
755 object_sizes[object_size_type][varno] = orig_bytes;
756 osi->changed = true;
759 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
763 /* Compute object_sizes for VAR, defined to the result of an assignment
764 with operator POINTER_PLUS_EXPR. Return true if the object size might
765 need reexamination later. */
767 static bool
768 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
770 int object_size_type = osi->object_size_type;
771 unsigned int varno = SSA_NAME_VERSION (var);
772 unsigned HOST_WIDE_INT bytes;
773 tree op0, op1;
775 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
777 op0 = gimple_assign_rhs1 (stmt);
778 op1 = gimple_assign_rhs2 (stmt);
780 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
782 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
783 gcc_assert (TREE_CODE (rhs) == MEM_REF);
784 op0 = TREE_OPERAND (rhs, 0);
785 op1 = TREE_OPERAND (rhs, 1);
787 else
788 gcc_unreachable ();
790 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
791 return false;
793 /* Handle PTR + OFFSET here. */
794 if (TREE_CODE (op1) == INTEGER_CST
795 && (TREE_CODE (op0) == SSA_NAME
796 || TREE_CODE (op0) == ADDR_EXPR))
798 if (! tree_fits_uhwi_p (op1))
799 bytes = unknown[object_size_type];
800 else if (TREE_CODE (op0) == SSA_NAME)
801 return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
802 else
804 unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);
806 /* op0 will be ADDR_EXPR here. */
807 bytes = addr_object_size (osi, op0, object_size_type);
808 if (bytes == unknown[object_size_type])
810 else if (off > offset_limit)
811 bytes = unknown[object_size_type];
812 else if (off > bytes)
813 bytes = 0;
814 else
815 bytes -= off;
818 else
819 bytes = unknown[object_size_type];
821 if ((object_size_type & 2) == 0)
823 if (object_sizes[object_size_type][varno] < bytes)
824 object_sizes[object_size_type][varno] = bytes;
826 else
828 if (object_sizes[object_size_type][varno] > bytes)
829 object_sizes[object_size_type][varno] = bytes;
831 return false;
835 /* Compute object_sizes for VAR, defined at STMT, which is
836 a COND_EXPR. Return true if the object size might need reexamination
837 later. */
839 static bool
840 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
842 tree then_, else_;
843 int object_size_type = osi->object_size_type;
844 unsigned int varno = SSA_NAME_VERSION (var);
845 bool reexamine = false;
847 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
849 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
850 return false;
852 then_ = gimple_assign_rhs2 (stmt);
853 else_ = gimple_assign_rhs3 (stmt);
855 if (TREE_CODE (then_) == SSA_NAME)
856 reexamine |= merge_object_sizes (osi, var, then_, 0);
857 else
858 expr_object_size (osi, var, then_);
860 if (TREE_CODE (else_) == SSA_NAME)
861 reexamine |= merge_object_sizes (osi, var, else_, 0);
862 else
863 expr_object_size (osi, var, else_);
865 return reexamine;
868 /* Compute object sizes for VAR.
869 For ADDR_EXPR an object size is the number of remaining bytes
870 to the end of the object (where what is considered an object depends on
871 OSI->object_size_type).
872 For allocation GIMPLE_CALL like malloc or calloc object size is the size
873 of the allocation.
874 For POINTER_PLUS_EXPR where second operand is a constant integer,
875 object size is object size of the first operand minus the constant.
876 If the constant is bigger than the number of remaining bytes until the
877 end of the object, object size is 0, but if it is instead a pointer
878 subtraction, object size is unknown[object_size_type].
879 To differentiate addition from subtraction, ADDR_EXPR returns
880 unknown[object_size_type] for all objects bigger than half of the address
881 space, and constants less than half of the address space are considered
882 addition, while bigger constants subtraction.
883 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
884 object size is object size of that argument.
885 Otherwise, object size is the maximum of object sizes of variables
886 that it might be set to. */
888 static void
889 collect_object_sizes_for (struct object_size_info *osi, tree var)
891 int object_size_type = osi->object_size_type;
892 unsigned int varno = SSA_NAME_VERSION (var);
893 gimple *stmt;
894 bool reexamine;
896 if (bitmap_bit_p (computed[object_size_type], varno))
897 return;
899 if (osi->pass == 0)
901 if (bitmap_set_bit (osi->visited, varno))
903 object_sizes[object_size_type][varno]
904 = (object_size_type & 2) ? -1 : 0;
906 else
908 /* Found a dependency loop. Mark the variable for later
909 re-examination. */
910 bitmap_set_bit (osi->reexamine, varno);
911 if (dump_file && (dump_flags & TDF_DETAILS))
913 fprintf (dump_file, "Found a dependency loop at ");
914 print_generic_expr (dump_file, var, dump_flags);
915 fprintf (dump_file, "\n");
917 return;
921 if (dump_file && (dump_flags & TDF_DETAILS))
923 fprintf (dump_file, "Visiting use-def links for ");
924 print_generic_expr (dump_file, var, dump_flags);
925 fprintf (dump_file, "\n");
928 stmt = SSA_NAME_DEF_STMT (var);
929 reexamine = false;
931 switch (gimple_code (stmt))
933 case GIMPLE_ASSIGN:
935 tree rhs = gimple_assign_rhs1 (stmt);
936 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
937 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
938 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
939 reexamine = plus_stmt_object_size (osi, var, stmt);
940 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
941 reexamine = cond_expr_object_size (osi, var, stmt);
942 else if (gimple_assign_single_p (stmt)
943 || gimple_assign_unary_nop_p (stmt))
945 if (TREE_CODE (rhs) == SSA_NAME
946 && POINTER_TYPE_P (TREE_TYPE (rhs)))
947 reexamine = merge_object_sizes (osi, var, rhs, 0);
948 else
949 expr_object_size (osi, var, rhs);
951 else
952 unknown_object_size (osi, var);
953 break;
956 case GIMPLE_CALL:
958 gcall *call_stmt = as_a <gcall *> (stmt);
959 tree arg = pass_through_call (call_stmt);
960 if (arg)
962 if (TREE_CODE (arg) == SSA_NAME
963 && POINTER_TYPE_P (TREE_TYPE (arg)))
964 reexamine = merge_object_sizes (osi, var, arg, 0);
965 else
966 expr_object_size (osi, var, arg);
968 else
969 call_object_size (osi, var, call_stmt);
970 break;
973 case GIMPLE_ASM:
974 /* Pointers defined by __asm__ statements can point anywhere. */
975 object_sizes[object_size_type][varno] = unknown[object_size_type];
976 break;
978 case GIMPLE_NOP:
979 if (SSA_NAME_VAR (var)
980 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
981 expr_object_size (osi, var, SSA_NAME_VAR (var));
982 else
983 /* Uninitialized SSA names point nowhere. */
984 object_sizes[object_size_type][varno] = unknown[object_size_type];
985 break;
987 case GIMPLE_PHI:
989 unsigned i;
991 for (i = 0; i < gimple_phi_num_args (stmt); i++)
993 tree rhs = gimple_phi_arg (stmt, i)->def;
995 if (object_sizes[object_size_type][varno]
996 == unknown[object_size_type])
997 break;
999 if (TREE_CODE (rhs) == SSA_NAME)
1000 reexamine |= merge_object_sizes (osi, var, rhs, 0);
1001 else if (osi->pass == 0)
1002 expr_object_size (osi, var, rhs);
1004 break;
1007 default:
1008 gcc_unreachable ();
1011 if (! reexamine
1012 || object_sizes[object_size_type][varno] == unknown[object_size_type])
1014 bitmap_set_bit (computed[object_size_type], varno);
1015 bitmap_clear_bit (osi->reexamine, varno);
1017 else
1019 bitmap_set_bit (osi->reexamine, varno);
1020 if (dump_file && (dump_flags & TDF_DETAILS))
1022 fprintf (dump_file, "Need to reexamine ");
1023 print_generic_expr (dump_file, var, dump_flags);
1024 fprintf (dump_file, "\n");
1030 /* Helper function for check_for_plus_in_loops. Called recursively
1031 to detect loops. */
1033 static void
1034 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1035 unsigned int depth)
1037 gimple *stmt = SSA_NAME_DEF_STMT (var);
1038 unsigned int varno = SSA_NAME_VERSION (var);
1040 if (osi->depths[varno])
1042 if (osi->depths[varno] != depth)
1044 unsigned int *sp;
1046 /* Found a loop involving pointer addition. */
1047 for (sp = osi->tos; sp > osi->stack; )
1049 --sp;
1050 bitmap_clear_bit (osi->reexamine, *sp);
1051 bitmap_set_bit (computed[osi->object_size_type], *sp);
1052 object_sizes[osi->object_size_type][*sp] = 0;
1053 if (*sp == varno)
1054 break;
1057 return;
1059 else if (! bitmap_bit_p (osi->reexamine, varno))
1060 return;
1062 osi->depths[varno] = depth;
1063 *osi->tos++ = varno;
1065 switch (gimple_code (stmt))
1068 case GIMPLE_ASSIGN:
1070 if ((gimple_assign_single_p (stmt)
1071 || gimple_assign_unary_nop_p (stmt))
1072 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1074 tree rhs = gimple_assign_rhs1 (stmt);
1076 check_for_plus_in_loops_1 (osi, rhs, depth);
1078 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1080 tree basevar = gimple_assign_rhs1 (stmt);
1081 tree cst = gimple_assign_rhs2 (stmt);
1083 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1085 check_for_plus_in_loops_1 (osi, basevar,
1086 depth + !integer_zerop (cst));
1088 else
1089 gcc_unreachable ();
1090 break;
1093 case GIMPLE_CALL:
1095 gcall *call_stmt = as_a <gcall *> (stmt);
1096 tree arg = pass_through_call (call_stmt);
1097 if (arg)
1099 if (TREE_CODE (arg) == SSA_NAME)
1100 check_for_plus_in_loops_1 (osi, arg, depth);
1101 else
1102 gcc_unreachable ();
1104 break;
1107 case GIMPLE_PHI:
1109 unsigned i;
1111 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1113 tree rhs = gimple_phi_arg (stmt, i)->def;
1115 if (TREE_CODE (rhs) == SSA_NAME)
1116 check_for_plus_in_loops_1 (osi, rhs, depth);
1118 break;
1121 default:
1122 gcc_unreachable ();
1125 osi->depths[varno] = 0;
1126 osi->tos--;
1130 /* Check if some pointer we are computing object size of is being increased
1131 within a loop. If yes, assume all the SSA variables participating in
1132 that loop have minimum object sizes 0. */
1134 static void
1135 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1137 gimple *stmt = SSA_NAME_DEF_STMT (var);
1139 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1140 and looked for a POINTER_PLUS_EXPR in the pass-through
1141 argument, if any. In GIMPLE, however, such an expression
1142 is not a valid call operand. */
1144 if (is_gimple_assign (stmt)
1145 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1147 tree basevar = gimple_assign_rhs1 (stmt);
1148 tree cst = gimple_assign_rhs2 (stmt);
1150 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1152 if (integer_zerop (cst))
1153 return;
1155 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1156 *osi->tos++ = SSA_NAME_VERSION (basevar);
1157 check_for_plus_in_loops_1 (osi, var, 2);
1158 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1159 osi->tos--;
1164 /* Initialize data structures for the object size computation. */
1166 void
1167 init_object_sizes (void)
1169 int object_size_type;
1171 if (computed[0])
1172 return;
1174 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1176 object_sizes[object_size_type].safe_grow (num_ssa_names);
1177 computed[object_size_type] = BITMAP_ALLOC (NULL);
1180 init_offset_limit ();
1184 /* Destroy data structures after the object size computation. */
1186 static void
1187 fini_object_sizes (void)
1189 int object_size_type;
1191 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1193 object_sizes[object_size_type].release ();
1194 BITMAP_FREE (computed[object_size_type]);
/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

/* Static metadata for the objsz pass: a GIMPLE pass that requires the
   CFG and SSA form and provides/destroys nothing.  */
const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Pass implementation for folding __builtin_object_size calls.  The
   insert_min_max_p pass parameter selects between two behaviors in
   execute (see there).  */
class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  /* Single pass parameter (N == 0): whether to emit MIN/MAX_EXPRs
     instead of folding calls outright.  */
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }
  virtual unsigned int execute (function *);

private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes
1237 /* Dummy valueize function. */
1239 static tree
1240 do_valueize (tree t)
1242 return t;
/* Main pass body: walk every statement of FUN and fold each
   __builtin_object_size call.  In insert_min_max_p mode, instead of
   folding to a constant, emit a MIN_EXPR/MAX_EXPR combining the call's
   result with the computed bound.  Always returns 0 (no extra TODOs).  */
unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  gimple *call = gsi_stmt (i);
	  if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  /* Lazily set up the per-SSA-name size tables on first use.  */
	  init_object_sizes ();

	  /* If insert_min_max_p, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
	     and rather than folding the builtin to the constant if any,
	     create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
	     call result and the computed constant.  */
	  if (insert_min_max_p)
	    {
	      tree ost = gimple_call_arg (call, 1);
	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
		  tree ptr = gimple_call_arg (call, 0);
		  tree lhs = gimple_call_lhs (call);
		  if ((object_size_type == 1 || object_size_type == 3)
		      && (TREE_CODE (ptr) == ADDR_EXPR
			  || TREE_CODE (ptr) == SSA_NAME)
		      && lhs)
		    {
		      tree type = TREE_TYPE (lhs);
		      unsigned HOST_WIDE_INT bytes
			= compute_builtin_object_size (ptr, object_size_type);
		      /* Type 1 fails as -1 (maximum), type 3 fails as 0
			 (minimum); only act on a real result that fits
			 the lhs type.  */
		      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type == 1
							     ? -1 : 0)
			  && wi::fits_to_tree_p (bytes, type))
			{
			  /* Redirect the call into a fresh SSA name and
			     rebuild the original lhs as
			     MIN/MAX (call result, computed constant).  */
			  tree tem = make_ssa_name (type);
			  gimple_call_set_lhs (call, tem);
			  enum tree_code code
			    = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
			  tree cst = build_int_cstu (type, bytes);
			  gimple *g
			    = gimple_build_assign (lhs, code, tem, cst);
			  gsi_insert_after (&i, g, GSI_NEW_STMT);
			  update_stmt (call);
			}
		    }
		}
	      continue;
	    }

	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  /* Normal mode: try to fold the whole call to a constant.  */
	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      tree ost = gimple_call_arg (call, 1);

	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  /* Fall back to the documented failure values:
		     (size_t) -1 for types 0/1, 0 for types 2/3.  */
		  if (object_size_type < 2)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		  else if (object_size_type < 4)
		    result = build_zero_cst (size_type_node);
		}

	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  replace_uses_by (lhs, result);
	}
    }

  fini_object_sizes ();
  return 0;
}
} // anon namespace

/* Factory entry point used by the pass manager; the pass-parameter
   (insert_min_max_p) is configured later via set_pass_param.  */
gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}