gcc/tree-object-size.c
/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2013 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-object-size.h"
#include "diagnostic-core.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
                                                const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);
/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];
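
/* Illustrative example (not part of the original sources): given

     struct A { char buf[6]; char tail[6]; } a;
     char *p = &a.buf[2];

   the maximum sizes object_sizes[0] and object_sizes[1] for p are
   10 (to the end of the whole object a) and 4 (to the end of the
   innermost subobject a.buf); the minimum sizes object_sizes[2] and
   object_sizes[3] are the same here because p is known exactly.  */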

/* Bitmaps what object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;

/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}
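
/* For example, with a 64-bit sizetype the limit ends up as
   0x7fffffffffffffff: constant offsets below it are treated as forward
   additions, while larger constants are assumed to come from pointer
   subtraction.  */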

/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return double_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}

/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                            object_size_type & ~1);
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      if (sz != unknown[object_size_type])
        {
          double_int dsz = double_int::from_uhwi (sz) - mem_ref_offset (pt_var);
          if (dsz.is_negative ())
            sz = 0;
          else if (dsz.fits_uhwi ())
            sz = dsz.to_uhwi ();
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
           && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
           && TREE_CODE (pt_var) == STRING_CST
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
           && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return unknown[object_size_type];
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}

/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
                                 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
        arg1 = 0;
      default:
        break;
      }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int) gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
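
/* For example, a declaration such as

     void *my_alloc (size_t nelem, size_t elsize)
          __attribute__ ((alloc_size (1, 2)));

   (my_alloc is just an illustrative name) yields arg1 == 0 and
   arg2 == 1 above, so a call my_alloc (10, 4) with constant arguments
   is treated as allocating 40 bytes, the same as calloc (10, 4) or
   malloc (40).  */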

/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const_gimple call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_ASSUME_ALIGNED:
        if (gimple_call_num_args (call) >= 1)
          return gimple_call_arg (call, 0);
        break;
      default:
        break;
      }

  return NULL_TREE;
}
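
/* For example, in

     char buf[64];
     char *p = memcpy (buf, src, n);

   memcpy returns its first argument, so the object size of p is
   computed from buf and __builtin_object_size (p, 0) is 64.  */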

/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && computed[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (num_ssa_names > object_sizes[object_size_type].length ())
            object_sizes[object_size_type].safe_grow (num_ssa_names);
          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }
          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
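
/* Illustrative example: for

     char buf[64];
     char *p = cond ? &buf[16] : &buf[32];

   compute_builtin_object_size (p, 0) returns 48 and
   compute_builtin_object_size (p, 2) returns 32.  When nothing useful
   can be determined the result is unknown[object_size_type], i.e.
   (unsigned HOST_WIDE_INT) -1 for the maximum modes 0 and 1 and 0 for
   the minimum modes 2 and 3.  */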

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (osi, value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}

/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gimple call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}

/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}

/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}

/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  */
          bytes = addr_object_size (osi, op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
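
/* For example, with

     char buf[16];
     char *p = &buf[0] + 10;
     char *q = &buf[0] + 20;

   p gets object size 6, while q's offset exceeds the 16 remaining
   bytes and its object size becomes 0.  A constant offset larger than
   offset_limit is instead assumed to come from pointer subtraction and
   makes the size unknown.  */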

/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
        expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
        /* Uninitialized SSA names point nowhere.  */
        object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}

/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}

/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}

/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].safe_grow (num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}

/* Destroy data structures after the object size computation.  */

static void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].release ();
      BITMAP_FREE (computed[object_size_type]);
    }
}

/* Simple pass to optimize all __builtin_object_size () builtins.  */

static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree result;
          gimple call = gsi_stmt (i);
          if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
            continue;

          init_object_sizes ();
          result = fold_call_stmt (call, false);
          if (!result)
            {
              if (gimple_call_num_args (call) == 2
                  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
                {
                  tree ost = gimple_call_arg (call, 1);

                  if (tree_fits_uhwi_p (ost))
                    {
                      unsigned HOST_WIDE_INT object_size_type
                        = tree_to_uhwi (ost);

                      if (object_size_type < 2)
                        result = fold_convert (size_type_node,
                                               integer_minus_one_node);
                      else if (object_size_type < 4)
                        result = build_zero_cst (size_type_node);
                    }
                }

              if (!result)
                continue;
            }

          gcc_assert (TREE_CODE (result) == INTEGER_CST);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, "\n");
            }

          tree lhs = gimple_call_lhs (call);
          if (!lhs)
            continue;

          /* Propagate into all uses and fold those stmts.  */
          gimple use_stmt;
          imm_use_iterator iter;
          FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
            {
              use_operand_p use_p;
              FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                SET_USE (use_p, result);
              gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
              fold_stmt (&gsi);
              update_stmt (gsi_stmt (gsi));
            }
        }
    }

  fini_object_sizes ();
  return 0;
}

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  unsigned int execute () { return compute_object_sizes (); }

}; // class pass_object_sizes

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}