PR sanitizer/59009
[official-gcc.git] / gcc / tree-object-size.c
blob576dcb786ede05e0ff282ce7d718e827e6873a26
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "diagnostic-core.h"
27 #include "gimple-pretty-print.h"
28 #include "bitmap.h"
29 #include "gimple.h"
30 #include "gimple-ssa.h"
31 #include "tree-ssanames.h"
32 #include "tree-pass.h"
33 #include "tree-ssa-propagate.h"
/* Bookkeeping shared by one invocation of the object-size computation
   walk over the SSA use-def graph.  */
35 struct object_size_info
/* Which __builtin_object_size mode (0-3) is being computed; bit 0 means
   subobject size, bit 1 means minimum instead of maximum (see the dump
   strings in compute_builtin_object_size).  */
37 int object_size_type;
/* SSA names already visited, and those found in dependency cycles that
   must be re-examined in later passes.  */
38 bitmap visited, reexamine;
/* Current pass: 0 = initial UD-chain walk, 1 = pointer-increment loop
   detection, 2 = iterative re-examination.  */
39 int pass;
/* Set when a re-examination changed some recorded size, forcing another
   iteration of pass 2.  */
40 bool changed;
/* Scratch state used only by check_for_plus_in_loops{,_1}: recursion
   depth per SSA version, and an explicit stack with its top pointer.  */
41 unsigned int *depths;
42 unsigned int *stack, *tos;
/* Sentinel meaning "size could not be determined": ~0 for the maximum
   modes (0/1), 0 for the minimum modes (2/3).  */
45 static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
/* Forward declarations for the mutually recursive workers below.  */
47 static tree compute_object_offset (const_tree, const_tree);
48 static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
49 const_tree, int);
50 static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
51 static tree pass_through_call (const_gimple);
52 static void collect_object_sizes_for (struct object_size_info *, tree);
53 static void expr_object_size (struct object_size_info *, tree, tree);
54 static bool merge_object_sizes (struct object_size_info *, tree, tree,
55 unsigned HOST_WIDE_INT);
56 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
57 static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
58 static unsigned int compute_object_sizes (void);
59 static void init_offset_limit (void);
60 static void check_for_plus_in_loops (struct object_size_info *, tree);
61 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
62 unsigned int);
64 /* object_sizes[0] is upper bound for number of bytes till the end of
65 the object.
66 object_sizes[1] is upper bound for number of bytes till the end of
67 the subobject (innermost array or field with address taken).
68 object_sizes[2] is lower bound for number of bytes till the end of
69 the object and object_sizes[3] lower bound for subobject.
   Each array is indexed by SSA_NAME_VERSION and allocated lazily in
   init_object_sizes.  */
70 static unsigned HOST_WIDE_INT *object_sizes[4];
72 /* Bitmaps what object sizes have been computed already. */
73 static bitmap computed[4];
75 /* Maximum value of offset we consider to be addition. */
76 static unsigned HOST_WIDE_INT offset_limit;
79 /* Initialize OFFSET_LIMIT variable. */
80 static void
81 init_offset_limit (void)
83 if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
84 offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
85 else
86 offset_limit = -1;
87 offset_limit /= 2;
91 /* Compute offset of EXPR within VAR.  Return error_mark_node
92 if unknown. */
94 static tree
95 compute_object_offset (const_tree expr, const_tree var)
97 enum tree_code code = PLUS_EXPR;
98 tree base, off, t;
100 if (expr == var)
101 return size_zero_node;
103 switch (TREE_CODE (expr))
/* Field access: offset of the containing object plus the field's byte
   offset (DECL_FIELD_OFFSET plus the byte portion of
   DECL_FIELD_BIT_OFFSET).  */
105 case COMPONENT_REF:
106 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
107 if (base == error_mark_node)
108 return base;
110 t = TREE_OPERAND (expr, 1);
111 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
112 size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
113 / BITS_PER_UNIT));
114 break;
/* These wrappers do not change the byte offset; look through them.  */
116 case REALPART_EXPR:
117 CASE_CONVERT:
118 case VIEW_CONVERT_EXPR:
119 case NON_LVALUE_EXPR:
120 return compute_object_offset (TREE_OPERAND (expr, 0), var);
/* Imaginary part sits one component size past the real part.  */
122 case IMAGPART_EXPR:
123 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
124 if (base == error_mark_node)
125 return base;
127 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
128 break;
/* Array element: index times element size.  A negative constant index
   is folded as a subtraction to stay in unsigned sizetype.  */
130 case ARRAY_REF:
131 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
132 if (base == error_mark_node)
133 return base;
135 t = TREE_OPERAND (expr, 1);
136 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
138 code = MINUS_EXPR;
139 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
141 t = fold_convert (sizetype, t);
142 off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
143 break;
/* MEM_REF over &base: the offset is the MEM_REF's constant offset.  */
145 case MEM_REF:
146 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
147 return double_int_to_tree (sizetype, mem_ref_offset (expr));
149 default:
150 return error_mark_node;
153 return size_binop (code, base, off);
157 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
158 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
159 If unknown, return unknown[object_size_type]. */
161 static unsigned HOST_WIDE_INT
162 addr_object_size (struct object_size_info *osi, const_tree ptr,
163 int object_size_type)
165 tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;
167 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
/* Strip component refs to find the outermost object being addressed.  */
169 pt_var = TREE_OPERAND (ptr, 0);
170 while (handled_component_p (pt_var))
171 pt_var = TREE_OPERAND (pt_var, 0);
/* Case 1: the base is a dereference (MEM_REF): derive the size from the
   size of the dereferenced pointer minus the MEM_REF offset.  */
173 if (pt_var
174 && TREE_CODE (pt_var) == MEM_REF)
176 unsigned HOST_WIDE_INT sz;
178 if (!osi || (object_size_type & 1) != 0
179 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
/* Whole-object size of the base pointer (subobject bit cleared).  */
181 sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
182 object_size_type & ~1);
184 else
186 tree var = TREE_OPERAND (pt_var, 0);
187 if (osi->pass == 0)
188 collect_object_sizes_for (osi, var);
189 if (bitmap_bit_p (computed[object_size_type],
190 SSA_NAME_VERSION (var)))
191 sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
192 else
193 sz = unknown[object_size_type];
/* Subtract the MEM_REF offset in double_int arithmetic so overflow and
   negative results can be detected.  */
195 if (sz != unknown[object_size_type])
197 double_int dsz = double_int::from_uhwi (sz) - mem_ref_offset (pt_var);
198 if (dsz.is_negative ())
199 sz = 0;
200 else if (dsz.fits_uhwi ())
201 sz = dsz.to_uhwi ();
202 else
203 sz = unknown[object_size_type];
206 if (sz != unknown[object_size_type] && sz < offset_limit)
207 pt_var_size = size_int (sz);
/* Case 2: the base is a declaration with a known constant size.  */
209 else if (pt_var
210 && DECL_P (pt_var)
211 && host_integerp (DECL_SIZE_UNIT (pt_var), 1)
212 && (unsigned HOST_WIDE_INT)
213 tree_low_cst (DECL_SIZE_UNIT (pt_var), 1) < offset_limit)
214 pt_var_size = DECL_SIZE_UNIT (pt_var);
/* Case 3: the base is a string literal.  */
215 else if (pt_var
216 && TREE_CODE (pt_var) == STRING_CST
217 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
218 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
219 && (unsigned HOST_WIDE_INT)
220 tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
221 < offset_limit)
222 pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
223 else
224 return unknown[object_size_type];
/* If PTR addressed something inside PT_VAR, account for the offset of
   the addressed component, and for subobject sizes (bit 0 set) find the
   innermost enclosing array/field to measure against.  */
226 if (pt_var != TREE_OPERAND (ptr, 0))
228 tree var;
230 if (object_size_type & 1)
232 var = TREE_OPERAND (ptr, 0);
234 while (var != pt_var
235 && TREE_CODE (var) != BIT_FIELD_REF
236 && TREE_CODE (var) != COMPONENT_REF
237 && TREE_CODE (var) != ARRAY_REF
238 && TREE_CODE (var) != ARRAY_RANGE_REF
239 && TREE_CODE (var) != REALPART_EXPR
240 && TREE_CODE (var) != IMAGPART_EXPR)
241 var = TREE_OPERAND (var, 0);
242 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
243 var = TREE_OPERAND (var, 0);
244 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
245 || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
246 || (pt_var_size
247 && tree_int_cst_lt (pt_var_size,
248 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
249 var = pt_var;
250 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
252 tree v = var;
253 /* For &X->fld, compute object size only if fld isn't the last
254 field, as struct { int i; char c[1]; } is often used instead
255 of flexible array member. */
256 while (v && v != pt_var)
257 switch (TREE_CODE (v))
259 case ARRAY_REF:
260 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
261 && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
263 tree domain
264 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
265 if (domain
266 && TYPE_MAX_VALUE (domain)
267 && TREE_CODE (TYPE_MAX_VALUE (domain))
268 == INTEGER_CST
269 && tree_int_cst_lt (TREE_OPERAND (v, 1),
270 TYPE_MAX_VALUE (domain)))
/* Index provably inside the array bounds: not a trailing
   flexible-array idiom, stop scanning (v = NULL means "use
   the subobject type size").  */
272 v = NULL_TREE;
273 break;
276 v = TREE_OPERAND (v, 0);
277 break;
278 case REALPART_EXPR:
279 case IMAGPART_EXPR:
280 v = NULL_TREE;
281 break;
282 case COMPONENT_REF:
283 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
285 v = NULL_TREE;
286 break;
/* Skip union members; only a RECORD_TYPE field can be "last".  */
288 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
289 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
290 != UNION_TYPE
291 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
292 != QUAL_UNION_TYPE)
293 break;
294 else
295 v = TREE_OPERAND (v, 0);
296 if (TREE_CODE (v) == COMPONENT_REF
297 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
298 == RECORD_TYPE)
/* Is there another FIELD_DECL after this one?  If so, the field is
   not last and the subobject size is meaningful.  */
300 tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
301 for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
302 if (TREE_CODE (fld_chain) == FIELD_DECL)
303 break;
305 if (fld_chain)
307 v = NULL_TREE;
308 break;
310 v = TREE_OPERAND (v, 0);
312 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
313 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
314 != UNION_TYPE
315 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
316 != QUAL_UNION_TYPE)
317 break;
318 else
319 v = TREE_OPERAND (v, 0);
320 if (v != pt_var)
321 v = NULL_TREE;
322 else
323 v = pt_var;
324 break;
325 default:
326 v = pt_var;
327 break;
/* v == pt_var means the addressed field may be a trailing array;
   fall back to the whole object.  */
329 if (v == pt_var)
330 var = pt_var;
333 else
334 var = pt_var;
336 if (var != pt_var)
337 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
338 else if (!pt_var_size)
339 return unknown[object_size_type];
340 else
341 var_size = pt_var_size;
/* Remaining bytes = size of VAR minus offset of PTR within VAR.  */
342 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
343 if (bytes != error_mark_node)
345 if (TREE_CODE (bytes) == INTEGER_CST
346 && tree_int_cst_lt (var_size, bytes))
347 bytes = size_zero_node;
348 else
349 bytes = size_binop (MINUS_EXPR, var_size, bytes);
/* For a subobject within a MEM_REF also clamp by the remaining bytes
   of the whole pointed-to object.  */
351 if (var != pt_var
352 && pt_var_size
353 && TREE_CODE (pt_var) == MEM_REF
354 && bytes != error_mark_node)
356 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
357 if (bytes2 != error_mark_node)
359 if (TREE_CODE (bytes2) == INTEGER_CST
360 && tree_int_cst_lt (pt_var_size, bytes2))
361 bytes2 = size_zero_node;
362 else
363 bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
364 bytes = size_binop (MIN_EXPR, bytes, bytes2);
368 else if (!pt_var_size)
369 return unknown[object_size_type];
370 else
371 bytes = pt_var_size;
373 if (host_integerp (bytes, 1))
374 return tree_low_cst (bytes, 1);
376 return unknown[object_size_type];
380 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
381 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
382 argument from __builtin_object_size. If unknown, return
383 unknown[object_size_type]. */
385 static unsigned HOST_WIDE_INT
386 alloc_object_size (const_gimple call, int object_size_type)
388 tree callee, bytes = NULL_TREE;
389 tree alloc_size;
/* 0-based indices of the size argument(s); -1 = none.  If both are set
   the size is arg1 * arg2 (calloc-style).  */
390 int arg1 = -1, arg2 = -1;
392 gcc_assert (is_gimple_call (call));
394 callee = gimple_call_fndecl (call);
395 if (!callee)
396 return unknown[object_size_type];
/* First honor an explicit alloc_size attribute (1-based positions).  */
398 alloc_size = lookup_attribute ("alloc_size",
399 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
400 if (alloc_size && TREE_VALUE (alloc_size))
402 tree p = TREE_VALUE (alloc_size);
404 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
405 if (TREE_CHAIN (p))
406 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
/* Then the well-known allocators.  */
409 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
410 switch (DECL_FUNCTION_CODE (callee))
412 case BUILT_IN_CALLOC:
413 arg2 = 1;
414 /* fall through */
415 case BUILT_IN_MALLOC:
416 case BUILT_IN_ALLOCA:
417 case BUILT_IN_ALLOCA_WITH_ALIGN:
418 arg1 = 0;
/* FALLTHRU into default: nothing more to set.  */
419 default:
420 break;
/* Only constant size arguments give a usable compile-time answer.  */
423 if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
424 || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
425 || (arg2 >= 0
426 && (arg2 >= (int)gimple_call_num_args (call)
427 || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
428 return unknown[object_size_type];
430 if (arg2 >= 0)
431 bytes = size_binop (MULT_EXPR,
432 fold_convert (sizetype, gimple_call_arg (call, arg1)),
433 fold_convert (sizetype, gimple_call_arg (call, arg2)));
434 else if (arg1 >= 0)
435 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
437 if (bytes && host_integerp (bytes, 1))
438 return tree_low_cst (bytes, 1);
440 return unknown[object_size_type];
444 /* If object size is propagated from one of function's arguments directly
445 to its return value, return that argument for GIMPLE_CALL statement CALL.
446 Otherwise return NULL. */
448 static tree
449 pass_through_call (const_gimple call)
451 tree callee = gimple_call_fndecl (call);
453 if (callee
454 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
455 switch (DECL_FUNCTION_CODE (callee))
457 case BUILT_IN_MEMCPY:
458 case BUILT_IN_MEMMOVE:
459 case BUILT_IN_MEMSET:
460 case BUILT_IN_STRCPY:
461 case BUILT_IN_STRNCPY:
462 case BUILT_IN_STRCAT:
463 case BUILT_IN_STRNCAT:
464 case BUILT_IN_MEMCPY_CHK:
465 case BUILT_IN_MEMMOVE_CHK:
466 case BUILT_IN_MEMSET_CHK:
467 case BUILT_IN_STRCPY_CHK:
468 case BUILT_IN_STRNCPY_CHK:
469 case BUILT_IN_STPNCPY_CHK:
470 case BUILT_IN_STRCAT_CHK:
471 case BUILT_IN_STRNCAT_CHK:
472 case BUILT_IN_ASSUME_ALIGNED:
473 if (gimple_call_num_args (call) >= 1)
474 return gimple_call_arg (call, 0);
475 break;
476 default:
477 break;
480 return NULL_TREE;
484 /* Compute __builtin_object_size value for PTR. OBJECT_SIZE_TYPE is the
485 second argument from __builtin_object_size.
   Returns unknown[object_size_type] when the size cannot be determined.  */
487 unsigned HOST_WIDE_INT
488 compute_builtin_object_size (tree ptr, int object_size_type)
490 gcc_assert (object_size_type >= 0 && object_size_type <= 3);
492 if (! offset_limit)
493 init_offset_limit ();
/* &object: answer directly without SSA propagation.  */
495 if (TREE_CODE (ptr) == ADDR_EXPR)
496 return addr_object_size (NULL, ptr, object_size_type);
498 if (TREE_CODE (ptr) == SSA_NAME
499 && POINTER_TYPE_P (TREE_TYPE (ptr))
500 && object_sizes[object_size_type] != NULL)
/* Compute and cache the size on first request for this SSA name.  */
502 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
504 struct object_size_info osi;
505 bitmap_iterator bi;
506 unsigned int i;
508 if (dump_file)
510 fprintf (dump_file, "Computing %s %sobject size for ",
511 (object_size_type & 2) ? "minimum" : "maximum",
512 (object_size_type & 1) ? "sub" : "");
513 print_generic_expr (dump_file, ptr, dump_flags);
514 fprintf (dump_file, ":\n");
517 osi.visited = BITMAP_ALLOC (NULL);
518 osi.reexamine = BITMAP_ALLOC (NULL);
519 osi.object_size_type = object_size_type;
520 osi.depths = NULL;
521 osi.stack = NULL;
522 osi.tos = NULL;
524 /* First pass: walk UD chains, compute object sizes that
525 can be computed. osi.reexamine bitmap at the end will
526 contain what variables were found in dependency cycles
527 and therefore need to be reexamined. */
528 osi.pass = 0;
529 osi.changed = false;
530 collect_object_sizes_for (&osi, ptr);
532 /* Second pass: keep recomputing object sizes of variables
533 that need reexamination, until no object sizes are
534 increased or all object sizes are computed. */
535 if (! bitmap_empty_p (osi.reexamine))
537 bitmap reexamine = BITMAP_ALLOC (NULL);
539 /* If looking for minimum instead of maximum object size,
540 detect cases where a pointer is increased in a loop.
541 Although even without this detection pass 2 would eventually
542 terminate, it could take a long time. If a pointer is
543 increasing this way, we need to assume 0 object size.
544 E.g. p = &buf[0]; while (cond) p = p + 4; */
545 if (object_size_type & 2)
547 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
548 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
549 osi.tos = osi.stack;
550 osi.pass = 1;
551 /* collect_object_sizes_for is changing
552 osi.reexamine bitmap, so iterate over a copy. */
553 bitmap_copy (reexamine, osi.reexamine);
554 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
555 if (bitmap_bit_p (osi.reexamine, i))
556 check_for_plus_in_loops (&osi, ssa_name (i));
558 free (osi.depths);
559 osi.depths = NULL;
560 free (osi.stack);
561 osi.stack = NULL;
562 osi.tos = NULL;
/* Pass 2: iterate to a fixed point over the cyclic names.  */
567 osi.pass = 2;
568 osi.changed = false;
569 /* collect_object_sizes_for is changing
570 osi.reexamine bitmap, so iterate over a copy. */
571 bitmap_copy (reexamine, osi.reexamine);
572 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
573 if (bitmap_bit_p (osi.reexamine, i))
575 collect_object_sizes_for (&osi, ssa_name (i));
576 if (dump_file && (dump_flags & TDF_DETAILS))
578 fprintf (dump_file, "Reexamining ");
579 print_generic_expr (dump_file, ssa_name (i),
580 dump_flags);
581 fprintf (dump_file, "\n");
585 while (osi.changed);
587 BITMAP_FREE (reexamine);
/* Whatever is still marked for re-examination has converged; mark it
   as computed so later queries reuse the result.  */
589 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
590 bitmap_set_bit (computed[object_size_type], i);
592 /* Debugging dumps. */
593 if (dump_file)
595 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
596 if (object_sizes[object_size_type][i]
597 != unknown[object_size_type])
599 print_generic_expr (dump_file, ssa_name (i),
600 dump_flags);
601 fprintf (dump_file,
602 ": %s %sobject size "
603 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
604 (object_size_type & 2) ? "minimum" : "maximum",
605 (object_size_type & 1) ? "sub" : "",
606 object_sizes[object_size_type][i]);
610 BITMAP_FREE (osi.reexamine);
611 BITMAP_FREE (osi.visited);
614 return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
617 return unknown[object_size_type];
620 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
622 static void
623 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
625 int object_size_type = osi->object_size_type;
626 unsigned int varno = SSA_NAME_VERSION (ptr);
627 unsigned HOST_WIDE_INT bytes;
629 gcc_assert (object_sizes[object_size_type][varno]
630 != unknown[object_size_type]);
631 gcc_assert (osi->pass == 0);
633 if (TREE_CODE (value) == WITH_SIZE_EXPR)
634 value = TREE_OPERAND (value, 0);
636 /* Pointer variables should have been handled by merge_object_sizes. */
637 gcc_assert (TREE_CODE (value) != SSA_NAME
638 || !POINTER_TYPE_P (TREE_TYPE (value)));
640 if (TREE_CODE (value) == ADDR_EXPR)
641 bytes = addr_object_size (osi, value, object_size_type);
642 else
643 bytes = unknown[object_size_type];
/* Record the new candidate: keep the maximum for modes 0/1, the minimum
   for modes 2/3.  */
645 if ((object_size_type & 2) == 0)
647 if (object_sizes[object_size_type][varno] < bytes)
648 object_sizes[object_size_type][varno] = bytes;
650 else
652 if (object_sizes[object_size_type][varno] > bytes)
653 object_sizes[object_size_type][varno] = bytes;
658 /* Compute object_sizes for PTR, defined to the result of a call. */
660 static void
661 call_object_size (struct object_size_info *osi, tree ptr, gimple call)
663 int object_size_type = osi->object_size_type;
664 unsigned int varno = SSA_NAME_VERSION (ptr);
665 unsigned HOST_WIDE_INT bytes;
667 gcc_assert (is_gimple_call (call));
669 gcc_assert (object_sizes[object_size_type][varno]
670 != unknown[object_size_type]);
671 gcc_assert (osi->pass == 0);
673 bytes = alloc_object_size (call, object_size_type);
675 if ((object_size_type & 2) == 0)
677 if (object_sizes[object_size_type][varno] < bytes)
678 object_sizes[object_size_type][varno] = bytes;
680 else
682 if (object_sizes[object_size_type][varno] > bytes)
683 object_sizes[object_size_type][varno] = bytes;
688 /* Compute object_sizes for PTR, defined to an unknown value. */
690 static void
691 unknown_object_size (struct object_size_info *osi, tree ptr)
693 int object_size_type = osi->object_size_type;
694 unsigned int varno = SSA_NAME_VERSION (ptr);
695 unsigned HOST_WIDE_INT bytes;
697 gcc_assert (object_sizes[object_size_type][varno]
698 != unknown[object_size_type]);
699 gcc_assert (osi->pass == 0);
701 bytes = unknown[object_size_type];
703 if ((object_size_type & 2) == 0)
705 if (object_sizes[object_size_type][varno] < bytes)
706 object_sizes[object_size_type][varno] = bytes;
708 else
710 if (object_sizes[object_size_type][varno] > bytes)
711 object_sizes[object_size_type][varno] = bytes;
716 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
717 the object size might need reexamination later. */
719 static bool
720 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
721 unsigned HOST_WIDE_INT offset)
723 int object_size_type = osi->object_size_type;
724 unsigned int varno = SSA_NAME_VERSION (dest);
725 unsigned HOST_WIDE_INT orig_bytes;
/* Already degraded to unknown: nothing can improve it.  */
727 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
728 return false;
/* Offsets at or past offset_limit are treated as subtraction.  */
729 if (offset >= offset_limit)
731 object_sizes[object_size_type][varno] = unknown[object_size_type];
732 return false;
735 if (osi->pass == 0)
736 collect_object_sizes_for (osi, orig);
/* Remaining bytes of ORIG after stepping OFFSET forward (clamped at 0).  */
738 orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
739 if (orig_bytes != unknown[object_size_type])
740 orig_bytes = (offset > orig_bytes)
741 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;
/* Maximum modes grow, minimum modes shrink; flag any change so pass 2
   keeps iterating.  */
743 if ((object_size_type & 2) == 0)
745 if (object_sizes[object_size_type][varno] < orig_bytes)
747 object_sizes[object_size_type][varno] = orig_bytes;
748 osi->changed = true;
751 else
753 if (object_sizes[object_size_type][varno] > orig_bytes)
755 object_sizes[object_size_type][varno] = orig_bytes;
756 osi->changed = true;
/* Needs re-examination iff ORIG itself is still in a dependency cycle.  */
759 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
763 /* Compute object_sizes for VAR, defined to the result of an assignment
764 with operator POINTER_PLUS_EXPR. Return true if the object size might
765 need reexamination later. */
767 static bool
768 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
770 int object_size_type = osi->object_size_type;
771 unsigned int varno = SSA_NAME_VERSION (var);
772 unsigned HOST_WIDE_INT bytes;
773 tree op0, op1;
/* Accept either ptr p+ off, or &MEM_REF[ptr, off] which is equivalent.  */
775 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
777 op0 = gimple_assign_rhs1 (stmt);
778 op1 = gimple_assign_rhs2 (stmt);
780 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
782 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
783 gcc_assert (TREE_CODE (rhs) == MEM_REF);
784 op0 = TREE_OPERAND (rhs, 0);
785 op1 = TREE_OPERAND (rhs, 1);
787 else
788 gcc_unreachable ();
790 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
791 return false;
793 /* Handle PTR + OFFSET here. */
794 if (TREE_CODE (op1) == INTEGER_CST
795 && (TREE_CODE (op0) == SSA_NAME
796 || TREE_CODE (op0) == ADDR_EXPR))
798 if (! host_integerp (op1, 1))
799 bytes = unknown[object_size_type];
800 else if (TREE_CODE (op0) == SSA_NAME)
801 return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
802 else
804 unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);
806 /* op0 will be ADDR_EXPR here. */
807 bytes = addr_object_size (osi, op0, object_size_type);
/* (empty then-branch: keep bytes as 'unknown'.)  */
808 if (bytes == unknown[object_size_type])
810 else if (off > offset_limit)
811 bytes = unknown[object_size_type];
812 else if (off > bytes)
813 bytes = 0;
814 else
815 bytes -= off;
818 else
819 bytes = unknown[object_size_type];
/* Record: maximum for modes 0/1, minimum for modes 2/3.  */
821 if ((object_size_type & 2) == 0)
823 if (object_sizes[object_size_type][varno] < bytes)
824 object_sizes[object_size_type][varno] = bytes;
826 else
828 if (object_sizes[object_size_type][varno] > bytes)
829 object_sizes[object_size_type][varno] = bytes;
831 return false;
835 /* Compute object_sizes for VAR, defined at STMT, which is
836 a COND_EXPR. Return true if the object size might need reexamination
837 later. */
839 static bool
840 cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
842 tree then_, else_;
843 int object_size_type = osi->object_size_type;
844 unsigned int varno = SSA_NAME_VERSION (var);
845 bool reexamine = false;
847 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
849 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
850 return false;
852 then_ = gimple_assign_rhs2 (stmt);
853 else_ = gimple_assign_rhs3 (stmt);
855 if (TREE_CODE (then_) == SSA_NAME)
856 reexamine |= merge_object_sizes (osi, var, then_, 0);
857 else
858 expr_object_size (osi, var, then_);
860 if (TREE_CODE (else_) == SSA_NAME)
861 reexamine |= merge_object_sizes (osi, var, else_, 0);
862 else
863 expr_object_size (osi, var, else_);
865 return reexamine;
868 /* Compute object sizes for VAR.
869 For ADDR_EXPR an object size is the number of remaining bytes
870 to the end of the object (where what is considered an object depends on
871 OSI->object_size_type).
872 For allocation GIMPLE_CALL like malloc or calloc object size is the size
873 of the allocation.
874 For POINTER_PLUS_EXPR where second operand is a constant integer,
875 object size is object size of the first operand minus the constant.
876 If the constant is bigger than the number of remaining bytes until the
877 end of the object, object size is 0, but if it is instead a pointer
878 subtraction, object size is unknown[object_size_type].
879 To differentiate addition from subtraction, ADDR_EXPR returns
880 unknown[object_size_type] for all objects bigger than half of the address
881 space, and constants less than half of the address space are considered
882 addition, while bigger constants subtraction.
883 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
884 object size is object size of that argument.
885 Otherwise, object size is the maximum of object sizes of variables
886 that it might be set to. */
888 static void
889 collect_object_sizes_for (struct object_size_info *osi, tree var)
891 int object_size_type = osi->object_size_type;
892 unsigned int varno = SSA_NAME_VERSION (var);
893 gimple stmt;
894 bool reexamine;
/* Already cached from an earlier query.  */
896 if (bitmap_bit_p (computed[object_size_type], varno))
897 return;
899 if (osi->pass == 0)
/* First visit: seed with the identity of the min/max lattice
   (~0 for minimum modes, 0 for maximum modes).  */
901 if (bitmap_set_bit (osi->visited, varno))
903 object_sizes[object_size_type][varno]
904 = (object_size_type & 2) ? -1 : 0;
906 else
908 /* Found a dependency loop. Mark the variable for later
909 re-examination. */
910 bitmap_set_bit (osi->reexamine, varno);
911 if (dump_file && (dump_flags & TDF_DETAILS))
913 fprintf (dump_file, "Found a dependency loop at ");
914 print_generic_expr (dump_file, var, dump_flags);
915 fprintf (dump_file, "\n");
917 return;
921 if (dump_file && (dump_flags & TDF_DETAILS))
923 fprintf (dump_file, "Visiting use-def links for ");
924 print_generic_expr (dump_file, var, dump_flags);
925 fprintf (dump_file, "\n");
/* Dispatch on the defining statement of VAR.  */
928 stmt = SSA_NAME_DEF_STMT (var);
929 reexamine = false;
931 switch (gimple_code (stmt))
933 case GIMPLE_ASSIGN:
935 tree rhs = gimple_assign_rhs1 (stmt);
936 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
937 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
938 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
939 reexamine = plus_stmt_object_size (osi, var, stmt);
940 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
941 reexamine = cond_expr_object_size (osi, var, stmt);
942 else if (gimple_assign_single_p (stmt)
943 || gimple_assign_unary_nop_p (stmt))
945 if (TREE_CODE (rhs) == SSA_NAME
946 && POINTER_TYPE_P (TREE_TYPE (rhs)))
947 reexamine = merge_object_sizes (osi, var, rhs, 0);
948 else
949 expr_object_size (osi, var, rhs);
951 else
952 unknown_object_size (osi, var);
953 break;
/* Calls: either a pass-through builtin (size comes from the argument)
   or an allocation call (size comes from alloc_object_size).  */
956 case GIMPLE_CALL:
958 tree arg = pass_through_call (stmt);
959 if (arg)
961 if (TREE_CODE (arg) == SSA_NAME
962 && POINTER_TYPE_P (TREE_TYPE (arg)))
963 reexamine = merge_object_sizes (osi, var, arg, 0);
964 else
965 expr_object_size (osi, var, arg);
967 else
968 call_object_size (osi, var, stmt);
969 break;
972 case GIMPLE_ASM:
973 /* Pointers defined by __asm__ statements can point anywhere. */
974 object_sizes[object_size_type][varno] = unknown[object_size_type];
975 break;
977 case GIMPLE_NOP:
978 if (SSA_NAME_VAR (var)
979 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
980 expr_object_size (osi, var, SSA_NAME_VAR (var));
981 else
982 /* Uninitialized SSA names point nowhere. */
983 object_sizes[object_size_type][varno] = unknown[object_size_type];
984 break;
/* PHIs: merge every incoming argument; stop early once unknown.  */
986 case GIMPLE_PHI:
988 unsigned i;
990 for (i = 0; i < gimple_phi_num_args (stmt); i++)
992 tree rhs = gimple_phi_arg (stmt, i)->def;
994 if (object_sizes[object_size_type][varno]
995 == unknown[object_size_type])
996 break;
998 if (TREE_CODE (rhs) == SSA_NAME)
999 reexamine |= merge_object_sizes (osi, var, rhs, 0);
1000 else if (osi->pass == 0)
1001 expr_object_size (osi, var, rhs);
1003 break;
1006 default:
1007 gcc_unreachable ();
/* A final (or unknown) size is cached; otherwise keep VAR queued for
   the fixed-point iteration in pass 2.  */
1010 if (! reexamine
1011 || object_sizes[object_size_type][varno] == unknown[object_size_type])
1013 bitmap_set_bit (computed[object_size_type], varno);
1014 bitmap_clear_bit (osi->reexamine, varno);
1016 else
1018 bitmap_set_bit (osi->reexamine, varno);
1019 if (dump_file && (dump_flags & TDF_DETAILS))
1021 fprintf (dump_file, "Need to reexamine ");
1022 print_generic_expr (dump_file, var, dump_flags);
1023 fprintf (dump_file, "\n");
1029 /* Helper function for check_for_plus_in_loops. Called recursively
1030 to detect loops.
   DEPTH counts the number of non-zero pointer increments seen on the
   current path; a revisit at a different depth means the pointer grows
   around the cycle.  */
1032 static void
1033 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1034 unsigned int depth)
1036 gimple stmt = SSA_NAME_DEF_STMT (var);
1037 unsigned int varno = SSA_NAME_VERSION (var);
1039 if (osi->depths[varno])
1041 if (osi->depths[varno] != depth)
1043 unsigned int *sp;
1045 /* Found a loop involving pointer addition. */
/* Unwind the stack down to VAR, forcing minimum object size 0 for
   every SSA name participating in the cycle.  */
1046 for (sp = osi->tos; sp > osi->stack; )
1048 --sp;
1049 bitmap_clear_bit (osi->reexamine, *sp);
1050 bitmap_set_bit (computed[osi->object_size_type], *sp);
1051 object_sizes[osi->object_size_type][*sp] = 0;
1052 if (*sp == varno)
1053 break;
1056 return;
1058 else if (! bitmap_bit_p (osi->reexamine, varno))
1059 return;
/* Push VAR and recurse into its defining statement.  */
1061 osi->depths[varno] = depth;
1062 *osi->tos++ = varno;
1064 switch (gimple_code (stmt))
1067 case GIMPLE_ASSIGN:
1069 if ((gimple_assign_single_p (stmt)
1070 || gimple_assign_unary_nop_p (stmt))
1071 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1073 tree rhs = gimple_assign_rhs1 (stmt);
1075 check_for_plus_in_loops_1 (osi, rhs, depth);
1077 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1079 tree basevar = gimple_assign_rhs1 (stmt);
1080 tree cst = gimple_assign_rhs2 (stmt);
1082 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
/* A non-zero increment advances the depth.  */
1084 check_for_plus_in_loops_1 (osi, basevar,
1085 depth + !integer_zerop (cst));
1087 else
1088 gcc_unreachable ();
1089 break;
1092 case GIMPLE_CALL:
1094 tree arg = pass_through_call (stmt);
1095 if (arg)
1097 if (TREE_CODE (arg) == SSA_NAME)
1098 check_for_plus_in_loops_1 (osi, arg, depth);
1099 else
1100 gcc_unreachable ();
1102 break;
1105 case GIMPLE_PHI:
1107 unsigned i;
1109 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1111 tree rhs = gimple_phi_arg (stmt, i)->def;
1113 if (TREE_CODE (rhs) == SSA_NAME)
1114 check_for_plus_in_loops_1 (osi, rhs, depth);
1116 break;
1119 default:
1120 gcc_unreachable ();
/* Pop VAR on the way out.  */
1123 osi->depths[varno] = 0;
1124 osi->tos--;
1128 /* Check if some pointer we are computing object size of is being increased
1129 within a loop. If yes, assume all the SSA variables participating in
1130 that loop have minimum object sizes 0. */
1132 static void
1133 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1135 gimple stmt = SSA_NAME_DEF_STMT (var);
1137 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1138 and looked for a POINTER_PLUS_EXPR in the pass-through
1139 argument, if any. In GIMPLE, however, such an expression
1140 is not a valid call operand. */
1142 if (is_gimple_assign (stmt)
1143 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1145 tree basevar = gimple_assign_rhs1 (stmt);
1146 tree cst = gimple_assign_rhs2 (stmt);
1148 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
/* A zero increment cannot make the pointer grow around a cycle.  */
1150 if (integer_zerop (cst))
1151 return;
/* Seed the DFS with the base pointer at depth 1; VAR itself starts at
   depth 2 because of this non-zero increment.  */
1153 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1154 *osi->tos++ = SSA_NAME_VERSION (basevar);
1155 check_for_plus_in_loops_1 (osi, var, 2);
1156 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1157 osi->tos--;
1162 /* Initialize data structures for the object size computation. */
1164 void
1165 init_object_sizes (void)
1167 int object_size_type;
1169 if (object_sizes[0])
1170 return;
1172 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1174 object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
1175 computed[object_size_type] = BITMAP_ALLOC (NULL);
1178 init_offset_limit ();
1182 /* Destroy data structures after the object size computation. */
1184 static void
1185 fini_object_sizes (void)
1187 int object_size_type;
1189 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1191 free (object_sizes[object_size_type]);
1192 BITMAP_FREE (computed[object_size_type]);
1193 object_sizes[object_size_type] = NULL;
1198 /* Simple pass to optimize all __builtin_object_size () builtins. */
1200 static unsigned int
1201 compute_object_sizes (void)
1203 basic_block bb;
1204 FOR_EACH_BB (bb)
1206 gimple_stmt_iterator i;
1207 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1209 tree callee, result;
1210 gimple call = gsi_stmt (i);
/* Only __builtin_object_size calls are of interest.  */
1212 if (gimple_code (call) != GIMPLE_CALL)
1213 continue;
1215 callee = gimple_call_fndecl (call);
1216 if (!callee
1217 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
1218 || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
1219 continue;
1221 init_object_sizes ();
1222 result = fold_call_stmt (call, false);
1223 if (!result)
/* Folding failed: substitute the documented "unknown" results,
   (size_t)-1 for modes 0/1 and 0 for modes 2/3.  */
1225 if (gimple_call_num_args (call) == 2
1226 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
1228 tree ost = gimple_call_arg (call, 1);
1230 if (host_integerp (ost, 1))
1232 unsigned HOST_WIDE_INT object_size_type
1233 = tree_low_cst (ost, 1);
1235 if (object_size_type < 2)
1236 result = fold_convert (size_type_node,
1237 integer_minus_one_node);
1238 else if (object_size_type < 4)
1239 result = build_zero_cst (size_type_node);
1243 if (!result)
1244 continue;
1247 if (dump_file && (dump_flags & TDF_DETAILS))
1249 fprintf (dump_file, "Simplified\n ");
1250 print_gimple_stmt (dump_file, call, 0, dump_flags);
/* Replace the call with the computed constant.  */
1253 if (!update_call_from_tree (&i, result))
1254 gcc_unreachable ();
1256 if (dump_file && (dump_flags & TDF_DETAILS))
1258 fprintf (dump_file, "to\n ");
1259 print_gimple_stmt (dump_file, gsi_stmt (i), 0, dump_flags);
1260 fprintf (dump_file, "\n");
1265 fini_object_sizes ();
1266 return 0;
1269 namespace {
/* Pass metadata for the "objsz" GIMPLE pass.  */
1271 const pass_data pass_data_object_sizes =
1273 GIMPLE_PASS, /* type */
1274 "objsz", /* name */
1275 OPTGROUP_NONE, /* optinfo_flags */
1276 false, /* has_gate */
1277 true, /* has_execute */
1278 TV_NONE, /* tv_id */
1279 ( PROP_cfg | PROP_ssa ), /* properties_required */
1280 0, /* properties_provided */
1281 0, /* properties_destroyed */
1282 0, /* todo_flags_start */
1283 TODO_verify_ssa, /* todo_flags_finish */
/* Thin pass wrapper delegating to compute_object_sizes.  */
1286 class pass_object_sizes : public gimple_opt_pass
1288 public:
1289 pass_object_sizes (gcc::context *ctxt)
1290 : gimple_opt_pass (pass_data_object_sizes, ctxt)
1293 /* opt_pass methods: */
1294 opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
1295 unsigned int execute () { return compute_object_sizes (); }
1297 }; // class pass_object_sizes
1299 } // anon namespace
/* Factory used by the pass manager to instantiate the pass.  */
1301 gimple_opt_pass *
1302 make_pass_object_sizes (gcc::context *ctxt)
1304 return new pass_object_sizes (ctxt);