/* gcc/tree-object-size.c — from the official-gcc.git mirror
   (blob e683861a12207e03e466a940cd47dc51492d0d6e, "Daily bump." commit).  */
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2016 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
36 struct object_size_info
38 int object_size_type;
39 bitmap visited, reexamine;
40 int pass;
41 bool changed;
42 unsigned int *depths;
43 unsigned int *stack, *tos;
46 static const unsigned HOST_WIDE_INT unknown[4] = {
47 HOST_WIDE_INT_M1U,
48 HOST_WIDE_INT_M1U,
53 static tree compute_object_offset (const_tree, const_tree);
54 static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
55 const_tree, int);
56 static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
57 static tree pass_through_call (const gcall *);
58 static void collect_object_sizes_for (struct object_size_info *, tree);
59 static void expr_object_size (struct object_size_info *, tree, tree);
60 static bool merge_object_sizes (struct object_size_info *, tree, tree,
61 unsigned HOST_WIDE_INT);
62 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
63 static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
64 static void init_offset_limit (void);
65 static void check_for_plus_in_loops (struct object_size_info *, tree);
66 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
67 unsigned int);
69 /* object_sizes[0] is upper bound for number of bytes till the end of
70 the object.
71 object_sizes[1] is upper bound for number of bytes till the end of
72 the subobject (innermost array or field with address taken).
73 object_sizes[2] is lower bound for number of bytes till the end of
74 the object and object_sizes[3] lower bound for subobject. */
75 static vec<unsigned HOST_WIDE_INT> object_sizes[4];
77 /* Bitmaps what object sizes have been computed already. */
78 static bitmap computed[4];
80 /* Maximum value of offset we consider to be addition. */
81 static unsigned HOST_WIDE_INT offset_limit;
84 /* Initialize OFFSET_LIMIT variable. */
85 static void
86 init_offset_limit (void)
88 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
89 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
90 else
91 offset_limit = -1;
92 offset_limit /= 2;
96 /* Compute offset of EXPR within VAR. Return error_mark_node
97 if unknown. */
99 static tree
100 compute_object_offset (const_tree expr, const_tree var)
102 enum tree_code code = PLUS_EXPR;
103 tree base, off, t;
105 if (expr == var)
106 return size_zero_node;
108 switch (TREE_CODE (expr))
110 case COMPONENT_REF:
111 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
112 if (base == error_mark_node)
113 return base;
115 t = TREE_OPERAND (expr, 1);
116 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
117 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
118 / BITS_PER_UNIT));
119 break;
121 case REALPART_EXPR:
122 CASE_CONVERT:
123 case VIEW_CONVERT_EXPR:
124 case NON_LVALUE_EXPR:
125 return compute_object_offset (TREE_OPERAND (expr, 0), var);
127 case IMAGPART_EXPR:
128 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
129 if (base == error_mark_node)
130 return base;
132 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
133 break;
135 case ARRAY_REF:
136 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
137 if (base == error_mark_node)
138 return base;
140 t = TREE_OPERAND (expr, 1);
141 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
143 code = MINUS_EXPR;
144 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
146 t = fold_convert (sizetype, t);
147 off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
148 break;
150 case MEM_REF:
151 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
152 return wide_int_to_tree (sizetype, mem_ref_offset (expr));
154 default:
155 return error_mark_node;
158 return size_binop (code, base, off);
162 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
163 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
164 If unknown, return unknown[object_size_type]. */
166 static unsigned HOST_WIDE_INT
167 addr_object_size (struct object_size_info *osi, const_tree ptr,
168 int object_size_type)
170 tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;
172 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
174 pt_var = TREE_OPERAND (ptr, 0);
175 while (handled_component_p (pt_var))
176 pt_var = TREE_OPERAND (pt_var, 0);
178 if (pt_var
179 && TREE_CODE (pt_var) == MEM_REF)
181 unsigned HOST_WIDE_INT sz;
183 if (!osi || (object_size_type & 1) != 0
184 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
186 sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
187 object_size_type & ~1);
189 else
191 tree var = TREE_OPERAND (pt_var, 0);
192 if (osi->pass == 0)
193 collect_object_sizes_for (osi, var);
194 if (bitmap_bit_p (computed[object_size_type],
195 SSA_NAME_VERSION (var)))
196 sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
197 else
198 sz = unknown[object_size_type];
200 if (sz != unknown[object_size_type])
202 offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
203 if (wi::neg_p (dsz))
204 sz = 0;
205 else if (wi::fits_uhwi_p (dsz))
206 sz = dsz.to_uhwi ();
207 else
208 sz = unknown[object_size_type];
211 if (sz != unknown[object_size_type] && sz < offset_limit)
212 pt_var_size = size_int (sz);
214 else if (pt_var
215 && DECL_P (pt_var)
216 && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
217 && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
218 pt_var_size = DECL_SIZE_UNIT (pt_var);
219 else if (pt_var
220 && TREE_CODE (pt_var) == STRING_CST
221 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
222 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
223 && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
224 < offset_limit)
225 pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
226 else
227 return unknown[object_size_type];
229 if (pt_var != TREE_OPERAND (ptr, 0))
231 tree var;
233 if (object_size_type & 1)
235 var = TREE_OPERAND (ptr, 0);
237 while (var != pt_var
238 && TREE_CODE (var) != BIT_FIELD_REF
239 && TREE_CODE (var) != COMPONENT_REF
240 && TREE_CODE (var) != ARRAY_REF
241 && TREE_CODE (var) != ARRAY_RANGE_REF
242 && TREE_CODE (var) != REALPART_EXPR
243 && TREE_CODE (var) != IMAGPART_EXPR)
244 var = TREE_OPERAND (var, 0);
245 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
246 var = TREE_OPERAND (var, 0);
247 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
248 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
249 || (pt_var_size
250 && tree_int_cst_lt (pt_var_size,
251 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
252 var = pt_var;
253 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
255 tree v = var;
256 /* For &X->fld, compute object size only if fld isn't the last
257 field, as struct { int i; char c[1]; } is often used instead
258 of flexible array member. */
259 while (v && v != pt_var)
260 switch (TREE_CODE (v))
262 case ARRAY_REF:
263 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
264 && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
266 tree domain
267 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
268 if (domain
269 && TYPE_MAX_VALUE (domain)
270 && TREE_CODE (TYPE_MAX_VALUE (domain))
271 == INTEGER_CST
272 && tree_int_cst_lt (TREE_OPERAND (v, 1),
273 TYPE_MAX_VALUE (domain)))
275 v = NULL_TREE;
276 break;
279 v = TREE_OPERAND (v, 0);
280 break;
281 case REALPART_EXPR:
282 case IMAGPART_EXPR:
283 v = NULL_TREE;
284 break;
285 case COMPONENT_REF:
286 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
288 v = NULL_TREE;
289 break;
291 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
292 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
293 != UNION_TYPE
294 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
295 != QUAL_UNION_TYPE)
296 break;
297 else
298 v = TREE_OPERAND (v, 0);
299 if (TREE_CODE (v) == COMPONENT_REF
300 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
301 == RECORD_TYPE)
303 tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
304 for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
305 if (TREE_CODE (fld_chain) == FIELD_DECL)
306 break;
308 if (fld_chain)
310 v = NULL_TREE;
311 break;
313 v = TREE_OPERAND (v, 0);
315 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
316 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
317 != UNION_TYPE
318 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
319 != QUAL_UNION_TYPE)
320 break;
321 else
322 v = TREE_OPERAND (v, 0);
323 if (v != pt_var)
324 v = NULL_TREE;
325 else
326 v = pt_var;
327 break;
328 default:
329 v = pt_var;
330 break;
332 if (v == pt_var)
333 var = pt_var;
336 else
337 var = pt_var;
339 if (var != pt_var)
340 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
341 else if (!pt_var_size)
342 return unknown[object_size_type];
343 else
344 var_size = pt_var_size;
345 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
346 if (bytes != error_mark_node)
348 if (TREE_CODE (bytes) == INTEGER_CST
349 && tree_int_cst_lt (var_size, bytes))
350 bytes = size_zero_node;
351 else
352 bytes = size_binop (MINUS_EXPR, var_size, bytes);
354 if (var != pt_var
355 && pt_var_size
356 && TREE_CODE (pt_var) == MEM_REF
357 && bytes != error_mark_node)
359 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
360 if (bytes2 != error_mark_node)
362 if (TREE_CODE (bytes2) == INTEGER_CST
363 && tree_int_cst_lt (pt_var_size, bytes2))
364 bytes2 = size_zero_node;
365 else
366 bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
367 bytes = size_binop (MIN_EXPR, bytes, bytes2);
371 else if (!pt_var_size)
372 return unknown[object_size_type];
373 else
374 bytes = pt_var_size;
376 if (tree_fits_uhwi_p (bytes))
377 return tree_to_uhwi (bytes);
379 return unknown[object_size_type];
383 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
384 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
385 argument from __builtin_object_size. If unknown, return
386 unknown[object_size_type]. */
388 static unsigned HOST_WIDE_INT
389 alloc_object_size (const gcall *call, int object_size_type)
391 tree callee, bytes = NULL_TREE;
392 tree alloc_size;
393 int arg1 = -1, arg2 = -1;
395 gcc_assert (is_gimple_call (call));
397 callee = gimple_call_fndecl (call);
398 if (!callee)
399 return unknown[object_size_type];
401 alloc_size = lookup_attribute ("alloc_size",
402 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
403 if (alloc_size && TREE_VALUE (alloc_size))
405 tree p = TREE_VALUE (alloc_size);
407 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
408 if (TREE_CHAIN (p))
409 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
412 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
413 switch (DECL_FUNCTION_CODE (callee))
415 case BUILT_IN_CALLOC:
416 arg2 = 1;
417 /* fall through */
418 case BUILT_IN_MALLOC:
419 case BUILT_IN_ALLOCA:
420 case BUILT_IN_ALLOCA_WITH_ALIGN:
421 arg1 = 0;
422 default:
423 break;
426 if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
427 || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
428 || (arg2 >= 0
429 && (arg2 >= (int)gimple_call_num_args (call)
430 || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
431 return unknown[object_size_type];
433 if (arg2 >= 0)
434 bytes = size_binop (MULT_EXPR,
435 fold_convert (sizetype, gimple_call_arg (call, arg1)),
436 fold_convert (sizetype, gimple_call_arg (call, arg2)));
437 else if (arg1 >= 0)
438 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
440 if (bytes && tree_fits_uhwi_p (bytes))
441 return tree_to_uhwi (bytes);
443 return unknown[object_size_type];
447 /* If object size is propagated from one of function's arguments directly
448 to its return value, return that argument for GIMPLE_CALL statement CALL.
449 Otherwise return NULL. */
451 static tree
452 pass_through_call (const gcall *call)
454 tree callee = gimple_call_fndecl (call);
456 if (callee
457 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
458 switch (DECL_FUNCTION_CODE (callee))
460 case BUILT_IN_MEMCPY:
461 case BUILT_IN_MEMMOVE:
462 case BUILT_IN_MEMSET:
463 case BUILT_IN_STRCPY:
464 case BUILT_IN_STRNCPY:
465 case BUILT_IN_STRCAT:
466 case BUILT_IN_STRNCAT:
467 case BUILT_IN_MEMCPY_CHK:
468 case BUILT_IN_MEMMOVE_CHK:
469 case BUILT_IN_MEMSET_CHK:
470 case BUILT_IN_STRCPY_CHK:
471 case BUILT_IN_STRNCPY_CHK:
472 case BUILT_IN_STPNCPY_CHK:
473 case BUILT_IN_STRCAT_CHK:
474 case BUILT_IN_STRNCAT_CHK:
475 case BUILT_IN_ASSUME_ALIGNED:
476 if (gimple_call_num_args (call) >= 1)
477 return gimple_call_arg (call, 0);
478 break;
479 default:
480 break;
483 return NULL_TREE;
487 /* Compute __builtin_object_size value for PTR. OBJECT_SIZE_TYPE is the
488 second argument from __builtin_object_size. */
490 unsigned HOST_WIDE_INT
491 compute_builtin_object_size (tree ptr, int object_size_type)
493 gcc_assert (object_size_type >= 0 && object_size_type <= 3);
495 if (! offset_limit)
496 init_offset_limit ();
498 if (TREE_CODE (ptr) == ADDR_EXPR)
499 return addr_object_size (NULL, ptr, object_size_type);
501 if (TREE_CODE (ptr) == SSA_NAME
502 && POINTER_TYPE_P (TREE_TYPE (ptr))
503 && computed[object_size_type] != NULL)
505 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
507 struct object_size_info osi;
508 bitmap_iterator bi;
509 unsigned int i;
511 if (num_ssa_names > object_sizes[object_size_type].length ())
512 object_sizes[object_size_type].safe_grow (num_ssa_names);
513 if (dump_file)
515 fprintf (dump_file, "Computing %s %sobject size for ",
516 (object_size_type & 2) ? "minimum" : "maximum",
517 (object_size_type & 1) ? "sub" : "");
518 print_generic_expr (dump_file, ptr, dump_flags);
519 fprintf (dump_file, ":\n");
522 osi.visited = BITMAP_ALLOC (NULL);
523 osi.reexamine = BITMAP_ALLOC (NULL);
524 osi.object_size_type = object_size_type;
525 osi.depths = NULL;
526 osi.stack = NULL;
527 osi.tos = NULL;
529 /* First pass: walk UD chains, compute object sizes that
530 can be computed. osi.reexamine bitmap at the end will
531 contain what variables were found in dependency cycles
532 and therefore need to be reexamined. */
533 osi.pass = 0;
534 osi.changed = false;
535 collect_object_sizes_for (&osi, ptr);
537 /* Second pass: keep recomputing object sizes of variables
538 that need reexamination, until no object sizes are
539 increased or all object sizes are computed. */
540 if (! bitmap_empty_p (osi.reexamine))
542 bitmap reexamine = BITMAP_ALLOC (NULL);
544 /* If looking for minimum instead of maximum object size,
545 detect cases where a pointer is increased in a loop.
546 Although even without this detection pass 2 would eventually
547 terminate, it could take a long time. If a pointer is
548 increasing this way, we need to assume 0 object size.
549 E.g. p = &buf[0]; while (cond) p = p + 4; */
550 if (object_size_type & 2)
552 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
553 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
554 osi.tos = osi.stack;
555 osi.pass = 1;
556 /* collect_object_sizes_for is changing
557 osi.reexamine bitmap, so iterate over a copy. */
558 bitmap_copy (reexamine, osi.reexamine);
559 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
560 if (bitmap_bit_p (osi.reexamine, i))
561 check_for_plus_in_loops (&osi, ssa_name (i));
563 free (osi.depths);
564 osi.depths = NULL;
565 free (osi.stack);
566 osi.stack = NULL;
567 osi.tos = NULL;
572 osi.pass = 2;
573 osi.changed = false;
574 /* collect_object_sizes_for is changing
575 osi.reexamine bitmap, so iterate over a copy. */
576 bitmap_copy (reexamine, osi.reexamine);
577 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
578 if (bitmap_bit_p (osi.reexamine, i))
580 collect_object_sizes_for (&osi, ssa_name (i));
581 if (dump_file && (dump_flags & TDF_DETAILS))
583 fprintf (dump_file, "Reexamining ");
584 print_generic_expr (dump_file, ssa_name (i),
585 dump_flags);
586 fprintf (dump_file, "\n");
590 while (osi.changed);
592 BITMAP_FREE (reexamine);
594 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
595 bitmap_set_bit (computed[object_size_type], i);
597 /* Debugging dumps. */
598 if (dump_file)
600 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
601 if (object_sizes[object_size_type][i]
602 != unknown[object_size_type])
604 print_generic_expr (dump_file, ssa_name (i),
605 dump_flags);
606 fprintf (dump_file,
607 ": %s %sobject size "
608 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
609 (object_size_type & 2) ? "minimum" : "maximum",
610 (object_size_type & 1) ? "sub" : "",
611 object_sizes[object_size_type][i]);
615 BITMAP_FREE (osi.reexamine);
616 BITMAP_FREE (osi.visited);
619 return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
622 return unknown[object_size_type];
625 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
627 static void
628 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
630 int object_size_type = osi->object_size_type;
631 unsigned int varno = SSA_NAME_VERSION (ptr);
632 unsigned HOST_WIDE_INT bytes;
634 gcc_assert (object_sizes[object_size_type][varno]
635 != unknown[object_size_type]);
636 gcc_assert (osi->pass == 0);
638 if (TREE_CODE (value) == WITH_SIZE_EXPR)
639 value = TREE_OPERAND (value, 0);
641 /* Pointer variables should have been handled by merge_object_sizes. */
642 gcc_assert (TREE_CODE (value) != SSA_NAME
643 || !POINTER_TYPE_P (TREE_TYPE (value)));
645 if (TREE_CODE (value) == ADDR_EXPR)
646 bytes = addr_object_size (osi, value, object_size_type);
647 else
648 bytes = unknown[object_size_type];
650 if ((object_size_type & 2) == 0)
652 if (object_sizes[object_size_type][varno] < bytes)
653 object_sizes[object_size_type][varno] = bytes;
655 else
657 if (object_sizes[object_size_type][varno] > bytes)
658 object_sizes[object_size_type][varno] = bytes;
663 /* Compute object_sizes for PTR, defined to the result of a call. */
665 static void
666 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
668 int object_size_type = osi->object_size_type;
669 unsigned int varno = SSA_NAME_VERSION (ptr);
670 unsigned HOST_WIDE_INT bytes;
672 gcc_assert (is_gimple_call (call));
674 gcc_assert (object_sizes[object_size_type][varno]
675 != unknown[object_size_type]);
676 gcc_assert (osi->pass == 0);
678 bytes = alloc_object_size (call, object_size_type);
680 if ((object_size_type & 2) == 0)
682 if (object_sizes[object_size_type][varno] < bytes)
683 object_sizes[object_size_type][varno] = bytes;
685 else
687 if (object_sizes[object_size_type][varno] > bytes)
688 object_sizes[object_size_type][varno] = bytes;
693 /* Compute object_sizes for PTR, defined to an unknown value. */
695 static void
696 unknown_object_size (struct object_size_info *osi, tree ptr)
698 int object_size_type = osi->object_size_type;
699 unsigned int varno = SSA_NAME_VERSION (ptr);
700 unsigned HOST_WIDE_INT bytes;
702 gcc_assert (object_sizes[object_size_type][varno]
703 != unknown[object_size_type]);
704 gcc_assert (osi->pass == 0);
706 bytes = unknown[object_size_type];
708 if ((object_size_type & 2) == 0)
710 if (object_sizes[object_size_type][varno] < bytes)
711 object_sizes[object_size_type][varno] = bytes;
713 else
715 if (object_sizes[object_size_type][varno] > bytes)
716 object_sizes[object_size_type][varno] = bytes;
721 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
722 the object size might need reexamination later. */
724 static bool
725 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
726 unsigned HOST_WIDE_INT offset)
728 int object_size_type = osi->object_size_type;
729 unsigned int varno = SSA_NAME_VERSION (dest);
730 unsigned HOST_WIDE_INT orig_bytes;
732 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
733 return false;
734 if (offset >= offset_limit)
736 object_sizes[object_size_type][varno] = unknown[object_size_type];
737 return false;
740 if (osi->pass == 0)
741 collect_object_sizes_for (osi, orig);
743 orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
744 if (orig_bytes != unknown[object_size_type])
745 orig_bytes = (offset > orig_bytes)
746 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;
748 if ((object_size_type & 2) == 0)
750 if (object_sizes[object_size_type][varno] < orig_bytes)
752 object_sizes[object_size_type][varno] = orig_bytes;
753 osi->changed = true;
756 else
758 if (object_sizes[object_size_type][varno] > orig_bytes)
760 object_sizes[object_size_type][varno] = orig_bytes;
761 osi->changed = true;
764 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
768 /* Compute object_sizes for VAR, defined to the result of an assignment
769 with operator POINTER_PLUS_EXPR. Return true if the object size might
770 need reexamination later. */
772 static bool
773 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
775 int object_size_type = osi->object_size_type;
776 unsigned int varno = SSA_NAME_VERSION (var);
777 unsigned HOST_WIDE_INT bytes;
778 tree op0, op1;
780 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
782 op0 = gimple_assign_rhs1 (stmt);
783 op1 = gimple_assign_rhs2 (stmt);
785 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
787 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
788 gcc_assert (TREE_CODE (rhs) == MEM_REF);
789 op0 = TREE_OPERAND (rhs, 0);
790 op1 = TREE_OPERAND (rhs, 1);
792 else
793 gcc_unreachable ();
795 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
796 return false;
798 /* Handle PTR + OFFSET here. */
799 if (TREE_CODE (op1) == INTEGER_CST
800 && (TREE_CODE (op0) == SSA_NAME
801 || TREE_CODE (op0) == ADDR_EXPR))
803 if (! tree_fits_uhwi_p (op1))
804 bytes = unknown[object_size_type];
805 else if (TREE_CODE (op0) == SSA_NAME)
806 return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
807 else
809 unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);
811 /* op0 will be ADDR_EXPR here. */
812 bytes = addr_object_size (osi, op0, object_size_type);
813 if (bytes == unknown[object_size_type])
815 else if (off > offset_limit)
816 bytes = unknown[object_size_type];
817 else if (off > bytes)
818 bytes = 0;
819 else
820 bytes -= off;
823 else
824 bytes = unknown[object_size_type];
826 if ((object_size_type & 2) == 0)
828 if (object_sizes[object_size_type][varno] < bytes)
829 object_sizes[object_size_type][varno] = bytes;
831 else
833 if (object_sizes[object_size_type][varno] > bytes)
834 object_sizes[object_size_type][varno] = bytes;
836 return false;
840 /* Compute object_sizes for VAR, defined at STMT, which is
841 a COND_EXPR. Return true if the object size might need reexamination
842 later. */
844 static bool
845 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
847 tree then_, else_;
848 int object_size_type = osi->object_size_type;
849 unsigned int varno = SSA_NAME_VERSION (var);
850 bool reexamine = false;
852 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
854 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
855 return false;
857 then_ = gimple_assign_rhs2 (stmt);
858 else_ = gimple_assign_rhs3 (stmt);
860 if (TREE_CODE (then_) == SSA_NAME)
861 reexamine |= merge_object_sizes (osi, var, then_, 0);
862 else
863 expr_object_size (osi, var, then_);
865 if (TREE_CODE (else_) == SSA_NAME)
866 reexamine |= merge_object_sizes (osi, var, else_, 0);
867 else
868 expr_object_size (osi, var, else_);
870 return reexamine;
873 /* Compute object sizes for VAR.
874 For ADDR_EXPR an object size is the number of remaining bytes
875 to the end of the object (where what is considered an object depends on
876 OSI->object_size_type).
877 For allocation GIMPLE_CALL like malloc or calloc object size is the size
878 of the allocation.
879 For POINTER_PLUS_EXPR where second operand is a constant integer,
880 object size is object size of the first operand minus the constant.
881 If the constant is bigger than the number of remaining bytes until the
882 end of the object, object size is 0, but if it is instead a pointer
883 subtraction, object size is unknown[object_size_type].
884 To differentiate addition from subtraction, ADDR_EXPR returns
885 unknown[object_size_type] for all objects bigger than half of the address
886 space, and constants less than half of the address space are considered
887 addition, while bigger constants subtraction.
888 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
889 object size is object size of that argument.
890 Otherwise, object size is the maximum of object sizes of variables
891 that it might be set to. */
893 static void
894 collect_object_sizes_for (struct object_size_info *osi, tree var)
896 int object_size_type = osi->object_size_type;
897 unsigned int varno = SSA_NAME_VERSION (var);
898 gimple *stmt;
899 bool reexamine;
901 if (bitmap_bit_p (computed[object_size_type], varno))
902 return;
904 if (osi->pass == 0)
906 if (bitmap_set_bit (osi->visited, varno))
908 object_sizes[object_size_type][varno]
909 = (object_size_type & 2) ? -1 : 0;
911 else
913 /* Found a dependency loop. Mark the variable for later
914 re-examination. */
915 bitmap_set_bit (osi->reexamine, varno);
916 if (dump_file && (dump_flags & TDF_DETAILS))
918 fprintf (dump_file, "Found a dependency loop at ");
919 print_generic_expr (dump_file, var, dump_flags);
920 fprintf (dump_file, "\n");
922 return;
926 if (dump_file && (dump_flags & TDF_DETAILS))
928 fprintf (dump_file, "Visiting use-def links for ");
929 print_generic_expr (dump_file, var, dump_flags);
930 fprintf (dump_file, "\n");
933 stmt = SSA_NAME_DEF_STMT (var);
934 reexamine = false;
936 switch (gimple_code (stmt))
938 case GIMPLE_ASSIGN:
940 tree rhs = gimple_assign_rhs1 (stmt);
941 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
942 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
943 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
944 reexamine = plus_stmt_object_size (osi, var, stmt);
945 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
946 reexamine = cond_expr_object_size (osi, var, stmt);
947 else if (gimple_assign_single_p (stmt)
948 || gimple_assign_unary_nop_p (stmt))
950 if (TREE_CODE (rhs) == SSA_NAME
951 && POINTER_TYPE_P (TREE_TYPE (rhs)))
952 reexamine = merge_object_sizes (osi, var, rhs, 0);
953 else
954 expr_object_size (osi, var, rhs);
956 else
957 unknown_object_size (osi, var);
958 break;
961 case GIMPLE_CALL:
963 gcall *call_stmt = as_a <gcall *> (stmt);
964 tree arg = pass_through_call (call_stmt);
965 if (arg)
967 if (TREE_CODE (arg) == SSA_NAME
968 && POINTER_TYPE_P (TREE_TYPE (arg)))
969 reexamine = merge_object_sizes (osi, var, arg, 0);
970 else
971 expr_object_size (osi, var, arg);
973 else
974 call_object_size (osi, var, call_stmt);
975 break;
978 case GIMPLE_ASM:
979 /* Pointers defined by __asm__ statements can point anywhere. */
980 object_sizes[object_size_type][varno] = unknown[object_size_type];
981 break;
983 case GIMPLE_NOP:
984 if (SSA_NAME_VAR (var)
985 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
986 expr_object_size (osi, var, SSA_NAME_VAR (var));
987 else
988 /* Uninitialized SSA names point nowhere. */
989 object_sizes[object_size_type][varno] = unknown[object_size_type];
990 break;
992 case GIMPLE_PHI:
994 unsigned i;
996 for (i = 0; i < gimple_phi_num_args (stmt); i++)
998 tree rhs = gimple_phi_arg (stmt, i)->def;
1000 if (object_sizes[object_size_type][varno]
1001 == unknown[object_size_type])
1002 break;
1004 if (TREE_CODE (rhs) == SSA_NAME)
1005 reexamine |= merge_object_sizes (osi, var, rhs, 0);
1006 else if (osi->pass == 0)
1007 expr_object_size (osi, var, rhs);
1009 break;
1012 default:
1013 gcc_unreachable ();
1016 if (! reexamine
1017 || object_sizes[object_size_type][varno] == unknown[object_size_type])
1019 bitmap_set_bit (computed[object_size_type], varno);
1020 bitmap_clear_bit (osi->reexamine, varno);
1022 else
1024 bitmap_set_bit (osi->reexamine, varno);
1025 if (dump_file && (dump_flags & TDF_DETAILS))
1027 fprintf (dump_file, "Need to reexamine ");
1028 print_generic_expr (dump_file, var, dump_flags);
1029 fprintf (dump_file, "\n");
1035 /* Helper function for check_for_plus_in_loops. Called recursively
1036 to detect loops. */
1038 static void
1039 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1040 unsigned int depth)
1042 gimple *stmt = SSA_NAME_DEF_STMT (var);
1043 unsigned int varno = SSA_NAME_VERSION (var);
1045 if (osi->depths[varno])
1047 if (osi->depths[varno] != depth)
1049 unsigned int *sp;
1051 /* Found a loop involving pointer addition. */
1052 for (sp = osi->tos; sp > osi->stack; )
1054 --sp;
1055 bitmap_clear_bit (osi->reexamine, *sp);
1056 bitmap_set_bit (computed[osi->object_size_type], *sp);
1057 object_sizes[osi->object_size_type][*sp] = 0;
1058 if (*sp == varno)
1059 break;
1062 return;
1064 else if (! bitmap_bit_p (osi->reexamine, varno))
1065 return;
1067 osi->depths[varno] = depth;
1068 *osi->tos++ = varno;
1070 switch (gimple_code (stmt))
1073 case GIMPLE_ASSIGN:
1075 if ((gimple_assign_single_p (stmt)
1076 || gimple_assign_unary_nop_p (stmt))
1077 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1079 tree rhs = gimple_assign_rhs1 (stmt);
1081 check_for_plus_in_loops_1 (osi, rhs, depth);
1083 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1085 tree basevar = gimple_assign_rhs1 (stmt);
1086 tree cst = gimple_assign_rhs2 (stmt);
1088 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1090 check_for_plus_in_loops_1 (osi, basevar,
1091 depth + !integer_zerop (cst));
1093 else
1094 gcc_unreachable ();
1095 break;
1098 case GIMPLE_CALL:
1100 gcall *call_stmt = as_a <gcall *> (stmt);
1101 tree arg = pass_through_call (call_stmt);
1102 if (arg)
1104 if (TREE_CODE (arg) == SSA_NAME)
1105 check_for_plus_in_loops_1 (osi, arg, depth);
1106 else
1107 gcc_unreachable ();
1109 break;
1112 case GIMPLE_PHI:
1114 unsigned i;
1116 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1118 tree rhs = gimple_phi_arg (stmt, i)->def;
1120 if (TREE_CODE (rhs) == SSA_NAME)
1121 check_for_plus_in_loops_1 (osi, rhs, depth);
1123 break;
1126 default:
1127 gcc_unreachable ();
1130 osi->depths[varno] = 0;
1131 osi->tos--;
1135 /* Check if some pointer we are computing object size of is being increased
1136 within a loop. If yes, assume all the SSA variables participating in
1137 that loop have minimum object sizes 0. */
1139 static void
1140 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1142 gimple *stmt = SSA_NAME_DEF_STMT (var);
1144 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1145 and looked for a POINTER_PLUS_EXPR in the pass-through
1146 argument, if any. In GIMPLE, however, such an expression
1147 is not a valid call operand. */
1149 if (is_gimple_assign (stmt)
1150 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1152 tree basevar = gimple_assign_rhs1 (stmt);
1153 tree cst = gimple_assign_rhs2 (stmt);
1155 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1157 if (integer_zerop (cst))
1158 return;
1160 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1161 *osi->tos++ = SSA_NAME_VERSION (basevar);
1162 check_for_plus_in_loops_1 (osi, var, 2);
1163 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1164 osi->tos--;
1169 /* Initialize data structures for the object size computation. */
1171 void
1172 init_object_sizes (void)
1174 int object_size_type;
1176 if (computed[0])
1177 return;
1179 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1181 object_sizes[object_size_type].safe_grow (num_ssa_names);
1182 computed[object_size_type] = BITMAP_ALLOC (NULL);
1185 init_offset_limit ();
1189 /* Destroy data structures after the object size computation. */
1191 static void
1192 fini_object_sizes (void)
1194 int object_size_type;
1196 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1198 object_sizes[object_size_type].release ();
1199 BITMAP_FREE (computed[object_size_type]);
1204 /* Simple pass to optimize all __builtin_object_size () builtins. */
1206 namespace {
1208 const pass_data pass_data_object_sizes =
1210 GIMPLE_PASS, /* type */
1211 "objsz", /* name */
1212 OPTGROUP_NONE, /* optinfo_flags */
1213 TV_NONE, /* tv_id */
1214 ( PROP_cfg | PROP_ssa ), /* properties_required */
1215 0, /* properties_provided */
1216 0, /* properties_destroyed */
1217 0, /* todo_flags_start */
1218 0, /* todo_flags_finish */
1221 class pass_object_sizes : public gimple_opt_pass
1223 public:
1224 pass_object_sizes (gcc::context *ctxt)
1225 : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
1228 /* opt_pass methods: */
1229 opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
1230 void set_pass_param (unsigned int n, bool param)
1232 gcc_assert (n == 0);
1233 insert_min_max_p = param;
1235 virtual unsigned int execute (function *);
1237 private:
1238 /* Determines whether the pass instance creates MIN/MAX_EXPRs. */
1239 bool insert_min_max_p;
1240 }; // class pass_object_sizes
1242 /* Dummy valueize function. */
1244 static tree
1245 do_valueize (tree t)
1247 return t;
1250 unsigned int
1251 pass_object_sizes::execute (function *fun)
1253 basic_block bb;
1254 FOR_EACH_BB_FN (bb, fun)
1256 gimple_stmt_iterator i;
1257 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1259 tree result;
1260 gimple *call = gsi_stmt (i);
1261 if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
1262 continue;
1264 init_object_sizes ();
1266 /* If insert_min_max_p, only attempt to fold
1267 __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
1268 and rather than folding the builtin to the constant if any,
1269 create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
1270 call result and the computed constant. */
1271 if (insert_min_max_p)
1273 tree ost = gimple_call_arg (call, 1);
1274 if (tree_fits_uhwi_p (ost))
1276 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
1277 tree ptr = gimple_call_arg (call, 0);
1278 tree lhs = gimple_call_lhs (call);
1279 if ((object_size_type == 1 || object_size_type == 3)
1280 && (TREE_CODE (ptr) == ADDR_EXPR
1281 || TREE_CODE (ptr) == SSA_NAME)
1282 && lhs)
1284 tree type = TREE_TYPE (lhs);
1285 unsigned HOST_WIDE_INT bytes
1286 = compute_builtin_object_size (ptr, object_size_type);
1287 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type == 1
1288 ? -1 : 0)
1289 && wi::fits_to_tree_p (bytes, type))
1291 tree tem = make_ssa_name (type);
1292 gimple_call_set_lhs (call, tem);
1293 enum tree_code code
1294 = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
1295 tree cst = build_int_cstu (type, bytes);
1296 gimple *g
1297 = gimple_build_assign (lhs, code, tem, cst);
1298 gsi_insert_after (&i, g, GSI_NEW_STMT);
1299 update_stmt (call);
1303 continue;
1306 tree lhs = gimple_call_lhs (call);
1307 if (!lhs)
1308 continue;
1310 result = gimple_fold_stmt_to_constant (call, do_valueize);
1311 if (!result)
1313 tree ost = gimple_call_arg (call, 1);
1315 if (tree_fits_uhwi_p (ost))
1317 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
1319 if (object_size_type < 2)
1320 result = fold_convert (size_type_node,
1321 integer_minus_one_node);
1322 else if (object_size_type < 4)
1323 result = build_zero_cst (size_type_node);
1326 if (!result)
1327 continue;
1330 gcc_assert (TREE_CODE (result) == INTEGER_CST);
1332 if (dump_file && (dump_flags & TDF_DETAILS))
1334 fprintf (dump_file, "Simplified\n ");
1335 print_gimple_stmt (dump_file, call, 0, dump_flags);
1336 fprintf (dump_file, " to ");
1337 print_generic_expr (dump_file, result, 0);
1338 fprintf (dump_file, "\n");
1341 /* Propagate into all uses and fold those stmts. */
1342 replace_uses_by (lhs, result);
1346 fini_object_sizes ();
1347 return 0;
1350 } // anon namespace
1352 gimple_opt_pass *
1353 make_pass_object_sizes (gcc::context *ctxt)
1355 return new pass_object_sizes (ctxt);