gcc/tree-object-size.c
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2017 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35 #include "stringpool.h"
36 #include "attribs.h"
38 struct object_size_info
40 int object_size_type;
41 unsigned char pass;
42 bool changed;
43 bitmap visited, reexamine;
44 unsigned int *depths;
45 unsigned int *stack, *tos;
48 static const unsigned HOST_WIDE_INT unknown[4] = {
49 HOST_WIDE_INT_M1U,
50 HOST_WIDE_INT_M1U,
51 0,
52 0
53 };
55 static tree compute_object_offset (const_tree, const_tree);
56 static bool addr_object_size (struct object_size_info *,
57 const_tree, int, unsigned HOST_WIDE_INT *);
58 static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
59 static tree pass_through_call (const gcall *);
60 static void collect_object_sizes_for (struct object_size_info *, tree);
61 static void expr_object_size (struct object_size_info *, tree, tree);
62 static bool merge_object_sizes (struct object_size_info *, tree, tree,
63 unsigned HOST_WIDE_INT);
64 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
65 static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
66 static void init_offset_limit (void);
67 static void check_for_plus_in_loops (struct object_size_info *, tree);
68 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
69 unsigned int);
71 /* object_sizes[0] is upper bound for number of bytes till the end of
72 the object.
73 object_sizes[1] is upper bound for number of bytes till the end of
74 the subobject (innermost array or field with address taken).
75 object_sizes[2] is lower bound for number of bytes till the end of
76 the object and object_sizes[3] lower bound for subobject. */
77 static vec<unsigned HOST_WIDE_INT> object_sizes[4];
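/* Illustration of the four size types (example only, not used by the
   code below):

     struct A { char buf[10]; char pad[4]; } a;
     char *p = &a.buf[2];

   __builtin_object_size (p, 0) and (p, 2) are 12, the bytes remaining
   in the whole object a, while (p, 1) and (p, 3) are 8, the bytes
   remaining in the subobject a.buf.  The maximum and minimum bounds
   differ only when p may point into one of several objects.  */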
79 /* Bitmaps recording which object sizes have been computed already. */
80 static bitmap computed[4];
82 /* Maximum value of offset we consider to be addition. */
83 static unsigned HOST_WIDE_INT offset_limit;
86 /* Initialize OFFSET_LIMIT variable. */
87 static void
88 init_offset_limit (void)
90 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
91 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
92 else
93 offset_limit = -1;
94 offset_limit /= 2;
98 /* Compute offset of EXPR within VAR. Return error_mark_node
99 if unknown. */
101 static tree
102 compute_object_offset (const_tree expr, const_tree var)
104 enum tree_code code = PLUS_EXPR;
105 tree base, off, t;
107 if (expr == var)
108 return size_zero_node;
110 switch (TREE_CODE (expr))
112 case COMPONENT_REF:
113 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
114 if (base == error_mark_node)
115 return base;
117 t = TREE_OPERAND (expr, 1);
118 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
119 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
120 / BITS_PER_UNIT));
121 break;
123 case REALPART_EXPR:
124 CASE_CONVERT:
125 case VIEW_CONVERT_EXPR:
126 case NON_LVALUE_EXPR:
127 return compute_object_offset (TREE_OPERAND (expr, 0), var);
129 case IMAGPART_EXPR:
130 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
131 if (base == error_mark_node)
132 return base;
134 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
135 break;
137 case ARRAY_REF:
138 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
139 if (base == error_mark_node)
140 return base;
142 t = TREE_OPERAND (expr, 1);
143 tree low_bound, unit_size;
144 low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
145 unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
146 if (! integer_zerop (low_bound))
147 t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
148 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
150 code = MINUS_EXPR;
151 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
153 t = fold_convert (sizetype, t);
154 off = size_binop (MULT_EXPR, unit_size, t);
155 break;
157 case MEM_REF:
158 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
159 return wide_int_to_tree (sizetype, mem_ref_offset (expr));
161 default:
162 return error_mark_node;
165 return size_binop (code, base, off);
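/* For example, for struct A { char buf[10]; char pad[4]; } a, the offset
   of a.pad[1] within a is built from the COMPONENT_REF and ARRAY_REF
   cases above as the byte offset of field pad (10) plus 1 * 1 (array
   index times element size), giving size_int (11).  */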
169 /* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
170 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
171 If unknown, return unknown[object_size_type]. */
173 static bool
174 addr_object_size (struct object_size_info *osi, const_tree ptr,
175 int object_size_type, unsigned HOST_WIDE_INT *psize)
177 tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;
179 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
181 /* Set to unknown and overwrite just before returning if the size
182 could be determined. */
183 *psize = unknown[object_size_type];
185 pt_var = TREE_OPERAND (ptr, 0);
186 while (handled_component_p (pt_var))
187 pt_var = TREE_OPERAND (pt_var, 0);
189 if (pt_var
190 && TREE_CODE (pt_var) == MEM_REF)
192 unsigned HOST_WIDE_INT sz;
194 if (!osi || (object_size_type & 1) != 0
195 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
197 compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
198 object_size_type & ~1, &sz);
200 else
202 tree var = TREE_OPERAND (pt_var, 0);
203 if (osi->pass == 0)
204 collect_object_sizes_for (osi, var);
205 if (bitmap_bit_p (computed[object_size_type],
206 SSA_NAME_VERSION (var)))
207 sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
208 else
209 sz = unknown[object_size_type];
211 if (sz != unknown[object_size_type])
213 offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
214 if (wi::neg_p (dsz))
215 sz = 0;
216 else if (wi::fits_uhwi_p (dsz))
217 sz = dsz.to_uhwi ();
218 else
219 sz = unknown[object_size_type];
222 if (sz != unknown[object_size_type] && sz < offset_limit)
223 pt_var_size = size_int (sz);
225 else if (pt_var
226 && DECL_P (pt_var)
227 && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
228 && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
229 pt_var_size = DECL_SIZE_UNIT (pt_var);
230 else if (pt_var
231 && TREE_CODE (pt_var) == STRING_CST
232 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
233 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
234 && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
235 < offset_limit)
236 pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
237 else
238 return false;
240 if (pt_var != TREE_OPERAND (ptr, 0))
242 tree var;
244 if (object_size_type & 1)
246 var = TREE_OPERAND (ptr, 0);
248 while (var != pt_var
249 && TREE_CODE (var) != BIT_FIELD_REF
250 && TREE_CODE (var) != COMPONENT_REF
251 && TREE_CODE (var) != ARRAY_REF
252 && TREE_CODE (var) != ARRAY_RANGE_REF
253 && TREE_CODE (var) != REALPART_EXPR
254 && TREE_CODE (var) != IMAGPART_EXPR)
255 var = TREE_OPERAND (var, 0);
256 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
257 var = TREE_OPERAND (var, 0);
258 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
259 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
260 || (pt_var_size
261 && tree_int_cst_lt (pt_var_size,
262 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
263 var = pt_var;
264 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
266 tree v = var;
267 /* For &X->fld, compute object size only if fld isn't the last
268 field, as struct { int i; char c[1]; } is often used instead
269 of flexible array member. */
270 while (v && v != pt_var)
271 switch (TREE_CODE (v))
273 case ARRAY_REF:
274 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
275 && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
277 tree domain
278 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
279 if (domain
280 && TYPE_MAX_VALUE (domain)
281 && TREE_CODE (TYPE_MAX_VALUE (domain))
282 == INTEGER_CST
283 && tree_int_cst_lt (TREE_OPERAND (v, 1),
284 TYPE_MAX_VALUE (domain)))
286 v = NULL_TREE;
287 break;
290 v = TREE_OPERAND (v, 0);
291 break;
292 case REALPART_EXPR:
293 case IMAGPART_EXPR:
294 v = NULL_TREE;
295 break;
296 case COMPONENT_REF:
297 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
299 v = NULL_TREE;
300 break;
302 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
303 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
304 != UNION_TYPE
305 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
306 != QUAL_UNION_TYPE)
307 break;
308 else
309 v = TREE_OPERAND (v, 0);
310 if (TREE_CODE (v) == COMPONENT_REF
311 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
312 == RECORD_TYPE)
314 tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
315 for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
316 if (TREE_CODE (fld_chain) == FIELD_DECL)
317 break;
319 if (fld_chain)
321 v = NULL_TREE;
322 break;
324 v = TREE_OPERAND (v, 0);
326 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
327 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
328 != UNION_TYPE
329 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
330 != QUAL_UNION_TYPE)
331 break;
332 else
333 v = TREE_OPERAND (v, 0);
334 if (v != pt_var)
335 v = NULL_TREE;
336 else
337 v = pt_var;
338 break;
339 default:
340 v = pt_var;
341 break;
343 if (v == pt_var)
344 var = pt_var;
347 else
348 var = pt_var;
350 if (var != pt_var)
351 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
352 else if (!pt_var_size)
353 return false;
354 else
355 var_size = pt_var_size;
356 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
357 if (bytes != error_mark_node)
359 if (TREE_CODE (bytes) == INTEGER_CST
360 && tree_int_cst_lt (var_size, bytes))
361 bytes = size_zero_node;
362 else
363 bytes = size_binop (MINUS_EXPR, var_size, bytes);
365 if (var != pt_var
366 && pt_var_size
367 && TREE_CODE (pt_var) == MEM_REF
368 && bytes != error_mark_node)
370 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
371 if (bytes2 != error_mark_node)
373 if (TREE_CODE (bytes2) == INTEGER_CST
374 && tree_int_cst_lt (pt_var_size, bytes2))
375 bytes2 = size_zero_node;
376 else
377 bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
378 bytes = size_binop (MIN_EXPR, bytes, bytes2);
382 else if (!pt_var_size)
383 return false;
384 else
385 bytes = pt_var_size;
387 if (tree_fits_uhwi_p (bytes))
389 *psize = tree_to_uhwi (bytes);
390 return true;
393 return false;
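/* A note on the trailing-array heuristic above: given

     struct S { int i; char c[1]; } *p;

   the subobject size of &p->c[0] is not limited to the declared size of
   c, because c is the last field of a structure accessed through a
   pointer and such layouts are commonly used in place of a flexible
   array member; when the size of the whole *p object is known, that
   size is used instead.  */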
397 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
398 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
399 argument from __builtin_object_size. If unknown, return
400 unknown[object_size_type]. */
402 static unsigned HOST_WIDE_INT
403 alloc_object_size (const gcall *call, int object_size_type)
405 tree callee, bytes = NULL_TREE;
406 tree alloc_size;
407 int arg1 = -1, arg2 = -1;
409 gcc_assert (is_gimple_call (call));
411 callee = gimple_call_fndecl (call);
412 if (!callee)
413 return unknown[object_size_type];
415 alloc_size = lookup_attribute ("alloc_size",
416 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
417 if (alloc_size && TREE_VALUE (alloc_size))
419 tree p = TREE_VALUE (alloc_size);
421 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
422 if (TREE_CHAIN (p))
423 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
426 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
427 switch (DECL_FUNCTION_CODE (callee))
429 case BUILT_IN_CALLOC:
430 arg2 = 1;
431 /* fall through */
432 case BUILT_IN_MALLOC:
433 case BUILT_IN_ALLOCA:
434 case BUILT_IN_ALLOCA_WITH_ALIGN:
435 arg1 = 0;
436 default:
437 break;
440 if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
441 || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
442 || (arg2 >= 0
443 && (arg2 >= (int)gimple_call_num_args (call)
444 || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
445 return unknown[object_size_type];
447 if (arg2 >= 0)
448 bytes = size_binop (MULT_EXPR,
449 fold_convert (sizetype, gimple_call_arg (call, arg1)),
450 fold_convert (sizetype, gimple_call_arg (call, arg2)));
451 else if (arg1 >= 0)
452 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
454 if (bytes && tree_fits_uhwi_p (bytes))
455 return tree_to_uhwi (bytes);
457 return unknown[object_size_type];
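/* Example of the alloc_size attribute handled above (the declarations
   are illustrative, not part of GCC):

     void *my_alloc (size_t) __attribute__ ((alloc_size (1)));
     void *my_calloc (size_t, size_t) __attribute__ ((alloc_size (1, 2)));

   __builtin_object_size (my_alloc (32), 0) folds to 32 and
   __builtin_object_size (my_calloc (4, 8), 0) folds to 4 * 8 == 32,
   provided the arguments are integer constants; otherwise the result is
   unknown[object_size_type].  */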
461 /* If object size is propagated from one of the function's arguments directly
462 to its return value, return that argument for GIMPLE_CALL statement CALL.
463 Otherwise return NULL. */
465 static tree
466 pass_through_call (const gcall *call)
468 tree callee = gimple_call_fndecl (call);
470 if (callee
471 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
472 switch (DECL_FUNCTION_CODE (callee))
474 case BUILT_IN_MEMCPY:
475 case BUILT_IN_MEMMOVE:
476 case BUILT_IN_MEMSET:
477 case BUILT_IN_STRCPY:
478 case BUILT_IN_STRNCPY:
479 case BUILT_IN_STRCAT:
480 case BUILT_IN_STRNCAT:
481 case BUILT_IN_MEMCPY_CHK:
482 case BUILT_IN_MEMMOVE_CHK:
483 case BUILT_IN_MEMSET_CHK:
484 case BUILT_IN_STRCPY_CHK:
485 case BUILT_IN_STRNCPY_CHK:
486 case BUILT_IN_STPNCPY_CHK:
487 case BUILT_IN_STRCAT_CHK:
488 case BUILT_IN_STRNCAT_CHK:
489 case BUILT_IN_ASSUME_ALIGNED:
490 if (gimple_call_num_args (call) >= 1)
491 return gimple_call_arg (call, 0);
492 break;
493 default:
494 break;
497 return NULL_TREE;
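/* For instance, because memcpy returns its first argument,

     char buf[8];
     char *p = (char *) memcpy (buf, src, 4);

   gives p the object size of buf (8 bytes), even though p is defined by
   a call rather than by an ADDR_EXPR.  */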
501 /* Compute __builtin_object_size value for PTR and set *PSIZE to
502 the resulting value. OBJECT_SIZE_TYPE is the second argument
503 to __builtin_object_size. Return true on success and false
504 when the object size could not be determined. */
506 bool
507 compute_builtin_object_size (tree ptr, int object_size_type,
508 unsigned HOST_WIDE_INT *psize)
510 gcc_assert (object_size_type >= 0 && object_size_type <= 3);
512 /* Set to unknown and overwrite just before returning if the size
513 could be determined. */
514 *psize = unknown[object_size_type];
516 if (! offset_limit)
517 init_offset_limit ();
519 if (TREE_CODE (ptr) == ADDR_EXPR)
520 return addr_object_size (NULL, ptr, object_size_type, psize);
522 if (TREE_CODE (ptr) != SSA_NAME
523 || !POINTER_TYPE_P (TREE_TYPE (ptr)))
524 return false;
526 if (computed[object_size_type] == NULL)
528 if (optimize || object_size_type & 1)
529 return false;
531 /* When not optimizing, rather than failing, make a small effort
532 to determine the object size without the full benefit of
533 the (costly) computation below. */
534 gimple *def = SSA_NAME_DEF_STMT (ptr);
535 if (gimple_code (def) == GIMPLE_ASSIGN)
537 tree_code code = gimple_assign_rhs_code (def);
538 if (code == POINTER_PLUS_EXPR)
540 tree offset = gimple_assign_rhs2 (def);
541 ptr = gimple_assign_rhs1 (def);
543 if (tree_fits_shwi_p (offset)
544 && compute_builtin_object_size (ptr, object_size_type, psize))
546 /* Return zero when the offset is out of bounds. */
547 unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
548 *psize = off < *psize ? *psize - off : 0;
549 return true;
553 return false;
556 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
558 struct object_size_info osi;
559 bitmap_iterator bi;
560 unsigned int i;
562 if (num_ssa_names > object_sizes[object_size_type].length ())
563 object_sizes[object_size_type].safe_grow (num_ssa_names);
564 if (dump_file)
566 fprintf (dump_file, "Computing %s %sobject size for ",
567 (object_size_type & 2) ? "minimum" : "maximum",
568 (object_size_type & 1) ? "sub" : "");
569 print_generic_expr (dump_file, ptr, dump_flags);
570 fprintf (dump_file, ":\n");
573 osi.visited = BITMAP_ALLOC (NULL);
574 osi.reexamine = BITMAP_ALLOC (NULL);
575 osi.object_size_type = object_size_type;
576 osi.depths = NULL;
577 osi.stack = NULL;
578 osi.tos = NULL;
580 /* First pass: walk UD chains, compute object sizes that
581 can be computed. osi.reexamine bitmap at the end will
582 contain what variables were found in dependency cycles
583 and therefore need to be reexamined. */
584 osi.pass = 0;
585 osi.changed = false;
586 collect_object_sizes_for (&osi, ptr);
588 /* Second pass: keep recomputing object sizes of variables
589 that need reexamination, until no object sizes are
590 increased or all object sizes are computed. */
591 if (! bitmap_empty_p (osi.reexamine))
593 bitmap reexamine = BITMAP_ALLOC (NULL);
595 /* If looking for minimum instead of maximum object size,
596 detect cases where a pointer is increased in a loop.
597 Although even without this detection pass 2 would eventually
598 terminate, it could take a long time. If a pointer is
599 increasing this way, we need to assume 0 object size.
600 E.g. p = &buf[0]; while (cond) p = p + 4; */
601 if (object_size_type & 2)
603 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
604 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
605 osi.tos = osi.stack;
606 osi.pass = 1;
607 /* collect_object_sizes_for is changing
608 osi.reexamine bitmap, so iterate over a copy. */
609 bitmap_copy (reexamine, osi.reexamine);
610 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
611 if (bitmap_bit_p (osi.reexamine, i))
612 check_for_plus_in_loops (&osi, ssa_name (i));
614 free (osi.depths);
615 osi.depths = NULL;
616 free (osi.stack);
617 osi.stack = NULL;
618 osi.tos = NULL;
621 do
623 osi.pass = 2;
624 osi.changed = false;
625 /* collect_object_sizes_for is changing
626 osi.reexamine bitmap, so iterate over a copy. */
627 bitmap_copy (reexamine, osi.reexamine);
628 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
629 if (bitmap_bit_p (osi.reexamine, i))
631 collect_object_sizes_for (&osi, ssa_name (i));
632 if (dump_file && (dump_flags & TDF_DETAILS))
634 fprintf (dump_file, "Reexamining ");
635 print_generic_expr (dump_file, ssa_name (i),
636 dump_flags);
637 fprintf (dump_file, "\n");
641 while (osi.changed);
643 BITMAP_FREE (reexamine);
645 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
646 bitmap_set_bit (computed[object_size_type], i);
648 /* Debugging dumps. */
649 if (dump_file)
651 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
652 if (object_sizes[object_size_type][i]
653 != unknown[object_size_type])
655 print_generic_expr (dump_file, ssa_name (i),
656 dump_flags);
657 fprintf (dump_file,
658 ": %s %sobject size "
659 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
660 (object_size_type & 2) ? "minimum" : "maximum",
661 (object_size_type & 1) ? "sub" : "",
662 object_sizes[object_size_type][i]);
666 BITMAP_FREE (osi.reexamine);
667 BITMAP_FREE (osi.visited);
670 *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
671 return *psize != unknown[object_size_type];
674 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
676 static void
677 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
679 int object_size_type = osi->object_size_type;
680 unsigned int varno = SSA_NAME_VERSION (ptr);
681 unsigned HOST_WIDE_INT bytes;
683 gcc_assert (object_sizes[object_size_type][varno]
684 != unknown[object_size_type]);
685 gcc_assert (osi->pass == 0);
687 if (TREE_CODE (value) == WITH_SIZE_EXPR)
688 value = TREE_OPERAND (value, 0);
690 /* Pointer variables should have been handled by merge_object_sizes. */
691 gcc_assert (TREE_CODE (value) != SSA_NAME
692 || !POINTER_TYPE_P (TREE_TYPE (value)));
694 if (TREE_CODE (value) == ADDR_EXPR)
695 addr_object_size (osi, value, object_size_type, &bytes);
696 else
697 bytes = unknown[object_size_type];
699 if ((object_size_type & 2) == 0)
701 if (object_sizes[object_size_type][varno] < bytes)
702 object_sizes[object_size_type][varno] = bytes;
704 else
706 if (object_sizes[object_size_type][varno] > bytes)
707 object_sizes[object_size_type][varno] = bytes;
712 /* Compute object_sizes for PTR, defined to the result of a call. */
714 static void
715 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
717 int object_size_type = osi->object_size_type;
718 unsigned int varno = SSA_NAME_VERSION (ptr);
719 unsigned HOST_WIDE_INT bytes;
721 gcc_assert (is_gimple_call (call));
723 gcc_assert (object_sizes[object_size_type][varno]
724 != unknown[object_size_type]);
725 gcc_assert (osi->pass == 0);
727 bytes = alloc_object_size (call, object_size_type);
729 if ((object_size_type & 2) == 0)
731 if (object_sizes[object_size_type][varno] < bytes)
732 object_sizes[object_size_type][varno] = bytes;
734 else
736 if (object_sizes[object_size_type][varno] > bytes)
737 object_sizes[object_size_type][varno] = bytes;
742 /* Compute object_sizes for PTR, defined to an unknown value. */
744 static void
745 unknown_object_size (struct object_size_info *osi, tree ptr)
747 int object_size_type = osi->object_size_type;
748 unsigned int varno = SSA_NAME_VERSION (ptr);
749 unsigned HOST_WIDE_INT bytes;
751 gcc_assert (object_sizes[object_size_type][varno]
752 != unknown[object_size_type]);
753 gcc_assert (osi->pass == 0);
755 bytes = unknown[object_size_type];
757 if ((object_size_type & 2) == 0)
759 if (object_sizes[object_size_type][varno] < bytes)
760 object_sizes[object_size_type][varno] = bytes;
762 else
764 if (object_sizes[object_size_type][varno] > bytes)
765 object_sizes[object_size_type][varno] = bytes;
770 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
771 the object size might need reexamination later. */
773 static bool
774 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
775 unsigned HOST_WIDE_INT offset)
777 int object_size_type = osi->object_size_type;
778 unsigned int varno = SSA_NAME_VERSION (dest);
779 unsigned HOST_WIDE_INT orig_bytes;
781 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
782 return false;
783 if (offset >= offset_limit)
785 object_sizes[object_size_type][varno] = unknown[object_size_type];
786 return false;
789 if (osi->pass == 0)
790 collect_object_sizes_for (osi, orig);
792 orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
793 if (orig_bytes != unknown[object_size_type])
794 orig_bytes = (offset > orig_bytes)
795 ? HOST_WIDE_INT_0U : orig_bytes - offset;
797 if ((object_size_type & 2) == 0)
799 if (object_sizes[object_size_type][varno] < orig_bytes)
801 object_sizes[object_size_type][varno] = orig_bytes;
802 osi->changed = true;
805 else
807 if (object_sizes[object_size_type][varno] > orig_bytes)
809 object_sizes[object_size_type][varno] = orig_bytes;
810 osi->changed = true;
813 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
817 /* Compute object_sizes for VAR, defined to the result of an assignment
818 with operator POINTER_PLUS_EXPR. Return true if the object size might
819 need reexamination later. */
821 static bool
822 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
824 int object_size_type = osi->object_size_type;
825 unsigned int varno = SSA_NAME_VERSION (var);
826 unsigned HOST_WIDE_INT bytes;
827 tree op0, op1;
829 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
831 op0 = gimple_assign_rhs1 (stmt);
832 op1 = gimple_assign_rhs2 (stmt);
834 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
836 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
837 gcc_assert (TREE_CODE (rhs) == MEM_REF);
838 op0 = TREE_OPERAND (rhs, 0);
839 op1 = TREE_OPERAND (rhs, 1);
841 else
842 gcc_unreachable ();
844 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
845 return false;
847 /* Handle PTR + OFFSET here. */
848 if (TREE_CODE (op1) == INTEGER_CST
849 && (TREE_CODE (op0) == SSA_NAME
850 || TREE_CODE (op0) == ADDR_EXPR))
852 if (! tree_fits_uhwi_p (op1))
853 bytes = unknown[object_size_type];
854 else if (TREE_CODE (op0) == SSA_NAME)
855 return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
856 else
858 unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);
860 /* op0 will be ADDR_EXPR here. */
861 addr_object_size (osi, op0, object_size_type, &bytes);
862 if (bytes == unknown[object_size_type])
863 ;
864 else if (off > offset_limit)
865 bytes = unknown[object_size_type];
866 else if (off > bytes)
867 bytes = 0;
868 else
869 bytes -= off;
872 else
873 bytes = unknown[object_size_type];
875 if ((object_size_type & 2) == 0)
877 if (object_sizes[object_size_type][varno] < bytes)
878 object_sizes[object_size_type][varno] = bytes;
880 else
882 if (object_sizes[object_size_type][varno] > bytes)
883 object_sizes[object_size_type][varno] = bytes;
885 return false;
889 /* Compute object_sizes for VAR, defined at STMT, which is
890 a COND_EXPR. Return true if the object size might need reexamination
891 later. */
893 static bool
894 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
896 tree then_, else_;
897 int object_size_type = osi->object_size_type;
898 unsigned int varno = SSA_NAME_VERSION (var);
899 bool reexamine = false;
901 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
903 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
904 return false;
906 then_ = gimple_assign_rhs2 (stmt);
907 else_ = gimple_assign_rhs3 (stmt);
909 if (TREE_CODE (then_) == SSA_NAME)
910 reexamine |= merge_object_sizes (osi, var, then_, 0);
911 else
912 expr_object_size (osi, var, then_);
914 if (TREE_CODE (else_) == SSA_NAME)
915 reexamine |= merge_object_sizes (osi, var, else_, 0);
916 else
917 expr_object_size (osi, var, else_);
919 return reexamine;
922 /* Compute object sizes for VAR.
923 For ADDR_EXPR an object size is the number of remaining bytes
924 to the end of the object (where what is considered an object depends on
925 OSI->object_size_type).
926 For allocation GIMPLE_CALL like malloc or calloc object size is the size
927 of the allocation.
928 For POINTER_PLUS_EXPR where second operand is a constant integer,
929 object size is object size of the first operand minus the constant.
930 If the constant is bigger than the number of remaining bytes until the
931 end of the object, object size is 0, but if it is instead a pointer
932 subtraction, object size is unknown[object_size_type].
933 To differentiate addition from subtraction, ADDR_EXPR returns
934 unknown[object_size_type] for all objects bigger than half of the address
935 space, and constants less than half of the address space are considered
936 addition, while bigger constants subtraction.
937 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
938 object size is object size of that argument.
939 Otherwise, object size is the maximum of object sizes of variables
940 that it might be set to. */
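/* A short illustration of the POINTER_PLUS_EXPR rule described above:

     char buf[16];
     char *p = &buf[0];    maximum object size 16
     char *q = p + 10;     object size 16 - 10 == 6
     char *r = q + 10;     offset past the end, object size 0

   whereas q + (size_t) -4, i.e. q - 4, has an offset above half of the
   address space and therefore yields unknown[object_size_type].  */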
942 static void
943 collect_object_sizes_for (struct object_size_info *osi, tree var)
945 int object_size_type = osi->object_size_type;
946 unsigned int varno = SSA_NAME_VERSION (var);
947 gimple *stmt;
948 bool reexamine;
950 if (bitmap_bit_p (computed[object_size_type], varno))
951 return;
953 if (osi->pass == 0)
955 if (bitmap_set_bit (osi->visited, varno))
957 object_sizes[object_size_type][varno]
958 = (object_size_type & 2) ? -1 : 0;
960 else
962 /* Found a dependency loop. Mark the variable for later
963 re-examination. */
964 bitmap_set_bit (osi->reexamine, varno);
965 if (dump_file && (dump_flags & TDF_DETAILS))
967 fprintf (dump_file, "Found a dependency loop at ");
968 print_generic_expr (dump_file, var, dump_flags);
969 fprintf (dump_file, "\n");
971 return;
975 if (dump_file && (dump_flags & TDF_DETAILS))
977 fprintf (dump_file, "Visiting use-def links for ");
978 print_generic_expr (dump_file, var, dump_flags);
979 fprintf (dump_file, "\n");
982 stmt = SSA_NAME_DEF_STMT (var);
983 reexamine = false;
985 switch (gimple_code (stmt))
987 case GIMPLE_ASSIGN:
989 tree rhs = gimple_assign_rhs1 (stmt);
990 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
991 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
992 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
993 reexamine = plus_stmt_object_size (osi, var, stmt);
994 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
995 reexamine = cond_expr_object_size (osi, var, stmt);
996 else if (gimple_assign_single_p (stmt)
997 || gimple_assign_unary_nop_p (stmt))
999 if (TREE_CODE (rhs) == SSA_NAME
1000 && POINTER_TYPE_P (TREE_TYPE (rhs)))
1001 reexamine = merge_object_sizes (osi, var, rhs, 0);
1002 else
1003 expr_object_size (osi, var, rhs);
1005 else
1006 unknown_object_size (osi, var);
1007 break;
1010 case GIMPLE_CALL:
1012 gcall *call_stmt = as_a <gcall *> (stmt);
1013 tree arg = pass_through_call (call_stmt);
1014 if (arg)
1016 if (TREE_CODE (arg) == SSA_NAME
1017 && POINTER_TYPE_P (TREE_TYPE (arg)))
1018 reexamine = merge_object_sizes (osi, var, arg, 0);
1019 else
1020 expr_object_size (osi, var, arg);
1022 else
1023 call_object_size (osi, var, call_stmt);
1024 break;
1027 case GIMPLE_ASM:
1028 /* Pointers defined by __asm__ statements can point anywhere. */
1029 object_sizes[object_size_type][varno] = unknown[object_size_type];
1030 break;
1032 case GIMPLE_NOP:
1033 if (SSA_NAME_VAR (var)
1034 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
1035 expr_object_size (osi, var, SSA_NAME_VAR (var));
1036 else
1037 /* Uninitialized SSA names point nowhere. */
1038 object_sizes[object_size_type][varno] = unknown[object_size_type];
1039 break;
1041 case GIMPLE_PHI:
1043 unsigned i;
1045 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1047 tree rhs = gimple_phi_arg (stmt, i)->def;
1049 if (object_sizes[object_size_type][varno]
1050 == unknown[object_size_type])
1051 break;
1053 if (TREE_CODE (rhs) == SSA_NAME)
1054 reexamine |= merge_object_sizes (osi, var, rhs, 0);
1055 else if (osi->pass == 0)
1056 expr_object_size (osi, var, rhs);
1058 break;
1061 default:
1062 gcc_unreachable ();
1065 if (! reexamine
1066 || object_sizes[object_size_type][varno] == unknown[object_size_type])
1068 bitmap_set_bit (computed[object_size_type], varno);
1069 bitmap_clear_bit (osi->reexamine, varno);
1071 else
1073 bitmap_set_bit (osi->reexamine, varno);
1074 if (dump_file && (dump_flags & TDF_DETAILS))
1076 fprintf (dump_file, "Need to reexamine ");
1077 print_generic_expr (dump_file, var, dump_flags);
1078 fprintf (dump_file, "\n");
1084 /* Helper function for check_for_plus_in_loops. Called recursively
1085 to detect loops. */
1087 static void
1088 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1089 unsigned int depth)
1091 gimple *stmt = SSA_NAME_DEF_STMT (var);
1092 unsigned int varno = SSA_NAME_VERSION (var);
1094 if (osi->depths[varno])
1096 if (osi->depths[varno] != depth)
1098 unsigned int *sp;
1100 /* Found a loop involving pointer addition. */
1101 for (sp = osi->tos; sp > osi->stack; )
1103 --sp;
1104 bitmap_clear_bit (osi->reexamine, *sp);
1105 bitmap_set_bit (computed[osi->object_size_type], *sp);
1106 object_sizes[osi->object_size_type][*sp] = 0;
1107 if (*sp == varno)
1108 break;
1111 return;
1113 else if (! bitmap_bit_p (osi->reexamine, varno))
1114 return;
1116 osi->depths[varno] = depth;
1117 *osi->tos++ = varno;
1119 switch (gimple_code (stmt))
1122 case GIMPLE_ASSIGN:
1124 if ((gimple_assign_single_p (stmt)
1125 || gimple_assign_unary_nop_p (stmt))
1126 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1128 tree rhs = gimple_assign_rhs1 (stmt);
1130 check_for_plus_in_loops_1 (osi, rhs, depth);
1132 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1134 tree basevar = gimple_assign_rhs1 (stmt);
1135 tree cst = gimple_assign_rhs2 (stmt);
1137 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1139 check_for_plus_in_loops_1 (osi, basevar,
1140 depth + !integer_zerop (cst));
1142 else
1143 gcc_unreachable ();
1144 break;
1147 case GIMPLE_CALL:
1149 gcall *call_stmt = as_a <gcall *> (stmt);
1150 tree arg = pass_through_call (call_stmt);
1151 if (arg)
1153 if (TREE_CODE (arg) == SSA_NAME)
1154 check_for_plus_in_loops_1 (osi, arg, depth);
1155 else
1156 gcc_unreachable ();
1158 break;
1161 case GIMPLE_PHI:
1163 unsigned i;
1165 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1167 tree rhs = gimple_phi_arg (stmt, i)->def;
1169 if (TREE_CODE (rhs) == SSA_NAME)
1170 check_for_plus_in_loops_1 (osi, rhs, depth);
1172 break;
1175 default:
1176 gcc_unreachable ();
1179 osi->depths[varno] = 0;
1180 osi->tos--;
1184 /* Check if some pointer we are computing object size of is being increased
1185 within a loop. If yes, assume all the SSA variables participating in
1186 that loop have minimum object sizes 0. */
1188 static void
1189 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1191 gimple *stmt = SSA_NAME_DEF_STMT (var);
1193 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1194 and looked for a POINTER_PLUS_EXPR in the pass-through
1195 argument, if any. In GIMPLE, however, such an expression
1196 is not a valid call operand. */
1198 if (is_gimple_assign (stmt)
1199 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1201 tree basevar = gimple_assign_rhs1 (stmt);
1202 tree cst = gimple_assign_rhs2 (stmt);
1204 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1206 if (integer_zerop (cst))
1207 return;
1209 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1210 *osi->tos++ = SSA_NAME_VERSION (basevar);
1211 check_for_plus_in_loops_1 (osi, var, 2);
1212 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1213 osi->tos--;
1218 /* Initialize data structures for the object size computation. */
1220 void
1221 init_object_sizes (void)
1223 int object_size_type;
1225 if (computed[0])
1226 return;
1228 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1230 object_sizes[object_size_type].safe_grow (num_ssa_names);
1231 computed[object_size_type] = BITMAP_ALLOC (NULL);
1234 init_offset_limit ();
1238 /* Destroy data structures after the object size computation. */
1240 void
1241 fini_object_sizes (void)
1243 int object_size_type;
1245 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1247 object_sizes[object_size_type].release ();
1248 BITMAP_FREE (computed[object_size_type]);
1253 /* Simple pass to optimize all __builtin_object_size () builtins. */
1255 namespace {
1257 const pass_data pass_data_object_sizes =
1259 GIMPLE_PASS, /* type */
1260 "objsz", /* name */
1261 OPTGROUP_NONE, /* optinfo_flags */
1262 TV_NONE, /* tv_id */
1263 ( PROP_cfg | PROP_ssa ), /* properties_required */
1264 0, /* properties_provided */
1265 0, /* properties_destroyed */
1266 0, /* todo_flags_start */
1267 0, /* todo_flags_finish */
1270 class pass_object_sizes : public gimple_opt_pass
1272 public:
1273 pass_object_sizes (gcc::context *ctxt)
1274 : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
1277 /* opt_pass methods: */
1278 opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
1279 void set_pass_param (unsigned int n, bool param)
1281 gcc_assert (n == 0);
1282 insert_min_max_p = param;
1284 virtual unsigned int execute (function *);
1286 private:
1287 /* Determines whether the pass instance creates MIN/MAX_EXPRs. */
1288 bool insert_min_max_p;
1289 }; // class pass_object_sizes
1291 /* Dummy valueize function. */
1293 static tree
1294 do_valueize (tree t)
1296 return t;
1299 unsigned int
1300 pass_object_sizes::execute (function *fun)
1302 basic_block bb;
1303 FOR_EACH_BB_FN (bb, fun)
1305 gimple_stmt_iterator i;
1306 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1308 tree result;
1309 gimple *call = gsi_stmt (i);
1310 if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
1311 continue;
1313 init_object_sizes ();
1315 /* If insert_min_max_p, only attempt to fold
1316 __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
1317 and rather than folding the builtin to the constant if any,
1318 create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
1319 call result and the computed constant. */
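/* For example, if __builtin_object_size (x, 1) is computed to be at
   most 64 here, the call is rewritten as (illustration)
     tem_N = __builtin_object_size (x, 1);
     lhs = MIN_EXPR <tem_N, 64>;
   so that a later objsz pass can still sharpen the bound.  */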
1320 if (insert_min_max_p)
1322 tree ost = gimple_call_arg (call, 1);
1323 if (tree_fits_uhwi_p (ost))
1325 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
1326 tree ptr = gimple_call_arg (call, 0);
1327 tree lhs = gimple_call_lhs (call);
1328 if ((object_size_type == 1 || object_size_type == 3)
1329 && (TREE_CODE (ptr) == ADDR_EXPR
1330 || TREE_CODE (ptr) == SSA_NAME)
1331 && lhs)
1333 tree type = TREE_TYPE (lhs);
1334 unsigned HOST_WIDE_INT bytes;
1335 if (compute_builtin_object_size (ptr, object_size_type,
1336 &bytes)
1337 && wi::fits_to_tree_p (bytes, type))
1339 tree tem = make_ssa_name (type);
1340 gimple_call_set_lhs (call, tem);
1341 enum tree_code code
1342 = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
1343 tree cst = build_int_cstu (type, bytes);
1344 gimple *g
1345 = gimple_build_assign (lhs, code, tem, cst);
1346 gsi_insert_after (&i, g, GSI_NEW_STMT);
1347 update_stmt (call);
1351 continue;
1354 tree lhs = gimple_call_lhs (call);
1355 if (!lhs)
1356 continue;
1358 result = gimple_fold_stmt_to_constant (call, do_valueize);
1359 if (!result)
1361 tree ost = gimple_call_arg (call, 1);
1363 if (tree_fits_uhwi_p (ost))
1365 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
1367 if (object_size_type < 2)
1368 result = fold_convert (size_type_node,
1369 integer_minus_one_node);
1370 else if (object_size_type < 4)
1371 result = build_zero_cst (size_type_node);
1374 if (!result)
1375 continue;
1378 gcc_assert (TREE_CODE (result) == INTEGER_CST);
1380 if (dump_file && (dump_flags & TDF_DETAILS))
1382 fprintf (dump_file, "Simplified\n ");
1383 print_gimple_stmt (dump_file, call, 0, dump_flags);
1384 fprintf (dump_file, " to ");
1385 print_generic_expr (dump_file, result);
1386 fprintf (dump_file, "\n");
1389 /* Propagate into all uses and fold those stmts. */
1390 replace_uses_by (lhs, result);
1394 fini_object_sizes ();
1395 return 0;
1398 } // anon namespace
1400 gimple_opt_pass *
1401 make_pass_object_sizes (gcc::context *ctxt)
1403 return new pass_object_sizes (ctxt);
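/* A typical consumer of this pass (illustrative): with _FORTIFY_SOURCE,
   glibc expands

     char buf[8];
     memcpy (buf, src, n);

   into __builtin___memcpy_chk (buf, src, n, __builtin_object_size (buf, 0)),
   and this pass folds the __builtin_object_size call to 8 so the bounds
   check can be enforced or statically elided.  */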