* gimple-ssa-store-merging.c (struct store_immediate_info): Add
[official-gcc.git] / gcc / tree-object-size.c
blobd45f50d4173e6f040840949750aba1a7a96724e1
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2017 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35 #include "stringpool.h"
36 #include "attribs.h"
38 struct object_size_info
40 int object_size_type;
41 unsigned char pass;
42 bool changed;
43 bitmap visited, reexamine;
44 unsigned int *depths;
45 unsigned int *stack, *tos;
48 static const unsigned HOST_WIDE_INT unknown[4] = {
49 HOST_WIDE_INT_M1U,
50 HOST_WIDE_INT_M1U,
55 static tree compute_object_offset (const_tree, const_tree);
56 static bool addr_object_size (struct object_size_info *,
57 const_tree, int, unsigned HOST_WIDE_INT *);
58 static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
59 static tree pass_through_call (const gcall *);
60 static void collect_object_sizes_for (struct object_size_info *, tree);
61 static void expr_object_size (struct object_size_info *, tree, tree);
62 static bool merge_object_sizes (struct object_size_info *, tree, tree,
63 unsigned HOST_WIDE_INT);
64 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
65 static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
66 static void init_offset_limit (void);
67 static void check_for_plus_in_loops (struct object_size_info *, tree);
68 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
69 unsigned int);
71 /* object_sizes[0] is upper bound for number of bytes till the end of
72 the object.
73 object_sizes[1] is upper bound for number of bytes till the end of
74 the subobject (innermost array or field with address taken).
75 object_sizes[2] is lower bound for number of bytes till the end of
76 the object and object_sizes[3] lower bound for subobject. */
77 static vec<unsigned HOST_WIDE_INT> object_sizes[4];
79 /* Bitmaps what object sizes have been computed already. */
80 static bitmap computed[4];
82 /* Maximum value of offset we consider to be addition. */
83 static unsigned HOST_WIDE_INT offset_limit;
86 /* Initialize OFFSET_LIMIT variable. */
87 static void
88 init_offset_limit (void)
90 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
91 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
92 else
93 offset_limit = -1;
94 offset_limit /= 2;
98 /* Compute offset of EXPR within VAR. Return error_mark_node
99 if unknown. */
101 static tree
102 compute_object_offset (const_tree expr, const_tree var)
104 enum tree_code code = PLUS_EXPR;
105 tree base, off, t;
107 if (expr == var)
108 return size_zero_node;
110 switch (TREE_CODE (expr))
112 case COMPONENT_REF:
113 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
114 if (base == error_mark_node)
115 return base;
117 t = TREE_OPERAND (expr, 1);
118 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
119 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
120 / BITS_PER_UNIT));
121 break;
123 case REALPART_EXPR:
124 CASE_CONVERT:
125 case VIEW_CONVERT_EXPR:
126 case NON_LVALUE_EXPR:
127 return compute_object_offset (TREE_OPERAND (expr, 0), var);
129 case IMAGPART_EXPR:
130 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
131 if (base == error_mark_node)
132 return base;
134 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
135 break;
137 case ARRAY_REF:
138 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
139 if (base == error_mark_node)
140 return base;
142 t = TREE_OPERAND (expr, 1);
143 tree low_bound, unit_size;
144 low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
145 unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
146 if (! integer_zerop (low_bound))
147 t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
148 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
150 code = MINUS_EXPR;
151 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
153 t = fold_convert (sizetype, t);
154 off = size_binop (MULT_EXPR, unit_size, t);
155 break;
157 case MEM_REF:
158 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
159 return wide_int_to_tree (sizetype, mem_ref_offset (expr));
161 default:
162 return error_mark_node;
165 return size_binop (code, base, off);
169 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
170 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
171 If unknown, return unknown[object_size_type]. */
173 static bool
174 addr_object_size (struct object_size_info *osi, const_tree ptr,
175 int object_size_type, unsigned HOST_WIDE_INT *psize)
177 tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;
179 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
181 /* Set to unknown and overwrite just before returning if the size
182 could be determined. */
183 *psize = unknown[object_size_type];
185 pt_var = TREE_OPERAND (ptr, 0);
186 while (handled_component_p (pt_var))
187 pt_var = TREE_OPERAND (pt_var, 0);
189 if (pt_var
190 && TREE_CODE (pt_var) == MEM_REF)
192 unsigned HOST_WIDE_INT sz;
194 if (!osi || (object_size_type & 1) != 0
195 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
197 compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
198 object_size_type & ~1, &sz);
200 else
202 tree var = TREE_OPERAND (pt_var, 0);
203 if (osi->pass == 0)
204 collect_object_sizes_for (osi, var);
205 if (bitmap_bit_p (computed[object_size_type],
206 SSA_NAME_VERSION (var)))
207 sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
208 else
209 sz = unknown[object_size_type];
211 if (sz != unknown[object_size_type])
213 offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
214 if (wi::neg_p (dsz))
215 sz = 0;
216 else if (wi::fits_uhwi_p (dsz))
217 sz = dsz.to_uhwi ();
218 else
219 sz = unknown[object_size_type];
222 if (sz != unknown[object_size_type] && sz < offset_limit)
223 pt_var_size = size_int (sz);
225 else if (pt_var
226 && DECL_P (pt_var)
227 && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
228 && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
229 pt_var_size = DECL_SIZE_UNIT (pt_var);
230 else if (pt_var
231 && TREE_CODE (pt_var) == STRING_CST
232 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
233 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
234 && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
235 < offset_limit)
236 pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
237 else
238 return false;
240 if (pt_var != TREE_OPERAND (ptr, 0))
242 tree var;
244 if (object_size_type & 1)
246 var = TREE_OPERAND (ptr, 0);
248 while (var != pt_var
249 && TREE_CODE (var) != BIT_FIELD_REF
250 && TREE_CODE (var) != COMPONENT_REF
251 && TREE_CODE (var) != ARRAY_REF
252 && TREE_CODE (var) != ARRAY_RANGE_REF
253 && TREE_CODE (var) != REALPART_EXPR
254 && TREE_CODE (var) != IMAGPART_EXPR)
255 var = TREE_OPERAND (var, 0);
256 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
257 var = TREE_OPERAND (var, 0);
258 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
259 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
260 || (pt_var_size
261 && tree_int_cst_lt (pt_var_size,
262 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
263 var = pt_var;
264 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
266 tree v = var;
267 /* For &X->fld, compute object size only if fld isn't the last
268 field, as struct { int i; char c[1]; } is often used instead
269 of flexible array member. */
270 while (v && v != pt_var)
271 switch (TREE_CODE (v))
273 case ARRAY_REF:
274 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
275 && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
277 tree domain
278 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
279 if (domain
280 && TYPE_MAX_VALUE (domain)
281 && TREE_CODE (TYPE_MAX_VALUE (domain))
282 == INTEGER_CST
283 && tree_int_cst_lt (TREE_OPERAND (v, 1),
284 TYPE_MAX_VALUE (domain)))
286 v = NULL_TREE;
287 break;
290 v = TREE_OPERAND (v, 0);
291 break;
292 case REALPART_EXPR:
293 case IMAGPART_EXPR:
294 v = NULL_TREE;
295 break;
296 case COMPONENT_REF:
297 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
299 v = NULL_TREE;
300 break;
302 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
303 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
304 != UNION_TYPE
305 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
306 != QUAL_UNION_TYPE)
307 break;
308 else
309 v = TREE_OPERAND (v, 0);
310 if (TREE_CODE (v) == COMPONENT_REF
311 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
312 == RECORD_TYPE)
314 tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
315 for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
316 if (TREE_CODE (fld_chain) == FIELD_DECL)
317 break;
319 if (fld_chain)
321 v = NULL_TREE;
322 break;
324 v = TREE_OPERAND (v, 0);
326 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
327 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
328 != UNION_TYPE
329 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
330 != QUAL_UNION_TYPE)
331 break;
332 else
333 v = TREE_OPERAND (v, 0);
334 if (v != pt_var)
335 v = NULL_TREE;
336 else
337 v = pt_var;
338 break;
339 default:
340 v = pt_var;
341 break;
343 if (v == pt_var)
344 var = pt_var;
347 else
348 var = pt_var;
350 if (var != pt_var)
351 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
352 else if (!pt_var_size)
353 return false;
354 else
355 var_size = pt_var_size;
356 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
357 if (bytes != error_mark_node)
359 if (TREE_CODE (bytes) == INTEGER_CST
360 && tree_int_cst_lt (var_size, bytes))
361 bytes = size_zero_node;
362 else
363 bytes = size_binop (MINUS_EXPR, var_size, bytes);
365 if (var != pt_var
366 && pt_var_size
367 && TREE_CODE (pt_var) == MEM_REF
368 && bytes != error_mark_node)
370 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
371 if (bytes2 != error_mark_node)
373 if (TREE_CODE (bytes2) == INTEGER_CST
374 && tree_int_cst_lt (pt_var_size, bytes2))
375 bytes2 = size_zero_node;
376 else
377 bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
378 bytes = size_binop (MIN_EXPR, bytes, bytes2);
382 else if (!pt_var_size)
383 return false;
384 else
385 bytes = pt_var_size;
387 if (tree_fits_uhwi_p (bytes))
389 *psize = tree_to_uhwi (bytes);
390 return true;
393 return false;
397 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
398 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
399 argument from __builtin_object_size. If unknown, return
400 unknown[object_size_type]. */
402 static unsigned HOST_WIDE_INT
403 alloc_object_size (const gcall *call, int object_size_type)
405 tree callee, bytes = NULL_TREE;
406 tree alloc_size;
407 int arg1 = -1, arg2 = -1;
409 gcc_assert (is_gimple_call (call));
411 callee = gimple_call_fndecl (call);
412 if (!callee)
413 return unknown[object_size_type];
415 alloc_size = lookup_attribute ("alloc_size",
416 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
417 if (alloc_size && TREE_VALUE (alloc_size))
419 tree p = TREE_VALUE (alloc_size);
421 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
422 if (TREE_CHAIN (p))
423 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
426 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
427 switch (DECL_FUNCTION_CODE (callee))
429 case BUILT_IN_CALLOC:
430 arg2 = 1;
431 /* fall through */
432 case BUILT_IN_MALLOC:
433 CASE_BUILT_IN_ALLOCA:
434 arg1 = 0;
435 default:
436 break;
439 if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
440 || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
441 || (arg2 >= 0
442 && (arg2 >= (int)gimple_call_num_args (call)
443 || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
444 return unknown[object_size_type];
446 if (arg2 >= 0)
447 bytes = size_binop (MULT_EXPR,
448 fold_convert (sizetype, gimple_call_arg (call, arg1)),
449 fold_convert (sizetype, gimple_call_arg (call, arg2)));
450 else if (arg1 >= 0)
451 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
453 if (bytes && tree_fits_uhwi_p (bytes))
454 return tree_to_uhwi (bytes);
456 return unknown[object_size_type];
460 /* If object size is propagated from one of function's arguments directly
461 to its return value, return that argument for GIMPLE_CALL statement CALL.
462 Otherwise return NULL. */
464 static tree
465 pass_through_call (const gcall *call)
467 tree callee = gimple_call_fndecl (call);
469 if (callee
470 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
471 switch (DECL_FUNCTION_CODE (callee))
473 case BUILT_IN_MEMCPY:
474 case BUILT_IN_MEMMOVE:
475 case BUILT_IN_MEMSET:
476 case BUILT_IN_STRCPY:
477 case BUILT_IN_STRNCPY:
478 case BUILT_IN_STRCAT:
479 case BUILT_IN_STRNCAT:
480 case BUILT_IN_MEMCPY_CHK:
481 case BUILT_IN_MEMMOVE_CHK:
482 case BUILT_IN_MEMSET_CHK:
483 case BUILT_IN_STRCPY_CHK:
484 case BUILT_IN_STRNCPY_CHK:
485 case BUILT_IN_STPNCPY_CHK:
486 case BUILT_IN_STRCAT_CHK:
487 case BUILT_IN_STRNCAT_CHK:
488 case BUILT_IN_ASSUME_ALIGNED:
489 if (gimple_call_num_args (call) >= 1)
490 return gimple_call_arg (call, 0);
491 break;
492 default:
493 break;
496 return NULL_TREE;
500 /* Compute __builtin_object_size value for PTR and set *PSIZE to
501 the resulting value. OBJECT_SIZE_TYPE is the second argument
502 to __builtin_object_size. Return true on success and false
503 when the object size could not be determined. */
505 bool
506 compute_builtin_object_size (tree ptr, int object_size_type,
507 unsigned HOST_WIDE_INT *psize)
509 gcc_assert (object_size_type >= 0 && object_size_type <= 3);
511 /* Set to unknown and overwrite just before returning if the size
512 could be determined. */
513 *psize = unknown[object_size_type];
515 if (! offset_limit)
516 init_offset_limit ();
518 if (TREE_CODE (ptr) == ADDR_EXPR)
519 return addr_object_size (NULL, ptr, object_size_type, psize);
521 if (TREE_CODE (ptr) != SSA_NAME
522 || !POINTER_TYPE_P (TREE_TYPE (ptr)))
523 return false;
525 if (computed[object_size_type] == NULL)
527 if (optimize || object_size_type & 1)
528 return false;
530 /* When not optimizing, rather than failing, make a small effort
531 to determine the object size without the full benefit of
532 the (costly) computation below. */
533 gimple *def = SSA_NAME_DEF_STMT (ptr);
534 if (gimple_code (def) == GIMPLE_ASSIGN)
536 tree_code code = gimple_assign_rhs_code (def);
537 if (code == POINTER_PLUS_EXPR)
539 tree offset = gimple_assign_rhs2 (def);
540 ptr = gimple_assign_rhs1 (def);
542 if (tree_fits_shwi_p (offset)
543 && compute_builtin_object_size (ptr, object_size_type, psize))
545 /* Return zero when the offset is out of bounds. */
546 unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
547 *psize = off < *psize ? *psize - off : 0;
548 return true;
552 return false;
555 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
557 struct object_size_info osi;
558 bitmap_iterator bi;
559 unsigned int i;
561 if (num_ssa_names > object_sizes[object_size_type].length ())
562 object_sizes[object_size_type].safe_grow (num_ssa_names);
563 if (dump_file)
565 fprintf (dump_file, "Computing %s %sobject size for ",
566 (object_size_type & 2) ? "minimum" : "maximum",
567 (object_size_type & 1) ? "sub" : "");
568 print_generic_expr (dump_file, ptr, dump_flags);
569 fprintf (dump_file, ":\n");
572 osi.visited = BITMAP_ALLOC (NULL);
573 osi.reexamine = BITMAP_ALLOC (NULL);
574 osi.object_size_type = object_size_type;
575 osi.depths = NULL;
576 osi.stack = NULL;
577 osi.tos = NULL;
579 /* First pass: walk UD chains, compute object sizes that
580 can be computed. osi.reexamine bitmap at the end will
581 contain what variables were found in dependency cycles
582 and therefore need to be reexamined. */
583 osi.pass = 0;
584 osi.changed = false;
585 collect_object_sizes_for (&osi, ptr);
587 /* Second pass: keep recomputing object sizes of variables
588 that need reexamination, until no object sizes are
589 increased or all object sizes are computed. */
590 if (! bitmap_empty_p (osi.reexamine))
592 bitmap reexamine = BITMAP_ALLOC (NULL);
594 /* If looking for minimum instead of maximum object size,
595 detect cases where a pointer is increased in a loop.
596 Although even without this detection pass 2 would eventually
597 terminate, it could take a long time. If a pointer is
598 increasing this way, we need to assume 0 object size.
599 E.g. p = &buf[0]; while (cond) p = p + 4; */
600 if (object_size_type & 2)
602 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
603 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
604 osi.tos = osi.stack;
605 osi.pass = 1;
606 /* collect_object_sizes_for is changing
607 osi.reexamine bitmap, so iterate over a copy. */
608 bitmap_copy (reexamine, osi.reexamine);
609 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
610 if (bitmap_bit_p (osi.reexamine, i))
611 check_for_plus_in_loops (&osi, ssa_name (i));
613 free (osi.depths);
614 osi.depths = NULL;
615 free (osi.stack);
616 osi.stack = NULL;
617 osi.tos = NULL;
622 osi.pass = 2;
623 osi.changed = false;
624 /* collect_object_sizes_for is changing
625 osi.reexamine bitmap, so iterate over a copy. */
626 bitmap_copy (reexamine, osi.reexamine);
627 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
628 if (bitmap_bit_p (osi.reexamine, i))
630 collect_object_sizes_for (&osi, ssa_name (i));
631 if (dump_file && (dump_flags & TDF_DETAILS))
633 fprintf (dump_file, "Reexamining ");
634 print_generic_expr (dump_file, ssa_name (i),
635 dump_flags);
636 fprintf (dump_file, "\n");
640 while (osi.changed);
642 BITMAP_FREE (reexamine);
644 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
645 bitmap_set_bit (computed[object_size_type], i);
647 /* Debugging dumps. */
648 if (dump_file)
650 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
651 if (object_sizes[object_size_type][i]
652 != unknown[object_size_type])
654 print_generic_expr (dump_file, ssa_name (i),
655 dump_flags);
656 fprintf (dump_file,
657 ": %s %sobject size "
658 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
659 (object_size_type & 2) ? "minimum" : "maximum",
660 (object_size_type & 1) ? "sub" : "",
661 object_sizes[object_size_type][i]);
665 BITMAP_FREE (osi.reexamine);
666 BITMAP_FREE (osi.visited);
669 *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
670 return *psize != unknown[object_size_type];
673 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
675 static void
676 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
678 int object_size_type = osi->object_size_type;
679 unsigned int varno = SSA_NAME_VERSION (ptr);
680 unsigned HOST_WIDE_INT bytes;
682 gcc_assert (object_sizes[object_size_type][varno]
683 != unknown[object_size_type]);
684 gcc_assert (osi->pass == 0);
686 if (TREE_CODE (value) == WITH_SIZE_EXPR)
687 value = TREE_OPERAND (value, 0);
689 /* Pointer variables should have been handled by merge_object_sizes. */
690 gcc_assert (TREE_CODE (value) != SSA_NAME
691 || !POINTER_TYPE_P (TREE_TYPE (value)));
693 if (TREE_CODE (value) == ADDR_EXPR)
694 addr_object_size (osi, value, object_size_type, &bytes);
695 else
696 bytes = unknown[object_size_type];
698 if ((object_size_type & 2) == 0)
700 if (object_sizes[object_size_type][varno] < bytes)
701 object_sizes[object_size_type][varno] = bytes;
703 else
705 if (object_sizes[object_size_type][varno] > bytes)
706 object_sizes[object_size_type][varno] = bytes;
711 /* Compute object_sizes for PTR, defined to the result of a call. */
713 static void
714 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
716 int object_size_type = osi->object_size_type;
717 unsigned int varno = SSA_NAME_VERSION (ptr);
718 unsigned HOST_WIDE_INT bytes;
720 gcc_assert (is_gimple_call (call));
722 gcc_assert (object_sizes[object_size_type][varno]
723 != unknown[object_size_type]);
724 gcc_assert (osi->pass == 0);
726 bytes = alloc_object_size (call, object_size_type);
728 if ((object_size_type & 2) == 0)
730 if (object_sizes[object_size_type][varno] < bytes)
731 object_sizes[object_size_type][varno] = bytes;
733 else
735 if (object_sizes[object_size_type][varno] > bytes)
736 object_sizes[object_size_type][varno] = bytes;
741 /* Compute object_sizes for PTR, defined to an unknown value. */
743 static void
744 unknown_object_size (struct object_size_info *osi, tree ptr)
746 int object_size_type = osi->object_size_type;
747 unsigned int varno = SSA_NAME_VERSION (ptr);
748 unsigned HOST_WIDE_INT bytes;
750 gcc_assert (object_sizes[object_size_type][varno]
751 != unknown[object_size_type]);
752 gcc_assert (osi->pass == 0);
754 bytes = unknown[object_size_type];
756 if ((object_size_type & 2) == 0)
758 if (object_sizes[object_size_type][varno] < bytes)
759 object_sizes[object_size_type][varno] = bytes;
761 else
763 if (object_sizes[object_size_type][varno] > bytes)
764 object_sizes[object_size_type][varno] = bytes;
769 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
770 the object size might need reexamination later. */
772 static bool
773 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
774 unsigned HOST_WIDE_INT offset)
776 int object_size_type = osi->object_size_type;
777 unsigned int varno = SSA_NAME_VERSION (dest);
778 unsigned HOST_WIDE_INT orig_bytes;
780 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
781 return false;
782 if (offset >= offset_limit)
784 object_sizes[object_size_type][varno] = unknown[object_size_type];
785 return false;
788 if (osi->pass == 0)
789 collect_object_sizes_for (osi, orig);
791 orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
792 if (orig_bytes != unknown[object_size_type])
793 orig_bytes = (offset > orig_bytes)
794 ? HOST_WIDE_INT_0U : orig_bytes - offset;
796 if ((object_size_type & 2) == 0)
798 if (object_sizes[object_size_type][varno] < orig_bytes)
800 object_sizes[object_size_type][varno] = orig_bytes;
801 osi->changed = true;
804 else
806 if (object_sizes[object_size_type][varno] > orig_bytes)
808 object_sizes[object_size_type][varno] = orig_bytes;
809 osi->changed = true;
812 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
816 /* Compute object_sizes for VAR, defined to the result of an assignment
817 with operator POINTER_PLUS_EXPR. Return true if the object size might
818 need reexamination later. */
820 static bool
821 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
823 int object_size_type = osi->object_size_type;
824 unsigned int varno = SSA_NAME_VERSION (var);
825 unsigned HOST_WIDE_INT bytes;
826 tree op0, op1;
828 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
830 op0 = gimple_assign_rhs1 (stmt);
831 op1 = gimple_assign_rhs2 (stmt);
833 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
835 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
836 gcc_assert (TREE_CODE (rhs) == MEM_REF);
837 op0 = TREE_OPERAND (rhs, 0);
838 op1 = TREE_OPERAND (rhs, 1);
840 else
841 gcc_unreachable ();
843 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
844 return false;
846 /* Handle PTR + OFFSET here. */
847 if (TREE_CODE (op1) == INTEGER_CST
848 && (TREE_CODE (op0) == SSA_NAME
849 || TREE_CODE (op0) == ADDR_EXPR))
851 if (! tree_fits_uhwi_p (op1))
852 bytes = unknown[object_size_type];
853 else if (TREE_CODE (op0) == SSA_NAME)
854 return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
855 else
857 unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);
859 /* op0 will be ADDR_EXPR here. */
860 addr_object_size (osi, op0, object_size_type, &bytes);
861 if (bytes == unknown[object_size_type])
863 else if (off > offset_limit)
864 bytes = unknown[object_size_type];
865 else if (off > bytes)
866 bytes = 0;
867 else
868 bytes -= off;
871 else
872 bytes = unknown[object_size_type];
874 if ((object_size_type & 2) == 0)
876 if (object_sizes[object_size_type][varno] < bytes)
877 object_sizes[object_size_type][varno] = bytes;
879 else
881 if (object_sizes[object_size_type][varno] > bytes)
882 object_sizes[object_size_type][varno] = bytes;
884 return false;
888 /* Compute object_sizes for VAR, defined at STMT, which is
889 a COND_EXPR. Return true if the object size might need reexamination
890 later. */
892 static bool
893 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
895 tree then_, else_;
896 int object_size_type = osi->object_size_type;
897 unsigned int varno = SSA_NAME_VERSION (var);
898 bool reexamine = false;
900 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
902 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
903 return false;
905 then_ = gimple_assign_rhs2 (stmt);
906 else_ = gimple_assign_rhs3 (stmt);
908 if (TREE_CODE (then_) == SSA_NAME)
909 reexamine |= merge_object_sizes (osi, var, then_, 0);
910 else
911 expr_object_size (osi, var, then_);
913 if (TREE_CODE (else_) == SSA_NAME)
914 reexamine |= merge_object_sizes (osi, var, else_, 0);
915 else
916 expr_object_size (osi, var, else_);
918 return reexamine;
921 /* Compute object sizes for VAR.
922 For ADDR_EXPR an object size is the number of remaining bytes
923 to the end of the object (where what is considered an object depends on
924 OSI->object_size_type).
925 For allocation GIMPLE_CALL like malloc or calloc object size is the size
926 of the allocation.
927 For POINTER_PLUS_EXPR where second operand is a constant integer,
928 object size is object size of the first operand minus the constant.
929 If the constant is bigger than the number of remaining bytes until the
930 end of the object, object size is 0, but if it is instead a pointer
931 subtraction, object size is unknown[object_size_type].
932 To differentiate addition from subtraction, ADDR_EXPR returns
933 unknown[object_size_type] for all objects bigger than half of the address
934 space, and constants less than half of the address space are considered
935 addition, while bigger constants subtraction.
936 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
937 object size is object size of that argument.
938 Otherwise, object size is the maximum of object sizes of variables
939 that it might be set to. */
941 static void
942 collect_object_sizes_for (struct object_size_info *osi, tree var)
944 int object_size_type = osi->object_size_type;
945 unsigned int varno = SSA_NAME_VERSION (var);
946 gimple *stmt;
947 bool reexamine;
949 if (bitmap_bit_p (computed[object_size_type], varno))
950 return;
952 if (osi->pass == 0)
954 if (bitmap_set_bit (osi->visited, varno))
956 object_sizes[object_size_type][varno]
957 = (object_size_type & 2) ? -1 : 0;
959 else
961 /* Found a dependency loop. Mark the variable for later
962 re-examination. */
963 bitmap_set_bit (osi->reexamine, varno);
964 if (dump_file && (dump_flags & TDF_DETAILS))
966 fprintf (dump_file, "Found a dependency loop at ");
967 print_generic_expr (dump_file, var, dump_flags);
968 fprintf (dump_file, "\n");
970 return;
974 if (dump_file && (dump_flags & TDF_DETAILS))
976 fprintf (dump_file, "Visiting use-def links for ");
977 print_generic_expr (dump_file, var, dump_flags);
978 fprintf (dump_file, "\n");
981 stmt = SSA_NAME_DEF_STMT (var);
982 reexamine = false;
984 switch (gimple_code (stmt))
986 case GIMPLE_ASSIGN:
988 tree rhs = gimple_assign_rhs1 (stmt);
989 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
990 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
991 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
992 reexamine = plus_stmt_object_size (osi, var, stmt);
993 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
994 reexamine = cond_expr_object_size (osi, var, stmt);
995 else if (gimple_assign_single_p (stmt)
996 || gimple_assign_unary_nop_p (stmt))
998 if (TREE_CODE (rhs) == SSA_NAME
999 && POINTER_TYPE_P (TREE_TYPE (rhs)))
1000 reexamine = merge_object_sizes (osi, var, rhs, 0);
1001 else
1002 expr_object_size (osi, var, rhs);
1004 else
1005 unknown_object_size (osi, var);
1006 break;
1009 case GIMPLE_CALL:
1011 gcall *call_stmt = as_a <gcall *> (stmt);
1012 tree arg = pass_through_call (call_stmt);
1013 if (arg)
1015 if (TREE_CODE (arg) == SSA_NAME
1016 && POINTER_TYPE_P (TREE_TYPE (arg)))
1017 reexamine = merge_object_sizes (osi, var, arg, 0);
1018 else
1019 expr_object_size (osi, var, arg);
1021 else
1022 call_object_size (osi, var, call_stmt);
1023 break;
1026 case GIMPLE_ASM:
1027 /* Pointers defined by __asm__ statements can point anywhere. */
1028 object_sizes[object_size_type][varno] = unknown[object_size_type];
1029 break;
1031 case GIMPLE_NOP:
1032 if (SSA_NAME_VAR (var)
1033 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
1034 expr_object_size (osi, var, SSA_NAME_VAR (var));
1035 else
1036 /* Uninitialized SSA names point nowhere. */
1037 object_sizes[object_size_type][varno] = unknown[object_size_type];
1038 break;
1040 case GIMPLE_PHI:
1042 unsigned i;
1044 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1046 tree rhs = gimple_phi_arg (stmt, i)->def;
1048 if (object_sizes[object_size_type][varno]
1049 == unknown[object_size_type])
1050 break;
1052 if (TREE_CODE (rhs) == SSA_NAME)
1053 reexamine |= merge_object_sizes (osi, var, rhs, 0);
1054 else if (osi->pass == 0)
1055 expr_object_size (osi, var, rhs);
1057 break;
1060 default:
1061 gcc_unreachable ();
1064 if (! reexamine
1065 || object_sizes[object_size_type][varno] == unknown[object_size_type])
1067 bitmap_set_bit (computed[object_size_type], varno);
1068 bitmap_clear_bit (osi->reexamine, varno);
1070 else
1072 bitmap_set_bit (osi->reexamine, varno);
1073 if (dump_file && (dump_flags & TDF_DETAILS))
1075 fprintf (dump_file, "Need to reexamine ");
1076 print_generic_expr (dump_file, var, dump_flags);
1077 fprintf (dump_file, "\n");
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.

   Walks the SSA def chain of VAR.  DEPTH counts how many non-zero
   POINTER_PLUS_EXPR additions were crossed on the way here; if VAR is
   reached again at a *different* depth, the pointer is being increased
   inside a loop, and every SSA name on the walk stack down to VAR gets
   its minimum object size pinned to 0.  osi->depths / osi->stack / osi->tos
   implement the visited-set and the explicit walk stack.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      /* Already on the walk stack.  A mismatched depth means we crossed
	 at least one non-zero pointer addition since first visiting
	 VAR, i.e. a loop that grows the pointer.  */
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  Pop stack entries
	     back to VAR, forcing their object sizes to 0 and marking
	     them as finally computed.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    /* VAR's size is already settled; no loop can change it.  */
    return;

  /* Mark VAR visited at this depth and push it on the walk stack.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    /* Plain copy / nop conversion: recurse at the same depth.  */
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* Only a non-zero addition increases the depth.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	/* Calls only matter if they pass a pointer argument through
	   (e.g. memcpy returning its first argument).  */
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	/* Follow every SSA-name PHI argument at the same depth.  */
	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR from the walk stack.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
1183 /* Check if some pointer we are computing object size of is being increased
1184 within a loop. If yes, assume all the SSA variables participating in
1185 that loop have minimum object sizes 0. */
1187 static void
1188 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1190 gimple *stmt = SSA_NAME_DEF_STMT (var);
1192 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1193 and looked for a POINTER_PLUS_EXPR in the pass-through
1194 argument, if any. In GIMPLE, however, such an expression
1195 is not a valid call operand. */
1197 if (is_gimple_assign (stmt)
1198 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1200 tree basevar = gimple_assign_rhs1 (stmt);
1201 tree cst = gimple_assign_rhs2 (stmt);
1203 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1205 if (integer_zerop (cst))
1206 return;
1208 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1209 *osi->tos++ = SSA_NAME_VERSION (basevar);
1210 check_for_plus_in_loops_1 (osi, var, 2);
1211 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1212 osi->tos--;
1217 /* Initialize data structures for the object size computation. */
1219 void
1220 init_object_sizes (void)
1222 int object_size_type;
1224 if (computed[0])
1225 return;
1227 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1229 object_sizes[object_size_type].safe_grow (num_ssa_names);
1230 computed[object_size_type] = BITMAP_ALLOC (NULL);
1233 init_offset_limit ();
1237 /* Destroy data structures after the object size computation. */
1239 void
1240 fini_object_sizes (void)
1242 int object_size_type;
1244 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1246 object_sizes[object_size_type].release ();
1247 BITMAP_FREE (computed[object_size_type]);
/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

/* Positional aggregate initializer: field order must match pass_data
   exactly.  The pass requires CFG and SSA and leaves both intact.  */
const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Pass wrapper for the object-size folding.  The pass is instantiated
   more than once in the pipeline; set_pass_param selects between the
   early MIN/MAX-inserting variant and the final constant-folding one.  */
class pass_object_sizes : public gimple_opt_pass
{
 public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  /* Single boolean parameter (N must be 0): whether this instance
     inserts MIN_EXPR/MAX_EXPRs instead of folding to constants.  */
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }
  virtual unsigned int execute (function *);

 private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes
/* Dummy valueize function: returns T unchanged.  Lets execute () call
   gimple_fold_stmt_to_constant without doing any SSA value substitution.  */

static tree
do_valueize (tree t)
{
  return t;
}
/* Main pass body: scan every statement of FUN for __builtin_object_size
   calls.  Depending on insert_min_max_p, either clamp the call's result
   with a MIN_EXPR/MAX_EXPR against the computed bound (early pass), or
   fold the call to a constant outright (late pass).  Always returns 0
   (no extra TODO flags).  */
unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  gimple *call = gsi_stmt (i);
	  if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  /* Lazy setup: only allocate the size tables once we have seen
	     at least one __builtin_object_size call.  */
	  init_object_sizes ();

	  /* If insert_min_max_p, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
	     and rather than folding the builtin to the constant if any,
	     create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
	     call result and the computed constant.  */
	  if (insert_min_max_p)
	    {
	      tree ost = gimple_call_arg (call, 1);
	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
		  tree ptr = gimple_call_arg (call, 0);
		  tree lhs = gimple_call_lhs (call);
		  if ((object_size_type == 1 || object_size_type == 3)
		      && (TREE_CODE (ptr) == ADDR_EXPR
			  || TREE_CODE (ptr) == SSA_NAME)
		      && lhs)
		    {
		      tree type = TREE_TYPE (lhs);
		      unsigned HOST_WIDE_INT bytes;
		      if (compute_builtin_object_size (ptr, object_size_type,
						       &bytes)
			  && wi::fits_to_tree_p (bytes, type))
			{
			  /* Redirect the call's result into a fresh SSA
			     name, then feed the old lhs from
			     MIN/MAX (call result, computed bound):
			     type 1 is an upper bound (MIN), type 3 a
			     lower bound (MAX).  */
			  tree tem = make_ssa_name (type);
			  gimple_call_set_lhs (call, tem);
			  enum tree_code code
			    = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
			  tree cst = build_int_cstu (type, bytes);
			  gimple *g
			    = gimple_build_assign (lhs, code, tem, cst);
			  gsi_insert_after (&i, g, GSI_NEW_STMT);
			  update_stmt (call);
			}
		    }
		}
	      continue;
	    }

	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  /* Late pass: try to fold the whole call to a constant.  */
	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      tree ost = gimple_call_arg (call, 1);

	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  /* Unknown size: the documented fallbacks are (size_t)-1
		     for types 0/1 and 0 for types 2/3.  */
		  if (object_size_type < 2)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		  else if (object_size_type < 4)
		    result = build_zero_cst (size_type_node);
		}

	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  replace_uses_by (lhs, result);
	}
    }

  fini_object_sizes ();
  return 0;
}
1397 } // anon namespace
/* Factory function used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}