1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "tree-pass.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
35 #include "stringpool.h"
38 struct object_size_info
/* NOTE(review): interior lines of this struct are missing from this
   extraction (embedded numbering jumps 38 -> 43 -> 45); the opening
   brace and several members (e.g. object_size_type, pass, depths) are
   not visible here -- confirm against the original file.  */
/* Per-computation state.  'visited' marks SSA name versions already
   walked; 'reexamine' marks versions found in dependency cycles that
   pass 2 must revisit.  */
43 bitmap visited
, reexamine
;
/* Worklist used by the pointer-increase-in-loop detection;
   'tos' points at the current top of 'stack' (see *osi->tos++ uses).  */
45 unsigned int *stack
, *tos
;
48 static const unsigned HOST_WIDE_INT unknown
[4] = {
55 static tree
compute_object_offset (const_tree
, const_tree
);
56 static bool addr_object_size (struct object_size_info
*,
57 const_tree
, int, unsigned HOST_WIDE_INT
*);
58 static unsigned HOST_WIDE_INT
alloc_object_size (const gcall
*, int);
59 static tree
pass_through_call (const gcall
*);
60 static void collect_object_sizes_for (struct object_size_info
*, tree
);
61 static void expr_object_size (struct object_size_info
*, tree
, tree
);
62 static bool merge_object_sizes (struct object_size_info
*, tree
, tree
,
63 unsigned HOST_WIDE_INT
);
64 static bool plus_stmt_object_size (struct object_size_info
*, tree
, gimple
*);
65 static bool cond_expr_object_size (struct object_size_info
*, tree
, gimple
*);
66 static void init_offset_limit (void);
67 static void check_for_plus_in_loops (struct object_size_info
*, tree
);
68 static void check_for_plus_in_loops_1 (struct object_size_info
*, tree
,
71 /* object_sizes[0] is upper bound for number of bytes till the end of
73 object_sizes[1] is upper bound for number of bytes till the end of
74 the subobject (innermost array or field with address taken).
75 object_sizes[2] is lower bound for number of bytes till the end of
76 the object and object_sizes[3] lower bound for subobject. */
77 static vec
<unsigned HOST_WIDE_INT
> object_sizes
[4];
79 /* Bitmaps what object sizes have been computed already. */
80 static bitmap computed
[4];
82 /* Maximum value of offset we consider to be addition. */
83 static unsigned HOST_WIDE_INT offset_limit
;
86 /* Initialize OFFSET_LIMIT variable. */
/* NOTE(review): lines are missing from this extraction (numbering jumps
   86 -> 88 -> 90); the function's declaration specifier line, opening
   brace, and any else-branch / closing brace are not visible here.  */
88 init_offset_limit (void)
/* Cache the maximum value representable in sizetype; constant offsets
   at or above this limit are treated as unknown by the callers that
   compare against offset_limit.  */
90 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype
)))
91 offset_limit
= tree_to_uhwi (TYPE_MAX_VALUE (sizetype
));
98 /* Compute offset of EXPR within VAR. Return error_mark_node
102 compute_object_offset (const_tree expr
, const_tree var
)
104 enum tree_code code
= PLUS_EXPR
;
108 return size_zero_node
;
110 switch (TREE_CODE (expr
))
113 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
114 if (base
== error_mark_node
)
117 t
= TREE_OPERAND (expr
, 1);
118 off
= size_binop (PLUS_EXPR
, DECL_FIELD_OFFSET (t
),
119 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t
))
125 case VIEW_CONVERT_EXPR
:
126 case NON_LVALUE_EXPR
:
127 return compute_object_offset (TREE_OPERAND (expr
, 0), var
);
130 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
131 if (base
== error_mark_node
)
134 off
= TYPE_SIZE_UNIT (TREE_TYPE (expr
));
138 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
139 if (base
== error_mark_node
)
142 t
= TREE_OPERAND (expr
, 1);
143 tree low_bound
, unit_size
;
144 low_bound
= array_ref_low_bound (CONST_CAST_TREE (expr
));
145 unit_size
= array_ref_element_size (CONST_CAST_TREE (expr
));
146 if (! integer_zerop (low_bound
))
147 t
= fold_build2 (MINUS_EXPR
, TREE_TYPE (t
), t
, low_bound
);
148 if (TREE_CODE (t
) == INTEGER_CST
&& tree_int_cst_sgn (t
) < 0)
151 t
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (t
), t
);
153 t
= fold_convert (sizetype
, t
);
154 off
= size_binop (MULT_EXPR
, unit_size
, t
);
158 gcc_assert (TREE_CODE (TREE_OPERAND (expr
, 0)) == ADDR_EXPR
);
159 return wide_int_to_tree (sizetype
, mem_ref_offset (expr
));
162 return error_mark_node
;
165 return size_binop (code
, base
, off
);
169 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
170 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
171 If unknown, return unknown[object_size_type]. */
174 addr_object_size (struct object_size_info
*osi
, const_tree ptr
,
175 int object_size_type
, unsigned HOST_WIDE_INT
*psize
)
177 tree pt_var
, pt_var_size
= NULL_TREE
, var_size
, bytes
;
179 gcc_assert (TREE_CODE (ptr
) == ADDR_EXPR
);
181 /* Set to unknown and overwrite just before returning if the size
182 could be determined. */
183 *psize
= unknown
[object_size_type
];
185 pt_var
= TREE_OPERAND (ptr
, 0);
186 while (handled_component_p (pt_var
))
187 pt_var
= TREE_OPERAND (pt_var
, 0);
190 && TREE_CODE (pt_var
) == MEM_REF
)
192 unsigned HOST_WIDE_INT sz
;
194 if (!osi
|| (object_size_type
& 1) != 0
195 || TREE_CODE (TREE_OPERAND (pt_var
, 0)) != SSA_NAME
)
197 compute_builtin_object_size (TREE_OPERAND (pt_var
, 0),
198 object_size_type
& ~1, &sz
);
202 tree var
= TREE_OPERAND (pt_var
, 0);
204 collect_object_sizes_for (osi
, var
);
205 if (bitmap_bit_p (computed
[object_size_type
],
206 SSA_NAME_VERSION (var
)))
207 sz
= object_sizes
[object_size_type
][SSA_NAME_VERSION (var
)];
209 sz
= unknown
[object_size_type
];
211 if (sz
!= unknown
[object_size_type
])
213 offset_int mem_offset
;
214 if (mem_ref_offset (pt_var
).is_constant (&mem_offset
))
216 offset_int dsz
= wi::sub (sz
, mem_offset
);
219 else if (wi::fits_uhwi_p (dsz
))
222 sz
= unknown
[object_size_type
];
225 sz
= unknown
[object_size_type
];
228 if (sz
!= unknown
[object_size_type
] && sz
< offset_limit
)
229 pt_var_size
= size_int (sz
);
233 && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var
))
234 && tree_to_uhwi (DECL_SIZE_UNIT (pt_var
)) < offset_limit
)
235 pt_var_size
= DECL_SIZE_UNIT (pt_var
);
237 && TREE_CODE (pt_var
) == STRING_CST
238 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var
))
239 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var
)))
240 && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var
)))
242 pt_var_size
= TYPE_SIZE_UNIT (TREE_TYPE (pt_var
));
246 if (pt_var
!= TREE_OPERAND (ptr
, 0))
250 if (object_size_type
& 1)
252 var
= TREE_OPERAND (ptr
, 0);
255 && TREE_CODE (var
) != BIT_FIELD_REF
256 && TREE_CODE (var
) != COMPONENT_REF
257 && TREE_CODE (var
) != ARRAY_REF
258 && TREE_CODE (var
) != ARRAY_RANGE_REF
259 && TREE_CODE (var
) != REALPART_EXPR
260 && TREE_CODE (var
) != IMAGPART_EXPR
)
261 var
= TREE_OPERAND (var
, 0);
262 if (var
!= pt_var
&& TREE_CODE (var
) == ARRAY_REF
)
263 var
= TREE_OPERAND (var
, 0);
264 if (! TYPE_SIZE_UNIT (TREE_TYPE (var
))
265 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var
)))
267 && tree_int_cst_lt (pt_var_size
,
268 TYPE_SIZE_UNIT (TREE_TYPE (var
)))))
270 else if (var
!= pt_var
&& TREE_CODE (pt_var
) == MEM_REF
)
273 /* For &X->fld, compute object size only if fld isn't the last
274 field, as struct { int i; char c[1]; } is often used instead
275 of flexible array member. */
276 while (v
&& v
!= pt_var
)
277 switch (TREE_CODE (v
))
280 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v
, 0)))
281 && TREE_CODE (TREE_OPERAND (v
, 1)) == INTEGER_CST
)
284 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v
, 0)));
286 && TYPE_MAX_VALUE (domain
)
287 && TREE_CODE (TYPE_MAX_VALUE (domain
))
289 && tree_int_cst_lt (TREE_OPERAND (v
, 1),
290 TYPE_MAX_VALUE (domain
)))
296 v
= TREE_OPERAND (v
, 0);
303 if (TREE_CODE (TREE_TYPE (v
)) != ARRAY_TYPE
)
308 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
309 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
311 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
315 v
= TREE_OPERAND (v
, 0);
316 if (TREE_CODE (v
) == COMPONENT_REF
317 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
320 tree fld_chain
= DECL_CHAIN (TREE_OPERAND (v
, 1));
321 for (; fld_chain
; fld_chain
= DECL_CHAIN (fld_chain
))
322 if (TREE_CODE (fld_chain
) == FIELD_DECL
)
330 v
= TREE_OPERAND (v
, 0);
332 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
333 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
335 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
339 v
= TREE_OPERAND (v
, 0);
357 var_size
= TYPE_SIZE_UNIT (TREE_TYPE (var
));
358 else if (!pt_var_size
)
361 var_size
= pt_var_size
;
362 bytes
= compute_object_offset (TREE_OPERAND (ptr
, 0), var
);
363 if (bytes
!= error_mark_node
)
365 if (TREE_CODE (bytes
) == INTEGER_CST
366 && tree_int_cst_lt (var_size
, bytes
))
367 bytes
= size_zero_node
;
369 bytes
= size_binop (MINUS_EXPR
, var_size
, bytes
);
373 && TREE_CODE (pt_var
) == MEM_REF
374 && bytes
!= error_mark_node
)
376 tree bytes2
= compute_object_offset (TREE_OPERAND (ptr
, 0), pt_var
);
377 if (bytes2
!= error_mark_node
)
379 if (TREE_CODE (bytes2
) == INTEGER_CST
380 && tree_int_cst_lt (pt_var_size
, bytes2
))
381 bytes2
= size_zero_node
;
383 bytes2
= size_binop (MINUS_EXPR
, pt_var_size
, bytes2
);
384 bytes
= size_binop (MIN_EXPR
, bytes
, bytes2
);
388 else if (!pt_var_size
)
393 if (tree_fits_uhwi_p (bytes
))
395 *psize
= tree_to_uhwi (bytes
);
403 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
404 Handles calls to functions declared with attribute alloc_size.
405 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
406 If unknown, return unknown[object_size_type]. */
/* NOTE(review): this extraction has interior lines missing (numbering
   jumps e.g. 411 -> 414, 417 -> 420, 426 -> 428); braces and some
   conditions (including the guard before the line-420 early return and
   the conditions selecting the one- vs two-argument computation) are
   not visible here -- verify against the original file.  */
408 static unsigned HOST_WIDE_INT
409 alloc_object_size (const gcall
*call
, int object_size_type
)
411 gcc_assert (is_gimple_call (call
));
/* Prefer the type of the called decl when known; otherwise fall back
   to the call's function type, so the alloc_size attribute can be
   found on either.  */
414 if (tree callfn
= gimple_call_fndecl (call
))
415 calltype
= TREE_TYPE (callfn
);
417 calltype
= gimple_call_fntype (call
);
420 return unknown
[object_size_type
];
422 /* Set to positions of alloc_size arguments. */
423 int arg1
= -1, arg2
= -1;
424 tree alloc_size
= lookup_attribute ("alloc_size",
425 TYPE_ATTRIBUTES (calltype
));
426 if (alloc_size
&& TREE_VALUE (alloc_size
))
/* alloc_size arguments in the attribute are 1-based; convert to
   0-based call-argument indices.  */
428 tree p
= TREE_VALUE (alloc_size
);
430 arg1
= TREE_INT_CST_LOW (TREE_VALUE (p
))-1;
432 arg2
= TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p
)))-1;
/* Give up unless the size argument(s) are in-range INTEGER_CSTs.  */
435 if (arg1
< 0 || arg1
>= (int)gimple_call_num_args (call
)
436 || TREE_CODE (gimple_call_arg (call
, arg1
)) != INTEGER_CST
438 && (arg2
>= (int)gimple_call_num_args (call
)
439 || TREE_CODE (gimple_call_arg (call
, arg2
)) != INTEGER_CST
)))
440 return unknown
[object_size_type
];
442 tree bytes
= NULL_TREE
;
/* Two-argument form (e.g. calloc-like): size is arg1 * arg2.  */
444 bytes
= size_binop (MULT_EXPR
,
445 fold_convert (sizetype
, gimple_call_arg (call
, arg1
)),
446 fold_convert (sizetype
, gimple_call_arg (call
, arg2
)));
/* One-argument form (e.g. malloc-like): size is arg1.  */
448 bytes
= fold_convert (sizetype
, gimple_call_arg (call
, arg1
));
450 if (bytes
&& tree_fits_uhwi_p (bytes
))
451 return tree_to_uhwi (bytes
);
453 return unknown
[object_size_type
];
457 /* If object size is propagated from one of function's arguments directly
458 to its return value, return that argument for GIMPLE_CALL statement CALL.
459 Otherwise return NULL. */
/* NOTE(review): interior lines are missing from this extraction
   (numbering jumps 459 -> 462, 474 -> end); the return-type line,
   braces, and the final fall-through return are not visible here.  */
462 pass_through_call (const gcall
*call
)
/* ERF_RETURNS_ARG in the return flags means the call returns one of
   its arguments verbatim; the argument index is encoded in the low
   ERF_RETURN_ARG_MASK bits.  */
464 unsigned rf
= gimple_call_return_flags (call
);
465 if (rf
& ERF_RETURNS_ARG
)
467 unsigned argnum
= rf
& ERF_RETURN_ARG_MASK
;
468 if (argnum
< gimple_call_num_args (call
))
469 return gimple_call_arg (call
, argnum
);
472 /* __builtin_assume_aligned is intentionally not marked RET1. */
473 if (gimple_call_builtin_p (call
, BUILT_IN_ASSUME_ALIGNED
))
474 return gimple_call_arg (call
, 0);
480 /* Compute __builtin_object_size value for PTR and set *PSIZE to
481 the resulting value. OBJECT_SIZE_TYPE is the second argument
482 to __builtin_object_size. Return true on success and false
483 when the object size could not be determined. */
486 compute_builtin_object_size (tree ptr
, int object_size_type
,
487 unsigned HOST_WIDE_INT
*psize
)
489 gcc_assert (object_size_type
>= 0 && object_size_type
<= 3);
491 /* Set to unknown and overwrite just before returning if the size
492 could be determined. */
493 *psize
= unknown
[object_size_type
];
496 init_offset_limit ();
498 if (TREE_CODE (ptr
) == ADDR_EXPR
)
499 return addr_object_size (NULL
, ptr
, object_size_type
, psize
);
501 if (TREE_CODE (ptr
) != SSA_NAME
502 || !POINTER_TYPE_P (TREE_TYPE (ptr
)))
505 if (computed
[object_size_type
] == NULL
)
507 if (optimize
|| object_size_type
& 1)
510 /* When not optimizing, rather than failing, make a small effort
511 to determine the object size without the full benefit of
512 the (costly) computation below. */
513 gimple
*def
= SSA_NAME_DEF_STMT (ptr
);
514 if (gimple_code (def
) == GIMPLE_ASSIGN
)
516 tree_code code
= gimple_assign_rhs_code (def
);
517 if (code
== POINTER_PLUS_EXPR
)
519 tree offset
= gimple_assign_rhs2 (def
);
520 ptr
= gimple_assign_rhs1 (def
);
522 if (tree_fits_shwi_p (offset
)
523 && compute_builtin_object_size (ptr
, object_size_type
, psize
))
525 /* Return zero when the offset is out of bounds. */
526 unsigned HOST_WIDE_INT off
= tree_to_shwi (offset
);
527 *psize
= off
< *psize
? *psize
- off
: 0;
535 if (!bitmap_bit_p (computed
[object_size_type
], SSA_NAME_VERSION (ptr
)))
537 struct object_size_info osi
;
541 if (num_ssa_names
> object_sizes
[object_size_type
].length ())
542 object_sizes
[object_size_type
].safe_grow (num_ssa_names
);
545 fprintf (dump_file
, "Computing %s %sobject size for ",
546 (object_size_type
& 2) ? "minimum" : "maximum",
547 (object_size_type
& 1) ? "sub" : "");
548 print_generic_expr (dump_file
, ptr
, dump_flags
);
549 fprintf (dump_file
, ":\n");
552 osi
.visited
= BITMAP_ALLOC (NULL
);
553 osi
.reexamine
= BITMAP_ALLOC (NULL
);
554 osi
.object_size_type
= object_size_type
;
559 /* First pass: walk UD chains, compute object sizes that
560 can be computed. osi.reexamine bitmap at the end will
561 contain what variables were found in dependency cycles
562 and therefore need to be reexamined. */
565 collect_object_sizes_for (&osi
, ptr
);
567 /* Second pass: keep recomputing object sizes of variables
568 that need reexamination, until no object sizes are
569 increased or all object sizes are computed. */
570 if (! bitmap_empty_p (osi
.reexamine
))
572 bitmap reexamine
= BITMAP_ALLOC (NULL
);
574 /* If looking for minimum instead of maximum object size,
575 detect cases where a pointer is increased in a loop.
576 Although even without this detection pass 2 would eventually
577 terminate, it could take a long time. If a pointer is
578 increasing this way, we need to assume 0 object size.
579 E.g. p = &buf[0]; while (cond) p = p + 4; */
580 if (object_size_type
& 2)
582 osi
.depths
= XCNEWVEC (unsigned int, num_ssa_names
);
583 osi
.stack
= XNEWVEC (unsigned int, num_ssa_names
);
586 /* collect_object_sizes_for is changing
587 osi.reexamine bitmap, so iterate over a copy. */
588 bitmap_copy (reexamine
, osi
.reexamine
);
589 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
590 if (bitmap_bit_p (osi
.reexamine
, i
))
591 check_for_plus_in_loops (&osi
, ssa_name (i
));
604 /* collect_object_sizes_for is changing
605 osi.reexamine bitmap, so iterate over a copy. */
606 bitmap_copy (reexamine
, osi
.reexamine
);
607 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
608 if (bitmap_bit_p (osi
.reexamine
, i
))
610 collect_object_sizes_for (&osi
, ssa_name (i
));
611 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
613 fprintf (dump_file
, "Reexamining ");
614 print_generic_expr (dump_file
, ssa_name (i
),
616 fprintf (dump_file
, "\n");
622 BITMAP_FREE (reexamine
);
624 EXECUTE_IF_SET_IN_BITMAP (osi
.reexamine
, 0, i
, bi
)
625 bitmap_set_bit (computed
[object_size_type
], i
);
627 /* Debugging dumps. */
630 EXECUTE_IF_SET_IN_BITMAP (osi
.visited
, 0, i
, bi
)
631 if (object_sizes
[object_size_type
][i
]
632 != unknown
[object_size_type
])
634 print_generic_expr (dump_file
, ssa_name (i
),
637 ": %s %sobject size "
638 HOST_WIDE_INT_PRINT_UNSIGNED
"\n",
639 (object_size_type
& 2) ? "minimum" : "maximum",
640 (object_size_type
& 1) ? "sub" : "",
641 object_sizes
[object_size_type
][i
]);
645 BITMAP_FREE (osi
.reexamine
);
646 BITMAP_FREE (osi
.visited
);
649 *psize
= object_sizes
[object_size_type
][SSA_NAME_VERSION (ptr
)];
650 return *psize
!= unknown
[object_size_type
];
653 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
/* NOTE(review): interior lines are missing from this extraction
   (numbering jumps e.g. 664 -> 666, 674 -> 676, 681 -> 685); braces
   and the else keywords joining the visible statements are not
   visible here.  */
656 expr_object_size (struct object_size_info
*osi
, tree ptr
, tree value
)
658 int object_size_type
= osi
->object_size_type
;
659 unsigned int varno
= SSA_NAME_VERSION (ptr
);
660 unsigned HOST_WIDE_INT bytes
;
/* Only reachable when the size is still undecided, and only during
   the first pass.  */
662 gcc_assert (object_sizes
[object_size_type
][varno
]
663 != unknown
[object_size_type
]);
664 gcc_assert (osi
->pass
== 0);
/* Look through WITH_SIZE_EXPR to the underlying value.  */
666 if (TREE_CODE (value
) == WITH_SIZE_EXPR
)
667 value
= TREE_OPERAND (value
, 0);
669 /* Pointer variables should have been handled by merge_object_sizes. */
670 gcc_assert (TREE_CODE (value
) != SSA_NAME
671 || !POINTER_TYPE_P (TREE_TYPE (value
)));
673 if (TREE_CODE (value
) == ADDR_EXPR
)
674 addr_object_size (osi
, value
, object_size_type
, &bytes
);
676 bytes
= unknown
[object_size_type
];
/* Merge: maximum modes (bit 2 clear) keep the larger size, minimum
   modes keep the smaller one.  */
678 if ((object_size_type
& 2) == 0)
680 if (object_sizes
[object_size_type
][varno
] < bytes
)
681 object_sizes
[object_size_type
][varno
] = bytes
;
685 if (object_sizes
[object_size_type
][varno
] > bytes
)
686 object_sizes
[object_size_type
][varno
] = bytes
;
691 /* Compute object_sizes for PTR, defined to the result of a call. */
/* NOTE(review): interior lines (braces, else keywords) are missing from
   this extraction; structure mirrors expr_object_size.  */
694 call_object_size (struct object_size_info
*osi
, tree ptr
, gcall
*call
)
696 int object_size_type
= osi
->object_size_type
;
697 unsigned int varno
= SSA_NAME_VERSION (ptr
);
698 unsigned HOST_WIDE_INT bytes
;
700 gcc_assert (is_gimple_call (call
));
/* Only reachable when the size is still undecided, first pass only.  */
702 gcc_assert (object_sizes
[object_size_type
][varno
]
703 != unknown
[object_size_type
]);
704 gcc_assert (osi
->pass
== 0);
/* Size comes from the callee's alloc_size attribute, if any.  */
706 bytes
= alloc_object_size (call
, object_size_type
);
/* Merge: maximum modes keep the larger size, minimum modes the
   smaller one.  */
708 if ((object_size_type
& 2) == 0)
710 if (object_sizes
[object_size_type
][varno
] < bytes
)
711 object_sizes
[object_size_type
][varno
] = bytes
;
715 if (object_sizes
[object_size_type
][varno
] > bytes
)
716 object_sizes
[object_size_type
][varno
] = bytes
;
721 /* Compute object_sizes for PTR, defined to an unknown value. */
/* NOTE(review): interior lines (braces, else keywords) are missing from
   this extraction; structure mirrors expr_object_size.  */
724 unknown_object_size (struct object_size_info
*osi
, tree ptr
)
726 int object_size_type
= osi
->object_size_type
;
727 unsigned int varno
= SSA_NAME_VERSION (ptr
);
728 unsigned HOST_WIDE_INT bytes
;
730 gcc_assert (object_sizes
[object_size_type
][varno
]
731 != unknown
[object_size_type
]);
732 gcc_assert (osi
->pass
== 0);
/* Force the sentinel 'unknown' value through the usual merge logic.  */
734 bytes
= unknown
[object_size_type
];
736 if ((object_size_type
& 2) == 0)
738 if (object_sizes
[object_size_type
][varno
] < bytes
)
739 object_sizes
[object_size_type
][varno
] = bytes
;
743 if (object_sizes
[object_size_type
][varno
] > bytes
)
744 object_sizes
[object_size_type
][varno
] = bytes
;
749 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
750 the object size might need reexamination later. */
/* NOTE(review): interior lines are missing from this extraction
   (numbering jumps e.g. 758 -> 760, 764 -> 769, 780 -> 786); braces,
   early returns after the unknown-size cases, and any guard before
   the collect_object_sizes_for call are not visible here.  */
753 merge_object_sizes (struct object_size_info
*osi
, tree dest
, tree orig
,
754 unsigned HOST_WIDE_INT offset
)
756 int object_size_type
= osi
->object_size_type
;
757 unsigned int varno
= SSA_NAME_VERSION (dest
);
758 unsigned HOST_WIDE_INT orig_bytes
;
/* Nothing to do once DEST is already known-unknown.  */
760 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
/* An offset at or past offset_limit makes the result unknown.  */
762 if (offset
>= offset_limit
)
764 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
769 collect_object_sizes_for (osi
, orig
);
/* Subtract the offset from ORIG's size, clamping at zero.  */
771 orig_bytes
= object_sizes
[object_size_type
][SSA_NAME_VERSION (orig
)];
772 if (orig_bytes
!= unknown
[object_size_type
])
773 orig_bytes
= (offset
> orig_bytes
)
774 ? HOST_WIDE_INT_0U
: orig_bytes
- offset
;
/* Merge into DEST: maximum modes keep the larger size, minimum modes
   the smaller one.  */
776 if ((object_size_type
& 2) == 0)
778 if (object_sizes
[object_size_type
][varno
] < orig_bytes
)
780 object_sizes
[object_size_type
][varno
] = orig_bytes
;
786 if (object_sizes
[object_size_type
][varno
] > orig_bytes
)
788 object_sizes
[object_size_type
][varno
] = orig_bytes
;
/* DEST needs reexamination iff ORIG is still in a dependency cycle.  */
792 return bitmap_bit_p (osi
->reexamine
, SSA_NAME_VERSION (orig
));
796 /* Compute object_sizes for VAR, defined to the result of an assignment
797 with operator POINTER_PLUS_EXPR. Return true if the object size might
798 need reexamination later. */
801 plus_stmt_object_size (struct object_size_info
*osi
, tree var
, gimple
*stmt
)
803 int object_size_type
= osi
->object_size_type
;
804 unsigned int varno
= SSA_NAME_VERSION (var
);
805 unsigned HOST_WIDE_INT bytes
;
808 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
810 op0
= gimple_assign_rhs1 (stmt
);
811 op1
= gimple_assign_rhs2 (stmt
);
813 else if (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
)
815 tree rhs
= TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
816 gcc_assert (TREE_CODE (rhs
) == MEM_REF
);
817 op0
= TREE_OPERAND (rhs
, 0);
818 op1
= TREE_OPERAND (rhs
, 1);
823 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
826 /* Handle PTR + OFFSET here. */
827 if (TREE_CODE (op1
) == INTEGER_CST
828 && (TREE_CODE (op0
) == SSA_NAME
829 || TREE_CODE (op0
) == ADDR_EXPR
))
831 if (! tree_fits_uhwi_p (op1
))
832 bytes
= unknown
[object_size_type
];
833 else if (TREE_CODE (op0
) == SSA_NAME
)
834 return merge_object_sizes (osi
, var
, op0
, tree_to_uhwi (op1
));
837 unsigned HOST_WIDE_INT off
= tree_to_uhwi (op1
);
839 /* op0 will be ADDR_EXPR here. */
840 addr_object_size (osi
, op0
, object_size_type
, &bytes
);
841 if (bytes
== unknown
[object_size_type
])
843 else if (off
> offset_limit
)
844 bytes
= unknown
[object_size_type
];
845 else if (off
> bytes
)
852 bytes
= unknown
[object_size_type
];
854 if ((object_size_type
& 2) == 0)
856 if (object_sizes
[object_size_type
][varno
] < bytes
)
857 object_sizes
[object_size_type
][varno
] = bytes
;
861 if (object_sizes
[object_size_type
][varno
] > bytes
)
862 object_sizes
[object_size_type
][varno
] = bytes
;
868 /* Compute object_sizes for VAR, defined at STMT, which is
869 a COND_EXPR. Return true if the object size might need reexamination
/* NOTE(review): interior lines are missing from this extraction
   (numbering jumps 869 -> 873, 882 -> 885, 896 -> end); the comment's
   tail, braces, else keywords, and the final return of 'reexamine'
   are not visible here.  */
873 cond_expr_object_size (struct object_size_info
*osi
, tree var
, gimple
*stmt
)
876 int object_size_type
= osi
->object_size_type
;
877 unsigned int varno
= SSA_NAME_VERSION (var
);
878 bool reexamine
= false;
880 gcc_assert (gimple_assign_rhs_code (stmt
) == COND_EXPR
);
882 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
/* Merge the sizes of both arms of the conditional into VAR.  */
885 then_
= gimple_assign_rhs2 (stmt
);
886 else_
= gimple_assign_rhs3 (stmt
);
/* SSA names go through merge_object_sizes (which may flag a cycle);
   other expressions through expr_object_size.  */
888 if (TREE_CODE (then_
) == SSA_NAME
)
889 reexamine
|= merge_object_sizes (osi
, var
, then_
, 0);
891 expr_object_size (osi
, var
, then_
);
893 if (TREE_CODE (else_
) == SSA_NAME
)
894 reexamine
|= merge_object_sizes (osi
, var
, else_
, 0);
896 expr_object_size (osi
, var
, else_
);
901 /* Compute object sizes for VAR.
902 For ADDR_EXPR an object size is the number of remaining bytes
903 to the end of the object (where what is considered an object depends on
904 OSI->object_size_type).
905 For allocation GIMPLE_CALL like malloc or calloc object size is the size
907 For POINTER_PLUS_EXPR where second operand is a constant integer,
908 object size is object size of the first operand minus the constant.
909 If the constant is bigger than the number of remaining bytes until the
910 end of the object, object size is 0, but if it is instead a pointer
911 subtraction, object size is unknown[object_size_type].
912 To differentiate addition from subtraction, ADDR_EXPR returns
913 unknown[object_size_type] for all objects bigger than half of the address
914 space, and constants less than half of the address space are considered
915 addition, while bigger constants subtraction.
916 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
917 object size is object size of that argument.
918 Otherwise, object size is the maximum of object sizes of variables
919 that it might be set to. */
922 collect_object_sizes_for (struct object_size_info
*osi
, tree var
)
924 int object_size_type
= osi
->object_size_type
;
925 unsigned int varno
= SSA_NAME_VERSION (var
);
929 if (bitmap_bit_p (computed
[object_size_type
], varno
))
934 if (bitmap_set_bit (osi
->visited
, varno
))
936 object_sizes
[object_size_type
][varno
]
937 = (object_size_type
& 2) ? -1 : 0;
941 /* Found a dependency loop. Mark the variable for later
943 bitmap_set_bit (osi
->reexamine
, varno
);
944 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
946 fprintf (dump_file
, "Found a dependency loop at ");
947 print_generic_expr (dump_file
, var
, dump_flags
);
948 fprintf (dump_file
, "\n");
954 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
956 fprintf (dump_file
, "Visiting use-def links for ");
957 print_generic_expr (dump_file
, var
, dump_flags
);
958 fprintf (dump_file
, "\n");
961 stmt
= SSA_NAME_DEF_STMT (var
);
964 switch (gimple_code (stmt
))
968 tree rhs
= gimple_assign_rhs1 (stmt
);
969 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
970 || (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
971 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
))
972 reexamine
= plus_stmt_object_size (osi
, var
, stmt
);
973 else if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
974 reexamine
= cond_expr_object_size (osi
, var
, stmt
);
975 else if (gimple_assign_single_p (stmt
)
976 || gimple_assign_unary_nop_p (stmt
))
978 if (TREE_CODE (rhs
) == SSA_NAME
979 && POINTER_TYPE_P (TREE_TYPE (rhs
)))
980 reexamine
= merge_object_sizes (osi
, var
, rhs
, 0);
982 expr_object_size (osi
, var
, rhs
);
985 unknown_object_size (osi
, var
);
991 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
992 tree arg
= pass_through_call (call_stmt
);
995 if (TREE_CODE (arg
) == SSA_NAME
996 && POINTER_TYPE_P (TREE_TYPE (arg
)))
997 reexamine
= merge_object_sizes (osi
, var
, arg
, 0);
999 expr_object_size (osi
, var
, arg
);
1002 call_object_size (osi
, var
, call_stmt
);
1007 /* Pointers defined by __asm__ statements can point anywhere. */
1008 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
1012 if (SSA_NAME_VAR (var
)
1013 && TREE_CODE (SSA_NAME_VAR (var
)) == PARM_DECL
)
1014 expr_object_size (osi
, var
, SSA_NAME_VAR (var
));
1016 /* Uninitialized SSA names point nowhere. */
1017 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
1024 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1026 tree rhs
= gimple_phi_arg (stmt
, i
)->def
;
1028 if (object_sizes
[object_size_type
][varno
]
1029 == unknown
[object_size_type
])
1032 if (TREE_CODE (rhs
) == SSA_NAME
)
1033 reexamine
|= merge_object_sizes (osi
, var
, rhs
, 0);
1034 else if (osi
->pass
== 0)
1035 expr_object_size (osi
, var
, rhs
);
1045 || object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
1047 bitmap_set_bit (computed
[object_size_type
], varno
);
1048 bitmap_clear_bit (osi
->reexamine
, varno
);
1052 bitmap_set_bit (osi
->reexamine
, varno
);
1053 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1055 fprintf (dump_file
, "Need to reexamine ");
1056 print_generic_expr (dump_file
, var
, dump_flags
);
1057 fprintf (dump_file
, "\n");
1063 /* Helper function for check_for_plus_in_loops. Called recursively
1067 check_for_plus_in_loops_1 (struct object_size_info
*osi
, tree var
,
1070 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
1071 unsigned int varno
= SSA_NAME_VERSION (var
);
1073 if (osi
->depths
[varno
])
1075 if (osi
->depths
[varno
] != depth
)
1079 /* Found a loop involving pointer addition. */
1080 for (sp
= osi
->tos
; sp
> osi
->stack
; )
1083 bitmap_clear_bit (osi
->reexamine
, *sp
);
1084 bitmap_set_bit (computed
[osi
->object_size_type
], *sp
);
1085 object_sizes
[osi
->object_size_type
][*sp
] = 0;
1092 else if (! bitmap_bit_p (osi
->reexamine
, varno
))
1095 osi
->depths
[varno
] = depth
;
1096 *osi
->tos
++ = varno
;
1098 switch (gimple_code (stmt
))
1103 if ((gimple_assign_single_p (stmt
)
1104 || gimple_assign_unary_nop_p (stmt
))
1105 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
1107 tree rhs
= gimple_assign_rhs1 (stmt
);
1109 check_for_plus_in_loops_1 (osi
, rhs
, depth
);
1111 else if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1113 tree basevar
= gimple_assign_rhs1 (stmt
);
1114 tree cst
= gimple_assign_rhs2 (stmt
);
1116 gcc_assert (TREE_CODE (cst
) == INTEGER_CST
);
1118 check_for_plus_in_loops_1 (osi
, basevar
,
1119 depth
+ !integer_zerop (cst
));
1128 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
1129 tree arg
= pass_through_call (call_stmt
);
1132 if (TREE_CODE (arg
) == SSA_NAME
)
1133 check_for_plus_in_loops_1 (osi
, arg
, depth
);
1144 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1146 tree rhs
= gimple_phi_arg (stmt
, i
)->def
;
1148 if (TREE_CODE (rhs
) == SSA_NAME
)
1149 check_for_plus_in_loops_1 (osi
, rhs
, depth
);
1158 osi
->depths
[varno
] = 0;
1163 /* Check if some pointer we are computing object size of is being increased
1164 within a loop. If yes, assume all the SSA variables participating in
1165 that loop have minimum object sizes 0. */
/* NOTE(review): interior lines are missing from this extraction
   (numbering jumps 1165 -> 1168, 1185 -> 1188); the return type,
   braces, and the early return after the integer_zerop check are not
   visible here.  */
1168 check_for_plus_in_loops (struct object_size_info
*osi
, tree var
)
1170 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
1172 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1173 and looked for a POINTER_PLUS_EXPR in the pass-through
1174 argument, if any. In GIMPLE, however, such an expression
1175 is not a valid call operand. */
1177 if (is_gimple_assign (stmt
)
1178 && gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1180 tree basevar
= gimple_assign_rhs1 (stmt
);
1181 tree cst
= gimple_assign_rhs2 (stmt
);
1183 gcc_assert (TREE_CODE (cst
) == INTEGER_CST
);
/* A zero addend cannot grow the pointer in a loop.  */
1185 if (integer_zerop (cst
))
/* Seed the depth tracking with BASEVAR at depth 1, push it on the
   stack, and walk the use-def chains; restore the depth afterwards.  */
1188 osi
->depths
[SSA_NAME_VERSION (basevar
)] = 1;
1189 *osi
->tos
++ = SSA_NAME_VERSION (basevar
);
1190 check_for_plus_in_loops_1 (osi
, var
, 2);
1191 osi
->depths
[SSA_NAME_VERSION (basevar
)] = 0;
1197 /* Initialize data structures for the object size computation. */
/* NOTE(review): interior lines are missing from this extraction
   (numbering jumps 1197 -> 1200, 1202 -> 1207); the return type,
   braces, and any early-return guard (e.g. when already initialized)
   are not visible here.  */
1200 init_object_sizes (void)
1202 int object_size_type
;
/* Size the per-mode vectors to the number of SSA names and allocate
   one 'computed' bitmap per object_size_type (0..3).  */
1207 for (object_size_type
= 0; object_size_type
<= 3; object_size_type
++)
1209 object_sizes
[object_size_type
].safe_grow (num_ssa_names
);
1210 computed
[object_size_type
] = BITMAP_ALLOC (NULL
);
1213 init_offset_limit ();
1217 /* Destroy data structures after the object size computation. */
/* NOTE(review): interior lines (return type, braces) are missing from
   this extraction; mirrors init_object_sizes in reverse.  */
1220 fini_object_sizes (void)
1222 int object_size_type
;
/* Release the per-mode size vectors and free the 'computed' bitmaps.  */
1224 for (object_size_type
= 0; object_size_type
<= 3; object_size_type
++)
1226 object_sizes
[object_size_type
].release ();
1227 BITMAP_FREE (computed
[object_size_type
]);
1232 /* Simple pass to optimize all __builtin_object_size () builtins. */
1236 const pass_data pass_data_object_sizes
=
1238 GIMPLE_PASS
, /* type */
1240 OPTGROUP_NONE
, /* optinfo_flags */
1241 TV_NONE
, /* tv_id */
1242 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1243 0, /* properties_provided */
1244 0, /* properties_destroyed */
1245 0, /* todo_flags_start */
1246 0, /* todo_flags_finish */
1249 class pass_object_sizes
: public gimple_opt_pass
1252 pass_object_sizes (gcc::context
*ctxt
)
1253 : gimple_opt_pass (pass_data_object_sizes
, ctxt
), insert_min_max_p (false)
1256 /* opt_pass methods: */
1257 opt_pass
* clone () { return new pass_object_sizes (m_ctxt
); }
1258 void set_pass_param (unsigned int n
, bool param
)
1260 gcc_assert (n
== 0);
1261 insert_min_max_p
= param
;
1263 virtual unsigned int execute (function
*);
1266 /* Determines whether the pass instance creates MIN/MAX_EXPRs. */
1267 bool insert_min_max_p
;
1268 }; // class pass_object_sizes
1270 /* Dummy valueize function. */
/* NOTE(review): the return type and body are missing from this
   extraction (numbering jumps 1270 -> 1273 -> 1279); presumably this
   just returns T unchanged for gimple_fold_stmt_to_constant -- confirm
   against the original file.  */
1273 do_valueize (tree t
)
1279 pass_object_sizes::execute (function
*fun
)
1282 FOR_EACH_BB_FN (bb
, fun
)
1284 gimple_stmt_iterator i
;
1285 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1288 gimple
*call
= gsi_stmt (i
);
1289 if (!gimple_call_builtin_p (call
, BUILT_IN_OBJECT_SIZE
))
1292 init_object_sizes ();
1294 /* If insert_min_max_p, only attempt to fold
1295 __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
1296 and rather than folding the builtin to the constant if any,
1297 create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
1298 call result and the computed constant. */
1299 if (insert_min_max_p
)
1301 tree ost
= gimple_call_arg (call
, 1);
1302 if (tree_fits_uhwi_p (ost
))
1304 unsigned HOST_WIDE_INT object_size_type
= tree_to_uhwi (ost
);
1305 tree ptr
= gimple_call_arg (call
, 0);
1306 tree lhs
= gimple_call_lhs (call
);
1307 if ((object_size_type
== 1 || object_size_type
== 3)
1308 && (TREE_CODE (ptr
) == ADDR_EXPR
1309 || TREE_CODE (ptr
) == SSA_NAME
)
1312 tree type
= TREE_TYPE (lhs
);
1313 unsigned HOST_WIDE_INT bytes
;
1314 if (compute_builtin_object_size (ptr
, object_size_type
,
1316 && wi::fits_to_tree_p (bytes
, type
))
1318 tree tem
= make_ssa_name (type
);
1319 gimple_call_set_lhs (call
, tem
);
1321 = object_size_type
== 1 ? MIN_EXPR
: MAX_EXPR
;
1322 tree cst
= build_int_cstu (type
, bytes
);
1324 = gimple_build_assign (lhs
, code
, tem
, cst
);
1325 gsi_insert_after (&i
, g
, GSI_NEW_STMT
);
1333 tree lhs
= gimple_call_lhs (call
);
1337 result
= gimple_fold_stmt_to_constant (call
, do_valueize
);
1340 tree ost
= gimple_call_arg (call
, 1);
1342 if (tree_fits_uhwi_p (ost
))
1344 unsigned HOST_WIDE_INT object_size_type
= tree_to_uhwi (ost
);
1346 if (object_size_type
< 2)
1347 result
= fold_convert (size_type_node
,
1348 integer_minus_one_node
);
1349 else if (object_size_type
< 4)
1350 result
= build_zero_cst (size_type_node
);
1357 gcc_assert (TREE_CODE (result
) == INTEGER_CST
);
1359 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1361 fprintf (dump_file
, "Simplified\n ");
1362 print_gimple_stmt (dump_file
, call
, 0, dump_flags
);
1363 fprintf (dump_file
, " to ");
1364 print_generic_expr (dump_file
, result
);
1365 fprintf (dump_file
, "\n");
1368 /* Propagate into all uses and fold those stmts. */
1369 replace_uses_by (lhs
, result
);
1373 fini_object_sizes ();
1380 make_pass_object_sizes (gcc::context
*ctxt
)
1382 return new pass_object_sizes (ctxt
);