1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2021 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "tree-pass.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
35 #include "stringpool.h"
/* Per-invocation state for the object-size propagation walk.
   NOTE(review): this extraction is garbled -- interior members are
   missing from view (the rest of the file also reads
   osi->object_size_type, osi->pass, osi->changed and osi->depths,
   which do not appear here).  Confirm against the upstream file.  */
38 struct object_size_info
/* Bitmaps of SSA versions already visited and of versions that were
   found in dependency cycles and need reexamination.  */
43 bitmap visited
, reexamine
;
/* Worklist stack of SSA versions used by the loop detection pass;
   TOS is the top-of-stack pointer.  */
45 unsigned int *stack
, *tos
;
/* Forward declarations for the mutually recursive size-collection
   helpers below.  NOTE(review): garbled extraction; the trailing
   parameter of the check_for_plus_in_loops_1 declaration (an
   unsigned int depth, per its definition further down) is missing
   from view.  */
55 static tree
compute_object_offset (const_tree
, const_tree
);
56 static bool addr_object_size (struct object_size_info
*,
57 const_tree
, int, unsigned HOST_WIDE_INT
*);
58 static unsigned HOST_WIDE_INT
alloc_object_size (const gcall
*, int);
59 static tree
pass_through_call (const gcall
*);
60 static void collect_object_sizes_for (struct object_size_info
*, tree
);
61 static void expr_object_size (struct object_size_info
*, tree
, tree
);
62 static bool merge_object_sizes (struct object_size_info
*, tree
, tree
,
63 unsigned HOST_WIDE_INT
);
64 static bool plus_stmt_object_size (struct object_size_info
*, tree
, gimple
*);
65 static bool cond_expr_object_size (struct object_size_info
*, tree
, gimple
*);
66 static void init_offset_limit (void);
67 static void check_for_plus_in_loops (struct object_size_info
*, tree
);
68 static void check_for_plus_in_loops_1 (struct object_size_info
*, tree
,
/* Cached per-SSA-version sizes, one vector per object-size-type
   (indexed up to OST_END).  */
71 /* object_sizes[0] is upper bound for number of bytes till the end of
73 object_sizes[1] is upper bound for number of bytes till the end of
74 the subobject (innermost array or field with address taken).
75 object_sizes[2] is lower bound for number of bytes till the end of
76 the object and object_sizes[3] lower bound for subobject. */
77 static vec
<unsigned HOST_WIDE_INT
> object_sizes
[OST_END
];
79 /* Bitmaps what object sizes have been computed already. */
80 static bitmap computed
[OST_END
];
82 /* Maximum value of offset we consider to be addition. */
83 static unsigned HOST_WIDE_INT offset_limit
;
85 static inline unsigned HOST_WIDE_INT
86 unknown (int object_size_type
)
88 return ((unsigned HOST_WIDE_INT
) -((object_size_type
>> 1) ^ 1));
/* Ensure the cache vector for OBJECT_SIZE_TYPE has a slot for every
   current SSA name.  New slots are zero-filled (safe_grow with
   exact=true).  NOTE(review): garbled extraction -- the function's
   return-type line and braces are missing from view.  */
91 /* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names. */
94 object_sizes_grow (int object_size_type
)
96 if (num_ssa_names
> object_sizes
[object_size_type
].length ())
97 object_sizes
[object_size_type
].safe_grow (num_ssa_names
, true);
/* Free the storage backing the cache vector for OBJECT_SIZE_TYPE.
   NOTE(review): garbled extraction -- the return-type line and braces
   are missing from view.  */
100 /* Release object_sizes[OBJECT_SIZE_TYPE]. */
103 object_sizes_release (int object_size_type
)
105 object_sizes
[object_size_type
].release ();
/* Predicate: does the cached size for VARNO equal the "unknown"
   sentinel for OBJECT_SIZE_TYPE?  NOTE(review): garbled extraction --
   the return-type line and braces are missing from view.  */
108 /* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown. */
111 object_sizes_unknown_p (int object_size_type
, unsigned varno
)
113 return (object_sizes
[object_size_type
][varno
]
114 == unknown (object_size_type
));
117 /* Return size for VARNO corresponding to OSI. */
119 static inline unsigned HOST_WIDE_INT
120 object_sizes_get (struct object_size_info
*osi
, unsigned varno
)
122 return object_sizes
[osi
->object_size_type
][varno
];
/* Unconditionally store VAL as the size estimate for VARNO.
   NOTE(review): garbled extraction -- the signature's return-type
   line, braces, and the trailing return statement (callers below use
   "return object_sizes_set_force (...)" in boolean context) are
   missing from view.  */
125 /* Set size for VARNO corresponding to OSI to VAL. */
128 object_sizes_set_force (struct object_size_info
*osi
, unsigned varno
,
129 unsigned HOST_WIDE_INT val
)
131 object_sizes
[osi
->object_size_type
][varno
] = val
;
/* Store VAL for VARNO only when it improves on the cached value:
   for maximum-size queries (OST_MINIMUM clear) only a larger value is
   kept; for minimum-size queries only a smaller one.  NOTE(review):
   garbled extraction -- the signature line, braces, and the fall-through
   return for the "no improvement" path are missing from view.  */
135 /* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
139 object_sizes_set (struct object_size_info
*osi
, unsigned varno
,
140 unsigned HOST_WIDE_INT val
)
142 int object_size_type
= osi
->object_size_type
;
/* Maximum-size mode: monotonically raise the estimate.  */
143 if ((object_size_type
& OST_MINIMUM
) == 0)
145 if (object_sizes
[object_size_type
][varno
] < val
)
146 return object_sizes_set_force (osi
, varno
, val
);
/* Minimum-size mode: monotonically lower the estimate.  */
150 if (object_sizes
[object_size_type
][varno
] > val
)
151 return object_sizes_set_force (osi
, varno
, val
);
/* Compute the threshold above which a constant pointer adjustment is
   treated as a subtraction rather than an addition, from sizetype's
   maximum value.  NOTE(review): garbled extraction -- the else branch
   and the final halving of the limit present in the upstream file are
   missing from view; confirm before relying on the exact value.  */
156 /* Initialize OFFSET_LIMIT variable. */
158 init_offset_limit (void)
160 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype
)))
161 offset_limit
= tree_to_uhwi (TYPE_MAX_VALUE (sizetype
));
/* Compute the byte offset of EXPR within VAR by recursing through
   COMPONENT_REF / ARRAY_REF / conversion wrappers, returning
   error_mark_node when the offset cannot be represented.
   NOTE(review): garbled extraction -- the switch's case labels
   (COMPONENT_REF, REALPART_EXPR/IMAGPART_EXPR, ARRAY_REF, MEM_REF),
   several break statements and braces are missing from view; the
   fragments below are in the original order but incomplete.  */
168 /* Compute offset of EXPR within VAR. Return error_mark_node
172 compute_object_offset (const_tree expr
, const_tree var
)
174 enum tree_code code
= PLUS_EXPR
;
/* EXPR == VAR: offset zero.  */
178 return size_zero_node
;
180 switch (TREE_CODE (expr
))
/* Field access: base offset plus the field's byte position.  */
183 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
184 if (base
== error_mark_node
)
187 t
= TREE_OPERAND (expr
, 1);
188 off
= size_binop (PLUS_EXPR
, DECL_FIELD_OFFSET (t
),
189 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t
))
/* Pure view/conversion wrappers contribute no offset.  */
195 case VIEW_CONVERT_EXPR
:
196 case NON_LVALUE_EXPR
:
197 return compute_object_offset (TREE_OPERAND (expr
, 0), var
);
200 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
201 if (base
== error_mark_node
)
204 off
= TYPE_SIZE_UNIT (TREE_TYPE (expr
));
/* Array indexing: scale the (low-bound-adjusted) index by the
   element size.  */
208 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
209 if (base
== error_mark_node
)
212 t
= TREE_OPERAND (expr
, 1);
213 tree low_bound
, unit_size
;
214 low_bound
= array_ref_low_bound (CONST_CAST_TREE (expr
));
215 unit_size
= array_ref_element_size (CONST_CAST_TREE (expr
));
216 if (! integer_zerop (low_bound
))
217 t
= fold_build2 (MINUS_EXPR
, TREE_TYPE (t
), t
, low_bound
);
/* A negative constant index becomes a subtracted positive offset.  */
218 if (TREE_CODE (t
) == INTEGER_CST
&& tree_int_cst_sgn (t
) < 0)
221 t
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (t
), t
);
223 t
= fold_convert (sizetype
, t
);
224 off
= size_binop (MULT_EXPR
, unit_size
, t
);
/* MEM_REF: its constant offset, with the base required to be an
   ADDR_EXPR.  */
228 gcc_assert (TREE_CODE (TREE_OPERAND (expr
, 0)) == ADDR_EXPR
);
229 return wide_int_to_tree (sizetype
, mem_ref_offset (expr
));
232 return error_mark_node
;
235 return size_binop (code
, base
, off
);
/* Size of DECL, accounting for a trailing flexible array member whose
   extent comes from the initializer.  NOTE(review): garbled
   extraction -- the early returns for the non-RECORD_TYPE / non-FAM
   cases and the final "return size;" are missing from view.  */
238 /* Returns the size of the object designated by DECL considering its
239 initializer if it either has one or if it would not affect its size,
240 otherwise the size of the object without the initializer when MIN
241 is true, else null. An object's initializer affects the object's
242 size if it's a struct type with a flexible array member. */
245 decl_init_size (tree decl
, bool min
)
247 tree size
= DECL_SIZE_UNIT (decl
);
248 tree type
= TREE_TYPE (decl
);
/* Only a struct can end in a flexible array member.  */
249 if (TREE_CODE (type
) != RECORD_TYPE
)
252 tree last
= last_field (type
);
/* A flexible array member is an ARRAY_TYPE with no TYPE_SIZE.  */
256 tree last_type
= TREE_TYPE (last
);
257 if (TREE_CODE (last_type
) != ARRAY_TYPE
258 || TYPE_SIZE (last_type
))
261 /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
262 of the initializer and sometimes doesn't. */
263 size
= TYPE_SIZE_UNIT (type
);
/* Ask component_ref_size for the initializer-determined extent of
   the trailing array.  */
264 tree ref
= build3 (COMPONENT_REF
, type
, decl
, last
, NULL_TREE
);
265 tree compsize
= component_ref_size (ref
);
/* Unknown initializer extent: fall back to the bare size for MIN,
   otherwise report "no usable size".  */
267 return min
? size
: NULL_TREE
;
269 /* The size includes tail padding and initializer elements. */
270 tree pos
= byte_position (last
);
271 size
= fold_build2 (PLUS_EXPR
, TREE_TYPE (size
), pos
, compsize
);
/* Core of __builtin_object_size for an ADDR_EXPR pointer PTR: strip
   handled components to find the base object, size the base (MEM_REF
   through a recursive SSA computation, DECL via decl_init_size,
   STRING_CST via its type), then subtract the component offset, with
   subobject (OST_SUBOBJECT) refinement.  Stores the result in *PSIZE
   and (per the upstream contract) returns whether it was determined.
   NOTE(review): heavily garbled extraction -- many connective lines
   (returns, braces, several conditions, variable declarations such as
   "tree var" and "tree v") are missing from view; the fragments below
   preserve the original order only.  */
275 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
276 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
277 If unknown, return unknown (object_size_type). */
280 addr_object_size (struct object_size_info
*osi
, const_tree ptr
,
281 int object_size_type
, unsigned HOST_WIDE_INT
*psize
)
283 tree pt_var
, pt_var_size
= NULL_TREE
, var_size
, bytes
;
285 gcc_assert (TREE_CODE (ptr
) == ADDR_EXPR
);
287 /* Set to unknown and overwrite just before returning if the size
288 could be determined. */
289 *psize
= unknown (object_size_type
);
/* Walk down to the base object.  */
291 pt_var
= TREE_OPERAND (ptr
, 0);
292 while (handled_component_p (pt_var
))
293 pt_var
= TREE_OPERAND (pt_var
, 0);
/* Base is *p: size the pointer p (recursively via the SSA walk when
   OSI is available), then subtract the MEM_REF offset.  */
298 if (TREE_CODE (pt_var
) == MEM_REF
)
300 unsigned HOST_WIDE_INT sz
;
302 if (!osi
|| (object_size_type
& OST_SUBOBJECT
) != 0
303 || TREE_CODE (TREE_OPERAND (pt_var
, 0)) != SSA_NAME
)
305 compute_builtin_object_size (TREE_OPERAND (pt_var
, 0),
306 object_size_type
& ~OST_SUBOBJECT
, &sz
);
310 tree var
= TREE_OPERAND (pt_var
, 0);
/* Reuse the in-progress SSA computation when possible.  */
312 collect_object_sizes_for (osi
, var
);
313 if (bitmap_bit_p (computed
[object_size_type
],
314 SSA_NAME_VERSION (var
)))
315 sz
= object_sizes_get (osi
, SSA_NAME_VERSION (var
));
317 sz
= unknown (object_size_type
);
/* Fold the MEM_REF's constant offset into the size.  */
319 if (sz
!= unknown (object_size_type
))
321 offset_int mem_offset
;
322 if (mem_ref_offset (pt_var
).is_constant (&mem_offset
))
324 offset_int dsz
= wi::sub (sz
, mem_offset
);
327 else if (wi::fits_uhwi_p (dsz
))
330 sz
= unknown (object_size_type
);
333 sz
= unknown (object_size_type
);
336 if (sz
!= unknown (object_size_type
) && sz
< offset_limit
)
337 pt_var_size
= size_int (sz
);
/* Base is a declaration: size it via decl_init_size, honoring
   flexible array members.  */
339 else if (DECL_P (pt_var
))
341 pt_var_size
= decl_init_size (pt_var
, object_size_type
& OST_MINIMUM
);
345 else if (TREE_CODE (pt_var
) == STRING_CST
)
346 pt_var_size
= TYPE_SIZE_UNIT (TREE_TYPE (pt_var
));
352 /* Validate the size determined above. */
353 if (!tree_fits_uhwi_p (pt_var_size
)
354 || tree_to_uhwi (pt_var_size
) >= offset_limit
)
/* PTR addressed a component inside the base object: refine.  */
358 if (pt_var
!= TREE_OPERAND (ptr
, 0))
/* Subobject mode: peel back to the innermost array/field whose
   address was taken.  */
362 if (object_size_type
& OST_SUBOBJECT
)
364 var
= TREE_OPERAND (ptr
, 0);
367 && TREE_CODE (var
) != BIT_FIELD_REF
368 && TREE_CODE (var
) != COMPONENT_REF
369 && TREE_CODE (var
) != ARRAY_REF
370 && TREE_CODE (var
) != ARRAY_RANGE_REF
371 && TREE_CODE (var
) != REALPART_EXPR
372 && TREE_CODE (var
) != IMAGPART_EXPR
)
373 var
= TREE_OPERAND (var
, 0);
374 if (var
!= pt_var
&& TREE_CODE (var
) == ARRAY_REF
)
375 var
= TREE_OPERAND (var
, 0);
376 if (! TYPE_SIZE_UNIT (TREE_TYPE (var
))
377 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var
)))
379 && tree_int_cst_lt (pt_var_size
,
380 TYPE_SIZE_UNIT (TREE_TYPE (var
))))
382 else if (var
!= pt_var
&& TREE_CODE (pt_var
) == MEM_REF
)
385 /* For &X->fld, compute object size only if fld isn't the last
386 field, as struct { int i; char c[1]; } is often used instead
387 of flexible array member. */
388 while (v
&& v
!= pt_var
)
389 switch (TREE_CODE (v
))
/* ARRAY_REF into an array with a known, non-maximal bound is
   safe to treat as an interior access.  */
392 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v
, 0)))
393 && TREE_CODE (TREE_OPERAND (v
, 1)) == INTEGER_CST
)
396 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v
, 0)));
398 && TYPE_MAX_VALUE (domain
)
399 && TREE_CODE (TYPE_MAX_VALUE (domain
))
401 && tree_int_cst_lt (TREE_OPERAND (v
, 1),
402 TYPE_MAX_VALUE (domain
)))
408 v
= TREE_OPERAND (v
, 0);
415 if (TREE_CODE (TREE_TYPE (v
)) != ARRAY_TYPE
)
/* Walk COMPONENT_REF chains checking for a trailing field (the
   "char c[1] as flexible array" idiom).  */
420 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
421 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
423 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
427 v
= TREE_OPERAND (v
, 0);
428 if (TREE_CODE (v
) == COMPONENT_REF
429 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
/* Only a real interior field (another FIELD_DECL follows it in
   the chain) may be sized as a subobject.  */
432 tree fld_chain
= DECL_CHAIN (TREE_OPERAND (v
, 1));
433 for (; fld_chain
; fld_chain
= DECL_CHAIN (fld_chain
))
434 if (TREE_CODE (fld_chain
) == FIELD_DECL
)
442 v
= TREE_OPERAND (v
, 0);
444 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
445 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
447 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
451 v
= TREE_OPERAND (v
, 0);
/* Size of the selected (sub)object, minus PTR's offset into it.  */
469 var_size
= TYPE_SIZE_UNIT (TREE_TYPE (var
));
470 else if (!pt_var_size
)
473 var_size
= pt_var_size
;
474 bytes
= compute_object_offset (TREE_OPERAND (ptr
, 0), var
);
475 if (bytes
!= error_mark_node
)
477 if (TREE_CODE (bytes
) == INTEGER_CST
478 && tree_int_cst_lt (var_size
, bytes
))
479 bytes
= size_zero_node
;
481 bytes
= size_binop (MINUS_EXPR
, var_size
, bytes
);
/* With a MEM_REF base, also bound by the whole object's remaining
   bytes and take the minimum.  */
485 && TREE_CODE (pt_var
) == MEM_REF
486 && bytes
!= error_mark_node
)
488 tree bytes2
= compute_object_offset (TREE_OPERAND (ptr
, 0), pt_var
);
489 if (bytes2
!= error_mark_node
)
491 if (TREE_CODE (bytes2
) == INTEGER_CST
492 && tree_int_cst_lt (pt_var_size
, bytes2
))
493 bytes2
= size_zero_node
;
495 bytes2
= size_binop (MINUS_EXPR
, pt_var_size
, bytes2
);
496 bytes
= size_binop (MIN_EXPR
, bytes
, bytes2
);
500 else if (!pt_var_size
)
505 if (tree_fits_uhwi_p (bytes
))
507 *psize
= tree_to_uhwi (bytes
);
/* Size of the object returned by an allocation call, derived from the
   callee's alloc_size attribute: one attribute argument gives the size
   directly, two give element count times element size.  Returns the
   unknown sentinel when the attribute or constant arguments are
   absent.  NOTE(review): garbled extraction -- braces and the
   "calltype" declaration/else-less structure are partly missing from
   view.  */
515 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
516 Handles calls to functions declared with attribute alloc_size.
517 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
518 If unknown, return unknown (object_size_type). */
520 static unsigned HOST_WIDE_INT
521 alloc_object_size (const gcall
*call
, int object_size_type
)
523 gcc_assert (is_gimple_call (call
));
/* Prefer the declared function's type; fall back to the call's
   function type for indirect calls.  */
526 if (tree callfn
= gimple_call_fndecl (call
))
527 calltype
= TREE_TYPE (callfn
);
529 calltype
= gimple_call_fntype (call
);
532 return unknown (object_size_type
);
534 /* Set to positions of alloc_size arguments. */
535 int arg1
= -1, arg2
= -1;
536 tree alloc_size
= lookup_attribute ("alloc_size",
537 TYPE_ATTRIBUTES (calltype
));
538 if (alloc_size
&& TREE_VALUE (alloc_size
))
540 tree p
= TREE_VALUE (alloc_size
);
/* Attribute positions are 1-based; convert to 0-based.  */
542 arg1
= TREE_INT_CST_LOW (TREE_VALUE (p
))-1;
544 arg2
= TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p
)))-1;
/* Bail out unless the referenced arguments are in-range constants.  */
547 if (arg1
< 0 || arg1
>= (int)gimple_call_num_args (call
)
548 || TREE_CODE (gimple_call_arg (call
, arg1
)) != INTEGER_CST
550 && (arg2
>= (int)gimple_call_num_args (call
)
551 || TREE_CODE (gimple_call_arg (call
, arg2
)) != INTEGER_CST
)))
552 return unknown (object_size_type
);
554 tree bytes
= NULL_TREE
;
/* Two-argument form: count * element-size.  */
556 bytes
= size_binop (MULT_EXPR
,
557 fold_convert (sizetype
, gimple_call_arg (call
, arg1
)),
558 fold_convert (sizetype
, gimple_call_arg (call
, arg2
)));
560 bytes
= fold_convert (sizetype
, gimple_call_arg (call
, arg1
));
562 if (bytes
&& tree_fits_uhwi_p (bytes
))
563 return tree_to_uhwi (bytes
);
565 return unknown (object_size_type
);
/* For calls that return one of their arguments unchanged (ERF_RETURNS_ARG
   or __builtin_assume_aligned), return that argument so the caller can
   propagate its size; NOTE(review): garbled extraction -- the signature
   line, braces and the final "return NULL_TREE" are missing from view.  */
569 /* If object size is propagated from one of function's arguments directly
570 to its return value, return that argument for GIMPLE_CALL statement CALL.
571 Otherwise return NULL. */
574 pass_through_call (const gcall
*call
)
576 unsigned rf
= gimple_call_return_flags (call
);
577 if (rf
& ERF_RETURNS_ARG
)
579 unsigned argnum
= rf
& ERF_RETURN_ARG_MASK
;
580 if (argnum
< gimple_call_num_args (call
))
581 return gimple_call_arg (call
, argnum
);
584 /* __builtin_assume_aligned is intentionally not marked RET1. */
585 if (gimple_call_builtin_p (call
, BUILT_IN_ASSUME_ALIGNED
))
586 return gimple_call_arg (call
, 0);
/* Public entry point: compute __builtin_object_size for PTR into
   *PSIZE.  ADDR_EXPRs are handled directly; SSA names trigger the
   two-pass use-def propagation (collect, then reexamine cycles), with
   a cheap POINTER_PLUS fallback when the full machinery is
   unavailable (computed[] not initialized and not optimizing).
   NOTE(review): heavily garbled extraction -- braces, several early
   returns, loop headers, and declarations (e.g. "unsigned int i",
   "bitmap_iterator bi", osi.pass/osi.changed setup, the pass-2 loop)
   are missing from view; fragments preserve original order only.  */
592 /* Compute __builtin_object_size value for PTR and set *PSIZE to
593 the resulting value. If the declared object is known and PDECL
594 is nonnull, sets *PDECL to the object's DECL. OBJECT_SIZE_TYPE
595 is the second argument to __builtin_object_size.
596 Returns true on success and false when the object size could not
600 compute_builtin_object_size (tree ptr
, int object_size_type
,
601 unsigned HOST_WIDE_INT
*psize
)
603 gcc_assert (object_size_type
>= 0 && object_size_type
< OST_END
);
605 /* Set to unknown and overwrite just before returning if the size
606 could be determined. */
607 *psize
= unknown (object_size_type
);
610 init_offset_limit ();
/* Plain &object: no SSA walk needed.  */
612 if (TREE_CODE (ptr
) == ADDR_EXPR
)
613 return addr_object_size (NULL
, ptr
, object_size_type
, psize
);
615 if (TREE_CODE (ptr
) != SSA_NAME
616 || !POINTER_TYPE_P (TREE_TYPE (ptr
)))
/* Machinery not initialized (e.g. called outside the pass).  */
619 if (computed
[object_size_type
] == NULL
)
621 if (optimize
|| object_size_type
& OST_SUBOBJECT
)
624 /* When not optimizing, rather than failing, make a small effort
625 to determine the object size without the full benefit of
626 the (costly) computation below. */
627 gimple
*def
= SSA_NAME_DEF_STMT (ptr
);
628 if (gimple_code (def
) == GIMPLE_ASSIGN
)
630 tree_code code
= gimple_assign_rhs_code (def
);
631 if (code
== POINTER_PLUS_EXPR
)
633 tree offset
= gimple_assign_rhs2 (def
);
634 ptr
= gimple_assign_rhs1 (def
);
636 if (tree_fits_shwi_p (offset
)
637 && compute_builtin_object_size (ptr
, object_size_type
,
640 /* Return zero when the offset is out of bounds. */
641 unsigned HOST_WIDE_INT off
= tree_to_shwi (offset
);
642 *psize
= off
< *psize
? *psize
- off
: 0;
650 struct object_size_info osi
;
651 osi
.object_size_type
= object_size_type
;
/* Only compute once per SSA version per size kind.  */
652 if (!bitmap_bit_p (computed
[object_size_type
], SSA_NAME_VERSION (ptr
)))
657 object_sizes_grow (object_size_type
);
660 fprintf (dump_file
, "Computing %s %sobject size for ",
661 (object_size_type
& OST_MINIMUM
) ? "minimum" : "maximum",
662 (object_size_type
& OST_SUBOBJECT
) ? "sub" : "");
663 print_generic_expr (dump_file
, ptr
, dump_flags
);
664 fprintf (dump_file
, ":\n");
667 osi
.visited
= BITMAP_ALLOC (NULL
);
668 osi
.reexamine
= BITMAP_ALLOC (NULL
);
673 /* First pass: walk UD chains, compute object sizes that
674 can be computed. osi.reexamine bitmap at the end will
675 contain what variables were found in dependency cycles
676 and therefore need to be reexamined. */
679 collect_object_sizes_for (&osi
, ptr
);
681 /* Second pass: keep recomputing object sizes of variables
682 that need reexamination, until no object sizes are
683 increased or all object sizes are computed. */
684 if (! bitmap_empty_p (osi
.reexamine
))
686 bitmap reexamine
= BITMAP_ALLOC (NULL
);
688 /* If looking for minimum instead of maximum object size,
689 detect cases where a pointer is increased in a loop.
690 Although even without this detection pass 2 would eventually
691 terminate, it could take a long time. If a pointer is
692 increasing this way, we need to assume 0 object size.
693 E.g. p = &buf[0]; while (cond) p = p + 4; */
694 if (object_size_type
& OST_MINIMUM
)
696 osi
.depths
= XCNEWVEC (unsigned int, num_ssa_names
);
697 osi
.stack
= XNEWVEC (unsigned int, num_ssa_names
);
700 /* collect_object_sizes_for is changing
701 osi.reexamine bitmap, so iterate over a copy. */
702 bitmap_copy (reexamine
, osi
.reexamine
);
703 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
704 if (bitmap_bit_p (osi
.reexamine
, i
))
705 check_for_plus_in_loops (&osi
, ssa_name (i
));
718 /* collect_object_sizes_for is changing
719 osi.reexamine bitmap, so iterate over a copy. */
720 bitmap_copy (reexamine
, osi
.reexamine
);
721 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
722 if (bitmap_bit_p (osi
.reexamine
, i
))
724 collect_object_sizes_for (&osi
, ssa_name (i
));
725 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
727 fprintf (dump_file
, "Reexamining ");
728 print_generic_expr (dump_file
, ssa_name (i
),
730 fprintf (dump_file
, "\n");
736 BITMAP_FREE (reexamine
);
/* Whatever is still marked for reexamination has converged.  */
738 EXECUTE_IF_SET_IN_BITMAP (osi
.reexamine
, 0, i
, bi
)
739 bitmap_set_bit (computed
[object_size_type
], i
);
741 /* Debugging dumps. */
744 EXECUTE_IF_SET_IN_BITMAP (osi
.visited
, 0, i
, bi
)
745 if (!object_sizes_unknown_p (object_size_type
, i
))
747 print_generic_expr (dump_file
, ssa_name (i
),
750 ": %s %sobject size "
751 HOST_WIDE_INT_PRINT_UNSIGNED
"\n",
752 ((object_size_type
& OST_MINIMUM
) ? "minimum"
754 (object_size_type
& OST_SUBOBJECT
) ? "sub" : "",
755 object_sizes_get (&osi
, i
));
759 BITMAP_FREE (osi
.reexamine
);
760 BITMAP_FREE (osi
.visited
);
763 *psize
= object_sizes_get (&osi
, SSA_NAME_VERSION (ptr
));
764 return *psize
!= unknown (object_size_type
);
/* Record the size for PTR when it is defined to a non-SSA VALUE:
   ADDR_EXPRs get a real size via addr_object_size, everything else is
   unknown.  Only legal during pass 0.  NOTE(review): garbled
   extraction -- the signature line, braces and the "else" keyword
   before the unknown assignment are missing from view.  */
767 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
770 expr_object_size (struct object_size_info
*osi
, tree ptr
, tree value
)
772 int object_size_type
= osi
->object_size_type
;
773 unsigned int varno
= SSA_NAME_VERSION (ptr
);
774 unsigned HOST_WIDE_INT bytes
;
776 gcc_assert (!object_sizes_unknown_p (object_size_type
, varno
));
777 gcc_assert (osi
->pass
== 0);
779 if (TREE_CODE (value
) == WITH_SIZE_EXPR
)
780 value
= TREE_OPERAND (value
, 0);
782 /* Pointer variables should have been handled by merge_object_sizes. */
783 gcc_assert (TREE_CODE (value
) != SSA_NAME
784 || !POINTER_TYPE_P (TREE_TYPE (value
)));
786 if (TREE_CODE (value
) == ADDR_EXPR
)
787 addr_object_size (osi
, value
, object_size_type
, &bytes
);
789 bytes
= unknown (object_size_type
);
791 object_sizes_set (osi
, varno
, bytes
);
/* Record the size for PTR when it is defined by an allocation CALL,
   via the callee's alloc_size attribute.  Only legal during pass 0.
   NOTE(review): garbled extraction -- the signature line and braces
   are missing from view.  */
795 /* Compute object_sizes for PTR, defined to the result of a call. */
798 call_object_size (struct object_size_info
*osi
, tree ptr
, gcall
*call
)
800 int object_size_type
= osi
->object_size_type
;
801 unsigned int varno
= SSA_NAME_VERSION (ptr
);
802 unsigned HOST_WIDE_INT bytes
;
804 gcc_assert (is_gimple_call (call
));
806 gcc_assert (!object_sizes_unknown_p (object_size_type
, varno
));
807 gcc_assert (osi
->pass
== 0);
809 bytes
= alloc_object_size (call
, object_size_type
);
811 object_sizes_set (osi
, varno
, bytes
);
/* Record the "unknown" sentinel as PTR's size.  Only legal during
   pass 0.  NOTE(review): garbled extraction -- the signature line and
   braces are missing from view.  */
815 /* Compute object_sizes for PTR, defined to an unknown value. */
818 unknown_object_size (struct object_size_info
*osi
, tree ptr
)
820 int object_size_type
= osi
->object_size_type
;
821 unsigned int varno
= SSA_NAME_VERSION (ptr
);
823 gcc_checking_assert (!object_sizes_unknown_p (object_size_type
, varno
));
824 gcc_checking_assert (osi
->pass
== 0);
826 object_sizes_set (osi
, varno
, unknown (object_size_type
));
/* Fold ORIG's size minus OFFSET into DEST's estimate; returns whether
   ORIG is still awaiting reexamination (so DEST must be revisited
   too).  NOTE(review): garbled extraction -- the signature line,
   braces, the early return for already-unknown DEST, and the pass-0
   guard around the recursive collect call are missing from view.  */
830 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
831 the object size might need reexamination later. */
834 merge_object_sizes (struct object_size_info
*osi
, tree dest
, tree orig
,
835 unsigned HOST_WIDE_INT offset
)
837 int object_size_type
= osi
->object_size_type
;
838 unsigned int varno
= SSA_NAME_VERSION (dest
);
839 unsigned HOST_WIDE_INT orig_bytes
;
841 if (object_sizes_unknown_p (object_size_type
, varno
))
/* An offset past the limit is treated as pointer subtraction ->
   unknown.  */
843 if (offset
>= offset_limit
)
845 object_sizes_set (osi
, varno
, unknown (object_size_type
));
850 collect_object_sizes_for (osi
, orig
);
852 orig_bytes
= object_sizes_get (osi
, SSA_NAME_VERSION (orig
));
853 if (orig_bytes
!= unknown (object_size_type
))
854 orig_bytes
= (offset
> orig_bytes
)
855 ? HOST_WIDE_INT_0U
: orig_bytes
- offset
;
857 osi
->changed
= object_sizes_set (osi
, varno
, orig_bytes
);
859 return bitmap_bit_p (osi
->reexamine
, SSA_NAME_VERSION (orig
));
/* Handle VAR = PTR p+ CONST (or the equivalent &MEM_REF form):
   propagate the base's size minus the constant offset.  Returns
   whether reexamination may be needed.  NOTE(review): garbled
   extraction -- the signature line, braces, the gcc_unreachable else
   arm, and parts of the off/bytes clamping chain are missing from
   view.  */
863 /* Compute object_sizes for VAR, defined to the result of an assignment
864 with operator POINTER_PLUS_EXPR. Return true if the object size might
865 need reexamination later. */
868 plus_stmt_object_size (struct object_size_info
*osi
, tree var
, gimple
*stmt
)
870 int object_size_type
= osi
->object_size_type
;
871 unsigned int varno
= SSA_NAME_VERSION (var
);
872 unsigned HOST_WIDE_INT bytes
;
/* Extract base pointer and constant offset from either form.  */
875 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
877 op0
= gimple_assign_rhs1 (stmt
);
878 op1
= gimple_assign_rhs2 (stmt
);
880 else if (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
)
882 tree rhs
= TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
883 gcc_assert (TREE_CODE (rhs
) == MEM_REF
);
884 op0
= TREE_OPERAND (rhs
, 0);
885 op1
= TREE_OPERAND (rhs
, 1);
890 if (object_sizes_unknown_p (object_size_type
, varno
))
893 /* Handle PTR + OFFSET here. */
894 if (TREE_CODE (op1
) == INTEGER_CST
895 && (TREE_CODE (op0
) == SSA_NAME
896 || TREE_CODE (op0
) == ADDR_EXPR
))
898 if (! tree_fits_uhwi_p (op1
))
899 bytes
= unknown (object_size_type
);
900 else if (TREE_CODE (op0
) == SSA_NAME
)
901 return merge_object_sizes (osi
, var
, op0
, tree_to_uhwi (op1
));
904 unsigned HOST_WIDE_INT off
= tree_to_uhwi (op1
);
906 /* op0 will be ADDR_EXPR here. */
907 addr_object_size (osi
, op0
, object_size_type
, &bytes
);
908 if (bytes
== unknown (object_size_type
))
910 else if (off
> offset_limit
)
911 bytes
= unknown (object_size_type
);
912 else if (off
> bytes
)
919 bytes
= unknown (object_size_type
);
921 object_sizes_set (osi
, varno
, bytes
);
/* Handle VAR = cond ? then_ : else_ by merging the sizes of both
   arms; returns whether reexamination may be needed.  NOTE(review):
   garbled extraction -- the signature line, braces, the tree
   declarations for then_/else_ and the final "return reexamine" are
   missing from view.  */
926 /* Compute object_sizes for VAR, defined at STMT, which is
927 a COND_EXPR. Return true if the object size might need reexamination
931 cond_expr_object_size (struct object_size_info
*osi
, tree var
, gimple
*stmt
)
934 int object_size_type
= osi
->object_size_type
;
935 unsigned int varno
= SSA_NAME_VERSION (var
);
936 bool reexamine
= false;
938 gcc_assert (gimple_assign_rhs_code (stmt
) == COND_EXPR
);
940 if (object_sizes_unknown_p (object_size_type
, varno
))
943 then_
= gimple_assign_rhs2 (stmt
);
944 else_
= gimple_assign_rhs3 (stmt
);
/* SSA arms merge recursively; anything else is sized directly.  */
946 if (TREE_CODE (then_
) == SSA_NAME
)
947 reexamine
|= merge_object_sizes (osi
, var
, then_
, 0);
949 expr_object_size (osi
, var
, then_
);
951 if (object_sizes_unknown_p (object_size_type
, varno
))
954 if (TREE_CODE (else_
) == SSA_NAME
)
955 reexamine
|= merge_object_sizes (osi
, var
, else_
, 0);
957 expr_object_size (osi
, var
, else_
);
/* Main recursive driver: compute VAR's size from its defining
   statement (assignment, call, ASM, default def, or PHI), dispatching
   to the helpers above and maintaining the visited/reexamine bitmaps
   for cycle handling.  NOTE(review): heavily garbled extraction --
   braces, declarations ("gimple *stmt", "bool reexamine", loop index
   "i"), case labels of the switch, and several else/return lines are
   missing from view; fragments preserve original order only.  */
962 /* Compute object sizes for VAR.
963 For ADDR_EXPR an object size is the number of remaining bytes
964 to the end of the object (where what is considered an object depends on
965 OSI->object_size_type).
966 For allocation GIMPLE_CALL like malloc or calloc object size is the size
968 For POINTER_PLUS_EXPR where second operand is a constant integer,
969 object size is object size of the first operand minus the constant.
970 If the constant is bigger than the number of remaining bytes until the
971 end of the object, object size is 0, but if it is instead a pointer
972 subtraction, object size is unknown (object_size_type).
973 To differentiate addition from subtraction, ADDR_EXPR returns
974 unknown (object_size_type) for all objects bigger than half of the address
975 space, and constants less than half of the address space are considered
976 addition, while bigger constants subtraction.
977 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
978 object size is object size of that argument.
979 Otherwise, object size is the maximum of object sizes of variables
980 that it might be set to. */
983 collect_object_sizes_for (struct object_size_info
*osi
, tree var
)
985 int object_size_type
= osi
->object_size_type
;
986 unsigned int varno
= SSA_NAME_VERSION (var
);
/* Already finalized for this size kind: nothing to do.  */
990 if (bitmap_bit_p (computed
[object_size_type
], varno
))
/* First visit: seed with the identity element so merges override.  */
995 if (bitmap_set_bit (osi
->visited
, varno
))
997 /* Initialize to 0 for maximum size and M1U for minimum size so that
998 it gets immediately overridden. */
999 object_sizes_set_force (osi
, varno
,
1000 unknown (object_size_type
^ OST_MINIMUM
));
1004 /* Found a dependency loop. Mark the variable for later
1006 bitmap_set_bit (osi
->reexamine
, varno
);
1007 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1009 fprintf (dump_file
, "Found a dependency loop at ");
1010 print_generic_expr (dump_file
, var
, dump_flags
);
1011 fprintf (dump_file
, "\n");
1017 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1019 fprintf (dump_file
, "Visiting use-def links for ");
1020 print_generic_expr (dump_file
, var
, dump_flags
);
1021 fprintf (dump_file
, "\n");
1024 stmt
= SSA_NAME_DEF_STMT (var
);
1027 switch (gimple_code (stmt
))
/* GIMPLE_ASSIGN: dispatch on the RHS code.  */
1031 tree rhs
= gimple_assign_rhs1 (stmt
);
1032 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
1033 || (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
1034 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
))
1035 reexamine
= plus_stmt_object_size (osi
, var
, stmt
);
1036 else if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
1037 reexamine
= cond_expr_object_size (osi
, var
, stmt
);
1038 else if (gimple_assign_single_p (stmt
)
1039 || gimple_assign_unary_nop_p (stmt
))
1041 if (TREE_CODE (rhs
) == SSA_NAME
1042 && POINTER_TYPE_P (TREE_TYPE (rhs
)))
1043 reexamine
= merge_object_sizes (osi
, var
, rhs
, 0);
1045 expr_object_size (osi
, var
, rhs
);
1048 unknown_object_size (osi
, var
);
/* GIMPLE_CALL: pass-through argument or alloc_size.  */
1054 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
1055 tree arg
= pass_through_call (call_stmt
);
1058 if (TREE_CODE (arg
) == SSA_NAME
1059 && POINTER_TYPE_P (TREE_TYPE (arg
)))
1060 reexamine
= merge_object_sizes (osi
, var
, arg
, 0);
1062 expr_object_size (osi
, var
, arg
);
1065 call_object_size (osi
, var
, call_stmt
);
1070 /* Pointers defined by __asm__ statements can point anywhere. */
1071 object_sizes_set (osi
, varno
, unknown (object_size_type
));
/* GIMPLE_NOP (default definition): PARM_DECLs may carry a size.  */
1075 if (SSA_NAME_VAR (var
)
1076 && TREE_CODE (SSA_NAME_VAR (var
)) == PARM_DECL
)
1077 expr_object_size (osi
, var
, SSA_NAME_VAR (var
));
1079 /* Uninitialized SSA names point nowhere. */
1080 object_sizes_set (osi
, varno
, unknown (object_size_type
));
/* GIMPLE_PHI: merge every incoming argument.  */
1087 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1089 tree rhs
= gimple_phi_arg (stmt
, i
)->def
;
1091 if (object_sizes_unknown_p (object_size_type
, varno
))
1094 if (TREE_CODE (rhs
) == SSA_NAME
)
1095 reexamine
|= merge_object_sizes (osi
, var
, rhs
, 0);
1096 else if (osi
->pass
== 0)
1097 expr_object_size (osi
, var
, rhs
);
/* Finalize or queue for another round.  */
1106 if (! reexamine
|| object_sizes_unknown_p (object_size_type
, varno
))
1108 bitmap_set_bit (computed
[object_size_type
], varno
);
1109 bitmap_clear_bit (osi
->reexamine
, varno
);
1113 bitmap_set_bit (osi
->reexamine
, varno
);
1114 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1116 fprintf (dump_file
, "Need to reexamine ");
1117 print_generic_expr (dump_file
, var
, dump_flags
);
1118 fprintf (dump_file
, "\n");
/* Recursive worker for check_for_plus_in_loops: DFS over use-def
   links tracking DEPTH, which is bumped across nonzero POINTER_PLUS
   constants; revisiting a version at a different depth proves a
   pointer-increasing loop, whose members are then force-set to size 0.
   NOTE(review): garbled extraction -- the signature's depth parameter,
   braces, case labels, "unsigned int *sp" declaration and several
   returns are missing from view.  */
1124 /* Helper function for check_for_plus_in_loops. Called recursively
1128 check_for_plus_in_loops_1 (struct object_size_info
*osi
, tree var
,
1131 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
1132 unsigned int varno
= SSA_NAME_VERSION (var
);
1134 if (osi
->depths
[varno
])
1136 if (osi
->depths
[varno
] != depth
)
1140 /* Found a loop involving pointer addition. */
1141 for (sp
= osi
->tos
; sp
> osi
->stack
; )
/* Every cycle member gets minimum size 0 and is finalized.  */
1144 bitmap_clear_bit (osi
->reexamine
, *sp
);
1145 bitmap_set_bit (computed
[osi
->object_size_type
], *sp
);
1146 object_sizes_set_force (osi
, *sp
, 0);
1153 else if (! bitmap_bit_p (osi
->reexamine
, varno
))
/* Push this version and recurse into its definition.  */
1156 osi
->depths
[varno
] = depth
;
1157 *osi
->tos
++ = varno
;
1159 switch (gimple_code (stmt
))
1164 if ((gimple_assign_single_p (stmt
)
1165 || gimple_assign_unary_nop_p (stmt
))
1166 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
1168 tree rhs
= gimple_assign_rhs1 (stmt
);
1170 check_for_plus_in_loops_1 (osi
, rhs
, depth
);
1172 else if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1174 tree basevar
= gimple_assign_rhs1 (stmt
);
1175 tree cst
= gimple_assign_rhs2 (stmt
);
1177 gcc_assert (TREE_CODE (cst
) == INTEGER_CST
);
/* Depth advances only across a nonzero increment.  */
1179 check_for_plus_in_loops_1 (osi
, basevar
,
1180 depth
+ !integer_zerop (cst
));
1189 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
1190 tree arg
= pass_through_call (call_stmt
);
1193 if (TREE_CODE (arg
) == SSA_NAME
)
1194 check_for_plus_in_loops_1 (osi
, arg
, depth
);
1205 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1207 tree rhs
= gimple_phi_arg (stmt
, i
)->def
;
1209 if (TREE_CODE (rhs
) == SSA_NAME
)
1210 check_for_plus_in_loops_1 (osi
, rhs
, depth
);
/* Pop on the way out.  */
1219 osi
->depths
[varno
] = 0;
/* Entry point of the loop-increment detection: for VAR defined as
   base p+ nonzero-const, seed the DFS at the base and walk back to
   VAR looking for a cycle.  NOTE(review): garbled extraction -- the
   signature line, braces, and the tos reset after the recursion are
   missing from view.  */
1224 /* Check if some pointer we are computing object size of is being increased
1225 within a loop. If yes, assume all the SSA variables participating in
1226 that loop have minimum object sizes 0. */
1229 check_for_plus_in_loops (struct object_size_info
*osi
, tree var
)
1231 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
1233 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1234 and looked for a POINTER_PLUS_EXPR in the pass-through
1235 argument, if any. In GIMPLE, however, such an expression
1236 is not a valid call operand. */
1238 if (is_gimple_assign (stmt
)
1239 && gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1241 tree basevar
= gimple_assign_rhs1 (stmt
);
1242 tree cst
= gimple_assign_rhs2 (stmt
);
1244 gcc_assert (TREE_CODE (cst
) == INTEGER_CST
);
/* Zero increments cannot grow the pointer; skip.  */
1246 if (integer_zerop (cst
))
1249 osi
->depths
[SSA_NAME_VERSION (basevar
)] = 1;
1250 *osi
->tos
++ = SSA_NAME_VERSION (basevar
);
1251 check_for_plus_in_loops_1 (osi
, var
, 2);
1252 osi
->depths
[SSA_NAME_VERSION (basevar
)] = 0;
/* Lazily allocate the per-size-kind caches and computed bitmaps and
   set up the offset limit.  NOTE(review): garbled extraction -- the
   signature line, braces and the early-return guard for repeated
   initialization are missing from view.  */
1258 /* Initialize data structures for the object size computation. */
1261 init_object_sizes (void)
1263 int object_size_type
;
1268 for (object_size_type
= 0; object_size_type
< OST_END
; object_size_type
++)
1270 object_sizes_grow (object_size_type
);
1271 computed
[object_size_type
] = BITMAP_ALLOC (NULL
);
1274 init_offset_limit ();
/* Release everything init_object_sizes allocated.  NOTE(review):
   garbled extraction -- the signature line and braces are missing
   from view.  */
1278 /* Destroy data structures after the object size computation. */
1281 fini_object_sizes (void)
1283 int object_size_type
;
1285 for (object_size_type
= 0; object_size_type
< OST_END
; object_size_type
++)
1287 object_sizes_release (object_size_type
);
1288 BITMAP_FREE (computed
[object_size_type
]);
1292 /* Dummy valueize function. */
1295 do_valueize (tree t
)
1301 object_sizes_execute (function
*fun
, bool insert_min_max_p
)
1304 FOR_EACH_BB_FN (bb
, fun
)
1306 gimple_stmt_iterator i
;
1307 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1310 gimple
*call
= gsi_stmt (i
);
1311 if (!gimple_call_builtin_p (call
, BUILT_IN_OBJECT_SIZE
))
1314 tree lhs
= gimple_call_lhs (call
);
1318 init_object_sizes ();
1320 /* If insert_min_max_p, only attempt to fold
1321 __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
1322 and rather than folding the builtin to the constant if any,
1323 create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
1324 call result and the computed constant. */
1325 if (insert_min_max_p
)
1327 tree ost
= gimple_call_arg (call
, 1);
1328 if (tree_fits_uhwi_p (ost
))
1330 unsigned HOST_WIDE_INT object_size_type
= tree_to_uhwi (ost
);
1331 tree ptr
= gimple_call_arg (call
, 0);
1332 if ((object_size_type
& OST_SUBOBJECT
)
1333 && (TREE_CODE (ptr
) == ADDR_EXPR
1334 || TREE_CODE (ptr
) == SSA_NAME
))
1336 tree type
= TREE_TYPE (lhs
);
1337 unsigned HOST_WIDE_INT bytes
;
1338 if (compute_builtin_object_size (ptr
, object_size_type
,
1340 && wi::fits_to_tree_p (bytes
, type
))
1342 tree tem
= make_ssa_name (type
);
1343 gimple_call_set_lhs (call
, tem
);
1345 = (object_size_type
& OST_MINIMUM
1346 ? MAX_EXPR
: MIN_EXPR
);
1347 tree cst
= build_int_cstu (type
, bytes
);
1349 = gimple_build_assign (lhs
, code
, tem
, cst
);
1350 gsi_insert_after (&i
, g
, GSI_NEW_STMT
);
1358 result
= gimple_fold_stmt_to_constant (call
, do_valueize
);
1361 tree ost
= gimple_call_arg (call
, 1);
1363 if (tree_fits_uhwi_p (ost
))
1365 unsigned HOST_WIDE_INT object_size_type
= tree_to_uhwi (ost
);
1367 if (object_size_type
& OST_MINIMUM
)
1368 result
= build_zero_cst (size_type_node
);
1369 else if (object_size_type
< OST_END
)
1370 result
= fold_convert (size_type_node
,
1371 integer_minus_one_node
);
1378 gcc_assert (TREE_CODE (result
) == INTEGER_CST
);
1380 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1382 fprintf (dump_file
, "Simplified\n ");
1383 print_gimple_stmt (dump_file
, call
, 0, dump_flags
);
1384 fprintf (dump_file
, " to ");
1385 print_generic_expr (dump_file
, result
);
1386 fprintf (dump_file
, "\n");
1389 /* Propagate into all uses and fold those stmts. */
1390 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
1391 replace_uses_by (lhs
, result
);
1393 replace_call_with_value (&i
, result
);
1397 fini_object_sizes ();
1401 /* Simple pass to optimize all __builtin_object_size () builtins. */
1405 const pass_data pass_data_object_sizes
=
1407 GIMPLE_PASS
, /* type */
1409 OPTGROUP_NONE
, /* optinfo_flags */
1410 TV_NONE
, /* tv_id */
1411 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1412 PROP_objsz
, /* properties_provided */
1413 0, /* properties_destroyed */
1414 0, /* todo_flags_start */
1415 0, /* todo_flags_finish */
1418 class pass_object_sizes
: public gimple_opt_pass
1421 pass_object_sizes (gcc::context
*ctxt
)
1422 : gimple_opt_pass (pass_data_object_sizes
, ctxt
)
1425 /* opt_pass methods: */
1426 opt_pass
* clone () { return new pass_object_sizes (m_ctxt
); }
1427 virtual unsigned int execute (function
*fun
)
1429 return object_sizes_execute (fun
, false);
1431 }; // class pass_object_sizes
1436 make_pass_object_sizes (gcc::context
*ctxt
)
1438 return new pass_object_sizes (ctxt
);
1441 /* Early version of pass to optimize all __builtin_object_size () builtins. */
1445 const pass_data pass_data_early_object_sizes
=
1447 GIMPLE_PASS
, /* type */
1448 "early_objsz", /* name */
1449 OPTGROUP_NONE
, /* optinfo_flags */
1450 TV_NONE
, /* tv_id */
1451 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1452 0, /* properties_provided */
1453 0, /* properties_destroyed */
1454 0, /* todo_flags_start */
1455 0, /* todo_flags_finish */
1458 class pass_early_object_sizes
: public gimple_opt_pass
1461 pass_early_object_sizes (gcc::context
*ctxt
)
1462 : gimple_opt_pass (pass_data_early_object_sizes
, ctxt
)
1465 /* opt_pass methods: */
1466 virtual unsigned int execute (function
*fun
)
1468 return object_sizes_execute (fun
, true);
1470 }; // class pass_object_sizes
1475 make_pass_early_object_sizes (gcc::context
*ctxt
)
1477 return new pass_early_object_sizes (ctxt
);