/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2016 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
27 #include "tree-pass.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
/* NOTE(review): extraction-garbled fragment — original source lines are
   missing here; code below kept byte-identical.  Per-invocation state for
   the object-size propagation: VISITED/REEXAMINE bitmaps of SSA versions
   and a worklist STACK with top-of-stack pointer TOS.  Other code in this
   file also reads osi->object_size_type, osi->pass and osi->depths, so
   those members presumably belong to this struct — the declaring lines
   were dropped by extraction; verify against the full file.  */
36 struct object_size_info
39 bitmap visited
, reexamine
;
43 unsigned int *stack
, *tos
;
/* Sentinel "size not known" value per object_size_type: -1 (all ones) for
   the two maximum modes (types 0 and 1), 0 for the two minimum modes
   (types 2 and 3).  Compared against throughout this file.  */
46 static const unsigned HOST_WIDE_INT unknown
[4] = { -1, -1, 0, 0 };
/* NOTE(review): forward declarations for the static helpers defined below.
   Extraction-garbled (line numbers fused in, some declaration lines
   dropped — e.g. the tail of the addr_object_size and
   check_for_plus_in_loops_1 prototypes); kept byte-identical.  */
48 static tree
compute_object_offset (const_tree
, const_tree
);
49 static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info
*,
51 static unsigned HOST_WIDE_INT
alloc_object_size (const gcall
*, int);
52 static tree
pass_through_call (const gcall
*);
53 static void collect_object_sizes_for (struct object_size_info
*, tree
);
54 static void expr_object_size (struct object_size_info
*, tree
, tree
);
55 static bool merge_object_sizes (struct object_size_info
*, tree
, tree
,
56 unsigned HOST_WIDE_INT
);
57 static bool plus_stmt_object_size (struct object_size_info
*, tree
, gimple
*);
58 static bool cond_expr_object_size (struct object_size_info
*, tree
, gimple
*);
59 static void init_offset_limit (void);
60 static void check_for_plus_in_loops (struct object_size_info
*, tree
);
61 static void check_for_plus_in_loops_1 (struct object_size_info
*, tree
,
/* NOTE(review): file-scope caches, indexed by object_size_type (0..3) and
   then by SSA_NAME_VERSION.  computed[] records which SSA versions already
   have a final size.  offset_limit bounds offsets treated as addition
   (initialized by init_offset_limit below).  Code kept byte-identical.  */
64 /* object_sizes[0] is upper bound for number of bytes till the end of
66 object_sizes[1] is upper bound for number of bytes till the end of
67 the subobject (innermost array or field with address taken).
68 object_sizes[2] is lower bound for number of bytes till the end of
69 the object and object_sizes[3] lower bound for subobject. */
70 static vec
<unsigned HOST_WIDE_INT
> object_sizes
[4];
72 /* Bitmaps what object sizes have been computed already. */
73 static bitmap computed
[4];
75 /* Maximum value of offset we consider to be addition. */
76 static unsigned HOST_WIDE_INT offset_limit
;
/* NOTE(review): sets offset_limit from TYPE_MAX_VALUE (sizetype) when that
   fits a uhwi.  The else branch and any post-processing (original lines
   85-90) were dropped by extraction — consult the full file before
   relying on the fallback behavior.  Code kept byte-identical.  */
79 /* Initialize OFFSET_LIMIT variable. */
81 init_offset_limit (void)
83 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype
)))
84 offset_limit
= tree_to_uhwi (TYPE_MAX_VALUE (sizetype
));
/* NOTE(review): computes the byte offset of EXPR within VAR by recursing
   on handled components: COMPONENT_REF adds DECL_FIELD_OFFSET plus the
   byte part of DECL_FIELD_BIT_OFFSET; ARRAY_REF multiplies the index by
   the element size (negating first if the index is a negative constant,
   with code presumably switching to MINUS_EXPR — that line was dropped);
   VIEW_CONVERT_EXPR/NON_LVALUE_EXPR recurse transparently; MEM_REF uses
   mem_ref_offset.  Returns error_mark_node when the offset is unknown.
   Extraction dropped several interior lines (case labels, early returns);
   code kept byte-identical.  */
91 /* Compute offset of EXPR within VAR. Return error_mark_node
95 compute_object_offset (const_tree expr
, const_tree var
)
97 enum tree_code code
= PLUS_EXPR
;
101 return size_zero_node
;
103 switch (TREE_CODE (expr
))
106 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
107 if (base
== error_mark_node
)
110 t
= TREE_OPERAND (expr
, 1);
111 off
= size_binop (PLUS_EXPR
, DECL_FIELD_OFFSET (t
),
112 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t
))
118 case VIEW_CONVERT_EXPR
:
119 case NON_LVALUE_EXPR
:
120 return compute_object_offset (TREE_OPERAND (expr
, 0), var
);
123 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
124 if (base
== error_mark_node
)
127 off
= TYPE_SIZE_UNIT (TREE_TYPE (expr
));
131 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
132 if (base
== error_mark_node
)
135 t
= TREE_OPERAND (expr
, 1);
136 if (TREE_CODE (t
) == INTEGER_CST
&& tree_int_cst_sgn (t
) < 0)
139 t
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (t
), t
);
141 t
= fold_convert (sizetype
, t
);
142 off
= size_binop (MULT_EXPR
, TYPE_SIZE_UNIT (TREE_TYPE (expr
)), t
);
146 gcc_assert (TREE_CODE (TREE_OPERAND (expr
, 0)) == ADDR_EXPR
);
147 return wide_int_to_tree (sizetype
, mem_ref_offset (expr
));
150 return error_mark_node
;
153 return size_binop (code
, base
, off
);
/* NOTE(review): the core ADDR_EXPR sizing routine.  Strips handled
   components off TREE_OPERAND (ptr, 0) to find the base object PT_VAR,
   then derives PT_VAR's whole-object size (via recursion through MEM_REF
   bases, DECL_SIZE_UNIT for decls, TYPE_SIZE_UNIT for STRING_CSTs), and
   for subobject queries (object_size_type & 1) walks the COMPONENT_REF
   chain to pick the innermost subobject — deliberately skipping a
   trailing array field, since struct { int i; char c[1]; } is commonly
   used in place of a flexible array member.  Finally subtracts the offset
   of PTR within the chosen object (clamping at zero) and, for MEM_REF
   bases, takes the MIN with the whole-object remainder.  Returns
   unknown[object_size_type] when nothing can be determined.  Extraction
   dropped many interior lines (braces, else arms, some conditions);
   code kept byte-identical — do not treat this fragment as compilable.  */
157 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
158 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
159 If unknown, return unknown[object_size_type]. */
161 static unsigned HOST_WIDE_INT
162 addr_object_size (struct object_size_info
*osi
, const_tree ptr
,
163 int object_size_type
)
165 tree pt_var
, pt_var_size
= NULL_TREE
, var_size
, bytes
;
167 gcc_assert (TREE_CODE (ptr
) == ADDR_EXPR
);
169 pt_var
= TREE_OPERAND (ptr
, 0);
170 while (handled_component_p (pt_var
))
171 pt_var
= TREE_OPERAND (pt_var
, 0);
174 && TREE_CODE (pt_var
) == MEM_REF
)
176 unsigned HOST_WIDE_INT sz
;
178 if (!osi
|| (object_size_type
& 1) != 0
179 || TREE_CODE (TREE_OPERAND (pt_var
, 0)) != SSA_NAME
)
181 sz
= compute_builtin_object_size (TREE_OPERAND (pt_var
, 0),
182 object_size_type
& ~1);
186 tree var
= TREE_OPERAND (pt_var
, 0);
188 collect_object_sizes_for (osi
, var
);
189 if (bitmap_bit_p (computed
[object_size_type
],
190 SSA_NAME_VERSION (var
)))
191 sz
= object_sizes
[object_size_type
][SSA_NAME_VERSION (var
)];
193 sz
= unknown
[object_size_type
];
195 if (sz
!= unknown
[object_size_type
])
197 offset_int dsz
= wi::sub (sz
, mem_ref_offset (pt_var
));
200 else if (wi::fits_uhwi_p (dsz
))
203 sz
= unknown
[object_size_type
];
206 if (sz
!= unknown
[object_size_type
] && sz
< offset_limit
)
207 pt_var_size
= size_int (sz
);
211 && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var
))
212 && tree_to_uhwi (DECL_SIZE_UNIT (pt_var
)) < offset_limit
)
213 pt_var_size
= DECL_SIZE_UNIT (pt_var
);
215 && TREE_CODE (pt_var
) == STRING_CST
216 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var
))
217 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var
)))
218 && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var
)))
220 pt_var_size
= TYPE_SIZE_UNIT (TREE_TYPE (pt_var
));
222 return unknown
[object_size_type
];
224 if (pt_var
!= TREE_OPERAND (ptr
, 0))
228 if (object_size_type
& 1)
230 var
= TREE_OPERAND (ptr
, 0);
233 && TREE_CODE (var
) != BIT_FIELD_REF
234 && TREE_CODE (var
) != COMPONENT_REF
235 && TREE_CODE (var
) != ARRAY_REF
236 && TREE_CODE (var
) != ARRAY_RANGE_REF
237 && TREE_CODE (var
) != REALPART_EXPR
238 && TREE_CODE (var
) != IMAGPART_EXPR
)
239 var
= TREE_OPERAND (var
, 0);
240 if (var
!= pt_var
&& TREE_CODE (var
) == ARRAY_REF
)
241 var
= TREE_OPERAND (var
, 0);
242 if (! TYPE_SIZE_UNIT (TREE_TYPE (var
))
243 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var
)))
245 && tree_int_cst_lt (pt_var_size
,
246 TYPE_SIZE_UNIT (TREE_TYPE (var
)))))
248 else if (var
!= pt_var
&& TREE_CODE (pt_var
) == MEM_REF
)
251 /* For &X->fld, compute object size only if fld isn't the last
252 field, as struct { int i; char c[1]; } is often used instead
253 of flexible array member. */
254 while (v
&& v
!= pt_var
)
255 switch (TREE_CODE (v
))
258 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v
, 0)))
259 && TREE_CODE (TREE_OPERAND (v
, 1)) == INTEGER_CST
)
262 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v
, 0)));
264 && TYPE_MAX_VALUE (domain
)
265 && TREE_CODE (TYPE_MAX_VALUE (domain
))
267 && tree_int_cst_lt (TREE_OPERAND (v
, 1),
268 TYPE_MAX_VALUE (domain
)))
274 v
= TREE_OPERAND (v
, 0);
281 if (TREE_CODE (TREE_TYPE (v
)) != ARRAY_TYPE
)
286 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
287 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
289 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
293 v
= TREE_OPERAND (v
, 0);
294 if (TREE_CODE (v
) == COMPONENT_REF
295 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
298 tree fld_chain
= DECL_CHAIN (TREE_OPERAND (v
, 1));
299 for (; fld_chain
; fld_chain
= DECL_CHAIN (fld_chain
))
300 if (TREE_CODE (fld_chain
) == FIELD_DECL
)
308 v
= TREE_OPERAND (v
, 0);
310 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
311 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
313 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
317 v
= TREE_OPERAND (v
, 0);
335 var_size
= TYPE_SIZE_UNIT (TREE_TYPE (var
));
336 else if (!pt_var_size
)
337 return unknown
[object_size_type
];
339 var_size
= pt_var_size
;
340 bytes
= compute_object_offset (TREE_OPERAND (ptr
, 0), var
);
341 if (bytes
!= error_mark_node
)
343 if (TREE_CODE (bytes
) == INTEGER_CST
344 && tree_int_cst_lt (var_size
, bytes
))
345 bytes
= size_zero_node
;
347 bytes
= size_binop (MINUS_EXPR
, var_size
, bytes
);
351 && TREE_CODE (pt_var
) == MEM_REF
352 && bytes
!= error_mark_node
)
354 tree bytes2
= compute_object_offset (TREE_OPERAND (ptr
, 0), pt_var
);
355 if (bytes2
!= error_mark_node
)
357 if (TREE_CODE (bytes2
) == INTEGER_CST
358 && tree_int_cst_lt (pt_var_size
, bytes2
))
359 bytes2
= size_zero_node
;
361 bytes2
= size_binop (MINUS_EXPR
, pt_var_size
, bytes2
);
362 bytes
= size_binop (MIN_EXPR
, bytes
, bytes2
);
366 else if (!pt_var_size
)
367 return unknown
[object_size_type
];
371 if (tree_fits_uhwi_p (bytes
))
372 return tree_to_uhwi (bytes
);
374 return unknown
[object_size_type
];
/* NOTE(review): sizes an allocation call.  Reads the "alloc_size"
   attribute from the callee's type to find the 1-based size argument
   position(s) (arg1, and arg2 for two-argument forms like calloc),
   converting them to 0-based.  For BUILT_IN_CALLOC the size is
   arg1 * arg2; for malloc/alloca the single argument.  Non-constant
   size arguments yield unknown[object_size_type].  Extraction dropped
   interior lines (e.g. the NULL-callee check before line 394, the
   case bodies around 410-415); code kept byte-identical.  */
378 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
379 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
380 argument from __builtin_object_size. If unknown, return
381 unknown[object_size_type]. */
383 static unsigned HOST_WIDE_INT
384 alloc_object_size (const gcall
*call
, int object_size_type
)
386 tree callee
, bytes
= NULL_TREE
;
388 int arg1
= -1, arg2
= -1;
390 gcc_assert (is_gimple_call (call
));
392 callee
= gimple_call_fndecl (call
);
394 return unknown
[object_size_type
];
396 alloc_size
= lookup_attribute ("alloc_size",
397 TYPE_ATTRIBUTES (TREE_TYPE (callee
)));
398 if (alloc_size
&& TREE_VALUE (alloc_size
))
400 tree p
= TREE_VALUE (alloc_size
);
402 arg1
= TREE_INT_CST_LOW (TREE_VALUE (p
))-1;
404 arg2
= TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p
)))-1;
407 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
408 switch (DECL_FUNCTION_CODE (callee
))
410 case BUILT_IN_CALLOC
:
413 case BUILT_IN_MALLOC
:
414 case BUILT_IN_ALLOCA
:
415 case BUILT_IN_ALLOCA_WITH_ALIGN
:
421 if (arg1
< 0 || arg1
>= (int)gimple_call_num_args (call
)
422 || TREE_CODE (gimple_call_arg (call
, arg1
)) != INTEGER_CST
424 && (arg2
>= (int)gimple_call_num_args (call
)
425 || TREE_CODE (gimple_call_arg (call
, arg2
)) != INTEGER_CST
)))
426 return unknown
[object_size_type
];
429 bytes
= size_binop (MULT_EXPR
,
430 fold_convert (sizetype
, gimple_call_arg (call
, arg1
)),
431 fold_convert (sizetype
, gimple_call_arg (call
, arg2
)));
433 bytes
= fold_convert (sizetype
, gimple_call_arg (call
, arg1
));
435 if (bytes
&& tree_fits_uhwi_p (bytes
))
436 return tree_to_uhwi (bytes
);
438 return unknown
[object_size_type
];
/* NOTE(review): for memcpy/strcpy-family builtins (and their _CHK
   variants, and __builtin_assume_aligned) that return their first
   argument, returns that argument so the caller can propagate its size.
   The NULL return and the default case were dropped by extraction;
   code kept byte-identical.  */
442 /* If object size is propagated from one of function's arguments directly
443 to its return value, return that argument for GIMPLE_CALL statement CALL.
444 Otherwise return NULL. */
447 pass_through_call (const gcall
*call
)
449 tree callee
= gimple_call_fndecl (call
);
452 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
453 switch (DECL_FUNCTION_CODE (callee
))
455 case BUILT_IN_MEMCPY
:
456 case BUILT_IN_MEMMOVE
:
457 case BUILT_IN_MEMSET
:
458 case BUILT_IN_STRCPY
:
459 case BUILT_IN_STRNCPY
:
460 case BUILT_IN_STRCAT
:
461 case BUILT_IN_STRNCAT
:
462 case BUILT_IN_MEMCPY_CHK
:
463 case BUILT_IN_MEMMOVE_CHK
:
464 case BUILT_IN_MEMSET_CHK
:
465 case BUILT_IN_STRCPY_CHK
:
466 case BUILT_IN_STRNCPY_CHK
:
467 case BUILT_IN_STPNCPY_CHK
:
468 case BUILT_IN_STRCAT_CHK
:
469 case BUILT_IN_STRNCAT_CHK
:
470 case BUILT_IN_ASSUME_ALIGNED
:
471 if (gimple_call_num_args (call
) >= 1)
472 return gimple_call_arg (call
, 0);
/* NOTE(review): public entry point.  ADDR_EXPR pointers go straight to
   addr_object_size; SSA_NAME pointers are computed lazily and cached in
   object_sizes[]/computed[].  The computation is two-pass: pass 1 walks
   use-def chains (collect_object_sizes_for), collecting SSA versions
   caught in dependency cycles into osi.reexamine; pass 2 iterates those
   to a fixed point, with check_for_plus_in_loops run first in minimum
   modes (object_size_type & 2) to zero out pointers grown in loops.
   Extraction dropped interior lines (osi.pass/osi.depths setup, the
   fixed-point loop header around original lines 557-586, brace lines);
   code kept byte-identical.  */
482 /* Compute __builtin_object_size value for PTR. OBJECT_SIZE_TYPE is the
483 second argument from __builtin_object_size. */
485 unsigned HOST_WIDE_INT
486 compute_builtin_object_size (tree ptr
, int object_size_type
)
488 gcc_assert (object_size_type
>= 0 && object_size_type
<= 3);
491 init_offset_limit ();
493 if (TREE_CODE (ptr
) == ADDR_EXPR
)
494 return addr_object_size (NULL
, ptr
, object_size_type
);
496 if (TREE_CODE (ptr
) == SSA_NAME
497 && POINTER_TYPE_P (TREE_TYPE (ptr
))
498 && computed
[object_size_type
] != NULL
)
500 if (!bitmap_bit_p (computed
[object_size_type
], SSA_NAME_VERSION (ptr
)))
502 struct object_size_info osi
;
506 if (num_ssa_names
> object_sizes
[object_size_type
].length ())
507 object_sizes
[object_size_type
].safe_grow (num_ssa_names
);
510 fprintf (dump_file
, "Computing %s %sobject size for ",
511 (object_size_type
& 2) ? "minimum" : "maximum",
512 (object_size_type
& 1) ? "sub" : "");
513 print_generic_expr (dump_file
, ptr
, dump_flags
);
514 fprintf (dump_file
, ":\n");
517 osi
.visited
= BITMAP_ALLOC (NULL
);
518 osi
.reexamine
= BITMAP_ALLOC (NULL
);
519 osi
.object_size_type
= object_size_type
;
524 /* First pass: walk UD chains, compute object sizes that
525 can be computed. osi.reexamine bitmap at the end will
526 contain what variables were found in dependency cycles
527 and therefore need to be reexamined. */
530 collect_object_sizes_for (&osi
, ptr
);
532 /* Second pass: keep recomputing object sizes of variables
533 that need reexamination, until no object sizes are
534 increased or all object sizes are computed. */
535 if (! bitmap_empty_p (osi
.reexamine
))
537 bitmap reexamine
= BITMAP_ALLOC (NULL
);
539 /* If looking for minimum instead of maximum object size,
540 detect cases where a pointer is increased in a loop.
541 Although even without this detection pass 2 would eventually
542 terminate, it could take a long time. If a pointer is
543 increasing this way, we need to assume 0 object size.
544 E.g. p = &buf[0]; while (cond) p = p + 4; */
545 if (object_size_type
& 2)
547 osi
.depths
= XCNEWVEC (unsigned int, num_ssa_names
);
548 osi
.stack
= XNEWVEC (unsigned int, num_ssa_names
);
551 /* collect_object_sizes_for is changing
552 osi.reexamine bitmap, so iterate over a copy. */
553 bitmap_copy (reexamine
, osi
.reexamine
);
554 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
555 if (bitmap_bit_p (osi
.reexamine
, i
))
556 check_for_plus_in_loops (&osi
, ssa_name (i
));
569 /* collect_object_sizes_for is changing
570 osi.reexamine bitmap, so iterate over a copy. */
571 bitmap_copy (reexamine
, osi
.reexamine
);
572 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
573 if (bitmap_bit_p (osi
.reexamine
, i
))
575 collect_object_sizes_for (&osi
, ssa_name (i
));
576 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
578 fprintf (dump_file
, "Reexamining ");
579 print_generic_expr (dump_file
, ssa_name (i
),
581 fprintf (dump_file
, "\n");
587 BITMAP_FREE (reexamine
);
589 EXECUTE_IF_SET_IN_BITMAP (osi
.reexamine
, 0, i
, bi
)
590 bitmap_set_bit (computed
[object_size_type
], i
);
592 /* Debugging dumps. */
595 EXECUTE_IF_SET_IN_BITMAP (osi
.visited
, 0, i
, bi
)
596 if (object_sizes
[object_size_type
][i
]
597 != unknown
[object_size_type
])
599 print_generic_expr (dump_file
, ssa_name (i
),
602 ": %s %sobject size "
603 HOST_WIDE_INT_PRINT_UNSIGNED
"\n",
604 (object_size_type
& 2) ? "minimum" : "maximum",
605 (object_size_type
& 1) ? "sub" : "",
606 object_sizes
[object_size_type
][i
]);
610 BITMAP_FREE (osi
.reexamine
);
611 BITMAP_FREE (osi
.visited
);
614 return object_sizes
[object_size_type
][SSA_NAME_VERSION (ptr
)];
617 return unknown
[object_size_type
];
/* NOTE(review): records the size of non-SSA VALUE into
   object_sizes[...][SSA_NAME_VERSION (ptr)].  Only ADDR_EXPR values get
   a real size (via addr_object_size); everything else is unknown.  The
   final if/else keeps the maximum for max modes and the minimum for min
   modes (the else between them was dropped by extraction).  Code kept
   byte-identical.  */
620 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
623 expr_object_size (struct object_size_info
*osi
, tree ptr
, tree value
)
625 int object_size_type
= osi
->object_size_type
;
626 unsigned int varno
= SSA_NAME_VERSION (ptr
);
627 unsigned HOST_WIDE_INT bytes
;
629 gcc_assert (object_sizes
[object_size_type
][varno
]
630 != unknown
[object_size_type
]);
631 gcc_assert (osi
->pass
== 0);
633 if (TREE_CODE (value
) == WITH_SIZE_EXPR
)
634 value
= TREE_OPERAND (value
, 0);
636 /* Pointer variables should have been handled by merge_object_sizes. */
637 gcc_assert (TREE_CODE (value
) != SSA_NAME
638 || !POINTER_TYPE_P (TREE_TYPE (value
)));
640 if (TREE_CODE (value
) == ADDR_EXPR
)
641 bytes
= addr_object_size (osi
, value
, object_size_type
);
643 bytes
= unknown
[object_size_type
];
645 if ((object_size_type
& 2) == 0)
647 if (object_sizes
[object_size_type
][varno
] < bytes
)
648 object_sizes
[object_size_type
][varno
] = bytes
;
652 if (object_sizes
[object_size_type
][varno
] > bytes
)
653 object_sizes
[object_size_type
][varno
] = bytes
;
/* NOTE(review): same merge pattern as expr_object_size, but BYTES comes
   from alloc_object_size (CALL).  Max modes keep the larger value, min
   modes the smaller.  Code kept byte-identical.  */
658 /* Compute object_sizes for PTR, defined to the result of a call. */
661 call_object_size (struct object_size_info
*osi
, tree ptr
, gcall
*call
)
663 int object_size_type
= osi
->object_size_type
;
664 unsigned int varno
= SSA_NAME_VERSION (ptr
);
665 unsigned HOST_WIDE_INT bytes
;
667 gcc_assert (is_gimple_call (call
));
669 gcc_assert (object_sizes
[object_size_type
][varno
]
670 != unknown
[object_size_type
]);
671 gcc_assert (osi
->pass
== 0);
673 bytes
= alloc_object_size (call
, object_size_type
);
675 if ((object_size_type
& 2) == 0)
677 if (object_sizes
[object_size_type
][varno
] < bytes
)
678 object_sizes
[object_size_type
][varno
] = bytes
;
682 if (object_sizes
[object_size_type
][varno
] > bytes
)
683 object_sizes
[object_size_type
][varno
] = bytes
;
/* NOTE(review): forces PTR's entry toward unknown[object_size_type]
   (-1 in max modes, 0 in min modes) using the same merge pattern as the
   other *_object_size helpers.  Code kept byte-identical.  */
688 /* Compute object_sizes for PTR, defined to an unknown value. */
691 unknown_object_size (struct object_size_info
*osi
, tree ptr
)
693 int object_size_type
= osi
->object_size_type
;
694 unsigned int varno
= SSA_NAME_VERSION (ptr
);
695 unsigned HOST_WIDE_INT bytes
;
697 gcc_assert (object_sizes
[object_size_type
][varno
]
698 != unknown
[object_size_type
]);
699 gcc_assert (osi
->pass
== 0);
701 bytes
= unknown
[object_size_type
];
703 if ((object_size_type
& 2) == 0)
705 if (object_sizes
[object_size_type
][varno
] < bytes
)
706 object_sizes
[object_size_type
][varno
] = bytes
;
710 if (object_sizes
[object_size_type
][varno
] > bytes
)
711 object_sizes
[object_size_type
][varno
] = bytes
;
/* NOTE(review): propagates ORIG's size minus OFFSET into DEST's entry.
   Offsets at or beyond offset_limit are treated as pointer subtraction,
   so DEST becomes unknown.  On pass 0 it recurses into
   collect_object_sizes_for (ORIG) first (the osi->pass check before line
   736 was dropped by extraction).  Returns true iff ORIG is still in the
   reexamine set, i.e. DEST may need recomputation later.  Code kept
   byte-identical.  */
716 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
717 the object size might need reexamination later. */
720 merge_object_sizes (struct object_size_info
*osi
, tree dest
, tree orig
,
721 unsigned HOST_WIDE_INT offset
)
723 int object_size_type
= osi
->object_size_type
;
724 unsigned int varno
= SSA_NAME_VERSION (dest
);
725 unsigned HOST_WIDE_INT orig_bytes
;
727 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
729 if (offset
>= offset_limit
)
731 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
736 collect_object_sizes_for (osi
, orig
);
738 orig_bytes
= object_sizes
[object_size_type
][SSA_NAME_VERSION (orig
)];
739 if (orig_bytes
!= unknown
[object_size_type
])
740 orig_bytes
= (offset
> orig_bytes
)
741 ? (unsigned HOST_WIDE_INT
) 0 : orig_bytes
- offset
;
743 if ((object_size_type
& 2) == 0)
745 if (object_sizes
[object_size_type
][varno
] < orig_bytes
)
747 object_sizes
[object_size_type
][varno
] = orig_bytes
;
753 if (object_sizes
[object_size_type
][varno
] > orig_bytes
)
755 object_sizes
[object_size_type
][varno
] = orig_bytes
;
759 return bitmap_bit_p (osi
->reexamine
, SSA_NAME_VERSION (orig
));
/* NOTE(review): handles VAR = PTR + CST, either as a POINTER_PLUS_EXPR
   (rhs1/rhs2) or as &MEM_REF[base, off] spelled as an ADDR_EXPR.  When
   the base is an SSA_NAME it delegates to merge_object_sizes with the
   constant offset; when it is an ADDR_EXPR it sizes the base directly
   and subtracts the offset (the "bytes -= off" arm around original line
   814 was dropped by extraction).  Non-constant or oversized offsets
   yield unknown.  Returns the reexamination flag from merge paths (the
   "return false" lines were dropped).  Code kept byte-identical.  */
763 /* Compute object_sizes for VAR, defined to the result of an assignment
764 with operator POINTER_PLUS_EXPR. Return true if the object size might
765 need reexamination later. */
768 plus_stmt_object_size (struct object_size_info
*osi
, tree var
, gimple
*stmt
)
770 int object_size_type
= osi
->object_size_type
;
771 unsigned int varno
= SSA_NAME_VERSION (var
);
772 unsigned HOST_WIDE_INT bytes
;
775 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
777 op0
= gimple_assign_rhs1 (stmt
);
778 op1
= gimple_assign_rhs2 (stmt
);
780 else if (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
)
782 tree rhs
= TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
783 gcc_assert (TREE_CODE (rhs
) == MEM_REF
);
784 op0
= TREE_OPERAND (rhs
, 0);
785 op1
= TREE_OPERAND (rhs
, 1);
790 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
793 /* Handle PTR + OFFSET here. */
794 if (TREE_CODE (op1
) == INTEGER_CST
795 && (TREE_CODE (op0
) == SSA_NAME
796 || TREE_CODE (op0
) == ADDR_EXPR
))
798 if (! tree_fits_uhwi_p (op1
))
799 bytes
= unknown
[object_size_type
];
800 else if (TREE_CODE (op0
) == SSA_NAME
)
801 return merge_object_sizes (osi
, var
, op0
, tree_to_uhwi (op1
));
804 unsigned HOST_WIDE_INT off
= tree_to_uhwi (op1
);
806 /* op0 will be ADDR_EXPR here. */
807 bytes
= addr_object_size (osi
, op0
, object_size_type
);
808 if (bytes
== unknown
[object_size_type
])
810 else if (off
> offset_limit
)
811 bytes
= unknown
[object_size_type
];
812 else if (off
> bytes
)
819 bytes
= unknown
[object_size_type
];
821 if ((object_size_type
& 2) == 0)
823 if (object_sizes
[object_size_type
][varno
] < bytes
)
824 object_sizes
[object_size_type
][varno
] = bytes
;
828 if (object_sizes
[object_size_type
][varno
] > bytes
)
829 object_sizes
[object_size_type
][varno
] = bytes
;
/* NOTE(review): for VAR = cond ? then_ : else_, merges the sizes of both
   arms into VAR — SSA arms via merge_object_sizes (OR-ing their
   reexamination flags), constant arms via expr_object_size.  The final
   "return reexamine;" was dropped by extraction.  Code kept
   byte-identical.  */
835 /* Compute object_sizes for VAR, defined at STMT, which is
836 a COND_EXPR. Return true if the object size might need reexamination
840 cond_expr_object_size (struct object_size_info
*osi
, tree var
, gimple
*stmt
)
843 int object_size_type
= osi
->object_size_type
;
844 unsigned int varno
= SSA_NAME_VERSION (var
);
845 bool reexamine
= false;
847 gcc_assert (gimple_assign_rhs_code (stmt
) == COND_EXPR
);
849 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
852 then_
= gimple_assign_rhs2 (stmt
);
853 else_
= gimple_assign_rhs3 (stmt
);
855 if (TREE_CODE (then_
) == SSA_NAME
)
856 reexamine
|= merge_object_sizes (osi
, var
, then_
, 0);
858 expr_object_size (osi
, var
, then_
);
860 if (TREE_CODE (else_
) == SSA_NAME
)
861 reexamine
|= merge_object_sizes (osi
, var
, else_
, 0);
863 expr_object_size (osi
, var
, else_
);
/* NOTE(review): the main use-def walker.  Seeds VAR's entry on first
   visit (0 for max modes, -1 for min modes), detects dependency cycles
   via the visited bitmap and defers them to osi->reexamine, then
   dispatches on the defining statement: GIMPLE_ASSIGN (pointer-plus /
   &MEM_REF, COND_EXPR, plain copies), GIMPLE_CALL (pass-through builtins
   or allocation calls), GIMPLE_ASM (unknown), GIMPLE_NOP (PARM_DECL
   default defs sized via expr_object_size, other uninitialized names
   unknown) and GIMPLE_PHI (merge every argument).  At the end, finished
   or unknown entries are marked computed and removed from reexamine;
   otherwise the name is queued for reexamination.  Extraction dropped
   many interior lines (case labels, braces, early returns); code kept
   byte-identical — not compilable as-is.  */
868 /* Compute object sizes for VAR.
869 For ADDR_EXPR an object size is the number of remaining bytes
870 to the end of the object (where what is considered an object depends on
871 OSI->object_size_type).
872 For allocation GIMPLE_CALL like malloc or calloc object size is the size
874 For POINTER_PLUS_EXPR where second operand is a constant integer,
875 object size is object size of the first operand minus the constant.
876 If the constant is bigger than the number of remaining bytes until the
877 end of the object, object size is 0, but if it is instead a pointer
878 subtraction, object size is unknown[object_size_type].
879 To differentiate addition from subtraction, ADDR_EXPR returns
880 unknown[object_size_type] for all objects bigger than half of the address
881 space, and constants less than half of the address space are considered
882 addition, while bigger constants subtraction.
883 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
884 object size is object size of that argument.
885 Otherwise, object size is the maximum of object sizes of variables
886 that it might be set to. */
889 collect_object_sizes_for (struct object_size_info
*osi
, tree var
)
891 int object_size_type
= osi
->object_size_type
;
892 unsigned int varno
= SSA_NAME_VERSION (var
);
896 if (bitmap_bit_p (computed
[object_size_type
], varno
))
901 if (bitmap_set_bit (osi
->visited
, varno
))
903 object_sizes
[object_size_type
][varno
]
904 = (object_size_type
& 2) ? -1 : 0;
908 /* Found a dependency loop. Mark the variable for later
910 bitmap_set_bit (osi
->reexamine
, varno
);
911 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
913 fprintf (dump_file
, "Found a dependency loop at ");
914 print_generic_expr (dump_file
, var
, dump_flags
);
915 fprintf (dump_file
, "\n");
921 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
923 fprintf (dump_file
, "Visiting use-def links for ");
924 print_generic_expr (dump_file
, var
, dump_flags
);
925 fprintf (dump_file
, "\n");
928 stmt
= SSA_NAME_DEF_STMT (var
);
931 switch (gimple_code (stmt
))
935 tree rhs
= gimple_assign_rhs1 (stmt
);
936 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
937 || (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
938 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
))
939 reexamine
= plus_stmt_object_size (osi
, var
, stmt
);
940 else if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
941 reexamine
= cond_expr_object_size (osi
, var
, stmt
);
942 else if (gimple_assign_single_p (stmt
)
943 || gimple_assign_unary_nop_p (stmt
))
945 if (TREE_CODE (rhs
) == SSA_NAME
946 && POINTER_TYPE_P (TREE_TYPE (rhs
)))
947 reexamine
= merge_object_sizes (osi
, var
, rhs
, 0);
949 expr_object_size (osi
, var
, rhs
);
952 unknown_object_size (osi
, var
);
958 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
959 tree arg
= pass_through_call (call_stmt
);
962 if (TREE_CODE (arg
) == SSA_NAME
963 && POINTER_TYPE_P (TREE_TYPE (arg
)))
964 reexamine
= merge_object_sizes (osi
, var
, arg
, 0);
966 expr_object_size (osi
, var
, arg
);
969 call_object_size (osi
, var
, call_stmt
);
974 /* Pointers defined by __asm__ statements can point anywhere. */
975 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
979 if (SSA_NAME_VAR (var
)
980 && TREE_CODE (SSA_NAME_VAR (var
)) == PARM_DECL
)
981 expr_object_size (osi
, var
, SSA_NAME_VAR (var
));
983 /* Uninitialized SSA names point nowhere. */
984 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
991 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
993 tree rhs
= gimple_phi_arg (stmt
, i
)->def
;
995 if (object_sizes
[object_size_type
][varno
]
996 == unknown
[object_size_type
])
999 if (TREE_CODE (rhs
) == SSA_NAME
)
1000 reexamine
|= merge_object_sizes (osi
, var
, rhs
, 0);
1001 else if (osi
->pass
== 0)
1002 expr_object_size (osi
, var
, rhs
);
1012 || object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
1014 bitmap_set_bit (computed
[object_size_type
], varno
);
1015 bitmap_clear_bit (osi
->reexamine
, varno
);
1019 bitmap_set_bit (osi
->reexamine
, varno
);
1020 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1022 fprintf (dump_file
, "Need to reexamine ");
1023 print_generic_expr (dump_file
, var
, dump_flags
);
1024 fprintf (dump_file
, "\n");
/* NOTE(review): recursive DFS over use-def chains, carrying DEPTH which
   increases at each pointer addition with a nonzero constant.  Revisiting
   VARNO at a different depth proves a loop that grows the pointer: every
   name on the stack back to osi->stack is then finalized with size 0
   (minimum-mode assumption) and removed from reexamine.  Otherwise the
   walk pushes VARNO, recurses through copies, POINTER_PLUS, pass-through
   calls and PHI args, and pops (depths[varno] = 0) on the way out.
   Extraction dropped interior lines (stack pops, case labels, braces);
   code kept byte-identical.  */
1030 /* Helper function for check_for_plus_in_loops. Called recursively
1034 check_for_plus_in_loops_1 (struct object_size_info
*osi
, tree var
,
1037 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
1038 unsigned int varno
= SSA_NAME_VERSION (var
);
1040 if (osi
->depths
[varno
])
1042 if (osi
->depths
[varno
] != depth
)
1046 /* Found a loop involving pointer addition. */
1047 for (sp
= osi
->tos
; sp
> osi
->stack
; )
1050 bitmap_clear_bit (osi
->reexamine
, *sp
);
1051 bitmap_set_bit (computed
[osi
->object_size_type
], *sp
);
1052 object_sizes
[osi
->object_size_type
][*sp
] = 0;
1059 else if (! bitmap_bit_p (osi
->reexamine
, varno
))
1062 osi
->depths
[varno
] = depth
;
1063 *osi
->tos
++ = varno
;
1065 switch (gimple_code (stmt
))
1070 if ((gimple_assign_single_p (stmt
)
1071 || gimple_assign_unary_nop_p (stmt
))
1072 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
1074 tree rhs
= gimple_assign_rhs1 (stmt
);
1076 check_for_plus_in_loops_1 (osi
, rhs
, depth
);
1078 else if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1080 tree basevar
= gimple_assign_rhs1 (stmt
);
1081 tree cst
= gimple_assign_rhs2 (stmt
);
1083 gcc_assert (TREE_CODE (cst
) == INTEGER_CST
);
1085 check_for_plus_in_loops_1 (osi
, basevar
,
1086 depth
+ !integer_zerop (cst
));
1095 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
1096 tree arg
= pass_through_call (call_stmt
);
1099 if (TREE_CODE (arg
) == SSA_NAME
)
1100 check_for_plus_in_loops_1 (osi
, arg
, depth
);
1111 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1113 tree rhs
= gimple_phi_arg (stmt
, i
)->def
;
1115 if (TREE_CODE (rhs
) == SSA_NAME
)
1116 check_for_plus_in_loops_1 (osi
, rhs
, depth
);
1125 osi
->depths
[varno
] = 0;
/* NOTE(review): entry for loop detection in minimum modes.  Only VARs
   defined by VAR = BASEVAR + nonzero-CST can start a growing loop; it
   seeds depths[basevar] = 1, pushes basevar, and runs the recursive
   helper at depth 2, cleaning up afterwards.  Code kept byte-identical.  */
1130 /* Check if some pointer we are computing object size of is being increased
1131 within a loop. If yes, assume all the SSA variables participating in
1132 that loop have minimum object sizes 0. */
1135 check_for_plus_in_loops (struct object_size_info
*osi
, tree var
)
1137 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
1139 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1140 and looked for a POINTER_PLUS_EXPR in the pass-through
1141 argument, if any. In GIMPLE, however, such an expression
1142 is not a valid call operand. */
1144 if (is_gimple_assign (stmt
)
1145 && gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1147 tree basevar
= gimple_assign_rhs1 (stmt
);
1148 tree cst
= gimple_assign_rhs2 (stmt
);
1150 gcc_assert (TREE_CODE (cst
) == INTEGER_CST
);
1152 if (integer_zerop (cst
))
1155 osi
->depths
[SSA_NAME_VERSION (basevar
)] = 1;
1156 *osi
->tos
++ = SSA_NAME_VERSION (basevar
);
1157 check_for_plus_in_loops_1 (osi
, var
, 2);
1158 osi
->depths
[SSA_NAME_VERSION (basevar
)] = 0;
/* NOTE(review): grows the four object_sizes vectors to num_ssa_names and
   allocates the four computed bitmaps, then (re)initializes offset_limit.
   An early-return guard for already-initialized state (original lines
   1170-1173) appears to have been dropped by extraction — verify against
   the full file.  Code kept byte-identical.  */
1164 /* Initialize data structures for the object size computation. */
1167 init_object_sizes (void)
1169 int object_size_type
;
1174 for (object_size_type
= 0; object_size_type
<= 3; object_size_type
++)
1176 object_sizes
[object_size_type
].safe_grow (num_ssa_names
)
;
1177 computed
[object_size_type
] = BITMAP_ALLOC (NULL
);
1180 init_offset_limit ();
/* NOTE(review): releases the four object_sizes vectors and frees the
   four computed bitmaps — the inverse of init_object_sizes.  Code kept
   byte-identical.  */
1184 /* Destroy data structures after the object size computation. */
1187 fini_object_sizes (void)
1189 int object_size_type
;
1191 for (object_size_type
= 0; object_size_type
<= 3; object_size_type
++)
1193 object_sizes
[object_size_type
].release ();
1194 BITMAP_FREE (computed
[object_size_type
]);
/* NOTE(review): pass metadata for the objsz GIMPLE pass: no opt group,
   no timevar, requires CFG and SSA.  The name field line (presumably
   "objsz", original line 1206) was dropped by extraction.  Code kept
   byte-identical.  */
1199 /* Simple pass to optimize all __builtin_object_size () builtins. */
1203 const pass_data pass_data_object_sizes
=
1205 GIMPLE_PASS
, /* type */
1207 OPTGROUP_NONE
, /* optinfo_flags */
1208 TV_NONE
, /* tv_id */
1209 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1210 0, /* properties_provided */
1211 0, /* properties_destroyed */
1212 0, /* todo_flags_start */
1213 0, /* todo_flags_finish */
/* NOTE(review): the pass class.  insert_min_max_p defaults to false and
   is toggled via set_pass_param (the pass is instantiated twice — early
   with MIN/MAX insertion, late folding to constants; see execute below).
   clone() supports multiple pass instances.  Access-specifier and brace
   lines were dropped by extraction; code kept byte-identical.  */
1216 class pass_object_sizes
: public gimple_opt_pass
1219 pass_object_sizes (gcc::context
*ctxt
)
1220 : gimple_opt_pass (pass_data_object_sizes
, ctxt
), insert_min_max_p (false)
1223 /* opt_pass methods: */
1224 opt_pass
* clone () { return new pass_object_sizes (m_ctxt
); }
1225 void set_pass_param (unsigned int n
, bool param
)
1227 gcc_assert (n
== 0);
1228 insert_min_max_p
= param
;
1230 virtual unsigned int execute (function
*);
1233 /* Determines whether the pass instance creates MIN/MAX_EXPRs. */
1234 bool insert_min_max_p
;
1235 }; // class pass_object_sizes
/* NOTE(review): identity valueizer passed to
   gimple_fold_stmt_to_constant in execute below; the body (presumably
   "return t;") was dropped by extraction.  Code kept byte-identical.  */
1237 /* Dummy valueize function. */
1240 do_valueize (tree t
)
/* NOTE(review): walks every statement of every basic block looking for
   __builtin_object_size calls.  With insert_min_max_p (early instance),
   for types 1 and 3 it renames the call's lhs to a fresh SSA name and
   inserts lhs = MIN_EXPR/MAX_EXPR (tem, computed-constant) after it,
   deferring final folding.  Otherwise (late instance) it folds the call
   to a constant — falling back to -1 for types 0/1 and 0 for types 2/3
   when no size is known — and replaces all uses of the lhs.  Calls
   fini_object_sizes on exit.  Extraction dropped interior lines (the
   early "continue" paths, bb/result declarations, closing braces and the
   return value); code kept byte-identical — not compilable as-is.  */
1246 pass_object_sizes::execute (function
*fun
)
1249 FOR_EACH_BB_FN (bb
, fun
)
1251 gimple_stmt_iterator i
;
1252 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1255 gimple
*call
= gsi_stmt (i
);
1256 if (!gimple_call_builtin_p (call
, BUILT_IN_OBJECT_SIZE
))
1259 init_object_sizes ();
1261 /* If insert_min_max_p, only attempt to fold
1262 __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
1263 and rather than folding the builtin to the constant if any,
1264 create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
1265 call result and the computed constant. */
1266 if (insert_min_max_p
)
1268 tree ost
= gimple_call_arg (call
, 1);
1269 if (tree_fits_uhwi_p (ost
))
1271 unsigned HOST_WIDE_INT object_size_type
= tree_to_uhwi (ost
);
1272 tree ptr
= gimple_call_arg (call
, 0);
1273 tree lhs
= gimple_call_lhs (call
);
1274 if ((object_size_type
== 1 || object_size_type
== 3)
1275 && (TREE_CODE (ptr
) == ADDR_EXPR
1276 || TREE_CODE (ptr
) == SSA_NAME
)
1279 tree type
= TREE_TYPE (lhs
);
1280 unsigned HOST_WIDE_INT bytes
1281 = compute_builtin_object_size (ptr
, object_size_type
);
1282 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
== 1
1284 && wi::fits_to_tree_p (bytes
, type
))
1286 tree tem
= make_ssa_name (type
);
1287 gimple_call_set_lhs (call
, tem
);
1289 = object_size_type
== 1 ? MIN_EXPR
: MAX_EXPR
;
1290 tree cst
= build_int_cstu (type
, bytes
);
1292 = gimple_build_assign (lhs
, code
, tem
, cst
);
1293 gsi_insert_after (&i
, g
, GSI_NEW_STMT
);
1301 tree lhs
= gimple_call_lhs (call
);
1305 result
= gimple_fold_stmt_to_constant (call
, do_valueize
);
1308 tree ost
= gimple_call_arg (call
, 1);
1310 if (tree_fits_uhwi_p (ost
))
1312 unsigned HOST_WIDE_INT object_size_type
= tree_to_uhwi (ost
);
1314 if (object_size_type
< 2)
1315 result
= fold_convert (size_type_node
,
1316 integer_minus_one_node
);
1317 else if (object_size_type
< 4)
1318 result
= build_zero_cst (size_type_node
);
1325 gcc_assert (TREE_CODE (result
) == INTEGER_CST
);
1327 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1329 fprintf (dump_file
, "Simplified\n ");
1330 print_gimple_stmt (dump_file
, call
, 0, dump_flags
);
1331 fprintf (dump_file
, " to ");
1332 print_generic_expr (dump_file
, result
, 0);
1333 fprintf (dump_file
, "\n");
1336 /* Propagate into all uses and fold those stmts. */
1337 replace_uses_by (lhs
, result
);
1341 fini_object_sizes ();
/* NOTE(review): factory hooked into the pass manager; returns a fresh
   pass_object_sizes instance.  Its return-type line (gimple_opt_pass *)
   was dropped by extraction.  Code kept byte-identical.  */
1348 make_pass_object_sizes (gcc::context
*ctxt
)
1350 return new pass_object_sizes (ctxt
);