1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
26 #include "diagnostic-core.h"
27 #include "gimple-pretty-print.h"
30 #include "gimple-iterator.h"
31 #include "gimple-ssa.h"
32 #include "tree-ssanames.h"
33 #include "tree-pass.h"
34 #include "tree-ssa-propagate.h"
/* Per-query state for the object-size propagation walk.
   NOTE(review): this extraction is damaged -- statement lines are split
   and interior lines are missing.  Only the VISITED/REEXAMINE bitmaps
   and the STACK/TOS work-stack pointers are visible here; later code
   also references osi->object_size_type, osi->depths and osi->pass,
   which presumably belong to this struct -- confirm against the full
   source.  */
36 struct object_size_info
39 bitmap visited
, reexamine
;
43 unsigned int *stack
, *tos
;
/* Sentinel "size not known" value per object_size_type: all-ones for
   the two maximum modes (indices 0 and 1), zero for the two minimum
   modes (indices 2 and 3).  */
46 static const unsigned HOST_WIDE_INT unknown
[4] = { -1, -1, 0, 0 };
/* Forward declarations for the mutually recursive size-collection
   routines below.  NOTE(review): the extraction has dropped trailing
   parameter lists on some prototypes (e.g. addr_object_size and
   check_for_plus_in_loops_1 are cut short); compare with the
   definitions further down.  */
48 static tree
compute_object_offset (const_tree
, const_tree
);
49 static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info
*,
51 static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple
, int);
52 static tree
pass_through_call (const_gimple
);
53 static void collect_object_sizes_for (struct object_size_info
*, tree
);
54 static void expr_object_size (struct object_size_info
*, tree
, tree
);
55 static bool merge_object_sizes (struct object_size_info
*, tree
, tree
,
56 unsigned HOST_WIDE_INT
);
57 static bool plus_stmt_object_size (struct object_size_info
*, tree
, gimple
);
58 static bool cond_expr_object_size (struct object_size_info
*, tree
, gimple
);
59 static unsigned int compute_object_sizes (void);
60 static void init_offset_limit (void);
61 static void check_for_plus_in_loops (struct object_size_info
*, tree
);
62 static void check_for_plus_in_loops_1 (struct object_size_info
*, tree
,
/* Global caches, indexed by object_size_type (0..3): per-SSA-version
   size arrays, bitmaps of versions already computed, and the cutoff
   that separates "addition" offsets from "subtraction" ones.  */
65 /* object_sizes[0] is upper bound for number of bytes till the end of
67 object_sizes[1] is upper bound for number of bytes till the end of
68 the subobject (innermost array or field with address taken).
69 object_sizes[2] is lower bound for number of bytes till the end of
70 the object and object_sizes[3] lower bound for subobject. */
71 static unsigned HOST_WIDE_INT
*object_sizes
[4];
73 /* Bitmaps what object sizes have been computed already. */
74 static bitmap computed
[4];
76 /* Maximum value of offset we consider to be addition. */
77 static unsigned HOST_WIDE_INT offset_limit
;
/* Derive OFFSET_LIMIT from sizetype's maximum value when that fits a
   host-wide unsigned int.  NOTE(review): the extraction drops this
   function's signature line, braces, and the branch taken when the
   maximum does not fit -- confirm the fallback against the full
   source.  */
80 /* Initialize OFFSET_LIMIT variable. */
82 init_offset_limit (void)
84 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype
)))
85 offset_limit
= tree_to_uhwi (TYPE_MAX_VALUE (sizetype
));
/* Recursively compute the byte offset of EXPR within VAR, building a
   sizetype expression; error_mark_node signals an uncomputable offset.
   Visible cases handle COMPONENT_REF-style field offsets, no-op
   wrappers (VIEW_CONVERT_EXPR / NON_LVALUE_EXPR), an array indexing
   case that negates negative constant indices and multiplies by the
   element size, and a MEM_REF case folded via mem_ref_offset.
   NOTE(review): this extraction is damaged -- the function signature's
   return-type line, braces, several case labels and early returns are
   missing, so the mapping of fragments to switch cases is partly
   inferred; verify against the full source.  */
92 /* Compute offset of EXPR within VAR. Return error_mark_node
96 compute_object_offset (const_tree expr
, const_tree var
)
98 enum tree_code code
= PLUS_EXPR
;
102 return size_zero_node
;
104 switch (TREE_CODE (expr
))
/* Field access: offset of the base plus DECL_FIELD_OFFSET (+ the
   byte part of DECL_FIELD_BIT_OFFSET).  */
107 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
108 if (base
== error_mark_node
)
111 t
= TREE_OPERAND (expr
, 1);
112 off
= size_binop (PLUS_EXPR
, DECL_FIELD_OFFSET (t
),
113 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t
))
119 case VIEW_CONVERT_EXPR
:
120 case NON_LVALUE_EXPR
:
/* Transparent wrappers: same offset as the operand.  */
121 return compute_object_offset (TREE_OPERAND (expr
, 0), var
);
124 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
125 if (base
== error_mark_node
)
128 off
= TYPE_SIZE_UNIT (TREE_TYPE (expr
));
132 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
133 if (base
== error_mark_node
)
/* Array index: negate a negative constant index (switching the
   combining operation), then scale by the element size.  */
136 t
= TREE_OPERAND (expr
, 1);
137 if (TREE_CODE (t
) == INTEGER_CST
&& tree_int_cst_sgn (t
) < 0)
140 t
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (t
), t
);
142 t
= fold_convert (sizetype
, t
);
143 off
= size_binop (MULT_EXPR
, TYPE_SIZE_UNIT (TREE_TYPE (expr
)), t
);
/* MEM_REF: base must be an ADDR_EXPR; offset comes straight from
   the ref's constant offset.  */
147 gcc_assert (TREE_CODE (TREE_OPERAND (expr
, 0)) == ADDR_EXPR
);
148 return double_int_to_tree (sizetype
, mem_ref_offset (expr
));
151 return error_mark_node
;
154 return size_binop (code
, base
, off
);
/* NOTE(review): this extraction is damaged -- braces, several
   conditions, case labels and whole statements are missing throughout
   this function; the commentary below describes only what the visible
   fragments establish.  Strips handled components off PTR's operand to
   find the base PT_VAR; for a MEM_REF base it sizes the pointed-to
   object (recursively via compute_builtin_object_size, or via
   collect_object_sizes_for when OSI is available and the operand is an
   SSA name) and subtracts the MEM_REF offset with double_int
   arithmetic, guarding against negative/overflowing results; DECLs use
   DECL_SIZE_UNIT and STRING_CSTs their type size, each gated on
   offset_limit.  The subobject mode (object_size_type & 1) walks inner
   COMPONENT_REF/ARRAY_REF chains, refusing to size a trailing array
   field (the struct-hack / flexible-array-member idiom).  Finally the
   offset of the full reference is subtracted from the chosen size
   (clamped at zero), taking the MIN with the whole-object slack for
   MEM_REF bases.  */
158 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
159 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
160 If unknown, return unknown[object_size_type]. */
162 static unsigned HOST_WIDE_INT
163 addr_object_size (struct object_size_info
*osi
, const_tree ptr
,
164 int object_size_type
)
166 tree pt_var
, pt_var_size
= NULL_TREE
, var_size
, bytes
;
168 gcc_assert (TREE_CODE (ptr
) == ADDR_EXPR
);
170 pt_var
= TREE_OPERAND (ptr
, 0);
171 while (handled_component_p (pt_var
))
172 pt_var
= TREE_OPERAND (pt_var
, 0);
175 && TREE_CODE (pt_var
) == MEM_REF
)
177 unsigned HOST_WIDE_INT sz
;
179 if (!osi
|| (object_size_type
& 1) != 0
180 || TREE_CODE (TREE_OPERAND (pt_var
, 0)) != SSA_NAME
)
182 sz
= compute_builtin_object_size (TREE_OPERAND (pt_var
, 0),
183 object_size_type
& ~1);
187 tree var
= TREE_OPERAND (pt_var
, 0);
189 collect_object_sizes_for (osi
, var
);
190 if (bitmap_bit_p (computed
[object_size_type
],
191 SSA_NAME_VERSION (var
)))
192 sz
= object_sizes
[object_size_type
][SSA_NAME_VERSION (var
)];
194 sz
= unknown
[object_size_type
];
196 if (sz
!= unknown
[object_size_type
])
/* Subtract the MEM_REF offset; reject negative or non-uhwi
   results.  */
198 double_int dsz
= double_int::from_uhwi (sz
) - mem_ref_offset (pt_var
);
199 if (dsz
.is_negative ())
201 else if (dsz
.fits_uhwi ())
204 sz
= unknown
[object_size_type
];
207 if (sz
!= unknown
[object_size_type
] && sz
< offset_limit
)
208 pt_var_size
= size_int (sz
);
212 && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var
))
213 && tree_to_uhwi (DECL_SIZE_UNIT (pt_var
)) < offset_limit
)
214 pt_var_size
= DECL_SIZE_UNIT (pt_var
);
216 && TREE_CODE (pt_var
) == STRING_CST
217 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var
))
218 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var
)))
219 && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var
)))
221 pt_var_size
= TYPE_SIZE_UNIT (TREE_TYPE (pt_var
));
223 return unknown
[object_size_type
];
225 if (pt_var
!= TREE_OPERAND (ptr
, 0))
/* Subobject mode: walk back to the innermost interesting
   component reference.  */
229 if (object_size_type
& 1)
231 var
= TREE_OPERAND (ptr
, 0);
234 && TREE_CODE (var
) != BIT_FIELD_REF
235 && TREE_CODE (var
) != COMPONENT_REF
236 && TREE_CODE (var
) != ARRAY_REF
237 && TREE_CODE (var
) != ARRAY_RANGE_REF
238 && TREE_CODE (var
) != REALPART_EXPR
239 && TREE_CODE (var
) != IMAGPART_EXPR
)
240 var
= TREE_OPERAND (var
, 0);
241 if (var
!= pt_var
&& TREE_CODE (var
) == ARRAY_REF
)
242 var
= TREE_OPERAND (var
, 0);
243 if (! TYPE_SIZE_UNIT (TREE_TYPE (var
))
244 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var
)))
246 && tree_int_cst_lt (pt_var_size
,
247 TYPE_SIZE_UNIT (TREE_TYPE (var
)))))
249 else if (var
!= pt_var
&& TREE_CODE (pt_var
) == MEM_REF
)
252 /* For &X->fld, compute object size only if fld isn't the last
253 field, as struct { int i; char c[1]; } is often used instead
254 of flexible array member. */
255 while (v
&& v
!= pt_var
)
256 switch (TREE_CODE (v
))
259 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v
, 0)))
260 && TREE_CODE (TREE_OPERAND (v
, 1)) == INTEGER_CST
)
263 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v
, 0)));
265 && TYPE_MAX_VALUE (domain
)
266 && TREE_CODE (TYPE_MAX_VALUE (domain
))
268 && tree_int_cst_lt (TREE_OPERAND (v
, 1),
269 TYPE_MAX_VALUE (domain
)))
275 v
= TREE_OPERAND (v
, 0);
282 if (TREE_CODE (TREE_TYPE (v
)) != ARRAY_TYPE
)
287 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
288 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
290 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
294 v
= TREE_OPERAND (v
, 0);
295 if (TREE_CODE (v
) == COMPONENT_REF
296 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
/* Check whether any real field follows this one in the
   record; a trailing field is treated as potentially
   flexible.  */
299 tree fld_chain
= DECL_CHAIN (TREE_OPERAND (v
, 1));
300 for (; fld_chain
; fld_chain
= DECL_CHAIN (fld_chain
))
301 if (TREE_CODE (fld_chain
) == FIELD_DECL
)
309 v
= TREE_OPERAND (v
, 0);
311 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
312 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
314 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
318 v
= TREE_OPERAND (v
, 0);
336 var_size
= TYPE_SIZE_UNIT (TREE_TYPE (var
));
337 else if (!pt_var_size
)
338 return unknown
[object_size_type
];
340 var_size
= pt_var_size
;
/* Remaining bytes = chosen size minus the offset of the reference,
   clamped at zero when the offset already exceeds the size.  */
341 bytes
= compute_object_offset (TREE_OPERAND (ptr
, 0), var
);
342 if (bytes
!= error_mark_node
)
344 if (TREE_CODE (bytes
) == INTEGER_CST
345 && tree_int_cst_lt (var_size
, bytes
))
346 bytes
= size_zero_node
;
348 bytes
= size_binop (MINUS_EXPR
, var_size
, bytes
);
352 && TREE_CODE (pt_var
) == MEM_REF
353 && bytes
!= error_mark_node
)
355 tree bytes2
= compute_object_offset (TREE_OPERAND (ptr
, 0), pt_var
);
356 if (bytes2
!= error_mark_node
)
358 if (TREE_CODE (bytes2
) == INTEGER_CST
359 && tree_int_cst_lt (pt_var_size
, bytes2
))
360 bytes2
= size_zero_node
;
362 bytes2
= size_binop (MINUS_EXPR
, pt_var_size
, bytes2
);
363 bytes
= size_binop (MIN_EXPR
, bytes
, bytes2
);
367 else if (!pt_var_size
)
368 return unknown
[object_size_type
];
372 if (tree_fits_uhwi_p (bytes
))
373 return tree_to_uhwi (bytes
);
375 return unknown
[object_size_type
];
/* NOTE(review): damaged extraction -- braces, the early-exit when
   CALLEE is null, and parts of the builtin switch are missing.  The
   visible logic: read size-argument indices from the callee's
   "alloc_size" attribute (1-based in the attribute, converted to
   0-based here), with BUILT_IN_CALLOC/MALLOC/ALLOCA[_WITH_ALIGN]
   presumably setting them for the normal builtins; validate that the
   indicated arguments exist and are INTEGER_CSTs; size is arg1*arg2
   when two arguments are used (calloc-style), otherwise arg1 alone.  */
379 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
380 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
381 argument from __builtin_object_size. If unknown, return
382 unknown[object_size_type]. */
384 static unsigned HOST_WIDE_INT
385 alloc_object_size (const_gimple call
, int object_size_type
)
387 tree callee
, bytes
= NULL_TREE
;
389 int arg1
= -1, arg2
= -1;
391 gcc_assert (is_gimple_call (call
));
393 callee
= gimple_call_fndecl (call
);
395 return unknown
[object_size_type
];
/* alloc_size attribute positions are 1-based; convert to 0-based
   argument indices.  */
397 alloc_size
= lookup_attribute ("alloc_size",
398 TYPE_ATTRIBUTES (TREE_TYPE (callee
)));
399 if (alloc_size
&& TREE_VALUE (alloc_size
))
401 tree p
= TREE_VALUE (alloc_size
);
403 arg1
= TREE_INT_CST_LOW (TREE_VALUE (p
))-1;
405 arg2
= TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p
)))-1;
408 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
409 switch (DECL_FUNCTION_CODE (callee
))
411 case BUILT_IN_CALLOC
:
414 case BUILT_IN_MALLOC
:
415 case BUILT_IN_ALLOCA
:
416 case BUILT_IN_ALLOCA_WITH_ALIGN
:
/* Bail out unless the size argument(s) are present constant
   integers.  */
422 if (arg1
< 0 || arg1
>= (int)gimple_call_num_args (call
)
423 || TREE_CODE (gimple_call_arg (call
, arg1
)) != INTEGER_CST
425 && (arg2
>= (int)gimple_call_num_args (call
)
426 || TREE_CODE (gimple_call_arg (call
, arg2
)) != INTEGER_CST
)))
427 return unknown
[object_size_type
];
430 bytes
= size_binop (MULT_EXPR
,
431 fold_convert (sizetype
, gimple_call_arg (call
, arg1
)),
432 fold_convert (sizetype
, gimple_call_arg (call
, arg2
)));
434 bytes
= fold_convert (sizetype
, gimple_call_arg (call
, arg1
));
436 if (bytes
&& tree_fits_uhwi_p (bytes
))
437 return tree_to_uhwi (bytes
);
439 return unknown
[object_size_type
];
/* NOTE(review): damaged extraction -- the signature's return-type
   line, braces, and the final "return NULL_TREE" are missing.  For
   the listed mem/str builtins (and __builtin_assume_aligned), which
   return their first argument, hand back argument 0 so the caller can
   propagate its object size.  */
443 /* If object size is propagated from one of function's arguments directly
444 to its return value, return that argument for GIMPLE_CALL statement CALL.
445 Otherwise return NULL. */
448 pass_through_call (const_gimple call
)
450 tree callee
= gimple_call_fndecl (call
);
453 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
454 switch (DECL_FUNCTION_CODE (callee
))
456 case BUILT_IN_MEMCPY
:
457 case BUILT_IN_MEMMOVE
:
458 case BUILT_IN_MEMSET
:
459 case BUILT_IN_STRCPY
:
460 case BUILT_IN_STRNCPY
:
461 case BUILT_IN_STRCAT
:
462 case BUILT_IN_STRNCAT
:
463 case BUILT_IN_MEMCPY_CHK
:
464 case BUILT_IN_MEMMOVE_CHK
:
465 case BUILT_IN_MEMSET_CHK
:
466 case BUILT_IN_STRCPY_CHK
:
467 case BUILT_IN_STRNCPY_CHK
:
468 case BUILT_IN_STPNCPY_CHK
:
469 case BUILT_IN_STRCAT_CHK
:
470 case BUILT_IN_STRNCAT_CHK
:
471 case BUILT_IN_ASSUME_ALIGNED
:
472 if (gimple_call_num_args (call
) >= 1)
473 return gimple_call_arg (call
, 0);
/* NOTE(review): damaged extraction -- braces, loop headers, some dump
   statements and the else-paths are missing; commentary reflects only
   the visible fragments.  Public entry point: ADDR_EXPR pointers go
   straight to addr_object_size; SSA-name pointers run the two-pass
   worklist algorithm (pass 1 walks UD chains via
   collect_object_sizes_for; pass 2 re-collects everything left in
   osi.reexamine until a fixpoint, with a loop-detection pre-pass for
   the minimum modes), then the cached object_sizes entry is returned.
   Anything else yields unknown[object_size_type].  */
483 /* Compute __builtin_object_size value for PTR. OBJECT_SIZE_TYPE is the
484 second argument from __builtin_object_size. */
486 unsigned HOST_WIDE_INT
487 compute_builtin_object_size (tree ptr
, int object_size_type
)
489 gcc_assert (object_size_type
>= 0 && object_size_type
<= 3);
492 init_offset_limit ();
494 if (TREE_CODE (ptr
) == ADDR_EXPR
)
495 return addr_object_size (NULL
, ptr
, object_size_type
);
497 if (TREE_CODE (ptr
) == SSA_NAME
498 && POINTER_TYPE_P (TREE_TYPE (ptr
))
499 && object_sizes
[object_size_type
] != NULL
)
501 if (!bitmap_bit_p (computed
[object_size_type
], SSA_NAME_VERSION (ptr
)))
503 struct object_size_info osi
;
509 fprintf (dump_file
, "Computing %s %sobject size for ",
510 (object_size_type
& 2) ? "minimum" : "maximum",
511 (object_size_type
& 1) ? "sub" : "");
512 print_generic_expr (dump_file
, ptr
, dump_flags
);
513 fprintf (dump_file
, ":\n");
516 osi
.visited
= BITMAP_ALLOC (NULL
);
517 osi
.reexamine
= BITMAP_ALLOC (NULL
);
518 osi
.object_size_type
= object_size_type
;
523 /* First pass: walk UD chains, compute object sizes that
524 can be computed. osi.reexamine bitmap at the end will
525 contain what variables were found in dependency cycles
526 and therefore need to be reexamined. */
529 collect_object_sizes_for (&osi
, ptr
);
531 /* Second pass: keep recomputing object sizes of variables
532 that need reexamination, until no object sizes are
533 increased or all object sizes are computed. */
534 if (! bitmap_empty_p (osi
.reexamine
))
536 bitmap reexamine
= BITMAP_ALLOC (NULL
);
538 /* If looking for minimum instead of maximum object size,
539 detect cases where a pointer is increased in a loop.
540 Although even without this detection pass 2 would eventually
541 terminate, it could take a long time. If a pointer is
542 increasing this way, we need to assume 0 object size.
543 E.g. p = &buf[0]; while (cond) p = p + 4; */
544 if (object_size_type
& 2)
546 osi
.depths
= XCNEWVEC (unsigned int, num_ssa_names
);
547 osi
.stack
= XNEWVEC (unsigned int, num_ssa_names
);
550 /* collect_object_sizes_for is changing
551 osi.reexamine bitmap, so iterate over a copy. */
552 bitmap_copy (reexamine
, osi
.reexamine
);
553 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
554 if (bitmap_bit_p (osi
.reexamine
, i
))
555 check_for_plus_in_loops (&osi
, ssa_name (i
));
568 /* collect_object_sizes_for is changing
569 osi.reexamine bitmap, so iterate over a copy. */
570 bitmap_copy (reexamine
, osi
.reexamine
);
571 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
572 if (bitmap_bit_p (osi
.reexamine
, i
))
574 collect_object_sizes_for (&osi
, ssa_name (i
));
575 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
577 fprintf (dump_file
, "Reexamining ");
578 print_generic_expr (dump_file
, ssa_name (i
),
580 fprintf (dump_file
, "\n");
586 BITMAP_FREE (reexamine
);
/* Whatever is still flagged for reexamination has converged;
   mark it computed.  */
588 EXECUTE_IF_SET_IN_BITMAP (osi
.reexamine
, 0, i
, bi
)
589 bitmap_set_bit (computed
[object_size_type
], i
);
591 /* Debugging dumps. */
594 EXECUTE_IF_SET_IN_BITMAP (osi
.visited
, 0, i
, bi
)
595 if (object_sizes
[object_size_type
][i
]
596 != unknown
[object_size_type
])
598 print_generic_expr (dump_file
, ssa_name (i
),
601 ": %s %sobject size "
602 HOST_WIDE_INT_PRINT_UNSIGNED
"\n",
603 (object_size_type
& 2) ? "minimum" : "maximum",
604 (object_size_type
& 1) ? "sub" : "",
605 object_sizes
[object_size_type
][i
]);
609 BITMAP_FREE (osi
.reexamine
);
610 BITMAP_FREE (osi
.visited
);
613 return object_sizes
[object_size_type
][SSA_NAME_VERSION (ptr
)];
616 return unknown
[object_size_type
];
/* NOTE(review): damaged extraction -- signature line, braces and the
   else-branch markers are missing.  Records the size of PTR when it is
   defined to the non-SSA expression VALUE: strip a WITH_SIZE_EXPR
   wrapper, size ADDR_EXPRs via addr_object_size, otherwise use the
   unknown sentinel; maximum modes (bit 2 clear) keep the larger of the
   stored and new value, minimum modes the smaller.  Only valid in
   pass 0 and only while the stored size is not already unknown.  */
619 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
622 expr_object_size (struct object_size_info
*osi
, tree ptr
, tree value
)
624 int object_size_type
= osi
->object_size_type
;
625 unsigned int varno
= SSA_NAME_VERSION (ptr
);
626 unsigned HOST_WIDE_INT bytes
;
628 gcc_assert (object_sizes
[object_size_type
][varno
]
629 != unknown
[object_size_type
]);
630 gcc_assert (osi
->pass
== 0);
632 if (TREE_CODE (value
) == WITH_SIZE_EXPR
)
633 value
= TREE_OPERAND (value
, 0);
635 /* Pointer variables should have been handled by merge_object_sizes. */
636 gcc_assert (TREE_CODE (value
) != SSA_NAME
637 || !POINTER_TYPE_P (TREE_TYPE (value
)));
639 if (TREE_CODE (value
) == ADDR_EXPR
)
640 bytes
= addr_object_size (osi
, value
, object_size_type
);
642 bytes
= unknown
[object_size_type
];
644 if ((object_size_type
& 2) == 0)
646 if (object_sizes
[object_size_type
][varno
] < bytes
)
647 object_sizes
[object_size_type
][varno
] = bytes
;
651 if (object_sizes
[object_size_type
][varno
] > bytes
)
652 object_sizes
[object_size_type
][varno
] = bytes
;
/* NOTE(review): damaged extraction -- return-type line and braces are
   missing.  Records the size of PTR when defined by an allocation
   CALL: delegate to alloc_object_size, then merge into the table with
   the same max-for-maximum / min-for-minimum rule used by
   expr_object_size.  Pass-0 only.  */
657 /* Compute object_sizes for PTR, defined to the result of a call. */
660 call_object_size (struct object_size_info
*osi
, tree ptr
, gimple call
)
662 int object_size_type
= osi
->object_size_type
;
663 unsigned int varno
= SSA_NAME_VERSION (ptr
);
664 unsigned HOST_WIDE_INT bytes
;
666 gcc_assert (is_gimple_call (call
));
668 gcc_assert (object_sizes
[object_size_type
][varno
]
669 != unknown
[object_size_type
]);
670 gcc_assert (osi
->pass
== 0);
672 bytes
= alloc_object_size (call
, object_size_type
);
674 if ((object_size_type
& 2) == 0)
676 if (object_sizes
[object_size_type
][varno
] < bytes
)
677 object_sizes
[object_size_type
][varno
] = bytes
;
681 if (object_sizes
[object_size_type
][varno
] > bytes
)
682 object_sizes
[object_size_type
][varno
] = bytes
;
/* NOTE(review): damaged extraction -- return-type line and braces are
   missing.  Forces PTR's entry toward the unknown sentinel using the
   same directional merge as expr_object_size (max modes grow, min
   modes shrink).  Pass-0 only.  */
687 /* Compute object_sizes for PTR, defined to an unknown value. */
690 unknown_object_size (struct object_size_info
*osi
, tree ptr
)
692 int object_size_type
= osi
->object_size_type
;
693 unsigned int varno
= SSA_NAME_VERSION (ptr
);
694 unsigned HOST_WIDE_INT bytes
;
696 gcc_assert (object_sizes
[object_size_type
][varno
]
697 != unknown
[object_size_type
]);
698 gcc_assert (osi
->pass
== 0);
700 bytes
= unknown
[object_size_type
];
702 if ((object_size_type
& 2) == 0)
704 if (object_sizes
[object_size_type
][varno
] < bytes
)
705 object_sizes
[object_size_type
][varno
] = bytes
;
709 if (object_sizes
[object_size_type
][varno
] > bytes
)
710 object_sizes
[object_size_type
][varno
] = bytes
;
/* NOTE(review): damaged extraction -- signature's return-type line,
   braces, early returns and the guard around the pass-0 recursion are
   missing.  Folds ORIG's size minus OFFSET into DEST's entry: offsets
   at or beyond offset_limit force DEST to unknown; otherwise ORIG is
   (re)collected, its size reduced by OFFSET (clamped at zero), and
   merged with the usual max/min rule.  Returns whether ORIG is still
   in the reexamine set, i.e. whether DEST may need another round.  */
715 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
716 the object size might need reexamination later. */
719 merge_object_sizes (struct object_size_info
*osi
, tree dest
, tree orig
,
720 unsigned HOST_WIDE_INT offset
)
722 int object_size_type
= osi
->object_size_type
;
723 unsigned int varno
= SSA_NAME_VERSION (dest
);
724 unsigned HOST_WIDE_INT orig_bytes
;
726 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
728 if (offset
>= offset_limit
)
730 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
735 collect_object_sizes_for (osi
, orig
);
737 orig_bytes
= object_sizes
[object_size_type
][SSA_NAME_VERSION (orig
)];
738 if (orig_bytes
!= unknown
[object_size_type
])
739 orig_bytes
= (offset
> orig_bytes
)
740 ? (unsigned HOST_WIDE_INT
) 0 : orig_bytes
- offset
;
742 if ((object_size_type
& 2) == 0)
744 if (object_sizes
[object_size_type
][varno
] < orig_bytes
)
746 object_sizes
[object_size_type
][varno
] = orig_bytes
;
752 if (object_sizes
[object_size_type
][varno
] > orig_bytes
)
754 object_sizes
[object_size_type
][varno
] = orig_bytes
;
758 return bitmap_bit_p (osi
->reexamine
, SSA_NAME_VERSION (orig
));
/* NOTE(review): damaged extraction -- the op0/op1 declarations,
   braces, several else-branches (including the off<=bytes subtraction
   visible only by its absence between original lines 811 and 818) and
   the return statement are missing.  Handles VAR = PTR p+ CONST and
   VAR = &MEM_REF[ptr + CONST]: an SSA-name base delegates to
   merge_object_sizes with the offset; an ADDR_EXPR base is sized via
   addr_object_size and reduced by the offset, with offsets past
   offset_limit (or past the size) degrading to unknown/zero; results
   merge with the usual max/min rule.  */
762 /* Compute object_sizes for VAR, defined to the result of an assignment
763 with operator POINTER_PLUS_EXPR. Return true if the object size might
764 need reexamination later. */
767 plus_stmt_object_size (struct object_size_info
*osi
, tree var
, gimple stmt
)
769 int object_size_type
= osi
->object_size_type
;
770 unsigned int varno
= SSA_NAME_VERSION (var
);
771 unsigned HOST_WIDE_INT bytes
;
774 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
776 op0
= gimple_assign_rhs1 (stmt
);
777 op1
= gimple_assign_rhs2 (stmt
);
779 else if (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
)
781 tree rhs
= TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
782 gcc_assert (TREE_CODE (rhs
) == MEM_REF
);
783 op0
= TREE_OPERAND (rhs
, 0);
784 op1
= TREE_OPERAND (rhs
, 1);
789 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
792 /* Handle PTR + OFFSET here. */
793 if (TREE_CODE (op1
) == INTEGER_CST
794 && (TREE_CODE (op0
) == SSA_NAME
795 || TREE_CODE (op0
) == ADDR_EXPR
))
797 if (! tree_fits_uhwi_p (op1
))
798 bytes
= unknown
[object_size_type
];
799 else if (TREE_CODE (op0
) == SSA_NAME
)
800 return merge_object_sizes (osi
, var
, op0
, tree_to_uhwi (op1
));
803 unsigned HOST_WIDE_INT off
= tree_to_uhwi (op1
);
805 /* op0 will be ADDR_EXPR here. */
806 bytes
= addr_object_size (osi
, op0
, object_size_type
);
807 if (bytes
== unknown
[object_size_type
])
809 else if (off
> offset_limit
)
810 bytes
= unknown
[object_size_type
];
811 else if (off
> bytes
)
818 bytes
= unknown
[object_size_type
];
820 if ((object_size_type
& 2) == 0)
822 if (object_sizes
[object_size_type
][varno
] < bytes
)
823 object_sizes
[object_size_type
][varno
] = bytes
;
827 if (object_sizes
[object_size_type
][varno
] > bytes
)
828 object_sizes
[object_size_type
][varno
] = bytes
;
/* NOTE(review): damaged extraction -- return-type line, braces, the
   early return when the entry is already unknown, and the final
   "return reexamine" are missing.  Merges both arms of VAR = c ?
   then_ : else_: SSA-name arms go through merge_object_sizes
   (accumulating the reexamine flag), other arms through
   expr_object_size.  */
834 /* Compute object_sizes for VAR, defined at STMT, which is
835 a COND_EXPR. Return true if the object size might need reexamination
839 cond_expr_object_size (struct object_size_info
*osi
, tree var
, gimple stmt
)
842 int object_size_type
= osi
->object_size_type
;
843 unsigned int varno
= SSA_NAME_VERSION (var
);
844 bool reexamine
= false;
846 gcc_assert (gimple_assign_rhs_code (stmt
) == COND_EXPR
);
848 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
851 then_
= gimple_assign_rhs2 (stmt
);
852 else_
= gimple_assign_rhs3 (stmt
);
854 if (TREE_CODE (then_
) == SSA_NAME
)
855 reexamine
|= merge_object_sizes (osi
, var
, then_
, 0);
857 expr_object_size (osi
, var
, then_
);
859 if (TREE_CODE (else_
) == SSA_NAME
)
860 reexamine
|= merge_object_sizes (osi
, var
, else_
, 0);
862 expr_object_size (osi
, var
, else_
);
/* NOTE(review): damaged extraction -- the function's signature line,
   local declarations (stmt, reexamine, i), braces, the GIMPLE_ASSIGN /
   GIMPLE_CALL / GIMPLE_ASM / GIMPLE_NOP / GIMPLE_PHI case labels and
   several guards are missing; the case boundaries below are inferred
   from the fragments.  Core UD-chain walker: seeds a first-visit entry
   (all-ones for minimum modes, zero for maximum), flags repeat visits
   as dependency loops for reexamination, then dispatches on the
   defining statement -- pointer-plus/ADDR-of-MEM_REF, COND_EXPR,
   copies, pass-through calls, allocation calls, asm (unknown),
   uninitialized names/PARM_DECL defaults, and PHI nodes merging every
   argument.  Converged entries are marked computed; otherwise the
   variable stays in osi->reexamine.  */
867 /* Compute object sizes for VAR.
868 For ADDR_EXPR an object size is the number of remaining bytes
869 to the end of the object (where what is considered an object depends on
870 OSI->object_size_type).
871 For allocation GIMPLE_CALL like malloc or calloc object size is the size
873 For POINTER_PLUS_EXPR where second operand is a constant integer,
874 object size is object size of the first operand minus the constant.
875 If the constant is bigger than the number of remaining bytes until the
876 end of the object, object size is 0, but if it is instead a pointer
877 subtraction, object size is unknown[object_size_type].
878 To differentiate addition from subtraction, ADDR_EXPR returns
879 unknown[object_size_type] for all objects bigger than half of the address
880 space, and constants less than half of the address space are considered
881 addition, while bigger constants subtraction.
882 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
883 object size is object size of that argument.
884 Otherwise, object size is the maximum of object sizes of variables
885 that it might be set to. */
888 collect_object_sizes_for (struct object_size_info
*osi
, tree var
)
890 int object_size_type
= osi
->object_size_type
;
891 unsigned int varno
= SSA_NAME_VERSION (var
);
895 if (bitmap_bit_p (computed
[object_size_type
], varno
))
/* First visit: seed with the identity of the max/min merge.  */
900 if (bitmap_set_bit (osi
->visited
, varno
))
902 object_sizes
[object_size_type
][varno
]
903 = (object_size_type
& 2) ? -1 : 0;
907 /* Found a dependency loop. Mark the variable for later
909 bitmap_set_bit (osi
->reexamine
, varno
);
910 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
912 fprintf (dump_file
, "Found a dependency loop at ");
913 print_generic_expr (dump_file
, var
, dump_flags
);
914 fprintf (dump_file
, "\n");
920 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
922 fprintf (dump_file
, "Visiting use-def links for ");
923 print_generic_expr (dump_file
, var
, dump_flags
);
924 fprintf (dump_file
, "\n");
927 stmt
= SSA_NAME_DEF_STMT (var
);
930 switch (gimple_code (stmt
))
934 tree rhs
= gimple_assign_rhs1 (stmt
);
935 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
936 || (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
937 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
))
938 reexamine
= plus_stmt_object_size (osi
, var
, stmt
);
939 else if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
940 reexamine
= cond_expr_object_size (osi
, var
, stmt
);
941 else if (gimple_assign_single_p (stmt
)
942 || gimple_assign_unary_nop_p (stmt
))
944 if (TREE_CODE (rhs
) == SSA_NAME
945 && POINTER_TYPE_P (TREE_TYPE (rhs
)))
946 reexamine
= merge_object_sizes (osi
, var
, rhs
, 0);
948 expr_object_size (osi
, var
, rhs
);
951 unknown_object_size (osi
, var
);
/* Calls: propagate through memcpy-like builtins, otherwise size
   the allocation.  */
957 tree arg
= pass_through_call (stmt
);
960 if (TREE_CODE (arg
) == SSA_NAME
961 && POINTER_TYPE_P (TREE_TYPE (arg
)))
962 reexamine
= merge_object_sizes (osi
, var
, arg
, 0);
964 expr_object_size (osi
, var
, arg
);
967 call_object_size (osi
, var
, stmt
);
972 /* Pointers defined by __asm__ statements can point anywhere. */
973 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
977 if (SSA_NAME_VAR (var
)
978 && TREE_CODE (SSA_NAME_VAR (var
)) == PARM_DECL
)
979 expr_object_size (osi
, var
, SSA_NAME_VAR (var
));
981 /* Uninitialized SSA names point nowhere. */
982 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
/* PHI: merge every incoming argument, stopping early once the
   entry degenerates to unknown.  */
989 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
991 tree rhs
= gimple_phi_arg (stmt
, i
)->def
;
993 if (object_sizes
[object_size_type
][varno
]
994 == unknown
[object_size_type
])
997 if (TREE_CODE (rhs
) == SSA_NAME
)
998 reexamine
|= merge_object_sizes (osi
, var
, rhs
, 0);
999 else if (osi
->pass
== 0)
1000 expr_object_size (osi
, var
, rhs
);
1010 || object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
1012 bitmap_set_bit (computed
[object_size_type
], varno
);
1013 bitmap_clear_bit (osi
->reexamine
, varno
);
1017 bitmap_set_bit (osi
->reexamine
, varno
);
1018 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1020 fprintf (dump_file
, "Need to reexamine ");
1021 print_generic_expr (dump_file
, var
, dump_flags
);
1022 fprintf (dump_file
, "\n");
/* NOTE(review): damaged extraction -- the DEPTH parameter line,
   braces, the stack-pop body inside the for loop, and switch case
   labels are missing.  Depth-marking DFS over def-stmts: revisiting a
   variable at a *different* depth means a loop that adds a nonzero
   constant to the pointer, so every entry on the work stack is forced
   to size 0 and marked computed.  Copies recurse at the same depth,
   POINTER_PLUS bumps depth when its constant is nonzero, pass-through
   calls and PHI arguments recurse unchanged; the depth mark is cleared
   on the way out.  */
1028 /* Helper function for check_for_plus_in_loops. Called recursively
1032 check_for_plus_in_loops_1 (struct object_size_info
*osi
, tree var
,
1035 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1036 unsigned int varno
= SSA_NAME_VERSION (var
);
1038 if (osi
->depths
[varno
])
1040 if (osi
->depths
[varno
] != depth
)
1044 /* Found a loop involving pointer addition. */
1045 for (sp
= osi
->tos
; sp
> osi
->stack
; )
1048 bitmap_clear_bit (osi
->reexamine
, *sp
);
1049 bitmap_set_bit (computed
[osi
->object_size_type
], *sp
);
1050 object_sizes
[osi
->object_size_type
][*sp
] = 0;
1057 else if (! bitmap_bit_p (osi
->reexamine
, varno
))
1060 osi
->depths
[varno
] = depth
;
1061 *osi
->tos
++ = varno
;
1063 switch (gimple_code (stmt
))
1068 if ((gimple_assign_single_p (stmt
)
1069 || gimple_assign_unary_nop_p (stmt
))
1070 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
1072 tree rhs
= gimple_assign_rhs1 (stmt
);
1074 check_for_plus_in_loops_1 (osi
, rhs
, depth
);
1076 else if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1078 tree basevar
= gimple_assign_rhs1 (stmt
);
1079 tree cst
= gimple_assign_rhs2 (stmt
);
1081 gcc_assert (TREE_CODE (cst
) == INTEGER_CST
);
/* Nonzero additions deepen the mark so a cycle through them is
   detected as growth.  */
1083 check_for_plus_in_loops_1 (osi
, basevar
,
1084 depth
+ !integer_zerop (cst
));
1093 tree arg
= pass_through_call (stmt
);
1096 if (TREE_CODE (arg
) == SSA_NAME
)
1097 check_for_plus_in_loops_1 (osi
, arg
, depth
);
1108 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1110 tree rhs
= gimple_phi_arg (stmt
, i
)->def
;
1112 if (TREE_CODE (rhs
) == SSA_NAME
)
1113 check_for_plus_in_loops_1 (osi
, rhs
, depth
);
1122 osi
->depths
[varno
] = 0;
/* NOTE(review): damaged extraction -- return-type line, braces and
   the stack-reset after the walk are missing.  Entry point for the
   minimum-mode loop detection: when VAR is defined by a nonzero
   POINTER_PLUS, seed the base variable at depth 1 on the stack and
   start the recursive walk at depth 2.  */
1127 /* Check if some pointer we are computing object size of is being increased
1128 within a loop. If yes, assume all the SSA variables participating in
1129 that loop have minimum object sizes 0. */
1132 check_for_plus_in_loops (struct object_size_info
*osi
, tree var
)
1134 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1136 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1137 and looked for a POINTER_PLUS_EXPR in the pass-through
1138 argument, if any. In GIMPLE, however, such an expression
1139 is not a valid call operand. */
1141 if (is_gimple_assign (stmt
)
1142 && gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1144 tree basevar
= gimple_assign_rhs1 (stmt
);
1145 tree cst
= gimple_assign_rhs2 (stmt
);
1147 gcc_assert (TREE_CODE (cst
) == INTEGER_CST
);
1149 if (integer_zerop (cst
))
1152 osi
->depths
[SSA_NAME_VERSION (basevar
)] = 1;
1153 *osi
->tos
++ = SSA_NAME_VERSION (basevar
);
1154 check_for_plus_in_loops_1 (osi
, var
, 2);
1155 osi
->depths
[SSA_NAME_VERSION (basevar
)] = 0;
/* NOTE(review): damaged extraction -- return-type line, braces and
   the early return's body are missing.  Lazily allocates the four
   per-mode size arrays (sized by num_ssa_names) and computed bitmaps,
   then seeds offset_limit; a non-null object_sizes[0] means setup
   already ran.  */
1161 /* Initialize data structures for the object size computation. */
1164 init_object_sizes (void)
1166 int object_size_type
;
1168 if (object_sizes
[0])
1171 for (object_size_type
= 0; object_size_type
<= 3; object_size_type
++)
1173 object_sizes
[object_size_type
] = XNEWVEC (unsigned HOST_WIDE_INT
, num_ssa_names
);
1174 computed
[object_size_type
] = BITMAP_ALLOC (NULL
);
1177 init_offset_limit ();
/* NOTE(review): damaged extraction -- return-type line and braces are
   missing.  Releases the per-mode size arrays and computed bitmaps
   and nulls the array pointers so init_object_sizes can run again.  */
1181 /* Destroy data structures after the object size computation. */
1184 fini_object_sizes (void)
1186 int object_size_type
;
1188 for (object_size_type
= 0; object_size_type
<= 3; object_size_type
++)
1190 free (object_sizes
[object_size_type
]);
1191 BITMAP_FREE (computed
[object_size_type
]);
1192 object_sizes
[object_size_type
] = NULL
;
/* NOTE(review): damaged extraction -- the return-type line, the
   FOR_EACH_BB-style outer loop, braces, `continue`s and the final
   return are missing.  The pass body: scan every statement for calls
   to __builtin_object_size, try fold_call_stmt first, and when folding
   fails substitute the conservative constant for the requested mode
   ((size_t)-1 for types 0/1, zero for 2/3), rewriting the call via
   update_call_from_tree with optional dump output; allocations are
   torn down with fini_object_sizes at the end.  */
1197 /* Simple pass to optimize all __builtin_object_size () builtins. */
1200 compute_object_sizes (void)
1205 gimple_stmt_iterator i
;
1206 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1208 tree callee
, result
;
1209 gimple call
= gsi_stmt (i
);
1211 if (gimple_code (call
) != GIMPLE_CALL
)
1214 callee
= gimple_call_fndecl (call
);
1216 || DECL_BUILT_IN_CLASS (callee
) != BUILT_IN_NORMAL
1217 || DECL_FUNCTION_CODE (callee
) != BUILT_IN_OBJECT_SIZE
)
1220 init_object_sizes ();
1221 result
= fold_call_stmt (call
, false);
/* Folding failed: fall back to the mode's safe answer.  */
1224 if (gimple_call_num_args (call
) == 2
1225 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call
, 0))))
1227 tree ost
= gimple_call_arg (call
, 1);
1229 if (tree_fits_uhwi_p (ost
))
1231 unsigned HOST_WIDE_INT object_size_type
1232 = tree_to_uhwi (ost
);
1234 if (object_size_type
< 2)
1235 result
= fold_convert (size_type_node
,
1236 integer_minus_one_node
);
1237 else if (object_size_type
< 4)
1238 result
= build_zero_cst (size_type_node
);
1246 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1248 fprintf (dump_file
, "Simplified\n ");
1249 print_gimple_stmt (dump_file
, call
, 0, dump_flags
);
1252 if (!update_call_from_tree (&i
, result
))
1255 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1257 fprintf (dump_file
, "to\n ");
1258 print_gimple_stmt (dump_file
, gsi_stmt (i
), 0, dump_flags
);
1259 fprintf (dump_file
, "\n");
1264 fini_object_sizes ();
/* Pass metadata: a gated-off GIMPLE pass (has_gate false, has_execute
   true) requiring CFG+SSA and verifying SSA afterwards.
   NOTE(review): the extraction drops the opening/closing braces and
   the name/timevar entries between the visible initializers.  */
1270 const pass_data pass_data_object_sizes
=
1272 GIMPLE_PASS
, /* type */
1274 OPTGROUP_NONE
, /* optinfo_flags */
1275 false, /* has_gate */
1276 true, /* has_execute */
1277 TV_NONE
, /* tv_id */
1278 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1279 0, /* properties_provided */
1280 0, /* properties_destroyed */
1281 0, /* todo_flags_start */
1282 TODO_verify_ssa
, /* todo_flags_finish */
/* Pass wrapper: constructor binds pass_data_object_sizes; clone ()
   and execute () delegate to the machinery above.  NOTE(review): the
   extraction drops the class's opening brace and access specifiers.  */
1285 class pass_object_sizes
: public gimple_opt_pass
1288 pass_object_sizes (gcc::context
*ctxt
)
1289 : gimple_opt_pass (pass_data_object_sizes
, ctxt
)
1292 /* opt_pass methods: */
1293 opt_pass
* clone () { return new pass_object_sizes (m_ctxt
); }
1294 unsigned int execute () { return compute_object_sizes (); }
1296 }; // class pass_object_sizes
1301 make_pass_object_sizes (gcc::context
*ctxt
)
1303 return new pass_object_sizes (ctxt
);