1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
26 #include "diagnostic-core.h"
27 #include "gimple-pretty-print.h"
30 #include "gimple-iterator.h"
31 #include "gimple-ssa.h"
32 #include "tree-ssanames.h"
33 #include "tree-pass.h"
34 #include "tree-ssa-propagate.h"
35 #include "tree-phinodes.h"
36 #include "ssa-iterators.h"
38 struct object_size_info
41 bitmap visited
, reexamine
;
45 unsigned int *stack
, *tos
;
/* Sentinel returned when the object size cannot be determined.
   Indexed by object_size_type: (unsigned HOST_WIDE_INT) -1 for the
   maximum-size queries (types 0 and 1), 0 for the minimum-size
   queries (types 2 and 3).  */
48 static const unsigned HOST_WIDE_INT unknown
[4] = { -1, -1, 0, 0 };
50 static tree
compute_object_offset (const_tree
, const_tree
);
51 static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info
*,
53 static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple
, int);
54 static tree
pass_through_call (const_gimple
);
55 static void collect_object_sizes_for (struct object_size_info
*, tree
);
56 static void expr_object_size (struct object_size_info
*, tree
, tree
);
57 static bool merge_object_sizes (struct object_size_info
*, tree
, tree
,
58 unsigned HOST_WIDE_INT
);
59 static bool plus_stmt_object_size (struct object_size_info
*, tree
, gimple
);
60 static bool cond_expr_object_size (struct object_size_info
*, tree
, gimple
);
61 static unsigned int compute_object_sizes (void);
62 static void init_offset_limit (void);
63 static void check_for_plus_in_loops (struct object_size_info
*, tree
);
64 static void check_for_plus_in_loops_1 (struct object_size_info
*, tree
,
67 /* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
69 object_sizes[1] is upper bound for number of bytes till the end of
70 the subobject (innermost array or field with address taken).
71 object_sizes[2] is lower bound for number of bytes till the end of
72 the object and object_sizes[3] lower bound for subobject.
   One array per object_size_type, indexed by SSA_NAME_VERSION;
   allocated lazily in init_object_sizes.  */
73 static unsigned HOST_WIDE_INT
*object_sizes
[4];
75 /* Bitmaps what object sizes have been computed already.  One bitmap
   per object_size_type; a set bit at SSA_NAME_VERSION (var) means
   object_sizes[object_size_type][version] holds a final value.  */
76 static bitmap computed
[4];
78 /* Maximum value of offset we consider to be addition.
   Set by init_offset_limit from TYPE_MAX_VALUE (sizetype); offsets at
   or above this are treated as pointer subtraction, not addition.  */
79 static unsigned HOST_WIDE_INT offset_limit
;
82 /* Initialize OFFSET_LIMIT variable. */
84 init_offset_limit (void)
86 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype
)))
87 offset_limit
= tree_to_uhwi (TYPE_MAX_VALUE (sizetype
));
94 /* Compute offset of EXPR within VAR. Return error_mark_node
98 compute_object_offset (const_tree expr
, const_tree var
)
100 enum tree_code code
= PLUS_EXPR
;
104 return size_zero_node
;
106 switch (TREE_CODE (expr
))
109 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
110 if (base
== error_mark_node
)
113 t
= TREE_OPERAND (expr
, 1);
114 off
= size_binop (PLUS_EXPR
, DECL_FIELD_OFFSET (t
),
115 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t
))
121 case VIEW_CONVERT_EXPR
:
122 case NON_LVALUE_EXPR
:
123 return compute_object_offset (TREE_OPERAND (expr
, 0), var
);
126 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
127 if (base
== error_mark_node
)
130 off
= TYPE_SIZE_UNIT (TREE_TYPE (expr
));
134 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
135 if (base
== error_mark_node
)
138 t
= TREE_OPERAND (expr
, 1);
139 if (TREE_CODE (t
) == INTEGER_CST
&& tree_int_cst_sgn (t
) < 0)
142 t
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (t
), t
);
144 t
= fold_convert (sizetype
, t
);
145 off
= size_binop (MULT_EXPR
, TYPE_SIZE_UNIT (TREE_TYPE (expr
)), t
);
149 gcc_assert (TREE_CODE (TREE_OPERAND (expr
, 0)) == ADDR_EXPR
);
150 return double_int_to_tree (sizetype
, mem_ref_offset (expr
));
153 return error_mark_node
;
156 return size_binop (code
, base
, off
);
160 /* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
161 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
162 If unknown, return unknown[object_size_type]. */
164 static unsigned HOST_WIDE_INT
165 addr_object_size (struct object_size_info
*osi
, const_tree ptr
,
166 int object_size_type
)
168 tree pt_var
, pt_var_size
= NULL_TREE
, var_size
, bytes
;
170 gcc_assert (TREE_CODE (ptr
) == ADDR_EXPR
);
172 pt_var
= TREE_OPERAND (ptr
, 0);
173 while (handled_component_p (pt_var
))
174 pt_var
= TREE_OPERAND (pt_var
, 0);
177 && TREE_CODE (pt_var
) == MEM_REF
)
179 unsigned HOST_WIDE_INT sz
;
181 if (!osi
|| (object_size_type
& 1) != 0
182 || TREE_CODE (TREE_OPERAND (pt_var
, 0)) != SSA_NAME
)
184 sz
= compute_builtin_object_size (TREE_OPERAND (pt_var
, 0),
185 object_size_type
& ~1);
189 tree var
= TREE_OPERAND (pt_var
, 0);
191 collect_object_sizes_for (osi
, var
);
192 if (bitmap_bit_p (computed
[object_size_type
],
193 SSA_NAME_VERSION (var
)))
194 sz
= object_sizes
[object_size_type
][SSA_NAME_VERSION (var
)];
196 sz
= unknown
[object_size_type
];
198 if (sz
!= unknown
[object_size_type
])
200 double_int dsz
= double_int::from_uhwi (sz
) - mem_ref_offset (pt_var
);
201 if (dsz
.is_negative ())
203 else if (dsz
.fits_uhwi ())
206 sz
= unknown
[object_size_type
];
209 if (sz
!= unknown
[object_size_type
] && sz
< offset_limit
)
210 pt_var_size
= size_int (sz
);
214 && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var
))
215 && tree_to_uhwi (DECL_SIZE_UNIT (pt_var
)) < offset_limit
)
216 pt_var_size
= DECL_SIZE_UNIT (pt_var
);
218 && TREE_CODE (pt_var
) == STRING_CST
219 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var
))
220 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var
)))
221 && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var
)))
223 pt_var_size
= TYPE_SIZE_UNIT (TREE_TYPE (pt_var
));
225 return unknown
[object_size_type
];
227 if (pt_var
!= TREE_OPERAND (ptr
, 0))
231 if (object_size_type
& 1)
233 var
= TREE_OPERAND (ptr
, 0);
236 && TREE_CODE (var
) != BIT_FIELD_REF
237 && TREE_CODE (var
) != COMPONENT_REF
238 && TREE_CODE (var
) != ARRAY_REF
239 && TREE_CODE (var
) != ARRAY_RANGE_REF
240 && TREE_CODE (var
) != REALPART_EXPR
241 && TREE_CODE (var
) != IMAGPART_EXPR
)
242 var
= TREE_OPERAND (var
, 0);
243 if (var
!= pt_var
&& TREE_CODE (var
) == ARRAY_REF
)
244 var
= TREE_OPERAND (var
, 0);
245 if (! TYPE_SIZE_UNIT (TREE_TYPE (var
))
246 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var
)))
248 && tree_int_cst_lt (pt_var_size
,
249 TYPE_SIZE_UNIT (TREE_TYPE (var
)))))
251 else if (var
!= pt_var
&& TREE_CODE (pt_var
) == MEM_REF
)
254 /* For &X->fld, compute object size only if fld isn't the last
255 field, as struct { int i; char c[1]; } is often used instead
256 of flexible array member. */
257 while (v
&& v
!= pt_var
)
258 switch (TREE_CODE (v
))
261 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v
, 0)))
262 && TREE_CODE (TREE_OPERAND (v
, 1)) == INTEGER_CST
)
265 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v
, 0)));
267 && TYPE_MAX_VALUE (domain
)
268 && TREE_CODE (TYPE_MAX_VALUE (domain
))
270 && tree_int_cst_lt (TREE_OPERAND (v
, 1),
271 TYPE_MAX_VALUE (domain
)))
277 v
= TREE_OPERAND (v
, 0);
284 if (TREE_CODE (TREE_TYPE (v
)) != ARRAY_TYPE
)
289 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
290 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
292 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
296 v
= TREE_OPERAND (v
, 0);
297 if (TREE_CODE (v
) == COMPONENT_REF
298 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
301 tree fld_chain
= DECL_CHAIN (TREE_OPERAND (v
, 1));
302 for (; fld_chain
; fld_chain
= DECL_CHAIN (fld_chain
))
303 if (TREE_CODE (fld_chain
) == FIELD_DECL
)
311 v
= TREE_OPERAND (v
, 0);
313 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
314 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
316 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
320 v
= TREE_OPERAND (v
, 0);
338 var_size
= TYPE_SIZE_UNIT (TREE_TYPE (var
));
339 else if (!pt_var_size
)
340 return unknown
[object_size_type
];
342 var_size
= pt_var_size
;
343 bytes
= compute_object_offset (TREE_OPERAND (ptr
, 0), var
);
344 if (bytes
!= error_mark_node
)
346 if (TREE_CODE (bytes
) == INTEGER_CST
347 && tree_int_cst_lt (var_size
, bytes
))
348 bytes
= size_zero_node
;
350 bytes
= size_binop (MINUS_EXPR
, var_size
, bytes
);
354 && TREE_CODE (pt_var
) == MEM_REF
355 && bytes
!= error_mark_node
)
357 tree bytes2
= compute_object_offset (TREE_OPERAND (ptr
, 0), pt_var
);
358 if (bytes2
!= error_mark_node
)
360 if (TREE_CODE (bytes2
) == INTEGER_CST
361 && tree_int_cst_lt (pt_var_size
, bytes2
))
362 bytes2
= size_zero_node
;
364 bytes2
= size_binop (MINUS_EXPR
, pt_var_size
, bytes2
);
365 bytes
= size_binop (MIN_EXPR
, bytes
, bytes2
);
369 else if (!pt_var_size
)
370 return unknown
[object_size_type
];
374 if (tree_fits_uhwi_p (bytes
))
375 return tree_to_uhwi (bytes
);
377 return unknown
[object_size_type
];
381 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
382 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
383 argument from __builtin_object_size. If unknown, return
384 unknown[object_size_type]. */
386 static unsigned HOST_WIDE_INT
387 alloc_object_size (const_gimple call
, int object_size_type
)
389 tree callee
, bytes
= NULL_TREE
;
391 int arg1
= -1, arg2
= -1;
393 gcc_assert (is_gimple_call (call
));
395 callee
= gimple_call_fndecl (call
);
397 return unknown
[object_size_type
];
399 alloc_size
= lookup_attribute ("alloc_size",
400 TYPE_ATTRIBUTES (TREE_TYPE (callee
)));
401 if (alloc_size
&& TREE_VALUE (alloc_size
))
403 tree p
= TREE_VALUE (alloc_size
);
405 arg1
= TREE_INT_CST_LOW (TREE_VALUE (p
))-1;
407 arg2
= TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p
)))-1;
410 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
411 switch (DECL_FUNCTION_CODE (callee
))
413 case BUILT_IN_CALLOC
:
416 case BUILT_IN_MALLOC
:
417 case BUILT_IN_ALLOCA
:
418 case BUILT_IN_ALLOCA_WITH_ALIGN
:
424 if (arg1
< 0 || arg1
>= (int)gimple_call_num_args (call
)
425 || TREE_CODE (gimple_call_arg (call
, arg1
)) != INTEGER_CST
427 && (arg2
>= (int)gimple_call_num_args (call
)
428 || TREE_CODE (gimple_call_arg (call
, arg2
)) != INTEGER_CST
)))
429 return unknown
[object_size_type
];
432 bytes
= size_binop (MULT_EXPR
,
433 fold_convert (sizetype
, gimple_call_arg (call
, arg1
)),
434 fold_convert (sizetype
, gimple_call_arg (call
, arg2
)));
436 bytes
= fold_convert (sizetype
, gimple_call_arg (call
, arg1
));
438 if (bytes
&& tree_fits_uhwi_p (bytes
))
439 return tree_to_uhwi (bytes
);
441 return unknown
[object_size_type
];
445 /* If object size is propagated from one of function's arguments directly
446 to its return value, return that argument for GIMPLE_CALL statement CALL.
447 Otherwise return NULL. */
450 pass_through_call (const_gimple call
)
452 tree callee
= gimple_call_fndecl (call
);
455 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
456 switch (DECL_FUNCTION_CODE (callee
))
458 case BUILT_IN_MEMCPY
:
459 case BUILT_IN_MEMMOVE
:
460 case BUILT_IN_MEMSET
:
461 case BUILT_IN_STRCPY
:
462 case BUILT_IN_STRNCPY
:
463 case BUILT_IN_STRCAT
:
464 case BUILT_IN_STRNCAT
:
465 case BUILT_IN_MEMCPY_CHK
:
466 case BUILT_IN_MEMMOVE_CHK
:
467 case BUILT_IN_MEMSET_CHK
:
468 case BUILT_IN_STRCPY_CHK
:
469 case BUILT_IN_STRNCPY_CHK
:
470 case BUILT_IN_STPNCPY_CHK
:
471 case BUILT_IN_STRCAT_CHK
:
472 case BUILT_IN_STRNCAT_CHK
:
473 case BUILT_IN_ASSUME_ALIGNED
:
474 if (gimple_call_num_args (call
) >= 1)
475 return gimple_call_arg (call
, 0);
485 /* Compute __builtin_object_size value for PTR. OBJECT_SIZE_TYPE is the
486 second argument from __builtin_object_size. */
488 unsigned HOST_WIDE_INT
489 compute_builtin_object_size (tree ptr
, int object_size_type
)
491 gcc_assert (object_size_type
>= 0 && object_size_type
<= 3);
494 init_offset_limit ();
496 if (TREE_CODE (ptr
) == ADDR_EXPR
)
497 return addr_object_size (NULL
, ptr
, object_size_type
);
499 if (TREE_CODE (ptr
) == SSA_NAME
500 && POINTER_TYPE_P (TREE_TYPE (ptr
))
501 && object_sizes
[object_size_type
] != NULL
)
503 if (!bitmap_bit_p (computed
[object_size_type
], SSA_NAME_VERSION (ptr
)))
505 struct object_size_info osi
;
511 fprintf (dump_file
, "Computing %s %sobject size for ",
512 (object_size_type
& 2) ? "minimum" : "maximum",
513 (object_size_type
& 1) ? "sub" : "");
514 print_generic_expr (dump_file
, ptr
, dump_flags
);
515 fprintf (dump_file
, ":\n");
518 osi
.visited
= BITMAP_ALLOC (NULL
);
519 osi
.reexamine
= BITMAP_ALLOC (NULL
);
520 osi
.object_size_type
= object_size_type
;
525 /* First pass: walk UD chains, compute object sizes that
526 can be computed. osi.reexamine bitmap at the end will
527 contain what variables were found in dependency cycles
528 and therefore need to be reexamined. */
531 collect_object_sizes_for (&osi
, ptr
);
533 /* Second pass: keep recomputing object sizes of variables
534 that need reexamination, until no object sizes are
535 increased or all object sizes are computed. */
536 if (! bitmap_empty_p (osi
.reexamine
))
538 bitmap reexamine
= BITMAP_ALLOC (NULL
);
540 /* If looking for minimum instead of maximum object size,
541 detect cases where a pointer is increased in a loop.
542 Although even without this detection pass 2 would eventually
543 terminate, it could take a long time. If a pointer is
544 increasing this way, we need to assume 0 object size.
545 E.g. p = &buf[0]; while (cond) p = p + 4; */
546 if (object_size_type
& 2)
548 osi
.depths
= XCNEWVEC (unsigned int, num_ssa_names
);
549 osi
.stack
= XNEWVEC (unsigned int, num_ssa_names
);
552 /* collect_object_sizes_for is changing
553 osi.reexamine bitmap, so iterate over a copy. */
554 bitmap_copy (reexamine
, osi
.reexamine
);
555 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
556 if (bitmap_bit_p (osi
.reexamine
, i
))
557 check_for_plus_in_loops (&osi
, ssa_name (i
));
570 /* collect_object_sizes_for is changing
571 osi.reexamine bitmap, so iterate over a copy. */
572 bitmap_copy (reexamine
, osi
.reexamine
);
573 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
574 if (bitmap_bit_p (osi
.reexamine
, i
))
576 collect_object_sizes_for (&osi
, ssa_name (i
));
577 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
579 fprintf (dump_file
, "Reexamining ");
580 print_generic_expr (dump_file
, ssa_name (i
),
582 fprintf (dump_file
, "\n");
588 BITMAP_FREE (reexamine
);
590 EXECUTE_IF_SET_IN_BITMAP (osi
.reexamine
, 0, i
, bi
)
591 bitmap_set_bit (computed
[object_size_type
], i
);
593 /* Debugging dumps. */
596 EXECUTE_IF_SET_IN_BITMAP (osi
.visited
, 0, i
, bi
)
597 if (object_sizes
[object_size_type
][i
]
598 != unknown
[object_size_type
])
600 print_generic_expr (dump_file
, ssa_name (i
),
603 ": %s %sobject size "
604 HOST_WIDE_INT_PRINT_UNSIGNED
"\n",
605 (object_size_type
& 2) ? "minimum" : "maximum",
606 (object_size_type
& 1) ? "sub" : "",
607 object_sizes
[object_size_type
][i
]);
611 BITMAP_FREE (osi
.reexamine
);
612 BITMAP_FREE (osi
.visited
);
615 return object_sizes
[object_size_type
][SSA_NAME_VERSION (ptr
)];
618 return unknown
[object_size_type
];
621 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
624 expr_object_size (struct object_size_info
*osi
, tree ptr
, tree value
)
626 int object_size_type
= osi
->object_size_type
;
627 unsigned int varno
= SSA_NAME_VERSION (ptr
);
628 unsigned HOST_WIDE_INT bytes
;
630 gcc_assert (object_sizes
[object_size_type
][varno
]
631 != unknown
[object_size_type
]);
632 gcc_assert (osi
->pass
== 0);
634 if (TREE_CODE (value
) == WITH_SIZE_EXPR
)
635 value
= TREE_OPERAND (value
, 0);
637 /* Pointer variables should have been handled by merge_object_sizes. */
638 gcc_assert (TREE_CODE (value
) != SSA_NAME
639 || !POINTER_TYPE_P (TREE_TYPE (value
)));
641 if (TREE_CODE (value
) == ADDR_EXPR
)
642 bytes
= addr_object_size (osi
, value
, object_size_type
);
644 bytes
= unknown
[object_size_type
];
646 if ((object_size_type
& 2) == 0)
648 if (object_sizes
[object_size_type
][varno
] < bytes
)
649 object_sizes
[object_size_type
][varno
] = bytes
;
653 if (object_sizes
[object_size_type
][varno
] > bytes
)
654 object_sizes
[object_size_type
][varno
] = bytes
;
659 /* Compute object_sizes for PTR, defined to the result of a call. */
662 call_object_size (struct object_size_info
*osi
, tree ptr
, gimple call
)
664 int object_size_type
= osi
->object_size_type
;
665 unsigned int varno
= SSA_NAME_VERSION (ptr
);
666 unsigned HOST_WIDE_INT bytes
;
668 gcc_assert (is_gimple_call (call
));
670 gcc_assert (object_sizes
[object_size_type
][varno
]
671 != unknown
[object_size_type
]);
672 gcc_assert (osi
->pass
== 0);
674 bytes
= alloc_object_size (call
, object_size_type
);
676 if ((object_size_type
& 2) == 0)
678 if (object_sizes
[object_size_type
][varno
] < bytes
)
679 object_sizes
[object_size_type
][varno
] = bytes
;
683 if (object_sizes
[object_size_type
][varno
] > bytes
)
684 object_sizes
[object_size_type
][varno
] = bytes
;
689 /* Compute object_sizes for PTR, defined to an unknown value. */
692 unknown_object_size (struct object_size_info
*osi
, tree ptr
)
694 int object_size_type
= osi
->object_size_type
;
695 unsigned int varno
= SSA_NAME_VERSION (ptr
);
696 unsigned HOST_WIDE_INT bytes
;
698 gcc_assert (object_sizes
[object_size_type
][varno
]
699 != unknown
[object_size_type
]);
700 gcc_assert (osi
->pass
== 0);
702 bytes
= unknown
[object_size_type
];
704 if ((object_size_type
& 2) == 0)
706 if (object_sizes
[object_size_type
][varno
] < bytes
)
707 object_sizes
[object_size_type
][varno
] = bytes
;
711 if (object_sizes
[object_size_type
][varno
] > bytes
)
712 object_sizes
[object_size_type
][varno
] = bytes
;
717 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
718 the object size might need reexamination later. */
721 merge_object_sizes (struct object_size_info
*osi
, tree dest
, tree orig
,
722 unsigned HOST_WIDE_INT offset
)
724 int object_size_type
= osi
->object_size_type
;
725 unsigned int varno
= SSA_NAME_VERSION (dest
);
726 unsigned HOST_WIDE_INT orig_bytes
;
728 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
730 if (offset
>= offset_limit
)
732 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
737 collect_object_sizes_for (osi
, orig
);
739 orig_bytes
= object_sizes
[object_size_type
][SSA_NAME_VERSION (orig
)];
740 if (orig_bytes
!= unknown
[object_size_type
])
741 orig_bytes
= (offset
> orig_bytes
)
742 ? (unsigned HOST_WIDE_INT
) 0 : orig_bytes
- offset
;
744 if ((object_size_type
& 2) == 0)
746 if (object_sizes
[object_size_type
][varno
] < orig_bytes
)
748 object_sizes
[object_size_type
][varno
] = orig_bytes
;
754 if (object_sizes
[object_size_type
][varno
] > orig_bytes
)
756 object_sizes
[object_size_type
][varno
] = orig_bytes
;
760 return bitmap_bit_p (osi
->reexamine
, SSA_NAME_VERSION (orig
));
764 /* Compute object_sizes for VAR, defined to the result of an assignment
765 with operator POINTER_PLUS_EXPR. Return true if the object size might
766 need reexamination later. */
769 plus_stmt_object_size (struct object_size_info
*osi
, tree var
, gimple stmt
)
771 int object_size_type
= osi
->object_size_type
;
772 unsigned int varno
= SSA_NAME_VERSION (var
);
773 unsigned HOST_WIDE_INT bytes
;
776 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
778 op0
= gimple_assign_rhs1 (stmt
);
779 op1
= gimple_assign_rhs2 (stmt
);
781 else if (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
)
783 tree rhs
= TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
784 gcc_assert (TREE_CODE (rhs
) == MEM_REF
);
785 op0
= TREE_OPERAND (rhs
, 0);
786 op1
= TREE_OPERAND (rhs
, 1);
791 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
794 /* Handle PTR + OFFSET here. */
795 if (TREE_CODE (op1
) == INTEGER_CST
796 && (TREE_CODE (op0
) == SSA_NAME
797 || TREE_CODE (op0
) == ADDR_EXPR
))
799 if (! tree_fits_uhwi_p (op1
))
800 bytes
= unknown
[object_size_type
];
801 else if (TREE_CODE (op0
) == SSA_NAME
)
802 return merge_object_sizes (osi
, var
, op0
, tree_to_uhwi (op1
));
805 unsigned HOST_WIDE_INT off
= tree_to_uhwi (op1
);
807 /* op0 will be ADDR_EXPR here. */
808 bytes
= addr_object_size (osi
, op0
, object_size_type
);
809 if (bytes
== unknown
[object_size_type
])
811 else if (off
> offset_limit
)
812 bytes
= unknown
[object_size_type
];
813 else if (off
> bytes
)
820 bytes
= unknown
[object_size_type
];
822 if ((object_size_type
& 2) == 0)
824 if (object_sizes
[object_size_type
][varno
] < bytes
)
825 object_sizes
[object_size_type
][varno
] = bytes
;
829 if (object_sizes
[object_size_type
][varno
] > bytes
)
830 object_sizes
[object_size_type
][varno
] = bytes
;
836 /* Compute object_sizes for VAR, defined at STMT, which is
837 a COND_EXPR. Return true if the object size might need reexamination
841 cond_expr_object_size (struct object_size_info
*osi
, tree var
, gimple stmt
)
844 int object_size_type
= osi
->object_size_type
;
845 unsigned int varno
= SSA_NAME_VERSION (var
);
846 bool reexamine
= false;
848 gcc_assert (gimple_assign_rhs_code (stmt
) == COND_EXPR
);
850 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
853 then_
= gimple_assign_rhs2 (stmt
);
854 else_
= gimple_assign_rhs3 (stmt
);
856 if (TREE_CODE (then_
) == SSA_NAME
)
857 reexamine
|= merge_object_sizes (osi
, var
, then_
, 0);
859 expr_object_size (osi
, var
, then_
);
861 if (TREE_CODE (else_
) == SSA_NAME
)
862 reexamine
|= merge_object_sizes (osi
, var
, else_
, 0);
864 expr_object_size (osi
, var
, else_
);
869 /* Compute object sizes for VAR.
870 For ADDR_EXPR an object size is the number of remaining bytes
871 to the end of the object (where what is considered an object depends on
872 OSI->object_size_type).
873 For allocation GIMPLE_CALL like malloc or calloc object size is the size
875 For POINTER_PLUS_EXPR where second operand is a constant integer,
876 object size is object size of the first operand minus the constant.
877 If the constant is bigger than the number of remaining bytes until the
878 end of the object, object size is 0, but if it is instead a pointer
879 subtraction, object size is unknown[object_size_type].
880 To differentiate addition from subtraction, ADDR_EXPR returns
881 unknown[object_size_type] for all objects bigger than half of the address
882 space, and constants less than half of the address space are considered
883 addition, while bigger constants subtraction.
884 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
885 object size is object size of that argument.
886 Otherwise, object size is the maximum of object sizes of variables
887 that it might be set to. */
890 collect_object_sizes_for (struct object_size_info
*osi
, tree var
)
892 int object_size_type
= osi
->object_size_type
;
893 unsigned int varno
= SSA_NAME_VERSION (var
);
897 if (bitmap_bit_p (computed
[object_size_type
], varno
))
902 if (bitmap_set_bit (osi
->visited
, varno
))
904 object_sizes
[object_size_type
][varno
]
905 = (object_size_type
& 2) ? -1 : 0;
909 /* Found a dependency loop. Mark the variable for later
911 bitmap_set_bit (osi
->reexamine
, varno
);
912 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
914 fprintf (dump_file
, "Found a dependency loop at ");
915 print_generic_expr (dump_file
, var
, dump_flags
);
916 fprintf (dump_file
, "\n");
922 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
924 fprintf (dump_file
, "Visiting use-def links for ");
925 print_generic_expr (dump_file
, var
, dump_flags
);
926 fprintf (dump_file
, "\n");
929 stmt
= SSA_NAME_DEF_STMT (var
);
932 switch (gimple_code (stmt
))
936 tree rhs
= gimple_assign_rhs1 (stmt
);
937 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
938 || (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
939 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
))
940 reexamine
= plus_stmt_object_size (osi
, var
, stmt
);
941 else if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
942 reexamine
= cond_expr_object_size (osi
, var
, stmt
);
943 else if (gimple_assign_single_p (stmt
)
944 || gimple_assign_unary_nop_p (stmt
))
946 if (TREE_CODE (rhs
) == SSA_NAME
947 && POINTER_TYPE_P (TREE_TYPE (rhs
)))
948 reexamine
= merge_object_sizes (osi
, var
, rhs
, 0);
950 expr_object_size (osi
, var
, rhs
);
953 unknown_object_size (osi
, var
);
959 tree arg
= pass_through_call (stmt
);
962 if (TREE_CODE (arg
) == SSA_NAME
963 && POINTER_TYPE_P (TREE_TYPE (arg
)))
964 reexamine
= merge_object_sizes (osi
, var
, arg
, 0);
966 expr_object_size (osi
, var
, arg
);
969 call_object_size (osi
, var
, stmt
);
974 /* Pointers defined by __asm__ statements can point anywhere. */
975 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
979 if (SSA_NAME_VAR (var
)
980 && TREE_CODE (SSA_NAME_VAR (var
)) == PARM_DECL
)
981 expr_object_size (osi
, var
, SSA_NAME_VAR (var
));
983 /* Uninitialized SSA names point nowhere. */
984 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
991 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
993 tree rhs
= gimple_phi_arg (stmt
, i
)->def
;
995 if (object_sizes
[object_size_type
][varno
]
996 == unknown
[object_size_type
])
999 if (TREE_CODE (rhs
) == SSA_NAME
)
1000 reexamine
|= merge_object_sizes (osi
, var
, rhs
, 0);
1001 else if (osi
->pass
== 0)
1002 expr_object_size (osi
, var
, rhs
);
1012 || object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
1014 bitmap_set_bit (computed
[object_size_type
], varno
);
1015 bitmap_clear_bit (osi
->reexamine
, varno
);
1019 bitmap_set_bit (osi
->reexamine
, varno
);
1020 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1022 fprintf (dump_file
, "Need to reexamine ");
1023 print_generic_expr (dump_file
, var
, dump_flags
);
1024 fprintf (dump_file
, "\n");
1030 /* Helper function for check_for_plus_in_loops. Called recursively
1034 check_for_plus_in_loops_1 (struct object_size_info
*osi
, tree var
,
1037 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1038 unsigned int varno
= SSA_NAME_VERSION (var
);
1040 if (osi
->depths
[varno
])
1042 if (osi
->depths
[varno
] != depth
)
1046 /* Found a loop involving pointer addition. */
1047 for (sp
= osi
->tos
; sp
> osi
->stack
; )
1050 bitmap_clear_bit (osi
->reexamine
, *sp
);
1051 bitmap_set_bit (computed
[osi
->object_size_type
], *sp
);
1052 object_sizes
[osi
->object_size_type
][*sp
] = 0;
1059 else if (! bitmap_bit_p (osi
->reexamine
, varno
))
1062 osi
->depths
[varno
] = depth
;
1063 *osi
->tos
++ = varno
;
1065 switch (gimple_code (stmt
))
1070 if ((gimple_assign_single_p (stmt
)
1071 || gimple_assign_unary_nop_p (stmt
))
1072 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
1074 tree rhs
= gimple_assign_rhs1 (stmt
);
1076 check_for_plus_in_loops_1 (osi
, rhs
, depth
);
1078 else if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1080 tree basevar
= gimple_assign_rhs1 (stmt
);
1081 tree cst
= gimple_assign_rhs2 (stmt
);
1083 gcc_assert (TREE_CODE (cst
) == INTEGER_CST
);
1085 check_for_plus_in_loops_1 (osi
, basevar
,
1086 depth
+ !integer_zerop (cst
));
1095 tree arg
= pass_through_call (stmt
);
1098 if (TREE_CODE (arg
) == SSA_NAME
)
1099 check_for_plus_in_loops_1 (osi
, arg
, depth
);
1110 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1112 tree rhs
= gimple_phi_arg (stmt
, i
)->def
;
1114 if (TREE_CODE (rhs
) == SSA_NAME
)
1115 check_for_plus_in_loops_1 (osi
, rhs
, depth
);
1124 osi
->depths
[varno
] = 0;
1129 /* Check if some pointer we are computing object size of is being increased
1130 within a loop. If yes, assume all the SSA variables participating in
1131 that loop have minimum object sizes 0. */
1134 check_for_plus_in_loops (struct object_size_info
*osi
, tree var
)
1136 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1138 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1139 and looked for a POINTER_PLUS_EXPR in the pass-through
1140 argument, if any. In GIMPLE, however, such an expression
1141 is not a valid call operand. */
1143 if (is_gimple_assign (stmt
)
1144 && gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1146 tree basevar
= gimple_assign_rhs1 (stmt
);
1147 tree cst
= gimple_assign_rhs2 (stmt
);
1149 gcc_assert (TREE_CODE (cst
) == INTEGER_CST
);
1151 if (integer_zerop (cst
))
1154 osi
->depths
[SSA_NAME_VERSION (basevar
)] = 1;
1155 *osi
->tos
++ = SSA_NAME_VERSION (basevar
);
1156 check_for_plus_in_loops_1 (osi
, var
, 2);
1157 osi
->depths
[SSA_NAME_VERSION (basevar
)] = 0;
1163 /* Initialize data structures for the object size computation. */
1166 init_object_sizes (void)
1168 int object_size_type
;
1170 if (object_sizes
[0])
1173 for (object_size_type
= 0; object_size_type
<= 3; object_size_type
++)
1175 object_sizes
[object_size_type
] = XNEWVEC (unsigned HOST_WIDE_INT
, num_ssa_names
);
1176 computed
[object_size_type
] = BITMAP_ALLOC (NULL
);
1179 init_offset_limit ();
1183 /* Destroy data structures after the object size computation. */
1186 fini_object_sizes (void)
1188 int object_size_type
;
1190 for (object_size_type
= 0; object_size_type
<= 3; object_size_type
++)
1192 free (object_sizes
[object_size_type
]);
1193 BITMAP_FREE (computed
[object_size_type
]);
1194 object_sizes
[object_size_type
] = NULL
;
1199 /* Simple pass to optimize all __builtin_object_size () builtins. */
1202 compute_object_sizes (void)
1207 gimple_stmt_iterator i
;
1208 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1211 gimple call
= gsi_stmt (i
);
1212 if (!gimple_call_builtin_p (call
, BUILT_IN_OBJECT_SIZE
))
1215 init_object_sizes ();
1216 result
= fold_call_stmt (call
, false);
1219 if (gimple_call_num_args (call
) == 2
1220 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call
, 0))))
1222 tree ost
= gimple_call_arg (call
, 1);
1224 if (tree_fits_uhwi_p (ost
))
1226 unsigned HOST_WIDE_INT object_size_type
1227 = tree_to_uhwi (ost
);
1229 if (object_size_type
< 2)
1230 result
= fold_convert (size_type_node
,
1231 integer_minus_one_node
);
1232 else if (object_size_type
< 4)
1233 result
= build_zero_cst (size_type_node
);
1241 gcc_assert (TREE_CODE (result
) == INTEGER_CST
);
1243 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1245 fprintf (dump_file
, "Simplified\n ");
1246 print_gimple_stmt (dump_file
, call
, 0, dump_flags
);
1247 fprintf (dump_file
, " to ");
1248 print_generic_expr (dump_file
, result
, 0);
1249 fprintf (dump_file
, "\n");
1252 tree lhs
= gimple_call_lhs (call
);
1256 /* Propagate into all uses and fold those stmts. */
1258 imm_use_iterator iter
;
1259 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, lhs
)
1261 use_operand_p use_p
;
1262 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1263 SET_USE (use_p
, result
);
1264 gimple_stmt_iterator gsi
= gsi_for_stmt (use_stmt
);
1266 update_stmt (gsi_stmt (gsi
));
1271 fini_object_sizes ();
1277 const pass_data pass_data_object_sizes
=
1279 GIMPLE_PASS
, /* type */
1281 OPTGROUP_NONE
, /* optinfo_flags */
1282 false, /* has_gate */
1283 true, /* has_execute */
1284 TV_NONE
, /* tv_id */
1285 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1286 0, /* properties_provided */
1287 0, /* properties_destroyed */
1288 0, /* todo_flags_start */
1289 TODO_verify_ssa
, /* todo_flags_finish */
1292 class pass_object_sizes
: public gimple_opt_pass
1295 pass_object_sizes (gcc::context
*ctxt
)
1296 : gimple_opt_pass (pass_data_object_sizes
, ctxt
)
1299 /* opt_pass methods: */
1300 opt_pass
* clone () { return new pass_object_sizes (m_ctxt
); }
1301 unsigned int execute () { return compute_object_sizes (); }
1303 }; // class pass_object_sizes
1308 make_pass_object_sizes (gcc::context
*ctxt
)
1310 return new pass_object_sizes (ctxt
);