/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "toplev.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, tree);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);
/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.  */
static unsigned HOST_WIDE_INT *object_sizes[4];
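
/* Illustrative summary of the four variants (an added example, not part
   of the original source):

     char buf[64];
     char *p = cond ? &buf[8] : &buf[16];

     __builtin_object_size (p, 0) == 56   maximum bytes to the end of the
                                          whole object
     __builtin_object_size (p, 2) == 48   minimum bytes to the end of the
                                          whole object

   The odd types (1 and 3) answer the same questions for the innermost
   enclosing subobject instead of the whole object.  */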
/* Bitmaps of what object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;
/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
    offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
  else
    offset_limit = -1;
  offset_limit /= 2;
}
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
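
/* Worked example for illustration (not part of the original source):
   with

     struct S { int x; char c[8]; } s;

   and EXPR == s.c[3], VAR == s, the COMPONENT_REF contributes the byte
   position of the field c (4, assuming a 4-byte int), computed from
   DECL_FIELD_OFFSET and DECL_FIELD_BIT_OFFSET, and the ARRAY_REF
   contributes 3 * sizeof (char), giving an offset of 7.  Any expression
   this function cannot decompose yields error_mark_node.  */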
/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (const_tree ptr, int object_size_type)
{
  tree pt_var;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  if (REFERENCE_CLASS_P (pt_var))
    pt_var = get_base_address (pt_var);

  if (pt_var
      && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
      && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
      && (unsigned HOST_WIDE_INT)
         tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1) < offset_limit)
    {
      tree bytes;

      if (pt_var != TREE_OPERAND (ptr, 0))
        {
          tree var;

          if (object_size_type & 1)
            {
              var = TREE_OPERAND (ptr, 0);

              while (var != pt_var
                     && TREE_CODE (var) != BIT_FIELD_REF
                     && TREE_CODE (var) != COMPONENT_REF
                     && TREE_CODE (var) != ARRAY_REF
                     && TREE_CODE (var) != ARRAY_RANGE_REF
                     && TREE_CODE (var) != REALPART_EXPR
                     && TREE_CODE (var) != IMAGPART_EXPR)
                var = TREE_OPERAND (var, 0);
              if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
                var = TREE_OPERAND (var, 0);
              if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
                  || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
                  || tree_int_cst_lt (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)),
                                      TYPE_SIZE_UNIT (TREE_TYPE (var))))
                var = pt_var;
            }
          else
            var = pt_var;

          bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
          if (bytes != error_mark_node)
            {
              if (TREE_CODE (bytes) == INTEGER_CST
                  && tree_int_cst_lt (TYPE_SIZE_UNIT (TREE_TYPE (var)), bytes))
                bytes = size_zero_node;
              else
                bytes = size_binop (MINUS_EXPR,
                                    TYPE_SIZE_UNIT (TREE_TYPE (var)), bytes);
            }
        }
      else
        bytes = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));

      if (host_integerp (bytes, 1))
        return tree_low_cst (bytes, 1);
    }

  return unknown[object_size_type];
}
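
/* Illustrative results (an added example, not part of the original
   source), assuming a 4-byte int and typical padding:

     char buf[16];
     struct B { struct { char in[4]; } n; char tail[12]; } b;

     addr_object_size (&buf[6], 0)    == 10
     addr_object_size (&b.n.in[1], 0) == 15   (to the end of the whole b)
     addr_object_size (&b.n.in[1], 1) == 3    (to the end of the innermost
                                               subobject b.n.in)

   Objects whose size is not a compile-time constant below offset_limit
   fall through to unknown[object_size_type].  */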
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
        arg1 = 0;
      default:
        break;
      }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int) gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
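
/* Example of the two sources of allocation sizes handled above (added
   for illustration; my_malloc and my_calloc are hypothetical):

     void *my_malloc (size_t) __attribute__ ((alloc_size (1)));
     void *my_calloc (size_t, size_t) __attribute__ ((alloc_size (1, 2)));

     __builtin_object_size (my_malloc (32), 0)    -> 32
     __builtin_object_size (my_calloc (8, 4), 0)  -> 32
     __builtin_object_size (malloc (n), 0)        -> (size_t) -1, since
                                                     the argument is not
                                                     an INTEGER_CST.  */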
/* If object size is propagated from one of the function's arguments
   directly to its return value, return that argument for GIMPLE_CALL
   statement CALL.  Otherwise return NULL.  */

static tree
pass_through_call (const_gimple call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
        if (gimple_call_num_args (call) >= 1)
          return gimple_call_arg (call, 0);
        break;
      default:
        break;
      }

  return NULL_TREE;
}
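
/* For illustration (not part of the original source): memcpy returns its
   first argument, so the size information of that argument flows through
   the call, e.g.

     char dst[24];
     __builtin_object_size (memcpy (dst, src, 8), 0) -> 24  */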
/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }

          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gimple call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  gcc_assert (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! host_integerp (op1, 1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);

          /* op0 will be ADDR_EXPR here.  */
          bytes = compute_builtin_object_size (op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
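
/* Illustrative cases for the POINTER_PLUS_EXPR handling above (added
   example, not part of the original source):

     char buf[10];
     char *p = buf;

     __builtin_object_size (p + 4, 0)  -> 6    remaining bytes
     __builtin_object_size (p + 12, 0) -> 0    already past the end
     __builtin_object_size (p - 2, 0)  -> typically (size_t) -1, because
       the offset, seen as a huge unsigned constant, exceeds offset_limit
       and is treated as a subtraction of unknown effect.  */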
/* Compute object_sizes for VAR, defined to VALUE, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, tree value)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (TREE_CODE (value) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = COND_EXPR_THEN (value);
  else_ = COND_EXPR_ELSE (value);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
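
/* Example of the merging above (added for illustration):

     char small[8], big[32];
     char *q = cond ? small : big;

   For the maximum (object_size_type 0) the result is 32, for the
   minimum (object_size_type 2) it is 8.  */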
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (! bitmap_bit_p (osi->visited, varno))
        {
          bitmap_set_bit (osi->visited, varno);
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else if (TREE_CODE (rhs) == COND_EXPR)
              reexamine = cond_expr_object_size (osi, var, rhs);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else if (TREE_CODE (arg) == COND_EXPR)
              reexamine = cond_expr_object_size (osi, var, arg);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      {
        tree decl = SSA_NAME_VAR (var);

        if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
          expr_object_size (osi, var, DECL_INITIAL (decl));
        else
          expr_object_size (osi, var, decl);
      }
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}
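
/* Illustrative example of the GIMPLE_PHI case above (not part of the
   original source): after

     char a[10], b[20], *p;
     if (cond)
       p = a;
     else
       p = b;

   p is defined by a PHI node merging both assignments, so
   __builtin_object_size (p, 0) evaluates to 20 and
   __builtin_object_size (p, 2) to 10.  */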
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}
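
/* The situation this detection targets, spelled out (added example):

     char buf[64];
     char *p = &buf[0];
     while (cond)
       p = p + 4;

   p participates in a dependency cycle with a nonzero increment, so the
   minimum object size (__builtin_object_size (p, 2)) is forced to 0
   instead of being iterated towards it.  */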
/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (object_sizes[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      free (object_sizes[object_size_type]);
      BITMAP_FREE (computed[object_size_type]);
      object_sizes[object_size_type] = NULL;
    }
}
/* Simple pass to optimize all __builtin_object_size () builtins.  */

static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree callee, result;
          gimple call = gsi_stmt (i);

          if (gimple_code (call) != GIMPLE_CALL)
            continue;

          callee = gimple_call_fndecl (call);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
            continue;

          init_object_sizes ();
          result = fold_call_stmt (call, false);
          if (!result)
            {
              if (gimple_call_num_args (call) == 2
                  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
                {
                  tree ost = gimple_call_arg (call, 1);

                  if (host_integerp (ost, 1))
                    {
                      unsigned HOST_WIDE_INT object_size_type
                        = tree_low_cst (ost, 1);

                      if (object_size_type < 2)
                        result = fold_convert (size_type_node,
                                               integer_minus_one_node);
                      else if (object_size_type < 4)
                        result = size_zero_node;
                    }
                }

              if (!result)
                continue;
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
            }

          if (!update_call_from_tree (&i, result))
            gcc_unreachable ();

          /* NOTE: In the pre-tuples code, we called update_stmt here.  This is
             now handled by gsi_replace, called from update_call_from_tree.  */

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
    }

  fini_object_sizes ();
  return 0;
}
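
/* Net effect of the pass, for illustration (not part of the original
   source): a statement such as

     n_1 = __builtin_object_size (&buf[2], 0);

   is replaced by a constant assignment when the size is known, while
   calls whose result cannot be determined are folded to the documented
   fallbacks: (size_t) -1 for types 0 and 1, and 0 for types 2 and 3.  */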
struct gimple_opt_pass pass_object_sizes =
{
 {
  GIMPLE_PASS,
  "objsz",				/* name */
  NULL,					/* gate */
  compute_object_sizes,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa	/* todo_flags_finish */
 }
};