/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "toplev.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"

struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, tree);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.  */
static unsigned HOST_WIDE_INT *object_sizes[4];
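
/* For illustration (example names and layout are assumptions, not part of
   the original sources): given

     struct A { char buf[10]; int i; } a;

   on a target where sizeof (struct A) == 16, the four modes yield

     __builtin_object_size (&a.buf[4], 0) == 12   maximum, whole object
     __builtin_object_size (&a.buf[4], 1) == 6    maximum, subobject a.buf
     __builtin_object_size (&a.buf[4], 2) == 12   minimum, whole object
     __builtin_object_size (&a.buf[4], 3) == 6    minimum, subobject

   and when nothing can be determined the maximum modes fold to
   (size_t) -1 while the minimum modes fold to 0, matching unknown[]
   above.  */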

/* Bitmaps recording which object sizes have already been computed.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;

/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
    offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
  else
    offset_limit = -1;
  offset_limit /= 2;
}

/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
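
/* Sketch of what this computes (illustrative, names are made up): with

     struct A { int i; char buf[10]; } a;

   the offset of the reference a.buf[3] relative to VAR == a is the byte
   offset of the field buf (from DECL_FIELD_OFFSET/DECL_FIELD_BIT_OFFSET)
   plus 3 * sizeof (char), i.e. 4 + 3 == 7 on a target where the int
   member occupies 4 bytes; a negative ARRAY_REF index is folded with
   MINUS_EXPR instead.  */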

/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (const_tree ptr, int object_size_type)
{
  tree pt_var;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  if (REFERENCE_CLASS_P (pt_var))
    pt_var = get_base_address (pt_var);

  if (pt_var
      && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
      && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
      && (unsigned HOST_WIDE_INT)
         tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1) < offset_limit)
    {
      tree bytes;

      if (pt_var != TREE_OPERAND (ptr, 0))
        {
          tree var;

          if (object_size_type & 1)
            {
              var = TREE_OPERAND (ptr, 0);

              while (var != pt_var
                     && TREE_CODE (var) != BIT_FIELD_REF
                     && TREE_CODE (var) != COMPONENT_REF
                     && TREE_CODE (var) != ARRAY_REF
                     && TREE_CODE (var) != ARRAY_RANGE_REF
                     && TREE_CODE (var) != REALPART_EXPR
                     && TREE_CODE (var) != IMAGPART_EXPR)
                var = TREE_OPERAND (var, 0);
              if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
                var = TREE_OPERAND (var, 0);
              if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
                  || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
                  || tree_int_cst_lt (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)),
                                      TYPE_SIZE_UNIT (TREE_TYPE (var))))
                var = pt_var;
            }
          else
            var = pt_var;

          bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
          if (bytes != error_mark_node)
            {
              if (TREE_CODE (bytes) == INTEGER_CST
                  && tree_int_cst_lt (TYPE_SIZE_UNIT (TREE_TYPE (var)), bytes))
                bytes = size_zero_node;
              else
                bytes = size_binop (MINUS_EXPR,
                                    TYPE_SIZE_UNIT (TREE_TYPE (var)), bytes);
            }
        }
      else
        bytes = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));

      if (host_integerp (bytes, 1))
        return tree_low_cst (bytes, 1);
    }

  return unknown[object_size_type];
}
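
/* Illustrative examples (assuming a typical target):

     char buf[8];
     __builtin_object_size (&buf[2], 0)      == 6, bytes left in buf
     __builtin_object_size ("abcdef" + 2, 0) == 5, string constants count
                                                   the terminating NUL

   Anything whose base is not an addressable variable or string constant,
   or whose size is not known at compile time, ends up as
   unknown[object_size_type] above.  */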

/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
                                 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
        arg1 = 0;
      default:
        break;
      }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int) gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
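
/* For example (illustrative; my_alloc is a hypothetical function):

     void *my_alloc (size_t) __attribute__ ((alloc_size (1)));

     p = malloc (30);       the object p points to has size 30
     q = calloc (4, 20);    the object q points to has size 4 * 20 == 80
     r = my_alloc (100);    size 100, found through the alloc_size attribute

   Non-constant size arguments make the result unknown[object_size_type]
   here.  */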

/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const_gimple call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
        if (gimple_call_num_args (call) >= 1)
          return gimple_call_arg (call, 0);
        break;
      default:
        break;
      }

  return NULL_TREE;
}
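
/* Example (illustrative): in

     p = memcpy (dst, src, n);

   the call returns its first argument, so the object size of p is simply
   the object size of dst; the same holds for the other string and memory
   builtins listed above and their _CHK variants.  */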

/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }

          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}

/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gimple call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}

/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}

/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
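
/* Example (illustrative numbers): merging ORIG with object size 32 at
   OFFSET 8 proposes 24 for DEST; at OFFSET 40 the proposal is clamped to
   0 rather than wrapping.  For maximum sizes the larger of the current
   and proposed value is kept, for minimum sizes the smaller one, and a
   still-set bit in osi->reexamine on ORIG asks for another visit once
   the dependency cycle has been resolved.  */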

/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  gcc_assert (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! host_integerp (op1, 1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);

          /* op0 will be ADDR_EXPR here.  */
          bytes = compute_builtin_object_size (op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
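
/* Illustrative example: for the GIMPLE statement

     q_2 = p_1 + 4;    (POINTER_PLUS_EXPR with a constant offset)

   the object size of q_2 is the object size of p_1 minus 4, clamped to 0
   if fewer than 4 bytes remain; a non-constant offset makes the result
   unknown[object_size_type].  */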

/* Compute object_sizes for VAR, defined to VALUE, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, tree value)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (TREE_CODE (value) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = COND_EXPR_THEN (value);
  else_ = COND_EXPR_ELSE (value);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
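
/* Example (illustrative): for a conditional like

     p = cond ? &a[2] : &b[4];

   both arms contribute through merge_object_sizes or expr_object_size,
   so the maximum object size of p is the larger of the two remaining
   sizes and the minimum object size is the smaller one.  */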

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc the object size is
   the size of the allocation.
   For POINTER_PLUS_EXPR where the second operand is a constant integer,
   the object size is the object size of the first operand minus the
   constant.  If the constant is bigger than the number of remaining bytes
   until the end of the object, the object size is 0, but if it is instead
   a pointer subtraction, the object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants mean subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments,
   the object size is the object size of that argument.
   Otherwise, the object size is the maximum of the object sizes of the
   variables that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (! bitmap_bit_p (osi->visited, varno))
        {
          bitmap_set_bit (osi->visited, varno);
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else if (TREE_CODE (rhs) == COND_EXPR)
              reexamine = cond_expr_object_size (osi, var, rhs);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else if (TREE_CODE (arg) == COND_EXPR)
              reexamine = cond_expr_object_size (osi, var, arg);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      {
        tree decl = SSA_NAME_VAR (var);

        if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
          expr_object_size (osi, var, DECL_INITIAL (decl));
        else
          expr_object_size (osi, var, decl);
      }
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}
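
/* Concrete illustration of the addition/subtraction rule described above
   (numbers assume a 32-bit sizetype, so offset_limit is 0x7fffffff):

     q_2 = p_1 + 16;           constant below offset_limit: addition,
                               size of q_2 is size of p_1 minus 16
     q_2 = p_1 + 0xfffffff0;   p_1 - 16 in two's complement; the constant
                               is not below offset_limit, so the size
                               becomes unknown[object_size_type]

   and addr_object_size already refuses objects whose size is not below
   offset_limit, which is what makes this distinction safe.  */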

/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}

/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}

/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (object_sizes[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT,
                                                num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}

/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      free (object_sizes[object_size_type]);
      BITMAP_FREE (computed[object_size_type]);
      object_sizes[object_size_type] = NULL;
    }
}

/* Simple pass to optimize all __builtin_object_size () builtins.  */

static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree callee, result;
          gimple call = gsi_stmt (i);

          if (gimple_code (call) != GIMPLE_CALL)
            continue;

          callee = gimple_call_fndecl (call);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
            continue;

          init_object_sizes ();
          result = fold_call_stmt (call, false);
          if (!result)
            {
              if (gimple_call_num_args (call) == 2
                  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
                {
                  tree ost = gimple_call_arg (call, 1);

                  if (host_integerp (ost, 1))
                    {
                      unsigned HOST_WIDE_INT object_size_type
                        = tree_low_cst (ost, 1);

                      if (object_size_type < 2)
                        result = fold_convert (size_type_node,
                                               integer_minus_one_node);
                      else if (object_size_type < 4)
                        result = size_zero_node;
                    }
                }

              if (!result)
                continue;
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
            }

          if (!update_call_from_tree (&i, result))
            gcc_unreachable ();

          /* NOTE: In the pre-tuples code, we called update_stmt here.  This is
             now handled by gsi_replace, called from update_call_from_tree.  */

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
    }

  fini_object_sizes ();
  return 0;
}
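
/* Net effect (illustrative): a statement such as

     sz_3 = __builtin_object_size (p_1, 0);

   is replaced by a constant when the size is known, e.g. sz_3 = 30 when
   p_1 is the result of malloc (30), and otherwise by the conservative
   fallback computed above: (size_t) -1 for types 0 and 1, 0 for types
   2 and 3.  */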

struct gimple_opt_pass pass_object_sizes =
{
 {
  GIMPLE_PASS,
  "objsz",                              /* name */
  NULL,                                 /* gate */
  compute_object_sizes,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */
 }
};