/* Statement simplification on GIMPLE.
   Copyright (C) 2010 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && (TREE_READONLY (sym)
          || TREE_CODE (sym) == CONST_DECL))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          STRIP_NOPS (val);
          if (is_gimple_min_invariant (val))
            {
              if (TREE_CODE (val) == ADDR_EXPR)
                {
                  tree base = get_base_address (TREE_OPERAND (val, 0));
                  if (base && TREE_CODE (base) == VAR_DECL)
                    {
                      TREE_ADDRESSABLE (base) = 1;
                      if (gimple_referenced_vars (cfun))
                        add_referenced_var (base);
                    }
                }
              return val;
            }
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && !DECL_EXTERNAL (sym)
          && targetm.binds_local_p (sym)
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
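
/* Illustrative example: given

     static const int answer = 42;

   a load from 'answer' folds to the INTEGER_CST 42 via DECL_INITIAL,
   while a non-external

     static const int zeroed;

   with no initializer folds to 0, since a locally-bound definition
   cannot be overridden at link or run time.  */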

/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
              && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
          || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
                                      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
                                     TREE_TYPE (addr))
          && useless_type_conversion_p (TREE_TYPE (deref),
                                        TREE_TYPE (TREE_OPERAND (addr, 0))));
}
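
/* Illustrative example: when propagation substitutes p_1 = &x into a
   use *p_1, this predicate decides whether *&x may be cancelled to
   plain 'x', requiring a complete type and compatible types and
   qualifiers on both sides.  */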

/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.  */

static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
                                tree orig_type,
                                bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
          || TREE_CODE (elt_offset) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = ssizetype;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
        elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
         to sign-extend the (possibly pointer) offset here
         and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
                                 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
          || div_and_round_double (TRUNC_DIV_EXPR, 0,
                                   soffset.low, soffset.high,
                                   TREE_INT_CST_LOW (elt_size),
                                   TREE_INT_CST_HIGH (elt_size),
                                   &lquo, &hquo, &lrem, &hrem)
          || lrem || hrem)
        return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
        min_idx = TYPE_MIN_VALUE (idx_type);
      else
        min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct accesses past array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should not be folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
          /* Accesses after the end of arrays of size 0 (gcc
             extension) and 1 are likely intentional ("struct
             hack").  */
          && compare_tree_int (up_bound, 1) > 0)
        return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
          && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
          && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
        return NULL_TREE;
    }
  else if (!allow_negative_idx
           && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
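
/* Illustrative example: with

     int a[10];

   and a 4-byte int, the dereference *((int *)&a + 12) arrives here
   with BASE 'a' and OFFSET 12.  Since 12 divides exactly by the
   element size, the access folds to a[3]; an offset of 13 does not
   divide exactly and is left alone, as is an index outside [0, 9].  */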

/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.  */

static tree
maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
                                    tree base, tree offset, tree orig_type)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
        continue;
      if (DECL_BIT_FIELD (f))
        continue;

      if (!DECL_FIELD_OFFSET (f))
        continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
        continue;

      /* ??? Java creates "interesting" fields for representing base classes.
         They have no name, and have no context.  With no context, we get into
         trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
        continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
        continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
         then we can return that field.  */
      if (cmp == 0
          && useless_type_conversion_p (orig_type, field_type))
        {
          t = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
          return t;
        }

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
        continue;

      /* Check for array at the end of the struct.  This is often
         used for flexible array members.  We should be able to
         turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
        tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
          || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
        continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
        continue;

      /* If we matched, then set offset to the displacement into
         this field.  */
      new_base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
      SET_EXPR_LOCATION (new_base, loc);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
                                            f == TYPE_FIELDS (record_type));
      if (ret)
        return ret;
      ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,
                                                orig_type);
      if (ret)
        return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
  SET_EXPR_LOCATION (base, loc);

  t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type,
                                      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
                                             orig_type);
}
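
/* Illustrative example: with

     struct S { int i; float f; } s;

   and a 4-byte int, *(float *)((char *)&s + 4) matches field 'f'
   exactly at byte offset 4 and folds to s.f; an offset landing in the
   middle of a scalar field is skipped.  */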

/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   LOC is the location of the original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
                                tree orig_type)
{
  tree ret;
  tree type;

  STRIP_NOPS (base);
  if (TREE_CODE (base) != ADDR_EXPR)
    return NULL_TREE;

  base = TREE_OPERAND (base, 0);

  /* Handle the case where an existing COMPONENT_REF picks, e.g., the wrong
     field of a union, so it needs to be removed and a new COMPONENT_REF
     constructed.  Such wrong COMPONENT_REFs are often constructed by
     folding the (type *)&object within the expression
     (type *)&object+offset.  */
  if (handled_component_p (base))
    {
      HOST_WIDE_INT sub_offset, size, maxsize;
      tree newbase;
      newbase = get_ref_base_and_extent (base, &sub_offset,
                                         &size, &maxsize);
      gcc_assert (newbase);
      if (size == maxsize
          && size != -1
          && !(sub_offset & (BITS_PER_UNIT - 1)))
        {
          base = newbase;
          if (sub_offset)
            offset = int_const_binop (PLUS_EXPR, offset,
                                      build_int_cst (TREE_TYPE (offset),
                                                     sub_offset / BITS_PER_UNIT), 1);
        }
    }
  if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))
    return base;
  type = TREE_TYPE (base);

  ret = maybe_fold_offset_to_component_ref (loc, type, base, offset, orig_type);
  if (!ret)
    ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true);

  return ret;
}

/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   LOC is the location of the original expression.

   Before attempting the conversion strip off existing component refs.  */

tree
maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
                              tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
              && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (loc, addr, offset,
                                      TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
         make sure not to fold address arithmetic so that we change
         the reference from one array to another.  This would happen
         for example for

           struct X { char s1[10]; char s2[10]; } s;
           char *foo (void) { return &s.s2[-4]; }

         where we need to avoid generating &s.s1[6].  As the C and
         C++ frontends create different initial trees,
         (char *) &s.s1 + -4  vs.  &s.s1[-4], we have to do some
         sophisticated comparisons here.  Note that checking for the
         condition after the fact is easier than trying to avoid doing
         the folding.  */
      STRIP_NOPS (orig);
      if (TREE_CODE (orig) == ADDR_EXPR)
        orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
           || (TREE_CODE (orig) == COMPONENT_REF
               && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
          && (TREE_CODE (t) == ARRAY_REF
              || TREE_CODE (t) == COMPONENT_REF)
          && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
                               ? TREE_OPERAND (orig, 0) : orig,
                               TREE_CODE (t) == ARRAY_REF
                               ? TREE_OPERAND (t, 0) : t, 0))
        return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
        return NULL_TREE;
      return build_fold_addr_expr_with_type_loc (loc, t, ptr_type);
    }

  return NULL_TREE;
}
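
/* Illustrative example: with

     char buf[8];

   the address value (char *)&buf + 2 folds to &buf[2].  The
   operand_equal_p guard above rejects folds that would hop from one
   array into a sibling, as in the s1/s2 example in the comment.  */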

/* A subroutine of fold_stmt.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
                             int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
          && is_gimple_min_invariant (DECL_INITIAL (base)))
        return DECL_INITIAL (base);

      /* If there is no offset involved simply return the folded base.  */
      if (integer_zerop (offset))
        return base;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (loc, base_addr, offset,
                                          TREE_TYPE (expr));
      if (t)
        {
          /* Preserve volatileness of the original expression.
             We can end up with a plain decl here which is shared
             and we shouldn't mess with its flags.  */
          if (!SSA_VAR_P (t))
            TREE_THIS_VOLATILE (t) = volatile_p;
          return t;
        }
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
         such as "_"[3].  Bail out of the entire substitution search
         and arrange for the entire statement to be replaced by a
         call to __builtin_trap.  In all likelihood this will all be
         constant-folded away, but in the meantime we can't leave with
         something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
        {
          /* FIXME: Except that this causes problems elsewhere with dead
             code not being deleted, and we die in the rtl expanders
             because we failed to remove some ssa_name.  In the meantime,
             just return zero.  */
          /* FIXME2: This condition should be signaled by
             fold_read_from_constant_string directly, rather than
             re-checking for it here.  */
          return integer_zero_node;
        }

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
        {
          t = maybe_fold_offset_to_reference (loc, base, offset,
                                              TREE_TYPE (expr));
          if (t)
            return t;
        }
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
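
/* Illustrative example: *(&x + 0) reduces to plain 'x', *("abc" + 1)
   reads the constant 'b' out of the string literal, and an
   out-of-range access such as "_"[3] is replaced by zero as described
   in the FIXMEs above.  */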

/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
        (T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
        &array[x]
   which may be able to propagate further.  */

tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
         via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (op0, 1))
          && TREE_CODE (op1) == SSA_NAME
          && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
        {
          gimple offset_def = SSA_NAME_DEF_STMT (op1);
          if (!is_gimple_assign (offset_def))
            return NULL_TREE;

          if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
                                     TYPE_SIZE_UNIT (TREE_TYPE (op0))))
            return build_fold_addr_expr
                          (build4 (ARRAY_REF, TREE_TYPE (op0),
                                   TREE_OPERAND (op0, 0),
                                   gimple_assign_rhs1 (offset_def),
                                   TREE_OPERAND (op0, 2),
                                   TREE_OPERAND (op0, 3)));
          else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
                   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
            return build_fold_addr_expr
                          (build4 (ARRAY_REF, TREE_TYPE (op0),
                                   TREE_OPERAND (op0, 0),
                                   op1,
                                   TREE_OPERAND (op0, 2),
                                   TREE_OPERAND (op0, 3)));
        }
      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
        break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
        break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
        {
          min_idx = TYPE_MIN_VALUE (min_idx);
          if (min_idx)
            {
              if (TREE_CODE (min_idx) != INTEGER_CST)
                break;

              array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,
                                             min_idx, 0);
            }
        }

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
                             array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
                                            ptd_type);
  if (t)
    {
      t = build1 (ADDR_EXPR, res_type, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
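
/* Illustrative example: for

     int a[10];

   the POINTER_PLUS_EXPR '&a p+ 8' (with a 4-byte int) folds to &a[2],
   an address that later passes may propagate further.  */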

/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;

  if (TREE_CODE (expr) == ARRAY_REF
      && !is_lhs)
    {
      tree tem = fold_read_from_constant_string (expr);
      if (tem)
        return tem;
    }

  /* ??? We might want to open-code the relevant remaining cases
     to avoid using the generic fold.  */
  if (handled_component_p (*t)
      && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
    {
      tree tem = fold (*t);
      if (tem != *t)
        return tem;
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  if (TREE_CODE (*t) == INDIRECT_REF)
    {
      tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
                                           integer_zero_node);
      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
      if (is_lhs && tem && CONSTANT_CLASS_P (tem))
        tem = NULL_TREE;
      if (!tem
          && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
        /* If we had a good reason for propagating the address here,
           make sure we end up with valid gimple.  See PR34989.  */
        tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);

      if (tem)
        {
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }
  else if (!is_lhs
           && DECL_P (*t))
    {
      tree tem = get_symbol_constant_value (*t);
      if (tem
          && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
        {
          *t = unshare_expr (tem);
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  return NULL_TREE;
}
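
/* Illustrative example: after constant propagation produces

     *&x = 0;

   the lhs reference folds back to plain 'x', while on a rhs a read
   such as "abc"[1] folds to the constant 'b'.  */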

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
            tree op0 = COND_EXPR_COND (rhs);
            tree tem;
            bool set = false;
            location_t cond_loc = EXPR_LOCATION (rhs);

            if (COMPARISON_CLASS_P (op0))
              {
                fold_defer_overflow_warnings ();
                tem = fold_binary_loc (cond_loc,
                                       TREE_CODE (op0), TREE_TYPE (op0),
                                       TREE_OPERAND (op0, 0),
                                       TREE_OPERAND (op0, 1));
                /* This is actually a conditional expression, not a GIMPLE
                   conditional statement, however, the valid_gimple_rhs_p
                   test still applies.  */
                set = (tem && is_gimple_condexpr (tem)
                       && valid_gimple_rhs_p (tem));
                fold_undefer_overflow_warnings (set, stmt, 0);
              }
            else if (is_gimple_min_invariant (op0))
              {
                tem = op0;
                set = true;
              }
            else
              return NULL_TREE;

            if (set)
              result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
                                        COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
          }

        else if (TREE_CODE (rhs) == TARGET_MEM_REF)
          return maybe_fold_tmr (rhs);

        else if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
            if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return unshare_expr (get_symbol_constant_value (rhs));

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
        else if (CONVERT_EXPR_CODE_P (subcode)
                 && POINTER_TYPE_P (gimple_expr_type (stmt))
                 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
          {
            tree type = gimple_expr_type (stmt);
            tree t = maybe_fold_offset_to_address (loc,
                                                   gimple_assign_rhs1 (stmt),
                                                   integer_zero_node, type);
            if (t)
              return t;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
        {
          tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
            {
              type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
              if (!useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
                type = TREE_TYPE (gimple_assign_rhs1 (stmt));
            }
          result = maybe_fold_stmt_addition (gimple_location (stmt),
                                             type,
                                             gimple_assign_rhs1 (stmt),
                                             gimple_assign_rhs2 (stmt));
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
             produced.  */
          if (commutative_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt));
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
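
/* Illustrative example: once propagation turns a condition into
     if (5 > 7)
   fold_binary_loc yields constant false and the GIMPLE_COND is
   rewritten from that constant; no cfg edges are touched here.  */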

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE; /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;
  gimple last = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      if (last)
        {
          gsi_insert_before (si_p, last, GSI_NEW_STMT);
          gsi_next (si_p);
        }
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      mark_symbols_for_renaming (new_stmt);
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      unlink_stmt_vdef (stmt);
      release_defs (stmt);
      new_stmt = last;
    }
  else
    {
      if (last)
        {
          gsi_insert_before (si_p, last, GSI_NEW_STMT);
          gsi_next (si_p);
        }
      new_stmt = gimple_build_assign (lhs, tmp);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          unsigned i;

          for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
            {
              tree arg = gimple_phi_arg (def_stmt, i)->def;

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == gimple_phi_result (def_stmt))
                continue;

              if (!get_maxval_strlen (arg, length, visited, type))
                return false;
            }
        }
        return true;

      default:
        return false;
    }
}
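
/* Illustrative example: for

     p_1 = PHI <"foo"(bb2), "bar"(bb3)>

   both arguments have string length 3, so a TYPE == 0 query returns
   length 3.  With "foo" and "quux" the TYPE == 0 query fails, while a
   TYPE == 1 query returns the maximum length 4.  */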

/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
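
/* Illustrative example: a call

     n_1 = __builtin_strlen ("hello");

   folds to the constant 5, and

     __builtin_strcpy (dst_2, src_3);

   with a known string length for src_3 may be rewritten into the
   equivalent memcpy with an explicit length.  */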

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);

      if (result)
        {
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          return true;
        }
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
         here are when we've propagated the address of a decl into the
         object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
         there requires that we create a new CALL_EXPR, and that requires
         copying EH region info to the new node.  Easier to just do it
         here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
          && lang_hooks.fold_obj_type_ref
          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
          && DECL_P (TREE_OPERAND
                     (OBJ_TYPE_REF_OBJECT (callee), 0)))
        {
          tree t;

          /* ??? Caution: Broken ADDR_EXPR semantics means that
             looking at the type of the operand of the addr_expr
             can yield an array type.  See silly exception in
             check_pointer_types_r.  */
          t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
          t = lang_hooks.fold_obj_type_ref (callee, t);
          if (t)
            {
              gimple_call_set_fn (stmt, t);
              return true;
            }
        }
    }

  return false;
}

/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes the two cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        tree new_rhs = fold_gimple_assign (gsi);
        tree lhs = gimple_assign_lhs (stmt);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
        if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
          {
            tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
            if (tmp)
              {
                gimple_call_set_arg (stmt, i, tmp);
                changed = true;
              }
          }
      /* The entire statement may be replaced in this case.  */
      if (!inplace)
        changed |= fold_gimple_call (gsi);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
        {
          tree link = gimple_asm_output_op (stmt, i);
          tree op = TREE_VALUE (link);
          if (REFERENCE_CLASS_P (op)
              && (op = maybe_fold_reference (op, true)) != NULL_TREE)
            {
              TREE_VALUE (link) = op;
              changed = true;
            }
        }
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
        {
          tree link = gimple_asm_input_op (stmt, i);
          tree op = TREE_VALUE (link);
          if (REFERENCE_CLASS_P (op)
              && (op = maybe_fold_reference (op, false)) != NULL_TREE)
            {
              TREE_VALUE (link) = op;
              changed = true;
            }
        }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in an unfolded state resulting from, for example, constant
   propagation, which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in an unfolded state resulting from, for example, constant
   propagation, which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}
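
/* Illustrative usage: a pass that wants statements refolded after
   propagation might iterate

     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       fold_stmt (&gsi);

   using fold_stmt_inplace instead where replacing the statement
   wholesale is not permitted.  */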