/* Statement simplification on GIMPLE.
   Copyright (C) 2010 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "target.h"
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && (TREE_READONLY (sym)
          || TREE_CODE (sym) == CONST_DECL))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          STRIP_NOPS (val);
          if (is_gimple_min_invariant (val))
            {
              if (TREE_CODE (val) == ADDR_EXPR)
                {
                  tree base = get_base_address (TREE_OPERAND (val, 0));
                  if (base
                      && TREE_CODE (base) == VAR_DECL)
                    {
                      TREE_ADDRESSABLE (base) = 1;
                      if (gimple_referenced_vars (cfun))
                        add_referenced_var (base);
                    }
                }
              return val;
            }
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && !DECL_EXTERNAL (sym)
          && targetm.binds_local_p (sym)
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
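/* For illustration (an editor-added sketch, not part of the original
   sources): given a translation-unit-local declaration such as

     static const int limit = 42;

   get_symbol_constant_value is expected to return the INTEGER_CST 42,
   so a load from LIMIT can be replaced by the constant.  A "const int"
   that is merely declared "extern" would instead yield NULL_TREE,
   since its initializer is unknown in this translation unit.  */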
/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
              && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
          || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
                                      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
                                     TREE_TYPE (addr))
          && useless_type_conversion_p (TREE_TYPE (deref),
                                        TREE_TYPE (TREE_OPERAND (addr, 0))));
}
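/* Illustrative example (editor-added, assuming no volatile qualifiers
   are involved): for

     int i;
     int *p = &i;
     ... *p ...

   propagating &i into the dereference gives plain 'i', which this
   predicate allows because the types convert uselessly.  If the
   pointed-to type were incomplete, or the qualifiers did not line up
   as tested above, the propagation would be refused.  */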
/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.  */

static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
                                tree orig_type,
                                bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF.
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
          || TREE_CODE (elt_offset) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = ssizetype;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL_TREE;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
        elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
         to sign-extend the (possibly pointer) offset here
         and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
                                 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
          || div_and_round_double (TRUNC_DIV_EXPR, 0,
                                   soffset.low, soffset.high,
                                   TREE_INT_CST_LOW (elt_size),
                                   TREE_INT_CST_HIGH (elt_size),
                                   &lquo, &hquo, &lrem, &hrem)
          || lrem || hrem)
        return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
        min_idx = TYPE_MIN_VALUE (idx_type);
      else
        min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds. For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should be not folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
          /* Accesses after the end of arrays of size 0 (gcc
             extension) and 1 are likely intentional ("struct
             hack").  */
          && compare_tree_int (up_bound, 1) > 0)
        return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
          && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
          && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
        return NULL_TREE;
    }
  else if (!allow_negative_idx
           && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
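/* A sketch of the transformation (editor-added; the 4-byte element
   size is an assumption for the sake of the example): given

     int a[10];

   the expression *(&a + 8) has BASE &a and OFFSET 8.  Since 8 is an
   exact multiple of the element size 4, the function builds a[2].  An
   OFFSET of 6 would leave a remainder, and the function would return
   NULL_TREE.  */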
/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.  */

static tree
maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
                                    tree base, tree offset, tree orig_type)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret, new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
        continue;
      if (DECL_BIT_FIELD (f))
        continue;

      if (!DECL_FIELD_OFFSET (f))
        continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
        continue;

      /* ??? Java creates "interesting" fields for representing base classes.
         They have no name, and have no context.  With no context, we get into
         trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
        continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
        continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
         then we can return that field.  */
      if (cmp == 0
          && useless_type_conversion_p (orig_type, field_type))
        {
          t = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
          return t;
        }

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
        continue;

      /* Check for array at the end of the struct.  This is often
         used for flexible array members.  We should be able to
         turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
        tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
          || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
        continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
        continue;

      /* If we matched, then set offset to the displacement into
         this field.  */
      new_base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
      SET_EXPR_LOCATION (new_base, loc);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
                                            f == TYPE_FIELDS (record_type));
      if (ret)
        return ret;
      ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,
                                                orig_type);
      if (ret)
        return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
  SET_EXPR_LOCATION (base, loc);

  t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type,
                                      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
                                             orig_type);
}
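/* Illustrative example (editor-added; the field layout with no
   padding is an assumption): for

     struct S { int x; int y; } s;

   folding *(int *)((char *)&s + 4) walks the fields of S, finds that
   Y sits at byte offset 4 with a matching type, and produces the
   COMPONENT_REF s.y.  */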
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   LOC is the location of original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
                                tree orig_type)
{
  tree ret;
  tree type;

  STRIP_NOPS (base);
  if (TREE_CODE (base) != ADDR_EXPR)
    return NULL_TREE;

  base = TREE_OPERAND (base, 0);

  /* Handle cases where an existing COMPONENT_REF picks e.g. the wrong field
     of a union, so it needs to be removed and a new COMPONENT_REF
     constructed.  Wrong COMPONENT_REFs are often constructed by folding the
     (type *)&object within the expression (type *)&object+offset.  */
  if (handled_component_p (base))
    {
      HOST_WIDE_INT sub_offset, size, maxsize;
      tree newbase;
      newbase = get_ref_base_and_extent (base, &sub_offset,
                                         &size, &maxsize);
      gcc_assert (newbase);
      if (size == maxsize
          && size != -1
          && !(sub_offset & (BITS_PER_UNIT - 1)))
        {
          base = newbase;
          if (sub_offset)
            offset = int_const_binop (PLUS_EXPR, offset,
                                      build_int_cst (TREE_TYPE (offset),
                                                     sub_offset
                                                     / BITS_PER_UNIT), 1);
        }
    }
  if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))
    return base;
  type = TREE_TYPE (base);

  ret = maybe_fold_offset_to_component_ref (loc, type, base, offset,
                                            orig_type);
  if (!ret)
    ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true);

  return ret;
}
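/* Illustrative example (editor-added, 4-byte int assumed): with

     int a[4];

   folding *(int *)((char *)&a + 12) strips the ADDR_EXPR and hands
   the array and offset 12 to the array helper above, which yields
   a[3].  */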
/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   LOC is the location of the original expression.

   Before attempting the conversion strip off existing component refs.  */

tree
maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
                              tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
              && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (loc, addr, offset,
                                      TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
         make sure not to fold address arithmetic so that we change
         reference from one array to another.  This would happen for
         example for

           struct X { char s1[10]; char s2[10] } s;
           char *foo (void) { return &s.s2[-4]; }

         where we need to avoid generating &s.s1[6].  As the C and
         C++ frontends create different initial trees
         (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
         sophisticated comparisons here.  Note that checking for the
         condition after the fact is easier than trying to avoid doing
         the folding.  */
      STRIP_NOPS (orig);
      if (TREE_CODE (orig) == ADDR_EXPR)
        orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
           || (TREE_CODE (orig) == COMPONENT_REF
               && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
          && (TREE_CODE (t) == ARRAY_REF
              || TREE_CODE (t) == COMPONENT_REF)
          && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
                               ? TREE_OPERAND (orig, 0) : orig,
                               TREE_CODE (t) == ARRAY_REF
                               ? TREE_OPERAND (t, 0) : t, 0))
        return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
        return NULL_TREE;
      return build_fold_addr_expr_with_type_loc (loc, t, ptr_type);
    }

  return NULL_TREE;
}
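/* Illustrative example (editor-added, under the same 4-byte-int
   assumption): with

     int a[4];

   the address computation (int *)((char *)&a + 4) can be rewritten by
   this function as &a[1], whereas the &s.s2[-4] case quoted in the
   comment above is deliberately rejected because the folded reference
   would name a different array member.  */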
/* A subroutine of fold_stmt.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
                             int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
          && is_gimple_min_invariant (DECL_INITIAL (base)))
        return DECL_INITIAL (base);

      /* If there is no offset involved simply return the folded base.  */
      if (integer_zerop (offset))
        return base;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (loc, base_addr, offset,
                                          TREE_TYPE (expr));
      if (t)
        {
          /* Preserve volatileness of the original expression.
             We can end up with a plain decl here which is shared
             and we shouldn't mess with its flags.  */
          if (!SSA_VAR_P (t))
            TREE_THIS_VOLATILE (t) = volatile_p;
          return t;
        }
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
         such as "_"[3].  Bail out of the entire substitution search
         and arrange for the entire statement to be replaced by a
         call to __builtin_trap.  In all likelihood this will all be
         constant-folded away, but in the meantime we can't leave with
         something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
        {
          /* FIXME: Except that this causes problems elsewhere with dead
             code not being deleted, and we die in the rtl expanders
             because we failed to remove some ssa_name.  In the meantime,
             just return zero.  */
          /* FIXME2: This condition should be signaled by
             fold_read_from_constant_string directly, rather than
             re-checking for it here.  */
          return integer_zero_node;
        }

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
        {
          t = maybe_fold_offset_to_reference (loc, base, offset,
                                              TREE_TYPE (expr));
          if (t)
            return t;
        }
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
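/* Illustrative example of the string-constant path (editor-added):
   the load *("hello" + 1) can reduce via fold_read_from_constant_string
   to the character constant 'e', while an out-of-range access such as
   "_"[3] takes the bail-out path above and yields integer_zero_node.  */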
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
         via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (op0, 1))
          && TREE_CODE (op1) == SSA_NAME
          && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
        {
          gimple offset_def = SSA_NAME_DEF_STMT (op1);
          if (!is_gimple_assign (offset_def))
            return NULL_TREE;

          /* As we will end up creating a variable index array access
             in the outermost array dimension make sure there isn't
             a more inner array that the index could overflow to.  */
          if (TREE_CODE (TREE_OPERAND (op0, 0)) == ARRAY_REF)
            return NULL_TREE;

          /* Do not build array references of something that we can't
             see the true number of array dimensions for.  */
          if (!DECL_P (TREE_OPERAND (op0, 0))
              && !handled_component_p (TREE_OPERAND (op0, 0)))
            return NULL_TREE;

          if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
                                     TYPE_SIZE_UNIT (TREE_TYPE (op0))))
            return build_fold_addr_expr
                     (build4 (ARRAY_REF, TREE_TYPE (op0),
                              TREE_OPERAND (op0, 0),
                              gimple_assign_rhs1 (offset_def),
                              TREE_OPERAND (op0, 2),
                              TREE_OPERAND (op0, 3)));
          else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
                   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
            return build_fold_addr_expr
                     (build4 (ARRAY_REF, TREE_TYPE (op0),
                              TREE_OPERAND (op0, 0),
                              op1,
                              TREE_OPERAND (op0, 2),
                              TREE_OPERAND (op0, 3)));
        }
      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
        break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
        break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
        {
          min_idx = TYPE_MIN_VALUE (min_idx);
          if (min_idx)
            {
              if (TREE_CODE (min_idx) != INTEGER_CST)
                break;

              array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,
                                             min_idx, 0);
            }
        }

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
                             array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
                                            ptd_type);
  if (t)
    {
      t = build1 (ADDR_EXPR, res_type, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
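/* Illustrative examples (editor-added, with int a[10] and a 4-byte
   int assumed): the constant case

     (int *)(&a p+ 8)        becomes  &a[2]

   and the variable-index case

     off_2 = i_1 * 4;
     q_3 = &a[0] p+ off_2;   becomes  q_3 = &a[i_1];

   where off_2 is an SSA name whose defining statement is the
   multiplication by the element size, matching the code above.  */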
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;

  if (TREE_CODE (expr) == ARRAY_REF
      && !is_lhs)
    {
      tree tem = fold_read_from_constant_string (expr);
      if (tem)
        return tem;
    }

  /* ??? We might want to open-code the relevant remaining cases
     to avoid using the generic fold.  */
  if (handled_component_p (*t)
      && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
    {
      tree tem = fold (*t);
      if (tem != *t)
        return tem;
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  if (TREE_CODE (*t) == INDIRECT_REF)
    {
      tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
                                           integer_zero_node);
      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
      if (is_lhs && tem && CONSTANT_CLASS_P (tem))
        tem = NULL_TREE;
      if (!tem
          && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
        /* If we had a good reason for propagating the address here,
           make sure we end up with valid gimple.  See PR34989.  */
        tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);

      if (tem)
        {
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }
  else if (!is_lhs
           && DECL_P (*t))
    {
      tree tem = get_symbol_constant_value (*t);
      if (tem
          && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
        {
          *t = unshare_expr (tem);
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
            tree op0 = COND_EXPR_COND (rhs);
            tree tem;
            bool set = false;
            location_t cond_loc = EXPR_LOCATION (rhs);

            if (COMPARISON_CLASS_P (op0))
              {
                fold_defer_overflow_warnings ();
                tem = fold_binary_loc (cond_loc,
                                       TREE_CODE (op0), TREE_TYPE (op0),
                                       TREE_OPERAND (op0, 0),
                                       TREE_OPERAND (op0, 1));
                /* This is actually a conditional expression, not a GIMPLE
                   conditional statement, however, the valid_gimple_rhs_p
                   test still applies.  */
                set = (tem && is_gimple_condexpr (tem)
                       && valid_gimple_rhs_p (tem));
                fold_undefer_overflow_warnings (set, stmt, 0);
              }
            else if (is_gimple_min_invariant (op0))
              {
                tem = op0;
                set = true;
              }
            else
              return NULL_TREE;

            if (set)
              result = fold_build3_loc (cond_loc, COND_EXPR,
                                        TREE_TYPE (rhs), tem,
                                        COND_EXPR_THEN (rhs),
                                        COND_EXPR_ELSE (rhs));
          }

        else if (TREE_CODE (rhs) == TARGET_MEM_REF)
          return maybe_fold_tmr (rhs);

        else if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
            if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return unshare_expr (get_symbol_constant_value (rhs));

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
        else if (CONVERT_EXPR_CODE_P (subcode)
                 && POINTER_TYPE_P (gimple_expr_type (stmt))
                 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
          {
            tree type = gimple_expr_type (stmt);
            tree t = maybe_fold_offset_to_address (loc,
                                                   gimple_assign_rhs1 (stmt),
                                                   integer_zero_node, type);
            if (t)
              return t;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
        {
          tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
            {
              type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
              if (!useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
                type = TREE_TYPE (gimple_assign_rhs1 (stmt));
            }
          result = maybe_fold_stmt_addition (gimple_location (stmt),
                                             type,
                                             gimple_assign_rhs1 (stmt),
                                             gimple_assign_rhs2 (stmt));
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
             produced.  */
          if (commutative_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt));
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
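/* Illustrative example of the GIMPLE_SINGLE_RHS vector case
   (editor-added): a constructor such as

     v_1 = {1, 2, 3, 4};

   for a four-element integer vector type consists solely of
   INTEGER_CSTs, so it is folded to a single VECTOR_CST, which is a
   valid GIMPLE operand.  */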
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
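/* Illustrative example (editor-added): after propagation a condition
   may read

     if (2 > 1)

   fold_binary_loc reduces the comparison to a constant, the condition
   is rewritten to the canonical always-true form, and CFG cleanup can
   later remove the dead branch.  */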
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE; /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc();
  struct gimplify_ctx gctx;
  gimple last = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      if (last)
        {
          gsi_insert_before (si_p, last, GSI_NEW_STMT);
          gsi_next (si_p);
        }
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      mark_symbols_for_renaming (new_stmt);
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      unlink_stmt_vdef (stmt);
      release_defs (stmt);
      new_stmt = last;
    }
  else
    {
      if (last)
        {
          gsi_insert_before (si_p, last, GSI_NEW_STMT);
          gsi_next (si_p);
        }
      new_stmt = gimple_build_assign (lhs, tmp);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length,
                                     visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          /* If we already have a length, it must match.  */
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          unsigned i;

          for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
            {
              tree arg = gimple_phi_arg (def_stmt, i)->def;

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == gimple_phi_result (def_stmt))
                continue;

              if (!get_maxval_strlen (arg, length, visited, type))
                return false;
            }
        }
        return true;

      default:
        return false;
    }
}
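/* Illustrative example (editor-added): for the SSA chain

     s_1 = &"foo"[0];
     s_2 = PHI <s_1, &"barbaz"[0]>;

   get_maxval_strlen with TYPE == 1 visits both PHI arguments and
   records the maximum string length 6 in *LENGTH; with TYPE == 0 it
   would fail, because the two lengths 3 and 6 differ.  */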
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

static tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
            fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
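/* Illustrative example (editor-added): for

     char buf[64];
     strcpy (buf, "abc");

   the length of "abc" is known to be 3, so fold_builtin_strcpy can
   turn the call into the equivalent memcpy (buf, "abc", 4), which is
   usually cheaper; see builtins.c for the exact replacement
   strategy.  */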
/* Search for a base binfo of BINFO that corresponds to TYPE and return it if
   it is found or NULL_TREE if it is not.  */

static tree
get_base_binfo_for_type (tree binfo, tree type)
{
  int i;
  tree base_binfo;
  tree res = NULL_TREE;

  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (TREE_TYPE (base_binfo) == type)
      {
        gcc_assert (!res);
        res = base_binfo;
      }

  return res;
}
/* Return a binfo describing the part of object referenced by expression REF.
   Return NULL_TREE if it cannot be determined.  REF can consist of a series of
   COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
   a simple declaration, indirect reference or an SSA_NAME.  If the function
   discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
   encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
   Otherwise the first non-artificial field declaration or the base declaration
   will be examined to get the encapsulating type.  */

tree
gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
{
  while (true)
    {
      if (TREE_CODE (ref) == COMPONENT_REF)
        {
          tree par_type;
          tree binfo, base_binfo;
          tree field = TREE_OPERAND (ref, 1);

          if (!DECL_ARTIFICIAL (field))
            {
              tree type = TREE_TYPE (field);
              if (TREE_CODE (type) == RECORD_TYPE)
                return TYPE_BINFO (type);
              else
                return NULL_TREE;
            }

          par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
          binfo = TYPE_BINFO (par_type);
          if (!binfo
              || BINFO_N_BASE_BINFOS (binfo) == 0)
            return NULL_TREE;

          base_binfo = BINFO_BASE_BINFO (binfo, 0);
          if (BINFO_TYPE (base_binfo) != TREE_TYPE (field))
            {
              tree d_binfo;

              d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
                                                       known_binfo);
              /* Get descendant binfo.  */
              if (!d_binfo)
                return NULL_TREE;
              return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
            }

          ref = TREE_OPERAND (ref, 0);
        }
      else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
        return TYPE_BINFO (TREE_TYPE (ref));
      else if (known_binfo
               && (TREE_CODE (ref) == SSA_NAME
                   || TREE_CODE (ref) == INDIRECT_REF))
        return known_binfo;
      else
        return NULL_TREE;
    }
}
/* Fold an OBJ_TYPE_REF expression to the address of a function.  TOKEN is
   the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  */

tree
gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  HOST_WIDE_INT i;
  tree v, fndecl;

  v = BINFO_VIRTUALS (known_binfo);
  i = 0;
  while (i != token)
    {
      i += (TARGET_VTABLE_USES_DESCRIPTORS
            ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
      v = TREE_CHAIN (v);
    }

  fndecl = TREE_VALUE (v);
  return build_fold_addr_expr (fndecl);
}
/* Fold an OBJ_TYPE_REF expression to the address of a function.  If
   KNOWN_TYPE is not NULL_TREE, it is the true type of the outermost
   encapsulating object if that comes from a pointer SSA_NAME.  If the true
   outermost encapsulating type can be determined from a declaration
   OBJ_TYPE_REF_OBJECT(REF), it is used regardless of KNOWN_TYPE (which thus
   can be NULL_TREE).  */

tree
gimple_fold_obj_type_ref (tree ref, tree known_type)
{
  tree obj = OBJ_TYPE_REF_OBJECT (ref);
  tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
  tree binfo;

  if (TREE_CODE (obj) == ADDR_EXPR)
    obj = TREE_OPERAND (obj, 0);

  binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
  if (binfo)
    {
      HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
      return gimple_fold_obj_type_ref_known_binfo (token, binfo);
    }
  else
    return NULL_TREE;
}
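/* Illustrative example (editor-added; C++ devirtualization, with the
   class layout assumed): for

     struct A { virtual int f (); };
     A a;

   a virtual call through OBJ_TYPE_REF whose object is &a can be
   resolved here, because the object is a declaration of known type A;
   the binfo lookup succeeds, and the OBJ_TYPE_REF folds to the address
   of A::f, turning the indirect virtual call into a direct one.  */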
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);

      if (result)
        {
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          return true;
        }
    }
  else
    {
      /* ??? Should perhaps do this in fold proper.  However, doing it
         there requires that we create a new CALL_EXPR, and that requires
         copying EH region info to the new node.  Easier to just do it
         here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
        {
          tree t;

          t = gimple_fold_obj_type_ref (callee, NULL_TREE);
          if (t)
            {
              gimple_call_set_fn (stmt, t);
              return true;
            }
        }
    }

  return false;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        tree new_rhs = fold_gimple_assign (gsi);
        tree lhs = gimple_assign_lhs (stmt);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
        if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
          {
            tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
            if (tmp)
              {
                gimple_call_set_arg (stmt, i, tmp);
                changed = true;
              }
          }
      /* The entire statement may be replaced in this case.  */
      if (!inplace)
        changed |= fold_gimple_call (gsi);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
        {
          tree link = gimple_asm_output_op (stmt, i);
          tree op = TREE_VALUE (link);
          if (REFERENCE_CLASS_P (op)
              && (op = maybe_fold_reference (op, true)) != NULL_TREE)
            {
              TREE_VALUE (link) = op;
              changed = true;
            }
        }
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
        {
          tree link = gimple_asm_input_op (stmt, i);
          tree op = TREE_VALUE (link);
          if (REFERENCE_CLASS_P (op)
              && (op = maybe_fold_reference (op, false)) != NULL_TREE)
            {
              TREE_VALUE (link) = op;
              changed = true;
            }
        }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}
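/* Typical use (an editor-added illustrative sketch, not part of this
   file's interface): a pass can fold every statement in a function
   with

     basic_block bb;
     gimple_stmt_iterator gsi;

     FOR_EACH_BB (bb)
       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
         fold_stmt (&gsi);

   using fold_stmt_inplace instead when the statement must not be
   replaced wholesale.  */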