/* Statement simplification on GIMPLE.
   Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "tree-flow.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable DECL
   was taken from.  We can get declarations that are not possible to
   reference for various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation units.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	duplicated.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node snode;

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || !DECL_EXTERNAL (from_decl)
      || (flag_ltrans
	  && symtab_get_node (from_decl)->symbol.in_other_partition))
    return true;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Weakrefs have somewhat confusing DECL_EXTERNAL flag set; they
     are always safe.  */
  if (DECL_EXTERNAL (decl)
      && lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return true;

  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && (!(snode = symtab_get_node (decl))
	  || !snode->symbol.in_other_partition))
    return false;

  /* When the function is public, we can always introduce a new reference.
     The exceptions are the COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;

  /* We are not at the ltrans stage, so don't worry about WHOPR.
     Also when still gimplifying, all referred comdat functions will be
     output as well.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used, and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;

  /* OK, we are seeing either a COMDAT or a static variable.  In this case we
     must check that the definition is still around so we can refer to it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we didn't take
	 the decision to eliminate the offline copy of the function yet.
	 The second is important when devirtualization happens during the
	 final compilation stage when making a new reference no longer
	 makes the callee to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->analyzed)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  return true;
}
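/* Illustrative sketch (added here for exposition; not from the original
   sources): given an external vtable keyed to another unit,

       extern const void *_ZTV1A[];     vtable of class A
       ... _ZTV1A[2] contains &A::foo   A::foo is a COMDAT method

   folding a load from _ZTV1A may expose &A::foo.  If this unit already
   decided not to output the body of A::foo, the predicate above returns
   false and the caller abandons the fold (case 3 of the comment).  */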
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

static tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  STRIP_USELESS_TYPE_CONVERSION (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base && TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	{
	  TREE_ADDRESSABLE (base) = 1;
	  if (cfun && gimple_referenced_vars (cfun)
	      && !is_global_var (base))
	    add_referenced_var (base);
	}
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_get_create_node (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}
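/* Illustrative sketch (added here for exposition; not from the original
   sources): an initializer such as "&a + 4" reaches us as the tree
   (&a) p+ 4 and is rewritten above into the invariant address
   &MEM[(char *)&a + 4B], an acceptable form for
   is_gimple_min_invariant.  */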
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  val = canonicalize_constructor_val (val, sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
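/* Illustrative sketch (added here for exposition; not from the original
   sources): for

       static const int answer = 42;

   a use of "answer" folds to the constant 42, while a non-overridable
   "static const int zero;" with no initializer folds to 0 per the
   comment above.  */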
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
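/* Illustrative sketch (added here for exposition; not from the original
   sources): address propagation can leave a reference such as
   MEM[(int *)&a + 0B] for "int a"; the fold-back above turns it into the
   plain reference "a" when the types are strictly compatible, and an
   rvalue read of "a" may then fold further to a's known constant.  */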
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that the second operand is an integer constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X.
		 X == 1 and X != 0 is X.  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
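/* Illustrative sketch (added here for exposition; not from the original
   sources): for boolean-typed X, the GIMPLE_BINARY_RHS case above
   canonicalizes

       tmp = (X == 0);   =>   tmp = ~X;      one-bit precision type
       tmp = (X == 0);   =>   tmp = X ^ 1;   wider boolean type
       tmp = (X != 0);   =>   tmp = X;  */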
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  struct gimplify_ctx gctx;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (&gctx);
  gctx.into_ssa = gimple_in_ssa_p (cfun);

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* The replacement can expose previously unreferenced variables.  */
      if (gimple_in_ssa_p (cfun))
	find_referenced_vars_in (new_stmt);
      /* If the new statement possibly has a VUSE, update it with the exact
	 SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
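/* Illustrative sketch (added here for exposition; not from the original
   sources): when a call

       # .MEM_3 = VDEF <.MEM_2>
       memcpy (dst, src, 8);

   is replaced by a sequence of two stores, the first store reuses the
   original VUSE .MEM_2, its VDEF becomes a freshly created intermediate
   SSA name, and the last store receives the original VDEF .MEM_3, so the
   virtual operand chain around the replaced call stays intact.  */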
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
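/* Illustrative sketch (added here for exposition; not from the original
   sources): for

       s_1 = PHI <"foo"(2), "quux"(3)>

   a TYPE == 1 query walks both PHI arguments and records the maximum
   string length, 4, while a TYPE == 0 query fails because the two
   constant lengths 3 and 4 disagree.  */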
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     implemented.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2], ignore,
					   DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
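/* Illustrative sketch (added here for exposition; not from the original
   sources): when get_maxval_strlen determines that the source string has
   length 3,

       strcpy (dst, src);   =>   memcpy (dst, src, 4);

   via fold_builtin_strcpy, copying the terminating NUL as well.  */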
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;

  return binfo;
}
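/* Illustrative sketch (added here for exposition; not from the original
   sources): for

       struct A { virtual int f (); };
       static struct A a;

   an argument &a yields the binfo of A, usable for devirtualizing
   OBJ_TYPE_REF calls, because A has no base binfos; with a base class
   present the lookup above gives up.  */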
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else
	{
	  tree obj = OBJ_TYPE_REF_OBJECT (callee);
	  tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
	  if (binfo)
	    {
	      HOST_WIDE_INT token
		= TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
	      tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
	      if (fndecl)
		{
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		}
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
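/* Illustrative sketch (added here for exposition; not from the original
   sources): a virtual call whose object is the known global "a", such as

       OBJ_TYPE_REF (_2; &a->0) (&a);

   is rewritten into a direct call to A::f once the binfo lookup above
   identifies the method at the vtable token.  */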
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;
  gimple_stmt_iterator gsinext = *gsi;
  gimple next_stmt;

  gsi_next (&gsinext);
  next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* If stmt folds into nothing and it was the last stmt in a bb,
     don't call gsi_stmt.  */
  if (gsi_end_p (*gsi))
    {
      gcc_assert (next_stmt == NULL);
      return changed;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  Don't do this if stmt folded into nothing,
     as we'd be changing the next stmt.  */
  if (gimple_has_lhs (stmt) && stmt != next_stmt)
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
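/* Illustrative sketch (added here for exposition; not from the original
   sources): constant propagation can leave statements like

       *&x = 0;      =>   x = 0;
       tmp = *&y;    =>   tmp = y;

   fold_stmt_1 repairs them by folding *& on the lhs and in operands.  */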
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
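/* Illustrative sketch (added here for exposition; not from the original
   sources): with INVERT set, an SSA name b becomes the comparison
   (b == 0), and a comparison (a < b) becomes (a >= b) via
   invert_tree_comparison; without INVERT, b becomes (b != 0) unless it
   already has boolean type.  */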
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }

  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Helper function for or_comparisons_1: try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_true_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 code2, op2a, op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
         inner1 OR (inner1 AND inner2) => inner1
         !inner1 OR (inner1 OR inner2) => true
         !inner1 OR (inner1 AND inner2) => !inner1 OR inner2  */
      if (inner1 == true_test_var)
        return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
         Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (t OR inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_or)
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (integer_zerop (t))
                return inner2;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner2 OR (op2a code op2b)))  */
          else if (integer_zerop (t))
            return boolean_false_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (inner1 OR t)
             => (t OR partial)  */
          if (is_or)
            {
              if (integer_zerop (t))
                return inner1;
              else if (integer_onep (t))
                return boolean_true_node;
              /* If both are the same, we can apply the identity
                 (x OR x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner1 OR (op2a code2 op2b)))
             => (t AND partial)  */
          else
            {
              if (integer_zerop (t))
                return boolean_false_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed AND expression.  The
                     interesting case is when at least one is true.
                     Or, if both are the same, we can apply the identity
                     (x AND x) == x.  */
                  if (integer_onep (partial))
                    return t;
                  else if (integer_onep (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
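/* Illustrative sketch (not part of the original sources): given boolean
   SSA names a_1 and b_2 and the GIMPLE fragment

     var_3 = a_1 | b_2;
     t_4 = var_3 | (a_1 != 0);

   calling or_var_with_comparison_1 on the definition of var_3 with
   (a_1 != 0) as the second operand folds t_4 down to var_3: a_1 becomes
   the true_test_var, it equals inner1, and for BIT_IOR_EXPR the identity
   inner1 OR (inner1 OR inner2) == var applies.  */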
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1, code2,
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
         return that or TRUE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
            }
        }
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
            }
        }

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == EQ_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LT_EXPR && code2 == GT_EXPR)
                   || (code1 == GT_EXPR && code2 == LT_EXPR)))
        return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_true_node;
      else if (cmp <= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ORed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     return that.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
                        {
                          if (!result)
                            result = boolean_true_node;
                          else if (!integer_onep (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = or_var_with_comparison (arg, invert, code2,
                                                     op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }

        default:
          break;
        }
    }
  return NULL_TREE;
}
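/* Some concrete foldings this function performs when both comparisons
   test the same variable against integer constants (hypothetical
   values, shown for illustration):

     (x != 3) || (x < 7)   -> true         (3 satisfies x < 7)
     (x != 3) || (x > 7)   -> x != 3       (3 does not satisfy x > 7)
     (x < 5)  || (x <= 7)  -> x <= 7       (less restrictive bound wins)
     (x < 3)  || (x > 3)   -> x != 3       (singleton range)
     (x <= 3) || (x >= 3)  -> true         (bounds do not restrict)  */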
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
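/* A minimal usage sketch (hypothetical; X is an integer SSA name and
   CST5/CST7 are INTEGER_CSTs of the matching type):

     tree t = maybe_fold_or_comparisons (LT_EXPR, x, cst5,
                                         LE_EXPR, x, cst7);

   If T is non-NULL -- here it would be the tree for (x <= 7) -- a caller
   can use it instead of emitting both comparisons plus a BIT_IOR_EXPR.  */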
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, *vec;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for restrict qualification
                 of pointer types should not apply here.
                 Substitution later will only substitute to allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
                     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
                  && TYPE_MODE (TREE_TYPE (lhs))
                     == TYPE_MODE (TREE_TYPE (op0)))
                return op0;

              return
                fold_unary_ignore_overflow_loc (loc, subcode,
                                                gimple_expr_type (stmt), op0);
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

              /* Translate &x + CST into an invariant form suitable for
                 further propagation.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree off = fold_convert (ptr_type_node, op1);
                  return build_fold_addr_expr_loc
                           (loc,
                            fold_build2 (MEM_REF,
                                         TREE_TYPE (TREE_TYPE (op0)),
                                         unshare_expr (op0), off));
                }

              return fold_binary_loc (loc, subcode,
                                      gimple_expr_type (stmt), op0, op1);
            }

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

              /* Fold embedded expressions in ternary codes.  */
              if ((subcode == COND_EXPR
                   || subcode == VEC_COND_EXPR)
                  && COMPARISON_CLASS_P (op0))
                {
                  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
                  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
                  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
                                              TREE_TYPE (op0), op00, op01);
                  if (tem)
                    op0 = tem;
                }

              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;

        if (gimple_call_internal_p (stmt))
          /* No folding yet for these functions.  */
          return NULL_TREE;

        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            call = build_call_array_loc (loc,
                                         gimple_call_return_type (stmt),
                                         fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (EXPR_LOCATION (call), call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
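/* A minimal sketch of a VALUEIZE callback (hypothetical; a real caller
   such as a propagation pass would consult its lattice instead):

     static tree
     no_op_valueize (tree name)
     {
       return name;   // know nothing: leave NAME unchanged
     }

     ... gimple_fold_stmt_to_constant_1 (stmt, no_op_valueize) ...  */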
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
                                 unsigned HOST_WIDE_INT offset,
                                 unsigned HOST_WIDE_INT size, tree);

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with a constant offset (such as when
   BASE is array[40] and we do know the constructor of the array),
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero,
   such as for 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
                      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
        {
          if (!host_integerp (TREE_OPERAND (base, 1), 0))
            return NULL_TREE;
          *bit_offset += (mem_ref_offset (base).low
                          * BITS_PER_UNIT);
        }

      if (valueize
          && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
        base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
      if (!const_value_known_p (base))
        return NULL_TREE;

      /* Fallthru.  */
    case CONST_DECL:
      if (!DECL_INITIAL (base)
          && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
        return error_mark_node;
      /* Do not return an error_mark_node DECL_INITIAL.  LTO uses this
         as special marker (_not_ zero ...) for its own purposes.  */
      if (DECL_INITIAL (base) == error_mark_node)
        return NULL_TREE;
      return DECL_INITIAL (base);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
        return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
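/* For example (illustrative, assuming a 32-bit int target): when folding
   a read from a[4] where
     static const int a[8] = { 0, 1, 2, 3, 4, 5, 6, 7 };
   get_base_constructor resolves BASE to the VAR_DECL a, returns
   DECL_INITIAL (a) and bumps *BIT_OFFSET by 4 * 32 bits, so that
   fold_ctor_reference can pick element 4 out of the CONSTRUCTOR.  */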
/* CTOR is STRING_CST.  Fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.

   We only do the simple job of folding byte accesses.  */

static tree
fold_string_cst_ctor_reference (tree type, tree ctor,
                                unsigned HOST_WIDE_INT offset,
                                unsigned HOST_WIDE_INT size)
{
  if (INTEGRAL_TYPE_P (type)
      && (TYPE_MODE (type)
          == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
          == MODE_INT)
      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
      && size == BITS_PER_UNIT
      && !(offset % BITS_PER_UNIT))
    {
      offset /= BITS_PER_UNIT;
      if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
        return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
                                          [offset]));
      /* Folding
           const char a[20] = "hello";
           return a[10];
         might lead to offset greater than string length.  In this case we
         know the value is either initialized to 0 or out of bounds.  Return 0
         in both cases.  */
      return build_zero_cst (type);
    }
  return NULL_TREE;
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
                           unsigned HOST_WIDE_INT offset,
                           unsigned HOST_WIDE_INT size,
                           tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
              == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than the size of the array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || double_int_cmp (elt_size,
                         tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
                                  elt_size, TRUNC_DIV_EXPR);
  access_index = double_int_add (access_index, low_bound);
  if (index_type)
    access_index = double_int_ext (access_index,
                                   TYPE_PRECISION (index_type),
                                   TYPE_UNSIGNED (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
    return NULL_TREE;

  index = double_int_sub (low_bound, double_int_one);
  if (index_type)
    index = double_int_ext (index,
                            TYPE_PRECISION (index_type),
                            TYPE_UNSIGNED (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* An array constructor might explicitly set the index, specify a
         range, or leave the index NULL, meaning it is the next index after
         the previous one.  */
      if (cfield)
        {
          if (TREE_CODE (cfield) == INTEGER_CST)
            max_index = index = tree_to_double_int (cfield);
          else
            {
              gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
              index = tree_to_double_int (TREE_OPERAND (cfield, 0));
              max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
            }
        }
      else
        {
          index = double_int_add (index, double_int_one);
          if (index_type)
            index = double_int_ext (index,
                                    TYPE_PRECISION (index_type),
                                    TYPE_UNSIGNED (index_type));
          max_index = index;
        }

      /* Do we have a match?  */
      if (double_int_cmp (access_index, index, 1) >= 0
          && double_int_cmp (access_index, max_index, 1) <= 0)
        return fold_ctor_reference (type, cval, inner_offset, size,
                                    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
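/* Worked example (illustrative, assuming 32-bit int and BITS_PER_UNIT
   of 8): reading 32 bits at bit OFFSET 96 from int a[8] gives
     access_index = (96 / 8) / 4 = 3,  inner_offset = 96 % 32 = 0,
   so the access maps exactly onto the constructor element for a[3].  */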
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
                              unsigned HOST_WIDE_INT offset,
                              unsigned HOST_WIDE_INT size,
                              tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
                            cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
      double_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
         but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
                  && TREE_CODE (byte_offset) == INTEGER_CST
                  && (field_size != NULL_TREE
                      ? TREE_CODE (field_size) == INTEGER_CST
                      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = double_int_add (tree_to_double_int (field_offset),
                                  double_int_mul (byte_offset_cst,
                                                  bits_per_unit_cst));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
        bitoffset_end = double_int_add (bitoffset,
                                        tree_to_double_int (field_size));
      else
        bitoffset_end = double_int_zero;

      access_end = double_int_add (uhwi_to_double_int (offset),
                                   uhwi_to_double_int (size));

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
         [BITOFFSET, BITOFFSET_END)?  */
      if (double_int_cmp (access_end, bitoffset, 0) > 0
          && (field_size == NULL_TREE
              || double_int_cmp (uhwi_to_double_int (offset),
                                 bitoffset_end, 0) < 0))
        {
          double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
                                                    bitoffset);
          /* We do have overlap.  Now see if the field is large enough to
             cover the access.  Give up for accesses spanning multiple
             fields.  */
          if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
            return NULL_TREE;
          if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
            return NULL_TREE;
          return fold_ctor_reference (type, cval,
                                      double_int_to_uhwi (inner_offset), size,
                                      from_decl);
        }
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
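/* Worked example (illustrative, 32-bit int): for
     struct { int x; int y; } s;
   a read of SIZE 32 at OFFSET 32 sees field y with bitoffset 32 and
   bitoffset_end 64; [32, 64) covers the access completely, so we
   recurse into y's initializer with inner_offset 0.  */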
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
                     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (ctor, from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && operand_equal_p (TYPE_SIZE (type),
                          TYPE_SIZE (TREE_TYPE (ctor)), 0))
    {
      ret = canonicalize_constructor_val (ctor, from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
        STRIP_NOPS (ret);
      return ret;
    }
  if (TREE_CODE (ctor) == STRING_CST)
    return fold_string_cst_ctor_reference (type, ctor, offset, size);
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
        return fold_array_ctor_reference (type, ctor, offset, size,
                                          from_decl);
      else
        return fold_nonarray_ctor_reference (type, ctor, offset, size,
                                             from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
         Only special case variable offsets.
         FIXME: This code can't handle nested references with variable indexes
         (they will be handled only by iteration of ccp).  Perhaps we can bring
         get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
          && valueize
          && (idx = (*valueize) (TREE_OPERAND (t, 1)))
          && TREE_CODE (idx) == INTEGER_CST)
        {
          tree low_bound, unit_size;
          double_int doffset;

          /* If the resulting bit-offset is constant, track it.  */
          if ((low_bound = array_ref_low_bound (t),
               TREE_CODE (low_bound) == INTEGER_CST)
              && (unit_size = array_ref_element_size (t),
                  host_integerp (unit_size, 1))
              && (doffset = double_int_sext
                            (double_int_sub (TREE_INT_CST (idx),
                                             TREE_INT_CST (low_bound)),
                             TYPE_PRECISION (TREE_TYPE (idx))),
                  double_int_fits_in_shwi_p (doffset)))
            {
              offset = double_int_to_shwi (doffset);
              offset *= TREE_INT_CST_LOW (unit_size);
              offset *= BITS_PER_UNIT;

              base = TREE_OPERAND (t, 0);
              ctor = get_base_constructor (base, &offset, valueize);
              /* Empty constructor.  Always fold to 0.  */
              if (ctor == error_mark_node)
                return build_zero_cst (TREE_TYPE (t));
              /* Out of bound array access.  Value is undefined,
                 but don't fold.  */
              if (offset < 0)
                return NULL_TREE;
              /* We cannot determine ctor.  */
              if (!ctor)
                return NULL_TREE;
              return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
                                          TREE_INT_CST_LOW (unit_size)
                                          * BITS_PER_UNIT,
                                          base);
            }
        }
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
        return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
        return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
        return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
        return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
                                  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a constant aggregate reference without valueization.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
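/* For instance (illustrative):
     static const int a[2] = { 1, 2 };
   a read of a[1] reaching this function folds to the INTEGER_CST 2;
   no valueize callback is needed because the index is constant.  */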
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  unsigned HOST_WIDE_INT offset, size;
  tree v, fn, vtable;

  vtable = v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
      v = TREE_OPERAND (v, 0);
    }
  else
    offset = 0;

  if (TREE_CODE (v) != ADDR_EXPR)
    return NULL_TREE;
  v = TREE_OPERAND (v, 0);

  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v)
      || !DECL_INITIAL (v)
      || DECL_INITIAL (v) == error_mark_node)
    return NULL_TREE;
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
  offset += token * size;
  fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
                            offset, size, vtable);
  if (!fn || integer_zerop (fn))
    return NULL_TREE;
  gcc_assert (TREE_CODE (fn) == ADDR_EXPR
              || TREE_CODE (fn) == FDESC_EXPR);
  fn = TREE_OPERAND (fn, 0);
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);

  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (fn, vtable))
    return NULL_TREE;

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
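/* Illustrative arithmetic (hypothetical C++ input): if BINFO_VTABLE is
   &_ZTV1A + 16 and the vtable slots are 8 bytes (SIZE == 64), then for
   TOKEN == 2 we read DECL_INITIAL (_ZTV1A) at bit offset
   16 * BITS_PER_UNIT + 2 * 64 == 256, i.e. byte 32 of the vtable
   initializer.  */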
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
         cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
        {
        case ABS_EXPR:
          /* Always true for floating-point operands.  */
          return true;

        case MULT_EXPR:
          /* True if the two operands are identical (since we are
             restricted to floating-point inputs).  */
          op0 = gimple_assign_rhs1 (def_stmt);
          op1 = gimple_assign_rhs2 (def_stmt);

          if (op0 == op1
              || operand_equal_p (op0, op1, 0))
            return true;

        default:
          break;
        }
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
        {
          tree arg1;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
              return true;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
                 negative inputs.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POWI):
              /* True if the second argument is an even integer.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == INTEGER_CST
                  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POW):
              /* True if the second argument is an even integer-valued
                 real.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == REAL_CST)
                {
                  REAL_VALUE_TYPE c;
                  HOST_WIDE_INT n;

                  c = TREE_REAL_CST (arg1);
                  n = real_to_integer (&c);

                  if ((n & 1) == 0)
                    {
                      REAL_VALUE_TYPE cint;
                      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
                      if (real_identical (&c, &cint))
                        return true;
                    }
                }

              break;

            default:
              break;
            }
        }
    }

  return false;
}
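/* Example (illustrative): for
     t_1 = __builtin_pow (x_2, 2.0);
   the REAL_CST 2.0 round-trips through real_to_integer /
   real_from_integer to an identical even value, so t_1 is known
   non-negative with no knowledge about x_2 at all.  */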