1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "tree-flow.h"
30 #include "tree-ssa-propagate.h"
32 #include "gimple-fold.h"
34 /* Return true when DECL can be referenced from current unit.
35 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
36 We can get declarations that are not possible to reference for various
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsewhere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final compilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
57 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
59 struct varpool_node
*vnode
;
60 struct cgraph_node
*node
;
63 /* We will later output the initializer, so we can refer to it.
64 So we are concerned only when DECL comes from initializer of
67 || TREE_CODE (from_decl
) != VAR_DECL
68 || !DECL_EXTERNAL (from_decl
)
70 && symtab_get_node (from_decl
)->symbol
.in_other_partition
))
72 /* We are concerned only about static/external vars and functions. */
73 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
74 || (TREE_CODE (decl
) != VAR_DECL
&& TREE_CODE (decl
) != FUNCTION_DECL
))
76 /* Weakrefs have somewhat confusing DECL_EXTERNAL flag set; they
78 if (DECL_EXTERNAL (decl
)
79 && lookup_attribute ("weakref", DECL_ATTRIBUTES (decl
)))
81 /* We are folding reference from external vtable. The vtable may reffer
82 to a symbol keyed to other compilation unit. The other compilation
83 unit may be in separate DSO and the symbol may be hidden. */
84 if (DECL_VISIBILITY_SPECIFIED (decl
)
85 && DECL_EXTERNAL (decl
)
86 && (!(snode
= symtab_get_node (decl
)) || !snode
->symbol
.in_other_partition
))
88 /* When function is public, we always can introduce new reference.
89 Exception are the COMDAT functions where introducing a direct
90 reference imply need to include function body in the curren tunit. */
91 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
93 /* We are not at ltrans stage; so don't worry about WHOPR.
94 Also when still gimplifying all referred comdat functions will be
97 As observed in PR20991 for already optimized out comdat virtual functions
98 it may be tempting to not necessarily give up because the copy will be
99 output elsewhere when corresponding vtable is output.
100 This is however not possible - ABI specify that COMDATs are output in
101 units where they are used and when the other unit was compiled with LTO
102 it is possible that vtable was kept public while the function itself
104 if (!flag_ltrans
&& (!DECL_COMDAT (decl
) || !cgraph_function_flags_ready
))
107 /* OK we are seeing either COMDAT or static variable. In this case we must
108 check that the definition is still around so we can refer it. */
109 if (TREE_CODE (decl
) == FUNCTION_DECL
)
111 node
= cgraph_get_node (decl
);
112 /* Check that we still have function body and that we didn't took
113 the decision to eliminate offline copy of the function yet.
114 The second is important when devirtualization happens during final
115 compilation stage when making a new reference no longer makes callee
117 if (!node
|| !node
->analyzed
|| node
->global
.inlined_to
)
119 gcc_checking_assert (!TREE_ASM_WRITTEN (decl
));
123 else if (TREE_CODE (decl
) == VAR_DECL
)
125 vnode
= varpool_get_node (decl
);
126 if (!vnode
|| !vnode
->analyzed
)
128 gcc_checking_assert (!TREE_ASM_WRITTEN (decl
));
135 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
136 acceptable form for is_gimple_min_invariant.
137 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
140 canonicalize_constructor_val (tree cval
, tree from_decl
)
142 STRIP_USELESS_TYPE_CONVERSION (cval
);
143 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
144 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
146 tree ptr
= TREE_OPERAND (cval
, 0);
147 if (is_gimple_min_invariant (ptr
))
148 cval
= build1_loc (EXPR_LOCATION (cval
),
149 ADDR_EXPR
, TREE_TYPE (ptr
),
150 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
152 fold_convert (ptr_type_node
,
153 TREE_OPERAND (cval
, 1))));
155 if (TREE_CODE (cval
) == ADDR_EXPR
)
157 tree base
= get_base_address (TREE_OPERAND (cval
, 0));
158 if (!base
&& TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
160 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
162 TREE_OPERAND (cval
, 0) = base
;
167 if ((TREE_CODE (base
) == VAR_DECL
168 || TREE_CODE (base
) == FUNCTION_DECL
)
169 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
171 if (TREE_CODE (base
) == VAR_DECL
)
172 TREE_ADDRESSABLE (base
) = 1;
173 else if (TREE_CODE (base
) == FUNCTION_DECL
)
175 /* Make sure we create a cgraph node for functions we'll reference.
176 They can be non-existent if the reference comes from an entry
177 of an external vtable for example. */
178 cgraph_get_create_node (base
);
180 /* Fixup types in global initializers. */
181 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
182 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
187 /* If SYM is a constant variable with known value, return the value.
188 NULL_TREE is returned otherwise. */
191 get_symbol_constant_value (tree sym
)
193 if (const_value_known_p (sym
))
195 tree val
= DECL_INITIAL (sym
);
198 val
= canonicalize_constructor_val (val
, sym
);
199 if (val
&& is_gimple_min_invariant (val
))
204 /* Variables declared 'const' without an initializer
205 have zero as the initializer if they may not be
206 overridden at link or run time. */
208 && (INTEGRAL_TYPE_P (TREE_TYPE (sym
))
209 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym
))))
210 return build_zero_cst (TREE_TYPE (sym
));
218 /* Subroutine of fold_stmt. We perform several simplifications of the
219 memory reference tree EXPR and make sure to re-gimplify them properly
220 after propagation of constant addresses. IS_LHS is true if the
221 reference is supposed to be an lvalue. */
224 maybe_fold_reference (tree expr
, bool is_lhs
)
229 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
230 || TREE_CODE (expr
) == REALPART_EXPR
231 || TREE_CODE (expr
) == IMAGPART_EXPR
)
232 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
233 return fold_unary_loc (EXPR_LOCATION (expr
),
236 TREE_OPERAND (expr
, 0));
237 else if (TREE_CODE (expr
) == BIT_FIELD_REF
238 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
239 return fold_ternary_loc (EXPR_LOCATION (expr
),
242 TREE_OPERAND (expr
, 0),
243 TREE_OPERAND (expr
, 1),
244 TREE_OPERAND (expr
, 2));
246 while (handled_component_p (*t
))
247 t
= &TREE_OPERAND (*t
, 0);
249 /* Canonicalize MEM_REFs invariant address operand. Do this first
250 to avoid feeding non-canonical MEM_REFs elsewhere. */
251 if (TREE_CODE (*t
) == MEM_REF
252 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)))
254 bool volatile_p
= TREE_THIS_VOLATILE (*t
);
255 tree tem
= fold_binary (MEM_REF
, TREE_TYPE (*t
),
256 TREE_OPERAND (*t
, 0),
257 TREE_OPERAND (*t
, 1));
260 TREE_THIS_VOLATILE (tem
) = volatile_p
;
262 tem
= maybe_fold_reference (expr
, is_lhs
);
270 && (result
= fold_const_aggregate_ref (expr
))
271 && is_gimple_min_invariant (result
))
274 /* Fold back MEM_REFs to reference trees. */
275 if (TREE_CODE (*t
) == MEM_REF
276 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
277 && integer_zerop (TREE_OPERAND (*t
, 1))
278 && (TREE_THIS_VOLATILE (*t
)
279 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0)))
280 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t
, 1)))
281 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t
))
282 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t
, 1)))))
283 /* We have to look out here to not drop a required conversion
284 from the rhs to the lhs if is_lhs, but we don't have the
285 rhs here to verify that. Thus require strict type
287 && types_compatible_p (TREE_TYPE (*t
),
288 TREE_TYPE (TREE_OPERAND
289 (TREE_OPERAND (*t
, 0), 0))))
292 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
293 tem
= maybe_fold_reference (expr
, is_lhs
);
298 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
300 tree tem
= maybe_fold_tmr (*t
);
304 tem
= maybe_fold_reference (expr
, is_lhs
);
315 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
316 replacement rhs for the statement or NULL_TREE if no simplification
317 could be made. It is assumed that the operands have been previously
321 fold_gimple_assign (gimple_stmt_iterator
*si
)
323 gimple stmt
= gsi_stmt (*si
);
324 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
325 location_t loc
= gimple_location (stmt
);
327 tree result
= NULL_TREE
;
329 switch (get_gimple_rhs_class (subcode
))
331 case GIMPLE_SINGLE_RHS
:
333 tree rhs
= gimple_assign_rhs1 (stmt
);
335 if (REFERENCE_CLASS_P (rhs
))
336 return maybe_fold_reference (rhs
, false);
338 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
340 tree ref
= TREE_OPERAND (rhs
, 0);
341 tree tem
= maybe_fold_reference (ref
, true);
343 && TREE_CODE (tem
) == MEM_REF
344 && integer_zerop (TREE_OPERAND (tem
, 1)))
345 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
347 result
= fold_convert (TREE_TYPE (rhs
),
348 build_fold_addr_expr_loc (loc
, tem
));
349 else if (TREE_CODE (ref
) == MEM_REF
350 && integer_zerop (TREE_OPERAND (ref
, 1)))
351 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
354 else if (TREE_CODE (rhs
) == CONSTRUCTOR
355 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
356 && (CONSTRUCTOR_NELTS (rhs
)
357 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
359 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
363 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
364 if (TREE_CODE (val
) != INTEGER_CST
365 && TREE_CODE (val
) != REAL_CST
366 && TREE_CODE (val
) != FIXED_CST
)
369 return build_vector_from_ctor (TREE_TYPE (rhs
),
370 CONSTRUCTOR_ELTS (rhs
));
373 else if (DECL_P (rhs
))
374 return unshare_expr (get_symbol_constant_value (rhs
));
376 /* If we couldn't fold the RHS, hand over to the generic
378 if (result
== NULL_TREE
)
381 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
382 that may have been added by fold, and "useless" type
383 conversions that might now be apparent due to propagation. */
384 STRIP_USELESS_TYPE_CONVERSION (result
);
386 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
393 case GIMPLE_UNARY_RHS
:
395 tree rhs
= gimple_assign_rhs1 (stmt
);
397 result
= fold_unary_loc (loc
, subcode
, gimple_expr_type (stmt
), rhs
);
400 /* If the operation was a conversion do _not_ mark a
401 resulting constant with TREE_OVERFLOW if the original
402 constant was not. These conversions have implementation
403 defined behavior and retaining the TREE_OVERFLOW flag
404 here would confuse later passes such as VRP. */
405 if (CONVERT_EXPR_CODE_P (subcode
)
406 && TREE_CODE (result
) == INTEGER_CST
407 && TREE_CODE (rhs
) == INTEGER_CST
)
408 TREE_OVERFLOW (result
) = TREE_OVERFLOW (rhs
);
410 STRIP_USELESS_TYPE_CONVERSION (result
);
411 if (valid_gimple_rhs_p (result
))
417 case GIMPLE_BINARY_RHS
:
418 /* Try to canonicalize for boolean-typed X the comparisons
419 X == 0, X == 1, X != 0, and X != 1. */
420 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
421 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
423 tree lhs
= gimple_assign_lhs (stmt
);
424 tree op1
= gimple_assign_rhs1 (stmt
);
425 tree op2
= gimple_assign_rhs2 (stmt
);
426 tree type
= TREE_TYPE (op1
);
428 /* Check whether the comparison operands are of the same boolean
429 type as the result type is.
430 Check that second operand is an integer-constant with value
432 if (TREE_CODE (op2
) == INTEGER_CST
433 && (integer_zerop (op2
) || integer_onep (op2
))
434 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
436 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
437 bool is_logical_not
= false;
439 /* X == 0 and X != 1 is a logical-not.of X
440 X == 1 and X != 0 is X */
441 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
442 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
443 is_logical_not
= true;
445 if (is_logical_not
== false)
447 /* Only for one-bit precision typed X the transformation
448 !X -> ~X is valied. */
449 else if (TYPE_PRECISION (type
) == 1)
450 result
= build1_loc (gimple_location (stmt
), BIT_NOT_EXPR
,
452 /* Otherwise we use !X -> X ^ 1. */
454 result
= build2_loc (gimple_location (stmt
), BIT_XOR_EXPR
,
455 type
, op1
, build_int_cst (type
, 1));
461 result
= fold_binary_loc (loc
, subcode
,
462 TREE_TYPE (gimple_assign_lhs (stmt
)),
463 gimple_assign_rhs1 (stmt
),
464 gimple_assign_rhs2 (stmt
));
468 STRIP_USELESS_TYPE_CONVERSION (result
);
469 if (valid_gimple_rhs_p (result
))
474 case GIMPLE_TERNARY_RHS
:
475 /* Try to fold a conditional expression. */
476 if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
478 tree op0
= gimple_assign_rhs1 (stmt
);
481 location_t cond_loc
= gimple_location (stmt
);
483 if (COMPARISON_CLASS_P (op0
))
485 fold_defer_overflow_warnings ();
486 tem
= fold_binary_loc (cond_loc
,
487 TREE_CODE (op0
), TREE_TYPE (op0
),
488 TREE_OPERAND (op0
, 0),
489 TREE_OPERAND (op0
, 1));
490 /* This is actually a conditional expression, not a GIMPLE
491 conditional statement, however, the valid_gimple_rhs_p
492 test still applies. */
493 set
= (tem
&& is_gimple_condexpr (tem
)
494 && valid_gimple_rhs_p (tem
));
495 fold_undefer_overflow_warnings (set
, stmt
, 0);
497 else if (is_gimple_min_invariant (op0
))
506 result
= fold_build3_loc (cond_loc
, COND_EXPR
,
507 TREE_TYPE (gimple_assign_lhs (stmt
)), tem
,
508 gimple_assign_rhs2 (stmt
),
509 gimple_assign_rhs3 (stmt
));
513 result
= fold_ternary_loc (loc
, subcode
,
514 TREE_TYPE (gimple_assign_lhs (stmt
)),
515 gimple_assign_rhs1 (stmt
),
516 gimple_assign_rhs2 (stmt
),
517 gimple_assign_rhs3 (stmt
));
521 STRIP_USELESS_TYPE_CONVERSION (result
);
522 if (valid_gimple_rhs_p (result
))
527 case GIMPLE_INVALID_RHS
:
534 /* Attempt to fold a conditional statement. Return true if any changes were
535 made. We only attempt to fold the condition expression, and do not perform
536 any transformation that would require alteration of the cfg. It is
537 assumed that the operands have been previously folded. */
540 fold_gimple_cond (gimple stmt
)
542 tree result
= fold_binary_loc (gimple_location (stmt
),
543 gimple_cond_code (stmt
),
545 gimple_cond_lhs (stmt
),
546 gimple_cond_rhs (stmt
));
550 STRIP_USELESS_TYPE_CONVERSION (result
);
551 if (is_gimple_condexpr (result
) && valid_gimple_rhs_p (result
))
553 gimple_cond_set_condition_from_tree (stmt
, result
);
561 /* Convert EXPR into a GIMPLE value suitable for substitution on the
562 RHS of an assignment. Insert the necessary statements before
563 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
564 is replaced. If the call is expected to produce a result, then it
565 is replaced by an assignment of the new RHS to the result variable.
566 If the result is to be ignored, then the call is replaced by a
567 GIMPLE_NOP. A proper VDEF chain is retained by making the first
568 VUSE and the last VDEF of the whole sequence be the same as the replaced
569 statement and using new SSA names for stores in between. */
572 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
575 gimple stmt
, new_stmt
;
576 gimple_stmt_iterator i
;
577 gimple_seq stmts
= NULL
;
578 struct gimplify_ctx gctx
;
582 stmt
= gsi_stmt (*si_p
);
584 gcc_assert (is_gimple_call (stmt
));
586 push_gimplify_context (&gctx
);
587 gctx
.into_ssa
= gimple_in_ssa_p (cfun
);
589 lhs
= gimple_call_lhs (stmt
);
590 if (lhs
== NULL_TREE
)
592 gimplify_and_add (expr
, &stmts
);
593 /* We can end up with folding a memcpy of an empty class assignment
594 which gets optimized away by C++ gimplification. */
595 if (gimple_seq_empty_p (stmts
))
597 pop_gimplify_context (NULL
);
598 if (gimple_in_ssa_p (cfun
))
600 unlink_stmt_vdef (stmt
);
603 gsi_remove (si_p
, true);
609 tree tmp
= get_initialized_tmp_var (expr
, &stmts
, NULL
);
610 new_stmt
= gimple_build_assign (lhs
, tmp
);
611 i
= gsi_last (stmts
);
612 gsi_insert_after_without_update (&i
, new_stmt
,
613 GSI_CONTINUE_LINKING
);
616 pop_gimplify_context (NULL
);
618 if (gimple_has_location (stmt
))
619 annotate_all_with_location (stmts
, gimple_location (stmt
));
621 /* First iterate over the replacement statements backward, assigning
622 virtual operands to their defining statements. */
624 for (i
= gsi_last (stmts
); !gsi_end_p (i
); gsi_prev (&i
))
626 new_stmt
= gsi_stmt (i
);
627 if ((gimple_assign_single_p (new_stmt
)
628 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
629 || (is_gimple_call (new_stmt
)
630 && (gimple_call_flags (new_stmt
)
631 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
635 vdef
= gimple_vdef (stmt
);
637 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
638 gimple_set_vdef (new_stmt
, vdef
);
639 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
640 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
641 laststore
= new_stmt
;
645 /* Second iterate over the statements forward, assigning virtual
646 operands to their uses. */
647 reaching_vuse
= gimple_vuse (stmt
);
648 for (i
= gsi_start (stmts
); !gsi_end_p (i
); gsi_next (&i
))
650 new_stmt
= gsi_stmt (i
);
651 /* If the new statement possibly has a VUSE, update it with exact SSA
652 name we know will reach this one. */
653 if (gimple_has_mem_ops (new_stmt
))
654 gimple_set_vuse (new_stmt
, reaching_vuse
);
655 gimple_set_modified (new_stmt
, true);
656 if (gimple_vdef (new_stmt
))
657 reaching_vuse
= gimple_vdef (new_stmt
);
660 /* If the new sequence does not do a store release the virtual
661 definition of the original statement. */
663 && reaching_vuse
== gimple_vuse (stmt
))
665 tree vdef
= gimple_vdef (stmt
);
667 && TREE_CODE (vdef
) == SSA_NAME
)
669 unlink_stmt_vdef (stmt
);
670 release_ssa_name (vdef
);
674 /* Finally replace the original statement with the sequence. */
675 gsi_replace_with_seq (si_p
, stmts
, false);
678 /* Return the string length, maximum string length or maximum value of
680 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
681 is not NULL and, for TYPE == 0, its value is not equal to the length
682 we determine or if we are unable to determine the length or value,
683 return false. VISITED is a bitmap of visited variables.
684 TYPE is 0 if string length should be returned, 1 for maximum string
685 length and 2 for maximum value ARG can have. */
688 get_maxval_strlen (tree arg
, tree
*length
, bitmap visited
, int type
)
693 if (TREE_CODE (arg
) != SSA_NAME
)
695 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
696 if (TREE_CODE (arg
) == ADDR_EXPR
697 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
698 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg
, 0), 1)))
700 tree aop0
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
701 if (TREE_CODE (aop0
) == INDIRECT_REF
702 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
703 return get_maxval_strlen (TREE_OPERAND (aop0
, 0),
704 length
, visited
, type
);
710 if (TREE_CODE (val
) != INTEGER_CST
711 || tree_int_cst_sgn (val
) < 0)
715 val
= c_strlen (arg
, 1);
723 if (TREE_CODE (*length
) != INTEGER_CST
724 || TREE_CODE (val
) != INTEGER_CST
)
727 if (tree_int_cst_lt (*length
, val
))
731 else if (simple_cst_equal (val
, *length
) != 1)
739 /* If ARG is registered for SSA update we cannot look at its defining
741 if (name_registered_for_update_p (arg
))
744 /* If we were already here, break the infinite cycle. */
745 if (!bitmap_set_bit (visited
, SSA_NAME_VERSION (arg
)))
749 def_stmt
= SSA_NAME_DEF_STMT (var
);
751 switch (gimple_code (def_stmt
))
754 /* The RHS of the statement defining VAR must either have a
755 constant length or come from another SSA_NAME with a constant
757 if (gimple_assign_single_p (def_stmt
)
758 || gimple_assign_unary_nop_p (def_stmt
))
760 tree rhs
= gimple_assign_rhs1 (def_stmt
);
761 return get_maxval_strlen (rhs
, length
, visited
, type
);
763 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
765 tree op2
= gimple_assign_rhs2 (def_stmt
);
766 tree op3
= gimple_assign_rhs3 (def_stmt
);
767 return get_maxval_strlen (op2
, length
, visited
, type
)
768 && get_maxval_strlen (op3
, length
, visited
, type
);
774 /* All the arguments of the PHI node must have the same constant
778 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
780 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
782 /* If this PHI has itself as an argument, we cannot
783 determine the string length of this argument. However,
784 if we can find a constant string length for the other
785 PHI args then we can still be sure that this is a
786 constant string length. So be optimistic and just
787 continue with the next argument. */
788 if (arg
== gimple_phi_result (def_stmt
))
791 if (!get_maxval_strlen (arg
, length
, visited
, type
))
803 /* Fold builtin call in statement STMT. Returns a simplified tree.
804 We may return a non-constant expression, including another call
805 to a different function and with different arguments, e.g.,
806 substituting memcpy for strcpy when the string length is known.
807 Note that some builtins expand into inline code that may not
808 be valid in GIMPLE. Callers must take care. */
811 gimple_fold_builtin (gimple stmt
)
819 location_t loc
= gimple_location (stmt
);
821 gcc_assert (is_gimple_call (stmt
));
823 ignore
= (gimple_call_lhs (stmt
) == NULL
);
825 /* First try the generic builtin folder. If that succeeds, return the
827 result
= fold_call_stmt (stmt
, ignore
);
835 /* Ignore MD builtins. */
836 callee
= gimple_call_fndecl (stmt
);
837 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_MD
)
840 /* Give up for always_inline inline builtins until they are
842 if (avoid_folding_inline_builtin (callee
))
845 /* If the builtin could not be folded, and it has no argument list,
847 nargs
= gimple_call_num_args (stmt
);
851 /* Limit the work only for builtins we know how to simplify. */
852 switch (DECL_FUNCTION_CODE (callee
))
854 case BUILT_IN_STRLEN
:
856 case BUILT_IN_FPUTS_UNLOCKED
:
860 case BUILT_IN_STRCPY
:
861 case BUILT_IN_STRNCPY
:
865 case BUILT_IN_MEMCPY_CHK
:
866 case BUILT_IN_MEMPCPY_CHK
:
867 case BUILT_IN_MEMMOVE_CHK
:
868 case BUILT_IN_MEMSET_CHK
:
869 case BUILT_IN_STRNCPY_CHK
:
870 case BUILT_IN_STPNCPY_CHK
:
874 case BUILT_IN_STRCPY_CHK
:
875 case BUILT_IN_STPCPY_CHK
:
879 case BUILT_IN_SNPRINTF_CHK
:
880 case BUILT_IN_VSNPRINTF_CHK
:
888 if (arg_idx
>= nargs
)
891 /* Try to use the dataflow information gathered by the CCP process. */
892 visited
= BITMAP_ALLOC (NULL
);
893 bitmap_clear (visited
);
895 memset (val
, 0, sizeof (val
));
896 a
= gimple_call_arg (stmt
, arg_idx
);
897 if (!get_maxval_strlen (a
, &val
[arg_idx
], visited
, type
))
898 val
[arg_idx
] = NULL_TREE
;
900 BITMAP_FREE (visited
);
903 switch (DECL_FUNCTION_CODE (callee
))
905 case BUILT_IN_STRLEN
:
906 if (val
[0] && nargs
== 1)
909 fold_convert (TREE_TYPE (gimple_call_lhs (stmt
)), val
[0]);
911 /* If the result is not a valid gimple value, or not a cast
912 of a valid gimple value, then we cannot use the result. */
913 if (is_gimple_val (new_val
)
914 || (CONVERT_EXPR_P (new_val
)
915 && is_gimple_val (TREE_OPERAND (new_val
, 0))))
920 case BUILT_IN_STRCPY
:
921 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 2)
922 result
= fold_builtin_strcpy (loc
, callee
,
923 gimple_call_arg (stmt
, 0),
924 gimple_call_arg (stmt
, 1),
928 case BUILT_IN_STRNCPY
:
929 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
930 result
= fold_builtin_strncpy (loc
, callee
,
931 gimple_call_arg (stmt
, 0),
932 gimple_call_arg (stmt
, 1),
933 gimple_call_arg (stmt
, 2),
939 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
940 gimple_call_arg (stmt
, 1),
941 ignore
, false, val
[0]);
944 case BUILT_IN_FPUTS_UNLOCKED
:
946 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
947 gimple_call_arg (stmt
, 1),
948 ignore
, true, val
[0]);
951 case BUILT_IN_MEMCPY_CHK
:
952 case BUILT_IN_MEMPCPY_CHK
:
953 case BUILT_IN_MEMMOVE_CHK
:
954 case BUILT_IN_MEMSET_CHK
:
955 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
956 result
= fold_builtin_memory_chk (loc
, callee
,
957 gimple_call_arg (stmt
, 0),
958 gimple_call_arg (stmt
, 1),
959 gimple_call_arg (stmt
, 2),
960 gimple_call_arg (stmt
, 3),
962 DECL_FUNCTION_CODE (callee
));
965 case BUILT_IN_STRCPY_CHK
:
966 case BUILT_IN_STPCPY_CHK
:
967 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
968 result
= fold_builtin_stxcpy_chk (loc
, callee
,
969 gimple_call_arg (stmt
, 0),
970 gimple_call_arg (stmt
, 1),
971 gimple_call_arg (stmt
, 2),
973 DECL_FUNCTION_CODE (callee
));
976 case BUILT_IN_STRNCPY_CHK
:
977 case BUILT_IN_STPNCPY_CHK
:
978 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
979 result
= fold_builtin_stxncpy_chk (loc
, gimple_call_arg (stmt
, 0),
980 gimple_call_arg (stmt
, 1),
981 gimple_call_arg (stmt
, 2),
982 gimple_call_arg (stmt
, 3),
984 DECL_FUNCTION_CODE (callee
));
987 case BUILT_IN_SNPRINTF_CHK
:
988 case BUILT_IN_VSNPRINTF_CHK
:
989 if (val
[1] && is_gimple_val (val
[1]))
990 result
= gimple_fold_builtin_snprintf_chk (stmt
, val
[1],
991 DECL_FUNCTION_CODE (callee
));
998 if (result
&& ignore
)
999 result
= fold_ignored_result (result
);
1004 /* Return a binfo to be used for devirtualization of calls based on an object
1005 represented by a declaration (i.e. a global or automatically allocated one)
1006 or NULL if it cannot be found or is not safe. CST is expected to be an
1007 ADDR_EXPR of such object or the function will return NULL. Currently it is
1008 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
1011 gimple_extract_devirt_binfo_from_cst (tree cst
)
1013 HOST_WIDE_INT offset
, size
, max_size
;
1014 tree base
, type
, expected_type
, binfo
;
1015 bool last_artificial
= false;
1017 if (!flag_devirtualize
1018 || TREE_CODE (cst
) != ADDR_EXPR
1019 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst
))) != RECORD_TYPE
)
1022 cst
= TREE_OPERAND (cst
, 0);
1023 expected_type
= TREE_TYPE (cst
);
1024 base
= get_ref_base_and_extent (cst
, &offset
, &size
, &max_size
);
1025 type
= TREE_TYPE (base
);
1029 || TREE_CODE (type
) != RECORD_TYPE
)
1032 /* Find the sub-object the constant actually refers to and mark whether it is
1033 an artificial one (as opposed to a user-defined one). */
1036 HOST_WIDE_INT pos
, size
;
1039 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (expected_type
))
1044 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
1046 if (TREE_CODE (fld
) != FIELD_DECL
)
1049 pos
= int_bit_position (fld
);
1050 size
= tree_low_cst (DECL_SIZE (fld
), 1);
1051 if (pos
<= offset
&& (pos
+ size
) > offset
)
1054 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
1057 last_artificial
= DECL_ARTIFICIAL (fld
);
1058 type
= TREE_TYPE (fld
);
1061 /* Artificial sub-objects are ancestors, we do not want to use them for
1062 devirtualization, at least not here. */
1063 if (last_artificial
)
1065 binfo
= TYPE_BINFO (type
);
1066 if (!binfo
|| BINFO_N_BASE_BINFOS (binfo
) > 0)
1072 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1073 The statement may be replaced by another statement, e.g., if the call
1074 simplifies to a constant value. Return true if any changes were made.
1075 It is assumed that the operands have been previously folded. */
1078 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
1080 gimple stmt
= gsi_stmt (*gsi
);
1082 bool changed
= false;
1085 /* Fold *& in call arguments. */
1086 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1087 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
1089 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
1092 gimple_call_set_arg (stmt
, i
, tmp
);
1097 /* Check for virtual calls that became direct calls. */
1098 callee
= gimple_call_fn (stmt
);
1099 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
1101 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
1103 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
1108 tree obj
= OBJ_TYPE_REF_OBJECT (callee
);
1109 tree binfo
= gimple_extract_devirt_binfo_from_cst (obj
);
1113 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee
));
1114 tree fndecl
= gimple_get_virt_method_for_binfo (token
, binfo
);
1117 gimple_call_set_fndecl (stmt
, fndecl
);
1127 /* Check for builtins that CCP can handle using information not
1128 available in the generic fold routines. */
1129 callee
= gimple_call_fndecl (stmt
);
1130 if (callee
&& DECL_BUILT_IN (callee
))
1132 tree result
= gimple_fold_builtin (stmt
);
1135 if (!update_call_from_tree (gsi
, result
))
1136 gimplify_and_update_call_from_tree (gsi
, result
);
1144 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1145 distinguishes both cases. */
1148 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
)
1150 bool changed
= false;
1151 gimple stmt
= gsi_stmt (*gsi
);
1153 gimple_stmt_iterator gsinext
= *gsi
;
1156 gsi_next (&gsinext
);
1157 next_stmt
= gsi_end_p (gsinext
) ? NULL
: gsi_stmt (gsinext
);
1159 /* Fold the main computation performed by the statement. */
1160 switch (gimple_code (stmt
))
1164 unsigned old_num_ops
= gimple_num_ops (stmt
);
1165 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
1166 tree lhs
= gimple_assign_lhs (stmt
);
1168 /* First canonicalize operand order. This avoids building new
1169 trees if this is the only thing fold would later do. */
1170 if ((commutative_tree_code (subcode
)
1171 || commutative_ternary_tree_code (subcode
))
1172 && tree_swap_operands_p (gimple_assign_rhs1 (stmt
),
1173 gimple_assign_rhs2 (stmt
), false))
1175 tree tem
= gimple_assign_rhs1 (stmt
);
1176 gimple_assign_set_rhs1 (stmt
, gimple_assign_rhs2 (stmt
));
1177 gimple_assign_set_rhs2 (stmt
, tem
);
1180 new_rhs
= fold_gimple_assign (gsi
);
1182 && !useless_type_conversion_p (TREE_TYPE (lhs
),
1183 TREE_TYPE (new_rhs
)))
1184 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
1187 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
1189 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
1196 changed
|= fold_gimple_cond (stmt
);
1200 changed
|= gimple_fold_call (gsi
, inplace
);
1204 /* Fold *& in asm operands. */
1207 const char **oconstraints
;
1208 const char *constraint
;
1209 bool allows_mem
, allows_reg
;
1211 noutputs
= gimple_asm_noutputs (stmt
);
1212 oconstraints
= XALLOCAVEC (const char *, noutputs
);
1214 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
1216 tree link
= gimple_asm_output_op (stmt
, i
);
1217 tree op
= TREE_VALUE (link
);
1219 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1220 if (REFERENCE_CLASS_P (op
)
1221 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
1223 TREE_VALUE (link
) = op
;
1227 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
1229 tree link
= gimple_asm_input_op (stmt
, i
);
1230 tree op
= TREE_VALUE (link
);
1232 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1233 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
1234 oconstraints
, &allows_mem
, &allows_reg
);
1235 if (REFERENCE_CLASS_P (op
)
1236 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
1239 TREE_VALUE (link
) = op
;
1247 if (gimple_debug_bind_p (stmt
))
1249 tree val
= gimple_debug_bind_get_value (stmt
);
1251 && REFERENCE_CLASS_P (val
))
1253 tree tem
= maybe_fold_reference (val
, false);
1256 gimple_debug_bind_set_value (stmt
, tem
);
1261 && TREE_CODE (val
) == ADDR_EXPR
)
1263 tree ref
= TREE_OPERAND (val
, 0);
1264 tree tem
= maybe_fold_reference (ref
, false);
1267 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
1268 gimple_debug_bind_set_value (stmt
, tem
);
1278 /* If stmt folds into nothing and it was the last stmt in a bb,
1279 don't call gsi_stmt. */
1280 if (gsi_end_p (*gsi
))
1282 gcc_assert (next_stmt
== NULL
);
1286 stmt
= gsi_stmt (*gsi
);
1288 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1289 as we'd changing the next stmt. */
1290 if (gimple_has_lhs (stmt
) && stmt
!= next_stmt
)
1292 tree lhs
= gimple_get_lhs (stmt
);
1293 if (lhs
&& REFERENCE_CLASS_P (lhs
))
1295 tree new_lhs
= maybe_fold_reference (lhs
, true);
1298 gimple_set_lhs (stmt
, new_lhs
);
1307 /* Fold the statement pointed to by GSI. In some cases, this function may
1308 replace the whole statement with a new one. Returns true iff folding
1310 The statement pointed to by GSI should be in valid gimple form but may
1311 be in unfolded state as resulting from for example constant propagation
1312 which can produce *&x = 0. */
1315 fold_stmt (gimple_stmt_iterator
*gsi
)
1317 return fold_stmt_1 (gsi
, false);
1320 /* Perform the minimal folding on statement *GSI. Only operations like
1321 *&x created by constant propagation are handled. The statement cannot
1322 be replaced with a new one. Return true if the statement was
1323 changed, false otherwise.
1324 The statement *GSI should be in valid gimple form but may
1325 be in unfolded state as resulting from for example constant propagation
1326 which can produce *&x = 0. */
1329 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
1331 gimple stmt
= gsi_stmt (*gsi
);
1332 bool changed
= fold_stmt_1 (gsi
, true);
1333 gcc_assert (gsi_stmt (*gsi
) == stmt
);
1337 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1338 if EXPR is null or we don't know how.
1339 If non-null, the result always has boolean type. */
1342 canonicalize_bool (tree expr
, bool invert
)
1348 if (integer_nonzerop (expr
))
1349 return boolean_false_node
;
1350 else if (integer_zerop (expr
))
1351 return boolean_true_node
;
1352 else if (TREE_CODE (expr
) == SSA_NAME
)
1353 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
1354 build_int_cst (TREE_TYPE (expr
), 0));
1355 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1356 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
1358 TREE_OPERAND (expr
, 0),
1359 TREE_OPERAND (expr
, 1));
1365 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1367 if (integer_nonzerop (expr
))
1368 return boolean_true_node
;
1369 else if (integer_zerop (expr
))
1370 return boolean_false_node
;
1371 else if (TREE_CODE (expr
) == SSA_NAME
)
1372 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
1373 build_int_cst (TREE_TYPE (expr
), 0));
1374 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1375 return fold_build2 (TREE_CODE (expr
),
1377 TREE_OPERAND (expr
, 0),
1378 TREE_OPERAND (expr
, 1));
1384 /* Check to see if a boolean expression EXPR is logically equivalent to the
1385 comparison (OP1 CODE OP2). Check for various identities involving
1389 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
1390 const_tree op1
, const_tree op2
)
1394 /* The obvious case. */
1395 if (TREE_CODE (expr
) == code
1396 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
1397 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
1400 /* Check for comparing (name, name != 0) and the case where expr
1401 is an SSA_NAME with a definition matching the comparison. */
1402 if (TREE_CODE (expr
) == SSA_NAME
1403 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1405 if (operand_equal_p (expr
, op1
, 0))
1406 return ((code
== NE_EXPR
&& integer_zerop (op2
))
1407 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
1408 s
= SSA_NAME_DEF_STMT (expr
);
1409 if (is_gimple_assign (s
)
1410 && gimple_assign_rhs_code (s
) == code
1411 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
1412 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
1416 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1417 of name is a comparison, recurse. */
1418 if (TREE_CODE (op1
) == SSA_NAME
1419 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
1421 s
= SSA_NAME_DEF_STMT (op1
);
1422 if (is_gimple_assign (s
)
1423 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
1425 enum tree_code c
= gimple_assign_rhs_code (s
);
1426 if ((c
== NE_EXPR
&& integer_zerop (op2
))
1427 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
1428 return same_bool_comparison_p (expr
, c
,
1429 gimple_assign_rhs1 (s
),
1430 gimple_assign_rhs2 (s
));
1431 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
1432 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
1433 return same_bool_comparison_p (expr
,
1434 invert_tree_comparison (c
, false),
1435 gimple_assign_rhs1 (s
),
1436 gimple_assign_rhs2 (s
));
1442 /* Check to see if two boolean expressions OP1 and OP2 are logically
1446 same_bool_result_p (const_tree op1
, const_tree op2
)
1448 /* Simple cases first. */
1449 if (operand_equal_p (op1
, op2
, 0))
1452 /* Check the cases where at least one of the operands is a comparison.
1453 These are a bit smarter than operand_equal_p in that they apply some
1454 identifies on SSA_NAMEs. */
1455 if (TREE_CODE_CLASS (TREE_CODE (op2
)) == tcc_comparison
1456 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
1457 TREE_OPERAND (op2
, 0),
1458 TREE_OPERAND (op2
, 1)))
1460 if (TREE_CODE_CLASS (TREE_CODE (op1
)) == tcc_comparison
1461 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
1462 TREE_OPERAND (op1
, 0),
1463 TREE_OPERAND (op1
, 1)))
1470 /* Forward declarations for some mutually recursive functions. */
1473 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1474 enum tree_code code2
, tree op2a
, tree op2b
);
1476 and_var_with_comparison (tree var
, bool invert
,
1477 enum tree_code code2
, tree op2a
, tree op2b
);
1479 and_var_with_comparison_1 (gimple stmt
,
1480 enum tree_code code2
, tree op2a
, tree op2b
);
1482 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1483 enum tree_code code2
, tree op2a
, tree op2b
);
1485 or_var_with_comparison (tree var
, bool invert
,
1486 enum tree_code code2
, tree op2a
, tree op2b
);
1488 or_var_with_comparison_1 (gimple stmt
,
1489 enum tree_code code2
, tree op2a
, tree op2b
);
1491 /* Helper function for and_comparisons_1: try to simplify the AND of the
1492 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1493 If INVERT is true, invert the value of the VAR before doing the AND.
1494 Return NULL_EXPR if we can't simplify this to a single expression. */
1497 and_var_with_comparison (tree var
, bool invert
,
1498 enum tree_code code2
, tree op2a
, tree op2b
)
1501 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1503 /* We can only deal with variables whose definitions are assignments. */
1504 if (!is_gimple_assign (stmt
))
1507 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1508 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1509 Then we only have to consider the simpler non-inverted cases. */
1511 t
= or_var_with_comparison_1 (stmt
,
1512 invert_tree_comparison (code2
, false),
1515 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
1516 return canonicalize_bool (t
, invert
);
1519 /* Try to simplify the AND of the ssa variable defined by the assignment
1520 STMT with the comparison specified by (OP2A CODE2 OP2B).
1521 Return NULL_EXPR if we can't simplify this to a single expression. */
1524 and_var_with_comparison_1 (gimple stmt
,
1525 enum tree_code code2
, tree op2a
, tree op2b
)
1527 tree var
= gimple_assign_lhs (stmt
);
1528 tree true_test_var
= NULL_TREE
;
1529 tree false_test_var
= NULL_TREE
;
1530 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
1532 /* Check for identities like (var AND (var == 0)) => false. */
1533 if (TREE_CODE (op2a
) == SSA_NAME
1534 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
1536 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
1537 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
1539 true_test_var
= op2a
;
1540 if (var
== true_test_var
)
1543 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
1544 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
1546 false_test_var
= op2a
;
1547 if (var
== false_test_var
)
1548 return boolean_false_node
;
1552 /* If the definition is a comparison, recurse on it. */
1553 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
1555 tree t
= and_comparisons_1 (innercode
,
1556 gimple_assign_rhs1 (stmt
),
1557 gimple_assign_rhs2 (stmt
),
1565 /* If the definition is an AND or OR expression, we may be able to
1566 simplify by reassociating. */
1567 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
1568 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
1570 tree inner1
= gimple_assign_rhs1 (stmt
);
1571 tree inner2
= gimple_assign_rhs2 (stmt
);
1574 tree partial
= NULL_TREE
;
1575 bool is_and
= (innercode
== BIT_AND_EXPR
);
1577 /* Check for boolean identities that don't require recursive examination
1579 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1580 inner1 AND (inner1 OR inner2) => inner1
1581 !inner1 AND (inner1 AND inner2) => false
1582 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1583 Likewise for similar cases involving inner2. */
1584 if (inner1
== true_test_var
)
1585 return (is_and
? var
: inner1
);
1586 else if (inner2
== true_test_var
)
1587 return (is_and
? var
: inner2
);
1588 else if (inner1
== false_test_var
)
1590 ? boolean_false_node
1591 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
1592 else if (inner2
== false_test_var
)
1594 ? boolean_false_node
1595 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
1597 /* Next, redistribute/reassociate the AND across the inner tests.
1598 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1599 if (TREE_CODE (inner1
) == SSA_NAME
1600 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
1601 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
1602 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
1603 gimple_assign_rhs1 (s
),
1604 gimple_assign_rhs2 (s
),
1605 code2
, op2a
, op2b
)))
1607 /* Handle the AND case, where we are reassociating:
1608 (inner1 AND inner2) AND (op2a code2 op2b)
1610 If the partial result t is a constant, we win. Otherwise
1611 continue on to try reassociating with the other inner test. */
1614 if (integer_onep (t
))
1616 else if (integer_zerop (t
))
1617 return boolean_false_node
;
1620 /* Handle the OR case, where we are redistributing:
1621 (inner1 OR inner2) AND (op2a code2 op2b)
1622 => (t OR (inner2 AND (op2a code2 op2b))) */
1623 else if (integer_onep (t
))
1624 return boolean_true_node
;
1626 /* Save partial result for later. */
1630 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1631 if (TREE_CODE (inner2
) == SSA_NAME
1632 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
1633 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
1634 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
1635 gimple_assign_rhs1 (s
),
1636 gimple_assign_rhs2 (s
),
1637 code2
, op2a
, op2b
)))
1639 /* Handle the AND case, where we are reassociating:
1640 (inner1 AND inner2) AND (op2a code2 op2b)
1641 => (inner1 AND t) */
1644 if (integer_onep (t
))
1646 else if (integer_zerop (t
))
1647 return boolean_false_node
;
1648 /* If both are the same, we can apply the identity
1650 else if (partial
&& same_bool_result_p (t
, partial
))
1654 /* Handle the OR case. where we are redistributing:
1655 (inner1 OR inner2) AND (op2a code2 op2b)
1656 => (t OR (inner1 AND (op2a code2 op2b)))
1657 => (t OR partial) */
1660 if (integer_onep (t
))
1661 return boolean_true_node
;
1664 /* We already got a simplification for the other
1665 operand to the redistributed OR expression. The
1666 interesting case is when at least one is false.
1667 Or, if both are the same, we can apply the identity
1669 if (integer_zerop (partial
))
1671 else if (integer_zerop (t
))
1673 else if (same_bool_result_p (t
, partial
))
1682 /* Try to simplify the AND of two comparisons defined by
1683 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1684 If this can be done without constructing an intermediate value,
1685 return the resulting tree; otherwise NULL_TREE is returned.
1686 This function is deliberately asymmetric as it recurses on SSA_DEFs
1687 in the first comparison but not the second. */
1690 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1691 enum tree_code code2
, tree op2a
, tree op2b
)
1693 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1694 if (operand_equal_p (op1a
, op2a
, 0)
1695 && operand_equal_p (op1b
, op2b
, 0))
1697 /* Result will be either NULL_TREE, or a combined comparison. */
1698 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
1699 TRUTH_ANDIF_EXPR
, code1
, code2
,
1700 boolean_type_node
, op1a
, op1b
);
1705 /* Likewise the swapped case of the above. */
1706 if (operand_equal_p (op1a
, op2b
, 0)
1707 && operand_equal_p (op1b
, op2a
, 0))
1709 /* Result will be either NULL_TREE, or a combined comparison. */
1710 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
1711 TRUTH_ANDIF_EXPR
, code1
,
1712 swap_tree_comparison (code2
),
1713 boolean_type_node
, op1a
, op1b
);
1718 /* If both comparisons are of the same value against constants, we might
1719 be able to merge them. */
1720 if (operand_equal_p (op1a
, op2a
, 0)
1721 && TREE_CODE (op1b
) == INTEGER_CST
1722 && TREE_CODE (op2b
) == INTEGER_CST
)
1724 int cmp
= tree_int_cst_compare (op1b
, op2b
);
1726 /* If we have (op1a == op1b), we should either be able to
1727 return that or FALSE, depending on whether the constant op1b
1728 also satisfies the other comparison against op2b. */
1729 if (code1
== EQ_EXPR
)
1735 case EQ_EXPR
: val
= (cmp
== 0); break;
1736 case NE_EXPR
: val
= (cmp
!= 0); break;
1737 case LT_EXPR
: val
= (cmp
< 0); break;
1738 case GT_EXPR
: val
= (cmp
> 0); break;
1739 case LE_EXPR
: val
= (cmp
<= 0); break;
1740 case GE_EXPR
: val
= (cmp
>= 0); break;
1741 default: done
= false;
1746 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1748 return boolean_false_node
;
1751 /* Likewise if the second comparison is an == comparison. */
1752 else if (code2
== EQ_EXPR
)
1758 case EQ_EXPR
: val
= (cmp
== 0); break;
1759 case NE_EXPR
: val
= (cmp
!= 0); break;
1760 case LT_EXPR
: val
= (cmp
> 0); break;
1761 case GT_EXPR
: val
= (cmp
< 0); break;
1762 case LE_EXPR
: val
= (cmp
>= 0); break;
1763 case GE_EXPR
: val
= (cmp
<= 0); break;
1764 default: done
= false;
1769 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1771 return boolean_false_node
;
1775 /* Same business with inequality tests. */
1776 else if (code1
== NE_EXPR
)
1781 case EQ_EXPR
: val
= (cmp
!= 0); break;
1782 case NE_EXPR
: val
= (cmp
== 0); break;
1783 case LT_EXPR
: val
= (cmp
>= 0); break;
1784 case GT_EXPR
: val
= (cmp
<= 0); break;
1785 case LE_EXPR
: val
= (cmp
> 0); break;
1786 case GE_EXPR
: val
= (cmp
< 0); break;
1791 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1793 else if (code2
== NE_EXPR
)
1798 case EQ_EXPR
: val
= (cmp
== 0); break;
1799 case NE_EXPR
: val
= (cmp
!= 0); break;
1800 case LT_EXPR
: val
= (cmp
<= 0); break;
1801 case GT_EXPR
: val
= (cmp
>= 0); break;
1802 case LE_EXPR
: val
= (cmp
< 0); break;
1803 case GE_EXPR
: val
= (cmp
> 0); break;
1808 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1811 /* Chose the more restrictive of two < or <= comparisons. */
1812 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
1813 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
1815 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
1816 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1818 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1821 /* Likewise chose the more restrictive of two > or >= comparisons. */
1822 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
1823 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
1825 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
1826 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1828 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1831 /* Check for singleton ranges. */
1833 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
1834 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
1835 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
1837 /* Check for disjoint ranges. */
1839 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
1840 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
1841 return boolean_false_node
;
1843 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
1844 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
1845 return boolean_false_node
;
1848 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1849 NAME's definition is a truth value. See if there are any simplifications
1850 that can be done against the NAME's definition. */
1851 if (TREE_CODE (op1a
) == SSA_NAME
1852 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
1853 && (integer_zerop (op1b
) || integer_onep (op1b
)))
1855 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
1856 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
1857 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
1858 switch (gimple_code (stmt
))
1861 /* Try to simplify by copy-propagating the definition. */
1862 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
1865 /* If every argument to the PHI produces the same result when
1866 ANDed with the second comparison, we win.
1867 Do not do this unless the type is bool since we need a bool
1868 result here anyway. */
1869 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
1871 tree result
= NULL_TREE
;
1873 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1875 tree arg
= gimple_phi_arg_def (stmt
, i
);
1877 /* If this PHI has itself as an argument, ignore it.
1878 If all the other args produce the same result,
1880 if (arg
== gimple_phi_result (stmt
))
1882 else if (TREE_CODE (arg
) == INTEGER_CST
)
1884 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
1887 result
= boolean_false_node
;
1888 else if (!integer_zerop (result
))
1892 result
= fold_build2 (code2
, boolean_type_node
,
1894 else if (!same_bool_comparison_p (result
,
1898 else if (TREE_CODE (arg
) == SSA_NAME
1899 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
1902 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
1903 /* In simple cases we can look through PHI nodes,
1904 but we have to be careful with loops.
1906 if (! dom_info_available_p (CDI_DOMINATORS
)
1907 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
1908 || dominated_by_p (CDI_DOMINATORS
,
1909 gimple_bb (def_stmt
),
1912 temp
= and_var_with_comparison (arg
, invert
, code2
,
1918 else if (!same_bool_result_p (result
, temp
))
1934 /* Try to simplify the AND of two comparisons, specified by
1935 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1936 If this can be simplified to a single expression (without requiring
1937 introducing more SSA variables to hold intermediate values),
1938 return the resulting tree. Otherwise return NULL_TREE.
1939 If the result expression is non-null, it has boolean type. */
1942 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
1943 enum tree_code code2
, tree op2a
, tree op2b
)
1945 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
1949 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
1952 /* Helper function for or_comparisons_1: try to simplify the OR of the
1953 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1954 If INVERT is true, invert the value of VAR before doing the OR.
1955 Return NULL_EXPR if we can't simplify this to a single expression. */
1958 or_var_with_comparison (tree var
, bool invert
,
1959 enum tree_code code2
, tree op2a
, tree op2b
)
1962 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1964 /* We can only deal with variables whose definitions are assignments. */
1965 if (!is_gimple_assign (stmt
))
1968 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1969 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1970 Then we only have to consider the simpler non-inverted cases. */
1972 t
= and_var_with_comparison_1 (stmt
,
1973 invert_tree_comparison (code2
, false),
1976 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
1977 return canonicalize_bool (t
, invert
);
1980 /* Try to simplify the OR of the ssa variable defined by the assignment
1981 STMT with the comparison specified by (OP2A CODE2 OP2B).
1982 Return NULL_EXPR if we can't simplify this to a single expression. */
1985 or_var_with_comparison_1 (gimple stmt
,
1986 enum tree_code code2
, tree op2a
, tree op2b
)
1988 tree var
= gimple_assign_lhs (stmt
);
1989 tree true_test_var
= NULL_TREE
;
1990 tree false_test_var
= NULL_TREE
;
1991 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
1993 /* Check for identities like (var OR (var != 0)) => true . */
1994 if (TREE_CODE (op2a
) == SSA_NAME
1995 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
1997 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
1998 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
2000 true_test_var
= op2a
;
2001 if (var
== true_test_var
)
2004 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
2005 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
2007 false_test_var
= op2a
;
2008 if (var
== false_test_var
)
2009 return boolean_true_node
;
2013 /* If the definition is a comparison, recurse on it. */
2014 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
2016 tree t
= or_comparisons_1 (innercode
,
2017 gimple_assign_rhs1 (stmt
),
2018 gimple_assign_rhs2 (stmt
),
2026 /* If the definition is an AND or OR expression, we may be able to
2027 simplify by reassociating. */
2028 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
2029 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
2031 tree inner1
= gimple_assign_rhs1 (stmt
);
2032 tree inner2
= gimple_assign_rhs2 (stmt
);
2035 tree partial
= NULL_TREE
;
2036 bool is_or
= (innercode
== BIT_IOR_EXPR
);
2038 /* Check for boolean identities that don't require recursive examination
2040 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2041 inner1 OR (inner1 AND inner2) => inner1
2042 !inner1 OR (inner1 OR inner2) => true
2043 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2045 if (inner1
== true_test_var
)
2046 return (is_or
? var
: inner1
);
2047 else if (inner2
== true_test_var
)
2048 return (is_or
? var
: inner2
);
2049 else if (inner1
== false_test_var
)
2052 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
2053 else if (inner2
== false_test_var
)
2056 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
2058 /* Next, redistribute/reassociate the OR across the inner tests.
2059 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2060 if (TREE_CODE (inner1
) == SSA_NAME
2061 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
2062 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2063 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2064 gimple_assign_rhs1 (s
),
2065 gimple_assign_rhs2 (s
),
2066 code2
, op2a
, op2b
)))
2068 /* Handle the OR case, where we are reassociating:
2069 (inner1 OR inner2) OR (op2a code2 op2b)
2071 If the partial result t is a constant, we win. Otherwise
2072 continue on to try reassociating with the other inner test. */
2075 if (integer_onep (t
))
2076 return boolean_true_node
;
2077 else if (integer_zerop (t
))
2081 /* Handle the AND case, where we are redistributing:
2082 (inner1 AND inner2) OR (op2a code2 op2b)
2083 => (t AND (inner2 OR (op2a code op2b))) */
2084 else if (integer_zerop (t
))
2085 return boolean_false_node
;
2087 /* Save partial result for later. */
2091 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2092 if (TREE_CODE (inner2
) == SSA_NAME
2093 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
2094 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2095 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2096 gimple_assign_rhs1 (s
),
2097 gimple_assign_rhs2 (s
),
2098 code2
, op2a
, op2b
)))
2100 /* Handle the OR case, where we are reassociating:
2101 (inner1 OR inner2) OR (op2a code2 op2b)
2103 => (t OR partial) */
2106 if (integer_zerop (t
))
2108 else if (integer_onep (t
))
2109 return boolean_true_node
;
2110 /* If both are the same, we can apply the identity
2112 else if (partial
&& same_bool_result_p (t
, partial
))
2116 /* Handle the AND case, where we are redistributing:
2117 (inner1 AND inner2) OR (op2a code2 op2b)
2118 => (t AND (inner1 OR (op2a code2 op2b)))
2119 => (t AND partial) */
2122 if (integer_zerop (t
))
2123 return boolean_false_node
;
2126 /* We already got a simplification for the other
2127 operand to the redistributed AND expression. The
2128 interesting case is when at least one is true.
2129 Or, if both are the same, we can apply the identity
2131 if (integer_onep (partial
))
2133 else if (integer_onep (t
))
2135 else if (same_bool_result_p (t
, partial
))
2144 /* Try to simplify the OR of two comparisons defined by
2145 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2146 If this can be done without constructing an intermediate value,
2147 return the resulting tree; otherwise NULL_TREE is returned.
2148 This function is deliberately asymmetric as it recurses on SSA_DEFs
2149 in the first comparison but not the second. */
2152 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
2153 enum tree_code code2
, tree op2a
, tree op2b
)
2155 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2156 if (operand_equal_p (op1a
, op2a
, 0)
2157 && operand_equal_p (op1b
, op2b
, 0))
2159 /* Result will be either NULL_TREE, or a combined comparison. */
2160 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2161 TRUTH_ORIF_EXPR
, code1
, code2
,
2162 boolean_type_node
, op1a
, op1b
);
2167 /* Likewise the swapped case of the above. */
2168 if (operand_equal_p (op1a
, op2b
, 0)
2169 && operand_equal_p (op1b
, op2a
, 0))
2171 /* Result will be either NULL_TREE, or a combined comparison. */
2172 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2173 TRUTH_ORIF_EXPR
, code1
,
2174 swap_tree_comparison (code2
),
2175 boolean_type_node
, op1a
, op1b
);
2180 /* If both comparisons are of the same value against constants, we might
2181 be able to merge them. */
2182 if (operand_equal_p (op1a
, op2a
, 0)
2183 && TREE_CODE (op1b
) == INTEGER_CST
2184 && TREE_CODE (op2b
) == INTEGER_CST
)
2186 int cmp
= tree_int_cst_compare (op1b
, op2b
);
2188 /* If we have (op1a != op1b), we should either be able to
2189 return that or TRUE, depending on whether the constant op1b
2190 also satisfies the other comparison against op2b. */
2191 if (code1
== NE_EXPR
)
2197 case EQ_EXPR
: val
= (cmp
== 0); break;
2198 case NE_EXPR
: val
= (cmp
!= 0); break;
2199 case LT_EXPR
: val
= (cmp
< 0); break;
2200 case GT_EXPR
: val
= (cmp
> 0); break;
2201 case LE_EXPR
: val
= (cmp
<= 0); break;
2202 case GE_EXPR
: val
= (cmp
>= 0); break;
2203 default: done
= false;
2208 return boolean_true_node
;
2210 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2213 /* Likewise if the second comparison is a != comparison. */
2214 else if (code2
== NE_EXPR
)
2220 case EQ_EXPR
: val
= (cmp
== 0); break;
2221 case NE_EXPR
: val
= (cmp
!= 0); break;
2222 case LT_EXPR
: val
= (cmp
> 0); break;
2223 case GT_EXPR
: val
= (cmp
< 0); break;
2224 case LE_EXPR
: val
= (cmp
>= 0); break;
2225 case GE_EXPR
: val
= (cmp
<= 0); break;
2226 default: done
= false;
2231 return boolean_true_node
;
2233 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2237 /* See if an equality test is redundant with the other comparison. */
2238 else if (code1
== EQ_EXPR
)
2243 case EQ_EXPR
: val
= (cmp
== 0); break;
2244 case NE_EXPR
: val
= (cmp
!= 0); break;
2245 case LT_EXPR
: val
= (cmp
< 0); break;
2246 case GT_EXPR
: val
= (cmp
> 0); break;
2247 case LE_EXPR
: val
= (cmp
<= 0); break;
2248 case GE_EXPR
: val
= (cmp
>= 0); break;
2253 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2255 else if (code2
== EQ_EXPR
)
2260 case EQ_EXPR
: val
= (cmp
== 0); break;
2261 case NE_EXPR
: val
= (cmp
!= 0); break;
2262 case LT_EXPR
: val
= (cmp
> 0); break;
2263 case GT_EXPR
: val
= (cmp
< 0); break;
2264 case LE_EXPR
: val
= (cmp
>= 0); break;
2265 case GE_EXPR
: val
= (cmp
<= 0); break;
2270 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2273 /* Chose the less restrictive of two < or <= comparisons. */
2274 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
2275 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2277 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
2278 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2280 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2283 /* Likewise chose the less restrictive of two > or >= comparisons. */
2284 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
2285 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2287 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
2288 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2290 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2293 /* Check for singleton ranges. */
2295 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
2296 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
2297 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
2299 /* Check for less/greater pairs that don't restrict the range at all. */
2301 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
2302 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2303 return boolean_true_node
;
2305 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
2306 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2307 return boolean_true_node
;
2310 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2311 NAME's definition is a truth value. See if there are any simplifications
2312 that can be done against the NAME's definition. */
2313 if (TREE_CODE (op1a
) == SSA_NAME
2314 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
2315 && (integer_zerop (op1b
) || integer_onep (op1b
)))
2317 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
2318 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
2319 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
2320 switch (gimple_code (stmt
))
2323 /* Try to simplify by copy-propagating the definition. */
2324 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
2327 /* If every argument to the PHI produces the same result when
2328 ORed with the second comparison, we win.
2329 Do not do this unless the type is bool since we need a bool
2330 result here anyway. */
2331 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
2333 tree result
= NULL_TREE
;
2335 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
2337 tree arg
= gimple_phi_arg_def (stmt
, i
);
2339 /* If this PHI has itself as an argument, ignore it.
2340 If all the other args produce the same result,
2342 if (arg
== gimple_phi_result (stmt
))
2344 else if (TREE_CODE (arg
) == INTEGER_CST
)
2346 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
2349 result
= boolean_true_node
;
2350 else if (!integer_onep (result
))
2354 result
= fold_build2 (code2
, boolean_type_node
,
2356 else if (!same_bool_comparison_p (result
,
2360 else if (TREE_CODE (arg
) == SSA_NAME
2361 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
2364 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
2365 /* In simple cases we can look through PHI nodes,
2366 but we have to be careful with loops.
2368 if (! dom_info_available_p (CDI_DOMINATORS
)
2369 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
2370 || dominated_by_p (CDI_DOMINATORS
,
2371 gimple_bb (def_stmt
),
2374 temp
= or_var_with_comparison (arg
, invert
, code2
,
2380 else if (!same_bool_result_p (result
, temp
))
2396 /* Try to simplify the OR of two comparisons, specified by
2397 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2398 If this can be simplified to a single expression (without requiring
2399 introducing more SSA variables to hold intermediate values),
2400 return the resulting tree. Otherwise return NULL_TREE.
2401 If the result expression is non-null, it has boolean type. */
2404 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
2405 enum tree_code code2
, tree op2a
, tree op2b
)
2407 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
2411 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
2415 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2417 Either NULL_TREE, a simplified but non-constant or a constant
2420 ??? This should go into a gimple-fold-inline.h file to be eventually
2421 privatized with the single valueize function used in the various TUs
2422 to avoid the indirect function call overhead. */
2425 gimple_fold_stmt_to_constant_1 (gimple stmt
, tree (*valueize
) (tree
))
2427 location_t loc
= gimple_location (stmt
);
2428 switch (gimple_code (stmt
))
2432 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
2434 switch (get_gimple_rhs_class (subcode
))
2436 case GIMPLE_SINGLE_RHS
:
2438 tree rhs
= gimple_assign_rhs1 (stmt
);
2439 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
2441 if (TREE_CODE (rhs
) == SSA_NAME
)
2443 /* If the RHS is an SSA_NAME, return its known constant value,
2445 return (*valueize
) (rhs
);
2447 /* Handle propagating invariant addresses into address
2449 else if (TREE_CODE (rhs
) == ADDR_EXPR
2450 && !is_gimple_min_invariant (rhs
))
2452 HOST_WIDE_INT offset
= 0;
2454 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
2458 && (CONSTANT_CLASS_P (base
)
2459 || decl_address_invariant_p (base
)))
2460 return build_invariant_address (TREE_TYPE (rhs
),
2463 else if (TREE_CODE (rhs
) == CONSTRUCTOR
2464 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
2465 && (CONSTRUCTOR_NELTS (rhs
)
2466 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
2471 vec
= XALLOCAVEC (tree
,
2472 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
)));
2473 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
2475 val
= (*valueize
) (val
);
2476 if (TREE_CODE (val
) == INTEGER_CST
2477 || TREE_CODE (val
) == REAL_CST
2478 || TREE_CODE (val
) == FIXED_CST
)
2484 return build_vector (TREE_TYPE (rhs
), vec
);
2487 if (kind
== tcc_reference
)
2489 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
2490 || TREE_CODE (rhs
) == REALPART_EXPR
2491 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
2492 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2494 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2495 return fold_unary_loc (EXPR_LOCATION (rhs
),
2497 TREE_TYPE (rhs
), val
);
2499 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
2500 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2502 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2503 return fold_ternary_loc (EXPR_LOCATION (rhs
),
2505 TREE_TYPE (rhs
), val
,
2506 TREE_OPERAND (rhs
, 1),
2507 TREE_OPERAND (rhs
, 2));
2509 else if (TREE_CODE (rhs
) == MEM_REF
2510 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2512 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2513 if (TREE_CODE (val
) == ADDR_EXPR
2514 && is_gimple_min_invariant (val
))
2516 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
2518 TREE_OPERAND (rhs
, 1));
2523 return fold_const_aggregate_ref_1 (rhs
, valueize
);
2525 else if (kind
== tcc_declaration
)
2526 return get_symbol_constant_value (rhs
);
2530 case GIMPLE_UNARY_RHS
:
2532 /* Handle unary operators that can appear in GIMPLE form.
2533 Note that we know the single operand must be a constant,
2534 so this should almost always return a simplified RHS. */
2535 tree lhs
= gimple_assign_lhs (stmt
);
2536 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2538 /* Conversions are useless for CCP purposes if they are
2539 value-preserving. Thus the restrictions that
2540 useless_type_conversion_p places for restrict qualification
2541 of pointer types should not apply here.
2542 Substitution later will only substitute to allowed places. */
2543 if (CONVERT_EXPR_CODE_P (subcode
)
2544 && POINTER_TYPE_P (TREE_TYPE (lhs
))
2545 && POINTER_TYPE_P (TREE_TYPE (op0
))
2546 && TYPE_ADDR_SPACE (TREE_TYPE (lhs
))
2547 == TYPE_ADDR_SPACE (TREE_TYPE (op0
))
2548 && TYPE_MODE (TREE_TYPE (lhs
))
2549 == TYPE_MODE (TREE_TYPE (op0
)))
2553 fold_unary_ignore_overflow_loc (loc
, subcode
,
2554 gimple_expr_type (stmt
), op0
);
2557 case GIMPLE_BINARY_RHS
:
2559 /* Handle binary operators that can appear in GIMPLE form. */
2560 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2561 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
2563 /* Translate &x + CST into an invariant form suitable for
2564 further propagation. */
2565 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
2566 && TREE_CODE (op0
) == ADDR_EXPR
2567 && TREE_CODE (op1
) == INTEGER_CST
)
2569 tree off
= fold_convert (ptr_type_node
, op1
);
2570 return build_fold_addr_expr_loc
2572 fold_build2 (MEM_REF
,
2573 TREE_TYPE (TREE_TYPE (op0
)),
2574 unshare_expr (op0
), off
));
2577 return fold_binary_loc (loc
, subcode
,
2578 gimple_expr_type (stmt
), op0
, op1
);
2581 case GIMPLE_TERNARY_RHS
:
2583 /* Handle ternary operators that can appear in GIMPLE form. */
2584 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2585 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
2586 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
2588 /* Fold embedded expressions in ternary codes. */
2589 if ((subcode
== COND_EXPR
2590 || subcode
== VEC_COND_EXPR
)
2591 && COMPARISON_CLASS_P (op0
))
2593 tree op00
= (*valueize
) (TREE_OPERAND (op0
, 0));
2594 tree op01
= (*valueize
) (TREE_OPERAND (op0
, 1));
2595 tree tem
= fold_binary_loc (loc
, TREE_CODE (op0
),
2596 TREE_TYPE (op0
), op00
, op01
);
2601 return fold_ternary_loc (loc
, subcode
,
2602 gimple_expr_type (stmt
), op0
, op1
, op2
);
2614 if (gimple_call_internal_p (stmt
))
2615 /* No folding yet for these functions. */
2618 fn
= (*valueize
) (gimple_call_fn (stmt
));
2619 if (TREE_CODE (fn
) == ADDR_EXPR
2620 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
2621 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0)))
2623 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
2626 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2627 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
2628 call
= build_call_array_loc (loc
,
2629 gimple_call_return_type (stmt
),
2630 fn
, gimple_call_num_args (stmt
), args
);
2631 retval
= fold_call_expr (EXPR_LOCATION (call
), call
, false);
2633 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2634 STRIP_NOPS (retval
);
2645 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2646 Returns NULL_TREE if folding to a constant is not possible, otherwise
2647 returns a constant according to is_gimple_min_invariant. */
2650 gimple_fold_stmt_to_constant (gimple stmt
, tree (*valueize
) (tree
))
2652 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
2653 if (res
&& is_gimple_min_invariant (res
))
2659 /* The following set of functions are supposed to fold references using
2660 their constant initializers. */
2662 static tree
fold_ctor_reference (tree type
, tree ctor
,
2663 unsigned HOST_WIDE_INT offset
,
2664 unsigned HOST_WIDE_INT size
, tree
);
2666 /* See if we can find constructor defining value of BASE.
2667 When we know the consructor with constant offset (such as
2668 base is array[40] and we do know constructor of array), then
2669 BIT_OFFSET is adjusted accordingly.
2671 As a special case, return error_mark_node when constructor
2672 is not explicitly available, but it is known to be zero
2673 such as 'static const int a;'. */
2675 get_base_constructor (tree base
, HOST_WIDE_INT
*bit_offset
,
2676 tree (*valueize
)(tree
))
2678 HOST_WIDE_INT bit_offset2
, size
, max_size
;
2679 if (TREE_CODE (base
) == MEM_REF
)
2681 if (!integer_zerop (TREE_OPERAND (base
, 1)))
2683 if (!host_integerp (TREE_OPERAND (base
, 1), 0))
2685 *bit_offset
+= (mem_ref_offset (base
).low
2690 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
2691 base
= valueize (TREE_OPERAND (base
, 0));
2692 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
2694 base
= TREE_OPERAND (base
, 0);
2697 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2698 DECL_INITIAL. If BASE is a nested reference into another
2699 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2700 the inner reference. */
2701 switch (TREE_CODE (base
))
2704 if (!const_value_known_p (base
))
2709 if (!DECL_INITIAL (base
)
2710 && (TREE_STATIC (base
) || DECL_EXTERNAL (base
)))
2711 return error_mark_node
;
2712 /* Do not return an error_mark_node DECL_INITIAL. LTO uses this
2713 as special marker (_not_ zero ...) for its own purposes. */
2714 if (DECL_INITIAL (base
) == error_mark_node
)
2716 return DECL_INITIAL (base
);
2720 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
);
2721 if (max_size
== -1 || size
!= max_size
)
2723 *bit_offset
+= bit_offset2
;
2724 return get_base_constructor (base
, bit_offset
, valueize
);
2735 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2736 to the memory at bit OFFSET.
2738 We do only simple job of folding byte accesses. */
2741 fold_string_cst_ctor_reference (tree type
, tree ctor
,
2742 unsigned HOST_WIDE_INT offset
,
2743 unsigned HOST_WIDE_INT size
)
2745 if (INTEGRAL_TYPE_P (type
)
2746 && (TYPE_MODE (type
)
2747 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
2748 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
2750 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
)))) == 1
2751 && size
== BITS_PER_UNIT
2752 && !(offset
% BITS_PER_UNIT
))
2754 offset
/= BITS_PER_UNIT
;
2755 if (offset
< (unsigned HOST_WIDE_INT
) TREE_STRING_LENGTH (ctor
))
2756 return build_int_cst_type (type
, (TREE_STRING_POINTER (ctor
)
2759 const char a[20]="hello";
2762 might lead to offset greater than string length. In this case we
2763 know value is either initialized to 0 or out of bounds. Return 0
2765 return build_zero_cst (type
);
2770 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2771 SIZE to the memory at bit OFFSET. */
2774 fold_array_ctor_reference (tree type
, tree ctor
,
2775 unsigned HOST_WIDE_INT offset
,
2776 unsigned HOST_WIDE_INT size
,
2779 unsigned HOST_WIDE_INT cnt
;
2781 double_int low_bound
, elt_size
;
2782 double_int index
, max_index
;
2783 double_int access_index
;
2784 tree domain_type
= NULL_TREE
, index_type
= NULL_TREE
;
2785 HOST_WIDE_INT inner_offset
;
2787 /* Compute low bound and elt size. */
2788 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
2789 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
2790 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
2792 /* Static constructors for variably sized objects makes no sense. */
2793 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
2794 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
2795 low_bound
= tree_to_double_int (TYPE_MIN_VALUE (domain_type
));
2798 low_bound
= double_int_zero
;
2799 /* Static constructors for variably sized objects makes no sense. */
2800 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))))
2803 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
2806 /* We can handle only constantly sized accesses that are known to not
2807 be larger than size of array element. */
2808 if (!TYPE_SIZE_UNIT (type
)
2809 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
2810 || double_int_cmp (elt_size
,
2811 tree_to_double_int (TYPE_SIZE_UNIT (type
)), 0) < 0)
2814 /* Compute the array index we look for. */
2815 access_index
= double_int_udiv (uhwi_to_double_int (offset
/ BITS_PER_UNIT
),
2816 elt_size
, TRUNC_DIV_EXPR
);
2817 access_index
= double_int_add (access_index
, low_bound
);
2819 access_index
= double_int_ext (access_index
,
2820 TYPE_PRECISION (index_type
),
2821 TYPE_UNSIGNED (index_type
));
2823 /* And offset within the access. */
2824 inner_offset
= offset
% (double_int_to_uhwi (elt_size
) * BITS_PER_UNIT
);
2826 /* See if the array field is large enough to span whole access. We do not
2827 care to fold accesses spanning multiple array indexes. */
2828 if (inner_offset
+ size
> double_int_to_uhwi (elt_size
) * BITS_PER_UNIT
)
2831 index
= double_int_sub (low_bound
, double_int_one
);
2833 index
= double_int_ext (index
,
2834 TYPE_PRECISION (index_type
),
2835 TYPE_UNSIGNED (index_type
));
2837 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
2839 /* Array constructor might explicitely set index, or specify range
2840 or leave index NULL meaning that it is next index after previous
2844 if (TREE_CODE (cfield
) == INTEGER_CST
)
2845 max_index
= index
= tree_to_double_int (cfield
);
2848 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
2849 index
= tree_to_double_int (TREE_OPERAND (cfield
, 0));
2850 max_index
= tree_to_double_int (TREE_OPERAND (cfield
, 1));
2855 index
= double_int_add (index
, double_int_one
);
2857 index
= double_int_ext (index
,
2858 TYPE_PRECISION (index_type
),
2859 TYPE_UNSIGNED (index_type
));
2863 /* Do we have match? */
2864 if (double_int_cmp (access_index
, index
, 1) >= 0
2865 && double_int_cmp (access_index
, max_index
, 1) <= 0)
2866 return fold_ctor_reference (type
, cval
, inner_offset
, size
,
2869 /* When memory is not explicitely mentioned in constructor,
2870 it is 0 (or out of range). */
2871 return build_zero_cst (type
);
2874 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2875 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2878 fold_nonarray_ctor_reference (tree type
, tree ctor
,
2879 unsigned HOST_WIDE_INT offset
,
2880 unsigned HOST_WIDE_INT size
,
2883 unsigned HOST_WIDE_INT cnt
;
2886 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
2889 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
2890 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
2891 tree field_size
= DECL_SIZE (cfield
);
2892 double_int bitoffset
;
2893 double_int byte_offset_cst
= tree_to_double_int (byte_offset
);
2894 double_int bits_per_unit_cst
= uhwi_to_double_int (BITS_PER_UNIT
);
2895 double_int bitoffset_end
, access_end
;
2897 /* Variable sized objects in static constructors makes no sense,
2898 but field_size can be NULL for flexible array members. */
2899 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
2900 && TREE_CODE (byte_offset
) == INTEGER_CST
2901 && (field_size
!= NULL_TREE
2902 ? TREE_CODE (field_size
) == INTEGER_CST
2903 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
2905 /* Compute bit offset of the field. */
2906 bitoffset
= double_int_add (tree_to_double_int (field_offset
),
2907 double_int_mul (byte_offset_cst
,
2908 bits_per_unit_cst
));
2909 /* Compute bit offset where the field ends. */
2910 if (field_size
!= NULL_TREE
)
2911 bitoffset_end
= double_int_add (bitoffset
,
2912 tree_to_double_int (field_size
));
2914 bitoffset_end
= double_int_zero
;
2916 access_end
= double_int_add (uhwi_to_double_int (offset
),
2917 uhwi_to_double_int (size
));
2919 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2920 [BITOFFSET, BITOFFSET_END)? */
2921 if (double_int_cmp (access_end
, bitoffset
, 0) > 0
2922 && (field_size
== NULL_TREE
2923 || double_int_cmp (uhwi_to_double_int (offset
),
2924 bitoffset_end
, 0) < 0))
2926 double_int inner_offset
= double_int_sub (uhwi_to_double_int (offset
),
2928 /* We do have overlap. Now see if field is large enough to
2929 cover the access. Give up for accesses spanning multiple
2931 if (double_int_cmp (access_end
, bitoffset_end
, 0) > 0)
2933 if (double_int_cmp (uhwi_to_double_int (offset
), bitoffset
, 0) < 0)
2935 return fold_ctor_reference (type
, cval
,
2936 double_int_to_uhwi (inner_offset
), size
,
2940 /* When memory is not explicitely mentioned in constructor, it is 0. */
2941 return build_zero_cst (type
);
2944 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2945 to the memory at bit OFFSET. */
2948 fold_ctor_reference (tree type
, tree ctor
, unsigned HOST_WIDE_INT offset
,
2949 unsigned HOST_WIDE_INT size
, tree from_decl
)
2953 /* We found the field with exact match. */
2954 if (useless_type_conversion_p (type
, TREE_TYPE (ctor
))
2956 return canonicalize_constructor_val (ctor
, from_decl
);
2958 /* We are at the end of walk, see if we can view convert the
2960 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
2961 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2962 && operand_equal_p (TYPE_SIZE (type
),
2963 TYPE_SIZE (TREE_TYPE (ctor
)), 0))
2965 ret
= canonicalize_constructor_val (ctor
, from_decl
);
2966 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
2971 if (TREE_CODE (ctor
) == STRING_CST
)
2972 return fold_string_cst_ctor_reference (type
, ctor
, offset
, size
);
2973 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
2976 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
2977 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
2978 return fold_array_ctor_reference (type
, ctor
, offset
, size
,
2981 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
2988 /* Return the tree representing the element referenced by T if T is an
2989 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
2990 names using VALUEIZE. Return NULL_TREE otherwise. */
2993 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
2995 tree ctor
, idx
, base
;
2996 HOST_WIDE_INT offset
, size
, max_size
;
2999 if (TREE_THIS_VOLATILE (t
))
3002 if (TREE_CODE_CLASS (TREE_CODE (t
)) == tcc_declaration
)
3003 return get_symbol_constant_value (t
);
3005 tem
= fold_read_from_constant_string (t
);
3009 switch (TREE_CODE (t
))
3012 case ARRAY_RANGE_REF
:
3013 /* Constant indexes are handled well by get_base_constructor.
3014 Only special case variable offsets.
3015 FIXME: This code can't handle nested references with variable indexes
3016 (they will be handled only by iteration of ccp). Perhaps we can bring
3017 get_ref_base_and_extent here and make it use a valueize callback. */
3018 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
3020 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
3021 && TREE_CODE (idx
) == INTEGER_CST
)
3023 tree low_bound
, unit_size
;
3026 /* If the resulting bit-offset is constant, track it. */
3027 if ((low_bound
= array_ref_low_bound (t
),
3028 TREE_CODE (low_bound
) == INTEGER_CST
)
3029 && (unit_size
= array_ref_element_size (t
),
3030 host_integerp (unit_size
, 1))
3031 && (doffset
= double_int_sext
3032 (double_int_sub (TREE_INT_CST (idx
),
3033 TREE_INT_CST (low_bound
)),
3034 TYPE_PRECISION (TREE_TYPE (idx
))),
3035 double_int_fits_in_shwi_p (doffset
)))
3037 offset
= double_int_to_shwi (doffset
);
3038 offset
*= TREE_INT_CST_LOW (unit_size
);
3039 offset
*= BITS_PER_UNIT
;
3041 base
= TREE_OPERAND (t
, 0);
3042 ctor
= get_base_constructor (base
, &offset
, valueize
);
3043 /* Empty constructor. Always fold to 0. */
3044 if (ctor
== error_mark_node
)
3045 return build_zero_cst (TREE_TYPE (t
));
3046 /* Out of bound array access. Value is undefined,
3050 /* We can not determine ctor. */
3053 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
3054 TREE_INT_CST_LOW (unit_size
)
3063 case TARGET_MEM_REF
:
3065 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
);
3066 ctor
= get_base_constructor (base
, &offset
, valueize
);
3068 /* Empty constructor. Always fold to 0. */
3069 if (ctor
== error_mark_node
)
3070 return build_zero_cst (TREE_TYPE (t
));
3071 /* We do not know precise address. */
3072 if (max_size
== -1 || max_size
!= size
)
3074 /* We can not determine ctor. */
3078 /* Out of bound array access. Value is undefined, but don't fold. */
3082 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
,
3088 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
3089 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
3090 return fold_build1_loc (EXPR_LOCATION (t
),
3091 TREE_CODE (t
), TREE_TYPE (t
), c
);
3103 fold_const_aggregate_ref (tree t
)
3105 return fold_const_aggregate_ref_1 (t
, NULL
);
3108 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3109 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3110 KNOWN_BINFO carries the binfo describing the true type of
3111 OBJ_TYPE_REF_OBJECT(REF). */
3114 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
)
3116 unsigned HOST_WIDE_INT offset
, size
;
3119 vtable
= v
= BINFO_VTABLE (known_binfo
);
3120 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3124 if (TREE_CODE (v
) == POINTER_PLUS_EXPR
)
3126 offset
= tree_low_cst (TREE_OPERAND (v
, 1), 1) * BITS_PER_UNIT
;
3127 v
= TREE_OPERAND (v
, 0);
3132 if (TREE_CODE (v
) != ADDR_EXPR
)
3134 v
= TREE_OPERAND (v
, 0);
3136 if (TREE_CODE (v
) != VAR_DECL
3137 || !DECL_VIRTUAL_P (v
)
3138 || !DECL_INITIAL (v
)
3139 || DECL_INITIAL (v
) == error_mark_node
)
3141 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
3142 size
= tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))), 1);
3143 offset
+= token
* size
;
3144 fn
= fold_ctor_reference (TREE_TYPE (TREE_TYPE (v
)), DECL_INITIAL (v
),
3145 offset
, size
, vtable
);
3146 if (!fn
|| integer_zerop (fn
))
3148 gcc_assert (TREE_CODE (fn
) == ADDR_EXPR
3149 || TREE_CODE (fn
) == FDESC_EXPR
);
3150 fn
= TREE_OPERAND (fn
, 0);
3151 gcc_assert (TREE_CODE (fn
) == FUNCTION_DECL
);
3153 /* When cgraph node is missing and function is not public, we cannot
3154 devirtualize. This can happen in WHOPR when the actual method
3155 ends up in other partition, because we found devirtualization
3156 possibility too late. */
3157 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
3160 /* Make sure we create a cgraph node for functions we'll reference.
3161 They can be non-existent if the reference comes from an entry
3162 of an external vtable for example. */
3163 cgraph_get_create_node (fn
);
3168 /* Return true iff VAL is a gimple expression that is known to be
3169 non-negative. Restricted to floating-point inputs. */
3172 gimple_val_nonnegative_real_p (tree val
)
3176 gcc_assert (val
&& SCALAR_FLOAT_TYPE_P (TREE_TYPE (val
)));
3178 /* Use existing logic for non-gimple trees. */
3179 if (tree_expr_nonnegative_p (val
))
3182 if (TREE_CODE (val
) != SSA_NAME
)
3185 /* Currently we look only at the immediately defining statement
3186 to make this determination, since recursion on defining
3187 statements of operands can lead to quadratic behavior in the
3188 worst case. This is expected to catch almost all occurrences
3189 in practice. It would be possible to implement limited-depth
3190 recursion if important cases are lost. Alternatively, passes
3191 that need this information (such as the pow/powi lowering code
3192 in the cse_sincos pass) could be revised to provide it through
3193 dataflow propagation. */
3195 def_stmt
= SSA_NAME_DEF_STMT (val
);
3197 if (is_gimple_assign (def_stmt
))
3201 /* See fold-const.c:tree_expr_nonnegative_p for additional
3202 cases that could be handled with recursion. */
3204 switch (gimple_assign_rhs_code (def_stmt
))
3207 /* Always true for floating-point operands. */
3211 /* True if the two operands are identical (since we are
3212 restricted to floating-point inputs). */
3213 op0
= gimple_assign_rhs1 (def_stmt
);
3214 op1
= gimple_assign_rhs2 (def_stmt
);
3217 || operand_equal_p (op0
, op1
, 0))
3224 else if (is_gimple_call (def_stmt
))
3226 tree fndecl
= gimple_call_fndecl (def_stmt
);
3228 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3232 switch (DECL_FUNCTION_CODE (fndecl
))
3234 CASE_FLT_FN (BUILT_IN_ACOS
):
3235 CASE_FLT_FN (BUILT_IN_ACOSH
):
3236 CASE_FLT_FN (BUILT_IN_CABS
):
3237 CASE_FLT_FN (BUILT_IN_COSH
):
3238 CASE_FLT_FN (BUILT_IN_ERFC
):
3239 CASE_FLT_FN (BUILT_IN_EXP
):
3240 CASE_FLT_FN (BUILT_IN_EXP10
):
3241 CASE_FLT_FN (BUILT_IN_EXP2
):
3242 CASE_FLT_FN (BUILT_IN_FABS
):
3243 CASE_FLT_FN (BUILT_IN_FDIM
):
3244 CASE_FLT_FN (BUILT_IN_HYPOT
):
3245 CASE_FLT_FN (BUILT_IN_POW10
):
3248 CASE_FLT_FN (BUILT_IN_SQRT
):
3249 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3250 nonnegative inputs. */
3251 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val
))))
3256 CASE_FLT_FN (BUILT_IN_POWI
):
3257 /* True if the second argument is an even integer. */
3258 arg1
= gimple_call_arg (def_stmt
, 1);
3260 if (TREE_CODE (arg1
) == INTEGER_CST
3261 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
3266 CASE_FLT_FN (BUILT_IN_POW
):
3267 /* True if the second argument is an even integer-valued
3269 arg1
= gimple_call_arg (def_stmt
, 1);
3271 if (TREE_CODE (arg1
) == REAL_CST
)
3276 c
= TREE_REAL_CST (arg1
);
3277 n
= real_to_integer (&c
);
3281 REAL_VALUE_TYPE cint
;
3282 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
3283 if (real_identical (&c
, &cint
))