/* Statement simplification on GIMPLE.
   Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "gimple-fold.h"
35 /* Return true when DECL can be referenced from current unit.
36 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
37 We can get declarations that are not possible to reference for various
40 1) When analyzing C++ virtual tables.
41 C++ virtual tables do have known constructors even
42 when they are keyed to other compilation unit.
43 Those tables can contain pointers to methods and vars
44 in other units. Those methods have both STATIC and EXTERNAL
46 2) In WHOPR mode devirtualization might lead to reference
47 to method that was partitioned elsehwere.
48 In this case we have static VAR_DECL or FUNCTION_DECL
49 that has no corresponding callgraph/varpool node
51 3) COMDAT functions referred by external vtables that
52 we devirtualize only during final copmilation stage.
53 At this time we already decided that we will not output
54 the function body and thus we can't reference the symbol
58 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
60 struct varpool_node
*vnode
;
61 struct cgraph_node
*node
;
64 /* We will later output the initializer, so we can refer to it.
65 So we are concerned only when DECL comes from initializer of
68 || TREE_CODE (from_decl
) != VAR_DECL
69 || !DECL_EXTERNAL (from_decl
)
71 && symtab_get_node (from_decl
)->symbol
.in_other_partition
))
73 /* We are concerned only about static/external vars and functions. */
74 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
75 || (TREE_CODE (decl
) != VAR_DECL
&& TREE_CODE (decl
) != FUNCTION_DECL
))
77 /* Weakrefs have somewhat confusing DECL_EXTERNAL flag set; they
79 if (DECL_EXTERNAL (decl
)
80 && lookup_attribute ("weakref", DECL_ATTRIBUTES (decl
)))
82 /* We are folding reference from external vtable. The vtable may reffer
83 to a symbol keyed to other compilation unit. The other compilation
84 unit may be in separate DSO and the symbol may be hidden. */
85 if (DECL_VISIBILITY_SPECIFIED (decl
)
86 && DECL_EXTERNAL (decl
)
87 && (!(snode
= symtab_get_node (decl
)) || !snode
->symbol
.in_other_partition
))
89 /* When function is public, we always can introduce new reference.
90 Exception are the COMDAT functions where introducing a direct
91 reference imply need to include function body in the curren tunit. */
92 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
94 /* We are not at ltrans stage; so don't worry about WHOPR.
95 Also when still gimplifying all referred comdat functions will be
98 As observed in PR20991 for already optimized out comdat virtual functions
99 it may be tempting to not necessarily give up because the copy will be
100 output elsewhere when corresponding vtable is output.
101 This is however not possible - ABI specify that COMDATs are output in
102 units where they are used and when the other unit was compiled with LTO
103 it is possible that vtable was kept public while the function itself
105 if (!flag_ltrans
&& (!DECL_COMDAT (decl
) || !cgraph_function_flags_ready
))
108 /* OK we are seeing either COMDAT or static variable. In this case we must
109 check that the definition is still around so we can refer it. */
110 if (TREE_CODE (decl
) == FUNCTION_DECL
)
112 node
= cgraph_get_node (decl
);
113 /* Check that we still have function body and that we didn't took
114 the decision to eliminate offline copy of the function yet.
115 The second is important when devirtualization happens during final
116 compilation stage when making a new reference no longer makes callee
118 if (!node
|| !node
->analyzed
|| node
->global
.inlined_to
)
120 gcc_checking_assert (!TREE_ASM_WRITTEN (decl
));
124 else if (TREE_CODE (decl
) == VAR_DECL
)
126 vnode
= varpool_get_node (decl
);
127 if (!vnode
|| !vnode
->analyzed
)
129 gcc_checking_assert (!TREE_ASM_WRITTEN (decl
));
136 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
137 acceptable form for is_gimple_min_invariant.
138 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
141 canonicalize_constructor_val (tree cval
, tree from_decl
)
144 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
145 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
147 tree ptr
= TREE_OPERAND (cval
, 0);
148 if (is_gimple_min_invariant (ptr
))
149 cval
= build1_loc (EXPR_LOCATION (cval
),
150 ADDR_EXPR
, TREE_TYPE (ptr
),
151 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
153 fold_convert (ptr_type_node
,
154 TREE_OPERAND (cval
, 1))));
156 if (TREE_CODE (cval
) == ADDR_EXPR
)
158 tree base
= get_base_address (TREE_OPERAND (cval
, 0));
159 if (!base
&& TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
161 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
163 TREE_OPERAND (cval
, 0) = base
;
168 if ((TREE_CODE (base
) == VAR_DECL
169 || TREE_CODE (base
) == FUNCTION_DECL
)
170 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
172 if (TREE_CODE (base
) == VAR_DECL
)
174 TREE_ADDRESSABLE (base
) = 1;
175 if (cfun
&& gimple_referenced_vars (cfun
)
176 && !is_global_var (base
))
177 add_referenced_var (base
);
179 else if (TREE_CODE (base
) == FUNCTION_DECL
)
181 /* Make sure we create a cgraph node for functions we'll reference.
182 They can be non-existent if the reference comes from an entry
183 of an external vtable for example. */
184 cgraph_get_create_node (base
);
186 /* Fixup types in global initializers. */
187 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
188 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
193 /* If SYM is a constant variable with known value, return the value.
194 NULL_TREE is returned otherwise. */
197 get_symbol_constant_value (tree sym
)
199 if (const_value_known_p (sym
))
201 tree val
= DECL_INITIAL (sym
);
204 val
= canonicalize_constructor_val (val
, sym
);
205 if (val
&& is_gimple_min_invariant (val
))
210 /* Variables declared 'const' without an initializer
211 have zero as the initializer if they may not be
212 overridden at link or run time. */
214 && (INTEGRAL_TYPE_P (TREE_TYPE (sym
))
215 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym
))))
216 return build_zero_cst (TREE_TYPE (sym
));
224 /* Subroutine of fold_stmt. We perform several simplifications of the
225 memory reference tree EXPR and make sure to re-gimplify them properly
226 after propagation of constant addresses. IS_LHS is true if the
227 reference is supposed to be an lvalue. */
230 maybe_fold_reference (tree expr
, bool is_lhs
)
235 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
236 || TREE_CODE (expr
) == REALPART_EXPR
237 || TREE_CODE (expr
) == IMAGPART_EXPR
)
238 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
239 return fold_unary_loc (EXPR_LOCATION (expr
),
242 TREE_OPERAND (expr
, 0));
243 else if (TREE_CODE (expr
) == BIT_FIELD_REF
244 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
245 return fold_ternary_loc (EXPR_LOCATION (expr
),
248 TREE_OPERAND (expr
, 0),
249 TREE_OPERAND (expr
, 1),
250 TREE_OPERAND (expr
, 2));
252 while (handled_component_p (*t
))
253 t
= &TREE_OPERAND (*t
, 0);
255 /* Canonicalize MEM_REFs invariant address operand. Do this first
256 to avoid feeding non-canonical MEM_REFs elsewhere. */
257 if (TREE_CODE (*t
) == MEM_REF
258 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)))
260 bool volatile_p
= TREE_THIS_VOLATILE (*t
);
261 tree tem
= fold_binary (MEM_REF
, TREE_TYPE (*t
),
262 TREE_OPERAND (*t
, 0),
263 TREE_OPERAND (*t
, 1));
266 TREE_THIS_VOLATILE (tem
) = volatile_p
;
268 tem
= maybe_fold_reference (expr
, is_lhs
);
276 && (result
= fold_const_aggregate_ref (expr
))
277 && is_gimple_min_invariant (result
))
280 /* Fold back MEM_REFs to reference trees. */
281 if (TREE_CODE (*t
) == MEM_REF
282 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
283 && integer_zerop (TREE_OPERAND (*t
, 1))
284 && (TREE_THIS_VOLATILE (*t
)
285 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0)))
286 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t
, 1)))
287 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t
))
288 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t
, 1)))))
289 /* We have to look out here to not drop a required conversion
290 from the rhs to the lhs if is_lhs, but we don't have the
291 rhs here to verify that. Thus require strict type
293 && types_compatible_p (TREE_TYPE (*t
),
294 TREE_TYPE (TREE_OPERAND
295 (TREE_OPERAND (*t
, 0), 0))))
298 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
299 tem
= maybe_fold_reference (expr
, is_lhs
);
304 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
306 tree tem
= maybe_fold_tmr (*t
);
310 tem
= maybe_fold_reference (expr
, is_lhs
);
321 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
322 replacement rhs for the statement or NULL_TREE if no simplification
323 could be made. It is assumed that the operands have been previously
327 fold_gimple_assign (gimple_stmt_iterator
*si
)
329 gimple stmt
= gsi_stmt (*si
);
330 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
331 location_t loc
= gimple_location (stmt
);
333 tree result
= NULL_TREE
;
335 switch (get_gimple_rhs_class (subcode
))
337 case GIMPLE_SINGLE_RHS
:
339 tree rhs
= gimple_assign_rhs1 (stmt
);
341 if (REFERENCE_CLASS_P (rhs
))
342 return maybe_fold_reference (rhs
, false);
344 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
346 tree ref
= TREE_OPERAND (rhs
, 0);
347 tree tem
= maybe_fold_reference (ref
, true);
349 && TREE_CODE (tem
) == MEM_REF
350 && integer_zerop (TREE_OPERAND (tem
, 1)))
351 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
353 result
= fold_convert (TREE_TYPE (rhs
),
354 build_fold_addr_expr_loc (loc
, tem
));
355 else if (TREE_CODE (ref
) == MEM_REF
356 && integer_zerop (TREE_OPERAND (ref
, 1)))
357 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
360 else if (TREE_CODE (rhs
) == CONSTRUCTOR
361 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
362 && (CONSTRUCTOR_NELTS (rhs
)
363 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
365 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
369 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
370 if (TREE_CODE (val
) != INTEGER_CST
371 && TREE_CODE (val
) != REAL_CST
372 && TREE_CODE (val
) != FIXED_CST
)
375 return build_vector_from_ctor (TREE_TYPE (rhs
),
376 CONSTRUCTOR_ELTS (rhs
));
379 else if (DECL_P (rhs
))
380 return unshare_expr (get_symbol_constant_value (rhs
));
382 /* If we couldn't fold the RHS, hand over to the generic
384 if (result
== NULL_TREE
)
387 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
388 that may have been added by fold, and "useless" type
389 conversions that might now be apparent due to propagation. */
390 STRIP_USELESS_TYPE_CONVERSION (result
);
392 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
399 case GIMPLE_UNARY_RHS
:
401 tree rhs
= gimple_assign_rhs1 (stmt
);
403 result
= fold_unary_loc (loc
, subcode
, gimple_expr_type (stmt
), rhs
);
406 /* If the operation was a conversion do _not_ mark a
407 resulting constant with TREE_OVERFLOW if the original
408 constant was not. These conversions have implementation
409 defined behavior and retaining the TREE_OVERFLOW flag
410 here would confuse later passes such as VRP. */
411 if (CONVERT_EXPR_CODE_P (subcode
)
412 && TREE_CODE (result
) == INTEGER_CST
413 && TREE_CODE (rhs
) == INTEGER_CST
)
414 TREE_OVERFLOW (result
) = TREE_OVERFLOW (rhs
);
416 STRIP_USELESS_TYPE_CONVERSION (result
);
417 if (valid_gimple_rhs_p (result
))
423 case GIMPLE_BINARY_RHS
:
424 /* Try to canonicalize for boolean-typed X the comparisons
425 X == 0, X == 1, X != 0, and X != 1. */
426 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
427 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
429 tree lhs
= gimple_assign_lhs (stmt
);
430 tree op1
= gimple_assign_rhs1 (stmt
);
431 tree op2
= gimple_assign_rhs2 (stmt
);
432 tree type
= TREE_TYPE (op1
);
434 /* Check whether the comparison operands are of the same boolean
435 type as the result type is.
436 Check that second operand is an integer-constant with value
438 if (TREE_CODE (op2
) == INTEGER_CST
439 && (integer_zerop (op2
) || integer_onep (op2
))
440 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
442 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
443 bool is_logical_not
= false;
445 /* X == 0 and X != 1 is a logical-not.of X
446 X == 1 and X != 0 is X */
447 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
448 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
449 is_logical_not
= true;
451 if (is_logical_not
== false)
453 /* Only for one-bit precision typed X the transformation
454 !X -> ~X is valied. */
455 else if (TYPE_PRECISION (type
) == 1)
456 result
= build1_loc (gimple_location (stmt
), BIT_NOT_EXPR
,
458 /* Otherwise we use !X -> X ^ 1. */
460 result
= build2_loc (gimple_location (stmt
), BIT_XOR_EXPR
,
461 type
, op1
, build_int_cst (type
, 1));
467 result
= fold_binary_loc (loc
, subcode
,
468 TREE_TYPE (gimple_assign_lhs (stmt
)),
469 gimple_assign_rhs1 (stmt
),
470 gimple_assign_rhs2 (stmt
));
474 STRIP_USELESS_TYPE_CONVERSION (result
);
475 if (valid_gimple_rhs_p (result
))
480 case GIMPLE_TERNARY_RHS
:
481 /* Try to fold a conditional expression. */
482 if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
484 tree op0
= gimple_assign_rhs1 (stmt
);
487 location_t cond_loc
= gimple_location (stmt
);
489 if (COMPARISON_CLASS_P (op0
))
491 fold_defer_overflow_warnings ();
492 tem
= fold_binary_loc (cond_loc
,
493 TREE_CODE (op0
), TREE_TYPE (op0
),
494 TREE_OPERAND (op0
, 0),
495 TREE_OPERAND (op0
, 1));
496 /* This is actually a conditional expression, not a GIMPLE
497 conditional statement, however, the valid_gimple_rhs_p
498 test still applies. */
499 set
= (tem
&& is_gimple_condexpr (tem
)
500 && valid_gimple_rhs_p (tem
));
501 fold_undefer_overflow_warnings (set
, stmt
, 0);
503 else if (is_gimple_min_invariant (op0
))
512 result
= fold_build3_loc (cond_loc
, COND_EXPR
,
513 TREE_TYPE (gimple_assign_lhs (stmt
)), tem
,
514 gimple_assign_rhs2 (stmt
),
515 gimple_assign_rhs3 (stmt
));
519 result
= fold_ternary_loc (loc
, subcode
,
520 TREE_TYPE (gimple_assign_lhs (stmt
)),
521 gimple_assign_rhs1 (stmt
),
522 gimple_assign_rhs2 (stmt
),
523 gimple_assign_rhs3 (stmt
));
527 STRIP_USELESS_TYPE_CONVERSION (result
);
528 if (valid_gimple_rhs_p (result
))
533 case GIMPLE_INVALID_RHS
:
540 /* Attempt to fold a conditional statement. Return true if any changes were
541 made. We only attempt to fold the condition expression, and do not perform
542 any transformation that would require alteration of the cfg. It is
543 assumed that the operands have been previously folded. */
546 fold_gimple_cond (gimple stmt
)
548 tree result
= fold_binary_loc (gimple_location (stmt
),
549 gimple_cond_code (stmt
),
551 gimple_cond_lhs (stmt
),
552 gimple_cond_rhs (stmt
));
556 STRIP_USELESS_TYPE_CONVERSION (result
);
557 if (is_gimple_condexpr (result
) && valid_gimple_rhs_p (result
))
559 gimple_cond_set_condition_from_tree (stmt
, result
);
567 /* Convert EXPR into a GIMPLE value suitable for substitution on the
568 RHS of an assignment. Insert the necessary statements before
569 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
570 is replaced. If the call is expected to produces a result, then it
571 is replaced by an assignment of the new RHS to the result variable.
572 If the result is to be ignored, then the call is replaced by a
573 GIMPLE_NOP. A proper VDEF chain is retained by making the first
574 VUSE and the last VDEF of the whole sequence be the same as the replaced
575 statement and using new SSA names for stores in between. */
578 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
581 gimple stmt
, new_stmt
;
582 gimple_stmt_iterator i
;
583 gimple_seq stmts
= NULL
;
584 struct gimplify_ctx gctx
;
588 stmt
= gsi_stmt (*si_p
);
590 gcc_assert (is_gimple_call (stmt
));
592 push_gimplify_context (&gctx
);
593 gctx
.into_ssa
= gimple_in_ssa_p (cfun
);
595 lhs
= gimple_call_lhs (stmt
);
596 if (lhs
== NULL_TREE
)
598 gimplify_and_add (expr
, &stmts
);
599 /* We can end up with folding a memcpy of an empty class assignment
600 which gets optimized away by C++ gimplification. */
601 if (gimple_seq_empty_p (stmts
))
603 pop_gimplify_context (NULL
);
604 if (gimple_in_ssa_p (cfun
))
606 unlink_stmt_vdef (stmt
);
609 gsi_remove (si_p
, true);
615 tree tmp
= get_initialized_tmp_var (expr
, &stmts
, NULL
);
616 new_stmt
= gimple_build_assign (lhs
, tmp
);
617 i
= gsi_last (stmts
);
618 gsi_insert_after_without_update (&i
, new_stmt
,
619 GSI_CONTINUE_LINKING
);
622 pop_gimplify_context (NULL
);
624 if (gimple_has_location (stmt
))
625 annotate_all_with_location (stmts
, gimple_location (stmt
));
627 /* First iterate over the replacement statements backward, assigning
628 virtual operands to their defining statements. */
630 for (i
= gsi_last (stmts
); !gsi_end_p (i
); gsi_prev (&i
))
632 new_stmt
= gsi_stmt (i
);
633 if ((gimple_assign_single_p (new_stmt
)
634 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
635 || (is_gimple_call (new_stmt
)
636 && (gimple_call_flags (new_stmt
)
637 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
641 vdef
= gimple_vdef (stmt
);
643 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
644 gimple_set_vdef (new_stmt
, vdef
);
645 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
646 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
647 laststore
= new_stmt
;
651 /* Second iterate over the statements forward, assigning virtual
652 operands to their uses. */
653 reaching_vuse
= gimple_vuse (stmt
);
654 for (i
= gsi_start (stmts
); !gsi_end_p (i
); gsi_next (&i
))
656 new_stmt
= gsi_stmt (i
);
657 /* The replacement can expose previously unreferenced variables. */
658 if (gimple_in_ssa_p (cfun
))
659 find_referenced_vars_in (new_stmt
);
660 /* If the new statement possibly has a VUSE, update it with exact SSA
661 name we know will reach this one. */
662 if (gimple_has_mem_ops (new_stmt
))
663 gimple_set_vuse (new_stmt
, reaching_vuse
);
664 gimple_set_modified (new_stmt
, true);
665 if (gimple_vdef (new_stmt
))
666 reaching_vuse
= gimple_vdef (new_stmt
);
669 /* If the new sequence does not do a store release the virtual
670 definition of the original statement. */
672 && reaching_vuse
== gimple_vuse (stmt
))
674 tree vdef
= gimple_vdef (stmt
);
676 && TREE_CODE (vdef
) == SSA_NAME
)
678 unlink_stmt_vdef (stmt
);
679 release_ssa_name (vdef
);
683 /* Finally replace the original statement with the sequence. */
684 gsi_replace_with_seq (si_p
, stmts
, false);
687 /* Return the string length, maximum string length or maximum value of
689 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
690 is not NULL and, for TYPE == 0, its value is not equal to the length
691 we determine or if we are unable to determine the length or value,
692 return false. VISITED is a bitmap of visited variables.
693 TYPE is 0 if string length should be returned, 1 for maximum string
694 length and 2 for maximum value ARG can have. */
697 get_maxval_strlen (tree arg
, tree
*length
, bitmap visited
, int type
)
702 if (TREE_CODE (arg
) != SSA_NAME
)
704 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
705 if (TREE_CODE (arg
) == ADDR_EXPR
706 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
707 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg
, 0), 1)))
709 tree aop0
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
710 if (TREE_CODE (aop0
) == INDIRECT_REF
711 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
712 return get_maxval_strlen (TREE_OPERAND (aop0
, 0),
713 length
, visited
, type
);
719 if (TREE_CODE (val
) != INTEGER_CST
720 || tree_int_cst_sgn (val
) < 0)
724 val
= c_strlen (arg
, 1);
732 if (TREE_CODE (*length
) != INTEGER_CST
733 || TREE_CODE (val
) != INTEGER_CST
)
736 if (tree_int_cst_lt (*length
, val
))
740 else if (simple_cst_equal (val
, *length
) != 1)
748 /* If we were already here, break the infinite cycle. */
749 if (!bitmap_set_bit (visited
, SSA_NAME_VERSION (arg
)))
753 def_stmt
= SSA_NAME_DEF_STMT (var
);
755 switch (gimple_code (def_stmt
))
758 /* The RHS of the statement defining VAR must either have a
759 constant length or come from another SSA_NAME with a constant
761 if (gimple_assign_single_p (def_stmt
)
762 || gimple_assign_unary_nop_p (def_stmt
))
764 tree rhs
= gimple_assign_rhs1 (def_stmt
);
765 return get_maxval_strlen (rhs
, length
, visited
, type
);
767 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
769 tree op2
= gimple_assign_rhs2 (def_stmt
);
770 tree op3
= gimple_assign_rhs3 (def_stmt
);
771 return get_maxval_strlen (op2
, length
, visited
, type
)
772 && get_maxval_strlen (op3
, length
, visited
, type
);
778 /* All the arguments of the PHI node must have the same constant
782 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
784 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
786 /* If this PHI has itself as an argument, we cannot
787 determine the string length of this argument. However,
788 if we can find a constant string length for the other
789 PHI args then we can still be sure that this is a
790 constant string length. So be optimistic and just
791 continue with the next argument. */
792 if (arg
== gimple_phi_result (def_stmt
))
795 if (!get_maxval_strlen (arg
, length
, visited
, type
))
807 /* Fold builtin call in statement STMT. Returns a simplified tree.
808 We may return a non-constant expression, including another call
809 to a different function and with different arguments, e.g.,
810 substituting memcpy for strcpy when the string length is known.
811 Note that some builtins expand into inline code that may not
812 be valid in GIMPLE. Callers must take care. */
815 gimple_fold_builtin (gimple stmt
)
823 location_t loc
= gimple_location (stmt
);
825 gcc_assert (is_gimple_call (stmt
));
827 ignore
= (gimple_call_lhs (stmt
) == NULL
);
829 /* First try the generic builtin folder. If that succeeds, return the
831 result
= fold_call_stmt (stmt
, ignore
);
839 /* Ignore MD builtins. */
840 callee
= gimple_call_fndecl (stmt
);
841 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_MD
)
844 /* Give up for always_inline inline builtins until they are
846 if (avoid_folding_inline_builtin (callee
))
849 /* If the builtin could not be folded, and it has no argument list,
851 nargs
= gimple_call_num_args (stmt
);
855 /* Limit the work only for builtins we know how to simplify. */
856 switch (DECL_FUNCTION_CODE (callee
))
858 case BUILT_IN_STRLEN
:
860 case BUILT_IN_FPUTS_UNLOCKED
:
864 case BUILT_IN_STRCPY
:
865 case BUILT_IN_STRNCPY
:
869 case BUILT_IN_MEMCPY_CHK
:
870 case BUILT_IN_MEMPCPY_CHK
:
871 case BUILT_IN_MEMMOVE_CHK
:
872 case BUILT_IN_MEMSET_CHK
:
873 case BUILT_IN_STRNCPY_CHK
:
874 case BUILT_IN_STPNCPY_CHK
:
878 case BUILT_IN_STRCPY_CHK
:
879 case BUILT_IN_STPCPY_CHK
:
883 case BUILT_IN_SNPRINTF_CHK
:
884 case BUILT_IN_VSNPRINTF_CHK
:
892 if (arg_idx
>= nargs
)
895 /* Try to use the dataflow information gathered by the CCP process. */
896 visited
= BITMAP_ALLOC (NULL
);
897 bitmap_clear (visited
);
899 memset (val
, 0, sizeof (val
));
900 a
= gimple_call_arg (stmt
, arg_idx
);
901 if (!get_maxval_strlen (a
, &val
[arg_idx
], visited
, type
))
902 val
[arg_idx
] = NULL_TREE
;
904 BITMAP_FREE (visited
);
907 switch (DECL_FUNCTION_CODE (callee
))
909 case BUILT_IN_STRLEN
:
910 if (val
[0] && nargs
== 1)
913 fold_convert (TREE_TYPE (gimple_call_lhs (stmt
)), val
[0]);
915 /* If the result is not a valid gimple value, or not a cast
916 of a valid gimple value, then we cannot use the result. */
917 if (is_gimple_val (new_val
)
918 || (CONVERT_EXPR_P (new_val
)
919 && is_gimple_val (TREE_OPERAND (new_val
, 0))))
924 case BUILT_IN_STRCPY
:
925 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 2)
926 result
= fold_builtin_strcpy (loc
, callee
,
927 gimple_call_arg (stmt
, 0),
928 gimple_call_arg (stmt
, 1),
932 case BUILT_IN_STRNCPY
:
933 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
934 result
= fold_builtin_strncpy (loc
, callee
,
935 gimple_call_arg (stmt
, 0),
936 gimple_call_arg (stmt
, 1),
937 gimple_call_arg (stmt
, 2),
943 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
944 gimple_call_arg (stmt
, 1),
945 ignore
, false, val
[0]);
948 case BUILT_IN_FPUTS_UNLOCKED
:
950 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
951 gimple_call_arg (stmt
, 1),
952 ignore
, true, val
[0]);
955 case BUILT_IN_MEMCPY_CHK
:
956 case BUILT_IN_MEMPCPY_CHK
:
957 case BUILT_IN_MEMMOVE_CHK
:
958 case BUILT_IN_MEMSET_CHK
:
959 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
960 result
= fold_builtin_memory_chk (loc
, callee
,
961 gimple_call_arg (stmt
, 0),
962 gimple_call_arg (stmt
, 1),
963 gimple_call_arg (stmt
, 2),
964 gimple_call_arg (stmt
, 3),
966 DECL_FUNCTION_CODE (callee
));
969 case BUILT_IN_STRCPY_CHK
:
970 case BUILT_IN_STPCPY_CHK
:
971 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
972 result
= fold_builtin_stxcpy_chk (loc
, callee
,
973 gimple_call_arg (stmt
, 0),
974 gimple_call_arg (stmt
, 1),
975 gimple_call_arg (stmt
, 2),
977 DECL_FUNCTION_CODE (callee
));
980 case BUILT_IN_STRNCPY_CHK
:
981 case BUILT_IN_STPNCPY_CHK
:
982 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
983 result
= fold_builtin_stxncpy_chk (loc
, gimple_call_arg (stmt
, 0),
984 gimple_call_arg (stmt
, 1),
985 gimple_call_arg (stmt
, 2),
986 gimple_call_arg (stmt
, 3),
988 DECL_FUNCTION_CODE (callee
));
991 case BUILT_IN_SNPRINTF_CHK
:
992 case BUILT_IN_VSNPRINTF_CHK
:
993 if (val
[1] && is_gimple_val (val
[1]))
994 result
= gimple_fold_builtin_snprintf_chk (stmt
, val
[1],
995 DECL_FUNCTION_CODE (callee
));
1002 if (result
&& ignore
)
1003 result
= fold_ignored_result (result
);
1008 /* Return a binfo to be used for devirtualization of calls based on an object
1009 represented by a declaration (i.e. a global or automatically allocated one)
1010 or NULL if it cannot be found or is not safe. CST is expected to be an
1011 ADDR_EXPR of such object or the function will return NULL. Currently it is
1012 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
1015 gimple_extract_devirt_binfo_from_cst (tree cst
)
1017 HOST_WIDE_INT offset
, size
, max_size
;
1018 tree base
, type
, expected_type
, binfo
;
1019 bool last_artificial
= false;
1021 if (!flag_devirtualize
1022 || TREE_CODE (cst
) != ADDR_EXPR
1023 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst
))) != RECORD_TYPE
)
1026 cst
= TREE_OPERAND (cst
, 0);
1027 expected_type
= TREE_TYPE (cst
);
1028 base
= get_ref_base_and_extent (cst
, &offset
, &size
, &max_size
);
1029 type
= TREE_TYPE (base
);
1033 || TREE_CODE (type
) != RECORD_TYPE
)
1036 /* Find the sub-object the constant actually refers to and mark whether it is
1037 an artificial one (as opposed to a user-defined one). */
1040 HOST_WIDE_INT pos
, size
;
1043 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (expected_type
))
1048 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
1050 if (TREE_CODE (fld
) != FIELD_DECL
)
1053 pos
= int_bit_position (fld
);
1054 size
= tree_low_cst (DECL_SIZE (fld
), 1);
1055 if (pos
<= offset
&& (pos
+ size
) > offset
)
1058 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
1061 last_artificial
= DECL_ARTIFICIAL (fld
);
1062 type
= TREE_TYPE (fld
);
1065 /* Artificial sub-objects are ancestors, we do not want to use them for
1066 devirtualization, at least not here. */
1067 if (last_artificial
)
1069 binfo
= TYPE_BINFO (type
);
1070 if (!binfo
|| BINFO_N_BASE_BINFOS (binfo
) > 0)
1076 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1077 The statement may be replaced by another statement, e.g., if the call
1078 simplifies to a constant value. Return true if any changes were made.
1079 It is assumed that the operands have been previously folded. */
1082 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
1084 gimple stmt
= gsi_stmt (*gsi
);
1086 bool changed
= false;
1089 /* Fold *& in call arguments. */
1090 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1091 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
1093 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
1096 gimple_call_set_arg (stmt
, i
, tmp
);
1101 /* Check for virtual calls that became direct calls. */
1102 callee
= gimple_call_fn (stmt
);
1103 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
1105 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
1107 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
1112 tree obj
= OBJ_TYPE_REF_OBJECT (callee
);
1113 tree binfo
= gimple_extract_devirt_binfo_from_cst (obj
);
1117 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee
));
1118 tree fndecl
= gimple_get_virt_method_for_binfo (token
, binfo
);
1121 gimple_call_set_fndecl (stmt
, fndecl
);
1131 /* Check for builtins that CCP can handle using information not
1132 available in the generic fold routines. */
1133 callee
= gimple_call_fndecl (stmt
);
1134 if (callee
&& DECL_BUILT_IN (callee
))
1136 tree result
= gimple_fold_builtin (stmt
);
1139 if (!update_call_from_tree (gsi
, result
))
1140 gimplify_and_update_call_from_tree (gsi
, result
);
1148 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1149 distinguishes both cases. */
1152 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
)
1154 bool changed
= false;
1155 gimple stmt
= gsi_stmt (*gsi
);
1157 gimple_stmt_iterator gsinext
= *gsi
;
1160 gsi_next (&gsinext
);
1161 next_stmt
= gsi_end_p (gsinext
) ? NULL
: gsi_stmt (gsinext
);
1163 /* Fold the main computation performed by the statement. */
1164 switch (gimple_code (stmt
))
1168 unsigned old_num_ops
= gimple_num_ops (stmt
);
1169 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
1170 tree lhs
= gimple_assign_lhs (stmt
);
1172 /* First canonicalize operand order. This avoids building new
1173 trees if this is the only thing fold would later do. */
1174 if ((commutative_tree_code (subcode
)
1175 || commutative_ternary_tree_code (subcode
))
1176 && tree_swap_operands_p (gimple_assign_rhs1 (stmt
),
1177 gimple_assign_rhs2 (stmt
), false))
1179 tree tem
= gimple_assign_rhs1 (stmt
);
1180 gimple_assign_set_rhs1 (stmt
, gimple_assign_rhs2 (stmt
));
1181 gimple_assign_set_rhs2 (stmt
, tem
);
1184 new_rhs
= fold_gimple_assign (gsi
);
1186 && !useless_type_conversion_p (TREE_TYPE (lhs
),
1187 TREE_TYPE (new_rhs
)))
1188 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
1191 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
1193 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
1200 changed
|= fold_gimple_cond (stmt
);
1204 changed
|= gimple_fold_call (gsi
, inplace
);
1208 /* Fold *& in asm operands. */
1211 const char **oconstraints
;
1212 const char *constraint
;
1213 bool allows_mem
, allows_reg
;
1215 noutputs
= gimple_asm_noutputs (stmt
);
1216 oconstraints
= XALLOCAVEC (const char *, noutputs
);
1218 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
1220 tree link
= gimple_asm_output_op (stmt
, i
);
1221 tree op
= TREE_VALUE (link
);
1223 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1224 if (REFERENCE_CLASS_P (op
)
1225 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
1227 TREE_VALUE (link
) = op
;
1231 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
1233 tree link
= gimple_asm_input_op (stmt
, i
);
1234 tree op
= TREE_VALUE (link
);
1236 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1237 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
1238 oconstraints
, &allows_mem
, &allows_reg
);
1239 if (REFERENCE_CLASS_P (op
)
1240 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
1243 TREE_VALUE (link
) = op
;
1251 if (gimple_debug_bind_p (stmt
))
1253 tree val
= gimple_debug_bind_get_value (stmt
);
1255 && REFERENCE_CLASS_P (val
))
1257 tree tem
= maybe_fold_reference (val
, false);
1260 gimple_debug_bind_set_value (stmt
, tem
);
1265 && TREE_CODE (val
) == ADDR_EXPR
)
1267 tree ref
= TREE_OPERAND (val
, 0);
1268 tree tem
= maybe_fold_reference (ref
, false);
1271 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
1272 gimple_debug_bind_set_value (stmt
, tem
);
1282 /* If stmt folds into nothing and it was the last stmt in a bb,
1283 don't call gsi_stmt. */
1284 if (gsi_end_p (*gsi
))
1286 gcc_assert (next_stmt
== NULL
);
1290 stmt
= gsi_stmt (*gsi
);
1292 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1293 as we'd changing the next stmt. */
1294 if (gimple_has_lhs (stmt
) && stmt
!= next_stmt
)
1296 tree lhs
= gimple_get_lhs (stmt
);
1297 if (lhs
&& REFERENCE_CLASS_P (lhs
))
1299 tree new_lhs
= maybe_fold_reference (lhs
, true);
1302 gimple_set_lhs (stmt
, new_lhs
);
1311 /* Fold the statement pointed to by GSI. In some cases, this function may
1312 replace the whole statement with a new one. Returns true iff folding
1314 The statement pointed to by GSI should be in valid gimple form but may
1315 be in unfolded state as resulting from for example constant propagation
1316 which can produce *&x = 0. */
1319 fold_stmt (gimple_stmt_iterator
*gsi
)
1321 return fold_stmt_1 (gsi
, false);
1324 /* Perform the minimal folding on statement *GSI. Only operations like
1325 *&x created by constant propagation are handled. The statement cannot
1326 be replaced with a new one. Return true if the statement was
1327 changed, false otherwise.
1328 The statement *GSI should be in valid gimple form but may
1329 be in unfolded state as resulting from for example constant propagation
1330 which can produce *&x = 0. */
1333 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
1335 gimple stmt
= gsi_stmt (*gsi
);
1336 bool changed
= fold_stmt_1 (gsi
, true);
1337 gcc_assert (gsi_stmt (*gsi
) == stmt
);
1341 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1342 if EXPR is null or we don't know how.
1343 If non-null, the result always has boolean type. */
1346 canonicalize_bool (tree expr
, bool invert
)
1352 if (integer_nonzerop (expr
))
1353 return boolean_false_node
;
1354 else if (integer_zerop (expr
))
1355 return boolean_true_node
;
1356 else if (TREE_CODE (expr
) == SSA_NAME
)
1357 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
1358 build_int_cst (TREE_TYPE (expr
), 0));
1359 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1360 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
1362 TREE_OPERAND (expr
, 0),
1363 TREE_OPERAND (expr
, 1));
1369 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1371 if (integer_nonzerop (expr
))
1372 return boolean_true_node
;
1373 else if (integer_zerop (expr
))
1374 return boolean_false_node
;
1375 else if (TREE_CODE (expr
) == SSA_NAME
)
1376 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
1377 build_int_cst (TREE_TYPE (expr
), 0));
1378 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1379 return fold_build2 (TREE_CODE (expr
),
1381 TREE_OPERAND (expr
, 0),
1382 TREE_OPERAND (expr
, 1));
1388 /* Check to see if a boolean expression EXPR is logically equivalent to the
1389 comparison (OP1 CODE OP2). Check for various identities involving
1393 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
1394 const_tree op1
, const_tree op2
)
1398 /* The obvious case. */
1399 if (TREE_CODE (expr
) == code
1400 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
1401 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
1404 /* Check for comparing (name, name != 0) and the case where expr
1405 is an SSA_NAME with a definition matching the comparison. */
1406 if (TREE_CODE (expr
) == SSA_NAME
1407 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1409 if (operand_equal_p (expr
, op1
, 0))
1410 return ((code
== NE_EXPR
&& integer_zerop (op2
))
1411 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
1412 s
= SSA_NAME_DEF_STMT (expr
);
1413 if (is_gimple_assign (s
)
1414 && gimple_assign_rhs_code (s
) == code
1415 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
1416 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
1420 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1421 of name is a comparison, recurse. */
1422 if (TREE_CODE (op1
) == SSA_NAME
1423 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
1425 s
= SSA_NAME_DEF_STMT (op1
);
1426 if (is_gimple_assign (s
)
1427 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
1429 enum tree_code c
= gimple_assign_rhs_code (s
);
1430 if ((c
== NE_EXPR
&& integer_zerop (op2
))
1431 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
1432 return same_bool_comparison_p (expr
, c
,
1433 gimple_assign_rhs1 (s
),
1434 gimple_assign_rhs2 (s
));
1435 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
1436 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
1437 return same_bool_comparison_p (expr
,
1438 invert_tree_comparison (c
, false),
1439 gimple_assign_rhs1 (s
),
1440 gimple_assign_rhs2 (s
));
1446 /* Check to see if two boolean expressions OP1 and OP2 are logically
1450 same_bool_result_p (const_tree op1
, const_tree op2
)
1452 /* Simple cases first. */
1453 if (operand_equal_p (op1
, op2
, 0))
1456 /* Check the cases where at least one of the operands is a comparison.
1457 These are a bit smarter than operand_equal_p in that they apply some
1458 identifies on SSA_NAMEs. */
1459 if (TREE_CODE_CLASS (TREE_CODE (op2
)) == tcc_comparison
1460 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
1461 TREE_OPERAND (op2
, 0),
1462 TREE_OPERAND (op2
, 1)))
1464 if (TREE_CODE_CLASS (TREE_CODE (op1
)) == tcc_comparison
1465 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
1466 TREE_OPERAND (op1
, 0),
1467 TREE_OPERAND (op1
, 1)))
1474 /* Forward declarations for some mutually recursive functions. */
1477 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1478 enum tree_code code2
, tree op2a
, tree op2b
);
1480 and_var_with_comparison (tree var
, bool invert
,
1481 enum tree_code code2
, tree op2a
, tree op2b
);
1483 and_var_with_comparison_1 (gimple stmt
,
1484 enum tree_code code2
, tree op2a
, tree op2b
);
1486 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1487 enum tree_code code2
, tree op2a
, tree op2b
);
1489 or_var_with_comparison (tree var
, bool invert
,
1490 enum tree_code code2
, tree op2a
, tree op2b
);
1492 or_var_with_comparison_1 (gimple stmt
,
1493 enum tree_code code2
, tree op2a
, tree op2b
);
1495 /* Helper function for and_comparisons_1: try to simplify the AND of the
1496 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1497 If INVERT is true, invert the value of the VAR before doing the AND.
1498 Return NULL_EXPR if we can't simplify this to a single expression. */
1501 and_var_with_comparison (tree var
, bool invert
,
1502 enum tree_code code2
, tree op2a
, tree op2b
)
1505 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1507 /* We can only deal with variables whose definitions are assignments. */
1508 if (!is_gimple_assign (stmt
))
1511 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1512 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1513 Then we only have to consider the simpler non-inverted cases. */
1515 t
= or_var_with_comparison_1 (stmt
,
1516 invert_tree_comparison (code2
, false),
1519 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
1520 return canonicalize_bool (t
, invert
);
1523 /* Try to simplify the AND of the ssa variable defined by the assignment
1524 STMT with the comparison specified by (OP2A CODE2 OP2B).
1525 Return NULL_EXPR if we can't simplify this to a single expression. */
1528 and_var_with_comparison_1 (gimple stmt
,
1529 enum tree_code code2
, tree op2a
, tree op2b
)
1531 tree var
= gimple_assign_lhs (stmt
);
1532 tree true_test_var
= NULL_TREE
;
1533 tree false_test_var
= NULL_TREE
;
1534 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
1536 /* Check for identities like (var AND (var == 0)) => false. */
1537 if (TREE_CODE (op2a
) == SSA_NAME
1538 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
1540 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
1541 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
1543 true_test_var
= op2a
;
1544 if (var
== true_test_var
)
1547 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
1548 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
1550 false_test_var
= op2a
;
1551 if (var
== false_test_var
)
1552 return boolean_false_node
;
1556 /* If the definition is a comparison, recurse on it. */
1557 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
1559 tree t
= and_comparisons_1 (innercode
,
1560 gimple_assign_rhs1 (stmt
),
1561 gimple_assign_rhs2 (stmt
),
1569 /* If the definition is an AND or OR expression, we may be able to
1570 simplify by reassociating. */
1571 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
1572 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
1574 tree inner1
= gimple_assign_rhs1 (stmt
);
1575 tree inner2
= gimple_assign_rhs2 (stmt
);
1578 tree partial
= NULL_TREE
;
1579 bool is_and
= (innercode
== BIT_AND_EXPR
);
1581 /* Check for boolean identities that don't require recursive examination
1583 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1584 inner1 AND (inner1 OR inner2) => inner1
1585 !inner1 AND (inner1 AND inner2) => false
1586 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1587 Likewise for similar cases involving inner2. */
1588 if (inner1
== true_test_var
)
1589 return (is_and
? var
: inner1
);
1590 else if (inner2
== true_test_var
)
1591 return (is_and
? var
: inner2
);
1592 else if (inner1
== false_test_var
)
1594 ? boolean_false_node
1595 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
1596 else if (inner2
== false_test_var
)
1598 ? boolean_false_node
1599 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
1601 /* Next, redistribute/reassociate the AND across the inner tests.
1602 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1603 if (TREE_CODE (inner1
) == SSA_NAME
1604 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
1605 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
1606 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
1607 gimple_assign_rhs1 (s
),
1608 gimple_assign_rhs2 (s
),
1609 code2
, op2a
, op2b
)))
1611 /* Handle the AND case, where we are reassociating:
1612 (inner1 AND inner2) AND (op2a code2 op2b)
1614 If the partial result t is a constant, we win. Otherwise
1615 continue on to try reassociating with the other inner test. */
1618 if (integer_onep (t
))
1620 else if (integer_zerop (t
))
1621 return boolean_false_node
;
1624 /* Handle the OR case, where we are redistributing:
1625 (inner1 OR inner2) AND (op2a code2 op2b)
1626 => (t OR (inner2 AND (op2a code2 op2b))) */
1627 else if (integer_onep (t
))
1628 return boolean_true_node
;
1630 /* Save partial result for later. */
1634 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1635 if (TREE_CODE (inner2
) == SSA_NAME
1636 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
1637 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
1638 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
1639 gimple_assign_rhs1 (s
),
1640 gimple_assign_rhs2 (s
),
1641 code2
, op2a
, op2b
)))
1643 /* Handle the AND case, where we are reassociating:
1644 (inner1 AND inner2) AND (op2a code2 op2b)
1645 => (inner1 AND t) */
1648 if (integer_onep (t
))
1650 else if (integer_zerop (t
))
1651 return boolean_false_node
;
1652 /* If both are the same, we can apply the identity
1654 else if (partial
&& same_bool_result_p (t
, partial
))
1658 /* Handle the OR case. where we are redistributing:
1659 (inner1 OR inner2) AND (op2a code2 op2b)
1660 => (t OR (inner1 AND (op2a code2 op2b)))
1661 => (t OR partial) */
1664 if (integer_onep (t
))
1665 return boolean_true_node
;
1668 /* We already got a simplification for the other
1669 operand to the redistributed OR expression. The
1670 interesting case is when at least one is false.
1671 Or, if both are the same, we can apply the identity
1673 if (integer_zerop (partial
))
1675 else if (integer_zerop (t
))
1677 else if (same_bool_result_p (t
, partial
))
1686 /* Try to simplify the AND of two comparisons defined by
1687 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1688 If this can be done without constructing an intermediate value,
1689 return the resulting tree; otherwise NULL_TREE is returned.
1690 This function is deliberately asymmetric as it recurses on SSA_DEFs
1691 in the first comparison but not the second. */
1694 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1695 enum tree_code code2
, tree op2a
, tree op2b
)
1697 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1698 if (operand_equal_p (op1a
, op2a
, 0)
1699 && operand_equal_p (op1b
, op2b
, 0))
1701 /* Result will be either NULL_TREE, or a combined comparison. */
1702 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
1703 TRUTH_ANDIF_EXPR
, code1
, code2
,
1704 boolean_type_node
, op1a
, op1b
);
1709 /* Likewise the swapped case of the above. */
1710 if (operand_equal_p (op1a
, op2b
, 0)
1711 && operand_equal_p (op1b
, op2a
, 0))
1713 /* Result will be either NULL_TREE, or a combined comparison. */
1714 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
1715 TRUTH_ANDIF_EXPR
, code1
,
1716 swap_tree_comparison (code2
),
1717 boolean_type_node
, op1a
, op1b
);
1722 /* If both comparisons are of the same value against constants, we might
1723 be able to merge them. */
1724 if (operand_equal_p (op1a
, op2a
, 0)
1725 && TREE_CODE (op1b
) == INTEGER_CST
1726 && TREE_CODE (op2b
) == INTEGER_CST
)
1728 int cmp
= tree_int_cst_compare (op1b
, op2b
);
1730 /* If we have (op1a == op1b), we should either be able to
1731 return that or FALSE, depending on whether the constant op1b
1732 also satisfies the other comparison against op2b. */
1733 if (code1
== EQ_EXPR
)
1739 case EQ_EXPR
: val
= (cmp
== 0); break;
1740 case NE_EXPR
: val
= (cmp
!= 0); break;
1741 case LT_EXPR
: val
= (cmp
< 0); break;
1742 case GT_EXPR
: val
= (cmp
> 0); break;
1743 case LE_EXPR
: val
= (cmp
<= 0); break;
1744 case GE_EXPR
: val
= (cmp
>= 0); break;
1745 default: done
= false;
1750 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1752 return boolean_false_node
;
1755 /* Likewise if the second comparison is an == comparison. */
1756 else if (code2
== EQ_EXPR
)
1762 case EQ_EXPR
: val
= (cmp
== 0); break;
1763 case NE_EXPR
: val
= (cmp
!= 0); break;
1764 case LT_EXPR
: val
= (cmp
> 0); break;
1765 case GT_EXPR
: val
= (cmp
< 0); break;
1766 case LE_EXPR
: val
= (cmp
>= 0); break;
1767 case GE_EXPR
: val
= (cmp
<= 0); break;
1768 default: done
= false;
1773 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1775 return boolean_false_node
;
1779 /* Same business with inequality tests. */
1780 else if (code1
== NE_EXPR
)
1785 case EQ_EXPR
: val
= (cmp
!= 0); break;
1786 case NE_EXPR
: val
= (cmp
== 0); break;
1787 case LT_EXPR
: val
= (cmp
>= 0); break;
1788 case GT_EXPR
: val
= (cmp
<= 0); break;
1789 case LE_EXPR
: val
= (cmp
> 0); break;
1790 case GE_EXPR
: val
= (cmp
< 0); break;
1795 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1797 else if (code2
== NE_EXPR
)
1802 case EQ_EXPR
: val
= (cmp
== 0); break;
1803 case NE_EXPR
: val
= (cmp
!= 0); break;
1804 case LT_EXPR
: val
= (cmp
<= 0); break;
1805 case GT_EXPR
: val
= (cmp
>= 0); break;
1806 case LE_EXPR
: val
= (cmp
< 0); break;
1807 case GE_EXPR
: val
= (cmp
> 0); break;
1812 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1815 /* Chose the more restrictive of two < or <= comparisons. */
1816 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
1817 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
1819 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
1820 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1822 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1825 /* Likewise chose the more restrictive of two > or >= comparisons. */
1826 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
1827 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
1829 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
1830 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1832 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1835 /* Check for singleton ranges. */
1837 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
1838 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
1839 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
1841 /* Check for disjoint ranges. */
1843 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
1844 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
1845 return boolean_false_node
;
1847 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
1848 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
1849 return boolean_false_node
;
1852 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1853 NAME's definition is a truth value. See if there are any simplifications
1854 that can be done against the NAME's definition. */
1855 if (TREE_CODE (op1a
) == SSA_NAME
1856 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
1857 && (integer_zerop (op1b
) || integer_onep (op1b
)))
1859 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
1860 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
1861 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
1862 switch (gimple_code (stmt
))
1865 /* Try to simplify by copy-propagating the definition. */
1866 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
1869 /* If every argument to the PHI produces the same result when
1870 ANDed with the second comparison, we win.
1871 Do not do this unless the type is bool since we need a bool
1872 result here anyway. */
1873 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
1875 tree result
= NULL_TREE
;
1877 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1879 tree arg
= gimple_phi_arg_def (stmt
, i
);
1881 /* If this PHI has itself as an argument, ignore it.
1882 If all the other args produce the same result,
1884 if (arg
== gimple_phi_result (stmt
))
1886 else if (TREE_CODE (arg
) == INTEGER_CST
)
1888 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
1891 result
= boolean_false_node
;
1892 else if (!integer_zerop (result
))
1896 result
= fold_build2 (code2
, boolean_type_node
,
1898 else if (!same_bool_comparison_p (result
,
1902 else if (TREE_CODE (arg
) == SSA_NAME
1903 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
1906 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
1907 /* In simple cases we can look through PHI nodes,
1908 but we have to be careful with loops.
1910 if (! dom_info_available_p (CDI_DOMINATORS
)
1911 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
1912 || dominated_by_p (CDI_DOMINATORS
,
1913 gimple_bb (def_stmt
),
1916 temp
= and_var_with_comparison (arg
, invert
, code2
,
1922 else if (!same_bool_result_p (result
, temp
))
1938 /* Try to simplify the AND of two comparisons, specified by
1939 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1940 If this can be simplified to a single expression (without requiring
1941 introducing more SSA variables to hold intermediate values),
1942 return the resulting tree. Otherwise return NULL_TREE.
1943 If the result expression is non-null, it has boolean type. */
1946 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
1947 enum tree_code code2
, tree op2a
, tree op2b
)
1949 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
1953 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
1956 /* Helper function for or_comparisons_1: try to simplify the OR of the
1957 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1958 If INVERT is true, invert the value of VAR before doing the OR.
1959 Return NULL_EXPR if we can't simplify this to a single expression. */
1962 or_var_with_comparison (tree var
, bool invert
,
1963 enum tree_code code2
, tree op2a
, tree op2b
)
1966 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1968 /* We can only deal with variables whose definitions are assignments. */
1969 if (!is_gimple_assign (stmt
))
1972 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1973 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1974 Then we only have to consider the simpler non-inverted cases. */
1976 t
= and_var_with_comparison_1 (stmt
,
1977 invert_tree_comparison (code2
, false),
1980 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
1981 return canonicalize_bool (t
, invert
);
1984 /* Try to simplify the OR of the ssa variable defined by the assignment
1985 STMT with the comparison specified by (OP2A CODE2 OP2B).
1986 Return NULL_EXPR if we can't simplify this to a single expression. */
1989 or_var_with_comparison_1 (gimple stmt
,
1990 enum tree_code code2
, tree op2a
, tree op2b
)
1992 tree var
= gimple_assign_lhs (stmt
);
1993 tree true_test_var
= NULL_TREE
;
1994 tree false_test_var
= NULL_TREE
;
1995 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
1997 /* Check for identities like (var OR (var != 0)) => true . */
1998 if (TREE_CODE (op2a
) == SSA_NAME
1999 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
2001 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
2002 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
2004 true_test_var
= op2a
;
2005 if (var
== true_test_var
)
2008 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
2009 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
2011 false_test_var
= op2a
;
2012 if (var
== false_test_var
)
2013 return boolean_true_node
;
2017 /* If the definition is a comparison, recurse on it. */
2018 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
2020 tree t
= or_comparisons_1 (innercode
,
2021 gimple_assign_rhs1 (stmt
),
2022 gimple_assign_rhs2 (stmt
),
2030 /* If the definition is an AND or OR expression, we may be able to
2031 simplify by reassociating. */
2032 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
2033 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
2035 tree inner1
= gimple_assign_rhs1 (stmt
);
2036 tree inner2
= gimple_assign_rhs2 (stmt
);
2039 tree partial
= NULL_TREE
;
2040 bool is_or
= (innercode
== BIT_IOR_EXPR
);
2042 /* Check for boolean identities that don't require recursive examination
2044 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2045 inner1 OR (inner1 AND inner2) => inner1
2046 !inner1 OR (inner1 OR inner2) => true
2047 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2049 if (inner1
== true_test_var
)
2050 return (is_or
? var
: inner1
);
2051 else if (inner2
== true_test_var
)
2052 return (is_or
? var
: inner2
);
2053 else if (inner1
== false_test_var
)
2056 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
2057 else if (inner2
== false_test_var
)
2060 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
2062 /* Next, redistribute/reassociate the OR across the inner tests.
2063 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2064 if (TREE_CODE (inner1
) == SSA_NAME
2065 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
2066 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2067 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2068 gimple_assign_rhs1 (s
),
2069 gimple_assign_rhs2 (s
),
2070 code2
, op2a
, op2b
)))
2072 /* Handle the OR case, where we are reassociating:
2073 (inner1 OR inner2) OR (op2a code2 op2b)
2075 If the partial result t is a constant, we win. Otherwise
2076 continue on to try reassociating with the other inner test. */
2079 if (integer_onep (t
))
2080 return boolean_true_node
;
2081 else if (integer_zerop (t
))
2085 /* Handle the AND case, where we are redistributing:
2086 (inner1 AND inner2) OR (op2a code2 op2b)
2087 => (t AND (inner2 OR (op2a code op2b))) */
2088 else if (integer_zerop (t
))
2089 return boolean_false_node
;
2091 /* Save partial result for later. */
2095 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2096 if (TREE_CODE (inner2
) == SSA_NAME
2097 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
2098 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2099 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2100 gimple_assign_rhs1 (s
),
2101 gimple_assign_rhs2 (s
),
2102 code2
, op2a
, op2b
)))
2104 /* Handle the OR case, where we are reassociating:
2105 (inner1 OR inner2) OR (op2a code2 op2b)
2107 => (t OR partial) */
2110 if (integer_zerop (t
))
2112 else if (integer_onep (t
))
2113 return boolean_true_node
;
2114 /* If both are the same, we can apply the identity
2116 else if (partial
&& same_bool_result_p (t
, partial
))
2120 /* Handle the AND case, where we are redistributing:
2121 (inner1 AND inner2) OR (op2a code2 op2b)
2122 => (t AND (inner1 OR (op2a code2 op2b)))
2123 => (t AND partial) */
2126 if (integer_zerop (t
))
2127 return boolean_false_node
;
2130 /* We already got a simplification for the other
2131 operand to the redistributed AND expression. The
2132 interesting case is when at least one is true.
2133 Or, if both are the same, we can apply the identity
2135 if (integer_onep (partial
))
2137 else if (integer_onep (t
))
2139 else if (same_bool_result_p (t
, partial
))
2148 /* Try to simplify the OR of two comparisons defined by
2149 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2150 If this can be done without constructing an intermediate value,
2151 return the resulting tree; otherwise NULL_TREE is returned.
2152 This function is deliberately asymmetric as it recurses on SSA_DEFs
2153 in the first comparison but not the second. */
2156 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
2157 enum tree_code code2
, tree op2a
, tree op2b
)
2159 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2160 if (operand_equal_p (op1a
, op2a
, 0)
2161 && operand_equal_p (op1b
, op2b
, 0))
2163 /* Result will be either NULL_TREE, or a combined comparison. */
2164 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2165 TRUTH_ORIF_EXPR
, code1
, code2
,
2166 boolean_type_node
, op1a
, op1b
);
2171 /* Likewise the swapped case of the above. */
2172 if (operand_equal_p (op1a
, op2b
, 0)
2173 && operand_equal_p (op1b
, op2a
, 0))
2175 /* Result will be either NULL_TREE, or a combined comparison. */
2176 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2177 TRUTH_ORIF_EXPR
, code1
,
2178 swap_tree_comparison (code2
),
2179 boolean_type_node
, op1a
, op1b
);
2184 /* If both comparisons are of the same value against constants, we might
2185 be able to merge them. */
2186 if (operand_equal_p (op1a
, op2a
, 0)
2187 && TREE_CODE (op1b
) == INTEGER_CST
2188 && TREE_CODE (op2b
) == INTEGER_CST
)
2190 int cmp
= tree_int_cst_compare (op1b
, op2b
);
2192 /* If we have (op1a != op1b), we should either be able to
2193 return that or TRUE, depending on whether the constant op1b
2194 also satisfies the other comparison against op2b. */
2195 if (code1
== NE_EXPR
)
2201 case EQ_EXPR
: val
= (cmp
== 0); break;
2202 case NE_EXPR
: val
= (cmp
!= 0); break;
2203 case LT_EXPR
: val
= (cmp
< 0); break;
2204 case GT_EXPR
: val
= (cmp
> 0); break;
2205 case LE_EXPR
: val
= (cmp
<= 0); break;
2206 case GE_EXPR
: val
= (cmp
>= 0); break;
2207 default: done
= false;
2212 return boolean_true_node
;
2214 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2217 /* Likewise if the second comparison is a != comparison. */
2218 else if (code2
== NE_EXPR
)
2224 case EQ_EXPR
: val
= (cmp
== 0); break;
2225 case NE_EXPR
: val
= (cmp
!= 0); break;
2226 case LT_EXPR
: val
= (cmp
> 0); break;
2227 case GT_EXPR
: val
= (cmp
< 0); break;
2228 case LE_EXPR
: val
= (cmp
>= 0); break;
2229 case GE_EXPR
: val
= (cmp
<= 0); break;
2230 default: done
= false;
2235 return boolean_true_node
;
2237 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2241 /* See if an equality test is redundant with the other comparison. */
2242 else if (code1
== EQ_EXPR
)
2247 case EQ_EXPR
: val
= (cmp
== 0); break;
2248 case NE_EXPR
: val
= (cmp
!= 0); break;
2249 case LT_EXPR
: val
= (cmp
< 0); break;
2250 case GT_EXPR
: val
= (cmp
> 0); break;
2251 case LE_EXPR
: val
= (cmp
<= 0); break;
2252 case GE_EXPR
: val
= (cmp
>= 0); break;
2257 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2259 else if (code2
== EQ_EXPR
)
2264 case EQ_EXPR
: val
= (cmp
== 0); break;
2265 case NE_EXPR
: val
= (cmp
!= 0); break;
2266 case LT_EXPR
: val
= (cmp
> 0); break;
2267 case GT_EXPR
: val
= (cmp
< 0); break;
2268 case LE_EXPR
: val
= (cmp
>= 0); break;
2269 case GE_EXPR
: val
= (cmp
<= 0); break;
2274 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2277 /* Chose the less restrictive of two < or <= comparisons. */
2278 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
2279 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2281 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
2282 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2284 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2287 /* Likewise chose the less restrictive of two > or >= comparisons. */
2288 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
2289 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2291 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
2292 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2294 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2297 /* Check for singleton ranges. */
2299 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
2300 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
2301 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
2303 /* Check for less/greater pairs that don't restrict the range at all. */
2305 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
2306 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2307 return boolean_true_node
;
2309 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
2310 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2311 return boolean_true_node
;
2314 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2315 NAME's definition is a truth value. See if there are any simplifications
2316 that can be done against the NAME's definition. */
2317 if (TREE_CODE (op1a
) == SSA_NAME
2318 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
2319 && (integer_zerop (op1b
) || integer_onep (op1b
)))
2321 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
2322 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
2323 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
2324 switch (gimple_code (stmt
))
2327 /* Try to simplify by copy-propagating the definition. */
2328 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
2331 /* If every argument to the PHI produces the same result when
2332 ORed with the second comparison, we win.
2333 Do not do this unless the type is bool since we need a bool
2334 result here anyway. */
2335 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
2337 tree result
= NULL_TREE
;
2339 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
2341 tree arg
= gimple_phi_arg_def (stmt
, i
);
2343 /* If this PHI has itself as an argument, ignore it.
2344 If all the other args produce the same result,
2346 if (arg
== gimple_phi_result (stmt
))
2348 else if (TREE_CODE (arg
) == INTEGER_CST
)
2350 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
2353 result
= boolean_true_node
;
2354 else if (!integer_onep (result
))
2358 result
= fold_build2 (code2
, boolean_type_node
,
2360 else if (!same_bool_comparison_p (result
,
2364 else if (TREE_CODE (arg
) == SSA_NAME
2365 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
2368 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
2369 /* In simple cases we can look through PHI nodes,
2370 but we have to be careful with loops.
2372 if (! dom_info_available_p (CDI_DOMINATORS
)
2373 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
2374 || dominated_by_p (CDI_DOMINATORS
,
2375 gimple_bb (def_stmt
),
2378 temp
= or_var_with_comparison (arg
, invert
, code2
,
2384 else if (!same_bool_result_p (result
, temp
))
2400 /* Try to simplify the OR of two comparisons, specified by
2401 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2402 If this can be simplified to a single expression (without requiring
2403 introducing more SSA variables to hold intermediate values),
2404 return the resulting tree. Otherwise return NULL_TREE.
2405 If the result expression is non-null, it has boolean type. */
2408 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
2409 enum tree_code code2
, tree op2a
, tree op2b
)
2411 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
2415 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
2419 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2421 Either NULL_TREE, a simplified but non-constant or a constant
2424 ??? This should go into a gimple-fold-inline.h file to be eventually
2425 privatized with the single valueize function used in the various TUs
2426 to avoid the indirect function call overhead. */
2429 gimple_fold_stmt_to_constant_1 (gimple stmt
, tree (*valueize
) (tree
))
2431 location_t loc
= gimple_location (stmt
);
2432 switch (gimple_code (stmt
))
2436 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
2438 switch (get_gimple_rhs_class (subcode
))
2440 case GIMPLE_SINGLE_RHS
:
2442 tree rhs
= gimple_assign_rhs1 (stmt
);
2443 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
2445 if (TREE_CODE (rhs
) == SSA_NAME
)
2447 /* If the RHS is an SSA_NAME, return its known constant value,
2449 return (*valueize
) (rhs
);
2451 /* Handle propagating invariant addresses into address
2453 else if (TREE_CODE (rhs
) == ADDR_EXPR
2454 && !is_gimple_min_invariant (rhs
))
2456 HOST_WIDE_INT offset
= 0;
2458 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
2462 && (CONSTANT_CLASS_P (base
)
2463 || decl_address_invariant_p (base
)))
2464 return build_invariant_address (TREE_TYPE (rhs
),
2467 else if (TREE_CODE (rhs
) == CONSTRUCTOR
2468 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
2469 && (CONSTRUCTOR_NELTS (rhs
)
2470 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
2475 vec
= XALLOCAVEC (tree
,
2476 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
)));
2477 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
2479 val
= (*valueize
) (val
);
2480 if (TREE_CODE (val
) == INTEGER_CST
2481 || TREE_CODE (val
) == REAL_CST
2482 || TREE_CODE (val
) == FIXED_CST
)
2488 return build_vector (TREE_TYPE (rhs
), vec
);
2491 if (kind
== tcc_reference
)
2493 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
2494 || TREE_CODE (rhs
) == REALPART_EXPR
2495 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
2496 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2498 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2499 return fold_unary_loc (EXPR_LOCATION (rhs
),
2501 TREE_TYPE (rhs
), val
);
2503 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
2504 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2506 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2507 return fold_ternary_loc (EXPR_LOCATION (rhs
),
2509 TREE_TYPE (rhs
), val
,
2510 TREE_OPERAND (rhs
, 1),
2511 TREE_OPERAND (rhs
, 2));
2513 else if (TREE_CODE (rhs
) == MEM_REF
2514 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2516 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2517 if (TREE_CODE (val
) == ADDR_EXPR
2518 && is_gimple_min_invariant (val
))
2520 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
2522 TREE_OPERAND (rhs
, 1));
2527 return fold_const_aggregate_ref_1 (rhs
, valueize
);
2529 else if (kind
== tcc_declaration
)
2530 return get_symbol_constant_value (rhs
);
2534 case GIMPLE_UNARY_RHS
:
2536 /* Handle unary operators that can appear in GIMPLE form.
2537 Note that we know the single operand must be a constant,
2538 so this should almost always return a simplified RHS. */
2539 tree lhs
= gimple_assign_lhs (stmt
);
2540 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2542 /* Conversions are useless for CCP purposes if they are
2543 value-preserving. Thus the restrictions that
2544 useless_type_conversion_p places for restrict qualification
2545 of pointer types should not apply here.
2546 Substitution later will only substitute to allowed places. */
2547 if (CONVERT_EXPR_CODE_P (subcode
)
2548 && POINTER_TYPE_P (TREE_TYPE (lhs
))
2549 && POINTER_TYPE_P (TREE_TYPE (op0
))
2550 && TYPE_ADDR_SPACE (TREE_TYPE (lhs
))
2551 == TYPE_ADDR_SPACE (TREE_TYPE (op0
))
2552 && TYPE_MODE (TREE_TYPE (lhs
))
2553 == TYPE_MODE (TREE_TYPE (op0
)))
2557 fold_unary_ignore_overflow_loc (loc
, subcode
,
2558 gimple_expr_type (stmt
), op0
);
2561 case GIMPLE_BINARY_RHS
:
2563 /* Handle binary operators that can appear in GIMPLE form. */
2564 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2565 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
2567 /* Translate &x + CST into an invariant form suitable for
2568 further propagation. */
2569 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
2570 && TREE_CODE (op0
) == ADDR_EXPR
2571 && TREE_CODE (op1
) == INTEGER_CST
)
2573 tree off
= fold_convert (ptr_type_node
, op1
);
2574 return build_fold_addr_expr_loc
2576 fold_build2 (MEM_REF
,
2577 TREE_TYPE (TREE_TYPE (op0
)),
2578 unshare_expr (op0
), off
));
2581 return fold_binary_loc (loc
, subcode
,
2582 gimple_expr_type (stmt
), op0
, op1
);
2585 case GIMPLE_TERNARY_RHS
:
2587 /* Handle ternary operators that can appear in GIMPLE form. */
2588 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2589 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
2590 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
2592 /* Fold embedded expressions in ternary codes. */
2593 if ((subcode
== COND_EXPR
2594 || subcode
== VEC_COND_EXPR
)
2595 && COMPARISON_CLASS_P (op0
))
2597 tree op00
= (*valueize
) (TREE_OPERAND (op0
, 0));
2598 tree op01
= (*valueize
) (TREE_OPERAND (op0
, 1));
2599 tree tem
= fold_binary_loc (loc
, TREE_CODE (op0
),
2600 TREE_TYPE (op0
), op00
, op01
);
2605 return fold_ternary_loc (loc
, subcode
,
2606 gimple_expr_type (stmt
), op0
, op1
, op2
);
2618 if (gimple_call_internal_p (stmt
))
2619 /* No folding yet for these functions. */
2622 fn
= (*valueize
) (gimple_call_fn (stmt
));
2623 if (TREE_CODE (fn
) == ADDR_EXPR
2624 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
2625 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0)))
2627 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
2630 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2631 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
2632 call
= build_call_array_loc (loc
,
2633 gimple_call_return_type (stmt
),
2634 fn
, gimple_call_num_args (stmt
), args
);
2635 retval
= fold_call_expr (EXPR_LOCATION (call
), call
, false);
2637 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2638 STRIP_NOPS (retval
);
2649 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2650 Returns NULL_TREE if folding to a constant is not possible, otherwise
2651 returns a constant according to is_gimple_min_invariant. */
2654 gimple_fold_stmt_to_constant (gimple stmt
, tree (*valueize
) (tree
))
2656 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
2657 if (res
&& is_gimple_min_invariant (res
))
2663 /* The following set of functions are supposed to fold references using
2664 their constant initializers. */
2666 static tree
fold_ctor_reference (tree type
, tree ctor
,
2667 unsigned HOST_WIDE_INT offset
,
2668 unsigned HOST_WIDE_INT size
, tree
);
2670 /* See if we can find constructor defining value of BASE.
2671 When we know the constructor with constant offset (such as
2672 base is array[40] and we do know constructor of array), then
2673 BIT_OFFSET is adjusted accordingly.
2675 As a special case, return error_mark_node when constructor
2676 is not explicitly available, but it is known to be zero
2677 such as 'static const int a;'. */
2679 get_base_constructor (tree base
, HOST_WIDE_INT
*bit_offset
,
2680 tree (*valueize
)(tree
))
2682 HOST_WIDE_INT bit_offset2
, size
, max_size
;
2683 if (TREE_CODE (base
) == MEM_REF
)
2685 if (!integer_zerop (TREE_OPERAND (base
, 1)))
2687 if (!host_integerp (TREE_OPERAND (base
, 1), 0))
2689 *bit_offset
+= (mem_ref_offset (base
).low
2694 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
2695 base
= valueize (TREE_OPERAND (base
, 0));
2696 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
2698 base
= TREE_OPERAND (base
, 0);
2701 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2702 DECL_INITIAL. If BASE is a nested reference into another
2703 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2704 the inner reference. */
2705 switch (TREE_CODE (base
))
2708 if (!const_value_known_p (base
))
2713 if (!DECL_INITIAL (base
)
2714 && (TREE_STATIC (base
) || DECL_EXTERNAL (base
)))
2715 return error_mark_node
;
2716 /* Do not return an error_mark_node DECL_INITIAL. LTO uses this
2717 as special marker (_not_ zero ...) for its own purposes. */
2718 if (DECL_INITIAL (base
) == error_mark_node
)
2720 return DECL_INITIAL (base
);
2724 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
);
2725 if (max_size
== -1 || size
!= max_size
)
2727 *bit_offset
+= bit_offset2
;
2728 return get_base_constructor (base
, bit_offset
, valueize
);
2739 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2740 to the memory at bit OFFSET.
2742 We do only simple job of folding byte accesses. */
2745 fold_string_cst_ctor_reference (tree type
, tree ctor
,
2746 unsigned HOST_WIDE_INT offset
,
2747 unsigned HOST_WIDE_INT size
)
2749 if (INTEGRAL_TYPE_P (type
)
2750 && (TYPE_MODE (type
)
2751 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
2752 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
2754 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
)))) == 1
2755 && size
== BITS_PER_UNIT
2756 && !(offset
% BITS_PER_UNIT
))
2758 offset
/= BITS_PER_UNIT
;
2759 if (offset
< (unsigned HOST_WIDE_INT
) TREE_STRING_LENGTH (ctor
))
2760 return build_int_cst_type (type
, (TREE_STRING_POINTER (ctor
)
2763 const char a[20]="hello";
2766 might lead to offset greater than string length. In this case we
2767 know value is either initialized to 0 or out of bounds. Return 0
2769 return build_zero_cst (type
);
2774 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2775 SIZE to the memory at bit OFFSET. */
2778 fold_array_ctor_reference (tree type
, tree ctor
,
2779 unsigned HOST_WIDE_INT offset
,
2780 unsigned HOST_WIDE_INT size
,
2783 unsigned HOST_WIDE_INT cnt
;
2785 double_int low_bound
, elt_size
;
2786 double_int index
, max_index
;
2787 double_int access_index
;
2788 tree domain_type
= NULL_TREE
, index_type
= NULL_TREE
;
2789 HOST_WIDE_INT inner_offset
;
2791 /* Compute low bound and elt size. */
2792 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
2793 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
2794 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
2796 /* Static constructors for variably sized objects make no sense. */
2797 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
2798 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
2799 low_bound
= tree_to_double_int (TYPE_MIN_VALUE (domain_type
));
2802 low_bound
= double_int_zero
;
2803 /* Static constructors for variably sized objects make no sense. */
2804 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))))
2807 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
2810 /* We can handle only constantly sized accesses that are known to not
2811 be larger than size of array element. */
2812 if (!TYPE_SIZE_UNIT (type
)
2813 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
2814 || double_int_cmp (elt_size
,
2815 tree_to_double_int (TYPE_SIZE_UNIT (type
)), 0) < 0)
2818 /* Compute the array index we look for. */
2819 access_index
= double_int_udiv (uhwi_to_double_int (offset
/ BITS_PER_UNIT
),
2820 elt_size
, TRUNC_DIV_EXPR
);
2821 access_index
= double_int_add (access_index
, low_bound
);
2823 access_index
= double_int_ext (access_index
,
2824 TYPE_PRECISION (index_type
),
2825 TYPE_UNSIGNED (index_type
));
2827 /* And offset within the access. */
2828 inner_offset
= offset
% (double_int_to_uhwi (elt_size
) * BITS_PER_UNIT
);
2830 /* See if the array field is large enough to span whole access. We do not
2831 care to fold accesses spanning multiple array indexes. */
2832 if (inner_offset
+ size
> double_int_to_uhwi (elt_size
) * BITS_PER_UNIT
)
2835 index
= double_int_sub (low_bound
, double_int_one
);
2837 index
= double_int_ext (index
,
2838 TYPE_PRECISION (index_type
),
2839 TYPE_UNSIGNED (index_type
));
2841 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
2843 /* Array constructor might explicitly set index, or specify range
2844 or leave index NULL meaning that it is next index after previous
2848 if (TREE_CODE (cfield
) == INTEGER_CST
)
2849 max_index
= index
= tree_to_double_int (cfield
);
2852 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
2853 index
= tree_to_double_int (TREE_OPERAND (cfield
, 0));
2854 max_index
= tree_to_double_int (TREE_OPERAND (cfield
, 1));
2859 index
= double_int_add (index
, double_int_one
);
2861 index
= double_int_ext (index
,
2862 TYPE_PRECISION (index_type
),
2863 TYPE_UNSIGNED (index_type
));
2867 /* Do we have match? */
2868 if (double_int_cmp (access_index
, index
, 1) >= 0
2869 && double_int_cmp (access_index
, max_index
, 1) <= 0)
2870 return fold_ctor_reference (type
, cval
, inner_offset
, size
,
2873 /* When memory is not explicitly mentioned in constructor,
2874 it is 0 (or out of range). */
2875 return build_zero_cst (type
);
2878 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2879 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2882 fold_nonarray_ctor_reference (tree type
, tree ctor
,
2883 unsigned HOST_WIDE_INT offset
,
2884 unsigned HOST_WIDE_INT size
,
2887 unsigned HOST_WIDE_INT cnt
;
2890 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
2893 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
2894 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
2895 tree field_size
= DECL_SIZE (cfield
);
2896 double_int bitoffset
;
2897 double_int byte_offset_cst
= tree_to_double_int (byte_offset
);
2898 double_int bits_per_unit_cst
= uhwi_to_double_int (BITS_PER_UNIT
);
2899 double_int bitoffset_end
, access_end
;
2901 /* Variable sized objects in static constructors makes no sense,
2902 but field_size can be NULL for flexible array members. */
2903 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
2904 && TREE_CODE (byte_offset
) == INTEGER_CST
2905 && (field_size
!= NULL_TREE
2906 ? TREE_CODE (field_size
) == INTEGER_CST
2907 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
2909 /* Compute bit offset of the field. */
2910 bitoffset
= double_int_add (tree_to_double_int (field_offset
),
2911 double_int_mul (byte_offset_cst
,
2912 bits_per_unit_cst
));
2913 /* Compute bit offset where the field ends. */
2914 if (field_size
!= NULL_TREE
)
2915 bitoffset_end
= double_int_add (bitoffset
,
2916 tree_to_double_int (field_size
));
2918 bitoffset_end
= double_int_zero
;
2920 access_end
= double_int_add (uhwi_to_double_int (offset
),
2921 uhwi_to_double_int (size
));
2923 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2924 [BITOFFSET, BITOFFSET_END)? */
2925 if (double_int_cmp (access_end
, bitoffset
, 0) > 0
2926 && (field_size
== NULL_TREE
2927 || double_int_cmp (uhwi_to_double_int (offset
),
2928 bitoffset_end
, 0) < 0))
2930 double_int inner_offset
= double_int_sub (uhwi_to_double_int (offset
),
2932 /* We do have overlap. Now see if field is large enough to
2933 cover the access. Give up for accesses spanning multiple
2935 if (double_int_cmp (access_end
, bitoffset_end
, 0) > 0)
2937 if (double_int_cmp (uhwi_to_double_int (offset
), bitoffset
, 0) < 0)
2939 return fold_ctor_reference (type
, cval
,
2940 double_int_to_uhwi (inner_offset
), size
,
2944 /* When memory is not explicitly mentioned in constructor, it is 0. */
2945 return build_zero_cst (type
);
2948 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2949 to the memory at bit OFFSET. */
2952 fold_ctor_reference (tree type
, tree ctor
, unsigned HOST_WIDE_INT offset
,
2953 unsigned HOST_WIDE_INT size
, tree from_decl
)
2957 /* We found the field with exact match. */
2958 if (useless_type_conversion_p (type
, TREE_TYPE (ctor
))
2960 return canonicalize_constructor_val (ctor
, from_decl
);
2962 /* We are at the end of walk, see if we can view convert the
2964 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
2965 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2966 && operand_equal_p (TYPE_SIZE (type
),
2967 TYPE_SIZE (TREE_TYPE (ctor
)), 0))
2969 ret
= canonicalize_constructor_val (ctor
, from_decl
);
2970 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
2975 if (TREE_CODE (ctor
) == STRING_CST
)
2976 return fold_string_cst_ctor_reference (type
, ctor
, offset
, size
);
2977 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
2980 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
2981 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
2982 return fold_array_ctor_reference (type
, ctor
, offset
, size
,
2985 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
2992 /* Return the tree representing the element referenced by T if T is an
2993 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
2994 names using VALUEIZE. Return NULL_TREE otherwise. */
2997 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
2999 tree ctor
, idx
, base
;
3000 HOST_WIDE_INT offset
, size
, max_size
;
3003 if (TREE_THIS_VOLATILE (t
))
3006 if (TREE_CODE_CLASS (TREE_CODE (t
)) == tcc_declaration
)
3007 return get_symbol_constant_value (t
);
3009 tem
= fold_read_from_constant_string (t
);
3013 switch (TREE_CODE (t
))
3016 case ARRAY_RANGE_REF
:
3017 /* Constant indexes are handled well by get_base_constructor.
3018 Only special case variable offsets.
3019 FIXME: This code can't handle nested references with variable indexes
3020 (they will be handled only by iteration of ccp). Perhaps we can bring
3021 get_ref_base_and_extent here and make it use a valueize callback. */
3022 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
3024 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
3025 && TREE_CODE (idx
) == INTEGER_CST
)
3027 tree low_bound
, unit_size
;
3030 /* If the resulting bit-offset is constant, track it. */
3031 if ((low_bound
= array_ref_low_bound (t
),
3032 TREE_CODE (low_bound
) == INTEGER_CST
)
3033 && (unit_size
= array_ref_element_size (t
),
3034 host_integerp (unit_size
, 1))
3035 && (doffset
= double_int_sext
3036 (double_int_sub (TREE_INT_CST (idx
),
3037 TREE_INT_CST (low_bound
)),
3038 TYPE_PRECISION (TREE_TYPE (idx
))),
3039 double_int_fits_in_shwi_p (doffset
)))
3041 offset
= double_int_to_shwi (doffset
);
3042 offset
*= TREE_INT_CST_LOW (unit_size
);
3043 offset
*= BITS_PER_UNIT
;
3045 base
= TREE_OPERAND (t
, 0);
3046 ctor
= get_base_constructor (base
, &offset
, valueize
);
3047 /* Empty constructor. Always fold to 0. */
3048 if (ctor
== error_mark_node
)
3049 return build_zero_cst (TREE_TYPE (t
));
3050 /* Out of bound array access. Value is undefined,
3054 /* We can not determine ctor. */
3057 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
3058 TREE_INT_CST_LOW (unit_size
)
3067 case TARGET_MEM_REF
:
3069 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
);
3070 ctor
= get_base_constructor (base
, &offset
, valueize
);
3072 /* Empty constructor. Always fold to 0. */
3073 if (ctor
== error_mark_node
)
3074 return build_zero_cst (TREE_TYPE (t
));
3075 /* We do not know precise address. */
3076 if (max_size
== -1 || max_size
!= size
)
3078 /* We can not determine ctor. */
3082 /* Out of bound array access. Value is undefined, but don't fold. */
3086 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
,
3092 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
3093 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
3094 return fold_build1_loc (EXPR_LOCATION (t
),
3095 TREE_CODE (t
), TREE_TYPE (t
), c
);
3107 fold_const_aggregate_ref (tree t
)
3109 return fold_const_aggregate_ref_1 (t
, NULL
);
3112 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3113 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3114 KNOWN_BINFO carries the binfo describing the true type of
3115 OBJ_TYPE_REF_OBJECT(REF). */
3118 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
)
3120 unsigned HOST_WIDE_INT offset
, size
;
3123 vtable
= v
= BINFO_VTABLE (known_binfo
);
3124 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3128 if (TREE_CODE (v
) == POINTER_PLUS_EXPR
)
3130 offset
= tree_low_cst (TREE_OPERAND (v
, 1), 1) * BITS_PER_UNIT
;
3131 v
= TREE_OPERAND (v
, 0);
3136 if (TREE_CODE (v
) != ADDR_EXPR
)
3138 v
= TREE_OPERAND (v
, 0);
3140 if (TREE_CODE (v
) != VAR_DECL
3141 || !DECL_VIRTUAL_P (v
)
3142 || !DECL_INITIAL (v
)
3143 || DECL_INITIAL (v
) == error_mark_node
)
3145 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
3146 size
= tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))), 1);
3147 offset
+= token
* size
;
3148 fn
= fold_ctor_reference (TREE_TYPE (TREE_TYPE (v
)), DECL_INITIAL (v
),
3149 offset
, size
, vtable
);
3150 if (!fn
|| integer_zerop (fn
))
3152 gcc_assert (TREE_CODE (fn
) == ADDR_EXPR
3153 || TREE_CODE (fn
) == FDESC_EXPR
);
3154 fn
= TREE_OPERAND (fn
, 0);
3155 gcc_assert (TREE_CODE (fn
) == FUNCTION_DECL
);
3157 /* When cgraph node is missing and function is not public, we cannot
3158 devirtualize. This can happen in WHOPR when the actual method
3159 ends up in other partition, because we found devirtualization
3160 possibility too late. */
3161 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
3164 /* Make sure we create a cgraph node for functions we'll reference.
3165 They can be non-existent if the reference comes from an entry
3166 of an external vtable for example. */
3167 cgraph_get_create_node (fn
);
3172 /* Return true iff VAL is a gimple expression that is known to be
3173 non-negative. Restricted to floating-point inputs. */
3176 gimple_val_nonnegative_real_p (tree val
)
3180 gcc_assert (val
&& SCALAR_FLOAT_TYPE_P (TREE_TYPE (val
)));
3182 /* Use existing logic for non-gimple trees. */
3183 if (tree_expr_nonnegative_p (val
))
3186 if (TREE_CODE (val
) != SSA_NAME
)
3189 /* Currently we look only at the immediately defining statement
3190 to make this determination, since recursion on defining
3191 statements of operands can lead to quadratic behavior in the
3192 worst case. This is expected to catch almost all occurrences
3193 in practice. It would be possible to implement limited-depth
3194 recursion if important cases are lost. Alternatively, passes
3195 that need this information (such as the pow/powi lowering code
3196 in the cse_sincos pass) could be revised to provide it through
3197 dataflow propagation. */
3199 def_stmt
= SSA_NAME_DEF_STMT (val
);
3201 if (is_gimple_assign (def_stmt
))
3205 /* See fold-const.c:tree_expr_nonnegative_p for additional
3206 cases that could be handled with recursion. */
3208 switch (gimple_assign_rhs_code (def_stmt
))
3211 /* Always true for floating-point operands. */
3215 /* True if the two operands are identical (since we are
3216 restricted to floating-point inputs). */
3217 op0
= gimple_assign_rhs1 (def_stmt
);
3218 op1
= gimple_assign_rhs2 (def_stmt
);
3221 || operand_equal_p (op0
, op1
, 0))
3228 else if (is_gimple_call (def_stmt
))
3230 tree fndecl
= gimple_call_fndecl (def_stmt
);
3232 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3236 switch (DECL_FUNCTION_CODE (fndecl
))
3238 CASE_FLT_FN (BUILT_IN_ACOS
):
3239 CASE_FLT_FN (BUILT_IN_ACOSH
):
3240 CASE_FLT_FN (BUILT_IN_CABS
):
3241 CASE_FLT_FN (BUILT_IN_COSH
):
3242 CASE_FLT_FN (BUILT_IN_ERFC
):
3243 CASE_FLT_FN (BUILT_IN_EXP
):
3244 CASE_FLT_FN (BUILT_IN_EXP10
):
3245 CASE_FLT_FN (BUILT_IN_EXP2
):
3246 CASE_FLT_FN (BUILT_IN_FABS
):
3247 CASE_FLT_FN (BUILT_IN_FDIM
):
3248 CASE_FLT_FN (BUILT_IN_HYPOT
):
3249 CASE_FLT_FN (BUILT_IN_POW10
):
3252 CASE_FLT_FN (BUILT_IN_SQRT
):
3253 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3254 nonnegative inputs. */
3255 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val
))))
3260 CASE_FLT_FN (BUILT_IN_POWI
):
3261 /* True if the second argument is an even integer. */
3262 arg1
= gimple_call_arg (def_stmt
, 1);
3264 if (TREE_CODE (arg1
) == INTEGER_CST
3265 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
3270 CASE_FLT_FN (BUILT_IN_POW
):
3271 /* True if the second argument is an even integer-valued
3273 arg1
= gimple_call_arg (def_stmt
, 1);
3275 if (TREE_CODE (arg1
) == REAL_CST
)
3280 c
= TREE_REAL_CST (arg1
);
3281 n
= real_to_integer (&c
);
3285 REAL_VALUE_TYPE cint
;
3286 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
3287 if (real_identical (&c
, &cint
))