1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "tree-flow.h"
30 #include "tree-ssa-propagate.h"
32 #include "gimple-fold.h"
34 /* Return true when DECL can be referenced from current unit.
35 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
36 We can get declarations that are not possible to reference for various
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsehwere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final copmilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
57 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
59 struct varpool_node
*vnode
;
60 struct cgraph_node
*node
;
63 /* We will later output the initializer, so we can refer to it.
64 So we are concerned only when DECL comes from initializer of
67 || TREE_CODE (from_decl
) != VAR_DECL
68 || !DECL_EXTERNAL (from_decl
)
70 && symtab_get_node (from_decl
)->symbol
.in_other_partition
))
72 /* We are concerned only about static/external vars and functions. */
73 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
74 || (TREE_CODE (decl
) != VAR_DECL
&& TREE_CODE (decl
) != FUNCTION_DECL
))
76 /* Weakrefs have somewhat confusing DECL_EXTERNAL flag set; they
78 if (DECL_EXTERNAL (decl
)
79 && lookup_attribute ("weakref", DECL_ATTRIBUTES (decl
)))
81 /* We are folding reference from external vtable. The vtable may reffer
82 to a symbol keyed to other compilation unit. The other compilation
83 unit may be in separate DSO and the symbol may be hidden. */
84 if (DECL_VISIBILITY_SPECIFIED (decl
)
85 && DECL_EXTERNAL (decl
)
86 && (!(snode
= symtab_get_node (decl
)) || !snode
->symbol
.in_other_partition
))
88 /* When function is public, we always can introduce new reference.
89 Exception are the COMDAT functions where introducing a direct
90 reference imply need to include function body in the curren tunit. */
91 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
93 /* We are not at ltrans stage; so don't worry about WHOPR.
94 Also when still gimplifying all referred comdat functions will be
97 As observed in PR20991 for already optimized out comdat virtual functions
98 it may be tempting to not necessarily give up because the copy will be
99 output elsewhere when corresponding vtable is output.
100 This is however not possible - ABI specify that COMDATs are output in
101 units where they are used and when the other unit was compiled with LTO
102 it is possible that vtable was kept public while the function itself
104 if (!flag_ltrans
&& (!DECL_COMDAT (decl
) || !cgraph_function_flags_ready
))
107 /* OK we are seeing either COMDAT or static variable. In this case we must
108 check that the definition is still around so we can refer it. */
109 if (TREE_CODE (decl
) == FUNCTION_DECL
)
111 node
= cgraph_get_node (decl
);
112 /* Check that we still have function body and that we didn't took
113 the decision to eliminate offline copy of the function yet.
114 The second is important when devirtualization happens during final
115 compilation stage when making a new reference no longer makes callee
117 if (!node
|| !node
->analyzed
|| node
->global
.inlined_to
)
119 gcc_checking_assert (!TREE_ASM_WRITTEN (decl
));
123 else if (TREE_CODE (decl
) == VAR_DECL
)
125 vnode
= varpool_get_node (decl
);
126 if (!vnode
|| !vnode
->analyzed
)
128 gcc_checking_assert (!TREE_ASM_WRITTEN (decl
));
135 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
136 acceptable form for is_gimple_min_invariant.
137 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
140 canonicalize_constructor_val (tree cval
, tree from_decl
)
142 tree orig_cval
= cval
;
144 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
145 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
147 tree ptr
= TREE_OPERAND (cval
, 0);
148 if (is_gimple_min_invariant (ptr
))
149 cval
= build1_loc (EXPR_LOCATION (cval
),
150 ADDR_EXPR
, TREE_TYPE (ptr
),
151 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
153 fold_convert (ptr_type_node
,
154 TREE_OPERAND (cval
, 1))));
156 if (TREE_CODE (cval
) == ADDR_EXPR
)
158 tree base
= NULL_TREE
;
159 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
161 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
163 TREE_OPERAND (cval
, 0) = base
;
166 base
= get_base_address (TREE_OPERAND (cval
, 0));
170 if ((TREE_CODE (base
) == VAR_DECL
171 || TREE_CODE (base
) == FUNCTION_DECL
)
172 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
174 if (TREE_CODE (base
) == VAR_DECL
)
175 TREE_ADDRESSABLE (base
) = 1;
176 else if (TREE_CODE (base
) == FUNCTION_DECL
)
178 /* Make sure we create a cgraph node for functions we'll reference.
179 They can be non-existent if the reference comes from an entry
180 of an external vtable for example. */
181 cgraph_get_create_node (base
);
183 /* Fixup types in global initializers. */
184 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
185 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
187 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
188 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
194 /* If SYM is a constant variable with known value, return the value.
195 NULL_TREE is returned otherwise. */
198 get_symbol_constant_value (tree sym
)
200 if (const_value_known_p (sym
))
202 tree val
= DECL_INITIAL (sym
);
205 val
= canonicalize_constructor_val (val
, sym
);
206 if (val
&& is_gimple_min_invariant (val
))
211 /* Variables declared 'const' without an initializer
212 have zero as the initializer if they may not be
213 overridden at link or run time. */
215 && (INTEGRAL_TYPE_P (TREE_TYPE (sym
))
216 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym
))))
217 return build_zero_cst (TREE_TYPE (sym
));
225 /* Subroutine of fold_stmt. We perform several simplifications of the
226 memory reference tree EXPR and make sure to re-gimplify them properly
227 after propagation of constant addresses. IS_LHS is true if the
228 reference is supposed to be an lvalue. */
231 maybe_fold_reference (tree expr
, bool is_lhs
)
236 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
237 || TREE_CODE (expr
) == REALPART_EXPR
238 || TREE_CODE (expr
) == IMAGPART_EXPR
)
239 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
240 return fold_unary_loc (EXPR_LOCATION (expr
),
243 TREE_OPERAND (expr
, 0));
244 else if (TREE_CODE (expr
) == BIT_FIELD_REF
245 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
246 return fold_ternary_loc (EXPR_LOCATION (expr
),
249 TREE_OPERAND (expr
, 0),
250 TREE_OPERAND (expr
, 1),
251 TREE_OPERAND (expr
, 2));
253 while (handled_component_p (*t
))
254 t
= &TREE_OPERAND (*t
, 0);
256 /* Canonicalize MEM_REFs invariant address operand. Do this first
257 to avoid feeding non-canonical MEM_REFs elsewhere. */
258 if (TREE_CODE (*t
) == MEM_REF
259 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)))
261 bool volatile_p
= TREE_THIS_VOLATILE (*t
);
262 tree tem
= fold_binary (MEM_REF
, TREE_TYPE (*t
),
263 TREE_OPERAND (*t
, 0),
264 TREE_OPERAND (*t
, 1));
267 TREE_THIS_VOLATILE (tem
) = volatile_p
;
269 tem
= maybe_fold_reference (expr
, is_lhs
);
277 && (result
= fold_const_aggregate_ref (expr
))
278 && is_gimple_min_invariant (result
))
281 /* Fold back MEM_REFs to reference trees. */
282 if (TREE_CODE (*t
) == MEM_REF
283 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
284 && integer_zerop (TREE_OPERAND (*t
, 1))
285 && (TREE_THIS_VOLATILE (*t
)
286 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0)))
287 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t
, 1)))
288 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t
))
289 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t
, 1)))))
290 /* We have to look out here to not drop a required conversion
291 from the rhs to the lhs if is_lhs, but we don't have the
292 rhs here to verify that. Thus require strict type
294 && types_compatible_p (TREE_TYPE (*t
),
295 TREE_TYPE (TREE_OPERAND
296 (TREE_OPERAND (*t
, 0), 0))))
299 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
300 tem
= maybe_fold_reference (expr
, is_lhs
);
305 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
307 tree tem
= maybe_fold_tmr (*t
);
311 tem
= maybe_fold_reference (expr
, is_lhs
);
322 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
323 replacement rhs for the statement or NULL_TREE if no simplification
324 could be made. It is assumed that the operands have been previously
328 fold_gimple_assign (gimple_stmt_iterator
*si
)
330 gimple stmt
= gsi_stmt (*si
);
331 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
332 location_t loc
= gimple_location (stmt
);
334 tree result
= NULL_TREE
;
336 switch (get_gimple_rhs_class (subcode
))
338 case GIMPLE_SINGLE_RHS
:
340 tree rhs
= gimple_assign_rhs1 (stmt
);
342 if (REFERENCE_CLASS_P (rhs
))
343 return maybe_fold_reference (rhs
, false);
345 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
347 tree ref
= TREE_OPERAND (rhs
, 0);
348 tree tem
= maybe_fold_reference (ref
, true);
350 && TREE_CODE (tem
) == MEM_REF
351 && integer_zerop (TREE_OPERAND (tem
, 1)))
352 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
354 result
= fold_convert (TREE_TYPE (rhs
),
355 build_fold_addr_expr_loc (loc
, tem
));
356 else if (TREE_CODE (ref
) == MEM_REF
357 && integer_zerop (TREE_OPERAND (ref
, 1)))
358 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
361 else if (TREE_CODE (rhs
) == CONSTRUCTOR
362 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
363 && (CONSTRUCTOR_NELTS (rhs
)
364 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
366 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
370 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
371 if (TREE_CODE (val
) != INTEGER_CST
372 && TREE_CODE (val
) != REAL_CST
373 && TREE_CODE (val
) != FIXED_CST
)
376 return build_vector_from_ctor (TREE_TYPE (rhs
),
377 CONSTRUCTOR_ELTS (rhs
));
380 else if (DECL_P (rhs
))
381 return unshare_expr (get_symbol_constant_value (rhs
));
383 /* If we couldn't fold the RHS, hand over to the generic
385 if (result
== NULL_TREE
)
388 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
389 that may have been added by fold, and "useless" type
390 conversions that might now be apparent due to propagation. */
391 STRIP_USELESS_TYPE_CONVERSION (result
);
393 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
400 case GIMPLE_UNARY_RHS
:
402 tree rhs
= gimple_assign_rhs1 (stmt
);
404 result
= fold_unary_loc (loc
, subcode
, gimple_expr_type (stmt
), rhs
);
407 /* If the operation was a conversion do _not_ mark a
408 resulting constant with TREE_OVERFLOW if the original
409 constant was not. These conversions have implementation
410 defined behavior and retaining the TREE_OVERFLOW flag
411 here would confuse later passes such as VRP. */
412 if (CONVERT_EXPR_CODE_P (subcode
)
413 && TREE_CODE (result
) == INTEGER_CST
414 && TREE_CODE (rhs
) == INTEGER_CST
)
415 TREE_OVERFLOW (result
) = TREE_OVERFLOW (rhs
);
417 STRIP_USELESS_TYPE_CONVERSION (result
);
418 if (valid_gimple_rhs_p (result
))
424 case GIMPLE_BINARY_RHS
:
425 /* Try to canonicalize for boolean-typed X the comparisons
426 X == 0, X == 1, X != 0, and X != 1. */
427 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
428 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
430 tree lhs
= gimple_assign_lhs (stmt
);
431 tree op1
= gimple_assign_rhs1 (stmt
);
432 tree op2
= gimple_assign_rhs2 (stmt
);
433 tree type
= TREE_TYPE (op1
);
435 /* Check whether the comparison operands are of the same boolean
436 type as the result type is.
437 Check that second operand is an integer-constant with value
439 if (TREE_CODE (op2
) == INTEGER_CST
440 && (integer_zerop (op2
) || integer_onep (op2
))
441 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
443 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
444 bool is_logical_not
= false;
446 /* X == 0 and X != 1 is a logical-not.of X
447 X == 1 and X != 0 is X */
448 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
449 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
450 is_logical_not
= true;
452 if (is_logical_not
== false)
454 /* Only for one-bit precision typed X the transformation
455 !X -> ~X is valied. */
456 else if (TYPE_PRECISION (type
) == 1)
457 result
= build1_loc (gimple_location (stmt
), BIT_NOT_EXPR
,
459 /* Otherwise we use !X -> X ^ 1. */
461 result
= build2_loc (gimple_location (stmt
), BIT_XOR_EXPR
,
462 type
, op1
, build_int_cst (type
, 1));
468 result
= fold_binary_loc (loc
, subcode
,
469 TREE_TYPE (gimple_assign_lhs (stmt
)),
470 gimple_assign_rhs1 (stmt
),
471 gimple_assign_rhs2 (stmt
));
475 STRIP_USELESS_TYPE_CONVERSION (result
);
476 if (valid_gimple_rhs_p (result
))
481 case GIMPLE_TERNARY_RHS
:
482 /* Try to fold a conditional expression. */
483 if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
485 tree op0
= gimple_assign_rhs1 (stmt
);
488 location_t cond_loc
= gimple_location (stmt
);
490 if (COMPARISON_CLASS_P (op0
))
492 fold_defer_overflow_warnings ();
493 tem
= fold_binary_loc (cond_loc
,
494 TREE_CODE (op0
), TREE_TYPE (op0
),
495 TREE_OPERAND (op0
, 0),
496 TREE_OPERAND (op0
, 1));
497 /* This is actually a conditional expression, not a GIMPLE
498 conditional statement, however, the valid_gimple_rhs_p
499 test still applies. */
500 set
= (tem
&& is_gimple_condexpr (tem
)
501 && valid_gimple_rhs_p (tem
));
502 fold_undefer_overflow_warnings (set
, stmt
, 0);
504 else if (is_gimple_min_invariant (op0
))
513 result
= fold_build3_loc (cond_loc
, COND_EXPR
,
514 TREE_TYPE (gimple_assign_lhs (stmt
)), tem
,
515 gimple_assign_rhs2 (stmt
),
516 gimple_assign_rhs3 (stmt
));
520 result
= fold_ternary_loc (loc
, subcode
,
521 TREE_TYPE (gimple_assign_lhs (stmt
)),
522 gimple_assign_rhs1 (stmt
),
523 gimple_assign_rhs2 (stmt
),
524 gimple_assign_rhs3 (stmt
));
528 STRIP_USELESS_TYPE_CONVERSION (result
);
529 if (valid_gimple_rhs_p (result
))
534 case GIMPLE_INVALID_RHS
:
541 /* Attempt to fold a conditional statement. Return true if any changes were
542 made. We only attempt to fold the condition expression, and do not perform
543 any transformation that would require alteration of the cfg. It is
544 assumed that the operands have been previously folded. */
547 fold_gimple_cond (gimple stmt
)
549 tree result
= fold_binary_loc (gimple_location (stmt
),
550 gimple_cond_code (stmt
),
552 gimple_cond_lhs (stmt
),
553 gimple_cond_rhs (stmt
));
557 STRIP_USELESS_TYPE_CONVERSION (result
);
558 if (is_gimple_condexpr (result
) && valid_gimple_rhs_p (result
))
560 gimple_cond_set_condition_from_tree (stmt
, result
);
568 /* Convert EXPR into a GIMPLE value suitable for substitution on the
569 RHS of an assignment. Insert the necessary statements before
570 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
571 is replaced. If the call is expected to produces a result, then it
572 is replaced by an assignment of the new RHS to the result variable.
573 If the result is to be ignored, then the call is replaced by a
574 GIMPLE_NOP. A proper VDEF chain is retained by making the first
575 VUSE and the last VDEF of the whole sequence be the same as the replaced
576 statement and using new SSA names for stores in between. */
579 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
582 gimple stmt
, new_stmt
;
583 gimple_stmt_iterator i
;
584 gimple_seq stmts
= NULL
;
585 struct gimplify_ctx gctx
;
589 stmt
= gsi_stmt (*si_p
);
591 gcc_assert (is_gimple_call (stmt
));
593 push_gimplify_context (&gctx
);
594 gctx
.into_ssa
= gimple_in_ssa_p (cfun
);
596 lhs
= gimple_call_lhs (stmt
);
597 if (lhs
== NULL_TREE
)
599 gimplify_and_add (expr
, &stmts
);
600 /* We can end up with folding a memcpy of an empty class assignment
601 which gets optimized away by C++ gimplification. */
602 if (gimple_seq_empty_p (stmts
))
604 pop_gimplify_context (NULL
);
605 if (gimple_in_ssa_p (cfun
))
607 unlink_stmt_vdef (stmt
);
610 gsi_remove (si_p
, true);
616 tree tmp
= get_initialized_tmp_var (expr
, &stmts
, NULL
);
617 new_stmt
= gimple_build_assign (lhs
, tmp
);
618 i
= gsi_last (stmts
);
619 gsi_insert_after_without_update (&i
, new_stmt
,
620 GSI_CONTINUE_LINKING
);
623 pop_gimplify_context (NULL
);
625 if (gimple_has_location (stmt
))
626 annotate_all_with_location (stmts
, gimple_location (stmt
));
628 /* First iterate over the replacement statements backward, assigning
629 virtual operands to their defining statements. */
631 for (i
= gsi_last (stmts
); !gsi_end_p (i
); gsi_prev (&i
))
633 new_stmt
= gsi_stmt (i
);
634 if ((gimple_assign_single_p (new_stmt
)
635 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
636 || (is_gimple_call (new_stmt
)
637 && (gimple_call_flags (new_stmt
)
638 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
642 vdef
= gimple_vdef (stmt
);
644 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
645 gimple_set_vdef (new_stmt
, vdef
);
646 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
647 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
648 laststore
= new_stmt
;
652 /* Second iterate over the statements forward, assigning virtual
653 operands to their uses. */
654 reaching_vuse
= gimple_vuse (stmt
);
655 for (i
= gsi_start (stmts
); !gsi_end_p (i
); gsi_next (&i
))
657 new_stmt
= gsi_stmt (i
);
658 /* If the new statement possibly has a VUSE, update it with exact SSA
659 name we know will reach this one. */
660 if (gimple_has_mem_ops (new_stmt
))
661 gimple_set_vuse (new_stmt
, reaching_vuse
);
662 gimple_set_modified (new_stmt
, true);
663 if (gimple_vdef (new_stmt
))
664 reaching_vuse
= gimple_vdef (new_stmt
);
667 /* If the new sequence does not do a store release the virtual
668 definition of the original statement. */
670 && reaching_vuse
== gimple_vuse (stmt
))
672 tree vdef
= gimple_vdef (stmt
);
674 && TREE_CODE (vdef
) == SSA_NAME
)
676 unlink_stmt_vdef (stmt
);
677 release_ssa_name (vdef
);
681 /* Finally replace the original statement with the sequence. */
682 gsi_replace_with_seq (si_p
, stmts
, false);
685 /* Return the string length, maximum string length or maximum value of
687 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
688 is not NULL and, for TYPE == 0, its value is not equal to the length
689 we determine or if we are unable to determine the length or value,
690 return false. VISITED is a bitmap of visited variables.
691 TYPE is 0 if string length should be returned, 1 for maximum string
692 length and 2 for maximum value ARG can have. */
695 get_maxval_strlen (tree arg
, tree
*length
, bitmap visited
, int type
)
700 if (TREE_CODE (arg
) != SSA_NAME
)
702 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
703 if (TREE_CODE (arg
) == ADDR_EXPR
704 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
705 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg
, 0), 1)))
707 tree aop0
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
708 if (TREE_CODE (aop0
) == INDIRECT_REF
709 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
710 return get_maxval_strlen (TREE_OPERAND (aop0
, 0),
711 length
, visited
, type
);
717 if (TREE_CODE (val
) != INTEGER_CST
718 || tree_int_cst_sgn (val
) < 0)
722 val
= c_strlen (arg
, 1);
730 if (TREE_CODE (*length
) != INTEGER_CST
731 || TREE_CODE (val
) != INTEGER_CST
)
734 if (tree_int_cst_lt (*length
, val
))
738 else if (simple_cst_equal (val
, *length
) != 1)
746 /* If ARG is registered for SSA update we cannot look at its defining
748 if (name_registered_for_update_p (arg
))
751 /* If we were already here, break the infinite cycle. */
752 if (!bitmap_set_bit (visited
, SSA_NAME_VERSION (arg
)))
756 def_stmt
= SSA_NAME_DEF_STMT (var
);
758 switch (gimple_code (def_stmt
))
761 /* The RHS of the statement defining VAR must either have a
762 constant length or come from another SSA_NAME with a constant
764 if (gimple_assign_single_p (def_stmt
)
765 || gimple_assign_unary_nop_p (def_stmt
))
767 tree rhs
= gimple_assign_rhs1 (def_stmt
);
768 return get_maxval_strlen (rhs
, length
, visited
, type
);
770 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
772 tree op2
= gimple_assign_rhs2 (def_stmt
);
773 tree op3
= gimple_assign_rhs3 (def_stmt
);
774 return get_maxval_strlen (op2
, length
, visited
, type
)
775 && get_maxval_strlen (op3
, length
, visited
, type
);
781 /* All the arguments of the PHI node must have the same constant
785 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
787 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
789 /* If this PHI has itself as an argument, we cannot
790 determine the string length of this argument. However,
791 if we can find a constant string length for the other
792 PHI args then we can still be sure that this is a
793 constant string length. So be optimistic and just
794 continue with the next argument. */
795 if (arg
== gimple_phi_result (def_stmt
))
798 if (!get_maxval_strlen (arg
, length
, visited
, type
))
810 /* Fold builtin call in statement STMT. Returns a simplified tree.
811 We may return a non-constant expression, including another call
812 to a different function and with different arguments, e.g.,
813 substituting memcpy for strcpy when the string length is known.
814 Note that some builtins expand into inline code that may not
815 be valid in GIMPLE. Callers must take care. */
818 gimple_fold_builtin (gimple stmt
)
826 location_t loc
= gimple_location (stmt
);
828 gcc_assert (is_gimple_call (stmt
));
830 ignore
= (gimple_call_lhs (stmt
) == NULL
);
832 /* First try the generic builtin folder. If that succeeds, return the
834 result
= fold_call_stmt (stmt
, ignore
);
842 /* Ignore MD builtins. */
843 callee
= gimple_call_fndecl (stmt
);
844 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_MD
)
847 /* Give up for always_inline inline builtins until they are
849 if (avoid_folding_inline_builtin (callee
))
852 /* If the builtin could not be folded, and it has no argument list,
854 nargs
= gimple_call_num_args (stmt
);
858 /* Limit the work only for builtins we know how to simplify. */
859 switch (DECL_FUNCTION_CODE (callee
))
861 case BUILT_IN_STRLEN
:
863 case BUILT_IN_FPUTS_UNLOCKED
:
867 case BUILT_IN_STRCPY
:
868 case BUILT_IN_STRNCPY
:
872 case BUILT_IN_MEMCPY_CHK
:
873 case BUILT_IN_MEMPCPY_CHK
:
874 case BUILT_IN_MEMMOVE_CHK
:
875 case BUILT_IN_MEMSET_CHK
:
876 case BUILT_IN_STRNCPY_CHK
:
877 case BUILT_IN_STPNCPY_CHK
:
881 case BUILT_IN_STRCPY_CHK
:
882 case BUILT_IN_STPCPY_CHK
:
886 case BUILT_IN_SNPRINTF_CHK
:
887 case BUILT_IN_VSNPRINTF_CHK
:
895 if (arg_idx
>= nargs
)
898 /* Try to use the dataflow information gathered by the CCP process. */
899 visited
= BITMAP_ALLOC (NULL
);
900 bitmap_clear (visited
);
902 memset (val
, 0, sizeof (val
));
903 a
= gimple_call_arg (stmt
, arg_idx
);
904 if (!get_maxval_strlen (a
, &val
[arg_idx
], visited
, type
))
905 val
[arg_idx
] = NULL_TREE
;
907 BITMAP_FREE (visited
);
910 switch (DECL_FUNCTION_CODE (callee
))
912 case BUILT_IN_STRLEN
:
913 if (val
[0] && nargs
== 1)
916 fold_convert (TREE_TYPE (gimple_call_lhs (stmt
)), val
[0]);
918 /* If the result is not a valid gimple value, or not a cast
919 of a valid gimple value, then we cannot use the result. */
920 if (is_gimple_val (new_val
)
921 || (CONVERT_EXPR_P (new_val
)
922 && is_gimple_val (TREE_OPERAND (new_val
, 0))))
927 case BUILT_IN_STRCPY
:
928 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 2)
929 result
= fold_builtin_strcpy (loc
, callee
,
930 gimple_call_arg (stmt
, 0),
931 gimple_call_arg (stmt
, 1),
935 case BUILT_IN_STRNCPY
:
936 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
937 result
= fold_builtin_strncpy (loc
, callee
,
938 gimple_call_arg (stmt
, 0),
939 gimple_call_arg (stmt
, 1),
940 gimple_call_arg (stmt
, 2),
946 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
947 gimple_call_arg (stmt
, 1),
948 ignore
, false, val
[0]);
951 case BUILT_IN_FPUTS_UNLOCKED
:
953 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
954 gimple_call_arg (stmt
, 1),
955 ignore
, true, val
[0]);
958 case BUILT_IN_MEMCPY_CHK
:
959 case BUILT_IN_MEMPCPY_CHK
:
960 case BUILT_IN_MEMMOVE_CHK
:
961 case BUILT_IN_MEMSET_CHK
:
962 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
963 result
= fold_builtin_memory_chk (loc
, callee
,
964 gimple_call_arg (stmt
, 0),
965 gimple_call_arg (stmt
, 1),
966 gimple_call_arg (stmt
, 2),
967 gimple_call_arg (stmt
, 3),
969 DECL_FUNCTION_CODE (callee
));
972 case BUILT_IN_STRCPY_CHK
:
973 case BUILT_IN_STPCPY_CHK
:
974 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
975 result
= fold_builtin_stxcpy_chk (loc
, callee
,
976 gimple_call_arg (stmt
, 0),
977 gimple_call_arg (stmt
, 1),
978 gimple_call_arg (stmt
, 2),
980 DECL_FUNCTION_CODE (callee
));
983 case BUILT_IN_STRNCPY_CHK
:
984 case BUILT_IN_STPNCPY_CHK
:
985 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
986 result
= fold_builtin_stxncpy_chk (loc
, gimple_call_arg (stmt
, 0),
987 gimple_call_arg (stmt
, 1),
988 gimple_call_arg (stmt
, 2),
989 gimple_call_arg (stmt
, 3),
991 DECL_FUNCTION_CODE (callee
));
994 case BUILT_IN_SNPRINTF_CHK
:
995 case BUILT_IN_VSNPRINTF_CHK
:
996 if (val
[1] && is_gimple_val (val
[1]))
997 result
= gimple_fold_builtin_snprintf_chk (stmt
, val
[1],
998 DECL_FUNCTION_CODE (callee
));
1005 if (result
&& ignore
)
1006 result
= fold_ignored_result (result
);
1011 /* Return a binfo to be used for devirtualization of calls based on an object
1012 represented by a declaration (i.e. a global or automatically allocated one)
1013 or NULL if it cannot be found or is not safe. CST is expected to be an
1014 ADDR_EXPR of such object or the function will return NULL. Currently it is
1015 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
1018 gimple_extract_devirt_binfo_from_cst (tree cst
)
1020 HOST_WIDE_INT offset
, size
, max_size
;
1021 tree base
, type
, expected_type
, binfo
;
1022 bool last_artificial
= false;
1024 if (!flag_devirtualize
1025 || TREE_CODE (cst
) != ADDR_EXPR
1026 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst
))) != RECORD_TYPE
)
1029 cst
= TREE_OPERAND (cst
, 0);
1030 expected_type
= TREE_TYPE (cst
);
1031 base
= get_ref_base_and_extent (cst
, &offset
, &size
, &max_size
);
1032 type
= TREE_TYPE (base
);
1036 || TREE_CODE (type
) != RECORD_TYPE
)
1039 /* Find the sub-object the constant actually refers to and mark whether it is
1040 an artificial one (as opposed to a user-defined one). */
1043 HOST_WIDE_INT pos
, size
;
1046 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (expected_type
))
1051 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
1053 if (TREE_CODE (fld
) != FIELD_DECL
)
1056 pos
= int_bit_position (fld
);
1057 size
= tree_low_cst (DECL_SIZE (fld
), 1);
1058 if (pos
<= offset
&& (pos
+ size
) > offset
)
1061 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
1064 last_artificial
= DECL_ARTIFICIAL (fld
);
1065 type
= TREE_TYPE (fld
);
1068 /* Artificial sub-objects are ancestors, we do not want to use them for
1069 devirtualization, at least not here. */
1070 if (last_artificial
)
1072 binfo
= TYPE_BINFO (type
);
1073 if (!binfo
|| BINFO_N_BASE_BINFOS (binfo
) > 0)
1079 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1080 The statement may be replaced by another statement, e.g., if the call
1081 simplifies to a constant value. Return true if any changes were made.
1082 It is assumed that the operands have been previously folded. */
1085 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
1087 gimple stmt
= gsi_stmt (*gsi
);
1089 bool changed
= false;
1092 /* Fold *& in call arguments. */
1093 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1094 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
1096 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
1099 gimple_call_set_arg (stmt
, i
, tmp
);
1104 /* Check for virtual calls that became direct calls. */
1105 callee
= gimple_call_fn (stmt
);
1106 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
1108 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
1110 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
1115 tree obj
= OBJ_TYPE_REF_OBJECT (callee
);
1116 tree binfo
= gimple_extract_devirt_binfo_from_cst (obj
);
1120 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee
));
1121 tree fndecl
= gimple_get_virt_method_for_binfo (token
, binfo
);
1124 gimple_call_set_fndecl (stmt
, fndecl
);
1134 /* Check for builtins that CCP can handle using information not
1135 available in the generic fold routines. */
1136 callee
= gimple_call_fndecl (stmt
);
1137 if (callee
&& DECL_BUILT_IN (callee
))
1139 tree result
= gimple_fold_builtin (stmt
);
1142 if (!update_call_from_tree (gsi
, result
))
1143 gimplify_and_update_call_from_tree (gsi
, result
);
1151 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1152 distinguishes both cases. */
1155 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
)
1157 bool changed
= false;
1158 gimple stmt
= gsi_stmt (*gsi
);
1160 gimple_stmt_iterator gsinext
= *gsi
;
1163 gsi_next (&gsinext
);
1164 next_stmt
= gsi_end_p (gsinext
) ? NULL
: gsi_stmt (gsinext
);
1166 /* Fold the main computation performed by the statement. */
1167 switch (gimple_code (stmt
))
1171 unsigned old_num_ops
= gimple_num_ops (stmt
);
1172 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
1173 tree lhs
= gimple_assign_lhs (stmt
);
1175 /* First canonicalize operand order. This avoids building new
1176 trees if this is the only thing fold would later do. */
1177 if ((commutative_tree_code (subcode
)
1178 || commutative_ternary_tree_code (subcode
))
1179 && tree_swap_operands_p (gimple_assign_rhs1 (stmt
),
1180 gimple_assign_rhs2 (stmt
), false))
1182 tree tem
= gimple_assign_rhs1 (stmt
);
1183 gimple_assign_set_rhs1 (stmt
, gimple_assign_rhs2 (stmt
));
1184 gimple_assign_set_rhs2 (stmt
, tem
);
1187 new_rhs
= fold_gimple_assign (gsi
);
1189 && !useless_type_conversion_p (TREE_TYPE (lhs
),
1190 TREE_TYPE (new_rhs
)))
1191 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
1194 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
1196 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
1203 changed
|= fold_gimple_cond (stmt
);
1207 changed
|= gimple_fold_call (gsi
, inplace
);
1211 /* Fold *& in asm operands. */
1214 const char **oconstraints
;
1215 const char *constraint
;
1216 bool allows_mem
, allows_reg
;
1218 noutputs
= gimple_asm_noutputs (stmt
);
1219 oconstraints
= XALLOCAVEC (const char *, noutputs
);
1221 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
1223 tree link
= gimple_asm_output_op (stmt
, i
);
1224 tree op
= TREE_VALUE (link
);
1226 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1227 if (REFERENCE_CLASS_P (op
)
1228 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
1230 TREE_VALUE (link
) = op
;
1234 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
1236 tree link
= gimple_asm_input_op (stmt
, i
);
1237 tree op
= TREE_VALUE (link
);
1239 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1240 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
1241 oconstraints
, &allows_mem
, &allows_reg
);
1242 if (REFERENCE_CLASS_P (op
)
1243 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
1246 TREE_VALUE (link
) = op
;
1254 if (gimple_debug_bind_p (stmt
))
1256 tree val
= gimple_debug_bind_get_value (stmt
);
1258 && REFERENCE_CLASS_P (val
))
1260 tree tem
= maybe_fold_reference (val
, false);
1263 gimple_debug_bind_set_value (stmt
, tem
);
1268 && TREE_CODE (val
) == ADDR_EXPR
)
1270 tree ref
= TREE_OPERAND (val
, 0);
1271 tree tem
= maybe_fold_reference (ref
, false);
1274 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
1275 gimple_debug_bind_set_value (stmt
, tem
);
1285 /* If stmt folds into nothing and it was the last stmt in a bb,
1286 don't call gsi_stmt. */
1287 if (gsi_end_p (*gsi
))
1289 gcc_assert (next_stmt
== NULL
);
1293 stmt
= gsi_stmt (*gsi
);
1295 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1296 as we'd changing the next stmt. */
1297 if (gimple_has_lhs (stmt
) && stmt
!= next_stmt
)
1299 tree lhs
= gimple_get_lhs (stmt
);
1300 if (lhs
&& REFERENCE_CLASS_P (lhs
))
1302 tree new_lhs
= maybe_fold_reference (lhs
, true);
1305 gimple_set_lhs (stmt
, new_lhs
);
1314 /* Fold the statement pointed to by GSI. In some cases, this function may
1315 replace the whole statement with a new one. Returns true iff folding
1317 The statement pointed to by GSI should be in valid gimple form but may
1318 be in unfolded state as resulting from for example constant propagation
1319 which can produce *&x = 0. */
1322 fold_stmt (gimple_stmt_iterator
*gsi
)
1324 return fold_stmt_1 (gsi
, false);
1327 /* Perform the minimal folding on statement *GSI. Only operations like
1328 *&x created by constant propagation are handled. The statement cannot
1329 be replaced with a new one. Return true if the statement was
1330 changed, false otherwise.
1331 The statement *GSI should be in valid gimple form but may
1332 be in unfolded state as resulting from for example constant propagation
1333 which can produce *&x = 0. */
1336 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
1338 gimple stmt
= gsi_stmt (*gsi
);
1339 bool changed
= fold_stmt_1 (gsi
, true);
1340 gcc_assert (gsi_stmt (*gsi
) == stmt
);
1344 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1345 if EXPR is null or we don't know how.
1346 If non-null, the result always has boolean type. */
1349 canonicalize_bool (tree expr
, bool invert
)
1355 if (integer_nonzerop (expr
))
1356 return boolean_false_node
;
1357 else if (integer_zerop (expr
))
1358 return boolean_true_node
;
1359 else if (TREE_CODE (expr
) == SSA_NAME
)
1360 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
1361 build_int_cst (TREE_TYPE (expr
), 0));
1362 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1363 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
1365 TREE_OPERAND (expr
, 0),
1366 TREE_OPERAND (expr
, 1));
1372 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1374 if (integer_nonzerop (expr
))
1375 return boolean_true_node
;
1376 else if (integer_zerop (expr
))
1377 return boolean_false_node
;
1378 else if (TREE_CODE (expr
) == SSA_NAME
)
1379 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
1380 build_int_cst (TREE_TYPE (expr
), 0));
1381 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1382 return fold_build2 (TREE_CODE (expr
),
1384 TREE_OPERAND (expr
, 0),
1385 TREE_OPERAND (expr
, 1));
1391 /* Check to see if a boolean expression EXPR is logically equivalent to the
1392 comparison (OP1 CODE OP2). Check for various identities involving
1396 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
1397 const_tree op1
, const_tree op2
)
1401 /* The obvious case. */
1402 if (TREE_CODE (expr
) == code
1403 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
1404 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
1407 /* Check for comparing (name, name != 0) and the case where expr
1408 is an SSA_NAME with a definition matching the comparison. */
1409 if (TREE_CODE (expr
) == SSA_NAME
1410 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1412 if (operand_equal_p (expr
, op1
, 0))
1413 return ((code
== NE_EXPR
&& integer_zerop (op2
))
1414 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
1415 s
= SSA_NAME_DEF_STMT (expr
);
1416 if (is_gimple_assign (s
)
1417 && gimple_assign_rhs_code (s
) == code
1418 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
1419 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
1423 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1424 of name is a comparison, recurse. */
1425 if (TREE_CODE (op1
) == SSA_NAME
1426 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
1428 s
= SSA_NAME_DEF_STMT (op1
);
1429 if (is_gimple_assign (s
)
1430 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
1432 enum tree_code c
= gimple_assign_rhs_code (s
);
1433 if ((c
== NE_EXPR
&& integer_zerop (op2
))
1434 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
1435 return same_bool_comparison_p (expr
, c
,
1436 gimple_assign_rhs1 (s
),
1437 gimple_assign_rhs2 (s
));
1438 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
1439 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
1440 return same_bool_comparison_p (expr
,
1441 invert_tree_comparison (c
, false),
1442 gimple_assign_rhs1 (s
),
1443 gimple_assign_rhs2 (s
));
1449 /* Check to see if two boolean expressions OP1 and OP2 are logically
1453 same_bool_result_p (const_tree op1
, const_tree op2
)
1455 /* Simple cases first. */
1456 if (operand_equal_p (op1
, op2
, 0))
1459 /* Check the cases where at least one of the operands is a comparison.
1460 These are a bit smarter than operand_equal_p in that they apply some
1461 identifies on SSA_NAMEs. */
1462 if (TREE_CODE_CLASS (TREE_CODE (op2
)) == tcc_comparison
1463 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
1464 TREE_OPERAND (op2
, 0),
1465 TREE_OPERAND (op2
, 1)))
1467 if (TREE_CODE_CLASS (TREE_CODE (op1
)) == tcc_comparison
1468 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
1469 TREE_OPERAND (op1
, 0),
1470 TREE_OPERAND (op1
, 1)))
1477 /* Forward declarations for some mutually recursive functions. */
1480 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1481 enum tree_code code2
, tree op2a
, tree op2b
);
1483 and_var_with_comparison (tree var
, bool invert
,
1484 enum tree_code code2
, tree op2a
, tree op2b
);
1486 and_var_with_comparison_1 (gimple stmt
,
1487 enum tree_code code2
, tree op2a
, tree op2b
);
1489 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1490 enum tree_code code2
, tree op2a
, tree op2b
);
1492 or_var_with_comparison (tree var
, bool invert
,
1493 enum tree_code code2
, tree op2a
, tree op2b
);
1495 or_var_with_comparison_1 (gimple stmt
,
1496 enum tree_code code2
, tree op2a
, tree op2b
);
1498 /* Helper function for and_comparisons_1: try to simplify the AND of the
1499 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1500 If INVERT is true, invert the value of the VAR before doing the AND.
1501 Return NULL_EXPR if we can't simplify this to a single expression. */
1504 and_var_with_comparison (tree var
, bool invert
,
1505 enum tree_code code2
, tree op2a
, tree op2b
)
1508 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1510 /* We can only deal with variables whose definitions are assignments. */
1511 if (!is_gimple_assign (stmt
))
1514 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1515 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1516 Then we only have to consider the simpler non-inverted cases. */
1518 t
= or_var_with_comparison_1 (stmt
,
1519 invert_tree_comparison (code2
, false),
1522 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
1523 return canonicalize_bool (t
, invert
);
1526 /* Try to simplify the AND of the ssa variable defined by the assignment
1527 STMT with the comparison specified by (OP2A CODE2 OP2B).
1528 Return NULL_EXPR if we can't simplify this to a single expression. */
1531 and_var_with_comparison_1 (gimple stmt
,
1532 enum tree_code code2
, tree op2a
, tree op2b
)
1534 tree var
= gimple_assign_lhs (stmt
);
1535 tree true_test_var
= NULL_TREE
;
1536 tree false_test_var
= NULL_TREE
;
1537 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
1539 /* Check for identities like (var AND (var == 0)) => false. */
1540 if (TREE_CODE (op2a
) == SSA_NAME
1541 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
1543 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
1544 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
1546 true_test_var
= op2a
;
1547 if (var
== true_test_var
)
1550 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
1551 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
1553 false_test_var
= op2a
;
1554 if (var
== false_test_var
)
1555 return boolean_false_node
;
1559 /* If the definition is a comparison, recurse on it. */
1560 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
1562 tree t
= and_comparisons_1 (innercode
,
1563 gimple_assign_rhs1 (stmt
),
1564 gimple_assign_rhs2 (stmt
),
1572 /* If the definition is an AND or OR expression, we may be able to
1573 simplify by reassociating. */
1574 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
1575 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
1577 tree inner1
= gimple_assign_rhs1 (stmt
);
1578 tree inner2
= gimple_assign_rhs2 (stmt
);
1581 tree partial
= NULL_TREE
;
1582 bool is_and
= (innercode
== BIT_AND_EXPR
);
1584 /* Check for boolean identities that don't require recursive examination
1586 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1587 inner1 AND (inner1 OR inner2) => inner1
1588 !inner1 AND (inner1 AND inner2) => false
1589 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1590 Likewise for similar cases involving inner2. */
1591 if (inner1
== true_test_var
)
1592 return (is_and
? var
: inner1
);
1593 else if (inner2
== true_test_var
)
1594 return (is_and
? var
: inner2
);
1595 else if (inner1
== false_test_var
)
1597 ? boolean_false_node
1598 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
1599 else if (inner2
== false_test_var
)
1601 ? boolean_false_node
1602 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
1604 /* Next, redistribute/reassociate the AND across the inner tests.
1605 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1606 if (TREE_CODE (inner1
) == SSA_NAME
1607 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
1608 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
1609 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
1610 gimple_assign_rhs1 (s
),
1611 gimple_assign_rhs2 (s
),
1612 code2
, op2a
, op2b
)))
1614 /* Handle the AND case, where we are reassociating:
1615 (inner1 AND inner2) AND (op2a code2 op2b)
1617 If the partial result t is a constant, we win. Otherwise
1618 continue on to try reassociating with the other inner test. */
1621 if (integer_onep (t
))
1623 else if (integer_zerop (t
))
1624 return boolean_false_node
;
1627 /* Handle the OR case, where we are redistributing:
1628 (inner1 OR inner2) AND (op2a code2 op2b)
1629 => (t OR (inner2 AND (op2a code2 op2b))) */
1630 else if (integer_onep (t
))
1631 return boolean_true_node
;
1633 /* Save partial result for later. */
1637 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1638 if (TREE_CODE (inner2
) == SSA_NAME
1639 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
1640 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
1641 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
1642 gimple_assign_rhs1 (s
),
1643 gimple_assign_rhs2 (s
),
1644 code2
, op2a
, op2b
)))
1646 /* Handle the AND case, where we are reassociating:
1647 (inner1 AND inner2) AND (op2a code2 op2b)
1648 => (inner1 AND t) */
1651 if (integer_onep (t
))
1653 else if (integer_zerop (t
))
1654 return boolean_false_node
;
1655 /* If both are the same, we can apply the identity
1657 else if (partial
&& same_bool_result_p (t
, partial
))
1661 /* Handle the OR case. where we are redistributing:
1662 (inner1 OR inner2) AND (op2a code2 op2b)
1663 => (t OR (inner1 AND (op2a code2 op2b)))
1664 => (t OR partial) */
1667 if (integer_onep (t
))
1668 return boolean_true_node
;
1671 /* We already got a simplification for the other
1672 operand to the redistributed OR expression. The
1673 interesting case is when at least one is false.
1674 Or, if both are the same, we can apply the identity
1676 if (integer_zerop (partial
))
1678 else if (integer_zerop (t
))
1680 else if (same_bool_result_p (t
, partial
))
1689 /* Try to simplify the AND of two comparisons defined by
1690 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1691 If this can be done without constructing an intermediate value,
1692 return the resulting tree; otherwise NULL_TREE is returned.
1693 This function is deliberately asymmetric as it recurses on SSA_DEFs
1694 in the first comparison but not the second. */
1697 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1698 enum tree_code code2
, tree op2a
, tree op2b
)
1700 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
1702 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1703 if (operand_equal_p (op1a
, op2a
, 0)
1704 && operand_equal_p (op1b
, op2b
, 0))
1706 /* Result will be either NULL_TREE, or a combined comparison. */
1707 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
1708 TRUTH_ANDIF_EXPR
, code1
, code2
,
1709 truth_type
, op1a
, op1b
);
1714 /* Likewise the swapped case of the above. */
1715 if (operand_equal_p (op1a
, op2b
, 0)
1716 && operand_equal_p (op1b
, op2a
, 0))
1718 /* Result will be either NULL_TREE, or a combined comparison. */
1719 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
1720 TRUTH_ANDIF_EXPR
, code1
,
1721 swap_tree_comparison (code2
),
1722 truth_type
, op1a
, op1b
);
1727 /* If both comparisons are of the same value against constants, we might
1728 be able to merge them. */
1729 if (operand_equal_p (op1a
, op2a
, 0)
1730 && TREE_CODE (op1b
) == INTEGER_CST
1731 && TREE_CODE (op2b
) == INTEGER_CST
)
1733 int cmp
= tree_int_cst_compare (op1b
, op2b
);
1735 /* If we have (op1a == op1b), we should either be able to
1736 return that or FALSE, depending on whether the constant op1b
1737 also satisfies the other comparison against op2b. */
1738 if (code1
== EQ_EXPR
)
1744 case EQ_EXPR
: val
= (cmp
== 0); break;
1745 case NE_EXPR
: val
= (cmp
!= 0); break;
1746 case LT_EXPR
: val
= (cmp
< 0); break;
1747 case GT_EXPR
: val
= (cmp
> 0); break;
1748 case LE_EXPR
: val
= (cmp
<= 0); break;
1749 case GE_EXPR
: val
= (cmp
>= 0); break;
1750 default: done
= false;
1755 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1757 return boolean_false_node
;
1760 /* Likewise if the second comparison is an == comparison. */
1761 else if (code2
== EQ_EXPR
)
1767 case EQ_EXPR
: val
= (cmp
== 0); break;
1768 case NE_EXPR
: val
= (cmp
!= 0); break;
1769 case LT_EXPR
: val
= (cmp
> 0); break;
1770 case GT_EXPR
: val
= (cmp
< 0); break;
1771 case LE_EXPR
: val
= (cmp
>= 0); break;
1772 case GE_EXPR
: val
= (cmp
<= 0); break;
1773 default: done
= false;
1778 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1780 return boolean_false_node
;
1784 /* Same business with inequality tests. */
1785 else if (code1
== NE_EXPR
)
1790 case EQ_EXPR
: val
= (cmp
!= 0); break;
1791 case NE_EXPR
: val
= (cmp
== 0); break;
1792 case LT_EXPR
: val
= (cmp
>= 0); break;
1793 case GT_EXPR
: val
= (cmp
<= 0); break;
1794 case LE_EXPR
: val
= (cmp
> 0); break;
1795 case GE_EXPR
: val
= (cmp
< 0); break;
1800 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1802 else if (code2
== NE_EXPR
)
1807 case EQ_EXPR
: val
= (cmp
== 0); break;
1808 case NE_EXPR
: val
= (cmp
!= 0); break;
1809 case LT_EXPR
: val
= (cmp
<= 0); break;
1810 case GT_EXPR
: val
= (cmp
>= 0); break;
1811 case LE_EXPR
: val
= (cmp
< 0); break;
1812 case GE_EXPR
: val
= (cmp
> 0); break;
1817 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1820 /* Chose the more restrictive of two < or <= comparisons. */
1821 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
1822 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
1824 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
1825 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1827 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1830 /* Likewise chose the more restrictive of two > or >= comparisons. */
1831 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
1832 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
1834 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
1835 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1837 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1840 /* Check for singleton ranges. */
1842 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
1843 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
1844 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
1846 /* Check for disjoint ranges. */
1848 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
1849 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
1850 return boolean_false_node
;
1852 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
1853 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
1854 return boolean_false_node
;
1857 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1858 NAME's definition is a truth value. See if there are any simplifications
1859 that can be done against the NAME's definition. */
1860 if (TREE_CODE (op1a
) == SSA_NAME
1861 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
1862 && (integer_zerop (op1b
) || integer_onep (op1b
)))
1864 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
1865 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
1866 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
1867 switch (gimple_code (stmt
))
1870 /* Try to simplify by copy-propagating the definition. */
1871 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
1874 /* If every argument to the PHI produces the same result when
1875 ANDed with the second comparison, we win.
1876 Do not do this unless the type is bool since we need a bool
1877 result here anyway. */
1878 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
1880 tree result
= NULL_TREE
;
1882 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1884 tree arg
= gimple_phi_arg_def (stmt
, i
);
1886 /* If this PHI has itself as an argument, ignore it.
1887 If all the other args produce the same result,
1889 if (arg
== gimple_phi_result (stmt
))
1891 else if (TREE_CODE (arg
) == INTEGER_CST
)
1893 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
1896 result
= boolean_false_node
;
1897 else if (!integer_zerop (result
))
1901 result
= fold_build2 (code2
, boolean_type_node
,
1903 else if (!same_bool_comparison_p (result
,
1907 else if (TREE_CODE (arg
) == SSA_NAME
1908 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
1911 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
1912 /* In simple cases we can look through PHI nodes,
1913 but we have to be careful with loops.
1915 if (! dom_info_available_p (CDI_DOMINATORS
)
1916 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
1917 || dominated_by_p (CDI_DOMINATORS
,
1918 gimple_bb (def_stmt
),
1921 temp
= and_var_with_comparison (arg
, invert
, code2
,
1927 else if (!same_bool_result_p (result
, temp
))
1943 /* Try to simplify the AND of two comparisons, specified by
1944 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1945 If this can be simplified to a single expression (without requiring
1946 introducing more SSA variables to hold intermediate values),
1947 return the resulting tree. Otherwise return NULL_TREE.
1948 If the result expression is non-null, it has boolean type. */
1951 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
1952 enum tree_code code2
, tree op2a
, tree op2b
)
1954 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
1958 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
1961 /* Helper function for or_comparisons_1: try to simplify the OR of the
1962 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1963 If INVERT is true, invert the value of VAR before doing the OR.
1964 Return NULL_EXPR if we can't simplify this to a single expression. */
1967 or_var_with_comparison (tree var
, bool invert
,
1968 enum tree_code code2
, tree op2a
, tree op2b
)
1971 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1973 /* We can only deal with variables whose definitions are assignments. */
1974 if (!is_gimple_assign (stmt
))
1977 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1978 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1979 Then we only have to consider the simpler non-inverted cases. */
1981 t
= and_var_with_comparison_1 (stmt
,
1982 invert_tree_comparison (code2
, false),
1985 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
1986 return canonicalize_bool (t
, invert
);
1989 /* Try to simplify the OR of the ssa variable defined by the assignment
1990 STMT with the comparison specified by (OP2A CODE2 OP2B).
1991 Return NULL_EXPR if we can't simplify this to a single expression. */
1994 or_var_with_comparison_1 (gimple stmt
,
1995 enum tree_code code2
, tree op2a
, tree op2b
)
1997 tree var
= gimple_assign_lhs (stmt
);
1998 tree true_test_var
= NULL_TREE
;
1999 tree false_test_var
= NULL_TREE
;
2000 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
2002 /* Check for identities like (var OR (var != 0)) => true . */
2003 if (TREE_CODE (op2a
) == SSA_NAME
2004 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
2006 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
2007 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
2009 true_test_var
= op2a
;
2010 if (var
== true_test_var
)
2013 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
2014 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
2016 false_test_var
= op2a
;
2017 if (var
== false_test_var
)
2018 return boolean_true_node
;
2022 /* If the definition is a comparison, recurse on it. */
2023 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
2025 tree t
= or_comparisons_1 (innercode
,
2026 gimple_assign_rhs1 (stmt
),
2027 gimple_assign_rhs2 (stmt
),
2035 /* If the definition is an AND or OR expression, we may be able to
2036 simplify by reassociating. */
2037 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
2038 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
2040 tree inner1
= gimple_assign_rhs1 (stmt
);
2041 tree inner2
= gimple_assign_rhs2 (stmt
);
2044 tree partial
= NULL_TREE
;
2045 bool is_or
= (innercode
== BIT_IOR_EXPR
);
2047 /* Check for boolean identities that don't require recursive examination
2049 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2050 inner1 OR (inner1 AND inner2) => inner1
2051 !inner1 OR (inner1 OR inner2) => true
2052 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2054 if (inner1
== true_test_var
)
2055 return (is_or
? var
: inner1
);
2056 else if (inner2
== true_test_var
)
2057 return (is_or
? var
: inner2
);
2058 else if (inner1
== false_test_var
)
2061 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
2062 else if (inner2
== false_test_var
)
2065 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
2067 /* Next, redistribute/reassociate the OR across the inner tests.
2068 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2069 if (TREE_CODE (inner1
) == SSA_NAME
2070 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
2071 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2072 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2073 gimple_assign_rhs1 (s
),
2074 gimple_assign_rhs2 (s
),
2075 code2
, op2a
, op2b
)))
2077 /* Handle the OR case, where we are reassociating:
2078 (inner1 OR inner2) OR (op2a code2 op2b)
2080 If the partial result t is a constant, we win. Otherwise
2081 continue on to try reassociating with the other inner test. */
2084 if (integer_onep (t
))
2085 return boolean_true_node
;
2086 else if (integer_zerop (t
))
2090 /* Handle the AND case, where we are redistributing:
2091 (inner1 AND inner2) OR (op2a code2 op2b)
2092 => (t AND (inner2 OR (op2a code op2b))) */
2093 else if (integer_zerop (t
))
2094 return boolean_false_node
;
2096 /* Save partial result for later. */
2100 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2101 if (TREE_CODE (inner2
) == SSA_NAME
2102 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
2103 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2104 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2105 gimple_assign_rhs1 (s
),
2106 gimple_assign_rhs2 (s
),
2107 code2
, op2a
, op2b
)))
2109 /* Handle the OR case, where we are reassociating:
2110 (inner1 OR inner2) OR (op2a code2 op2b)
2112 => (t OR partial) */
2115 if (integer_zerop (t
))
2117 else if (integer_onep (t
))
2118 return boolean_true_node
;
2119 /* If both are the same, we can apply the identity
2121 else if (partial
&& same_bool_result_p (t
, partial
))
2125 /* Handle the AND case, where we are redistributing:
2126 (inner1 AND inner2) OR (op2a code2 op2b)
2127 => (t AND (inner1 OR (op2a code2 op2b)))
2128 => (t AND partial) */
2131 if (integer_zerop (t
))
2132 return boolean_false_node
;
2135 /* We already got a simplification for the other
2136 operand to the redistributed AND expression. The
2137 interesting case is when at least one is true.
2138 Or, if both are the same, we can apply the identity
2140 if (integer_onep (partial
))
2142 else if (integer_onep (t
))
2144 else if (same_bool_result_p (t
, partial
))
2153 /* Try to simplify the OR of two comparisons defined by
2154 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2155 If this can be done without constructing an intermediate value,
2156 return the resulting tree; otherwise NULL_TREE is returned.
2157 This function is deliberately asymmetric as it recurses on SSA_DEFs
2158 in the first comparison but not the second. */
2161 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
2162 enum tree_code code2
, tree op2a
, tree op2b
)
2164 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
2166 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2167 if (operand_equal_p (op1a
, op2a
, 0)
2168 && operand_equal_p (op1b
, op2b
, 0))
2170 /* Result will be either NULL_TREE, or a combined comparison. */
2171 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2172 TRUTH_ORIF_EXPR
, code1
, code2
,
2173 truth_type
, op1a
, op1b
);
2178 /* Likewise the swapped case of the above. */
2179 if (operand_equal_p (op1a
, op2b
, 0)
2180 && operand_equal_p (op1b
, op2a
, 0))
2182 /* Result will be either NULL_TREE, or a combined comparison. */
2183 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2184 TRUTH_ORIF_EXPR
, code1
,
2185 swap_tree_comparison (code2
),
2186 truth_type
, op1a
, op1b
);
2191 /* If both comparisons are of the same value against constants, we might
2192 be able to merge them. */
2193 if (operand_equal_p (op1a
, op2a
, 0)
2194 && TREE_CODE (op1b
) == INTEGER_CST
2195 && TREE_CODE (op2b
) == INTEGER_CST
)
2197 int cmp
= tree_int_cst_compare (op1b
, op2b
);
2199 /* If we have (op1a != op1b), we should either be able to
2200 return that or TRUE, depending on whether the constant op1b
2201 also satisfies the other comparison against op2b. */
2202 if (code1
== NE_EXPR
)
2208 case EQ_EXPR
: val
= (cmp
== 0); break;
2209 case NE_EXPR
: val
= (cmp
!= 0); break;
2210 case LT_EXPR
: val
= (cmp
< 0); break;
2211 case GT_EXPR
: val
= (cmp
> 0); break;
2212 case LE_EXPR
: val
= (cmp
<= 0); break;
2213 case GE_EXPR
: val
= (cmp
>= 0); break;
2214 default: done
= false;
2219 return boolean_true_node
;
2221 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2224 /* Likewise if the second comparison is a != comparison. */
2225 else if (code2
== NE_EXPR
)
2231 case EQ_EXPR
: val
= (cmp
== 0); break;
2232 case NE_EXPR
: val
= (cmp
!= 0); break;
2233 case LT_EXPR
: val
= (cmp
> 0); break;
2234 case GT_EXPR
: val
= (cmp
< 0); break;
2235 case LE_EXPR
: val
= (cmp
>= 0); break;
2236 case GE_EXPR
: val
= (cmp
<= 0); break;
2237 default: done
= false;
2242 return boolean_true_node
;
2244 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2248 /* See if an equality test is redundant with the other comparison. */
2249 else if (code1
== EQ_EXPR
)
2254 case EQ_EXPR
: val
= (cmp
== 0); break;
2255 case NE_EXPR
: val
= (cmp
!= 0); break;
2256 case LT_EXPR
: val
= (cmp
< 0); break;
2257 case GT_EXPR
: val
= (cmp
> 0); break;
2258 case LE_EXPR
: val
= (cmp
<= 0); break;
2259 case GE_EXPR
: val
= (cmp
>= 0); break;
2264 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2266 else if (code2
== EQ_EXPR
)
2271 case EQ_EXPR
: val
= (cmp
== 0); break;
2272 case NE_EXPR
: val
= (cmp
!= 0); break;
2273 case LT_EXPR
: val
= (cmp
> 0); break;
2274 case GT_EXPR
: val
= (cmp
< 0); break;
2275 case LE_EXPR
: val
= (cmp
>= 0); break;
2276 case GE_EXPR
: val
= (cmp
<= 0); break;
2281 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2284 /* Chose the less restrictive of two < or <= comparisons. */
2285 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
2286 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2288 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
2289 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2291 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2294 /* Likewise chose the less restrictive of two > or >= comparisons. */
2295 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
2296 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2298 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
2299 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2301 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2304 /* Check for singleton ranges. */
2306 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
2307 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
2308 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
2310 /* Check for less/greater pairs that don't restrict the range at all. */
2312 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
2313 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2314 return boolean_true_node
;
2316 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
2317 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2318 return boolean_true_node
;
2321 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2322 NAME's definition is a truth value. See if there are any simplifications
2323 that can be done against the NAME's definition. */
2324 if (TREE_CODE (op1a
) == SSA_NAME
2325 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
2326 && (integer_zerop (op1b
) || integer_onep (op1b
)))
2328 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
2329 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
2330 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
2331 switch (gimple_code (stmt
))
2334 /* Try to simplify by copy-propagating the definition. */
2335 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
2338 /* If every argument to the PHI produces the same result when
2339 ORed with the second comparison, we win.
2340 Do not do this unless the type is bool since we need a bool
2341 result here anyway. */
2342 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
2344 tree result
= NULL_TREE
;
2346 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
2348 tree arg
= gimple_phi_arg_def (stmt
, i
);
2350 /* If this PHI has itself as an argument, ignore it.
2351 If all the other args produce the same result,
2353 if (arg
== gimple_phi_result (stmt
))
2355 else if (TREE_CODE (arg
) == INTEGER_CST
)
2357 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
2360 result
= boolean_true_node
;
2361 else if (!integer_onep (result
))
2365 result
= fold_build2 (code2
, boolean_type_node
,
2367 else if (!same_bool_comparison_p (result
,
2371 else if (TREE_CODE (arg
) == SSA_NAME
2372 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
2375 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
2376 /* In simple cases we can look through PHI nodes,
2377 but we have to be careful with loops.
2379 if (! dom_info_available_p (CDI_DOMINATORS
)
2380 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
2381 || dominated_by_p (CDI_DOMINATORS
,
2382 gimple_bb (def_stmt
),
2385 temp
= or_var_with_comparison (arg
, invert
, code2
,
2391 else if (!same_bool_result_p (result
, temp
))
/* NOTE(review): lossy extraction — the embedded original line numbers jump
   2412 -> 2415 -> 2416 -> 2418 -> 2422, so the return type, opening brace,
   and the intermediate check between the two or_comparisons_1 calls
   (presumably "if (t) return t;") are missing here.  Recover them from the
   original gimple-fold.c before editing.  */
2407 /* Try to simplify the OR of two comparisons, specified by
2408 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2409 If this can be simplified to a single expression (without requiring
2410 introducing more SSA variables to hold intermediate values),
2411 return the resulting tree. Otherwise return NULL_TREE.
2412 If the result expression is non-null, it has boolean type. */
2415 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
2416 enum tree_code code2
, tree op2a
, tree op2b
)
/* First attempt: fold with the operands in the given order.  */
2418 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
/* Second attempt: retry with the two comparisons swapped.  */
2422 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
/* NOTE(review): lossy extraction — gaps in the embedded original line
   numbers (e.g. 2427, 2429-2430, 2434-2435, 2437, 2440-2442, and many
   more) mean case labels, returns, braces and the GIMPLE_CALL /
   GIMPLE_COND handling are partially missing.  Recover the full function
   from the original gimple-fold.c before editing.  */
2426 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2428 Either NULL_TREE, a simplified but non-constant or a constant
2431 ??? This should go into a gimple-fold-inline.h file to be eventually
2432 privatized with the single valueize function used in the various TUs
2433 to avoid the indirect function call overhead. */
2436 gimple_fold_stmt_to_constant_1 (gimple stmt
, tree (*valueize
) (tree
))
2438 location_t loc
= gimple_location (stmt
);
/* Dispatch on the statement kind; the visible arms handle assignments
   (by RHS class) and, further below, builtin calls.  */
2439 switch (gimple_code (stmt
))
2443 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
2445 switch (get_gimple_rhs_class (subcode
))
2447 case GIMPLE_SINGLE_RHS
:
2449 tree rhs
= gimple_assign_rhs1 (stmt
);
2450 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
2452 if (TREE_CODE (rhs
) == SSA_NAME
)
2454 /* If the RHS is an SSA_NAME, return its known constant value,
2456 return (*valueize
) (rhs
);
2458 /* Handle propagating invariant addresses into address
2460 else if (TREE_CODE (rhs
) == ADDR_EXPR
2461 && !is_gimple_min_invariant (rhs
))
2463 HOST_WIDE_INT offset
= 0;
2465 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
2469 && (CONSTANT_CLASS_P (base
)
2470 || decl_address_invariant_p (base
)))
2471 return build_invariant_address (TREE_TYPE (rhs
),
/* Vector CONSTRUCTOR with one value per vector element: valueize each
   element and rebuild as a VECTOR_CST when all become constants.  */
2474 else if (TREE_CODE (rhs
) == CONSTRUCTOR
2475 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
2476 && (CONSTRUCTOR_NELTS (rhs
)
2477 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
2482 vec
= XALLOCAVEC (tree
,
2483 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
)));
2484 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
2486 val
= (*valueize
) (val
);
2487 if (TREE_CODE (val
) == INTEGER_CST
2488 || TREE_CODE (val
) == REAL_CST
2489 || TREE_CODE (val
) == FIXED_CST
)
2495 return build_vector (TREE_TYPE (rhs
), vec
);
/* Reference-class RHS: valueize the inner SSA name and refold the
   wrapping reference expression.  */
2498 if (kind
== tcc_reference
)
2500 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
2501 || TREE_CODE (rhs
) == REALPART_EXPR
2502 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
2503 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2505 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2506 return fold_unary_loc (EXPR_LOCATION (rhs
),
2508 TREE_TYPE (rhs
), val
);
2510 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
2511 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2513 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2514 return fold_ternary_loc (EXPR_LOCATION (rhs
),
2516 TREE_TYPE (rhs
), val
,
2517 TREE_OPERAND (rhs
, 1),
2518 TREE_OPERAND (rhs
, 2));
2520 else if (TREE_CODE (rhs
) == MEM_REF
2521 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2523 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2524 if (TREE_CODE (val
) == ADDR_EXPR
2525 && is_gimple_min_invariant (val
))
2527 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
2529 TREE_OPERAND (rhs
, 1));
2534 return fold_const_aggregate_ref_1 (rhs
, valueize
);
2536 else if (kind
== tcc_declaration
)
2537 return get_symbol_constant_value (rhs
);
2541 case GIMPLE_UNARY_RHS
:
2543 /* Handle unary operators that can appear in GIMPLE form.
2544 Note that we know the single operand must be a constant,
2545 so this should almost always return a simplified RHS. */
2546 tree lhs
= gimple_assign_lhs (stmt
);
2547 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2549 /* Conversions are useless for CCP purposes if they are
2550 value-preserving. Thus the restrictions that
2551 useless_type_conversion_p places for restrict qualification
2552 of pointer types should not apply here.
2553 Substitution later will only substitute to allowed places. */
2554 if (CONVERT_EXPR_CODE_P (subcode
)
2555 && POINTER_TYPE_P (TREE_TYPE (lhs
))
2556 && POINTER_TYPE_P (TREE_TYPE (op0
))
2557 && TYPE_ADDR_SPACE (TREE_TYPE (lhs
))
2558 == TYPE_ADDR_SPACE (TREE_TYPE (op0
))
2559 && TYPE_MODE (TREE_TYPE (lhs
))
2560 == TYPE_MODE (TREE_TYPE (op0
)))
2564 fold_unary_ignore_overflow_loc (loc
, subcode
,
2565 gimple_expr_type (stmt
), op0
);
2568 case GIMPLE_BINARY_RHS
:
2570 /* Handle binary operators that can appear in GIMPLE form. */
2571 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2572 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
2574 /* Translate &x + CST into an invariant form suitable for
2575 further propagation. */
2576 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
2577 && TREE_CODE (op0
) == ADDR_EXPR
2578 && TREE_CODE (op1
) == INTEGER_CST
)
2580 tree off
= fold_convert (ptr_type_node
, op1
);
2581 return build_fold_addr_expr_loc
2583 fold_build2 (MEM_REF
,
2584 TREE_TYPE (TREE_TYPE (op0
)),
2585 unshare_expr (op0
), off
));
2588 return fold_binary_loc (loc
, subcode
,
2589 gimple_expr_type (stmt
), op0
, op1
);
2592 case GIMPLE_TERNARY_RHS
:
2594 /* Handle ternary operators that can appear in GIMPLE form. */
2595 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2596 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
2597 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
2599 /* Fold embedded expressions in ternary codes. */
2600 if ((subcode
== COND_EXPR
2601 || subcode
== VEC_COND_EXPR
)
2602 && COMPARISON_CLASS_P (op0
))
2604 tree op00
= (*valueize
) (TREE_OPERAND (op0
, 0));
2605 tree op01
= (*valueize
) (TREE_OPERAND (op0
, 1));
2606 tree tem
= fold_binary_loc (loc
, TREE_CODE (op0
),
2607 TREE_TYPE (op0
), op00
, op01
);
2612 return fold_ternary_loc (loc
, subcode
,
2613 gimple_expr_type (stmt
), op0
, op1
, op2
);
/* Call handling: internal functions are not folded; builtins reached
   through an ADDR_EXPR of a FUNCTION_DECL are folded via fold_call_expr
   on valueized arguments.  */
2625 if (gimple_call_internal_p (stmt
))
2626 /* No folding yet for these functions. */
2629 fn
= (*valueize
) (gimple_call_fn (stmt
));
2630 if (TREE_CODE (fn
) == ADDR_EXPR
2631 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
2632 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0)))
2634 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
2637 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2638 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
2639 call
= build_call_array_loc (loc
,
2640 gimple_call_return_type (stmt
),
2641 fn
, gimple_call_num_args (stmt
), args
);
2642 retval
= fold_call_expr (EXPR_LOCATION (call
), call
, false);
2644 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2645 STRIP_NOPS (retval
);
/* NOTE(review): lossy extraction — original lines 2659-2660, 2662 and
   2665-2668 (return type, braces and the two return statements) are
   missing from this chunk; recover them from the original file.  */
2656 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2657 Returns NULL_TREE if folding to a constant is not possible, otherwise
2658 returns a constant according to is_gimple_min_invariant. */
2661 gimple_fold_stmt_to_constant (gimple stmt
, tree (*valueize
) (tree
))
/* Delegate to the _1 worker, then keep the result only when it is a
   gimple minimal invariant (i.e. a real constant).  */
2663 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
2664 if (res
&& is_gimple_min_invariant (res
))
2670 /* The following set of functions are supposed to fold references using
2671 their constant initializers. */
/* Forward declaration; the definition appears after the per-kind helper
   functions below.  The trailing unnamed tree parameter is the
   FROM_DECL argument (see the definition).  */
2673 static tree
fold_ctor_reference (tree type
, tree ctor
,
2674 unsigned HOST_WIDE_INT offset
,
2675 unsigned HOST_WIDE_INT size
, tree
);
/* NOTE(review): lossy extraction — gaps in the embedded original line
   numbers (2685, 2688, 2691, 2693, 2695, 2697-2700, 2704, 2706-2707,
   2713-2714, 2716-2719, 2726, 2728-2730, 2733, 2736-2745) drop the
   return type, braces, several return statements and case labels.
   Recover them from the original file before editing.  */
2677 /* See if we can find constructor defining value of BASE.
2678 When we know the constructor with constant offset (such as
2679 base is array[40] and we do know constructor of array), then
2680 BIT_OFFSET is adjusted accordingly.
2682 As a special case, return error_mark_node when constructor
2683 is not explicitly available, but it is known to be zero
2684 such as 'static const int a;'. */
2686 get_base_constructor (tree base
, HOST_WIDE_INT
*bit_offset
,
2687 tree (*valueize
)(tree
))
2689 HOST_WIDE_INT bit_offset2
, size
, max_size
;
/* Strip a MEM_REF wrapper: fold its constant byte offset into
   *BIT_OFFSET and look through the (possibly valueized) address.  */
2690 if (TREE_CODE (base
) == MEM_REF
)
2692 if (!integer_zerop (TREE_OPERAND (base
, 1)))
2694 if (!host_integerp (TREE_OPERAND (base
, 1), 0))
2696 *bit_offset
+= (mem_ref_offset (base
).low
2701 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
2702 base
= valueize (TREE_OPERAND (base
, 0));
2703 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
2705 base
= TREE_OPERAND (base
, 0);
2708 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2709 DECL_INITIAL. If BASE is a nested reference into another
2710 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2711 the inner reference. */
2712 switch (TREE_CODE (base
))
2715 if (!const_value_known_p (base
))
/* error_mark_node signals "known to be all zeros" to the caller
   (static/extern variable with no explicit initializer).  */
2720 if (!DECL_INITIAL (base
)
2721 && (TREE_STATIC (base
) || DECL_EXTERNAL (base
)))
2722 return error_mark_node
;
2723 /* Do not return an error_mark_node DECL_INITIAL. LTO uses this
2724 as special marker (_not_ zero ...) for its own purposes. */
2725 if (DECL_INITIAL (base
) == error_mark_node
)
2727 return DECL_INITIAL (base
);
/* Nested reference: resolve base and extent, then recurse with the
   accumulated bit offset.  */
2731 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
);
2732 if (max_size
== -1 || size
!= max_size
)
2734 *bit_offset
+= bit_offset2
;
2735 return get_base_constructor (base
, bit_offset
, valueize
);
/* NOTE(review): lossy extraction — original lines 2748, 2750-2751, 2755,
   2760, 2764, 2768-2769, 2771-2772, 2775, 2777-2780 (return type, braces,
   part of the comment and the byte-extraction expression) are missing;
   recover them from the original file.  */
2746 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2747 to the memory at bit OFFSET.
2749 We do only simple job of folding byte accesses. */
2752 fold_string_cst_ctor_reference (tree type
, tree ctor
,
2753 unsigned HOST_WIDE_INT offset
,
2754 unsigned HOST_WIDE_INT size
)
/* Fold only single-byte, byte-aligned reads whose mode matches the
   string's element mode.  */
2756 if (INTEGRAL_TYPE_P (type
)
2757 && (TYPE_MODE (type
)
2758 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
2759 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
2761 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
)))) == 1
2762 && size
== BITS_PER_UNIT
2763 && !(offset
% BITS_PER_UNIT
))
2765 offset
/= BITS_PER_UNIT
;
2766 if (offset
< (unsigned HOST_WIDE_INT
) TREE_STRING_LENGTH (ctor
))
2767 return build_int_cst_type (type
, (TREE_STRING_POINTER (ctor
)
2770 const char a[20]="hello";
2773 might lead to offset greater than string length. In this case we
2774 know value is either initialized to 0 or out of bounds. Return 0
2776 return build_zero_cst (type
);
/* NOTE(review): lossy extraction — gaps in the embedded original line
   numbers (2783-2784, 2788-2789, 2791, 2797, 2802, 2807-2808, 2812-2813,
   2815-2816, 2822-2823, 2828, 2831, 2834, 2838-2839, 2841, 2843, 2845,
   2848-2850, 2853-2854, 2858-2861, 2863, 2866-2868, 2873-2874, 2878-2879)
   drop the return type, a parameter, braces and several statements.
   Recover them from the original file before editing.  */
2781 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2782 SIZE to the memory at bit OFFSET. */
2785 fold_array_ctor_reference (tree type
, tree ctor
,
2786 unsigned HOST_WIDE_INT offset
,
2787 unsigned HOST_WIDE_INT size
,
2790 unsigned HOST_WIDE_INT cnt
;
2792 double_int low_bound
, elt_size
;
2793 double_int index
, max_index
;
2794 double_int access_index
;
2795 tree domain_type
= NULL_TREE
, index_type
= NULL_TREE
;
2796 HOST_WIDE_INT inner_offset
;
2798 /* Compute low bound and elt size. */
2799 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
2800 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
2801 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
2803 /* Static constructors for variably sized objects makes no sense. */
2804 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
2805 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
2806 low_bound
= tree_to_double_int (TYPE_MIN_VALUE (domain_type
));
2809 low_bound
= double_int_zero
;
2810 /* Static constructors for variably sized objects makes no sense. */
2811 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))))
2814 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
2817 /* We can handle only constantly sized accesses that are known to not
2818 be larger than size of array element. */
2819 if (!TYPE_SIZE_UNIT (type
)
2820 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
2821 || elt_size
.slt (tree_to_double_int (TYPE_SIZE_UNIT (type
))))
2824 /* Compute the array index we look for. */
2825 access_index
= double_int::from_uhwi (offset
/ BITS_PER_UNIT
)
2826 .udiv (elt_size
, TRUNC_DIV_EXPR
);
2827 access_index
+= low_bound
;
2829 access_index
= access_index
.ext (TYPE_PRECISION (index_type
),
2830 TYPE_UNSIGNED (index_type
));
2832 /* And offset within the access. */
2833 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
2835 /* See if the array field is large enough to span whole access. We do not
2836 care to fold accesses spanning multiple array indexes. */
2837 if (inner_offset
+ size
> elt_size
.to_uhwi () * BITS_PER_UNIT
)
/* Start the running index one below the low bound so that the first
   NULL-index element advances to LOW_BOUND.  */
2840 index
= low_bound
- double_int_one
;
2842 index
= index
.ext (TYPE_PRECISION (index_type
), TYPE_UNSIGNED (index_type
));
2844 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
2846 /* Array constructor might explicitly set index, or specify range
2847 or leave index NULL meaning that it is next index after previous
2851 if (TREE_CODE (cfield
) == INTEGER_CST
)
2852 max_index
= index
= tree_to_double_int (cfield
);
2855 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
2856 index
= tree_to_double_int (TREE_OPERAND (cfield
, 0));
2857 max_index
= tree_to_double_int (TREE_OPERAND (cfield
, 1));
2862 index
+= double_int_one
;
2864 index
= index
.ext (TYPE_PRECISION (index_type
),
2865 TYPE_UNSIGNED (index_type
));
2869 /* Do we have match? */
2870 if (access_index
.cmp (index
, 1) >= 0
2871 && access_index
.cmp (max_index
, 1) <= 0)
2872 return fold_ctor_reference (type
, cval
, inner_offset
, size
,
2875 /* When memory is not explicitly mentioned in constructor,
2876 it is 0 (or out of range). */
2877 return build_zero_cst (type
);
/* NOTE(review): lossy extraction — gaps in the embedded original line
   numbers (2882-2883, 2887-2888, 2890-2891, 2893-2894, 2902, 2910, 2917,
   2919, 2922, 2928, 2932, 2934, 2936, 2939-2941, 2944-2945) drop the
   return type, a parameter, braces and several return statements.
   Recover them from the original file before editing.  */
2880 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2881 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2884 fold_nonarray_ctor_reference (tree type
, tree ctor
,
2885 unsigned HOST_WIDE_INT offset
,
2886 unsigned HOST_WIDE_INT size
,
2889 unsigned HOST_WIDE_INT cnt
;
/* Walk every initialized field looking for one whose bit range covers
   the requested access.  */
2892 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
2895 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
2896 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
2897 tree field_size
= DECL_SIZE (cfield
);
2898 double_int bitoffset
;
2899 double_int byte_offset_cst
= tree_to_double_int (byte_offset
);
2900 double_int bits_per_unit_cst
= double_int::from_uhwi (BITS_PER_UNIT
);
2901 double_int bitoffset_end
, access_end
;
2903 /* Variable sized objects in static constructors makes no sense,
2904 but field_size can be NULL for flexible array members. */
2905 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
2906 && TREE_CODE (byte_offset
) == INTEGER_CST
2907 && (field_size
!= NULL_TREE
2908 ? TREE_CODE (field_size
) == INTEGER_CST
2909 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
2911 /* Compute bit offset of the field. */
2912 bitoffset
= tree_to_double_int (field_offset
)
2913 + byte_offset_cst
* bits_per_unit_cst
;
2914 /* Compute bit offset where the field ends. */
2915 if (field_size
!= NULL_TREE
)
2916 bitoffset_end
= bitoffset
+ tree_to_double_int (field_size
);
2918 bitoffset_end
= double_int_zero
;
2920 access_end
= double_int::from_uhwi (offset
)
2921 + double_int::from_uhwi (size
);
2923 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2924 [BITOFFSET, BITOFFSET_END)? */
2925 if (access_end
.cmp (bitoffset
, 0) > 0
2926 && (field_size
== NULL_TREE
2927 || double_int::from_uhwi (offset
).slt (bitoffset_end
)))
2929 double_int inner_offset
= double_int::from_uhwi (offset
) - bitoffset
;
2930 /* We do have overlap. Now see if field is large enough to
2931 cover the access. Give up for accesses spanning multiple
2933 if (access_end
.cmp (bitoffset_end
, 0) > 0)
2935 if (double_int::from_uhwi (offset
).slt (bitoffset
))
2937 return fold_ctor_reference (type
, cval
,
2938 inner_offset
.to_uhwi (), size
,
2942 /* When memory is not explicitly mentioned in constructor, it is 0. */
2943 return build_zero_cst (type
);
/* NOTE(review): lossy extraction — gaps in the embedded original line
   numbers (2948-2949, 2952-2954, 2957, 2959, 2961, 2966, 2969-2972,
   2976-2977, 2981-2982, 2984-2989) drop the return type, braces,
   conditions and the FROM_DECL arguments of the two trailing calls.
   Recover them from the original file before editing.  */
2946 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2947 to the memory at bit OFFSET. */
2950 fold_ctor_reference (tree type
, tree ctor
, unsigned HOST_WIDE_INT offset
,
2951 unsigned HOST_WIDE_INT size
, tree from_decl
)
2955 /* We found the field with exact match. */
2956 if (useless_type_conversion_p (type
, TREE_TYPE (ctor
))
2958 return canonicalize_constructor_val (ctor
, from_decl
);
2960 /* We are at the end of walk, see if we can view convert the
2962 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
2963 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2964 && operand_equal_p (TYPE_SIZE (type
),
2965 TYPE_SIZE (TREE_TYPE (ctor
)), 0))
2967 ret
= canonicalize_constructor_val (ctor
, from_decl
);
2968 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
/* Dispatch by constructor kind to the specialized helpers above.  */
2973 if (TREE_CODE (ctor
) == STRING_CST
)
2974 return fold_string_cst_ctor_reference (type
, ctor
, offset
, size
);
2975 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
2978 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
2979 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
2980 return fold_array_ctor_reference (type
, ctor
, offset
, size
,
2983 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
/* NOTE(review): lossy extraction — gaps in the embedded original line
   numbers (2993-2994, 2996, 2999-3000, 3002-3003, 3006, 3008-3010,
   3012-3013, 3021, 3024, 3026-3027, 3036, 3040, 3047-3049, 3051-3052,
   3055-3062, 3064, 3067, 3073, 3075-3077, 3079-3081, 3083-3087,
   3092-3102) drop the return type, braces, case labels, returns and the
   final FROM_DECL arguments.  Recover them from the original file.  */
2990 /* Return the tree representing the element referenced by T if T is an
2991 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
2992 names using VALUEIZE. Return NULL_TREE otherwise. */
2995 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
2997 tree ctor
, idx
, base
;
2998 HOST_WIDE_INT offset
, size
, max_size
;
/* Never fold through a volatile access.  */
3001 if (TREE_THIS_VOLATILE (t
))
3004 if (TREE_CODE_CLASS (TREE_CODE (t
)) == tcc_declaration
)
3005 return get_symbol_constant_value (t
);
3007 tem
= fold_read_from_constant_string (t
);
3011 switch (TREE_CODE (t
))
3014 case ARRAY_RANGE_REF
:
3015 /* Constant indexes are handled well by get_base_constructor.
3016 Only special case variable offsets.
3017 FIXME: This code can't handle nested references with variable indexes
3018 (they will be handled only by iteration of ccp). Perhaps we can bring
3019 get_ref_base_and_extent here and make it use a valueize callback. */
3020 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
3022 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
3023 && TREE_CODE (idx
) == INTEGER_CST
)
3025 tree low_bound
, unit_size
;
3028 /* If the resulting bit-offset is constant, track it. */
3029 if ((low_bound
= array_ref_low_bound (t
),
3030 TREE_CODE (low_bound
) == INTEGER_CST
)
3031 && (unit_size
= array_ref_element_size (t
),
3032 host_integerp (unit_size
, 1))
3033 && (doffset
= (TREE_INT_CST (idx
) - TREE_INT_CST (low_bound
))
3034 .sext (TYPE_PRECISION (TREE_TYPE (idx
))),
3035 doffset
.fits_shwi ()))
3037 offset
= doffset
.to_shwi ();
3038 offset
*= TREE_INT_CST_LOW (unit_size
);
3039 offset
*= BITS_PER_UNIT
;
3041 base
= TREE_OPERAND (t
, 0);
3042 ctor
= get_base_constructor (base
, &offset
, valueize
);
3043 /* Empty constructor. Always fold to 0. */
3044 if (ctor
== error_mark_node
)
3045 return build_zero_cst (TREE_TYPE (t
));
3046 /* Out of bound array access. Value is undefined,
3050 /* We can not determine ctor. */
3053 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
3054 TREE_INT_CST_LOW (unit_size
)
3063 case TARGET_MEM_REF
:
3065 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
);
3066 ctor
= get_base_constructor (base
, &offset
, valueize
);
3068 /* Empty constructor. Always fold to 0. */
3069 if (ctor
== error_mark_node
)
3070 return build_zero_cst (TREE_TYPE (t
));
3071 /* We do not know precise address. */
3072 if (max_size
== -1 || max_size
!= size
)
3074 /* We can not determine ctor. */
3078 /* Out of bound array access. Value is undefined, but don't fold. */
3082 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
,
/* REALPART/IMAGPART of a folded COMPLEX_CST: refold the part.  */
3088 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
3089 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
3090 return fold_build1_loc (EXPR_LOCATION (t
),
3091 TREE_CODE (t
), TREE_TYPE (t
), c
);
/* Convenience wrapper: fold reference T with no SSA valueization
   (NULL callback).  NOTE(review): return type, braces and the original
   comment (lines 3104, 3106-3107) are missing from this extraction.  */
3103 fold_const_aggregate_ref (tree t
)
3105 return fold_const_aggregate_ref_1 (t
, NULL
);
/* NOTE(review): lossy extraction — gaps in the embedded original line
   numbers (3112-3113, 3115, 3117-3118, 3121-3123, 3125, 3128-3131, 3133,
   3135, 3140, 3147, 3152, 3158-3159, 3164-3167) drop the return type,
   braces, several early "return NULL_TREE;" statements and the final
   return.  Recover them from the original file before editing.  */
3108 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3109 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3110 KNOWN_BINFO carries the binfo describing the true type of
3111 OBJ_TYPE_REF_OBJECT(REF). */
3114 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
)
3116 unsigned HOST_WIDE_INT offset
, size
;
3119 vtable
= v
= BINFO_VTABLE (known_binfo
);
3120 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
/* BINFO_VTABLE may be &vtable + byte-offset; fold the offset into
   OFFSET (in bits) and strip down to the vtable VAR_DECL.  */
3124 if (TREE_CODE (v
) == POINTER_PLUS_EXPR
)
3126 offset
= tree_low_cst (TREE_OPERAND (v
, 1), 1) * BITS_PER_UNIT
;
3127 v
= TREE_OPERAND (v
, 0);
3132 if (TREE_CODE (v
) != ADDR_EXPR
)
3134 v
= TREE_OPERAND (v
, 0);
3136 if (TREE_CODE (v
) != VAR_DECL
3137 || !DECL_VIRTUAL_P (v
)
3138 || !DECL_INITIAL (v
)
3139 || DECL_INITIAL (v
) == error_mark_node
)
3141 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
3142 size
= tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))), 1);
3143 offset
+= token
* size
;
/* Read the vtable slot for TOKEN out of the vtable's constant
   initializer.  */
3144 fn
= fold_ctor_reference (TREE_TYPE (TREE_TYPE (v
)), DECL_INITIAL (v
),
3145 offset
, size
, vtable
);
3146 if (!fn
|| integer_zerop (fn
))
3148 gcc_assert (TREE_CODE (fn
) == ADDR_EXPR
3149 || TREE_CODE (fn
) == FDESC_EXPR
)
;
3150 fn
= TREE_OPERAND (fn
, 0);
3151 gcc_assert (TREE_CODE (fn
) == FUNCTION_DECL
);
3153 /* When cgraph node is missing and function is not public, we cannot
3154 devirtualize. This can happen in WHOPR when the actual method
3155 ends up in other partition, because we found devirtualization
3156 possibility too late. */
3157 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
3160 /* Make sure we create a cgraph node for functions we'll reference.
3161 They can be non-existent if the reference comes from an entry
3162 of an external vtable for example. */
3163 cgraph_get_create_node (fn
);
3168 /* Return true iff VAL is a gimple expression that is known to be
3169 non-negative. Restricted to floating-point inputs. */
3172 gimple_val_nonnegative_real_p (tree val
)
3176 gcc_assert (val
&& SCALAR_FLOAT_TYPE_P (TREE_TYPE (val
)));
3178 /* Use existing logic for non-gimple trees. */
3179 if (tree_expr_nonnegative_p (val
))
3182 if (TREE_CODE (val
) != SSA_NAME
)
3185 /* Currently we look only at the immediately defining statement
3186 to make this determination, since recursion on defining
3187 statements of operands can lead to quadratic behavior in the
3188 worst case. This is expected to catch almost all occurrences
3189 in practice. It would be possible to implement limited-depth
3190 recursion if important cases are lost. Alternatively, passes
3191 that need this information (such as the pow/powi lowering code
3192 in the cse_sincos pass) could be revised to provide it through
3193 dataflow propagation. */
3195 def_stmt
= SSA_NAME_DEF_STMT (val
);
3197 if (is_gimple_assign (def_stmt
))
3201 /* See fold-const.c:tree_expr_nonnegative_p for additional
3202 cases that could be handled with recursion. */
3204 switch (gimple_assign_rhs_code (def_stmt
))
3207 /* Always true for floating-point operands. */
3211 /* True if the two operands are identical (since we are
3212 restricted to floating-point inputs). */
3213 op0
= gimple_assign_rhs1 (def_stmt
);
3214 op1
= gimple_assign_rhs2 (def_stmt
);
3217 || operand_equal_p (op0
, op1
, 0))
3224 else if (is_gimple_call (def_stmt
))
3226 tree fndecl
= gimple_call_fndecl (def_stmt
);
3228 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3232 switch (DECL_FUNCTION_CODE (fndecl
))
3234 CASE_FLT_FN (BUILT_IN_ACOS
):
3235 CASE_FLT_FN (BUILT_IN_ACOSH
):
3236 CASE_FLT_FN (BUILT_IN_CABS
):
3237 CASE_FLT_FN (BUILT_IN_COSH
):
3238 CASE_FLT_FN (BUILT_IN_ERFC
):
3239 CASE_FLT_FN (BUILT_IN_EXP
):
3240 CASE_FLT_FN (BUILT_IN_EXP10
):
3241 CASE_FLT_FN (BUILT_IN_EXP2
):
3242 CASE_FLT_FN (BUILT_IN_FABS
):
3243 CASE_FLT_FN (BUILT_IN_FDIM
):
3244 CASE_FLT_FN (BUILT_IN_HYPOT
):
3245 CASE_FLT_FN (BUILT_IN_POW10
):
3248 CASE_FLT_FN (BUILT_IN_SQRT
):
3249 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3250 nonnegative inputs. */
3251 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val
))))
3256 CASE_FLT_FN (BUILT_IN_POWI
):
3257 /* True if the second argument is an even integer. */
3258 arg1
= gimple_call_arg (def_stmt
, 1);
3260 if (TREE_CODE (arg1
) == INTEGER_CST
3261 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
3266 CASE_FLT_FN (BUILT_IN_POW
):
3267 /* True if the second argument is an even integer-valued
3269 arg1
= gimple_call_arg (def_stmt
, 1);
3271 if (TREE_CODE (arg1
) == REAL_CST
)
3276 c
= TREE_REAL_CST (arg1
);
3277 n
= real_to_integer (&c
);
3281 REAL_VALUE_TYPE cint
;
3282 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
3283 if (real_identical (&c
, &cint
))