/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2013 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-flow.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node snode;

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || !DECL_EXTERNAL (from_decl)
      || (flag_ltrans
	  && symtab_get_node (from_decl)->symbol.in_other_partition))
    return true;
  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && (!(snode = symtab_get_node (decl))
	  || !snode->symbol.in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;

  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in units where they are used, and when the other unit was
     compiled with LTO it is possible that the vtable was kept public
     while the function itself was privatized.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;

  /* OK, we are seeing either a COMDAT or a static variable.  In this case we
     must check that the definition is still around so we can refer to it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we didn't take
	 the decision to eliminate the offline copy of the function yet.
	 The second is important when devirtualization happens during final
	 compilation stage when making a new reference no longer forces the
	 callee to be compiled.  */
      if (!node || !node->symbol.definition || node->global.inlined_to)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->symbol.definition)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  return true;
}
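
/* Illustrative example (hypothetical translation units): a vtable
   initializer folded in one unit may point at a COMDAT method keyed to
   another unit, e.g.

     // unit1.C
     struct A { virtual ~A () {} };   // ~A is COMDAT, keyed to unit1.
     // unit2.C folds a load from A's vtable and asks whether it may
     // emit a direct reference to A::~A.

   If unit2 has already decided not to output the COMDAT body, the
   predicate above answers false and the folding is abandoned.  */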
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into a form acceptable for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_get_create_real_symbol_node (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }

  return orig_cval;
}
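
/* For example (illustrative), given the hypothetical globals

     static int a[4];
     int *p = &a[0] + 1;

   the initializer value "&a[0] p+ 4" is rewritten above into the
   equivalent "&MEM[&a + 4]" form that is_gimple_min_invariant
   accepts.  */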
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
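
/* Typical use (illustrative): constant propagation asks for the value of
   a load from a constant symbol, e.g. given

     static const int limit = 42;

   a use of 'limit' folds to 42, while a 'const' scalar declared without
   an initializer that may not be overridden at link or run time folds
   to zero via build_zero_cst above.  */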
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
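
/* Illustrative examples of the foldings above (hypothetical GIMPLE):

     REALPART_EXPR <__complex__ (1.0, 2.0)>  -> 1.0  via fold_unary_loc
     MEM[&a + 0]                             -> a    (compatible types)
     *&b                                     -> b    (after propagation)

   After each successful rewrite the function recurses on the whole EXPR
   so that enclosing component references can fold further.  */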
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
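
/* For instance (illustrative), with a one-bit precision _Bool x the
   GIMPLE_BINARY_RHS case above canonicalizes

     tmp = (x == 0);   ->   tmp = ~x;
     tmp = (x != 0);   ->   tmp = x;

   and for wider boolean-compatible types uses x ^ 1 for the negation.  */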
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
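
/* Illustrative: after constant propagation a condition such as

     if (1 != 0) goto <bb 3>; else goto <bb 4>;

   has its predicate folded to a constant true here.  Only the condition
   is rewritten; removing the dead edge is left to the callers.  */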
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  struct gimplify_ctx gctx;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (&gctx);
  gctx.into_ssa = gimple_in_ssa_p (cfun);

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
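
/* Sketch of the virtual operand wiring above (illustrative dump
   notation).  Replacing

     # .MEM_3 = VDEF <.MEM_1>
     memcpy (dst, src, 8);

   with two stores yields

     # .MEM_5 = VDEF <.MEM_1>      first VUSE is the call's VUSE
     MEM[dst] = tmp1;
     # .MEM_3 = VDEF <.MEM_5>      last VDEF reuses the call's VDEF
     MEM[dst + 4] = tmp2;

   so the surrounding use-def chains stay valid without renaming.  */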
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  /* If we already have a length, it must match.  */
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
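
/* Illustrative: for the hypothetical GIMPLE

     q_2 = cond_4 ? "bar" : "baz";
     p_1 = PHI <"foo"(2), q_2(3)>

   get_maxval_strlen (p_1, &len, visited, 0) walks both PHI arguments
   and the COND_EXPR above, and succeeds only because every reaching
   string has the same length 3.  */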
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2], ignore,
					   DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
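
/* For example (illustrative), when get_maxval_strlen proves the source
   length, the dispatch above lets fold_builtin_strcpy rewrite

     strcpy (buf, "abc");   ->   memcpy (buf, "abc", 4);

   i.e. the result may be another call rather than a constant.  */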
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (types_same_for_odr (type, expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;

  return binfo;
}
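
/* Illustrative: given a hypothetical

     struct B { virtual int f (); } b;   // no base classes

   an OBJ_TYPE_REF whose object is &b yields B's binfo here, which is
   usable because BINFO_N_BASE_BINFOS is 0; with ancestors we give up
   as above.  */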
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else
	{
	  tree obj = OBJ_TYPE_REF_OBJECT (callee);
	  tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
	  if (binfo)
	    {
	      HOST_WIDE_INT token
		= TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
	      tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
	      if (fndecl)
		{
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		}
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
      else if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
	changed |= targetm.gimple_fold_builtin (gsi);
    }

  return changed;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
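
/* Usage sketch (illustrative): a propagation pass that substituted an
   address, e.g. turning "*p_1 = 0" into "*&x = 0", calls
   fold_stmt_inplace (&gsi) to recover "x = 0" without the statement
   being replaced, whereas fold_stmt may build a whole new statement.  */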
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
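
/* Behavior summary (illustrative, ignoring floating-point caveats of
   invert_tree_comparison):

     canonicalize_bool (a_1 < b_2, false)  -> a_1 < b_2
     canonicalize_bool (a_1 < b_2, true)   -> a_1 >= b_2
     canonicalize_bool (x_3, true)         -> x_3 == 0
     canonicalize_bool (1, true)           -> boolean_false_node

   The result, when non-null, always has boolean type.  */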
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }

  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     return that.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
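
/* For example (illustrative):

     maybe_fold_and_comparisons (LT_EXPR, x, 5, LT_EXPR, x, 3)
       -> x < 3                   (more restrictive bound)
     maybe_fold_and_comparisons (LT_EXPR, x, 3, GT_EXPR, x, 5)
       -> boolean_false_node      (disjoint ranges)

   Trying both operand orders compensates for and_comparisons_1 only
   recursing into the first comparison's SSA definitions.  */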
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
1972 /* Try to simplify the OR of the ssa variable defined by the assignment
1973 STMT with the comparison specified by (OP2A CODE2 OP2B).
1974 Return NULL_EXPR if we can't simplify this to a single expression. */
1977 or_var_with_comparison_1 (gimple stmt
,
1978 enum tree_code code2
, tree op2a
, tree op2b
)
1980 tree var
= gimple_assign_lhs (stmt
);
1981 tree true_test_var
= NULL_TREE
;
1982 tree false_test_var
= NULL_TREE
;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_true_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 code2, op2a, op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
         inner1 OR (inner1 AND inner2) => inner1
         !inner1 OR (inner1 OR inner2) => true
         !inner1 OR (inner1 AND inner2) => !inner1 OR inner2  */
      if (inner1 == true_test_var)
        return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
         Compute the first partial result, (inner1 OR (op2a code op2b)).  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (t OR inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_or)
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (integer_zerop (t))
                return inner2;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner2 OR (op2a code op2b)))  */
          else if (integer_zerop (t))
            return boolean_false_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 OR (op2a code op2b)).  */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (inner1 OR t)
             => (t OR partial)  */
          if (is_or)
            {
              if (integer_zerop (t))
                return inner1;
              else if (integer_onep (t))
                return boolean_true_node;
              /* If both are the same, we can apply the identity
                 (x OR x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner1 OR (op2a code2 op2b)))
             => (t AND partial)  */
          else
            {
              if (integer_zerop (t))
                return boolean_false_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed AND expression.  The
                     interesting case is when at least one is true.
                     Or, if both are the same, we can apply the identity
                     (x AND x) == x.  */
                  if (integer_onep (partial))
                    return t;
                  else if (integer_onep (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
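/* Illustrative example, not part of the original sources: with boolean
   SSA names defined as

     a_1 = x_2 < 3;
     b_3 = a_1 | c_4;

   asking whether b_3 ORed with the comparison (x_2 >= 3) simplifies
   first folds the inner pair (x_2 < 3) || (x_2 >= 3) to true through
   maybe_fold_or_comparisons, so the whole disjunction collapses to
   boolean_true_node without creating any intermediate expression.  */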
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1, code2,
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }
  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }
  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
         return that or TRUE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
            }
        }
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
            }
        }
      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == EQ_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }
      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }
      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LT_EXPR && code2 == GT_EXPR)
                   || (code1 == GT_EXPR && code2 == LT_EXPR)))
        return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_true_node;
      else if (cmp <= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_true_node;
    }
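  /* Illustrative examples, not part of the original sources, of the
     constant-merging rules above, for an integer x:

       (x != 3) || (x < 7)   =>  true       3 itself satisfies x < 7
       (x < 3)  || (x < 7)   =>  x < 7      keep the weaker bound
       (x < 3)  || (x > 3)   =>  x != 3     singleton range
       (x <= 5) || (x >= 2)  =>  true       nothing is excluded  */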
  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ORed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     return that.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
                        {
                          if (!result)
                            result = boolean_true_node;
                          else if (!integer_onep (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = or_var_with_comparison (arg, invert, code2,
                                                     op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }

        default:
          break;
        }
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
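/* Illustrative usage, not part of the original sources: a caller merging
   two guards on the same integer variable X of type TYPE might write

     tree t = maybe_fold_or_comparisons (LT_EXPR, x, build_int_cst (type, 3),
                                         GT_EXPR, x, build_int_cst (type, 3));

   and get back the tree for (x != 3); a NULL_TREE result means the
   disjunction cannot be expressed without new intermediate values.
   Trying both argument orders matters because or_comparisons_1 only
   follows SSA definitions reachable from its first comparison.  */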
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant tree, or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */
tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, *vec;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }
          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for restrict qualification
                 of pointer types should not apply here.
                 Substitution later will only substitute to allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
                     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
                  && TYPE_MODE (TREE_TYPE (lhs))
                     == TYPE_MODE (TREE_TYPE (op0)))
                return op0;

              return
                fold_unary_ignore_overflow_loc (loc, subcode,
                                                gimple_expr_type (stmt), op0);
            }
          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

              /* Translate &x + CST into an invariant form suitable for
                 further propagation.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree off = fold_convert (ptr_type_node, op1);
                  return build_fold_addr_expr_loc
                           (loc,
                            fold_build2 (MEM_REF,
                                         TREE_TYPE (TREE_TYPE (op0)),
                                         unshare_expr (op0), off));
                }

              return fold_binary_loc (loc, subcode,
                                      gimple_expr_type (stmt), op0, op1);
            }
          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

              /* Fold embedded expressions in ternary codes.  */
              if ((subcode == COND_EXPR
                   || subcode == VEC_COND_EXPR)
                  && COMPARISON_CLASS_P (op0))
                {
                  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
                  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
                  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
                                              TREE_TYPE (op0), op00, op01);
                  if (tem)
                    op0 = tem;
                }

              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;
        if (gimple_call_internal_p (stmt))
          /* No folding yet for these functions.  */
          return NULL_TREE;

        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            call = build_call_array_loc (loc,
                                         gimple_call_return_type (stmt),
                                         fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (EXPR_LOCATION (call), call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
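/* Illustrative example, not part of the original sources: with a VALUEIZE
   callback mapping x_1 to the INTEGER_CST 6, the statement

     y_2 = x_1 * 7;

   takes the GIMPLE_BINARY_RHS path above and folds to the INTEGER_CST 42;
   if an operand does not valueize to something foldable, the result is
   simply NULL_TREE and the statement is left alone.  */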
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
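/* Illustrative usage, not part of the original sources; the callback name
   below is hypothetical.  With a trivial valueize hook

     static tree
     identity_valueize (tree name) { return name; }

   a pass can request only genuine constants:

     tree cst = gimple_fold_stmt_to_constant (stmt, identity_valueize);

   Simplified but non-constant results of the _1 worker are filtered out
   by the is_gimple_min_invariant check above.  */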
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
                                 unsigned HOST_WIDE_INT offset,
                                 unsigned HOST_WIDE_INT size, tree);
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
                      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
        {
          if (!host_integerp (TREE_OPERAND (base, 1), 0))
            return NULL_TREE;
          *bit_offset += (mem_ref_offset (base).low
                          * BITS_PER_UNIT);
        }

      if (valueize
          && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
        base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
        tree init = ctor_for_folding (base);

        /* Our semantic is exact opposite of ctor_for_folding;
           NULL means unknown, while error_mark_node is 0.  */
        if (init == error_mark_node)
          return NULL_TREE;
        if (!init)
          return error_mark_node;
        return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
        return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
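/* Illustrative example, not part of the original sources: for

     static const int a[40] = { 1, 2, 3 };

   a MEM_REF of &a with byte offset 8 resolves to a's CONSTRUCTOR with
   *BIT_OFFSET increased by 64, while 'static const int b;' has no
   explicit initializer and yields error_mark_node, i.e. "known zero".  */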
/* CTOR is STRING_CST.  Fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.

   We only do the simple job of folding byte accesses.  */

static tree
fold_string_cst_ctor_reference (tree type, tree ctor,
                                unsigned HOST_WIDE_INT offset,
                                unsigned HOST_WIDE_INT size)
{
  if (INTEGRAL_TYPE_P (type)
      && (TYPE_MODE (type)
          == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
          == MODE_INT)
      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
      && size == BITS_PER_UNIT
      && !(offset % BITS_PER_UNIT))
    {
      offset /= BITS_PER_UNIT;
      if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
        return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
                                          [offset]));
      /* Folding
           const char a[20] = "hello";
           return a[10];
         might lead to offset greater than string length.  In this case we
         know value is either initialized to 0 or out of bounds.  Return 0
         in both cases.  */
      return build_zero_cst (type);
    }
  return NULL_TREE;
}
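/* Illustrative example, not part of the original sources: with

     static const char a[20] = "hello";

   a one-byte read of a[1] folds to the character constant 'e', and
   a[10] folds to 0: it lies beyond TREE_STRING_LENGTH, where the array
   is known to be zero-initialized.  */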
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
                           unsigned HOST_WIDE_INT offset,
                           unsigned HOST_WIDE_INT size,
                           tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
              == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
                 .udiv (elt_size, TRUNC_DIV_EXPR);
  access_index += low_bound;
  if (index_type)
    access_index = access_index.ext (TYPE_PRECISION (index_type),
                                     TYPE_UNSIGNED (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - double_int_one;
  if (index_type)
    index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify a range,
         or leave index NULL meaning that it is next index after previous
         one.  */
      if (cfield)
        {
          if (TREE_CODE (cfield) == INTEGER_CST)
            max_index = index = tree_to_double_int (cfield);
          else
            {
              gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
              index = tree_to_double_int (TREE_OPERAND (cfield, 0));
              max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
            }
        }
      else
        {
          index += double_int_one;
          if (index_type)
            index = index.ext (TYPE_PRECISION (index_type),
                               TYPE_UNSIGNED (index_type));
          max_index = index;
        }

      /* Do we have a match?  */
      if (access_index.cmp (index, 1) >= 0
          && access_index.cmp (max_index, 1) <= 0)
        return fold_ctor_reference (type, cval, inner_offset, size,
                                    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
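/* Illustrative example, not part of the original sources: folding a[2] from

     static const int a[4] = { 10, 20, 30, 40 };

   on a target with 32-bit int arrives with OFFSET == 64, so
   access_index == (64 / BITS_PER_UNIT) / 4 == 2, inner_offset == 0, and
   the walk over the CONSTRUCTOR returns the INTEGER_CST 30.  */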
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
                              unsigned HOST_WIDE_INT offset,
                              unsigned HOST_WIDE_INT size,
                              tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
                            cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
      double_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
         but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
                  && TREE_CODE (byte_offset) == INTEGER_CST
                  && (field_size != NULL_TREE
                      ? TREE_CODE (field_size) == INTEGER_CST
                      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = tree_to_double_int (field_offset)
                  + byte_offset_cst * bits_per_unit_cst;
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
        bitoffset_end = bitoffset + tree_to_double_int (field_size);
      else
        bitoffset_end = double_int_zero;

      access_end = double_int::from_uhwi (offset)
                   + double_int::from_uhwi (size);

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
         [BITOFFSET, BITOFFSET_END)?  */
      if (access_end.cmp (bitoffset, 0) > 0
          && (field_size == NULL_TREE
              || double_int::from_uhwi (offset).slt (bitoffset_end)))
        {
          double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
          /* We do have overlap.  Now see if field is large enough to
             cover the access.  Give up for accesses spanning multiple
             fields.  */
          if (access_end.cmp (bitoffset_end, 0) > 0)
            return NULL_TREE;
          if (double_int::from_uhwi (offset).slt (bitoffset))
            return NULL_TREE;
          return fold_ctor_reference (type, cval,
                                      inner_offset.to_uhwi (), size,
                                      from_decl);
        }
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
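/* Illustrative example, not part of the original sources: for

     static const struct { int x; int y; } p = { 1, 2 };

   a 32-bit read of p.y has OFFSET == 32, which overlaps the bit range
   [32, 64) of field y exactly and folds to 2; an access straddling x
   and y fails the "spans multiple fields" check above instead.  */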
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
                     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of the walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && operand_equal_p (TYPE_SIZE (type),
                          TYPE_SIZE (TREE_TYPE (ctor)), 0))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
        STRIP_NOPS (ret);
      return ret;
    }
  if (TREE_CODE (ctor) == STRING_CST)
    return fold_string_cst_ctor_reference (type, ctor, offset, size);
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
        return fold_array_ctor_reference (type, ctor, offset, size,
                                          from_decl);
      else
        return fold_nonarray_ctor_reference (type, ctor, offset, size,
                                             from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
         Only special case variable offsets.
         FIXME: This code can't handle nested references with variable indexes
         (they will be handled only by iteration of ccp).  Perhaps we can bring
         get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
          && valueize
          && (idx = (*valueize) (TREE_OPERAND (t, 1)))
          && TREE_CODE (idx) == INTEGER_CST)
        {
          tree low_bound, unit_size;
          double_int doffset;

          /* If the resulting bit-offset is constant, track it.  */
          if ((low_bound = array_ref_low_bound (t),
               TREE_CODE (low_bound) == INTEGER_CST)
              && (unit_size = array_ref_element_size (t),
                  host_integerp (unit_size, 1))
              && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
                            .sext (TYPE_PRECISION (TREE_TYPE (idx))),
                  doffset.fits_shwi ()))
            {
              offset = doffset.to_shwi ();
              offset *= TREE_INT_CST_LOW (unit_size);
              offset *= BITS_PER_UNIT;

              base = TREE_OPERAND (t, 0);
              ctor = get_base_constructor (base, &offset, valueize);
              /* Empty constructor.  Always fold to 0.  */
              if (ctor == error_mark_node)
                return build_zero_cst (TREE_TYPE (t));
              /* Out of bound array access.  Value is undefined,
                 but don't fold.  */
              if (offset < 0)
                return NULL_TREE;
              /* We cannot determine ctor.  */
              if (!ctor)
                return NULL_TREE;
              return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
                                          TREE_INT_CST_LOW (unit_size)
                                          * BITS_PER_UNIT,
                                          base);
            }
        }
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
        return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
        return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
        return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
        return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
                                  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_const_aggregate_ref_1, but with no valueize callback;
   SSA names are left untouched.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
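/* Illustrative example, not part of the original sources: given

     static const int a[2][2] = { { 1, 2 }, { 3, 4 } };

   fold_const_aggregate_ref on the tree for a[1][0] finds the base
   VAR_DECL's initializer via get_base_constructor, accumulates a bit
   offset of 64 (with 32-bit int), and returns the INTEGER_CST 3.  */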
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  unsigned HOST_WIDE_INT offset, size;
  tree v, fn, vtable;

  vtable = v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
      v = TREE_OPERAND (v, 0);
    }
  else
    offset = 0;

  if (TREE_CODE (v) != ADDR_EXPR)
    return NULL_TREE;
  v = TREE_OPERAND (v, 0);

  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v)
      || !DECL_INITIAL (v)
      || DECL_INITIAL (v) == error_mark_node)
    return NULL_TREE;
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
  offset += token * size;
  fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
                            offset, size, vtable);
  if (!fn || integer_zerop (fn))
    return NULL_TREE;
  gcc_assert (TREE_CODE (fn) == ADDR_EXPR
              || TREE_CODE (fn) == FDESC_EXPR);
  fn = TREE_OPERAND (fn, 0);
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);

  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (fn, vtable))
    return NULL_TREE;

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
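/* Illustrative example, not part of the original sources: with 64-bit
   vtable slots and TOKEN == 3, the fold above reads SIZE == 64 bits at
   OFFSET + 3 * 64 within DECL_INITIAL of the vtable and expects the
   ADDR_EXPR of a FUNCTION_DECL there, i.e. the fourth virtual method
   slot.  */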
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
         cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
        {
        case ABS_EXPR:
          /* Always true for floating-point operands.  */
          return true;

        case MULT_EXPR:
          /* True if the two operands are identical (since we are
             restricted to floating-point inputs).  */
          op0 = gimple_assign_rhs1 (def_stmt);
          op1 = gimple_assign_rhs2 (def_stmt);

          if (op0 == op1
              || operand_equal_p (op0, op1, 0))
            return true;

        default:
          break;
        }
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
        {
          tree arg1;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
              return true;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
                 negative inputs.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POWI):
              /* True if the second argument is an even integer.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == INTEGER_CST
                  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POW):
              /* True if the second argument is an even integer-valued
                 real.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == REAL_CST)
                {
                  REAL_VALUE_TYPE c;
                  HOST_WIDE_INT n;

                  c = TREE_REAL_CST (arg1);
                  n = real_to_integer (&c);

                  if ((n & 1) == 0)
                    {
                      REAL_VALUE_TYPE cint;
                      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
                      if (real_identical (&c, &cint))
                        return true;
                    }
                }

              break;

            default:
              break;
            }
        }
    }

  return false;
}
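/* Illustrative examples, not part of the original sources: the builtin
   cases above classify powi (x, 4) as non-negative for any real x
   because the exponent is even, reject powi (x, 3), and accept
   pow (x, 2.0) because 2.0 is an even integer-valued REAL_CST.  */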