/* Statement simplification on GIMPLE.
   Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "dumpfile.h"
#include "tree-flow.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "gimple-fold.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to another compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred by external vtables that
        we devirtualize only during final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */
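/* For instance (illustrative; `Foo' is a hypothetical class): a C++
   vtable emitted in this unit may contain &Foo::bar while Foo's methods
   are keyed to another unit; the FUNCTION_DECL for bar is then both
   TREE_STATIC and DECL_EXTERNAL and may have no body available here.  */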
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node snode;

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || !DECL_EXTERNAL (from_decl)
      || (flag_ltrans
          && symtab_get_node (from_decl)->symbol.in_other_partition))
    return true;
  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;
  /* Weakrefs have somewhat confusing DECL_EXTERNAL flag set; they
     are always safe.  */
  if (DECL_EXTERNAL (decl)
      && lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to another compilation unit.  The other compilation
     unit may be in a separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && (!(snode = symtab_get_node (decl))
          || !snode->symbol.in_other_partition))
    return false;
  /* When function is public, we always can introduce a new reference.
     The exception are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used, and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;

  /* OK, we are seeing either a COMDAT or a static variable.  In this case we
     must check that the definition is still around so we can refer it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we haven't yet
         made the decision to eliminate the offline copy of the function.
         The second is important when devirtualization happens during the
         final compilation stage, when making a new reference no longer
         makes the callee to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
        {
          gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
          return false;
        }
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->analyzed)
        {
          gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
          return false;
        }
    }
  return true;
}
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */
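/* As an illustrative sketch (`a' is a hypothetical global): an
   initializer of the form

     &a.b[3] p+ 4            (POINTER_PLUS_EXPR with constant offset)

   is rewritten below into

     &MEM[&a.b[3], 4]

   which is acceptable to is_gimple_min_invariant.  */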
tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
           || TREE_CODE (base) == FUNCTION_DECL)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_get_create_node (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }

  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */
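/* Illustrative example: given

     static const int x = 42;

   a use of `x' folds to 42 here; and a 'const'-qualified integral or
   floating-point variable without an initializer that cannot be
   overridden at link or run time folds to zero (`x' is hypothetical).  */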
tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          val = canonicalize_constructor_val (val, sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
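/* Illustrative examples of the simplifications done below:
   REALPART_EXPR of a complex constant folds to its constant real part,
   and MEM[&a, 0] folds back to the plain reference `a' when the
   volatility and types match (`a' is a hypothetical variable).  */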
static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
                              TREE_OPERAND (*t, 0),
                              TREE_OPERAND (*t, 1));
      if (tem)
        {
          TREE_THIS_VOLATILE (tem) = volatile_p;
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
          == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
          == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
         from the rhs to the lhs if is_lhs, but we don't have the
         rhs here to verify that.  Thus require strict type
         compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
                             TREE_TYPE (TREE_OPERAND
                                          (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
        return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
        {
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */
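/* As an illustrative sketch of the GIMPLE_BINARY_RHS case below, for a
   boolean-typed _1 the comparison canonicalization rewrites

     _2 = _1 == 0;   into   _2 = ~_1;       (one-bit precision type)
     _2 = _1 == 0;   into   _2 = _1 ^ 1;    (wider boolean type)

   (_1 and _2 are hypothetical SSA names.)  */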
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return unshare_expr (get_symbol_constant_value (rhs));

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
         X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
          || gimple_assign_rhs_code (stmt) == NE_EXPR)
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree op1 = gimple_assign_rhs1 (stmt);
          tree op2 = gimple_assign_rhs2 (stmt);
          tree type = TREE_TYPE (op1);

          /* Check whether the comparison operands are of the same boolean
             type as the result type is.
             Check that second operand is an integer-constant with value
             one or zero.  */
          if (TREE_CODE (op2) == INTEGER_CST
              && (integer_zerop (op2) || integer_onep (op2))
              && useless_type_conversion_p (TREE_TYPE (lhs), type))
            {
              enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
              bool is_logical_not = false;

              /* X == 0 and X != 1 is a logical-not of X
                 X == 1 and X != 0 is X  */
              if ((cmp_code == EQ_EXPR && integer_zerop (op2))
                  || (cmp_code == NE_EXPR && integer_onep (op2)))
                is_logical_not = true;

              if (is_logical_not == false)
                result = op1;
              /* Only for one-bit precision typed X the transformation
                 !X -> ~X is valid.  */
              else if (TYPE_PRECISION (type) == 1)
                result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
                                     type, op1);
              /* Otherwise we use !X -> X ^ 1.  */
              else
                result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
                                     type, op1, build_int_cst (type, 1));
            }
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
        {
          tree op0 = gimple_assign_rhs1 (stmt);
          tree tem;
          bool set = false;
          location_t cond_loc = gimple_location (stmt);

          if (COMPARISON_CLASS_P (op0))
            {
              fold_defer_overflow_warnings ();
              tem = fold_binary_loc (cond_loc,
                                     TREE_CODE (op0), TREE_TYPE (op0),
                                     TREE_OPERAND (op0, 0),
                                     TREE_OPERAND (op0, 1));
              /* This is actually a conditional expression, not a GIMPLE
                 conditional statement, however, the valid_gimple_rhs_p
                 test still applies.  */
              set = (tem && is_gimple_condexpr (tem)
                     && valid_gimple_rhs_p (tem));
              fold_undefer_overflow_warnings (set, stmt, 0);
            }
          else if (is_gimple_min_invariant (op0))
            {
              tem = op0;
              set = true;
            }
          else
            return NULL_TREE;

          if (set)
            result = fold_build3_loc (cond_loc, COND_EXPR,
                                      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
                                      gimple_assign_rhs2 (stmt),
                                      gimple_assign_rhs3 (stmt));
        }

      if (!result)
        result = fold_ternary_loc (loc, subcode,
                                   TREE_TYPE (gimple_assign_lhs (stmt)),
                                   gimple_assign_rhs1 (stmt),
                                   gimple_assign_rhs2 (stmt),
                                   gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */
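/* Illustrative sketch: if a call `tmp_3 = foo (x_1);' folds to the
   expression `x_1 + 1', the gimplified sequence is inserted before the
   call and the call itself becomes `tmp_3 = x_1 + 1;', with the original
   statement's VUSE/VDEF reused at the sequence boundaries (foo, tmp_3
   and x_1 are hypothetical names).  */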
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  struct gimplify_ctx gctx;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (&gctx);
  gctx.into_ssa = gimple_in_ssa_p (cfun);

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), true);
          return;
        }
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
         name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
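/* Illustrative examples: for ARG = "foo" and TYPE == 0 the length 3 is
   stored in *LENGTH; for a PHI merging "ab" and "cdef" with TYPE == 1,
   *LENGTH becomes 4 (the maximum); with TYPE == 2 the walk instead
   tracks the maximum integer value ARG can take.  */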
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
        else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
          {
            tree op2 = gimple_assign_rhs2 (def_stmt);
            tree op3 = gimple_assign_rhs3 (def_stmt);
            return get_maxval_strlen (op2, length, visited, type)
                   && get_maxval_strlen (op3, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          unsigned i;

          for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
            {
              tree arg = gimple_phi_arg (def_stmt, i)->def;

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == gimple_phi_result (def_stmt))
                continue;

              if (!get_maxval_strlen (arg, length, visited, type))
                return false;
            }
        }
        return true;

      default:
        return false;
    }
}
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
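/* Illustrative example: with the source length known,

     strcpy (dst, "abc");

   may be rewritten to the equivalent of `memcpy (dst, "abc", 4);' by
   the fold_builtin_* routines invoked below (`dst' is hypothetical).  */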
tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     folded.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (CONVERT_EXPR_P (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2], ignore,
                                           DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */
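/* Illustrative sketch: for a hypothetical `static Foo foo;' where Foo
   has no base classes, passing the ADDR_EXPR `&foo' here yields
   TYPE_BINFO (Foo), which callers can use to look up virtual methods.  */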
tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
        break;
      if (offset < 0)
        return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        {
          if (TREE_CODE (fld) != FIELD_DECL)
            continue;

          pos = int_bit_position (fld);
          size = tree_low_cst (DECL_SIZE (fld), 1);
          if (pos <= offset && (pos + size) > offset)
            break;
        }
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
        return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;

  return binfo;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */
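/* Illustrative sketch: a virtual call through OBJ_TYPE_REF whose object
   is a known constant, roughly `OBJ_TYPE_REF (_2; &foo -> 1) (&foo)',
   can be rewritten below into a direct call to the method found at
   vtable token 1 (`foo' and `_2' are hypothetical names).  */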
static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
        tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
        if (tmp)
          {
            gimple_call_set_arg (stmt, i, tmp);
            changed = true;
          }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
        {
          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
          changed = true;
        }
      else
        {
          tree obj = OBJ_TYPE_REF_OBJECT (callee);
          tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
          if (binfo)
            {
              HOST_WIDE_INT token
                = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
              tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
              if (fndecl)
                {
                  gimple_call_set_fndecl (stmt, fndecl);
                  changed = true;
                }
            }
        }
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
        {
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
    }

  return changed;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */
static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        enum tree_code subcode = gimple_assign_rhs_code (stmt);
        tree lhs = gimple_assign_lhs (stmt);
        tree new_rhs;
        /* First canonicalize operand order.  This avoids building new
           trees if this is the only thing fold would later do.  */
        if ((commutative_tree_code (subcode)
             || commutative_ternary_tree_code (subcode))
            && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                     gimple_assign_rhs2 (stmt), false))
          {
            tree tem = gimple_assign_rhs1 (stmt);
            gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
            gimple_assign_set_rhs2 (stmt, tem);
            changed = true;
          }
        new_rhs = fold_gimple_assign (gsi);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
        size_t noutputs;
        const char **oconstraints;
        const char *constraint;
        bool allows_mem, allows_reg;

        noutputs = gimple_asm_noutputs (stmt);
        oconstraints = XALLOCAVEC (const char *, noutputs);

        for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
          {
            tree link = gimple_asm_output_op (stmt, i);
            tree op = TREE_VALUE (link);
            oconstraints[i]
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, true)) != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
        for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
          {
            tree link = gimple_asm_input_op (stmt, i);
            tree op = TREE_VALUE (link);
            constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
                   != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree val = gimple_debug_bind_get_value (stmt);
          if (val
              && REFERENCE_CLASS_P (val))
            {
              tree tem = maybe_fold_reference (val, false);
              if (tem)
                {
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
          else if (val
                   && TREE_CODE (val) == ADDR_EXPR)
            {
              tree ref = TREE_OPERAND (val, 0);
              tree tem = maybe_fold_reference (ref, false);
              if (tem)
                {
                  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
        }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */
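/* Illustrative examples: canonicalize_bool (x < y, true) yields the
   boolean comparison x >= y, while canonicalize_bool (n_5, false) for a
   non-boolean SSA name yields n_5 != 0 (x, y and n_5 hypothetical).  */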
static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }
  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
                                  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2, op2a, op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
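/* Illustrative examples (with a hypothetical x):
     (x <= 3) AND (x < 7)   simplifies to  x <= 3
     (x <= 3) AND (x >= 3)  becomes        x == 3
     (x < 3)  AND (x > 7)   is             false  */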
static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1, code2,
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
         return that or FALSE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
              else
                return boolean_false_node;
            }
        }
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
              else
                return boolean_false_node;
            }
        }

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp != 0); break;
            case NE_EXPR: val = (cmp == 0); break;
            case LT_EXPR: val = (cmp >= 0); break;
            case GT_EXPR: val = (cmp <= 0); break;
            case LE_EXPR: val = (cmp > 0); break;
            case GE_EXPR: val = (cmp < 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == NE_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp <= 0); break;
            case GT_EXPR: val = (cmp >= 0); break;
            case LE_EXPR: val = (cmp < 0); break;
            case GE_EXPR: val = (cmp > 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LE_EXPR && code2 == GE_EXPR)
                   || (code1 == GE_EXPR && code2 == LE_EXPR)))
        return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_false_node;
      else if (cmp >= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ANDed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
                        {
                          if (!result)
                            result = boolean_false_node;
                          else if (!integer_zerop (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = and_var_with_comparison (arg, invert, code2,
                                                      op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }

        default:
          break;
        }
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
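/* Usage sketch (illustrative): a caller combining two guards might do

     tree t = maybe_fold_and_comparisons (LT_EXPR, a, b, LT_EXPR, a, c);

   and build the explicit AND only when NULL_TREE comes back (`a', `b'
   and `c' are hypothetical trees).  The second call below retries with
   the comparisons swapped because and_comparisons_1 only recurses into
   its first argument.  */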
tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
                            enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
                                   invert_tree_comparison (code2, false),
                                   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
1975 /* Try to simplify the OR of the ssa variable defined by the assignment
1976 STMT with the comparison specified by (OP2A CODE2 OP2B).
1977 Return NULL_EXPR if we can't simplify this to a single expression. */
1980 or_var_with_comparison_1 (gimple stmt
,
1981 enum tree_code code2
, tree op2a
, tree op2b
)
1983 tree var
= gimple_assign_lhs (stmt
);
1984 tree true_test_var
= NULL_TREE
;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_true_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 code2, op2a, op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1 and inner2:
         inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
         inner1 OR (inner1 AND inner2) => inner1
         !inner1 OR (inner1 OR inner2) => true
         !inner1 OR (inner1 AND inner2) => !inner1 OR inner2  */
      if (inner1 == true_test_var)
        return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
         Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (t OR inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_or)
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (integer_zerop (t))
                return inner2;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner2 OR (op2a code op2b)))  */
          else if (integer_zerop (t))
            return boolean_false_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 OR (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (inner1 OR t)
             => (t OR partial)  */
          if (is_or)
            {
              if (integer_zerop (t))
                return inner1;
              else if (integer_onep (t))
                return boolean_true_node;
              /* If both are the same, we can apply the identity
                 (x OR x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner1 OR (op2a code2 op2b)))
             => (t AND partial)  */
          else
            {
              if (integer_zerop (t))
                return boolean_false_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed AND expression.  The
                     interesting case is when at least one is true.
                     Or, if both are the same, we can apply the identity
                     (x AND x) == x.  */
                  if (integer_onep (partial))
                    return t;
                  else if (integer_onep (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
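/* Worked example (illustrative, not from the original sources): given the
   GIMPLE sequence

     var_3 = a_1 | b_2;
     ... var_3 | (a_1 != 0) ...

   op2a is a_1, so true_test_var == inner1 and the first identity above
   applies: the whole disjunction folds to var_3 without any recursion.  */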
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1, code2,
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
         return that or TRUE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
            }
        }
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
            }
        }

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == EQ_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LT_EXPR && code2 == GT_EXPR)
                   || (code1 == GT_EXPR && code2 == LT_EXPR)))
        return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_true_node;
      else if (cmp <= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ORed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     return that.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
                        {
                          if (!result)
                            result = boolean_true_node;
                          else if (!integer_onep (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = or_var_with_comparison (arg, invert, code2,
                                                     op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }

        default:
          break;
        }
    }
  return NULL_TREE;
}
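/* Worked examples (illustrative) for the constant-merging logic above:
   for (x != 2) || (x < 5), cmp = tree_int_cst_compare (2, 5) is negative
   and code2 == LT_EXPR gives val = true, so the disjunction folds to
   boolean_true_node (the only value excluded by the first test, 2,
   already satisfies x < 5).  For (x != 2) || (x > 5), val = false and
   the result is simply (x != 2), since every x covered by the second
   test is already covered by the first.  */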
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
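/* Note that both operand orders are tried because or_comparisons_1 only
   follows SSA definitions reachable from its first comparison; a merge
   of (a_1 < b_2) || (c_3 != 0) that depends on looking through c_3's
   definition can therefore only succeed on the swapped attempt.  */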
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree *vec, val;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for restrict qualification
                 of pointer types should not apply here.
                 Substitution later will only substitute to allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
                     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
                  && TYPE_MODE (TREE_TYPE (lhs))
                     == TYPE_MODE (TREE_TYPE (op0)))
                return op0;

              return
                fold_unary_ignore_overflow_loc (loc, subcode,
                                                gimple_expr_type (stmt), op0);
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

              /* Translate &x + CST into an invariant form suitable for
                 further propagation.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree off = fold_convert (ptr_type_node, op1);
                  return build_fold_addr_expr_loc
                           (loc,
                            fold_build2 (MEM_REF,
                                         TREE_TYPE (TREE_TYPE (op0)),
                                         unshare_expr (op0), off));
                }

              return fold_binary_loc (loc, subcode,
                                      gimple_expr_type (stmt), op0, op1);
            }

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

              /* Fold embedded expressions in ternary codes.  */
              if ((subcode == COND_EXPR
                   || subcode == VEC_COND_EXPR)
                  && COMPARISON_CLASS_P (op0))
                {
                  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
                  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
                  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
                                              TREE_TYPE (op0), op00, op01);
                  if (tem)
                    op0 = tem;
                }

              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;

        if (gimple_call_internal_p (stmt))
          /* No folding yet for these functions.  */
          return NULL_TREE;

        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            call = build_call_array_loc (loc,
                                         gimple_call_return_type (stmt),
                                         fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (EXPR_LOCATION (call), call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
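/* A minimal sketch of a VALUEIZE callback a caller might pass (this
   particular helper is illustrative, not a function defined in this
   file).  Returning the SSA name itself means "no known value", which
   restricts folding to what is directly visible in STMT's operands:

     static tree
     identity_valueize (tree name)
     {
       return name;
     }

   Propagation passes such as CCP instead return the constant recorded
   in their lattice for NAME when one is known.  */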
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
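/* Typical use (an illustrative sketch; the valueizer name is whatever
   the caller provides):

     tree val = gimple_fold_stmt_to_constant (stmt, my_valueize);
     if (val)
       ...substitute VAL for the LHS of STMT...

   A non-NULL result is guaranteed to satisfy is_gimple_min_invariant,
   so it can be substituted without further checks.  */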
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
                                 unsigned HOST_WIDE_INT offset,
                                 unsigned HOST_WIDE_INT size, tree);

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as when
   BASE is array[40] and we do know the constructor of the array),
   then BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero,
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
                      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
        {
          if (!host_integerp (TREE_OPERAND (base, 1), 0))
            return NULL_TREE;
          *bit_offset += (mem_ref_offset (base).low
                          * BITS_PER_UNIT);
        }

      if (valueize
          && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
        base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
      if (!const_value_known_p (base))
        return NULL_TREE;

      /* Fallthru.  */
    case CONST_DECL:
      if (!DECL_INITIAL (base)
          && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
        return error_mark_node;
      /* Do not return an error_mark_node DECL_INITIAL.  LTO uses this
         as special marker (_not_ zero ...) for its own purposes.  */
      if (DECL_INITIAL (base) == error_mark_node)
        return NULL_TREE;
      return DECL_INITIAL (base);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
        return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
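/* Example (illustrative): for
     static const int a;
   there is no explicit DECL_INITIAL, so error_mark_node is returned and
   callers fold reads from A to zero.  For a MEM_REF base with a constant
   non-zero offset, *BIT_OFFSET is first bumped by that offset and the
   lookup then proceeds on the underlying decl's initializer.  */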
/* CTOR is a STRING_CST.  Fold a reference of type TYPE and size SIZE
   to the memory at bit OFFSET.

   We only do the simple job of folding byte accesses.  */

static tree
fold_string_cst_ctor_reference (tree type, tree ctor,
                                unsigned HOST_WIDE_INT offset,
                                unsigned HOST_WIDE_INT size)
{
  if (INTEGRAL_TYPE_P (type)
      && (TYPE_MODE (type)
          == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
          == MODE_INT)
      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
      && size == BITS_PER_UNIT
      && !(offset % BITS_PER_UNIT))
    {
      offset /= BITS_PER_UNIT;
      if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
        return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
                                          [offset]));
      /* Folding
           const char a[20] = "hello";
           return a[10];
         might lead to an offset greater than the string length.  In this
         case we know the value is either initialized to 0 or out of bounds.
         Return 0 in both cases.  */
      return build_zero_cst (type);
    }
  return NULL_TREE;
}
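/* Example: a single-byte read at byte 1 of the STRING_CST "hello" yields
   build_int_cst_type (TYPE, 'e'), while a read at byte 10 of
     const char a[20] = "hello";
   is past TREE_STRING_LENGTH (6, counting the terminating NUL) and folds
   to 0, matching the implicit zero initialization of the tail.  */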
/* CTOR is a CONSTRUCTOR of an array type.  Fold a reference of type TYPE
   and size SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
                           unsigned HOST_WIDE_INT offset,
                           unsigned HOST_WIDE_INT size,
                           tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
              == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than the size of an array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
                 .udiv (elt_size, TRUNC_DIV_EXPR);
  access_index += low_bound;
  if (index_type)
    access_index = access_index.ext (TYPE_PRECISION (index_type),
                                     TYPE_UNSIGNED (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span the whole access.  We do
     not care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - double_int_one;
  if (index_type)
    index = index.ext (TYPE_PRECISION (index_type),
                       TYPE_UNSIGNED (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* An array constructor might explicitly set the index, specify a
         range, or leave the index NULL, meaning that it is the next index
         after the previous one.  */
      if (cfield)
        {
          if (TREE_CODE (cfield) == INTEGER_CST)
            max_index = index = tree_to_double_int (cfield);
          else
            {
              gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
              index = tree_to_double_int (TREE_OPERAND (cfield, 0));
              max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
            }
        }
      else
        {
          index += double_int_one;
          if (index_type)
            index = index.ext (TYPE_PRECISION (index_type),
                               TYPE_UNSIGNED (index_type));
          max_index = index;
        }

      /* Do we have a match?  */
      if (access_index.cmp (index, 1) >= 0
          && access_index.cmp (max_index, 1) <= 0)
        return fold_ctor_reference (type, cval, inner_offset, size,
                                    from_decl);
    }
  /* When memory is not explicitly mentioned in the constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
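/* Worked example (illustrative, assuming a 32-bit int): given
     static const int a[4] = { 10, 20, 30, 40 };
   a 32-bit read at bit offset 64 has access_index 2 and inner_offset 0,
   so the matching constructor element folds to 30.  A 32-bit read at
   bit offset 16 straddles two elements, fails the inner_offset + size
   check above and is left alone.  */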
/* CTOR is a CONSTRUCTOR of an aggregate or vector.
   Fold a reference of type TYPE and size SIZE to the memory at bit
   OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
                              unsigned HOST_WIDE_INT offset,
                              unsigned HOST_WIDE_INT size,
                              tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
                            cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
      double_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
         but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
                  && TREE_CODE (byte_offset) == INTEGER_CST
                  && (field_size != NULL_TREE
                      ? TREE_CODE (field_size) == INTEGER_CST
                      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = tree_to_double_int (field_offset)
                  + byte_offset_cst * bits_per_unit_cst;
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
        bitoffset_end = bitoffset + tree_to_double_int (field_size);
      else
        bitoffset_end = double_int_zero;

      access_end = double_int::from_uhwi (offset)
                   + double_int::from_uhwi (size);

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
         [BITOFFSET, BITOFFSET_END)?  */
      if (access_end.cmp (bitoffset, 0) > 0
          && (field_size == NULL_TREE
              || double_int::from_uhwi (offset).slt (bitoffset_end)))
        {
          double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
          /* We do have overlap.  Now see if the field is large enough to
             cover the access.  Give up for accesses spanning multiple
             fields.  */
          if (access_end.cmp (bitoffset_end, 0) > 0)
            return NULL_TREE;
          if (double_int::from_uhwi (offset).slt (bitoffset))
            return NULL_TREE;
          return fold_ctor_reference (type, cval,
                                      inner_offset.to_uhwi (), size,
                                      from_decl);
        }
    }
  /* When memory is not explicitly mentioned in the constructor, it is 0.  */
  return build_zero_cst (type);
}
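/* Worked example (illustrative, assuming a 32-bit int): for
     static const struct { int x; int y; } p = { 1, 2 };
   a 32-bit read at bit offset 32 overlaps only Y's range [32, 64), so
   the access folds to 2; a 64-bit read at offset 0 spans both fields
   and is rejected by the access_end check above.  */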
/* CTOR is the value initializing memory.  Fold a reference of type TYPE
   and size SIZE to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
                     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (ctor, from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && operand_equal_p (TYPE_SIZE (type),
                          TYPE_SIZE (TREE_TYPE (ctor)), 0))
    {
      ret = canonicalize_constructor_val (ctor, from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
        STRIP_NOPS (ret);
      return ret;
    }
  if (TREE_CODE (ctor) == STRING_CST)
    return fold_string_cst_ctor_reference (type, ctor, offset, size);
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
        return fold_array_ctor_reference (type, ctor, offset, size,
                                          from_decl);
      else
        return fold_nonarray_ctor_reference (type, ctor, offset, size,
                                             from_decl);
    }

  return NULL_TREE;
}
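/* Example (illustrative): reading the storage of
     static const float f = 1.0f;
   through a same-sized integer type takes the view-convert path above:
   OFFSET is 0, float is not an aggregate and the type sizes match, so
   the result is the folded VIEW_CONVERT_EXPR of the REAL_CST.  */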
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
         Only special case variable offsets.
         FIXME: This code can't handle nested references with variable indexes
         (they will be handled only by iteration of ccp).  Perhaps we can bring
         get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
          && valueize
          && (idx = (*valueize) (TREE_OPERAND (t, 1)))
          && TREE_CODE (idx) == INTEGER_CST)
        {
          tree low_bound, unit_size;
          double_int doffset;

          /* If the resulting bit-offset is constant, track it.  */
          if ((low_bound = array_ref_low_bound (t),
               TREE_CODE (low_bound) == INTEGER_CST)
              && (unit_size = array_ref_element_size (t),
                  host_integerp (unit_size, 1))
              && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
                            .sext (TYPE_PRECISION (TREE_TYPE (idx))),
                  doffset.fits_shwi ()))
            {
              offset = doffset.to_shwi ();
              offset *= TREE_INT_CST_LOW (unit_size);
              offset *= BITS_PER_UNIT;

              base = TREE_OPERAND (t, 0);
              ctor = get_base_constructor (base, &offset, valueize);
              /* Empty constructor.  Always fold to 0.  */
              if (ctor == error_mark_node)
                return build_zero_cst (TREE_TYPE (t));
              /* Out of bound array access.  Value is undefined,
                 but don't fold.  */
              if (offset < 0)
                return NULL_TREE;
              /* We cannot determine ctor.  */
              if (!ctor)
                return NULL_TREE;
              return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
                                          TREE_INT_CST_LOW (unit_size)
                                          * BITS_PER_UNIT,
                                          base);
            }
        }
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
        return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
        return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
        return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
        return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
                                  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
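/* Example (illustrative): given
     static const int a[2] = { 1, 2 };
   fold_const_aggregate_ref on the ARRAY_REF a[1] finds the CONSTRUCTOR
   of A and returns the INTEGER_CST 2.  With a variable index i_1 the
   fold can only succeed through the _1 variant, and only when the
   VALUEIZE callback maps i_1 to an INTEGER_CST.  */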
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  unsigned HOST_WIDE_INT offset, size;
  tree v, fn, vtable;

  vtable = v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual method table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
      v = TREE_OPERAND (v, 0);
    }
  else
    offset = 0;

  if (TREE_CODE (v) != ADDR_EXPR)
    return NULL_TREE;
  v = TREE_OPERAND (v, 0);

  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v)
      || !DECL_INITIAL (v)
      || DECL_INITIAL (v) == error_mark_node)
    return NULL_TREE;
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
  offset += token * size;
  fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
                            offset, size, vtable);
  if (!fn || integer_zerop (fn))
    return NULL_TREE;
  gcc_assert (TREE_CODE (fn) == ADDR_EXPR
              || TREE_CODE (fn) == FDESC_EXPR);
  fn = TREE_OPERAND (fn, 0);
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);

  /* When the cgraph node is missing and the function is not public, we
     cannot devirtualize.  This can happen in WHOPR when the actual method
     ends up in another partition, because we found the devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (fn, vtable))
    return NULL_TREE;

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
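/* Example (illustrative): for a C++ call through an OBJ_TYPE_REF with
   token 1, the vtable initializer is indexed at
   OFFSET + 1 * size-of-one-vtable-entry; if that slot holds the address
   of a FUNCTION_DECL that this unit may reference, the indirect call
   can be devirtualized into a direct call to it.  */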
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
         cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
        {
        case ABS_EXPR:
          /* Always true for floating-point operands.  */
          return true;

        case MULT_EXPR:
          /* True if the two operands are identical (since we are
             restricted to floating-point inputs).  */
          op0 = gimple_assign_rhs1 (def_stmt);
          op1 = gimple_assign_rhs2 (def_stmt);

          if (op0 == op1
              || operand_equal_p (op0, op1, 0))
            return true;
          break;

        default:
          break;
        }
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
        {
          tree arg1;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
              return true;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
                 nonnegative inputs.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
                return true;
              break;

            CASE_FLT_FN (BUILT_IN_POWI):
              /* True if the second argument is an even integer.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == INTEGER_CST
                  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
                return true;
              break;

            CASE_FLT_FN (BUILT_IN_POW):
              /* True if the second argument is an even integer-valued
                 real.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == REAL_CST)
                {
                  REAL_VALUE_TYPE c;
                  HOST_WIDE_INT n;

                  c = TREE_REAL_CST (arg1);
                  n = real_to_integer (&c);

                  if ((n & 1) == 0)
                    {
                      REAL_VALUE_TYPE cint;
                      real_from_integer (&cint, VOIDmode, n,
                                         n < 0 ? -1 : 0, 0);
                      if (real_identical (&c, &cint))