/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
26 #include "stringpool.h"
29 #include "stor-layout.h"
34 #include "basic-block.h"
35 #include "tree-ssa-alias.h"
36 #include "internal-fn.h"
37 #include "gimple-fold.h"
38 #include "gimple-expr.h"
42 #include "gimple-iterator.h"
43 #include "gimple-ssa.h"
44 #include "tree-ssanames.h"
45 #include "tree-into-ssa.h"
48 #include "tree-ssa-propagate.h"
50 #include "ipa-utils.h"
51 #include "gimple-pretty-print.h"
52 #include "tree-ssa-address.h"
53 #include "langhooks.h"
54 #include "gimplify-me.h"
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declared.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      snode = symtab_get_node (decl);
      if (!snode)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || !DECL_EXTERNAL (from_decl)
      || (flag_ltrans
	  && symtab_get_node (from_decl)->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exceptions are COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in units where they are used and when the other unit was
     compiled with LTO it is possible that the vtable was kept public
     while the function itself was privatized.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;

  /* OK, we are seeing either a COMDAT or a static variable.  In this case
     we must check that the definition is still around so we can refer
     to it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we didn't
	 take the decision to eliminate the offline copy of the function
	 yet.  The second is important when devirtualization happens
	 during the final compilation stage, when making a new reference
	 no longer causes the callee to be compiled.  */
      if (!node || !node->definition || node->global.inlined_to)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->definition)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  return true;
}
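/* Illustrative sketch (editorial, not part of the original sources): case 3
   of the comment above can be triggered by folding a load from an external
   vtable.  For C++ input along the lines of

     struct A { virtual int f () { return 1; } };   // A::f is a COMDAT
     int g (A *a) { return a->f (); }

   a unit that only sees A's vtable as an external symbol may try to
   devirtualize a->f () during the final compilation stage.  If we have
   already decided not to emit the offline copy of A::f, the function
   above returns false and the folding is abandoned.  */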
/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable to is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_get_create_node (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
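/* Example (editorial sketch): a constructor entry of the form

     arr p+ 4                       <-- POINTER_PLUS_EXPR

   is canonicalized by the code above into the invariant address

     &MEM[(char *)&arr + 4]         <-- ADDR_EXPR of a MEM_REF

   which satisfies is_gimple_min_invariant.  */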
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
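/* Usage sketch (editorial): for

     static const int forty_two = 42;

   get_symbol_constant_value returns the INTEGER_CST 42; for a
   non-public 'const int' declared without an initializer it returns
   the zero constant, per the comment above.  */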
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize a MEM_REF's invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
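/* Example (editorial sketch): after constant propagation we may see

     tmp = MEM[(int *)&a + 0];

   with matching types and volatility; the code above folds the MEM_REF
   back to the plain reference

     tmp = a;

   and, when !IS_LHS and 'a' is a known constant, may further fold the
   load via fold_const_aggregate_ref.  */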
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (val))
	      {
		bool final;
		vec<cgraph_node *> targets
		  = possible_polymorphic_call_targets (val, &final);
		if (final && targets.length () <= 1)
		  {
		    tree fndecl;
		    if (targets.length () == 1)
		      fndecl = targets[0]->decl;
		    else
		      fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		    val = fold_convert (TREE_TYPE (val), fndecl);
		    STRIP_USELESS_TYPE_CONVERSION (val);
		    return val;
		  }
	      }
	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that the second operand is an integer constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
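/* Example (editorial sketch) of the boolean canonicalization above, for
   boolean-typed x:

     tmp = (x == 0);   =>   tmp = x ^ 1;   (or tmp = ~x for 1-bit types)
     tmp = (x != 0);   =>   tmp = x;  */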
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
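/* Example (editorial sketch): after propagation a condition such as

     if (2 > 1) ...

   folds via fold_binary_loc to a constant, and the GIMPLE_COND is
   rewritten in place with gimple_cond_set_condition_from_tree.  */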
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
	 SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
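/* Sketch of the virtual-operand bookkeeping above (editorial; the .MEM
   names are illustrative).  Replacing

     # .MEM_3 = VDEF <.MEM_2>
     memcpy (&d, &s, 8);

   by a two-store sequence yields

     # .MEM_4 = VDEF <.MEM_2>      <-- first VUSE is the original VUSE
     d.lo = s.lo;
     # .MEM_3 = VDEF <.MEM_4>      <-- last VDEF is the original VDEF
     d.hi = s.hi;

   with a fresh name (.MEM_4) introduced only for the intermediate
   store.  */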
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
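/* Example (editorial sketch): for the PHI

     p_1 = PHI <"ab"(bb2), "xy"(bb3)>

   both arguments have known string length 2, so
   get_maxval_strlen (p_1, &len, visited, 0) succeeds and sets LEN to 2;
   had the lengths disagreed with TYPE == 0, it would return false.  */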
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRCAT:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_STRCAT:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcat (loc, gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2], ignore,
					   DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
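/* Example (editorial sketch): given

     char buf[8];
     strcpy (buf, p_1);

   where get_maxval_strlen proves strlen (p_1) == 3, the BUILT_IN_STRCPY
   case above lets fold_builtin_strcpy rewrite the call to the equivalent

     memcpy (buf, p_1, 4);

   copying the terminating NUL as part of the fixed-size block.  */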
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, cgraph_get_node (gimple_call_addr_fndecl
						(OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec<cgraph_node *> targets
	    = possible_polymorphic_call_targets (callee, &final);
	  if (final && targets.length () <= 1)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    gsi_replace (gsi, new_stmt, true);
		  return true;
		}
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    changed |= targetm.gimple_fold_builtin (gsi);
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR
		     ? build_zero_cst (TREE_TYPE (arg0))
		     : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR
		     ? build_zero_cst (TREE_TYPE (arg0))
		     : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = build_zero_cst (TREE_TYPE (arg0));
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR)
	    {
	      if (integer_onep (arg1))
		result = arg0;
	      else if (integer_onep (arg0))
		result = arg1;
	    }
	}
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
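/* Example (editorial sketch) of the devirtualization path above: for a
   virtual call printed in dumps roughly as

     OBJ_TYPE_REF(_4;(struct A)a_2(D)->0) (a_2(D));

   with exactly one possible polymorphic call target A::f, the call is
   rewritten to the direct call

     A::f (a_2(D));

   and with zero possible targets it becomes __builtin_unreachable ().  */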
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;

	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}
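/* Usage sketch (editorial): a propagation pass that substituted &x into a
   dereference typically does

     if (fold_stmt (&gsi))
       update_stmt (gsi_stmt (gsi));

   turning e.g. *&x = 0 into x = 0.  fold_stmt may replace the statement;
   fold_stmt_inplace below must not.  */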
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
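/* Examples (editorial sketch):

     canonicalize_bool (a_1 < b_2, true)    => a_1 >= b_2
     canonicalize_bool (x_3, false)         => x_3 != 0   (non-boolean X)
     canonicalize_bool (integer 1, true)    => boolean_false_node  */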
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     return it.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
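/* Examples (editorial sketch):

     maybe_fold_and_comparisons (LT_EXPR, x, 3, LT_EXPR, x, 5)
       => x < 3                  (the more restrictive bound)
     maybe_fold_and_comparisons (EQ_EXPR, x, 2, GT_EXPR, x, 4)
       => boolean_false_node     (disjoint ranges)  */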
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return partial;
		  else if (integer_onep (t))
		    return t;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
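/* A worked instance of the reassociation above, assuming boolean SSA
   names: with VAR defined as "var = inner1 | inner2" where
   inner1 = (x < 3) and inner2 = (x > 4), a query against (x < 5)
   first folds inner1 | (x < 5) to the non-constant partial result
   (x < 5), then folds inner2 | (x < 5) to boolean_true_node, so the
   whole OR simplifies to true.  */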
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
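/* For illustration: ORing two comparisons of the same value against
   constants can collapse a pair of range tests.  With X an
   integer-typed tree and ZERO an INTEGER_CST built elsewhere,

     tree t = maybe_fold_or_comparisons (LT_EXPR, x, zero,
					 GT_EXPR, x, zero);

   yields the singleton-range test "x != 0", while incompatible pairs
   simply return NULL_TREE.  */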
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for restrict qualification
		 of pointer types should not apply here.
		 Substitution later will only substitute to allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
		     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
		  && TYPE_MODE (TREE_TYPE (lhs))
		     == TYPE_MODE (TREE_TYPE (op0)))
		return op0;

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
		return NULL_TREE;
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (stmt), retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
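/* For illustration, a minimal sketch of a VALUEIZE callback as a
   constant-propagation pass might provide it; the lattice lookup
   get_constant_value below is hypothetical:

     static tree
     my_valueize (tree name)
     {
       if (TREE_CODE (name) == SSA_NAME)
	 {
	   tree val = get_constant_value (name);
	   if (val && is_gimple_min_invariant (val))
	     return val;
	 }
       return name;
     }

     tree cst = gimple_fold_stmt_to_constant (stmt, my_valueize);

   A callback must return either a replacement for NAME or NAME
   itself, never NULL_TREE.  */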
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
				 unsigned HOST_WIDE_INT offset,
				 unsigned HOST_WIDE_INT size, tree);

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know the constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).low
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
/* CTOR is STRING_CST.  Fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.

   We only do a simple job of folding byte accesses.  */

static tree
fold_string_cst_ctor_reference (tree type, tree ctor,
				unsigned HOST_WIDE_INT offset,
				unsigned HOST_WIDE_INT size)
{
  if (INTEGRAL_TYPE_P (type)
      && (TYPE_MODE (type)
	  == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	  == MODE_INT)
      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
      && size == BITS_PER_UNIT
      && !(offset % BITS_PER_UNIT))
    {
      offset /= BITS_PER_UNIT;
      if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
	return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
					  [offset]));
      /* Folding
	   const char a[20] = "hello";
	 and reading past the end of the string text
	 might lead to an offset greater than the string length.  In this
	 case we know the value is either initialized to 0 or out of bounds.
	 Return 0 in both cases.  */
      return build_zero_cst (type);
    }
  return NULL_TREE;
}
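/* For example: for
     static const char a[20] = "hello";
   a one-byte read of type char at bit offset 8, i.e.

     fold_string_cst_ctor_reference (char_type_node, ctor,
				     8, BITS_PER_UNIT);

   yields the INTEGER_CST 'e', while an in-bounds byte read beyond the
   string text (bit offset >= 48 here) folds to zero.  */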
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type)))
      || elt_size.is_zero ())
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
		 .udiv (elt_size, TRUNC_DIV_EXPR);
  access_index += low_bound;
  if (index_type)
    access_index = access_index.ext (TYPE_PRECISION (index_type),
				     TYPE_UNSIGNED (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - double_int_one;
  if (index_type)
    index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify a range,
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = tree_to_double_int (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += double_int_one;
	  if (index_type)
	    index = index.ext (TYPE_PRECISION (index_type),
			       TYPE_UNSIGNED (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (access_index.cmp (index, 1) >= 0
	  && access_index.cmp (max_index, 1) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
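/* For example: folding a read of a[2] from
     static const int a[4] = { 1, 2, 3, 4 };
   with 32-bit int arrives here with OFFSET == 64 and SIZE == 32;
   elt_size is 4 bytes, so access_index = (64 / 8) / 4 = 2 and the
   recursive fold_ctor_reference on that constructor element returns
   the INTEGER_CST 3.  */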
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
      double_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = tree_to_double_int (field_offset)
		  + byte_offset_cst * bits_per_unit_cst;
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + tree_to_double_int (field_size);
      else
	bitoffset_end = double_int_zero;

      access_end = double_int::from_uhwi (offset)
		   + double_int::from_uhwi (size);

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (access_end.cmp (bitoffset, 0) > 0
	  && (field_size == NULL_TREE
	      || double_int::from_uhwi (offset).slt (bitoffset_end)))
	{
	  double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
	  /* We do have overlap.  Now see if the field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (access_end.cmp (bitoffset_end, 0) > 0)
	    return NULL_TREE;
	  if (double_int::from_uhwi (offset).slt (bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && operand_equal_p (TYPE_SIZE (type),
			  TYPE_SIZE (TREE_TYPE (ctor)), 0))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      return ret;
    }
  if (TREE_CODE (ctor) == STRING_CST)
    return fold_string_cst_ctor_reference (type, ctor, offset, size);
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
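/* For example: a read of s.y from
     static const struct { int x; int y; } s = { 1, 2 };
   dispatches to fold_nonarray_ctor_reference, which (with 32-bit int
   and no padding) finds the field covering bits [32, 64) and returns
   the INTEGER_CST 2.  */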
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;
	  double_int doffset;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size))
	      && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
			    .sext (TYPE_PRECISION (TREE_TYPE (idx))),
		  doffset.fits_shwi ()))
	    {
	      offset = doffset.to_shwi ();
	      offset *= tree_to_uhwi (unit_size);
	      offset *= BITS_PER_UNIT;

	      base = TREE_OPERAND (t, 0);
	      ctor = get_base_constructor (base, &offset, valueize);
	      /* Empty constructor.  Always fold to 0.  */
	      if (ctor == error_mark_node)
		return build_zero_cst (TREE_TYPE (t));
	      /* Out of bound array access.  Value is undefined,
		 but don't fold.  */
	      if (offset < 0)
		return NULL_TREE;
	      /* We can not determine ctor.  */
	      if (!ctor)
		return NULL_TREE;
	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					  tree_to_uhwi (unit_size)
					  * BITS_PER_UNIT,
					  base);
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
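/* For illustration: given
     static const int a[2] = { 10, 20 };
   fold_const_aggregate_ref on the ARRAY_REF a[1] returns the
   INTEGER_CST 20.  With a variable index i, the NULL valueize
   callback cannot resolve i, so fold_const_aggregate_ref_1 must be
   called with a callback that valueizes i to a constant.  */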
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be an array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in constant time.  We expect that the frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For a type-inconsistent program we may end up looking up a virtual method
     in a virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When the cgraph node is missing and the function is not public, we
	 cannot devirtualize.  This can happen in WHOPR when the actual method
	 ends up in another partition, because we found the devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
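/* For example: on a target with 8-byte vtable slots (elt_size == 8,
   so size == 64 bits), looking up TOKEN 2 at byte OFFSET 16 computes
     offset = 16 * BITS_PER_UNIT + 2 * 64 = 256 bits,
   hence access_index = 256 / 8 / 8 = 4, i.e. the fifth element of
   the vtable constructor.  */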
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	default:
	  break;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      break;
	    }
	}
    }

  return false;
}
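/* For example: for the GIMPLE statement "t_1 = pow (x_2, 2.0)" the
   REAL_CST 2.0 round-trips through real_to_integer/real_from_integer
   as the even integer 2, so gimple_val_nonnegative_real_p (t_1)
   returns true; for exponents like 3.0 or 2.5 it returns false.  */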
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    build_int_cst_wide (ptype,
						TREE_INT_CST_LOW (off),
						TREE_INT_CST_HIGH (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
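/* For illustration: with
     float fooarray[4];
   the indirection *(float *)&fooarray folds to fooarray[0], and with
   a vector-typed vecfoo the access *((float *)&vecfoo + 4) becomes a
   BIT_FIELD_REF selecting the second element of vecfoo.  */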
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}

/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple cvt = gimple_build_assign_with_ops
    (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
);