1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "basic-block.h"
28 #include "gimple-pretty-print.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "langhooks.h"
37 /* This pass propagates the RHS of assignment statements into use
38 sites of the LHS of the assignment. It's basically a specialized
39 form of tree combination. It is hoped all of this can disappear
40 when we have a generalized tree combiner.
42 One class of common cases we handle is forward propagating a single use
43 variable into a COND_EXPR.
47 if (x) goto ... else goto ...
49 Will be transformed into:
52 if (a COND b) goto ... else goto ...
54 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
56 Or (assuming c1 and c2 are constants):
60 if (x EQ/NEQ c2) goto ... else goto ...
62 Will be transformed into:
65 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
67 Similarly for x = a - c1.
73 if (x) goto ... else goto ...
75 Will be transformed into:
78 if (a == 0) goto ... else goto ...
80 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
81 For these cases, we propagate A into all, possibly more than one,
82 COND_EXPRs that use X.
88 if (x) goto ... else goto ...
90 Will be transformed into:
93 if (a != 0) goto ... else goto ...
95 (Assuming a is an integral type and x is a boolean or x is an
96 integral and a is a boolean.)
98 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
99 For these cases, we propagate A into all, possibly more than one,
100 COND_EXPRs that use X.
102 In addition to eliminating the variable and the statement which assigns
103 a value to the variable, we may be able to later thread the jump without
104 adding insane complexity in the dominator optimizer.
106 Also note these transformations can cascade. We handle this by having
107 a worklist of COND_EXPR statements to examine. As we make a change to
108 a statement, we put it back on the worklist to examine on the next
109 iteration of the main loop.
111 A second class of propagation opportunities arises for ADDR_EXPR
122 ptr = (type1*)&type2var;
125 Will get turned into (if type1 and type2 are the same size
126 and neither have volatile on them):
127 res = VIEW_CONVERT_EXPR<type1>(type2var)
132 ptr2 = ptr + <constant>;
136 ptr2 = &x[constant/elementsize];
141 offset = index * element_size;
142 offset_p = (pointer) offset;
143 ptr2 = ptr + offset_p
145 Will get turned into:
153 Provided that decl has known alignment >= 2, will get turned into
157 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
158 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
161 This will (of course) be extended as other needs arise. */
/* NOTE(review): this chunk is a garbled extraction -- statements are split
   across lines and some original lines are elided (visible as gaps in the
   embedded original line numbers).  Comments below annotate intent only;
   no code text has been altered.  */
/* Forward declaration: propagate the ADDR_EXPR RHS into all uses of NAME.  */
163 static bool forward_propagate_addr_expr (tree name
, tree rhs
);
165 /* Set to true if we delete dead edges during the optimization. */
166 static bool cfg_changed
;
/* Forward declaration: fold a gimple assignment's RHS into one tree of TYPE.  */
168 static tree
rhs_to_tree (tree type
, gimple stmt
);
170 /* Get the next statement we can propagate NAME's value into skipping
171 trivial copies. Returns the statement that is suitable as a
172 propagation destination or NULL_TREE if there is no such one.
173 This only returns destinations in a single-use chain. FINAL_NAME_P
174 if non-NULL is written to the ssa name that represents the use. */
177 get_prop_dest_stmt (tree name
, tree
*final_name_p
)
/* Walk forward through trivial single-use SSA copies of NAME.
   NOTE(review): the enclosing loop construct and return statements are
   missing from this extraction -- confirm against the original source.  */
183 /* If name has multiple uses, bail out. */
184 if (!single_imm_use (name
, &use
, &use_stmt
))
187 /* If this is not a trivial copy, we found it. */
188 if (!gimple_assign_ssa_name_copy_p (use_stmt
)
189 || gimple_assign_rhs1 (use_stmt
) != name
)
192 /* Continue searching uses of the copy destination. */
193 name
= gimple_assign_lhs (use_stmt
);
/* Report the final SSA name of the chain to the caller, if requested.  */
197 *final_name_p
= name
;
202 /* Get the statement we can propagate from into NAME skipping
203 trivial copies. Returns the statement which defines the
204 propagation source or NULL_TREE if there is no such one.
205 If SINGLE_USE_ONLY is set considers only sources which have
206 a single use chain up to NAME. If SINGLE_USE_P is non-null,
207 it is set to whether the chain to NAME is a single use chain
208 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
211 get_prop_source_stmt (tree name
, bool single_use_only
, bool *single_use_p
)
/* Assume a single-use chain until proven otherwise below.  */
213 bool single_use
= true;
/* NOTE(review): loop header and several return paths are missing from
   this extraction; the visible logic walks backward through trivial
   SSA copies from NAME's defining statement.  */
216 gimple def_stmt
= SSA_NAME_DEF_STMT (name
);
218 if (!has_single_use (name
))
225 /* If name is defined by a PHI node or is the default def, bail out. */
226 if (!is_gimple_assign (def_stmt
))
229 /* If def_stmt is not a simple copy, we possibly found it. */
230 if (!gimple_assign_ssa_name_copy_p (def_stmt
))
234 if (!single_use_only
&& single_use_p
)
235 *single_use_p
= single_use
;
237 /* We can look through pointer conversions in the search
238 for a useful stmt for the comparison folding. */
239 rhs
= gimple_assign_rhs1 (def_stmt
);
240 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt
))
241 && TREE_CODE (rhs
) == SSA_NAME
242 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt
)))
243 && POINTER_TYPE_P (TREE_TYPE (rhs
)))
250 /* Continue searching the def of the copy source name. */
251 name
= gimple_assign_rhs1 (def_stmt
);
256 /* Checks if the destination ssa name in DEF_STMT can be used as
257 propagation source. Returns true if so, otherwise false. */
260 can_propagate_from (gimple def_stmt
)
/* Caller contract: DEF_STMT must be a GIMPLE_ASSIGN.  */
262 gcc_assert (is_gimple_assign (def_stmt
));
264 /* If the rhs has side-effects we cannot propagate from it. */
265 if (gimple_has_volatile_ops (def_stmt
))
268 /* If the rhs is a load we cannot propagate from it. */
269 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt
)) == tcc_reference
270 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt
)) == tcc_declaration
)
273 /* Constants can be always propagated. */
274 if (gimple_assign_single_p (def_stmt
)
275 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt
)))
278 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
279 if (stmt_references_abnormal_ssa_name (def_stmt
))
282 /* If the definition is a conversion of a pointer to a function type,
283 then we can not apply optimizations as some targets require
284 function pointers to be canonicalized and in this case this
285 optimization could eliminate a necessary canonicalization. */
286 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt
)))
288 tree rhs
= gimple_assign_rhs1 (def_stmt
);
289 if (POINTER_TYPE_P (TREE_TYPE (rhs
))
290 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs
))) == FUNCTION_TYPE
)
/* NOTE(review): the return statements for each guard above are missing
   from this extraction -- confirm against the original source.  */
297 /* Remove a chain of dead statements starting at the definition of
298 NAME. The chain is linked via the first operand of the defining statements.
299 If NAME was replaced in its only use then this function can be used
300 to clean up dead stmts. The function handles already released SSA
302 Returns true if cleanup-cfg has to run. */
305 remove_prop_source_from_use (tree name
)
307 gimple_stmt_iterator gsi
;
/* Local flag; note it shadows the file-scope 'cfg_changed' static.  */
309 bool cfg_changed
= false;
/* Loop body: stop when NAME is already freed, is a default definition,
   or still has remaining uses.  */
314 if (SSA_NAME_IN_FREE_LIST (name
)
315 || SSA_NAME_IS_DEFAULT_DEF (name
)
316 || !has_zero_uses (name
))
319 stmt
= SSA_NAME_DEF_STMT (name
);
/* PHIs and statements with side effects are never removed here.  */
320 if (gimple_code (stmt
) == GIMPLE_PHI
321 || gimple_has_side_effects (stmt
))
324 bb
= gimple_bb (stmt
);
325 gsi
= gsi_for_stmt (stmt
);
/* Detach the virtual definition before deleting the statement.  */
326 unlink_stmt_vdef (stmt
);
327 if (gsi_remove (&gsi
, true))
328 cfg_changed
|= gimple_purge_dead_eh_edges (bb
);
/* Follow the chain through the removed statement's first operand.  */
331 name
= is_gimple_assign (stmt
) ? gimple_assign_rhs1 (stmt
) : NULL_TREE
;
332 } while (name
&& TREE_CODE (name
) == SSA_NAME
);
337 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
338 converted to type TYPE.
340 This should disappear, but is needed so we can combine expressions and use
341 the fold() interfaces. Long term, we need to develop folding and combine
342 routines that deal with gimple exclusively . */
345 rhs_to_tree (tree type
, gimple stmt
)
347 location_t loc
= gimple_location (stmt
);
348 enum tree_code code
= gimple_assign_rhs_code (stmt
);
/* Dispatch on the RHS arity: ternary, binary, unary, or single operand.  */
349 if (get_gimple_rhs_class (code
) == GIMPLE_TERNARY_RHS
)
350 return fold_build3_loc (loc
, code
, type
, gimple_assign_rhs1 (stmt
),
351 gimple_assign_rhs2 (stmt
),
352 gimple_assign_rhs3 (stmt
));
353 else if (get_gimple_rhs_class (code
) == GIMPLE_BINARY_RHS
)
354 return fold_build2_loc (loc
, code
, type
, gimple_assign_rhs1 (stmt
),
355 gimple_assign_rhs2 (stmt
));
356 else if (get_gimple_rhs_class (code
) == GIMPLE_UNARY_RHS
)
357 return build1 (code
, type
, gimple_assign_rhs1 (stmt
));
358 else if (get_gimple_rhs_class (code
) == GIMPLE_SINGLE_RHS
)
359 return gimple_assign_rhs1 (stmt
);
364 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
365 the folded result in a form suitable for COND_EXPR_COND or
366 NULL_TREE, if there is no suitable simplified form. If
367 INVARIANT_ONLY is true only gimple_min_invariant results are
368 considered simplified. */
371 combine_cond_expr_cond (gimple stmt
, enum tree_code code
, tree type
,
372 tree op0
, tree op1
, bool invariant_only
)
/* Only comparison codes are meaningful in a COND_EXPR condition.  */
376 gcc_assert (TREE_CODE_CLASS (code
) == tcc_comparison
);
/* Defer overflow warnings so folding attempts that we discard below
   do not emit spurious diagnostics.  */
378 fold_defer_overflow_warnings ();
379 t
= fold_binary_loc (gimple_location (stmt
), code
, type
, op0
, op1
);
/* Folding failed: drop the deferred warnings without emitting.  */
382 fold_undefer_overflow_warnings (false, NULL
, 0);
386 /* Require that we got a boolean type out if we put one in. */
387 gcc_assert (TREE_CODE (TREE_TYPE (t
)) == TREE_CODE (type
));
389 /* Canonicalize the combined condition for use in a COND_EXPR. */
390 t
= canonicalize_cond_expr_cond (t
);
392 /* Bail out if we required an invariant but didn't get one. */
393 if (!t
|| (invariant_only
&& !is_gimple_min_invariant (t
)))
395 fold_undefer_overflow_warnings (false, NULL
, 0);
/* Success: emit any deferred warnings unless STMT suppresses them.  */
399 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt
), stmt
, 0);
404 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
405 of its operand. Return a new comparison tree or NULL_TREE if there
406 were no simplifying combines. */
409 forward_propagate_into_comparison_1 (gimple stmt
,
410 enum tree_code code
, tree type
,
413 tree tmp
= NULL_TREE
;
414 tree rhs0
= NULL_TREE
, rhs1
= NULL_TREE
;
415 bool single_use0_p
= false, single_use1_p
= false;
417 /* For comparisons use the first operand, that is likely to
418 simplify comparisons against constants. */
419 if (TREE_CODE (op0
) == SSA_NAME
)
421 gimple def_stmt
= get_prop_source_stmt (op0
, false, &single_use0_p
);
422 if (def_stmt
&& can_propagate_from (def_stmt
))
/* Substitute op0's definition and try to fold (def RHS) CODE op1.
   Require an invariant result unless op0 is single-use.  */
424 rhs0
= rhs_to_tree (TREE_TYPE (op1
), def_stmt
);
425 tmp
= combine_cond_expr_cond (stmt
, code
, type
,
426 rhs0
, op1
, !single_use0_p
);
432 /* If that wasn't successful, try the second operand. */
433 if (TREE_CODE (op1
) == SSA_NAME
)
435 gimple def_stmt
= get_prop_source_stmt (op1
, false, &single_use1_p
);
436 if (def_stmt
&& can_propagate_from (def_stmt
))
438 rhs1
= rhs_to_tree (TREE_TYPE (op0
), def_stmt
);
439 tmp
= combine_cond_expr_cond (stmt
, code
, type
,
440 op0
, rhs1
, !single_use1_p
);
446 /* If that wasn't successful either, try both operands. */
447 if (rhs0
!= NULL_TREE
448 && rhs1
!= NULL_TREE
)
449 tmp
= combine_cond_expr_cond (stmt
, code
, type
,
/* Invariant-only unless BOTH operands are single-use.  */
451 !(single_use0_p
&& single_use1_p
));
456 /* Propagate from the ssa name definition statements of the assignment
457 from a comparison at *GSI into the conditional if that simplifies it.
458 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
459 otherwise returns 0. */
462 forward_propagate_into_comparison (gimple_stmt_iterator
*gsi
)
464 gimple stmt
= gsi_stmt (*gsi
);
/* Local flag; note it shadows the file-scope 'cfg_changed' static.  */
466 bool cfg_changed
= false;
467 tree type
= TREE_TYPE (gimple_assign_lhs (stmt
));
468 tree rhs1
= gimple_assign_rhs1 (stmt
);
469 tree rhs2
= gimple_assign_rhs2 (stmt
);
471 /* Combine the comparison with defining statements. */
472 tmp
= forward_propagate_into_comparison_1 (stmt
,
473 gimple_assign_rhs_code (stmt
),
/* Only install the combined form when its type matches the LHS type.  */
475 if (tmp
&& useless_type_conversion_p (type
, TREE_TYPE (tmp
)))
477 gimple_assign_set_rhs_from_tree (gsi
, tmp
);
479 update_stmt (gsi_stmt (*gsi
));
/* Clean up the now possibly dead defining chains of both operands.  */
481 if (TREE_CODE (rhs1
) == SSA_NAME
)
482 cfg_changed
|= remove_prop_source_from_use (rhs1
);
483 if (TREE_CODE (rhs2
) == SSA_NAME
)
484 cfg_changed
|= remove_prop_source_from_use (rhs2
);
485 return cfg_changed
? 2 : 1;
491 /* Propagate from the ssa name definition statements of COND_EXPR
492 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
493 Returns zero if no statement was changed, one if there were
494 changes and two if cfg_cleanup needs to run.
496 This must be kept in sync with forward_propagate_into_cond. */
499 forward_propagate_into_gimple_cond (gimple stmt
)
502 enum tree_code code
= gimple_cond_code (stmt
);
/* Local flag; note it shadows the file-scope 'cfg_changed' static.  */
503 bool cfg_changed
= false;
504 tree rhs1
= gimple_cond_lhs (stmt
);
505 tree rhs2
= gimple_cond_rhs (stmt
);
507 /* We can do tree combining on SSA_NAME and comparison expressions. */
508 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
511 tmp
= forward_propagate_into_comparison_1 (stmt
, code
,
/* On success, report the replacement in the dump file.  */
516 if (dump_file
&& tmp
)
518 fprintf (dump_file
, "  Replaced '");
519 print_gimple_expr (dump_file
, stmt
, 0, 0);
520 fprintf (dump_file
, "' with '");
521 print_generic_expr (dump_file
, tmp
, 0);
522 fprintf (dump_file
, "'\n");
/* Install the simplified condition (unshared, as trees may be reused).  */
525 gimple_cond_set_condition_from_tree (stmt
, unshare_expr (tmp
));
/* Clean up the now possibly dead defining chains of both operands.  */
528 if (TREE_CODE (rhs1
) == SSA_NAME
)
529 cfg_changed
|= remove_prop_source_from_use (rhs1
);
530 if (TREE_CODE (rhs2
) == SSA_NAME
)
531 cfg_changed
|= remove_prop_source_from_use (rhs2
);
/* A condition folded to a constant makes an edge dead: request cleanup.  */
532 return (cfg_changed
|| is_gimple_min_invariant (tmp
)) ? 2 : 1;
535 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
536 if ((TREE_CODE (TREE_TYPE (rhs1
)) == BOOLEAN_TYPE
537 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
538 && TYPE_PRECISION (TREE_TYPE (rhs1
)) == 1))
540 && integer_zerop (rhs2
))
542 && integer_onep (rhs2
))))
544 basic_block bb
= gimple_bb (stmt
);
545 gimple_cond_set_code (stmt
, NE_EXPR
);
546 gimple_cond_set_rhs (stmt
, build_zero_cst (TREE_TYPE (rhs1
)));
/* Swap the true/false sense of both outgoing edges to compensate.  */
547 EDGE_SUCC (bb
, 0)->flags
^= (EDGE_TRUE_VALUE
|EDGE_FALSE_VALUE
);
548 EDGE_SUCC (bb
, 1)->flags
^= (EDGE_TRUE_VALUE
|EDGE_FALSE_VALUE
);
556 /* Propagate from the ssa name definition statements of COND_EXPR
557 in the rhs of statement STMT into the conditional if that simplifies it.
558 Returns true if the stmt was changed. */
561 forward_propagate_into_cond (gimple_stmt_iterator
*gsi_p
)
563 gimple stmt
= gsi_stmt (*gsi_p
);
564 tree tmp
= NULL_TREE
;
565 tree cond
= gimple_assign_rhs1 (stmt
);
568 /* We can do tree combining on SSA_NAME and comparison expressions. */
569 if (COMPARISON_CLASS_P (cond
))
570 tmp
= forward_propagate_into_comparison_1 (stmt
, TREE_CODE (cond
),
572 TREE_OPERAND (cond
, 0),
573 TREE_OPERAND (cond
, 1));
574 else if (TREE_CODE (cond
) == SSA_NAME
)
578 gimple def_stmt
= get_prop_source_stmt (name
, true, NULL
);
579 if (!def_stmt
|| !can_propagate_from (def_stmt
))
/* If the defining statement is itself a comparison, rebuild it as a
   tree condition we can substitute for COND.  */
582 code
= gimple_assign_rhs_code (def_stmt
);
583 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
584 tmp
= fold_build2_loc (gimple_location (def_stmt
),
587 gimple_assign_rhs1 (def_stmt
),
588 gimple_assign_rhs2 (def_stmt
));
/* A one-bit BIT_NOT or XOR-with-1 is a logical negation we can absorb
   by swapping the COND_EXPR arms below.  */
589 else if ((code
== BIT_NOT_EXPR
590 && TYPE_PRECISION (TREE_TYPE (cond
)) == 1)
591 || (code
== BIT_XOR_EXPR
592 && integer_onep (gimple_assign_rhs2 (def_stmt
))))
594 tmp
= gimple_assign_rhs1 (def_stmt
);
600 && is_gimple_condexpr (tmp
))
602 if (dump_file
&& tmp
)
604 fprintf (dump_file
, "  Replaced '");
605 print_generic_expr (dump_file
, cond
, 0);
606 fprintf (dump_file
, "' with '");
607 print_generic_expr (dump_file
, tmp
, 0);
608 fprintf (dump_file
, "'\n");
/* Constant conditions select one arm outright.  */
611 if (integer_onep (tmp
))
612 gimple_assign_set_rhs_from_tree (gsi_p
, gimple_assign_rhs2 (stmt
));
613 else if (integer_zerop (tmp
))
614 gimple_assign_set_rhs_from_tree (gsi_p
, gimple_assign_rhs3 (stmt
));
617 gimple_assign_set_rhs1 (stmt
, unshare_expr (tmp
));
/* Negated condition: swap the two COND_EXPR arms.  */
620 tree t
= gimple_assign_rhs2 (stmt
);
621 gimple_assign_set_rhs2 (stmt
, gimple_assign_rhs3 (stmt
));
622 gimple_assign_set_rhs3 (stmt
, t
);
/* Re-fetch STMT; the set_rhs_from_tree calls may have replaced it.  */
625 stmt
= gsi_stmt (*gsi_p
);
634 /* Propagate from the ssa name definition statements of COND_EXPR
635 values in the rhs of statement STMT into the conditional arms
636 if that simplifies it.
637 Returns true if the stmt was changed. */
640 combine_cond_exprs (gimple_stmt_iterator
*gsi_p
)
642 gimple stmt
= gsi_stmt (*gsi_p
);
643 tree cond
, val1
, val2
;
644 bool changed
= false;
646 cond
= gimple_assign_rhs1 (stmt
);
/* If the true arm is itself a COND_EXPR with the same condition,
   collapse it to that COND_EXPR's own true arm.  */
647 val1
= gimple_assign_rhs2 (stmt
);
648 if (TREE_CODE (val1
) == SSA_NAME
)
650 gimple def_stmt
= SSA_NAME_DEF_STMT (val1
);
651 if (is_gimple_assign (def_stmt
)
652 && gimple_assign_rhs_code (def_stmt
) == gimple_assign_rhs_code (stmt
)
653 && operand_equal_p (gimple_assign_rhs1 (def_stmt
), cond
, 0))
655 val1
= unshare_expr (gimple_assign_rhs2 (def_stmt
));
656 gimple_assign_set_rhs2 (stmt
, val1
);
/* Symmetric case for the false arm: collapse to its false arm.  */
660 val2
= gimple_assign_rhs3 (stmt
);
661 if (TREE_CODE (val2
) == SSA_NAME
)
663 gimple def_stmt
= SSA_NAME_DEF_STMT (val2
);
664 if (is_gimple_assign (def_stmt
)
665 && gimple_assign_rhs_code (def_stmt
) == gimple_assign_rhs_code (stmt
)
666 && operand_equal_p (gimple_assign_rhs1 (def_stmt
), cond
, 0))
668 val2
= unshare_expr (gimple_assign_rhs3 (def_stmt
));
669 gimple_assign_set_rhs3 (stmt
, val2
);
/* Both arms equal: the condition is irrelevant, use the value directly.  */
673 if (operand_equal_p (val1
, val2
, 0))
675 gimple_assign_set_rhs_from_tree (gsi_p
, val1
);
676 stmt
= gsi_stmt (*gsi_p
);
686 /* We've just substituted an ADDR_EXPR into stmt. Update all the
687 relevant data structures to match. */
690 tidy_after_forward_propagate_addr (gimple stmt
)
692 /* We may have turned a trapping insn into a non-trapping insn. */
693 if (maybe_clean_or_replace_eh_stmt (stmt
, stmt
)
694 && gimple_purge_dead_eh_edges (gimple_bb (stmt
)))
/* Re-check TREE_INVARIANT/TREE_CONSTANT on the substituted ADDR_EXPR.  */
697 if (TREE_CODE (gimple_assign_rhs1 (stmt
)) == ADDR_EXPR
)
698 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt
));
701 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
702 ADDR_EXPR <whatever>.
704 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
705 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
706 node or for recovery of array indexing from pointer arithmetic.
708 Return true if the propagation was successful (the propagation can
709 be not totally successful, yet things may have been changed). */
712 forward_propagate_addr_expr_1 (tree name
, tree def_rhs
,
713 gimple_stmt_iterator
*use_stmt_gsi
,
716 tree lhs
, rhs
, rhs2
, array_ref
;
717 gimple use_stmt
= gsi_stmt (*use_stmt_gsi
);
718 enum tree_code rhs_code
;
/* Caller contract: DEF_RHS must be an ADDR_EXPR.  */
721 gcc_assert (TREE_CODE (def_rhs
) == ADDR_EXPR
);
723 lhs
= gimple_assign_lhs (use_stmt
);
724 rhs_code
= gimple_assign_rhs_code (use_stmt
);
725 rhs
= gimple_assign_rhs1 (use_stmt
);
727 /* Trivial cases. The use statement could be a trivial copy or a
728 useless conversion. Recurse to the uses of the lhs as copyprop does
729 not copy through different variant pointers and FRE does not catch
730 all useless conversions. Treat the case of a single-use name and
731 a conversion to def_rhs type separate, though. */
732 if (TREE_CODE (lhs
) == SSA_NAME
733 && ((rhs_code
== SSA_NAME
&& rhs
== name
)
734 || CONVERT_EXPR_CODE_P (rhs_code
)))
736 /* Only recurse if we don't deal with a single use or we cannot
737 do the propagation to the current statement. In particular
738 we can end up with a conversion needed for a non-invariant
739 address which we cannot do in a single statement. */
741 || (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (def_rhs
))
742 && (!is_gimple_min_invariant (def_rhs
)
743 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
744 && POINTER_TYPE_P (TREE_TYPE (def_rhs
))
745 && (TYPE_PRECISION (TREE_TYPE (lhs
))
746 > TYPE_PRECISION (TREE_TYPE (def_rhs
)))))))
747 return forward_propagate_addr_expr (lhs
, def_rhs
);
/* Single-use copy/conversion: substitute DEF_RHS in place.  */
749 gimple_assign_set_rhs1 (use_stmt
, unshare_expr (def_rhs
));
750 if (useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (def_rhs
)))
751 gimple_assign_set_rhs_code (use_stmt
, TREE_CODE (def_rhs
));
/* Otherwise keep an explicit conversion.  */
753 gimple_assign_set_rhs_code (use_stmt
, NOP_EXPR
);
757 /* Propagate through constant pointer adjustments. */
758 if (TREE_CODE (lhs
) == SSA_NAME
759 && rhs_code
== POINTER_PLUS_EXPR
761 && TREE_CODE (gimple_assign_rhs2 (use_stmt
)) == INTEGER_CST
)
764 /* As we come here with non-invariant addresses in def_rhs we need
765 to make sure we can build a valid constant offsetted address
766 for further propagation. Simply rely on fold building that
767 and check after the fact. */
768 new_def_rhs
= fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (rhs
)),
770 fold_convert (ptr_type_node
,
771 gimple_assign_rhs2 (use_stmt
)));
772 if (TREE_CODE (new_def_rhs
) == MEM_REF
773 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs
, 0)))
775 new_def_rhs
= build_fold_addr_expr_with_type (new_def_rhs
,
778 /* Recurse. If we could propagate into all uses of lhs do not
779 bother to replace into the current use but just pretend we did. */
780 if (TREE_CODE (new_def_rhs
) == ADDR_EXPR
781 && forward_propagate_addr_expr (lhs
, new_def_rhs
))
784 if (useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (new_def_rhs
)))
785 gimple_assign_set_rhs_with_ops (use_stmt_gsi
, TREE_CODE (new_def_rhs
),
786 new_def_rhs
, NULL_TREE
);
787 else if (is_gimple_min_invariant (new_def_rhs
))
788 gimple_assign_set_rhs_with_ops (use_stmt_gsi
, NOP_EXPR
,
789 new_def_rhs
, NULL_TREE
);
792 gcc_assert (gsi_stmt (*use_stmt_gsi
) == use_stmt
);
793 update_stmt (use_stmt
);
797 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
798 ADDR_EXPR will not appear on the LHS. */
799 lhs
= gimple_assign_lhs (use_stmt
);
800 while (handled_component_p (lhs
))
801 lhs
= TREE_OPERAND (lhs
, 0);
803 /* Now see if the LHS node is a MEM_REF using NAME. If so,
804 propagate the ADDR_EXPR into the use of NAME and fold the result. */
805 if (TREE_CODE (lhs
) == MEM_REF
806 && TREE_OPERAND (lhs
, 0) == name
)
809 HOST_WIDE_INT def_rhs_offset
;
810 /* If the address is invariant we can always fold it. */
811 if ((def_rhs_base
= get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs
, 0),
/* Accumulate the MEM_REF offset with the address's unit offset.  */
814 double_int off
= mem_ref_offset (lhs
);
816 off
= double_int_add (off
,
817 shwi_to_double_int (def_rhs_offset
));
818 if (TREE_CODE (def_rhs_base
) == MEM_REF
)
820 off
= double_int_add (off
, mem_ref_offset (def_rhs_base
));
821 new_ptr
= TREE_OPERAND (def_rhs_base
, 0);
824 new_ptr
= build_fold_addr_expr (def_rhs_base
);
/* Rewrite the MEM_REF in place with the new base and folded offset.  */
825 TREE_OPERAND (lhs
, 0) = new_ptr
;
826 TREE_OPERAND (lhs
, 1)
827 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs
, 1)), off
);
828 tidy_after_forward_propagate_addr (use_stmt
);
829 /* Continue propagating into the RHS if this was not the only use. */
833 /* If the LHS is a plain dereference and the value type is the same as
834 that of the pointed-to type of the address we can put the
835 dereferenced address on the LHS preserving the original alias-type. */
836 else if (gimple_assign_lhs (use_stmt
) == lhs
837 && integer_zerop (TREE_OPERAND (lhs
, 1))
838 && useless_type_conversion_p
839 (TREE_TYPE (TREE_OPERAND (def_rhs
, 0)),
840 TREE_TYPE (gimple_assign_rhs1 (use_stmt
))))
842 tree
*def_rhs_basep
= &TREE_OPERAND (def_rhs
, 0);
843 tree new_offset
, new_base
, saved
, new_lhs
;
/* Find the innermost base of the address operand.  */
844 while (handled_component_p (*def_rhs_basep
))
845 def_rhs_basep
= &TREE_OPERAND (*def_rhs_basep
, 0);
/* Temporarily splice a MEM_REF in place of the base, take an unshared
   copy for the new LHS, then restore the original tree ('saved').  */
846 saved
= *def_rhs_basep
;
847 if (TREE_CODE (*def_rhs_basep
) == MEM_REF
)
849 new_base
= TREE_OPERAND (*def_rhs_basep
, 0);
850 new_offset
= fold_convert (TREE_TYPE (TREE_OPERAND (lhs
, 1)),
851 TREE_OPERAND (*def_rhs_basep
, 1));
855 new_base
= build_fold_addr_expr (*def_rhs_basep
);
856 new_offset
= TREE_OPERAND (lhs
, 1);
858 *def_rhs_basep
= build2 (MEM_REF
, TREE_TYPE (*def_rhs_basep
),
859 new_base
, new_offset
);
/* Carry over volatility/side-effect/trap flags from the old LHS.  */
860 TREE_THIS_VOLATILE (*def_rhs_basep
) = TREE_THIS_VOLATILE (lhs
);
861 TREE_SIDE_EFFECTS (*def_rhs_basep
) = TREE_SIDE_EFFECTS (lhs
);
862 TREE_THIS_NOTRAP (*def_rhs_basep
) = TREE_THIS_NOTRAP (lhs
);
863 new_lhs
= unshare_expr (TREE_OPERAND (def_rhs
, 0));
864 gimple_assign_set_lhs (use_stmt
, new_lhs
);
865 TREE_THIS_VOLATILE (new_lhs
) = TREE_THIS_VOLATILE (lhs
);
866 TREE_SIDE_EFFECTS (new_lhs
) = TREE_SIDE_EFFECTS (lhs
);
867 *def_rhs_basep
= saved
;
868 tidy_after_forward_propagate_addr (use_stmt
);
869 /* Continue propagating into the RHS if this was not the
875 /* We can have a struct assignment dereferencing our name twice.
876 Note that we didn't propagate into the lhs to not falsely
877 claim we did when propagating into the rhs. */
881 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
882 nodes from the RHS. */
883 rhs
= gimple_assign_rhs1 (use_stmt
);
884 if (TREE_CODE (rhs
) == ADDR_EXPR
)
885 rhs
= TREE_OPERAND (rhs
, 0);
886 while (handled_component_p (rhs
))
887 rhs
= TREE_OPERAND (rhs
, 0);
889 /* Now see if the RHS node is a MEM_REF using NAME. If so,
890 propagate the ADDR_EXPR into the use of NAME and fold the result. */
891 if (TREE_CODE (rhs
) == MEM_REF
892 && TREE_OPERAND (rhs
, 0) == name
)
895 HOST_WIDE_INT def_rhs_offset
;
896 if ((def_rhs_base
= get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs
, 0),
/* Mirror of the LHS MEM_REF case above, applied to the RHS.  */
899 double_int off
= mem_ref_offset (rhs
);
901 off
= double_int_add (off
,
902 shwi_to_double_int (def_rhs_offset
));
903 if (TREE_CODE (def_rhs_base
) == MEM_REF
)
905 off
= double_int_add (off
, mem_ref_offset (def_rhs_base
));
906 new_ptr
= TREE_OPERAND (def_rhs_base
, 0);
909 new_ptr
= build_fold_addr_expr (def_rhs_base
);
910 TREE_OPERAND (rhs
, 0) = new_ptr
;
911 TREE_OPERAND (rhs
, 1)
912 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs
, 1)), off
);
913 fold_stmt_inplace (use_stmt_gsi
);
914 tidy_after_forward_propagate_addr (use_stmt
);
917 /* If the RHS is a plain dereference and the value type is the same as
918 that of the pointed-to type of the address we can put the
919 dereferenced address on the RHS preserving the original alias-type. */
920 else if (gimple_assign_rhs1 (use_stmt
) == rhs
921 && integer_zerop (TREE_OPERAND (rhs
, 1))
922 && useless_type_conversion_p
923 (TREE_TYPE (gimple_assign_lhs (use_stmt
)),
924 TREE_TYPE (TREE_OPERAND (def_rhs
, 0))))
926 tree
*def_rhs_basep
= &TREE_OPERAND (def_rhs
, 0);
927 tree new_offset
, new_base
, saved
, new_rhs
;
928 while (handled_component_p (*def_rhs_basep
))
929 def_rhs_basep
= &TREE_OPERAND (*def_rhs_basep
, 0);
/* Same temporary-splice technique as the LHS case above.  */
930 saved
= *def_rhs_basep
;
931 if (TREE_CODE (*def_rhs_basep
) == MEM_REF
)
933 new_base
= TREE_OPERAND (*def_rhs_basep
, 0);
934 new_offset
= fold_convert (TREE_TYPE (TREE_OPERAND (rhs
, 1)),
935 TREE_OPERAND (*def_rhs_basep
, 1));
939 new_base
= build_fold_addr_expr (*def_rhs_basep
);
940 new_offset
= TREE_OPERAND (rhs
, 1);
942 *def_rhs_basep
= build2 (MEM_REF
, TREE_TYPE (*def_rhs_basep
),
943 new_base
, new_offset
);
944 TREE_THIS_VOLATILE (*def_rhs_basep
) = TREE_THIS_VOLATILE (rhs
);
945 TREE_SIDE_EFFECTS (*def_rhs_basep
) = TREE_SIDE_EFFECTS (rhs
);
946 TREE_THIS_NOTRAP (*def_rhs_basep
) = TREE_THIS_NOTRAP (rhs
);
947 new_rhs
= unshare_expr (TREE_OPERAND (def_rhs
, 0));
948 gimple_assign_set_rhs1 (use_stmt
, new_rhs
);
949 TREE_THIS_VOLATILE (new_rhs
) = TREE_THIS_VOLATILE (rhs
);
950 TREE_SIDE_EFFECTS (new_rhs
) = TREE_SIDE_EFFECTS (rhs
);
951 *def_rhs_basep
= saved
;
952 fold_stmt_inplace (use_stmt_gsi
);
953 tidy_after_forward_propagate_addr (use_stmt
);
958 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
960 if (gimple_assign_rhs_code (use_stmt
) != POINTER_PLUS_EXPR
961 || gimple_assign_rhs1 (use_stmt
) != name
)
964 /* The remaining cases are all for turning pointer arithmetic into
965 array indexing. They only apply when we have the address of
966 element zero in an array. If that is not the case then there
968 array_ref
= TREE_OPERAND (def_rhs
, 0);
969 if ((TREE_CODE (array_ref
) != ARRAY_REF
970 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref
, 0))) != ARRAY_TYPE
971 || TREE_CODE (TREE_OPERAND (array_ref
, 1)) != INTEGER_CST
)
972 && TREE_CODE (TREE_TYPE (array_ref
)) != ARRAY_TYPE
)
975 rhs2
= gimple_assign_rhs2 (use_stmt
);
976 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
977 if (TREE_CODE (rhs2
) == INTEGER_CST
)
979 tree new_rhs
= build1_loc (gimple_location (use_stmt
),
980 ADDR_EXPR
, TREE_TYPE (def_rhs
),
981 fold_build2 (MEM_REF
,
982 TREE_TYPE (TREE_TYPE (def_rhs
)),
983 unshare_expr (def_rhs
),
984 fold_convert (ptr_type_node
,
986 gimple_assign_set_rhs_from_tree (use_stmt_gsi
, new_rhs
);
987 use_stmt
= gsi_stmt (*use_stmt_gsi
);
988 update_stmt (use_stmt
);
989 tidy_after_forward_propagate_addr (use_stmt
);
996 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
998 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
999 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1000 node or for recovery of array indexing from pointer arithmetic.
1001 Returns true, if all uses have been propagated into. */
1004 forward_propagate_addr_expr (tree name
, tree rhs
)
1006 int stmt_loop_depth
= bb_loop_depth (gimple_bb (SSA_NAME_DEF_STMT (name
)));
1007 imm_use_iterator iter
;
1010 bool single_use_p
= has_single_use (name
);
/* Visit every immediate use of NAME and try to propagate into it.  */
1012 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, name
)
1017 /* If the use is not in a simple assignment statement, then
1018 there is nothing we can do. */
1019 if (gimple_code (use_stmt
) != GIMPLE_ASSIGN
)
1021 if (!is_gimple_debug (use_stmt
))
1026 /* If the use is in a deeper loop nest, then we do not want
1027 to propagate non-invariant ADDR_EXPRs into the loop as that
1028 is likely adding expression evaluations into the loop. */
1029 if (bb_loop_depth (gimple_bb (use_stmt
)) > stmt_loop_depth
1030 && !is_gimple_min_invariant (rhs
))
1037 gimple_stmt_iterator gsi
= gsi_for_stmt (use_stmt
);
1038 result
= forward_propagate_addr_expr_1 (name
, rhs
, &gsi
,
1040 /* If the use has moved to a different statement adjust
1041 the update machinery for the old statement too. */
1042 if (use_stmt
!= gsi_stmt (gsi
))
1044 update_stmt (use_stmt
);
1045 use_stmt
= gsi_stmt (gsi
);
1048 update_stmt (use_stmt
);
1052 /* Remove intermediate now unused copy and conversion chains. */
1053 use_rhs
= gimple_assign_rhs1 (use_stmt
);
1055 && TREE_CODE (gimple_assign_lhs (use_stmt
)) == SSA_NAME
1056 && TREE_CODE (use_rhs
) == SSA_NAME
1057 && has_zero_uses (gimple_assign_lhs (use_stmt
)))
1059 gimple_stmt_iterator gsi
= gsi_for_stmt (use_stmt
);
1060 release_defs (use_stmt
);
1061 gsi_remove (&gsi
, true);
/* Success only if every use was propagated into and NAME is now dead.  */
1065 return all
&& has_zero_uses (name
);
1069 /* Forward propagate the comparison defined in *DEFGSI like
1070 cond_1 = x CMP y to uses of the form
1074 Returns true if stmt is now unused. Advance DEFGSI to the next
1078 forward_propagate_comparison (gimple_stmt_iterator
*defgsi
)
1080 gimple stmt
= gsi_stmt (*defgsi
);
1081 tree name
= gimple_assign_lhs (stmt
);
1083 tree tmp
= NULL_TREE
;
1084 gimple_stmt_iterator gsi
;
1085 enum tree_code code
;
1088 /* Don't propagate ssa names that occur in abnormal phis. */
1089 if ((TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
1090 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt
)))
1091 || (TREE_CODE (gimple_assign_rhs2 (stmt
)) == SSA_NAME
1092 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt
))))
1095 /* Do not un-cse comparisons. But propagate through copies. */
1096 use_stmt
= get_prop_dest_stmt (name
, &name
);
1098 || !is_gimple_assign (use_stmt
))
1101 code
= gimple_assign_rhs_code (use_stmt
);
1102 lhs
= gimple_assign_lhs (use_stmt
);
1103 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs
)))
1106 /* We can propagate the condition into a statement that
1107 computes the logical negation of the comparison result. */
1108 if ((code
== BIT_NOT_EXPR
1109 && TYPE_PRECISION (TREE_TYPE (lhs
)) == 1)
1110 || (code
== BIT_XOR_EXPR
1111 && integer_onep (gimple_assign_rhs2 (use_stmt
))))
1113 tree type
= TREE_TYPE (gimple_assign_rhs1 (stmt
));
/* NaN semantics decide whether the comparison is invertible.  */
1114 bool nans
= HONOR_NANS (TYPE_MODE (type
));
1115 enum tree_code inv_code
;
1116 inv_code
= invert_tree_comparison (gimple_assign_rhs_code (stmt
), nans
);
1117 if (inv_code
== ERROR_MARK
)
/* Build the inverted comparison to substitute for the negation.  */
1120 tmp
= build2 (inv_code
, TREE_TYPE (lhs
), gimple_assign_rhs1 (stmt
),
1121 gimple_assign_rhs2 (stmt
));
/* Install the propagated comparison into the use statement.  */
1126 gsi
= gsi_for_stmt (use_stmt
);
1127 gimple_assign_set_rhs_from_tree (&gsi
, unshare_expr (tmp
));
1128 use_stmt
= gsi_stmt (gsi
);
1129 update_stmt (use_stmt
);
1131 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1133 fprintf (dump_file
, "  Replaced '");
1134 print_gimple_expr (dump_file
, stmt
, 0, dump_flags
);
1135 fprintf (dump_file
, "' with '");
1136 print_gimple_expr (dump_file
, use_stmt
, 0, dump_flags
);
1137 fprintf (dump_file
, "'\n");
1140 /* When we remove stmt now the iterator defgsi goes off it's current
1141 sequence, hence advance it now. */
1144 /* Remove defining statements. */
1145 return remove_prop_source_from_use (name
);
1153 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1154 If so, we can change STMT into lhs = y which can later be copy
1155 propagated. Similarly for negation.
1157 This could trivially be formulated as a forward propagation
1158 to immediate uses. However, we already had an implementation
1159 from DOM which used backward propagation via the use-def links.
1161 It turns out that backward propagation is actually faster as
1162 there's less work to do for each NOT/NEG expression we find.
1163 Backwards propagation needs to look at the statement in a single
1164 backlink. Forward propagation needs to look at potentially more
1165 than one forward link.
1167 Returns true when the statement was changed. */
1170 simplify_not_neg_expr (gimple_stmt_iterator
*gsi_p
)
1172 gimple stmt
= gsi_stmt (*gsi_p
);
1173 tree rhs
= gimple_assign_rhs1 (stmt
);
1174 gimple rhs_def_stmt
= SSA_NAME_DEF_STMT (rhs
);
1176 /* See if the RHS_DEF_STMT has the same form as our statement. */
1177 if (is_gimple_assign (rhs_def_stmt
)
1178 && gimple_assign_rhs_code (rhs_def_stmt
) == gimple_assign_rhs_code (stmt
))
1180 tree rhs_def_operand
= gimple_assign_rhs1 (rhs_def_stmt
);
1182 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1183 if (TREE_CODE (rhs_def_operand
) == SSA_NAME
1184 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand
))
1186 gimple_assign_set_rhs_from_tree (gsi_p
, rhs_def_operand
);
1187 stmt
= gsi_stmt (*gsi_p
);
1196 /* Helper function for simplify_gimple_switch. Remove case labels that
1197 have values outside the range of the new type. */
1200 simplify_gimple_switch_label_vec (gimple stmt
, tree index_type
)
1202 unsigned int branch_num
= gimple_switch_num_labels (stmt
);
1203 VEC(tree
, heap
) *labels
= VEC_alloc (tree
, heap
, branch_num
);
1204 unsigned int i
, len
;
1206 /* Collect the existing case labels in a VEC, and preprocess it as if
1207 we are gimplifying a GENERIC SWITCH_EXPR. */
1208 for (i
= 1; i
< branch_num
; i
++)
1209 VEC_quick_push (tree
, labels
, gimple_switch_label (stmt
, i
));
1210 preprocess_case_label_vec_for_gimple (labels
, index_type
, NULL
);
1212 /* If any labels were removed, replace the existing case labels
1213 in the GIMPLE_SWITCH statement with the correct ones.
1214 Note that the type updates were done in-place on the case labels,
1215 so we only have to replace the case labels in the GIMPLE_SWITCH
1216 if the number of labels changed. */
1217 len
= VEC_length (tree
, labels
);
1218 if (len
< branch_num
- 1)
1220 bitmap target_blocks
;
1224 /* Corner case: *all* case labels have been removed as being
1225 out-of-range for INDEX_TYPE. Push one label and let the
1226 CFG cleanups deal with this further. */
1231 label
= CASE_LABEL (gimple_switch_default_label (stmt
));
1232 elt
= build_case_label (build_int_cst (index_type
, 0), NULL
, label
);
1233 VEC_quick_push (tree
, labels
, elt
);
1237 for (i
= 0; i
< VEC_length (tree
, labels
); i
++)
1238 gimple_switch_set_label (stmt
, i
+ 1, VEC_index (tree
, labels
, i
));
1239 for (i
++ ; i
< branch_num
; i
++)
1240 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1241 gimple_switch_set_num_labels (stmt
, len
+ 1);
1243 /* Cleanup any edges that are now dead. */
1244 target_blocks
= BITMAP_ALLOC (NULL
);
1245 for (i
= 0; i
< gimple_switch_num_labels (stmt
); i
++)
1247 tree elt
= gimple_switch_label (stmt
, i
);
1248 basic_block target
= label_to_block (CASE_LABEL (elt
));
1249 bitmap_set_bit (target_blocks
, target
->index
);
1251 for (ei
= ei_start (gimple_bb (stmt
)->succs
); (e
= ei_safe_edge (ei
)); )
1253 if (! bitmap_bit_p (target_blocks
, e
->dest
->index
))
1257 free_dominance_info (CDI_DOMINATORS
);
1262 BITMAP_FREE (target_blocks
);
1265 VEC_free (tree
, heap
, labels
);
1268 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1269 the condition which we may be able to optimize better. */
1272 simplify_gimple_switch (gimple stmt
)
1274 tree cond
= gimple_switch_index (stmt
);
1278 /* The optimization that we really care about is removing unnecessary
1279 casts. That will let us do much better in propagating the inferred
1280 constant at the switch target. */
1281 if (TREE_CODE (cond
) == SSA_NAME
)
1283 def_stmt
= SSA_NAME_DEF_STMT (cond
);
1284 if (is_gimple_assign (def_stmt
))
1286 if (gimple_assign_rhs_code (def_stmt
) == NOP_EXPR
)
1291 def
= gimple_assign_rhs1 (def_stmt
);
1293 to
= TREE_TYPE (cond
);
1294 ti
= TREE_TYPE (def
);
1296 /* If we have an extension that preserves value, then we
1297 can copy the source value into the switch. */
1299 need_precision
= TYPE_PRECISION (ti
);
1301 if (! INTEGRAL_TYPE_P (ti
))
1303 else if (TYPE_UNSIGNED (to
) && !TYPE_UNSIGNED (ti
))
1305 else if (!TYPE_UNSIGNED (to
) && TYPE_UNSIGNED (ti
))
1306 need_precision
+= 1;
1307 if (TYPE_PRECISION (to
) < need_precision
)
1312 gimple_switch_set_index (stmt
, def
);
1313 simplify_gimple_switch_label_vec (stmt
, ti
);
1324 /* For pointers p2 and p1 return p2 - p1 if the
1325 difference is known and constant, otherwise return NULL. */
1328 constant_pointer_difference (tree p1
, tree p2
)
1331 #define CPD_ITERATIONS 5
1332 tree exps
[2][CPD_ITERATIONS
];
1333 tree offs
[2][CPD_ITERATIONS
];
1336 for (i
= 0; i
< 2; i
++)
1338 tree p
= i
? p1
: p2
;
1339 tree off
= size_zero_node
;
1341 enum tree_code code
;
1343 /* For each of p1 and p2 we need to iterate at least
1344 twice, to handle ADDR_EXPR directly in p1/p2,
1345 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1346 on definition's stmt RHS. Iterate a few extra times. */
1350 if (!POINTER_TYPE_P (TREE_TYPE (p
)))
1352 if (TREE_CODE (p
) == ADDR_EXPR
)
1354 tree q
= TREE_OPERAND (p
, 0);
1355 HOST_WIDE_INT offset
;
1356 tree base
= get_addr_base_and_unit_offset (q
, &offset
);
1361 off
= size_binop (PLUS_EXPR
, off
, size_int (offset
));
1363 if (TREE_CODE (q
) == MEM_REF
1364 && TREE_CODE (TREE_OPERAND (q
, 0)) == SSA_NAME
)
1366 p
= TREE_OPERAND (q
, 0);
1367 off
= size_binop (PLUS_EXPR
, off
,
1368 double_int_to_tree (sizetype
,
1369 mem_ref_offset (q
)));
1378 if (TREE_CODE (p
) != SSA_NAME
)
1382 if (j
== CPD_ITERATIONS
)
1384 stmt
= SSA_NAME_DEF_STMT (p
);
1385 if (!is_gimple_assign (stmt
) || gimple_assign_lhs (stmt
) != p
)
1387 code
= gimple_assign_rhs_code (stmt
);
1388 if (code
== POINTER_PLUS_EXPR
)
1390 if (TREE_CODE (gimple_assign_rhs2 (stmt
)) != INTEGER_CST
)
1392 off
= size_binop (PLUS_EXPR
, off
, gimple_assign_rhs2 (stmt
));
1393 p
= gimple_assign_rhs1 (stmt
);
1395 else if (code
== ADDR_EXPR
|| code
== NOP_EXPR
)
1396 p
= gimple_assign_rhs1 (stmt
);
1404 for (i
= 0; i
< cnt
[0]; i
++)
1405 for (j
= 0; j
< cnt
[1]; j
++)
1406 if (exps
[0][i
] == exps
[1][j
])
1407 return size_binop (MINUS_EXPR
, offs
[0][i
], offs
[1][j
]);
1412 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1414 memcpy (p, "abcd", 4);
1415 memset (p + 4, ' ', 3);
1417 memcpy (p, "abcd ", 7);
1418 call if the latter can be stored by pieces during expansion. */
1421 simplify_builtin_call (gimple_stmt_iterator
*gsi_p
, tree callee2
)
1423 gimple stmt1
, stmt2
= gsi_stmt (*gsi_p
);
1424 tree vuse
= gimple_vuse (stmt2
);
1427 stmt1
= SSA_NAME_DEF_STMT (vuse
);
1429 switch (DECL_FUNCTION_CODE (callee2
))
1431 case BUILT_IN_MEMSET
:
1432 if (gimple_call_num_args (stmt2
) != 3
1433 || gimple_call_lhs (stmt2
)
1435 || BITS_PER_UNIT
!= 8)
1440 tree ptr1
, src1
, str1
, off1
, len1
, lhs1
;
1441 tree ptr2
= gimple_call_arg (stmt2
, 0);
1442 tree val2
= gimple_call_arg (stmt2
, 1);
1443 tree len2
= gimple_call_arg (stmt2
, 2);
1444 tree diff
, vdef
, new_str_cst
;
1446 unsigned int ptr1_align
;
1447 unsigned HOST_WIDE_INT src_len
;
1449 use_operand_p use_p
;
1451 if (!host_integerp (val2
, 0)
1452 || !host_integerp (len2
, 1))
1454 if (is_gimple_call (stmt1
))
1456 /* If first stmt is a call, it needs to be memcpy
1457 or mempcpy, with string literal as second argument and
1459 callee1
= gimple_call_fndecl (stmt1
);
1460 if (callee1
== NULL_TREE
1461 || DECL_BUILT_IN_CLASS (callee1
) != BUILT_IN_NORMAL
1462 || gimple_call_num_args (stmt1
) != 3)
1464 if (DECL_FUNCTION_CODE (callee1
) != BUILT_IN_MEMCPY
1465 && DECL_FUNCTION_CODE (callee1
) != BUILT_IN_MEMPCPY
)
1467 ptr1
= gimple_call_arg (stmt1
, 0);
1468 src1
= gimple_call_arg (stmt1
, 1);
1469 len1
= gimple_call_arg (stmt1
, 2);
1470 lhs1
= gimple_call_lhs (stmt1
);
1471 if (!host_integerp (len1
, 1))
1473 str1
= string_constant (src1
, &off1
);
1474 if (str1
== NULL_TREE
)
1476 if (!host_integerp (off1
, 1)
1477 || compare_tree_int (off1
, TREE_STRING_LENGTH (str1
) - 1) > 0
1478 || compare_tree_int (len1
, TREE_STRING_LENGTH (str1
)
1479 - tree_low_cst (off1
, 1)) > 0
1480 || TREE_CODE (TREE_TYPE (str1
)) != ARRAY_TYPE
1481 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1
)))
1482 != TYPE_MODE (char_type_node
))
1485 else if (gimple_assign_single_p (stmt1
))
1487 /* Otherwise look for length 1 memcpy optimized into
1489 ptr1
= gimple_assign_lhs (stmt1
);
1490 src1
= gimple_assign_rhs1 (stmt1
);
1491 if (TREE_CODE (ptr1
) != MEM_REF
1492 || TYPE_MODE (TREE_TYPE (ptr1
)) != TYPE_MODE (char_type_node
)
1493 || !host_integerp (src1
, 0))
1495 ptr1
= build_fold_addr_expr (ptr1
);
1496 callee1
= NULL_TREE
;
1497 len1
= size_one_node
;
1499 off1
= size_zero_node
;
1505 diff
= constant_pointer_difference (ptr1
, ptr2
);
1506 if (diff
== NULL
&& lhs1
!= NULL
)
1508 diff
= constant_pointer_difference (lhs1
, ptr2
);
1509 if (DECL_FUNCTION_CODE (callee1
) == BUILT_IN_MEMPCPY
1511 diff
= size_binop (PLUS_EXPR
, diff
,
1512 fold_convert (sizetype
, len1
));
1514 /* If the difference between the second and first destination pointer
1515 is not constant, or is bigger than memcpy length, bail out. */
1517 || !host_integerp (diff
, 1)
1518 || tree_int_cst_lt (len1
, diff
))
1521 /* Use maximum of difference plus memset length and memcpy length
1522 as the new memcpy length, if it is too big, bail out. */
1523 src_len
= tree_low_cst (diff
, 1);
1524 src_len
+= tree_low_cst (len2
, 1);
1525 if (src_len
< (unsigned HOST_WIDE_INT
) tree_low_cst (len1
, 1))
1526 src_len
= tree_low_cst (len1
, 1);
1530 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1531 with bigger length will return different result. */
1532 if (lhs1
!= NULL_TREE
1533 && DECL_FUNCTION_CODE (callee1
) == BUILT_IN_MEMPCPY
1534 && (TREE_CODE (lhs1
) != SSA_NAME
1535 || !single_imm_use (lhs1
, &use_p
, &use_stmt
)
1536 || use_stmt
!= stmt2
))
1539 /* If anything reads memory in between memcpy and memset
1540 call, the modified memcpy call might change it. */
1541 vdef
= gimple_vdef (stmt1
);
1543 && (!single_imm_use (vdef
, &use_p
, &use_stmt
)
1544 || use_stmt
!= stmt2
))
1547 ptr1_align
= get_pointer_alignment (ptr1
);
1548 /* Construct the new source string literal. */
1549 src_buf
= XALLOCAVEC (char, src_len
+ 1);
1552 TREE_STRING_POINTER (str1
) + tree_low_cst (off1
, 1),
1553 tree_low_cst (len1
, 1));
1555 src_buf
[0] = tree_low_cst (src1
, 0);
1556 memset (src_buf
+ tree_low_cst (diff
, 1),
1557 tree_low_cst (val2
, 1), tree_low_cst (len2
, 1));
1558 src_buf
[src_len
] = '\0';
1559 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1560 handle embedded '\0's. */
1561 if (strlen (src_buf
) != src_len
)
1563 rtl_profile_for_bb (gimple_bb (stmt2
));
1564 /* If the new memcpy wouldn't be emitted by storing the literal
1565 by pieces, this optimization might enlarge .rodata too much,
1566 as commonly used string literals couldn't be shared any
1568 if (!can_store_by_pieces (src_len
,
1569 builtin_strncpy_read_str
,
1570 src_buf
, ptr1_align
, false))
1573 new_str_cst
= build_string_literal (src_len
, src_buf
);
1576 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1578 if (lhs1
&& DECL_FUNCTION_CODE (callee1
) == BUILT_IN_MEMPCPY
)
1579 gimple_call_set_lhs (stmt1
, NULL_TREE
);
1580 gimple_call_set_arg (stmt1
, 1, new_str_cst
);
1581 gimple_call_set_arg (stmt1
, 2,
1582 build_int_cst (TREE_TYPE (len1
), src_len
));
1583 update_stmt (stmt1
);
1584 unlink_stmt_vdef (stmt2
);
1585 gsi_remove (gsi_p
, true);
1586 release_defs (stmt2
);
1587 if (lhs1
&& DECL_FUNCTION_CODE (callee1
) == BUILT_IN_MEMPCPY
)
1588 release_ssa_name (lhs1
);
1593 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1594 assignment, remove STMT1 and change memset call into
1596 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt1
);
1598 if (!is_gimple_val (ptr1
))
1599 ptr1
= force_gimple_operand_gsi (gsi_p
, ptr1
, true, NULL_TREE
,
1600 true, GSI_SAME_STMT
);
1601 gimple_call_set_fndecl (stmt2
,
1602 builtin_decl_explicit (BUILT_IN_MEMCPY
));
1603 gimple_call_set_arg (stmt2
, 0, ptr1
);
1604 gimple_call_set_arg (stmt2
, 1, new_str_cst
);
1605 gimple_call_set_arg (stmt2
, 2,
1606 build_int_cst (TREE_TYPE (len2
), src_len
));
1607 unlink_stmt_vdef (stmt1
);
1608 gsi_remove (&gsi
, true);
1609 release_defs (stmt1
);
1610 update_stmt (stmt2
);
1621 /* Checks if expression has type of one-bit precision, or is a known
1622 truth-valued expression. */
1624 truth_valued_ssa_name (tree name
)
1627 tree type
= TREE_TYPE (name
);
1629 if (!INTEGRAL_TYPE_P (type
))
1631 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1632 necessarily one and so ~X is not equal to !X. */
1633 if (TYPE_PRECISION (type
) == 1)
1635 def
= SSA_NAME_DEF_STMT (name
);
1636 if (is_gimple_assign (def
))
1637 return truth_value_p (gimple_assign_rhs_code (def
));
1641 /* Helper routine for simplify_bitwise_binary_1 function.
1642 Return for the SSA name NAME the expression X if it mets condition
1643 NAME = !X. Otherwise return NULL_TREE.
1644 Detected patterns for NAME = !X are:
1645 !X and X == 0 for X with integral type.
1646 X ^ 1, X != 1,or ~X for X with integral type with precision of one. */
1648 lookup_logical_inverted_value (tree name
)
1651 enum tree_code code
;
1654 /* If name has none-intergal type, or isn't a SSA_NAME, then
1656 if (TREE_CODE (name
) != SSA_NAME
1657 || !INTEGRAL_TYPE_P (TREE_TYPE (name
)))
1659 def
= SSA_NAME_DEF_STMT (name
);
1660 if (!is_gimple_assign (def
))
1663 code
= gimple_assign_rhs_code (def
);
1664 op1
= gimple_assign_rhs1 (def
);
1667 /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
1668 If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return. */
1669 if (code
== EQ_EXPR
|| code
== NE_EXPR
1670 || code
== BIT_XOR_EXPR
)
1671 op2
= gimple_assign_rhs2 (def
);
1676 if (truth_valued_ssa_name (name
))
1680 /* Check if we have X == 0 and X has an integral type. */
1681 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1
)))
1683 if (integer_zerop (op2
))
1687 /* Check if we have X != 1 and X is a truth-valued. */
1688 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1
)))
1690 if (integer_onep (op2
) && truth_valued_ssa_name (op1
))
1694 /* Check if we have X ^ 1 and X is truth valued. */
1695 if (integer_onep (op2
) && truth_valued_ssa_name (op1
))
1705 /* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
1706 operations CODE, if one operand has the logically inverted
1707 value of the other. */
1709 simplify_bitwise_binary_1 (enum tree_code code
, tree type
,
1710 tree arg1
, tree arg2
)
1714 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1715 if (code
!= BIT_AND_EXPR
&& code
!= BIT_IOR_EXPR
1716 && code
!= BIT_XOR_EXPR
)
1719 /* First check if operands ARG1 and ARG2 are equal. If so
1720 return NULL_TREE as this optimization is handled fold_stmt. */
1723 /* See if we have in arguments logical-not patterns. */
1724 if (((anot
= lookup_logical_inverted_value (arg1
)) == NULL_TREE
1726 && ((anot
= lookup_logical_inverted_value (arg2
)) == NULL_TREE
1731 if (code
== BIT_AND_EXPR
)
1732 return fold_convert (type
, integer_zero_node
);
1733 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1734 if (truth_valued_ssa_name (anot
))
1735 return fold_convert (type
, integer_one_node
);
1737 /* ??? Otherwise result is (X != 0 ? X : 1). not handled. */
1741 /* Given a ssa_name in NAME see if it was defined by an assignment and
1742 set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
1743 to the second operand on the rhs. */
1746 defcodefor_name (tree name
, enum tree_code
*code
, tree
*arg1
, tree
*arg2
)
1749 enum tree_code code1
;
1753 enum gimple_rhs_class grhs_class
;
1755 code1
= TREE_CODE (name
);
1758 grhs_class
= get_gimple_rhs_class (code1
);
1760 if (code1
== SSA_NAME
)
1762 def
= SSA_NAME_DEF_STMT (name
);
1764 if (def
&& is_gimple_assign (def
)
1765 && can_propagate_from (def
))
1767 code1
= gimple_assign_rhs_code (def
);
1768 arg11
= gimple_assign_rhs1 (def
);
1769 arg21
= gimple_assign_rhs2 (def
);
1770 arg31
= gimple_assign_rhs2 (def
);
1773 else if (grhs_class
== GIMPLE_TERNARY_RHS
1774 || GIMPLE_BINARY_RHS
1776 || GIMPLE_SINGLE_RHS
)
1777 extract_ops_from_tree_1 (name
, &code1
, &arg11
, &arg21
, &arg31
);
1783 /* Ignore arg3 currently. */
1786 /* Simplify bitwise binary operations.
1787 Return true if a transformation applied, otherwise return false. */
1790 simplify_bitwise_binary (gimple_stmt_iterator
*gsi
)
1792 gimple stmt
= gsi_stmt (*gsi
);
1793 tree arg1
= gimple_assign_rhs1 (stmt
);
1794 tree arg2
= gimple_assign_rhs2 (stmt
);
1795 enum tree_code code
= gimple_assign_rhs_code (stmt
);
1797 tree def1_arg1
, def1_arg2
, def2_arg1
, def2_arg2
;
1798 enum tree_code def1_code
, def2_code
;
1800 defcodefor_name (arg1
, &def1_code
, &def1_arg1
, &def1_arg2
);
1801 defcodefor_name (arg2
, &def2_code
, &def2_arg1
, &def2_arg2
);
1803 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST)). */
1804 if (TREE_CODE (arg2
) == INTEGER_CST
1805 && CONVERT_EXPR_CODE_P (def1_code
)
1806 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1
))
1807 && int_fits_type_p (arg2
, TREE_TYPE (def1_arg1
)))
1810 tree tem
= make_ssa_name (TREE_TYPE (def1_arg1
), NULL
);
1812 gimple_build_assign_with_ops (code
, tem
, def1_arg1
,
1813 fold_convert_loc (gimple_location (stmt
),
1814 TREE_TYPE (def1_arg1
),
1816 gimple_set_location (newop
, gimple_location (stmt
));
1817 gsi_insert_before (gsi
, newop
, GSI_SAME_STMT
);
1818 gimple_assign_set_rhs_with_ops_1 (gsi
, NOP_EXPR
,
1819 tem
, NULL_TREE
, NULL_TREE
);
1820 update_stmt (gsi_stmt (*gsi
));
1824 /* For bitwise binary operations apply operand conversions to the
1825 binary operation result instead of to the operands. This allows
1826 to combine successive conversions and bitwise binary operations. */
1827 if (CONVERT_EXPR_CODE_P (def1_code
)
1828 && CONVERT_EXPR_CODE_P (def2_code
)
1829 && types_compatible_p (TREE_TYPE (def1_arg1
), TREE_TYPE (def2_arg1
))
1830 /* Make sure that the conversion widens the operands, or has same
1831 precision, or that it changes the operation to a bitfield
1833 && ((TYPE_PRECISION (TREE_TYPE (def1_arg1
))
1834 <= TYPE_PRECISION (TREE_TYPE (arg1
)))
1835 || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1
)))
1837 || (TYPE_PRECISION (TREE_TYPE (arg1
))
1838 != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1
))))))
1841 tree tem
= make_ssa_name (TREE_TYPE (def1_arg1
), NULL
);
1842 newop
= gimple_build_assign_with_ops (code
, tem
, def1_arg1
, def2_arg1
);
1843 gimple_set_location (newop
, gimple_location (stmt
));
1844 gsi_insert_before (gsi
, newop
, GSI_SAME_STMT
);
1845 gimple_assign_set_rhs_with_ops_1 (gsi
, NOP_EXPR
,
1846 tem
, NULL_TREE
, NULL_TREE
);
1847 update_stmt (gsi_stmt (*gsi
));
1852 /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
1853 if (def1_code
== def2_code
1854 && def1_code
== BIT_AND_EXPR
1855 && operand_equal_for_phi_arg_p (def1_arg2
,
1861 tree inner
= fold_build2 (code
, TREE_TYPE (arg2
), a
, c
);
1862 /* If A OP0 C (this usually means C is the same as A) is 0
1863 then fold it down correctly. */
1864 if (integer_zerop (inner
))
1866 gimple_assign_set_rhs_from_tree (gsi
, inner
);
1870 /* If A OP0 C (this usually means C is the same as A) is a ssa_name
1871 then fold it down correctly. */
1872 else if (TREE_CODE (inner
) == SSA_NAME
)
1874 tree outer
= fold_build2 (def1_code
, TREE_TYPE (inner
),
1876 gimple_assign_set_rhs_from_tree (gsi
, outer
);
1884 tem
= make_ssa_name (TREE_TYPE (arg2
), NULL
);
1885 newop
= gimple_build_assign_with_ops (code
, tem
, a
, c
);
1886 gimple_set_location (newop
, gimple_location (stmt
));
1887 /* Make sure to re-process the new stmt as it's walking upwards. */
1888 gsi_insert_before (gsi
, newop
, GSI_NEW_STMT
);
1889 gimple_assign_set_rhs1 (stmt
, tem
);
1890 gimple_assign_set_rhs2 (stmt
, b
);
1891 gimple_assign_set_rhs_code (stmt
, def1_code
);
1897 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
1898 if (code
== BIT_AND_EXPR
1899 && def1_code
== BIT_IOR_EXPR
1900 && TREE_CODE (arg2
) == INTEGER_CST
1901 && TREE_CODE (def1_arg2
) == INTEGER_CST
)
1903 tree cst
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (arg2
),
1907 if (integer_zerop (cst
))
1909 gimple_assign_set_rhs1 (stmt
, def1_arg1
);
1913 tem
= make_ssa_name (TREE_TYPE (arg2
), NULL
);
1914 newop
= gimple_build_assign_with_ops (BIT_AND_EXPR
,
1915 tem
, def1_arg1
, arg2
);
1916 gimple_set_location (newop
, gimple_location (stmt
));
1917 /* Make sure to re-process the new stmt as it's walking upwards. */
1918 gsi_insert_before (gsi
, newop
, GSI_NEW_STMT
);
1919 gimple_assign_set_rhs1 (stmt
, tem
);
1920 gimple_assign_set_rhs2 (stmt
, cst
);
1921 gimple_assign_set_rhs_code (stmt
, BIT_IOR_EXPR
);
1926 /* Combine successive equal operations with constants. */
1927 if ((code
== BIT_AND_EXPR
1928 || code
== BIT_IOR_EXPR
1929 || code
== BIT_XOR_EXPR
)
1930 && def1_code
== code
1931 && TREE_CODE (arg2
) == INTEGER_CST
1932 && TREE_CODE (def1_arg2
) == INTEGER_CST
)
1934 tree cst
= fold_build2 (code
, TREE_TYPE (arg2
),
1936 gimple_assign_set_rhs1 (stmt
, def1_arg1
);
1937 gimple_assign_set_rhs2 (stmt
, cst
);
1942 /* Canonicalize X ^ ~0 to ~X. */
1943 if (code
== BIT_XOR_EXPR
1944 && TREE_CODE (arg2
) == INTEGER_CST
1945 && integer_all_onesp (arg2
))
1947 gimple_assign_set_rhs_with_ops (gsi
, BIT_NOT_EXPR
, arg1
, NULL_TREE
);
1948 gcc_assert (gsi_stmt (*gsi
) == stmt
);
1953 /* Try simple folding for X op !X, and X op X. */
1954 res
= simplify_bitwise_binary_1 (code
, TREE_TYPE (arg1
), arg1
, arg2
);
1955 if (res
!= NULL_TREE
)
1957 gimple_assign_set_rhs_from_tree (gsi
, res
);
1958 update_stmt (gsi_stmt (*gsi
));
1962 if (code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
)
1964 enum tree_code ocode
= code
== BIT_AND_EXPR
? BIT_IOR_EXPR
: BIT_AND_EXPR
;
1965 if (def1_code
== ocode
)
1968 enum tree_code coden
;
1970 /* ( X | Y) & X -> X */
1971 /* ( X & Y) | X -> X */
1975 gimple_assign_set_rhs_from_tree (gsi
, x
);
1976 update_stmt (gsi_stmt (*gsi
));
1980 defcodefor_name (def1_arg1
, &coden
, &a1
, &a2
);
1981 /* (~X | Y) & X -> X & Y */
1982 /* (~X & Y) | X -> X | Y */
1983 if (coden
== BIT_NOT_EXPR
&& a1
== x
)
1985 gimple_assign_set_rhs_with_ops (gsi
, code
,
1987 gcc_assert (gsi_stmt (*gsi
) == stmt
);
1991 defcodefor_name (def1_arg2
, &coden
, &a1
, &a2
);
1992 /* (Y | ~X) & X -> X & Y */
1993 /* (Y & ~X) | X -> X | Y */
1994 if (coden
== BIT_NOT_EXPR
&& a1
== x
)
1996 gimple_assign_set_rhs_with_ops (gsi
, code
,
1998 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2003 if (def2_code
== ocode
)
2005 enum tree_code coden
;
2008 /* X & ( X | Y) -> X */
2009 /* X | ( X & Y) -> X */
2013 gimple_assign_set_rhs_from_tree (gsi
, x
);
2014 update_stmt (gsi_stmt (*gsi
));
2017 defcodefor_name (def2_arg1
, &coden
, &a1
, NULL
);
2018 /* (~X | Y) & X -> X & Y */
2019 /* (~X & Y) | X -> X | Y */
2020 if (coden
== BIT_NOT_EXPR
&& a1
== x
)
2022 gimple_assign_set_rhs_with_ops (gsi
, code
,
2024 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2028 defcodefor_name (def2_arg2
, &coden
, &a1
, NULL
);
2029 /* (Y | ~X) & X -> X & Y */
2030 /* (Y & ~X) | X -> X | Y */
2031 if (coden
== BIT_NOT_EXPR
&& a1
== x
)
2033 gimple_assign_set_rhs_with_ops (gsi
, code
,
2035 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2046 /* Perform re-associations of the plus or minus statement STMT that are
2047 always permitted. Returns true if the CFG was changed. */
2050 associate_plusminus (gimple_stmt_iterator
*gsi
)
2052 gimple stmt
= gsi_stmt (*gsi
);
2053 tree rhs1
= gimple_assign_rhs1 (stmt
);
2054 tree rhs2
= gimple_assign_rhs2 (stmt
);
2055 enum tree_code code
= gimple_assign_rhs_code (stmt
);
2058 /* We can't reassociate at all for saturating types. */
2059 if (TYPE_SATURATING (TREE_TYPE (rhs1
)))
2062 /* First contract negates. */
2067 /* A +- (-B) -> A -+ B. */
2068 if (TREE_CODE (rhs2
) == SSA_NAME
)
2070 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs2
);
2071 if (is_gimple_assign (def_stmt
)
2072 && gimple_assign_rhs_code (def_stmt
) == NEGATE_EXPR
2073 && can_propagate_from (def_stmt
))
2075 code
= (code
== MINUS_EXPR
) ? PLUS_EXPR
: MINUS_EXPR
;
2076 gimple_assign_set_rhs_code (stmt
, code
);
2077 rhs2
= gimple_assign_rhs1 (def_stmt
);
2078 gimple_assign_set_rhs2 (stmt
, rhs2
);
2079 gimple_set_modified (stmt
, true);
2084 /* (-A) + B -> B - A. */
2085 if (TREE_CODE (rhs1
) == SSA_NAME
2086 && code
== PLUS_EXPR
)
2088 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs1
);
2089 if (is_gimple_assign (def_stmt
)
2090 && gimple_assign_rhs_code (def_stmt
) == NEGATE_EXPR
2091 && can_propagate_from (def_stmt
))
2094 gimple_assign_set_rhs_code (stmt
, code
);
2096 gimple_assign_set_rhs1 (stmt
, rhs1
);
2097 rhs2
= gimple_assign_rhs1 (def_stmt
);
2098 gimple_assign_set_rhs2 (stmt
, rhs2
);
2099 gimple_set_modified (stmt
, true);
2106 /* We can't reassociate floating-point or fixed-point plus or minus
2107 because of saturation to +-Inf. */
2108 if (FLOAT_TYPE_P (TREE_TYPE (rhs1
))
2109 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1
)))
2112 /* Second match patterns that allow contracting a plus-minus pair
2113 irrespective of overflow issues.
2115 (A +- B) - A -> +- B
2117 (CST +- A) +- CST -> CST +- A
2118 (A + CST) +- CST -> A + CST
2121 A - (A +- B) -> -+ B
2122 A +- (B +- A) -> +- B
2123 CST +- (CST +- A) -> CST +- A
2124 CST +- (A +- CST) -> CST +- A
2127 via commutating the addition and contracting operations to zero
2128 by reassociation. */
2130 if (TREE_CODE (rhs1
) == SSA_NAME
)
2132 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs1
);
2133 if (is_gimple_assign (def_stmt
) && can_propagate_from (def_stmt
))
2135 enum tree_code def_code
= gimple_assign_rhs_code (def_stmt
);
2136 if (def_code
== PLUS_EXPR
2137 || def_code
== MINUS_EXPR
)
2139 tree def_rhs1
= gimple_assign_rhs1 (def_stmt
);
2140 tree def_rhs2
= gimple_assign_rhs2 (def_stmt
);
2141 if (operand_equal_p (def_rhs1
, rhs2
, 0)
2142 && code
== MINUS_EXPR
)
2144 /* (A +- B) - A -> +- B. */
2145 code
= ((def_code
== PLUS_EXPR
)
2146 ? TREE_CODE (def_rhs2
) : NEGATE_EXPR
);
2149 gimple_assign_set_rhs_with_ops (gsi
, code
, rhs1
, NULL_TREE
);
2150 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2151 gimple_set_modified (stmt
, true);
2153 else if (operand_equal_p (def_rhs2
, rhs2
, 0)
2154 && code
!= def_code
)
2156 /* (A +- B) -+ B -> A. */
2157 code
= TREE_CODE (def_rhs1
);
2160 gimple_assign_set_rhs_with_ops (gsi
, code
, rhs1
, NULL_TREE
);
2161 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2162 gimple_set_modified (stmt
, true);
2164 else if (TREE_CODE (rhs2
) == INTEGER_CST
2165 && TREE_CODE (def_rhs1
) == INTEGER_CST
)
2167 /* (CST +- A) +- CST -> CST +- A. */
2168 tree cst
= fold_binary (code
, TREE_TYPE (rhs1
),
2170 if (cst
&& !TREE_OVERFLOW (cst
))
2173 gimple_assign_set_rhs_code (stmt
, code
);
2175 gimple_assign_set_rhs1 (stmt
, rhs1
);
2177 gimple_assign_set_rhs2 (stmt
, rhs2
);
2178 gimple_set_modified (stmt
, true);
2181 else if (TREE_CODE (rhs2
) == INTEGER_CST
2182 && TREE_CODE (def_rhs2
) == INTEGER_CST
2183 && def_code
== PLUS_EXPR
)
2185 /* (A + CST) +- CST -> A + CST. */
2186 tree cst
= fold_binary (code
, TREE_TYPE (rhs1
),
2188 if (cst
&& !TREE_OVERFLOW (cst
))
2191 gimple_assign_set_rhs_code (stmt
, code
);
2193 gimple_assign_set_rhs1 (stmt
, rhs1
);
2195 gimple_assign_set_rhs2 (stmt
, rhs2
);
2196 gimple_set_modified (stmt
, true);
2200 else if (def_code
== BIT_NOT_EXPR
2201 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1
)))
2203 tree def_rhs1
= gimple_assign_rhs1 (def_stmt
);
2204 if (code
== PLUS_EXPR
2205 && operand_equal_p (def_rhs1
, rhs2
, 0))
2209 rhs1
= build_int_cst_type (TREE_TYPE (rhs2
), -1);
2211 gimple_assign_set_rhs_with_ops (gsi
, code
, rhs1
, NULL_TREE
);
2212 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2213 gimple_set_modified (stmt
, true);
2215 else if (code
== PLUS_EXPR
2216 && integer_onep (rhs1
))
2222 gimple_assign_set_rhs_with_ops (gsi
, code
, rhs1
, NULL_TREE
);
2223 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2224 gimple_set_modified (stmt
, true);
2230 if (rhs2
&& TREE_CODE (rhs2
) == SSA_NAME
)
2232 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs2
);
2233 if (is_gimple_assign (def_stmt
) && can_propagate_from (def_stmt
))
2235 enum tree_code def_code
= gimple_assign_rhs_code (def_stmt
);
2236 if (def_code
== PLUS_EXPR
2237 || def_code
== MINUS_EXPR
)
2239 tree def_rhs1
= gimple_assign_rhs1 (def_stmt
);
2240 tree def_rhs2
= gimple_assign_rhs2 (def_stmt
);
2241 if (operand_equal_p (def_rhs1
, rhs1
, 0)
2242 && code
== MINUS_EXPR
)
2244 /* A - (A +- B) -> -+ B. */
2245 code
= ((def_code
== PLUS_EXPR
)
2246 ? NEGATE_EXPR
: TREE_CODE (def_rhs2
));
2249 gimple_assign_set_rhs_with_ops (gsi
, code
, rhs1
, NULL_TREE
);
2250 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2251 gimple_set_modified (stmt
, true);
2253 else if (operand_equal_p (def_rhs2
, rhs1
, 0)
2254 && code
!= def_code
)
2256 /* A +- (B +- A) -> +- B. */
2257 code
= ((code
== PLUS_EXPR
)
2258 ? TREE_CODE (def_rhs1
) : NEGATE_EXPR
);
2261 gimple_assign_set_rhs_with_ops (gsi
, code
, rhs1
, NULL_TREE
);
2262 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2263 gimple_set_modified (stmt
, true);
2265 else if (TREE_CODE (rhs1
) == INTEGER_CST
2266 && TREE_CODE (def_rhs1
) == INTEGER_CST
)
2268 /* CST +- (CST +- A) -> CST +- A. */
2269 tree cst
= fold_binary (code
, TREE_TYPE (rhs2
),
2271 if (cst
&& !TREE_OVERFLOW (cst
))
2273 code
= (code
== def_code
? PLUS_EXPR
: MINUS_EXPR
);
2274 gimple_assign_set_rhs_code (stmt
, code
);
2276 gimple_assign_set_rhs1 (stmt
, rhs1
);
2278 gimple_assign_set_rhs2 (stmt
, rhs2
);
2279 gimple_set_modified (stmt
, true);
2282 else if (TREE_CODE (rhs1
) == INTEGER_CST
2283 && TREE_CODE (def_rhs2
) == INTEGER_CST
)
2285 /* CST +- (A +- CST) -> CST +- A. */
2286 tree cst
= fold_binary (def_code
== code
2287 ? PLUS_EXPR
: MINUS_EXPR
,
2290 if (cst
&& !TREE_OVERFLOW (cst
))
2293 gimple_assign_set_rhs1 (stmt
, rhs1
);
2295 gimple_assign_set_rhs2 (stmt
, rhs2
);
2296 gimple_set_modified (stmt
, true);
2300 else if (def_code
== BIT_NOT_EXPR
2301 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2
)))
2303 tree def_rhs1
= gimple_assign_rhs1 (def_stmt
);
2304 if (code
== PLUS_EXPR
2305 && operand_equal_p (def_rhs1
, rhs1
, 0))
2309 rhs1
= build_int_cst_type (TREE_TYPE (rhs1
), -1);
2311 gimple_assign_set_rhs_with_ops (gsi
, code
, rhs1
, NULL_TREE
);
2312 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2313 gimple_set_modified (stmt
, true);
2320 if (gimple_modified_p (stmt
))
2322 fold_stmt_inplace (gsi
);
2324 if (maybe_clean_or_replace_eh_stmt (stmt
, stmt
)
2325 && gimple_purge_dead_eh_edges (gimple_bb (stmt
)))
2332 /* Associate operands of a POINTER_PLUS_EXPR assignmen at *GSI. Returns
2333 true if anything changed, false otherwise. */
2336 associate_pointerplus (gimple_stmt_iterator
*gsi
)
2338 gimple stmt
= gsi_stmt (*gsi
);
2340 tree ptr
, rhs
, algn
;
2343 tem = (sizetype) ptr;
2347 and produce the simpler and easier to analyze with respect to alignment
2348 ... = ptr & ~algn; */
2349 ptr
= gimple_assign_rhs1 (stmt
);
2350 rhs
= gimple_assign_rhs2 (stmt
);
2351 if (TREE_CODE (rhs
) != SSA_NAME
)
2353 def_stmt
= SSA_NAME_DEF_STMT (rhs
);
2354 if (!is_gimple_assign (def_stmt
)
2355 || gimple_assign_rhs_code (def_stmt
) != NEGATE_EXPR
)
2357 rhs
= gimple_assign_rhs1 (def_stmt
);
2358 if (TREE_CODE (rhs
) != SSA_NAME
)
2360 def_stmt
= SSA_NAME_DEF_STMT (rhs
);
2361 if (!is_gimple_assign (def_stmt
)
2362 || gimple_assign_rhs_code (def_stmt
) != BIT_AND_EXPR
)
2364 rhs
= gimple_assign_rhs1 (def_stmt
);
2365 algn
= gimple_assign_rhs2 (def_stmt
);
2366 if (TREE_CODE (rhs
) != SSA_NAME
2367 || TREE_CODE (algn
) != INTEGER_CST
)
2369 def_stmt
= SSA_NAME_DEF_STMT (rhs
);
2370 if (!is_gimple_assign (def_stmt
)
2371 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt
)))
2373 if (gimple_assign_rhs1 (def_stmt
) != ptr
)
2376 algn
= double_int_to_tree (TREE_TYPE (ptr
),
2377 double_int_not (tree_to_double_int (algn
)));
2378 gimple_assign_set_rhs_with_ops (gsi
, BIT_AND_EXPR
, ptr
, algn
);
2379 fold_stmt_inplace (gsi
);
2385 /* Combine two conversions in a row for the second conversion at *GSI.
2386 Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
2387 run. Else it returns 0. */
2390 combine_conversions (gimple_stmt_iterator
*gsi
)
2392 gimple stmt
= gsi_stmt (*gsi
);
2395 enum tree_code code
= gimple_assign_rhs_code (stmt
);
2396 enum tree_code code2
;
2398 gcc_checking_assert (CONVERT_EXPR_CODE_P (code
)
2399 || code
== FLOAT_EXPR
2400 || code
== FIX_TRUNC_EXPR
);
2402 lhs
= gimple_assign_lhs (stmt
);
2403 op0
= gimple_assign_rhs1 (stmt
);
2404 if (useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (op0
)))
2406 gimple_assign_set_rhs_code (stmt
, TREE_CODE (op0
));
2410 if (TREE_CODE (op0
) != SSA_NAME
)
2413 def_stmt
= SSA_NAME_DEF_STMT (op0
);
2414 if (!is_gimple_assign (def_stmt
))
2417 code2
= gimple_assign_rhs_code (def_stmt
);
2419 if (CONVERT_EXPR_CODE_P (code2
) || code2
== FLOAT_EXPR
)
2421 tree defop0
= gimple_assign_rhs1 (def_stmt
);
2422 tree type
= TREE_TYPE (lhs
);
2423 tree inside_type
= TREE_TYPE (defop0
);
2424 tree inter_type
= TREE_TYPE (op0
);
2425 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
2426 int inside_ptr
= POINTER_TYPE_P (inside_type
);
2427 int inside_float
= FLOAT_TYPE_P (inside_type
);
2428 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
2429 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
2430 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
2431 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
2432 int inter_ptr
= POINTER_TYPE_P (inter_type
);
2433 int inter_float
= FLOAT_TYPE_P (inter_type
);
2434 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
2435 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
2436 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
2437 int final_int
= INTEGRAL_TYPE_P (type
);
2438 int final_ptr
= POINTER_TYPE_P (type
);
2439 int final_float
= FLOAT_TYPE_P (type
);
2440 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
2441 unsigned int final_prec
= TYPE_PRECISION (type
);
2442 int final_unsignedp
= TYPE_UNSIGNED (type
);
2444 /* Don't propagate ssa names that occur in abnormal phis. */
2445 if (TREE_CODE (defop0
) == SSA_NAME
2446 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0
))
2449 /* In addition to the cases of two conversions in a row
2450 handled below, if we are converting something to its own
2451 type via an object of identical or wider precision, neither
2452 conversion is needed. */
2453 if (useless_type_conversion_p (type
, inside_type
)
2454 && (((inter_int
|| inter_ptr
) && final_int
)
2455 || (inter_float
&& final_float
))
2456 && inter_prec
>= final_prec
)
2458 gimple_assign_set_rhs1 (stmt
, unshare_expr (defop0
));
2459 gimple_assign_set_rhs_code (stmt
, TREE_CODE (defop0
));
2461 return remove_prop_source_from_use (op0
) ? 2 : 1;
2464 /* Likewise, if the intermediate and initial types are either both
2465 float or both integer, we don't need the middle conversion if the
2466 former is wider than the latter and doesn't change the signedness
2467 (for integers). Avoid this if the final type is a pointer since
2468 then we sometimes need the middle conversion. Likewise if the
2469 final type has a precision not equal to the size of its mode. */
2470 if (((inter_int
&& inside_int
)
2471 || (inter_float
&& inside_float
)
2472 || (inter_vec
&& inside_vec
))
2473 && inter_prec
>= inside_prec
2474 && (inter_float
|| inter_vec
2475 || inter_unsignedp
== inside_unsignedp
)
2476 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
2477 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
2479 && (! final_vec
|| inter_prec
== inside_prec
))
2481 gimple_assign_set_rhs1 (stmt
, defop0
);
2483 return remove_prop_source_from_use (op0
) ? 2 : 1;
2486 /* If we have a sign-extension of a zero-extended value, we can
2487 replace that by a single zero-extension. Likewise if the
2488 final conversion does not change precision we can drop the
2489 intermediate conversion. */
2490 if (inside_int
&& inter_int
&& final_int
2491 && ((inside_prec
< inter_prec
&& inter_prec
< final_prec
2492 && inside_unsignedp
&& !inter_unsignedp
)
2493 || final_prec
== inter_prec
))
2495 gimple_assign_set_rhs1 (stmt
, defop0
);
2497 return remove_prop_source_from_use (op0
) ? 2 : 1;
2500 /* Two conversions in a row are not needed unless:
2501 - some conversion is floating-point (overstrict for now), or
2502 - some conversion is a vector (overstrict for now), or
2503 - the intermediate type is narrower than both initial and
2505 - the intermediate type and innermost type differ in signedness,
2506 and the outermost type is wider than the intermediate, or
2507 - the initial type is a pointer type and the precisions of the
2508 intermediate and final types differ, or
2509 - the final type is a pointer type and the precisions of the
2510 initial and intermediate types differ. */
2511 if (! inside_float
&& ! inter_float
&& ! final_float
2512 && ! inside_vec
&& ! inter_vec
&& ! final_vec
2513 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
2514 && ! (inside_int
&& inter_int
2515 && inter_unsignedp
!= inside_unsignedp
2516 && inter_prec
< final_prec
)
2517 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
2518 == (final_unsignedp
&& final_prec
> inter_prec
))
2519 && ! (inside_ptr
&& inter_prec
!= final_prec
)
2520 && ! (final_ptr
&& inside_prec
!= inter_prec
)
2521 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
2522 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
2524 gimple_assign_set_rhs1 (stmt
, defop0
);
2526 return remove_prop_source_from_use (op0
) ? 2 : 1;
2529 /* A truncation to an unsigned type should be canonicalized as
2530 bitwise and of a mask. */
2531 if (final_int
&& inter_int
&& inside_int
2532 && final_prec
== inside_prec
2533 && final_prec
> inter_prec
2537 tem
= fold_build2 (BIT_AND_EXPR
, inside_type
,
2540 (inside_type
, double_int_mask (inter_prec
)));
2541 if (!useless_type_conversion_p (type
, inside_type
))
2543 tem
= force_gimple_operand_gsi (gsi
, tem
, true, NULL_TREE
, true,
2545 gimple_assign_set_rhs1 (stmt
, tem
);
2548 gimple_assign_set_rhs_from_tree (gsi
, tem
);
2549 update_stmt (gsi_stmt (*gsi
));
2553 /* If we are converting an integer to a floating-point that can
2554 represent it exactly and back to an integer, we can skip the
2555 floating-point conversion. */
2556 if (inside_int
&& inter_float
&& final_int
&&
2557 (unsigned) significand_size (TYPE_MODE (inter_type
))
2558 >= inside_prec
- !inside_unsignedp
)
2560 if (useless_type_conversion_p (type
, inside_type
))
2562 gimple_assign_set_rhs1 (stmt
, unshare_expr (defop0
));
2563 gimple_assign_set_rhs_code (stmt
, TREE_CODE (defop0
));
2565 return remove_prop_source_from_use (op0
) ? 2 : 1;
2569 gimple_assign_set_rhs1 (stmt
, defop0
);
2570 gimple_assign_set_rhs_code (stmt
, CONVERT_EXPR
);
2572 return remove_prop_source_from_use (op0
) ? 2 : 1;
2580 /* Main entry point for the forward propagation and statement combine
2584 ssa_forward_propagate_and_combine (void)
2587 unsigned int todoflags
= 0;
2589 cfg_changed
= false;
2593 gimple_stmt_iterator gsi
;
2595 /* Apply forward propagation to all stmts in the basic-block.
2596 Note we update GSI within the loop as necessary. */
2597 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); )
2599 gimple stmt
= gsi_stmt (gsi
);
2601 enum tree_code code
;
2603 if (!is_gimple_assign (stmt
))
2609 lhs
= gimple_assign_lhs (stmt
);
2610 rhs
= gimple_assign_rhs1 (stmt
);
2611 code
= gimple_assign_rhs_code (stmt
);
2612 if (TREE_CODE (lhs
) != SSA_NAME
2613 || has_zero_uses (lhs
))
2619 /* If this statement sets an SSA_NAME to an address,
2620 try to propagate the address into the uses of the SSA_NAME. */
2621 if (code
== ADDR_EXPR
2622 /* Handle pointer conversions on invariant addresses
2623 as well, as this is valid gimple. */
2624 || (CONVERT_EXPR_CODE_P (code
)
2625 && TREE_CODE (rhs
) == ADDR_EXPR
2626 && POINTER_TYPE_P (TREE_TYPE (lhs
))))
2628 tree base
= get_base_address (TREE_OPERAND (rhs
, 0));
2631 || decl_address_invariant_p (base
))
2632 && !stmt_references_abnormal_ssa_name (stmt
)
2633 && forward_propagate_addr_expr (lhs
, rhs
))
2635 release_defs (stmt
);
2636 todoflags
|= TODO_remove_unused_locals
;
2637 gsi_remove (&gsi
, true);
2642 else if (code
== POINTER_PLUS_EXPR
)
2644 tree off
= gimple_assign_rhs2 (stmt
);
2645 if (TREE_CODE (off
) == INTEGER_CST
2646 && can_propagate_from (stmt
)
2647 && !simple_iv_increment_p (stmt
)
2648 /* ??? Better adjust the interface to that function
2649 instead of building new trees here. */
2650 && forward_propagate_addr_expr
2652 build1_loc (gimple_location (stmt
),
2653 ADDR_EXPR
, TREE_TYPE (rhs
),
2654 fold_build2 (MEM_REF
,
2655 TREE_TYPE (TREE_TYPE (rhs
)),
2657 fold_convert (ptr_type_node
,
2660 release_defs (stmt
);
2661 todoflags
|= TODO_remove_unused_locals
;
2662 gsi_remove (&gsi
, true);
2664 else if (is_gimple_min_invariant (rhs
))
2666 /* Make sure to fold &a[0] + off_1 here. */
2667 fold_stmt_inplace (&gsi
);
2669 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
2675 else if (TREE_CODE_CLASS (code
) == tcc_comparison
)
2677 if (forward_propagate_comparison (&gsi
))
2684 /* Combine stmts with the stmts defining their operands.
2685 Note we update GSI within the loop as necessary. */
2686 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
2688 gimple stmt
= gsi_stmt (gsi
);
2689 bool changed
= false;
2691 /* Mark stmt as potentially needing revisiting. */
2692 gimple_set_plf (stmt
, GF_PLF_1
, false);
2694 switch (gimple_code (stmt
))
2698 tree rhs1
= gimple_assign_rhs1 (stmt
);
2699 enum tree_code code
= gimple_assign_rhs_code (stmt
);
2701 if ((code
== BIT_NOT_EXPR
2702 || code
== NEGATE_EXPR
)
2703 && TREE_CODE (rhs1
) == SSA_NAME
)
2704 changed
= simplify_not_neg_expr (&gsi
);
2705 else if (code
== COND_EXPR
2706 || code
== VEC_COND_EXPR
)
2708 /* In this case the entire COND_EXPR is in rhs1. */
2709 if (forward_propagate_into_cond (&gsi
)
2710 || combine_cond_exprs (&gsi
))
2713 stmt
= gsi_stmt (gsi
);
2716 else if (TREE_CODE_CLASS (code
) == tcc_comparison
)
2719 did_something
= forward_propagate_into_comparison (&gsi
);
2720 if (did_something
== 2)
2722 changed
= did_something
!= 0;
2724 else if (code
== BIT_AND_EXPR
2725 || code
== BIT_IOR_EXPR
2726 || code
== BIT_XOR_EXPR
)
2727 changed
= simplify_bitwise_binary (&gsi
);
2728 else if (code
== PLUS_EXPR
2729 || code
== MINUS_EXPR
)
2730 changed
= associate_plusminus (&gsi
);
2731 else if (code
== POINTER_PLUS_EXPR
)
2732 changed
= associate_pointerplus (&gsi
);
2733 else if (CONVERT_EXPR_CODE_P (code
)
2734 || code
== FLOAT_EXPR
2735 || code
== FIX_TRUNC_EXPR
)
2737 int did_something
= combine_conversions (&gsi
);
2738 if (did_something
== 2)
2740 changed
= did_something
!= 0;
2746 changed
= simplify_gimple_switch (stmt
);
2752 did_something
= forward_propagate_into_gimple_cond (stmt
);
2753 if (did_something
== 2)
2755 changed
= did_something
!= 0;
2761 tree callee
= gimple_call_fndecl (stmt
);
2762 if (callee
!= NULL_TREE
2763 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
2764 changed
= simplify_builtin_call (&gsi
, callee
);
2773 /* If the stmt changed then re-visit it and the statements
2774 inserted before it. */
2775 for (; !gsi_end_p (gsi
); gsi_prev (&gsi
))
2776 if (gimple_plf (gsi_stmt (gsi
), GF_PLF_1
))
2778 if (gsi_end_p (gsi
))
2779 gsi
= gsi_start_bb (bb
);
2785 /* Stmt no longer needs to be revisited. */
2786 gimple_set_plf (stmt
, GF_PLF_1
, true);
2793 todoflags
|= TODO_cleanup_cfg
;
2800 gate_forwprop (void)
2802 return flag_tree_forwprop
;
2805 struct gimple_opt_pass pass_forwprop
=
2809 "forwprop", /* name */
2810 gate_forwprop
, /* gate */
2811 ssa_forward_propagate_and_combine
, /* execute */
2814 0, /* static_pass_number */
2815 TV_TREE_FORWPROP
, /* tv_id */
2816 PROP_cfg
| PROP_ssa
, /* properties_required */
2817 0, /* properties_provided */
2818 0, /* properties_destroyed */
2819 0, /* todo_flags_start */
2822 | TODO_verify_ssa
/* todo_flags_finish */