/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "flags.h"
#include "gimple.h"
#include "expr.h"
#include "cfgloop.h"
#include "optabs.h"
#include "tree-ssa-propagate.h"
/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);
/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL_TREE if there is no such one.
   This only returns destinations in a single-use chain.  If FINAL_NAME_P
   is non-NULL, the SSA name that represents the use is stored to it.  */

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}
/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL_TREE if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        return def_stmt;
      }
  } while (1);
}
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}
/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      cfg_changed |= gimple_purge_dead_eh_edges (bb);
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}
/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operand.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, !single_use0_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}
/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}
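/* An illustrative example (hypothetical GIMPLE): with

     a_1 = b_2 + 1;
     c_3 = a_1 == 0;

   the comparison is combined with the defining statement and the
   second statement becomes c_3 = b_2 == -1; if that leaves a_1
   unused, its definition is removed via remove_prop_source_from_use
   and the function returns nonzero.  */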
/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
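/* For example (illustrative GIMPLE), the sequence

     x_1 = a_2 < b_3;
     if (x_1 != 0) goto <bb 3>; else goto <bb 4>;

   is turned into

     if (a_2 < b_3) goto <bb 3>; else goto <bb 4>;

   after which the definition of x_1 is dead and gets removed.  */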
/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns nonzero if the stmt was changed.

   This must be kept in sync with forward_propagate_into_gimple_cond.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool swap = false;

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               TREE_TYPE (cond),
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return 0;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               def_code,
                               TREE_TYPE (cond),
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
      else if (code == COND_EXPR
               && ((def_code == BIT_NOT_EXPR
                    && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
                   || (def_code == BIT_XOR_EXPR
                       && integer_onep (gimple_assign_rhs2 (def_stmt)))))
        {
          tmp = gimple_assign_rhs1 (def_stmt);
          swap = true;
        }
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
                                  : integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
          if (swap)
            {
              tree t = gimple_assign_rhs2 (stmt);
              gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
              gimple_assign_set_rhs3 (stmt, t);
            }
        }
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return 0;
}
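/* An illustrative case (hypothetical SSA names): for a COND_EXPR

     x_1 = ~c_2;                   <- c_2 has 1-bit precision
     r_3 = x_1 ? v1_4 : v2_5;

   the inverted condition is propagated and the arms are swapped:

     r_3 = c_2 ? v2_5 : v1_4;  */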
/* Propagate from the ssa name definition statements of COND_EXPR
   values in the rhs of statement STMT into the conditional arms
   if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
combine_cond_exprs (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree cond, val1, val2;
  bool changed = false;

  cond = gimple_assign_rhs1 (stmt);
  val1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (val1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val1);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
          gimple_assign_set_rhs2 (stmt, val1);
          changed = true;
        }
    }
  val2 = gimple_assign_rhs3 (stmt);
  if (TREE_CODE (val2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val2);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
          gimple_assign_set_rhs3 (stmt, val2);
          changed = true;
        }
    }
  if (operand_equal_p (val1, val2, 0))
    {
      gimple_assign_set_rhs_from_tree (gsi_p, val1);
      stmt = gsi_stmt (*gsi_p);
      changed = true;
    }

  if (changed)
    update_stmt (stmt);

  return changed;
}
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
          || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
         do the propagation to the current statement.  In particular
         we can end up with a conversion needed for a non-invariant
         address which we cannot do in a single statement.  */
      if (!single_use_p
          || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
              && (!is_gimple_min_invariant (def_rhs)
                  || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                      && POINTER_TYPE_P (TREE_TYPE (def_rhs))
                      && (TYPE_PRECISION (TREE_TYPE (lhs))
                          > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
        return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
        gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }
  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = gimple_assign_lhs (use_stmt);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (gimple_assign_lhs (use_stmt) == lhs
               && integer_zerop (TREE_OPERAND (lhs, 1))
               && useless_type_conversion_p
                    (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                     TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          gimple_assign_set_lhs (use_stmt, new_lhs);
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhs = gimple_assign_rhs1 (use_stmt);
  if (TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);
  while (handled_component_p (rhs))
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (gimple_assign_rhs1 (use_stmt) == rhs
               && integer_zerop (TREE_OPERAND (rhs, 1))
               && useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                     TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          gimple_assign_set_rhs1 (use_stmt, new_rhs);
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}
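/* A typical case handled above (illustrative GIMPLE, assuming
   4-byte array elements):

     ptr_1 = &x[0];
     ptr2_2 = ptr_1 + 4;

   becomes

     ptr2_2 = &MEM_REF <x, 4>;

   i.e. effectively &x[1], recovering array indexing from the
   pointer arithmetic.  */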
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = bb_loop_depth (gimple_bb (SSA_NAME_DEF_STMT (name)));
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      /* If the use is in a deeper loop nest, then we do not want
         to propagate non-invariant ADDR_EXPRs into the loop as that
         is likely adding expression evaluations into the loop.  */
      if (bb_loop_depth (gimple_bb (use_stmt)) > stmt_loop_depth
          && !is_gimple_min_invariant (rhs))
        {
          all = false;
          continue;
        }

      {
        gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
        result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                                single_use_p);
        /* If the use has moved to a different statement adjust
           the update machinery for the old statement too.  */
        if (use_stmt != gsi_stmt (gsi))
          {
            update_stmt (use_stmt);
            use_stmt = gsi_stmt (gsi);
          }

        update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}
/* Forward propagate the comparison defined in *DEFGSI like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  Advance DEFGSI to the next
   statement.  */

static bool
forward_propagate_comparison (gimple_stmt_iterator *defgsi)
{
  gimple stmt = gsi_stmt (*defgsi);
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    goto bailout;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    goto bailout;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    goto bailout;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
          && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        goto bailout;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    goto bailout;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* When we remove stmt now the iterator defgsi goes off its current
     sequence, hence advance it now.  */
  gsi_next (defgsi);

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);

bailout:
  gsi_next (defgsi);
  return false;
}
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.

   Returns true when the statement was changed.  */

static bool
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
          return true;
        }
    }

  return false;
}
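/* For example (hypothetical SSA names), the pair

     x_1 = ~y_2;
     lhs_3 = ~x_1;

   is simplified to lhs_3 = y_2, which copy propagation can then
   eliminate entirely.  Negation is handled the same way.  */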
/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  vec<tree> labels;
  labels.create (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;
          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }

  labels.release ();
}
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}
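/* E.g. (illustrative), for a char-typed c_1

     D.1234_2 = (int) c_1;
     switch (D.1234_2) <...>

   the value-preserving widening is removed and the switch operates
   on c_1 directly, with case labels that are now out of range for
   the char type pruned by simplify_gimple_switch_label_vec.  */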
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    double_int_to_tree (sizetype,
                                                        mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
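/* An illustrative example (hypothetical GIMPLE):

     p1_1 = &buf[4];
     p2_2 = p1_1 + 6;

   Both definition chains reach the common expression p1_1 (with
   accumulated offsets 0 and 6 respectively), so
   constant_pointer_difference (p1_1, p2_2) returns 6.  */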
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!host_integerp (val2, 0)
              || !host_integerp (len2, 1))
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!host_integerp (len1, 1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              if (!host_integerp (off1, 1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_low_cst (off1, 1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !host_integerp (src1, 0))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !host_integerp (diff, 1)
              || tree_int_cst_lt (len1, diff))
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_low_cst (diff, 1);
          src_len += tree_low_cst (len2, 1);
          if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
            src_len = tree_low_cst (len1, 1);
          if (src_len > 64)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return a different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
                    tree_low_cst (len1, 1));
          else
            src_buf[0] = tree_low_cst (src1, 0);
          memset (src_buf + tree_low_cst (diff, 1),
                  tree_low_cst (val2, 0), tree_low_cst (len2, 1));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                release_ssa_name (lhs1);
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2,
                                      builtin_decl_explicit (BUILT_IN_MEMCPY));
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              release_defs (stmt1);
              update_stmt (stmt2);
              return true;
            }
        }
      break;
    default:
      break;
    }
  return false;
}
/* Checks if expression has type of one-bit precision, or is a known
   truth-valued expression.  */
static bool
truth_valued_ssa_name (tree name)
{
  gimple def;
  tree type = TREE_TYPE (name);

  if (!INTEGRAL_TYPE_P (type))
    return false;
  /* Don't check here for BOOLEAN_TYPE as the precision isn't
     necessarily one and so ~X is not equal to !X.  */
  if (TYPE_PRECISION (type) == 1)
    return true;
  def = SSA_NAME_DEF_STMT (name);
  if (is_gimple_assign (def))
    return truth_value_p (gimple_assign_rhs_code (def));
  return false;
}
/* Helper routine for simplify_bitwise_binary_1 function.
   Return for the SSA name NAME the expression X if it meets the condition
   NAME = !X.  Otherwise return NULL_TREE.
   Detected patterns for NAME = !X are:
     !X and X == 0 for X with integral type.
     X ^ 1, X != 1, or ~X for X with integral type with precision of one.  */
static tree
lookup_logical_inverted_value (tree name)
{
  tree op1, op2;
  enum tree_code code;
  gimple def;

  /* If name has a non-integral type, or isn't a SSA_NAME, then
     return.  */
  if (TREE_CODE (name) != SSA_NAME
      || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
    return NULL_TREE;
  def = SSA_NAME_DEF_STMT (name);
  if (!is_gimple_assign (def))
    return NULL_TREE;

  code = gimple_assign_rhs_code (def);
  op1 = gimple_assign_rhs1 (def);
  op2 = NULL_TREE;

  /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
     If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return.  */
  if (code == EQ_EXPR || code == NE_EXPR
      || code == BIT_XOR_EXPR)
    op2 = gimple_assign_rhs2 (def);

  switch (code)
    {
    case BIT_NOT_EXPR:
      if (truth_valued_ssa_name (name))
        return op1;
      break;
    case EQ_EXPR:
      /* Check if we have X == 0 and X has an integral type.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_zerop (op2))
        return op1;
      break;
    case NE_EXPR:
      /* Check if we have X != 1 and X is truth-valued.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    case BIT_XOR_EXPR:
      /* Check if we have X ^ 1 and X is truth valued.  */
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    default:
      break;
    }

  return NULL_TREE;
}
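/* For example (hypothetical SSA names), given

     x_1 = a_2 == 0;

   lookup_logical_inverted_value (x_1) returns a_2, because
   x_1 = !a_2 holds for any integral-typed a_2.  */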
/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
   operations CODE, if one operand has the logically inverted
   value of the other.  */
static tree
simplify_bitwise_binary_1 (enum tree_code code, tree type,
                           tree arg1, tree arg2)
{
  tree anot;

  /* If CODE isn't a bitwise binary operation, return NULL_TREE.  */
  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
      && code != BIT_XOR_EXPR)
    return NULL_TREE;

  /* First check if operands ARG1 and ARG2 are equal.  If so
     return NULL_TREE as this optimization is handled by fold_stmt.  */
  if (arg1 == arg2)
    return NULL_TREE;
  /* See if we have in arguments logical-not patterns.  */
  if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
       || anot != arg2)
      && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
          || anot != arg1))
    return NULL_TREE;

  /* X & !X -> 0.  */
  if (code == BIT_AND_EXPR)
    return fold_convert (type, integer_zero_node);
  /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued.  */
  if (truth_valued_ssa_name (anot))
    return fold_convert (type, integer_one_node);

  /* ??? Otherwise result is (X != 0 ? X : 1).  not handled.  */
  return NULL_TREE;
}
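/* An illustrative example (hypothetical SSA names):

     b_2 = a_1 == 0;
     c_3 = b_2 & a_1;

   Here ARG1 = b_2 is the logical inversion of ARG2 = a_1, so for
   BIT_AND_EXPR the result folds to 0; for BIT_IOR_EXPR or
   BIT_XOR_EXPR it folds to 1 when the operand is truth-valued.  */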
/* Given a ssa_name in NAME see if it was defined by an assignment and
   set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
   to the second operand on the rhs.  */

static void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
          && can_propagate_from (def))
        {
          code1 = gimple_assign_rhs_code (def);
          arg11 = gimple_assign_rhs1 (def);
          arg21 = gimple_assign_rhs2 (def);
          arg31 = gimple_assign_rhs3 (def);
        }
    }
  else if (grhs_class == GIMPLE_TERNARY_RHS
           || grhs_class == GIMPLE_BINARY_RHS
           || grhs_class == GIMPLE_UNARY_RHS
           || grhs_class == GIMPLE_SINGLE_RHS)
    extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
  /* Ignore arg3 currently.  */
}
/* Simplify bitwise binary operations.
   Return true if a transformation applied, otherwise return false.  */

static bool
simplify_bitwise_binary (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg1 = gimple_assign_rhs1 (stmt);
  tree arg2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree res;
  tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
  enum tree_code def1_code, def2_code;

  defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
  defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);

  /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST)).  */
  if (TREE_CODE (arg2) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (def1_code)
      && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
      && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
    {
      gimple newop;
      tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
      newop =
        gimple_build_assign_with_ops (code, tem, def1_arg1,
                                      fold_convert_loc (gimple_location (stmt),
                                                        TREE_TYPE (def1_arg1),
                                                        arg2));
      gimple_set_location (newop, gimple_location (stmt));
      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
                                        tem, NULL_TREE, NULL_TREE);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  /* For bitwise binary operations apply operand conversions to the
     binary operation result instead of to the operands.  This allows
     to combine successive conversions and bitwise binary operations.  */
  if (CONVERT_EXPR_CODE_P (def1_code)
      && CONVERT_EXPR_CODE_P (def2_code)
      && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
      /* Make sure that the conversion widens the operands, or has same
         precision, or that it changes the operation to a bitfield
         precision.  */
      && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
           <= TYPE_PRECISION (TREE_TYPE (arg1)))
          || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
              != MODE_INT)
          || (TYPE_PRECISION (TREE_TYPE (arg1))
              != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
    {
      gimple newop;
      tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
      newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
      gimple_set_location (newop, gimple_location (stmt));
      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
                                        tem, NULL_TREE, NULL_TREE);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }
  /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B.  */
  if (def1_code == def2_code
      && def1_code == BIT_AND_EXPR
      && operand_equal_for_phi_arg_p (def1_arg2,
                                      def2_arg2))
    {
      tree b = def1_arg2;
      tree a = def1_arg1;
      tree c = def2_arg1;
      tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
      /* If A OP0 C (this usually means C is the same as A) is 0
         then fold it down correctly.  */
      if (integer_zerop (inner))
        {
          gimple_assign_set_rhs_from_tree (gsi, inner);
          update_stmt (stmt);
          return true;
        }
      /* If A OP0 C (this usually means C is the same as A) is a ssa_name
         then fold it down correctly.  */
      else if (TREE_CODE (inner) == SSA_NAME)
        {
          tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
                                    inner, b);
          gimple_assign_set_rhs_from_tree (gsi, outer);
          update_stmt (stmt);
          return true;
        }
      else
        {
          gimple newop;
          tree tem;
          tem = make_ssa_name (TREE_TYPE (arg2), NULL);
          newop = gimple_build_assign_with_ops (code, tem, a, c);
          gimple_set_location (newop, gimple_location (stmt));
          /* Make sure to re-process the new stmt as it's walking upwards.  */
          gsi_insert_before (gsi, newop, GSI_NEW_STMT);
          gimple_assign_set_rhs1 (stmt, tem);
          gimple_assign_set_rhs2 (stmt, b);
          gimple_assign_set_rhs_code (stmt, def1_code);
          update_stmt (stmt);
          return true;
        }
    }

  /* (a | CST1) & CST2  ->  (a & CST2) | (CST1 & CST2).  */
  if (code == BIT_AND_EXPR
      && def1_code == BIT_IOR_EXPR
      && TREE_CODE (arg2) == INTEGER_CST
      && TREE_CODE (def1_arg2) == INTEGER_CST)
    {
      tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
                              arg2, def1_arg2);
      tree tem;
      gimple newop;
      if (integer_zerop (cst))
        {
          gimple_assign_set_rhs1 (stmt, def1_arg1);
          update_stmt (stmt);
          return true;
        }
      tem = make_ssa_name (TREE_TYPE (arg2), NULL);
      newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
                                            tem, def1_arg1, arg2);
      gimple_set_location (newop, gimple_location (stmt));
      /* Make sure to re-process the new stmt as it's walking upwards.  */
      gsi_insert_before (gsi, newop, GSI_NEW_STMT);
      gimple_assign_set_rhs1 (stmt, tem);
      gimple_assign_set_rhs2 (stmt, cst);
      gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
      update_stmt (stmt);
      return true;
    }

  /* Combine successive equal operations with constants.  */
  if ((code == BIT_AND_EXPR
       || code == BIT_IOR_EXPR
       || code == BIT_XOR_EXPR)
      && def1_code == code
      && TREE_CODE (arg2) == INTEGER_CST
      && TREE_CODE (def1_arg2) == INTEGER_CST)
    {
      tree cst = fold_build2 (code, TREE_TYPE (arg2),
                              arg2, def1_arg2);
      gimple_assign_set_rhs1 (stmt, def1_arg1);
      gimple_assign_set_rhs2 (stmt, cst);
      update_stmt (stmt);
      return true;
    }

  /* Canonicalize X ^ ~0 to ~X.  */
  if (code == BIT_XOR_EXPR
      && TREE_CODE (arg2) == INTEGER_CST
      && integer_all_onesp (arg2))
    {
      gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
      gcc_assert (gsi_stmt (*gsi) == stmt);
      update_stmt (stmt);
      return true;
    }

  /* Try simple folding for X op !X, and X op X.  */
  res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
  if (res != NULL_TREE)
    {
      gimple_assign_set_rhs_from_tree (gsi, res);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
    {
      enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
      if (def1_code == ocode)
        {
          tree x = arg2;
          enum tree_code coden;
          tree a1, a2;
          /* ( X | Y) & X -> X */
          /* ( X & Y) | X -> X */
          if (x == def1_arg1
              || x == def1_arg2)
            {
              gimple_assign_set_rhs_from_tree (gsi, x);
              update_stmt (gsi_stmt (*gsi));
              return true;
            }

          defcodefor_name (def1_arg1, &coden, &a1, &a2);
          /* (~X | Y) & X -> X & Y */
          /* (~X & Y) | X -> X | Y */
          if (coden == BIT_NOT_EXPR && a1 == x)
            {
              gimple_assign_set_rhs_with_ops (gsi, code,
                                              x, def1_arg2);
              gcc_assert (gsi_stmt (*gsi) == stmt);
              update_stmt (stmt);
              return true;
            }
          defcodefor_name (def1_arg2, &coden, &a1, &a2);
          /* (Y | ~X) & X -> X & Y */
          /* (Y & ~X) | X -> X | Y */
          if (coden == BIT_NOT_EXPR && a1 == x)
            {
              gimple_assign_set_rhs_with_ops (gsi, code,
                                              x, def1_arg1);
              gcc_assert (gsi_stmt (*gsi) == stmt);
              update_stmt (stmt);
              return true;
            }
        }
      if (def2_code == ocode)
        {
          enum tree_code coden;
          tree a1;
          tree x = arg1;
          /* X & ( X | Y) -> X */
          /* X | ( X & Y) -> X */
          if (x == def2_arg1
              || x == def2_arg2)
            {
              gimple_assign_set_rhs_from_tree (gsi, x);
              update_stmt (gsi_stmt (*gsi));
              return true;
            }
          defcodefor_name (def2_arg1, &coden, &a1, NULL);
          /* (~X | Y) & X -> X & Y */
          /* (~X & Y) | X -> X | Y */
          if (coden == BIT_NOT_EXPR && a1 == x)
            {
              gimple_assign_set_rhs_with_ops (gsi, code,
                                              x, def2_arg2);
              gcc_assert (gsi_stmt (*gsi) == stmt);
              update_stmt (stmt);
              return true;
            }
          defcodefor_name (def2_arg2, &coden, &a1, NULL);
          /* (Y | ~X) & X -> X & Y */
          /* (Y & ~X) | X -> X | Y */
          if (coden == BIT_NOT_EXPR && a1 == x)
            {
              gimple_assign_set_rhs_with_ops (gsi, code,
                                              x, def2_arg1);
              gcc_assert (gsi_stmt (*gsi) == stmt);
              update_stmt (stmt);
              return true;
            }
        }
    }

  return false;
}
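/* As an illustration of the last group of patterns above
   (hypothetical SSA names):

     y_2 = x_1 | b_3;
     z_4 = y_2 & x_1;

   matches ( X | Y) & X and is simplified to z_4 = x_1.  */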
/* Perform re-associations of the plus or minus statement STMT that are
   always permitted.  Returns true if the CFG was changed.  */

static bool
associate_plusminus (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool changed;

  /* We can't reassociate at all for saturating types.  */
  if (TYPE_SATURATING (TREE_TYPE (rhs1)))
    return false;

  /* First contract negates.  */
  do
    {
      changed = false;

      /* A +- (-B) -> A -+ B.  */
      if (TREE_CODE (rhs2) == SSA_NAME)
	{
	  gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
	  if (is_gimple_assign (def_stmt)
	      && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
	      && can_propagate_from (def_stmt))
	    {
	      code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
	      gimple_assign_set_rhs_code (stmt, code);
	      rhs2 = gimple_assign_rhs1 (def_stmt);
	      gimple_assign_set_rhs2 (stmt, rhs2);
	      gimple_set_modified (stmt, true);
	      changed = true;
	    }
	}
      /* (-A) + B -> B - A.  */
      if (TREE_CODE (rhs1) == SSA_NAME
	  && code == PLUS_EXPR)
	{
	  gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
	  if (is_gimple_assign (def_stmt)
	      && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
	      && can_propagate_from (def_stmt))
	    {
	      code = MINUS_EXPR;
	      gimple_assign_set_rhs_code (stmt, code);
	      rhs1 = rhs2;
	      gimple_assign_set_rhs1 (stmt, rhs1);
	      rhs2 = gimple_assign_rhs1 (def_stmt);
	      gimple_assign_set_rhs2 (stmt, rhs2);
	      gimple_set_modified (stmt, true);
	      changed = true;
	    }
	}
    }
  while (changed);

  /* We can't reassociate floating-point or fixed-point plus or minus
     because of saturation to +-Inf.  */
  if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
      || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
    goto out;
  /* Second match patterns that allow contracting a plus-minus pair
     irrespective of overflow issues.

	(A +- B) - A       ->  +- B
	(A +- B) -+ B      ->  A
	(CST +- A) +- CST  ->  CST +- A
	(A + CST) +- CST   ->  A + CST
	~A + A             ->  -1
	~A + 1             ->  -A
	A - (A +- B)       ->  -+ B
	A +- (B +- A)      ->  +- B
	CST +- (CST +- A)  ->  CST +- A
	CST +- (A +- CST)  ->  CST +- A
	A + ~A             ->  -1

     via commutating the addition and contracting operations to zero
     by reassociation.  */
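  /* As an illustration of the first pattern (example names, not from the
     original comment):

	t_1 = a_2 + b_3;
	r_4 = t_1 - a_2;

     is rewritten into r_4 = b_3, after which t_1 may become dead.  */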
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  if (def_code == PLUS_EXPR
	      || def_code == MINUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs2, 0)
		  && code == MINUS_EXPR)
		{
		  /* (A +- B) - A -> +- B.  */
		  code = ((def_code == PLUS_EXPR)
			  ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
		  rhs1 = def_rhs2;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (operand_equal_p (def_rhs2, rhs2, 0)
		       && code != def_code)
		{
		  /* (A +- B) -+ B -> A.  */
		  code = TREE_CODE (def_rhs1);
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (TREE_CODE (rhs2) == INTEGER_CST
		       && TREE_CODE (def_rhs1) == INTEGER_CST)
		{
		  /* (CST +- A) +- CST -> CST +- A.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs1),
					  def_rhs1, rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = def_code;
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs2;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	      else if (TREE_CODE (rhs2) == INTEGER_CST
		       && TREE_CODE (def_rhs2) == INTEGER_CST
		       && def_code == PLUS_EXPR)
		{
		  /* (A + CST) +- CST -> A + CST.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs1),
					  def_rhs2, rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = PLUS_EXPR;
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = def_rhs1;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = cst;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	    }
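	  /* Concretely (illustrative constants): r = (4 - a) + 2 folds
	     to r = 6 - a, and r = (a + 4) - 2 folds to r = a + 2, in
	     both cases only when folding the constants does not
	     overflow.  */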
	  else if (def_code == BIT_NOT_EXPR
		   && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      if (code == PLUS_EXPR
		  && operand_equal_p (def_rhs1, rhs2, 0))
		{
		  /* ~A + A -> -1.  */
		  code = INTEGER_CST;
		  rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (code == PLUS_EXPR
		       && integer_onep (rhs2))
		{
		  /* ~A + 1 -> -A.  Note rhs1 is known to be an SSA name
		     here, so the constant 1 can only be rhs2.  */
		  code = NEGATE_EXPR;
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	    }
	}
    }
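  /* Sanity check for the two BIT_NOT_EXPR identities used above
     (two's complement, illustrative reasoning): ~a == -a - 1, hence
     ~a + a == -1 and ~a + 1 == -a.  */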
  if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  if (def_code == PLUS_EXPR
	      || def_code == MINUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs1, 0)
		  && code == MINUS_EXPR)
		{
		  /* A - (A +- B) -> -+ B.  */
		  code = ((def_code == PLUS_EXPR)
			  ? NEGATE_EXPR : TREE_CODE (def_rhs2));
		  rhs1 = def_rhs2;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (operand_equal_p (def_rhs2, rhs1, 0)
		       && code != def_code)
		{
		  /* A +- (B +- A) -> +- B.  */
		  code = ((code == PLUS_EXPR)
			  ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (TREE_CODE (rhs1) == INTEGER_CST
		       && TREE_CODE (def_rhs1) == INTEGER_CST)
		{
		  /* CST +- (CST +- A) -> CST +- A.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs2),
					  rhs1, def_rhs1);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs2;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	      else if (TREE_CODE (rhs1) == INTEGER_CST
		       && TREE_CODE (def_rhs2) == INTEGER_CST)
		{
		  /* CST +- (A +- CST) -> CST +- A.  */
		  tree cst = fold_binary (def_code == code
					  ? PLUS_EXPR : MINUS_EXPR,
					  TREE_TYPE (rhs2), rhs1, def_rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs1;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	    }
	  else if (def_code == BIT_NOT_EXPR
		   && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      if (code == PLUS_EXPR
		  && operand_equal_p (def_rhs1, rhs1, 0))
		{
		  /* A + ~A -> -1.  */
		  code = INTEGER_CST;
		  rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	    }
	}
    }
 out:
  if (gimple_modified_p (stmt))
    {
      fold_stmt_inplace (gsi);
      update_stmt (stmt);
      if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	return true;
    }

  return false;
}
/* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI.  Returns
   true if anything changed, false otherwise.  */

static bool
associate_pointerplus (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree ptr, rhs, algn;

  /* Pattern match
       tem = (sizetype) ptr;
       tem = tem & algn;
       tem = -tem;
       ... = ptr p+ tem;
     and produce the simpler form, which is easier to analyze with
     respect to alignment:
       ... = ptr & ~algn;  */
  ptr = gimple_assign_rhs1 (stmt);
  rhs = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
    return false;

  rhs = gimple_assign_rhs1 (def_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
    return false;

  rhs = gimple_assign_rhs1 (def_stmt);
  algn = gimple_assign_rhs2 (def_stmt);
  if (TREE_CODE (rhs) != SSA_NAME
      || TREE_CODE (algn) != INTEGER_CST)
    return false;

  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    return false;
  if (gimple_assign_rhs1 (def_stmt) != ptr)
    return false;

  algn = double_int_to_tree (TREE_TYPE (ptr), ~tree_to_double_int (algn));
  gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
  fold_stmt_inplace (gsi);
  update_stmt (stmt);

  return true;
}
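/* The matched sequence is what an align-down idiom lowers to.  With an
   illustrative 16-byte alignment,

     tem1 = (sizetype) ptr;
     tem2 = tem1 & 15;
     tem3 = -tem2;
     res = ptr p+ tem3;

   computes ptr - ((sizetype) ptr & 15) and is rewritten to
   res = ptr & ~15, from which the alignment of res is immediate.  */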
/* Combine two conversions in a row for the second conversion at *GSI.
   Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
   run.  Else it returns 0.  */

static int
combine_conversions (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, lhs;
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum tree_code code2;

  gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
		       || code == FLOAT_EXPR
		       || code == FIX_TRUNC_EXPR);

  lhs = gimple_assign_lhs (stmt);
  op0 = gimple_assign_rhs1 (stmt);
  if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
    {
      gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
      return 1;
    }
  if (TREE_CODE (op0) != SSA_NAME)
    return 0;

  def_stmt = SSA_NAME_DEF_STMT (op0);
  if (!is_gimple_assign (def_stmt))
    return 0;

  code2 = gimple_assign_rhs_code (def_stmt);

  if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
    {
      tree defop0 = gimple_assign_rhs1 (def_stmt);
      tree type = TREE_TYPE (lhs);
      tree inside_type = TREE_TYPE (defop0);
      tree inter_type = TREE_TYPE (op0);
      int inside_int = INTEGRAL_TYPE_P (inside_type);
      int inside_ptr = POINTER_TYPE_P (inside_type);
      int inside_float = FLOAT_TYPE_P (inside_type);
      int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
      unsigned int inside_prec = TYPE_PRECISION (inside_type);
      int inside_unsignedp = TYPE_UNSIGNED (inside_type);
      int inter_int = INTEGRAL_TYPE_P (inter_type);
      int inter_ptr = POINTER_TYPE_P (inter_type);
      int inter_float = FLOAT_TYPE_P (inter_type);
      int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
      unsigned int inter_prec = TYPE_PRECISION (inter_type);
      int inter_unsignedp = TYPE_UNSIGNED (inter_type);
      int final_int = INTEGRAL_TYPE_P (type);
      int final_ptr = POINTER_TYPE_P (type);
      int final_float = FLOAT_TYPE_P (type);
      int final_vec = TREE_CODE (type) == VECTOR_TYPE;
      unsigned int final_prec = TYPE_PRECISION (type);
      int final_unsignedp = TYPE_UNSIGNED (type);
      /* Don't propagate ssa names that occur in abnormal phis.  */
      if (TREE_CODE (defop0) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
	return 0;

      /* In addition to the cases of two conversions in a row
	 handled below, if we are converting something to its own
	 type via an object of identical or wider precision, neither
	 conversion is needed.  */
      if (useless_type_conversion_p (type, inside_type)
	  && (((inter_int || inter_ptr) && final_int)
	      || (inter_float && final_float))
	  && inter_prec >= final_prec)
	{
	  gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
	  gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}
      /* Likewise, if the intermediate and initial types are either both
	 float or both integer, we don't need the middle conversion if the
	 former is wider than the latter and doesn't change the signedness
	 (for integers).  Avoid this if the final type is a pointer since
	 then we sometimes need the middle conversion.  Likewise if the
	 final type has a precision not equal to the size of its mode.  */
      if (((inter_int && inside_int)
	   || (inter_float && inside_float)
	   || (inter_vec && inside_vec))
	  && inter_prec >= inside_prec
	  && (inter_float || inter_vec
	      || inter_unsignedp == inside_unsignedp)
	  && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		&& TYPE_MODE (type) == TYPE_MODE (inter_type))
	  && ! final_ptr
	  && (! final_vec || inter_prec == inside_prec))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}
      /* If we have a sign-extension of a zero-extended value, we can
	 replace that by a single zero-extension.  Likewise if the
	 final conversion does not change precision we can drop the
	 intermediate conversion.  */
      if (inside_int && inter_int && final_int
	  && ((inside_prec < inter_prec && inter_prec < final_prec
	       && inside_unsignedp && !inter_unsignedp)
	      || final_prec == inter_prec))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}
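      /* Example (illustrative types): for unsigned char c,

	   t = (unsigned short) c;
	   r = (int) t;

	 first zero-extends 8 to 16 bits and then sign-extends 16 to 32
	 bits.  The sign-extension can never see a set sign bit, so it is
	 replaced by the single zero-extension r = (int) c.  */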
      /* Two conversions in a row are not needed unless:
	 - some conversion is floating-point (overstrict for now), or
	 - some conversion is a vector (overstrict for now), or
	 - the intermediate type is narrower than both initial and
	   final, or
	 - the intermediate type and innermost type differ in signedness,
	   and the outermost type is wider than the intermediate, or
	 - the initial type is a pointer type and the precisions of the
	   intermediate and final types differ, or
	 - the final type is a pointer type and the precisions of the
	   initial and intermediate types differ.  */
      if (! inside_float && ! inter_float && ! final_float
	  && ! inside_vec && ! inter_vec && ! final_vec
	  && (inter_prec >= inside_prec || inter_prec >= final_prec)
	  && ! (inside_int && inter_int
		&& inter_unsignedp != inside_unsignedp
		&& inter_prec < final_prec)
	  && ((inter_unsignedp && inter_prec > inside_prec)
	      == (final_unsignedp && final_prec > inter_prec))
	  && ! (inside_ptr && inter_prec != final_prec)
	  && ! (final_ptr && inside_prec != inter_prec)
	  && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		&& TYPE_MODE (type) == TYPE_MODE (inter_type)))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}
      /* A truncation to an unsigned type should be canonicalized as
	 bitwise and of a mask.  */
      if (final_int && inter_int && inside_int
	  && final_prec == inside_prec
	  && final_prec > inter_prec
	  && inter_unsignedp)
	{
	  tree tem;
	  tem = fold_build2 (BIT_AND_EXPR, inside_type,
			     defop0,
			     double_int_to_tree
			       (inside_type, double_int::mask (inter_prec)));
	  if (!useless_type_conversion_p (type, inside_type))
	    {
	      tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
					      GSI_SAME_STMT);
	      gimple_assign_set_rhs1 (stmt, tem);
	    }
	  else
	    gimple_assign_set_rhs_from_tree (gsi, tem);
	  update_stmt (gsi_stmt (*gsi));
	  return 1;
	}
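      /* Example (illustrative types): for int i,

	   t = (unsigned char) i;
	   r = (int) t;

	 is canonicalized to r = i & 255, a form the rest of the middle
	 end recognizes more readily than the conversion pair.  */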
      /* If we are converting an integer to a floating-point that can
	 represent it exactly and back to an integer, we can skip the
	 floating-point conversion.  */
      if (inside_int && inter_float && final_int &&
	  (unsigned) significand_size (TYPE_MODE (inter_type))
	  >= inside_prec - !inside_unsignedp)
	{
	  if (useless_type_conversion_p (type, inside_type))
	    {
	      gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
	      gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
	      update_stmt (stmt);
	      return remove_prop_source_from_use (op0) ? 2 : 1;
	    }
	  else
	    {
	      gimple_assign_set_rhs1 (stmt, defop0);
	      gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
	      update_stmt (stmt);
	      return remove_prop_source_from_use (op0) ? 2 : 1;
	    }
	}
    }

  return 0;
}
/* Combine an element access with a shuffle.  Returns true if there
   were any changes made, else it returns false.  */

static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op0, op1, op2;
  tree elem_type;
  unsigned idx, n, size;
  enum tree_code code;

  op = gimple_assign_rhs1 (stmt);
  gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);

  op0 = TREE_OPERAND (op, 0);
  if (TREE_CODE (op0) != SSA_NAME
      || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
    return false;

  def_stmt = get_prop_source_stmt (op0, false, NULL);
  if (!def_stmt || !can_propagate_from (def_stmt))
    return false;

  op1 = TREE_OPERAND (op, 1);
  op2 = TREE_OPERAND (op, 2);
  code = gimple_assign_rhs_code (def_stmt);

  if (code == CONSTRUCTOR)
    {
      tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
			       gimple_assign_rhs1 (def_stmt), op1, op2);
      if (!tem || !valid_gimple_rhs_p (tem))
	return false;
      gimple_assign_set_rhs_from_tree (gsi, tem);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  elem_type = TREE_TYPE (TREE_TYPE (op0));
  if (TREE_TYPE (op) != elem_type)
    return false;

  size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
  n = TREE_INT_CST_LOW (op1) / size;
  if (n != 1)
    return false;
  idx = TREE_INT_CST_LOW (op2) / size;

  if (code == VEC_PERM_EXPR)
    {
      tree p, m, index, tem;
      unsigned nelts;
      m = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (m) != VECTOR_CST)
	return false;
      nelts = VECTOR_CST_NELTS (m);
      idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
      idx %= 2 * nelts;
      if (idx < nelts)
	{
	  p = gimple_assign_rhs1 (def_stmt);
	}
      else
	{
	  p = gimple_assign_rhs2 (def_stmt);
	  idx -= nelts;
	}
      index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
      tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
		    unshare_expr (p), op1, index);
      gimple_assign_set_rhs1 (stmt, tem);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}
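/* Illustrative example (made-up lane counts): given
   v2 = VEC_PERM_EXPR <v0, v1, { 2, 5, 0, 7 }>, extracting lane 1 of v2
   looks up mask element 5; since 5 >= 4 it selects lane 5 - 4 == 1 of
   the second input, so the BIT_FIELD_REF is redirected to read v1
   directly.  */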
/* Determine whether applying the 2 permutations (mask1 then mask2)
   gives back one of the inputs.  */

static int
is_combined_permutation_identity (tree mask1, tree mask2)
{
  tree mask;
  unsigned int nelts, i, j;
  bool maybe_identity1 = true;
  bool maybe_identity2 = true;

  gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
		       && TREE_CODE (mask2) == VECTOR_CST);
  mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
  gcc_assert (TREE_CODE (mask) == VECTOR_CST);

  nelts = VECTOR_CST_NELTS (mask);
  for (i = 0; i < nelts; i++)
    {
      tree val = VECTOR_CST_ELT (mask, i);
      gcc_assert (TREE_CODE (val) == INTEGER_CST);
      j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
      if (j == i)
	maybe_identity2 = false;
      else if (j == i + nelts)
	maybe_identity1 = false;
      else
	return 0;
    }
  return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
}
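/* Illustrative four-lane masks: mask1 = { 1, 0, 3, 2 } followed by
   mask2 = { 1, 0, 3, 2 } combines to { 0, 1, 2, 3 }, i.e. every lane
   comes from the first input in order, so the function returns 1.  The
   j == i + nelts test catches the analogous identity on the second
   input, giving a return value of 2.  */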
/* Combine a shuffle with its arguments.  Returns 1 if there were any
   changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.  */

static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
	return 0;

      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
	return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
	return 0;
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
	return 0;
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
			  : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }

  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
    {
      tree opt;
      bool ret = false;
      if (op0 != op1)
	{
	  if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
	    return 0;

	  if (TREE_CODE (op1) == VECTOR_CST)
	    arg1 = op1;
	  else if (TREE_CODE (op1) == SSA_NAME)
	    {
	      enum tree_code code2;

	      gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
	      if (!def_stmt2 || !can_propagate_from (def_stmt2))
		return 0;

	      code2 = gimple_assign_rhs_code (def_stmt2);
	      if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
		return 0;
	      arg1 = gimple_assign_rhs1 (def_stmt2);
	    }
	  else
	    return 0;
	}
      else
	{
	  /* Already used twice in this statement.  */
	  if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
	    return 0;
	  arg1 = arg0;
	}
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
      if (!opt
	  || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
	return 0;
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
	ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
	ret |= remove_prop_source_from_use (op1);

      return ret ? 2 : 1;
    }

  return 0;
}
/* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */

static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op2, orig, type, elem_type;
  unsigned elem_size, nelts, i;
  enum tree_code code;
  constructor_elt *elt;
  unsigned char *sel;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  nelts = TYPE_VECTOR_SUBPARTS (type);
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  sel = XALLOCAVEC (unsigned char, nelts);
  orig = NULL;
  maybe_ident = true;
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;

      if (i >= nelts)
	return false;

      if (TREE_CODE (elt->value) != SSA_NAME)
	return false;
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      if (!def_stmt)
	return false;
      code = gimple_assign_rhs_code (def_stmt);
      if (code != BIT_FIELD_REF)
	return false;
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      if (orig)
	{
	  if (ref != orig)
	    return false;
	}
      else
	{
	  if (TREE_CODE (ref) != SSA_NAME)
	    return false;
	  if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
	    return false;
	  orig = ref;
	}
      if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
	return false;
      sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
      if (sel[i] != i) maybe_ident = false;
    }
  if (i < nelts)
    return false;

  if (maybe_ident)
    gimple_assign_set_rhs_from_tree (gsi, orig);
  else
    {
      tree mask_type, *mask_elts;

      if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
	return false;
      mask_type
	= build_vector_type (build_nonstandard_integer_type (elem_size, 1),
			     nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
	  || GET_MODE_SIZE (TYPE_MODE (mask_type))
	     != GET_MODE_SIZE (TYPE_MODE (type)))
	return false;
      mask_elts = XALLOCAVEC (tree, nelts);
      for (i = 0; i < nelts; i++)
	mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
      op2 = build_vector (mask_type, mask_elts);
      gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
    }
  update_stmt (gsi_stmt (*gsi));

  return true;
}
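/* Illustrative input (example names): a constructor
   { BIT_FIELD_REF <v, 32, 32>, BIT_FIELD_REF <v, 32, 0>,
     BIT_FIELD_REF <v, 32, 96>, BIT_FIELD_REF <v, 32, 64> }
   of a four-lane vector of 32-bit elements reads lanes { 1, 0, 3, 2 }
   of the same vector v and is rewritten to
   v2 = VEC_PERM_EXPR <v, v, { 1, 0, 3, 2 }>; had it read lanes
   { 0, 1, 2, 3 }, it would have been replaced by v itself.  */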
/* Main entry point for the forward propagation and statement combine
   optimizer.  */

static unsigned int
ssa_forward_propagate_and_combine (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* Apply forward propagation to all stmts in the basic-block.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree lhs, rhs;
	  enum tree_code code;

	  if (!is_gimple_assign (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);
	  code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE (lhs) != SSA_NAME
	      || has_zero_uses (lhs))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
	  if (code == ADDR_EXPR
	      /* Handle pointer conversions on invariant addresses
		 as well, as this is valid gimple.  */
	      || (CONVERT_EXPR_CODE_P (code)
		  && TREE_CODE (rhs) == ADDR_EXPR
		  && POINTER_TYPE_P (TREE_TYPE (lhs))))
	    {
	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
	      if ((!base
		   || !DECL_P (base)
		   || decl_address_invariant_p (base))
		  && !stmt_references_abnormal_ssa_name (stmt)
		  && forward_propagate_addr_expr (lhs, rhs))
		{
		  release_defs (stmt);
		  todoflags |= TODO_remove_unused_locals;
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == POINTER_PLUS_EXPR)
	    {
	      tree off = gimple_assign_rhs2 (stmt);
	      if (TREE_CODE (off) == INTEGER_CST
		  && can_propagate_from (stmt)
		  && !simple_iv_increment_p (stmt)
		  /* ??? Better adjust the interface to that function
		     instead of building new trees here.  */
		  && forward_propagate_addr_expr
		       (lhs,
			build1_loc (gimple_location (stmt),
				    ADDR_EXPR, TREE_TYPE (rhs),
				    fold_build2 (MEM_REF,
						 TREE_TYPE (TREE_TYPE (rhs)),
						 rhs,
						 fold_convert (ptr_type_node,
							       off)))))
		{
		  release_defs (stmt);
		  todoflags |= TODO_remove_unused_locals;
		  gsi_remove (&gsi, true);
		}
	      else if (is_gimple_min_invariant (rhs))
		{
		  /* Make sure to fold &a[0] + off_1 here.  */
		  fold_stmt_inplace (&gsi);
		  update_stmt (stmt);
		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
		    gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (TREE_CODE_CLASS (code) == tcc_comparison)
	    {
	      if (forward_propagate_comparison (&gsi))
		cfg_changed = true;
	    }
	  else
	    gsi_next (&gsi);
	}
      /* Combine stmts with the stmts defining their operands.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool changed = false;

	  /* Mark stmt as potentially needing revisiting.  */
	  gimple_set_plf (stmt, GF_PLF_1, false);

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_ASSIGN:
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		enum tree_code code = gimple_assign_rhs_code (stmt);

		if ((code == BIT_NOT_EXPR
		     || code == NEGATE_EXPR)
		    && TREE_CODE (rhs1) == SSA_NAME)
		  changed = simplify_not_neg_expr (&gsi);
		else if (code == COND_EXPR
			 || code == VEC_COND_EXPR)
		  {
		    /* In this case the entire COND_EXPR is in rhs1. */
		    if (forward_propagate_into_cond (&gsi)
			|| combine_cond_exprs (&gsi))
		      {
			changed = true;
			stmt = gsi_stmt (gsi);
		      }
		  }
		else if (TREE_CODE_CLASS (code) == tcc_comparison)
		  {
		    int did_something;
		    did_something = forward_propagate_into_comparison (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if (code == BIT_AND_EXPR
			 || code == BIT_IOR_EXPR
			 || code == BIT_XOR_EXPR)
		  changed = simplify_bitwise_binary (&gsi);
		else if (code == PLUS_EXPR
			 || code == MINUS_EXPR)
		  changed = associate_plusminus (&gsi);
		else if (code == POINTER_PLUS_EXPR)
		  changed = associate_pointerplus (&gsi);
		else if (CONVERT_EXPR_CODE_P (code)
			 || code == FLOAT_EXPR
			 || code == FIX_TRUNC_EXPR)
		  {
		    int did_something = combine_conversions (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if (code == VEC_PERM_EXPR)
		  {
		    int did_something = simplify_permutation (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if (code == BIT_FIELD_REF)
		  changed = simplify_bitfield_ref (&gsi);
		else if (code == CONSTRUCTOR
			 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
		  changed = simplify_vector_constructor (&gsi);
		break;
	      }

	    case GIMPLE_SWITCH:
	      changed = simplify_gimple_switch (stmt);
	      break;

	    case GIMPLE_COND:
	      {
		int did_something;
		did_something = forward_propagate_into_gimple_cond (stmt);
		if (did_something == 2)
		  cfg_changed = true;
		changed = did_something != 0;
		break;
	      }

	    case GIMPLE_CALL:
	      {
		tree callee = gimple_call_fndecl (stmt);
		if (callee != NULL_TREE
		    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
		  changed = simplify_builtin_call (&gsi, callee);
		break;
	      }

	    default:;
	    }

	  if (changed)
	    {
	      /* If the stmt changed then re-visit it and the statements
		 inserted before it.  */
	      for (; !gsi_end_p (gsi); gsi_prev (&gsi))
		if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
		  break;
	      if (gsi_end_p (gsi))
		gsi = gsi_start_bb (bb);
	      else
		gsi_next (&gsi);
	    }
	  else
	    {
	      /* Stmt no longer needs to be revisited.  */
	      gimple_set_plf (stmt, GF_PLF_1, true);
	      gsi_next (&gsi);
	    }
	}
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
static bool
gate_forwprop (void)
{
  return flag_tree_forwprop;
}

struct gimple_opt_pass pass_forwprop =
{
 {
  GIMPLE_PASS,
  "forwprop",			/* name */
  OPTGROUP_NONE,		/* optinfo_flags */
  gate_forwprop,		/* gate */
  ssa_forward_propagate_and_combine,	/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_FORWPROP,		/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa		/* todo_flags_finish */
 }
};