/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "tree-ssa-propagate.h"
/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither has volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0;

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);
/* Get the next statement we can propagate NAME's value into, skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL_TREE if there is no such one.
   This only returns destinations in a single-use chain.  If
   FINAL_NAME_P is non-NULL, the SSA name that represents the use is
   written to it.  */

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}
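/* An illustrative sketch (the SSA names below are invented): given

     c_3 = a_1;
     if (c_3 != 0) goto ...;

   calling get_prop_dest_stmt on a_1 skips the trivial copy, returns
   the GIMPLE_COND as the propagation destination and, if requested,
   writes c_3 to *FINAL_NAME_P.  */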
/* Get the statement we can propagate from into NAME, skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL_TREE if there is no such one.
   If SINGLE_USE_ONLY is set, consider only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        return def_stmt;
      }
  } while (1);
}
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we can not apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}
/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining
   statements.  If NAME was replaced in its only use then this function
   can be used to clean up dead stmts.  The function handles already
   released SSA names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      cfg_changed |= gimple_purge_dead_eh_edges (bb);
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}
/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operand.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, !single_use0_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}
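/* An illustrative sketch of the combine above (names invented): for

     x_1 = a_2 < b_3;
     if (x_1 != 0) goto ...;

   substituting the definition of x_1 into the comparison and folding
   yields the simpler condition a_2 < b_3, which the callers below
   then install in place of the original test.  */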
/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}
/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
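/* An illustrative sketch of the canonicalization above (names
   invented): a condition on a _Bool such as

     if (b_1 == 0) goto L1; else goto L2;

   becomes

     if (b_1 != 0) goto L2; else goto L1;

   i.e. the code is set to NE_EXPR against zero and the true and
   false edges are exchanged, so later passes see only the NE_EXPR
   form.  */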
/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool swap = false;

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               TREE_TYPE (cond),
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return false;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               def_code,
                               TREE_TYPE (cond),
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
      else if (code == COND_EXPR
               && ((def_code == BIT_NOT_EXPR
                    && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
                   || (def_code == BIT_XOR_EXPR
                       && integer_onep (gimple_assign_rhs2 (def_stmt)))))
        {
          tmp = gimple_assign_rhs1 (def_stmt);
          swap = true;
        }
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
                                  : integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
          if (swap)
            {
              tree t = gimple_assign_rhs2 (stmt);
              gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
              gimple_assign_set_rhs3 (stmt, t);
            }
        }
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}
/* Propagate from the ssa name definition statements of COND_EXPR
   values in the rhs of statement STMT into the conditional arms
   if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
combine_cond_exprs (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree cond, val1, val2;
  bool changed = false;

  cond = gimple_assign_rhs1 (stmt);
  val1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (val1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val1);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
          gimple_assign_set_rhs2 (stmt, val1);
          changed = true;
        }
    }
  val2 = gimple_assign_rhs3 (stmt);
  if (TREE_CODE (val2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val2);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
          gimple_assign_set_rhs3 (stmt, val2);
          changed = true;
        }
    }
  if (operand_equal_p (val1, val2, 0))
    {
      gimple_assign_set_rhs_from_tree (gsi_p, val1);
      stmt = gsi_stmt (*gsi_p);
      changed = true;
    }

  if (changed)
    update_stmt (stmt);

  return changed;
}
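/* An illustrative sketch (names invented): with

     x_1 = c_2 ? v_3 : w_4;
     y_5 = c_2 ? x_1 : z_6;

   the first arm of y_5 can be replaced by v_3, because x_1 is only
   selected when c_2 is true; if both arms then become equal the whole
   COND_EXPR collapses into a plain copy.  */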
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */
static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
          || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
         do the propagation to the current statement.  In particular
         we can end up with a conversion needed for a non-invariant
         address which we cannot do in a single statement.  */
      if (!single_use_p
          || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
              && (!is_gimple_min_invariant (def_rhs)
                  || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                      && POINTER_TYPE_P (TREE_TYPE (def_rhs))
                      && (TYPE_PRECISION (TREE_TYPE (lhs))
                          > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
        return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
        gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }
  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }
  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = gimple_assign_lhs (use_stmt);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return res;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (gimple_assign_lhs (use_stmt) == lhs
               && integer_zerop (TREE_OPERAND (lhs, 1))
               && useless_type_conversion_p
                    (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                     TREE_TYPE (gimple_assign_rhs1 (use_stmt)))
               /* Don't forward anything into clobber stmts if it would result
                  in the lhs no longer being a MEM_REF.  */
               && (!gimple_clobber_p (use_stmt)
                   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          gimple_assign_set_lhs (use_stmt, new_lhs);
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return res;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }
  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhs = gimple_assign_rhs1 (use_stmt);
  if (TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);
  while (handled_component_p (rhs))
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (gimple_assign_rhs1 (use_stmt) == rhs
               && integer_zerop (TREE_OPERAND (rhs, 1))
               && useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                     TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          gimple_assign_set_rhs1 (use_stmt, new_rhs);
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }
  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = bb_loop_depth (gimple_bb (SSA_NAME_DEF_STMT (name)));
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      /* If the use is in a deeper loop nest, then we do not want
         to propagate non-invariant ADDR_EXPRs into the loop as that
         is likely adding expression evaluations into the loop.  */
      if (bb_loop_depth (gimple_bb (use_stmt)) > stmt_loop_depth
          && !is_gimple_min_invariant (rhs))
        {
          all = false;
          continue;
        }

      {
        gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
        result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                                single_use_p);
        /* If the use has moved to a different statement adjust
           the update machinery for the old statement too.  */
        if (use_stmt != gsi_stmt (gsi))
          {
            update_stmt (use_stmt);
            use_stmt = gsi_stmt (gsi);
          }
        update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}
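/* An illustrative sketch (names invented): for

     ptr_1 = &a.b.c;
     x_2 = *ptr_1;

   propagating the ADDR_EXPR into the dereference turns the load into
   x_2 = a.b.c, leaving ptr_1 with zero uses so that its now-dead
   definition can be cleaned up afterwards.  */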
/* Forward propagate the comparison defined in *DEFGSI like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  Advance DEFGSI to the next
   statement.  */

static bool
forward_propagate_comparison (gimple_stmt_iterator *defgsi)
{
  gimple stmt = gsi_stmt (*defgsi);
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    goto bailout;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    goto bailout;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    goto bailout;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
          && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        goto bailout;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    goto bailout;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* When we remove stmt now the iterator defgsi goes off its current
     sequence, hence advance it now.  */
  gsi_next (defgsi);

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);

bailout:
  gsi_next (defgsi);
  return false;
}
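/* An illustrative sketch (names invented): given

     cond_1 = a_2 < b_3;
     x_4 = cond_1 ^ 1;

   the comparison is inverted and propagated into the use, producing

     x_4 = a_2 >= b_3;

   assuming the operand type allows the inversion (for floating point,
   honoring NaNs may make invert_tree_comparison fail).  */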
/* GSI_P points to a statement which performs a narrowing integral
   conversion.

   Look for cases like:

     t = x & c;
     y = (T) t;

   Turn them into:

     t = x & c;
     y = (T) x;

   If T is narrower than X's type and C merely masks off bits outside
   of (T) and nothing else.

   Normally we'd let DCE remove the dead statement.  But no DCE runs
   after the last forwprop/combine pass, so we remove the obviously
   dead code ourselves.

   Return TRUE if a change was made, FALSE otherwise.  */

static bool
simplify_conversion_from_bitmask (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

  /* See if the input for the conversion was set via a BIT_AND_EXPR and
     the only use of the BIT_AND_EXPR result is the conversion.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == BIT_AND_EXPR
      && has_single_use (gimple_assign_lhs (rhs_def_stmt)))
    {
      tree rhs_def_operand1 = gimple_assign_rhs1 (rhs_def_stmt);
      tree rhs_def_operand2 = gimple_assign_rhs2 (rhs_def_stmt);
      tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));

      /* Now verify suitability of the BIT_AND_EXPR's operands.
         The first must be an SSA_NAME that we can propagate and the
         second must be an integer constant that masks out all the
         bits outside the final result's type, but nothing else.  */
      if (TREE_CODE (rhs_def_operand1) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand1)
          && TREE_CODE (rhs_def_operand2) == INTEGER_CST
          && operand_equal_p (rhs_def_operand2,
                              build_low_bits_mask (TREE_TYPE (rhs_def_operand2),
                                                   TYPE_PRECISION (lhs_type)),
                              0))
        {
          /* This is an optimizable case.  Replace the source operand
             in the conversion with the first source operand of the
             BIT_AND_EXPR.  */
          gimple_assign_set_rhs1 (stmt, rhs_def_operand1);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);

          /* There is no DCE after the last forwprop pass.  It's
             easy to clean up the first order effects here.  */
          gimple_stmt_iterator si;
          si = gsi_for_stmt (rhs_def_stmt);
          gsi_remove (&si, true);
          release_defs (rhs_def_stmt);
          return true;
        }
    }

  return false;
}
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.

   Returns true when the statement was changed.  */

static bool
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
          return true;
        }
    }

  return false;
}
/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  vec<tree> labels;
  labels.create (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }

  labels.release ();
}
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}
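/* An illustrative sketch (names invented): for unsigned char c_2 in

     i_1 = (int) c_2;
     switch (i_1) { ... }

   the widening conversion preserves the value, so the switch can test
   c_2 directly; case labels that fall outside the range of unsigned
   char are then discarded by simplify_gimple_switch_label_vec.  */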
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    double_int_to_tree (sizetype,
                                                        mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
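/* An illustrative sketch (names invented): for char buf[10] and

     p1_1 = &buf[2];
     p2_2 = p1_1 + 5;

   both chains meet at the common expression p1_1, with accumulated
   offsets 0 and 5 respectively, so constant_pointer_difference
   (p1_1, p2_2) would return 5.  */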
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into a single
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.  */
static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!host_integerp (val2, 0)
              || !host_integerp (len2, 1))
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!host_integerp (len1, 1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              if (!host_integerp (off1, 1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_low_cst (off1, 1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !host_integerp (src1, 0))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;
          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !host_integerp (diff, 1)
              || tree_int_cst_lt (len1, diff))
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_low_cst (diff, 1);
          src_len += tree_low_cst (len2, 1);
          if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
            src_len = tree_low_cst (len1, 1);
          if (src_len > 64)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
                    tree_low_cst (len1, 1));
          else
            src_buf[0] = tree_low_cst (src1, 0);
          memset (src_buf + tree_low_cst (diff, 1),
                  tree_low_cst (val2, 0), tree_low_cst (len2, 1));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                release_ssa_name (lhs1);
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2,
                                      builtin_decl_explicit (BUILT_IN_MEMCPY));
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              release_defs (stmt1);
              update_stmt (stmt2);
              return true;
            }
        }
      break;
    default:
      break;
    }
  return false;
}
/* Checks if expression has type of one-bit precision, or is a known
   truth-valued expression.  */
static bool
truth_valued_ssa_name (tree name)
{
  gimple def;
  tree type = TREE_TYPE (name);

  if (!INTEGRAL_TYPE_P (type))
    return false;
  /* Don't check here for BOOLEAN_TYPE as the precision isn't
     necessarily one and so ~X is not equal to !X.  */
  if (TYPE_PRECISION (type) == 1)
    return true;
  def = SSA_NAME_DEF_STMT (name);
  if (is_gimple_assign (def))
    return truth_value_p (gimple_assign_rhs_code (def));
  return false;
}
/* Helper routine for the simplify_bitwise_binary_1 function.
   Return for the SSA name NAME the expression X if it meets the
   condition NAME = !X.  Otherwise return NULL_TREE.
   Detected patterns for NAME = !X are:
     !X and X == 0 for X with integral type.
     X ^ 1, X != 1, or ~X for X with integral type with precision of one.  */
static tree
lookup_logical_inverted_value (tree name)
{
  tree op1, op2;
  enum tree_code code;
  gimple def;

  /* If name has a non-integral type, or isn't an SSA_NAME, then
     return.  */
  if (TREE_CODE (name) != SSA_NAME
      || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
    return NULL_TREE;
  def = SSA_NAME_DEF_STMT (name);
  if (!is_gimple_assign (def))
    return NULL_TREE;

  code = gimple_assign_rhs_code (def);
  op1 = gimple_assign_rhs1 (def);
  op2 = NULL_TREE;

  /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
     If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return.  */
  if (code == EQ_EXPR || code == NE_EXPR
      || code == BIT_XOR_EXPR)
    op2 = gimple_assign_rhs2 (def);

  switch (code)
    {
    case BIT_NOT_EXPR:
      if (truth_valued_ssa_name (name))
        return op1;
      break;
    case EQ_EXPR:
      /* Check if we have X == 0 and X has an integral type.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_zerop (op2))
        return op1;
      break;
    case NE_EXPR:
      /* Check if we have X != 1 and X is truth-valued.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    case BIT_XOR_EXPR:
      /* Check if we have X ^ 1 and X is truth valued.  */
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    default:
      break;
    }

  return NULL_TREE;
}
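/* An illustrative sketch (names invented): for integral x_2 in

     t_1 = x_2 == 0;

   lookup_logical_inverted_value (t_1) returns x_2, since t_1 holds
   the logical negation of x_2.  */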
/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
   operations CODE, if one operand has the logically inverted
   value of the other.  */
static tree
simplify_bitwise_binary_1 (enum tree_code code, tree type,
                           tree arg1, tree arg2)
{
  tree anot;

  /* If CODE isn't a bitwise binary operation, return NULL_TREE.  */
  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
      && code != BIT_XOR_EXPR)
    return NULL_TREE;

  /* First check if operands ARG1 and ARG2 are equal.  If so
     return NULL_TREE as this optimization is handled by fold_stmt.  */
  if (arg1 == arg2)
    return NULL_TREE;
  /* See if we have in arguments logical-not patterns.  */
  if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
       || anot != arg2)
      && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
          || anot != arg1))
    return NULL_TREE;

  /* X & !X -> 0.  */
  if (code == BIT_AND_EXPR)
    return fold_convert (type, integer_zero_node);
  /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued.  */
  if (truth_valued_ssa_name (anot))
    return fold_convert (type, integer_one_node);

  /* ??? Otherwise result is (X != 0 ? X : 1).  Not handled.  */
  return NULL_TREE;
}
/* Given a ssa_name in NAME see if it was defined by an assignment and
   set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
   to the second operand on the rhs.  */

static void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
          && can_propagate_from (def))
        {
          code1 = gimple_assign_rhs_code (def);
          arg11 = gimple_assign_rhs1 (def);
          arg21 = gimple_assign_rhs2 (def);
          arg31 = gimple_assign_rhs3 (def);
        }
    }
  else if (grhs_class == GIMPLE_TERNARY_RHS
           || grhs_class == GIMPLE_BINARY_RHS
           || grhs_class == GIMPLE_UNARY_RHS
           || grhs_class == GIMPLE_SINGLE_RHS)
    extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
  /* Ignore arg3 currently.  */
}
/* Return true if a conversion of an operand from type FROM to type TO
   should be applied after performing the operation instead.  */

static bool
hoist_conversion_for_bitop_p (tree to, tree from)
{
  /* That's a good idea if the conversion widens the operand, thus
     after hoisting the conversion the operation will be narrower.  */
  if (TYPE_PRECISION (from) < TYPE_PRECISION (to))
    return true;

  /* It's also a good idea if the conversion is to a non-integer mode.  */
  if (GET_MODE_CLASS (TYPE_MODE (to)) != MODE_INT)
    return true;

  /* Or if the precision of TO is not the same as the precision
     of its mode.  */
  if (TYPE_PRECISION (to) != GET_MODE_PRECISION (TYPE_MODE (to)))
    return true;

  return false;
}
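/* An illustrative sketch (names invented): for short s_2 in

     w_1 = (int) s_2;
     r_3 = w_1 & 7;

   hoisting the conversion is profitable because the AND can be done
   in the narrower type first:

     t_4 = s_2 & 7;
     r_3 = (int) t_4;  */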
/* GSI points to a statement of the form

   result = OP0 CODE OP1

   Where OP0 and OP1 are single bit SSA_NAMEs and CODE is either
   BIT_AND_EXPR or BIT_IOR_EXPR.

   If OP0 is fed by a bitwise negation of another single bit SSA_NAME,
   then we can simplify the two statements into a single LT_EXPR or LE_EXPR
   when code is BIT_AND_EXPR and BIT_IOR_EXPR respectively.

   If a simplification is made, return TRUE, else return FALSE.  */

static bool
simplify_bitwise_binary_boolean (gimple_stmt_iterator *gsi,
                                 enum tree_code code,
                                 tree op0, tree op1)
{
  gimple op0_def_stmt = SSA_NAME_DEF_STMT (op0);

  if (!is_gimple_assign (op0_def_stmt)
      || (gimple_assign_rhs_code (op0_def_stmt) != BIT_NOT_EXPR))
    return false;

  tree x = gimple_assign_rhs1 (op0_def_stmt);
  if (TREE_CODE (x) == SSA_NAME
      && INTEGRAL_TYPE_P (TREE_TYPE (x))
      && TYPE_PRECISION (TREE_TYPE (x)) == 1
      && TYPE_UNSIGNED (TREE_TYPE (x)) == TYPE_UNSIGNED (TREE_TYPE (op1)))
    {
      enum tree_code newcode;

      gimple stmt = gsi_stmt (*gsi);
      gimple_assign_set_rhs1 (stmt, x);
      gimple_assign_set_rhs2 (stmt, op1);
      if (code == BIT_AND_EXPR)
        newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LT_EXPR : GT_EXPR;
      else
        newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LE_EXPR : GE_EXPR;
      gimple_assign_set_rhs_code (stmt, newcode);
      update_stmt (stmt);
      return true;
    }
  return false;
}
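/* An illustrative sketch (names invented): with one-bit unsigned
   x_1 and y_2,

     t_3 = ~x_1;
     r_4 = t_3 & y_2;

   is rewritten as r_4 = x_1 < y_2, which is true exactly when x_1 is
   0 and y_2 is 1.  */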
/* Simplify bitwise binary operations.
   Return true if a transformation applied, otherwise return false.  */

static bool
simplify_bitwise_binary (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg1 = gimple_assign_rhs1 (stmt);
  tree arg2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree res;
  tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
  enum tree_code def1_code, def2_code;

  defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
  defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);

  /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST))
     when profitable.  */
  if (TREE_CODE (arg2) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (def1_code)
      && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1))
      && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
      && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
    {
      gimple newop;
      tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
      newop =
        gimple_build_assign_with_ops (code, tem, def1_arg1,
                                      fold_convert_loc (gimple_location (stmt),
                                                        TREE_TYPE (def1_arg1),
                                                        arg2));
      gimple_set_location (newop, gimple_location (stmt));
      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
                                        tem, NULL_TREE, NULL_TREE);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }
  /* For bitwise binary operations apply operand conversions to the
     binary operation result instead of to the operands.  This allows
     to combine successive conversions and bitwise binary operations.  */
  if (CONVERT_EXPR_CODE_P (def1_code)
      && CONVERT_EXPR_CODE_P (def2_code)
      && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
      && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1)))
    {
      gimple newop;
      tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
      newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
      gimple_set_location (newop, gimple_location (stmt));
      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
                                        tem, NULL_TREE, NULL_TREE);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }
  /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B.  */
  if (def1_code == def2_code
      && def1_code == BIT_AND_EXPR
      && operand_equal_for_phi_arg_p (def1_arg2,
                                      def2_arg2))
    {
      tree b = def1_arg2;
      tree a = def1_arg1;
      tree c = def2_arg1;
      tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
      /* If A OP0 C (this usually means C is the same as A) is 0
         then fold it down correctly.  */
      if (integer_zerop (inner))
        {
          gimple_assign_set_rhs_from_tree (gsi, inner);
          update_stmt (stmt);
          return true;
        }
      /* If A OP0 C (this usually means C is the same as A) is an
         SSA_NAME then fold it down correctly.  */
      else if (TREE_CODE (inner) == SSA_NAME)
        {
          tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
                                    inner, b);
          gimple_assign_set_rhs_from_tree (gsi, outer);
          update_stmt (stmt);
          return true;
        }
      else
        {
          gimple newop;
          tree tem;
          tem = make_ssa_name (TREE_TYPE (arg2), NULL);
          newop = gimple_build_assign_with_ops (code, tem, a, c);
          gimple_set_location (newop, gimple_location (stmt));
          /* Make sure to re-process the new stmt as it's walking upwards.  */
          gsi_insert_before (gsi, newop, GSI_NEW_STMT);
          gimple_assign_set_rhs1 (stmt, tem);
          gimple_assign_set_rhs2 (stmt, b);
          gimple_assign_set_rhs_code (stmt, def1_code);
          update_stmt (stmt);
          return true;
        }
    }
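  /* As an example, with SSA names a_1, c_2 and b_3 this rewrites

       t_4 = a_1 & b_3;
       t_5 = c_2 & b_3;
       r_6 = t_4 | t_5;

     into r_6 = (a_1 | c_2) & b_3; if a_1 | c_2 folds to zero or to an
     existing name, the statement collapses further as handled above.  */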
  /* (a | CST1) & CST2  ->  (a & CST2) | (CST1 & CST2).  */
  if (code == BIT_AND_EXPR
      && def1_code == BIT_IOR_EXPR
      && CONSTANT_CLASS_P (arg2)
      && CONSTANT_CLASS_P (def1_arg2))
    {
      tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
                              arg2, def1_arg2);
      tree tem;
      gimple newop;
      if (integer_zerop (cst))
        {
          gimple_assign_set_rhs1 (stmt, def1_arg1);
          update_stmt (stmt);
          return true;
        }
      tem = make_ssa_name (TREE_TYPE (arg2), NULL);
      newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
                                            tem, def1_arg1, arg2);
      gimple_set_location (newop, gimple_location (stmt));
      /* Make sure to re-process the new stmt as it's walking upwards.  */
      gsi_insert_before (gsi, newop, GSI_NEW_STMT);
      gimple_assign_set_rhs1 (stmt, tem);
      gimple_assign_set_rhs2 (stmt, cst);
      gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
      update_stmt (stmt);
      return true;
    }
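  /* For example, "(x | 0xf0) & 0x0f" has CST1 & CST2 == 0, so the
     BIT_IOR_EXPR drops out entirely and the result is just x & 0x0f;
     otherwise the folded constant is or'ed back into the rewritten
     expression.  */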
  /* Combine successive equal operations with constants.  */
  if ((code == BIT_AND_EXPR
       || code == BIT_IOR_EXPR
       || code == BIT_XOR_EXPR)
      && def1_code == code
      && CONSTANT_CLASS_P (arg2)
      && CONSTANT_CLASS_P (def1_arg2))
    {
      tree cst = fold_build2 (code, TREE_TYPE (arg2),
                              arg2, def1_arg2);
      gimple_assign_set_rhs1 (stmt, def1_arg1);
      gimple_assign_set_rhs2 (stmt, cst);
      update_stmt (stmt);
      return true;
    }
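  /* E.g. "(x & 0xff00) & 0x0ff0" becomes "x & 0x0f00" by folding the
     two constants with the common operation.  */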
  /* Canonicalize X ^ ~0 to ~X.  */
  if (code == BIT_XOR_EXPR
      && integer_all_onesp (arg2))
    {
      gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
      gcc_assert (gsi_stmt (*gsi) == stmt);
      update_stmt (stmt);
      return true;
    }
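  /* E.g. for a 32-bit unsigned x, "x ^ 0xffffffff" becomes "~x".  */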
  /* Try simple folding for X op !X, and X op X.  */
  res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
  if (res != NULL_TREE)
    {
      gimple_assign_set_rhs_from_tree (gsi, res);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }
  if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
    {
      enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
      if (def1_code == ocode)
        {
          tree x = arg2;
          enum tree_code coden;
          tree a1, a2;
          /* ( X | Y) & X -> X */
          /* ( X & Y) | X -> X */
          if (x == def1_arg1
              || x == def1_arg2)
            {
              gimple_assign_set_rhs_from_tree (gsi, x);
              update_stmt (gsi_stmt (*gsi));
              return true;
            }

          defcodefor_name (def1_arg1, &coden, &a1, &a2);
          /* (~X | Y) & X -> X & Y */
          /* (~X & Y) | X -> X | Y */
          if (coden == BIT_NOT_EXPR && a1 == x)
            {
              gimple_assign_set_rhs_with_ops (gsi, code,
                                              x, def1_arg2);
              gcc_assert (gsi_stmt (*gsi) == stmt);
              update_stmt (stmt);
              return true;
            }
          defcodefor_name (def1_arg2, &coden, &a1, &a2);
          /* (Y | ~X) & X -> X & Y */
          /* (Y & ~X) | X -> X | Y */
          if (coden == BIT_NOT_EXPR && a1 == x)
            {
              gimple_assign_set_rhs_with_ops (gsi, code,
                                              x, def1_arg1);
              gcc_assert (gsi_stmt (*gsi) == stmt);
              update_stmt (stmt);
              return true;
            }
        }
      if (def2_code == ocode)
        {
          enum tree_code coden;
          tree a1;
          tree x = arg1;
          /* X & ( X | Y) -> X */
          /* X | ( X & Y) -> X */
          if (x == def2_arg1
              || x == def2_arg2)
            {
              gimple_assign_set_rhs_from_tree (gsi, x);
              update_stmt (gsi_stmt (*gsi));
              return true;
            }
          defcodefor_name (def2_arg1, &coden, &a1, NULL);
          /* (~X | Y) & X -> X & Y */
          /* (~X & Y) | X -> X | Y */
          if (coden == BIT_NOT_EXPR && a1 == x)
            {
              gimple_assign_set_rhs_with_ops (gsi, code,
                                              x, def2_arg2);
              gcc_assert (gsi_stmt (*gsi) == stmt);
              update_stmt (stmt);
              return true;
            }
          defcodefor_name (def2_arg2, &coden, &a1, NULL);
          /* (Y | ~X) & X -> X & Y */
          /* (Y & ~X) | X -> X | Y */
          if (coden == BIT_NOT_EXPR && a1 == x)
            {
              gimple_assign_set_rhs_with_ops (gsi, code,
                                              x, def2_arg1);
              gcc_assert (gsi_stmt (*gsi) == stmt);
              update_stmt (stmt);
              return true;
            }
        }
      /* If arg1 and arg2 are booleans (or any single bit type)
         then try to simplify:

           (~X & Y) -> X < Y
           (X & ~Y) -> Y < X
           (~X | Y) -> X <= Y
           (X | ~Y) -> Y <= X

         But only do this if our result feeds into a comparison as
         this transformation is not always a win, particularly on
         targets with and-not instructions.  */
      if (TREE_CODE (arg1) == SSA_NAME
          && TREE_CODE (arg2) == SSA_NAME
          && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
          && TYPE_PRECISION (TREE_TYPE (arg1)) == 1
          && TYPE_PRECISION (TREE_TYPE (arg2)) == 1
          && (TYPE_UNSIGNED (TREE_TYPE (arg1))
              == TYPE_UNSIGNED (TREE_TYPE (arg2))))
        {
          use_operand_p use_p;
          gimple use_stmt;

          if (single_imm_use (gimple_assign_lhs (stmt), &use_p, &use_stmt))
            {
              if (gimple_code (use_stmt) == GIMPLE_COND
                  && gimple_cond_lhs (use_stmt) == gimple_assign_lhs (stmt)
                  && integer_zerop (gimple_cond_rhs (use_stmt))
                  && gimple_cond_code (use_stmt) == NE_EXPR)
                {
                  if (simplify_bitwise_binary_boolean (gsi, code, arg1, arg2))
                    return true;
                  if (simplify_bitwise_binary_boolean (gsi, code, arg2, arg1))
                    return true;
                }
            }
        }
    }
  return false;
}
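/* A sketch of the single-bit case above, assuming _Bool operands x and y:

     tmp_1 = ~x_2;
     tmp_3 = tmp_1 & y_4;
     if (tmp_3 != 0) ...

   is rewritten so that the pair of bit operations becomes the single
   unsigned comparison tmp_3 = x_2 < y_4 feeding the branch.  */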
/* Recognize rotation patterns.  Return true if a transformation
   applied, otherwise return false.

   We are looking for X with unsigned type T with bitsize B, OP being
   +, | or ^, some type T2 wider than T and
   (X << CNT1) OP (X >> CNT2)                           iff CNT1 + CNT2 == B
   ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))     iff CNT1 + CNT2 == B
   (X << Y) OP (X >> (B - Y))
   (X << (int) Y) OP (X >> (int) (B - Y))
   ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
   ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
   (X << Y) | (X >> ((-Y) & (B - 1)))
   (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   and transform these into:
   X r<< CNT1
   X r<< Y

   Note, in the patterns with T2 type, the type of OP operands
   might be even a signed type, but should have precision B.  */
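/* For example (a sketch, assuming a 32-bit unsigned int), the classic
   rotate idiom

     unsigned int
     rotl32 (unsigned int x, unsigned int y)
     {
       return (x << y) | (x >> ((-y) & 31));
     }

   matches the last group of patterns above, is well defined even for
   y == 0, and is turned into a single LROTATE_EXPR that can expand to
   a hardware rotate instruction where available.  */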
static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg[2], rtype, rotcnt = NULL_TREE;
  tree def_arg1[2], def_arg2[2];
  enum tree_code def_code[2];
  tree lhs;
  int i;
  bool swapped_p = false;
  gimple g;

  arg[0] = gimple_assign_rhs1 (stmt);
  arg[1] = gimple_assign_rhs2 (stmt);
  rtype = TREE_TYPE (arg[0]);

  /* Only create rotates in complete modes.  Other cases are not
     expanded properly.  */
  if (!INTEGRAL_TYPE_P (rtype)
      || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
    return false;
  for (i = 0; i < 2; i++)
    defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);

  /* Look through narrowing conversions.  */
  if (CONVERT_EXPR_CODE_P (def_code[0])
      && CONVERT_EXPR_CODE_P (def_code[1])
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
         == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
      && has_single_use (arg[0])
      && has_single_use (arg[1]))
    {
      for (i = 0; i < 2; i++)
        {
          arg[i] = def_arg1[i];
          defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
        }
    }
  /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
  for (i = 0; i < 2; i++)
    if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
      return false;
    else if (!has_single_use (arg[i]))
      return false;
  if (def_code[0] == def_code[1])
    return false;
  /* If we've looked through narrowing conversions before, look through
     widening conversions from unsigned type with the same precision
     as rtype here.  */
  if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    for (i = 0; i < 2; i++)
      {
        tree tem;
        enum tree_code code;
        defcodefor_name (def_arg1[i], &code, &tem, NULL);
        if (!CONVERT_EXPR_CODE_P (code)
            || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
            || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
          return false;
        def_arg1[i] = tem;
      }
  /* Both shifts have to use the same first operand.  */
  if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
    return false;
  if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    return false;
  /* CNT1 + CNT2 == B case above.  */
  if (host_integerp (def_arg2[0], 1)
      && host_integerp (def_arg2[1], 1)
      && (unsigned HOST_WIDE_INT) tree_low_cst (def_arg2[0], 1)
         + tree_low_cst (def_arg2[1], 1) == TYPE_PRECISION (rtype))
    rotcnt = def_arg2[0];
  else if (TREE_CODE (def_arg2[0]) != SSA_NAME
           || TREE_CODE (def_arg2[1]) != SSA_NAME)
    return false;
  else
    {
      tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
      enum tree_code cdef_code[2];
      /* Look through conversion of the shift count argument.
         The C/C++ FE casts any shift count argument to integer_type_node.
         The only problem might be if the shift count type maximum value
         is equal to or smaller than the number of bits in rtype.  */
      for (i = 0; i < 2; i++)
        {
          def_arg2_alt[i] = def_arg2[i];
          defcodefor_name (def_arg2[i], &cdef_code[i],
                           &cdef_arg1[i], &cdef_arg2[i]);
          if (CONVERT_EXPR_CODE_P (cdef_code[i])
              && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
              && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
                 > floor_log2 (TYPE_PRECISION (rtype))
              && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
                 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
            {
              def_arg2_alt[i] = cdef_arg1[i];
              defcodefor_name (def_arg2_alt[i], &cdef_code[i],
                               &cdef_arg1[i], &cdef_arg2[i]);
            }
        }
      for (i = 0; i < 2; i++)
        /* Check for one shift count being Y and the other B - Y,
           with optional casts.  */
        if (cdef_code[i] == MINUS_EXPR
            && host_integerp (cdef_arg1[i], 0)
            && tree_low_cst (cdef_arg1[i], 0) == TYPE_PRECISION (rtype)
            && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
          {
            tree tem;
            enum tree_code code;

            if (cdef_arg2[i] == def_arg2[1 - i]
                || cdef_arg2[i] == def_arg2_alt[1 - i])
              {
                rotcnt = cdef_arg2[i];
                break;
              }
            defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
                && (tem == def_arg2[1 - i]
                    || tem == def_arg2_alt[1 - i]))
              {
                rotcnt = tem;
                break;
              }
          }
        /* The above sequence isn't safe for Y being 0,
           because then one of the shifts triggers undefined behavior.
           This alternative is safe even for rotation count of 0.
           One shift count is Y and the other (-Y) & (B - 1).  */
        else if (cdef_code[i] == BIT_AND_EXPR
                 && host_integerp (cdef_arg2[i], 0)
                 && tree_low_cst (cdef_arg2[i], 0)
                    == TYPE_PRECISION (rtype) - 1
                 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
                 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
          {
            tree tem;
            enum tree_code code;

            defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
              defcodefor_name (tem, &code, &tem, NULL);

            if (code == NEGATE_EXPR)
              {
                if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
                  {
                    rotcnt = tem;
                    break;
                  }
                defcodefor_name (tem, &code, &tem, NULL);
                if (CONVERT_EXPR_CODE_P (code)
                    && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                    && TYPE_PRECISION (TREE_TYPE (tem))
                       > floor_log2 (TYPE_PRECISION (rtype))
                    && TYPE_PRECISION (TREE_TYPE (tem))
                       == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
                    && (tem == def_arg2[1 - i]
                        || tem == def_arg2_alt[1 - i]))
                  {
                    rotcnt = tem;
                    break;
                  }
              }
          }
      if (rotcnt == NULL_TREE)
        return false;
      swapped_p = i != 1;
    }

  if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
                                  TREE_TYPE (rotcnt)))
    {
      g = gimple_build_assign_with_ops (NOP_EXPR,
                                        make_ssa_name (TREE_TYPE (def_arg2[0]),
                                                       NULL),
                                        rotcnt, NULL_TREE);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      rotcnt = gimple_assign_lhs (g);
    }
  lhs = gimple_assign_lhs (stmt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
  g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
                                    ? LROTATE_EXPR : RROTATE_EXPR,
                                    lhs, def_arg1[0], rotcnt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    {
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
                                        lhs, NULL_TREE);
    }
  gsi_replace (gsi, g, false);
  return true;
}
/* Perform re-associations of the plus or minus statement STMT that are
   always permitted.  Returns true if the CFG was changed.  */

static bool
associate_plusminus (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool changed;

  /* We can't reassociate at all for saturating types.  */
  if (TYPE_SATURATING (TREE_TYPE (rhs1)))
    return false;
  /* First contract negates.  */
  do
    {
      changed = false;

      /* A +- (-B) -> A -+ B.  */
      if (TREE_CODE (rhs2) == SSA_NAME)
        {
          gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
          if (is_gimple_assign (def_stmt)
              && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
              && can_propagate_from (def_stmt))
            {
              code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
              gimple_assign_set_rhs_code (stmt, code);
              rhs2 = gimple_assign_rhs1 (def_stmt);
              gimple_assign_set_rhs2 (stmt, rhs2);
              gimple_set_modified (stmt, true);
              changed = true;
            }
        }

      /* (-A) + B -> B - A.  */
      if (TREE_CODE (rhs1) == SSA_NAME
          && code == PLUS_EXPR)
        {
          gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
          if (is_gimple_assign (def_stmt)
              && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
              && can_propagate_from (def_stmt))
            {
              code = MINUS_EXPR;
              gimple_assign_set_rhs_code (stmt, code);
              rhs1 = rhs2;
              gimple_assign_set_rhs1 (stmt, rhs1);
              rhs2 = gimple_assign_rhs1 (def_stmt);
              gimple_assign_set_rhs2 (stmt, rhs2);
              gimple_set_modified (stmt, true);
              changed = true;
            }
        }
    }
  while (changed);
  /* We can't reassociate floating-point or fixed-point plus or minus
     because of saturation to +-Inf.  */
  if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
      || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
    goto out;

  /* Second match patterns that allow contracting a plus-minus pair
     irrespective of overflow issues.

        (A +- B) - A       ->  +- B
        (A +- B) -+ B      ->  A
        (CST +- A) +- CST  ->  CST +- A
        (A +- CST) +- CST  ->  A +- CST
        ~A + A             ->  -1
        ~A + 1             ->  -A
        A - (A +- B)       ->  -+ B
        A +- (B +- A)      ->  +- B
        CST +- (CST +- A)  ->  CST +- A
        CST +- (A +- CST)  ->  CST +- A
        A + ~A             ->  -1

     via commutating the addition and contracting operations to zero
     by reassociation.  */
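/* For instance, the first pattern rewrites

     t_1 = a_2 + b_3;
     r_4 = t_1 - a_2;

   into r_4 = b_3, and "~A + 1 -> -A" turns the two's complement idiom
   "t_1 = ~a_2; r_4 = t_1 + 1;" into r_4 = -a_2.  */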
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          if (def_code == PLUS_EXPR
              || def_code == MINUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs2, 0)
                  && code == MINUS_EXPR)
                {
                  /* (A +- B) - A -> +- B.  */
                  code = ((def_code == PLUS_EXPR)
                          ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
                  rhs1 = def_rhs2;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (operand_equal_p (def_rhs2, rhs2, 0)
                       && code != def_code)
                {
                  /* (A +- B) -+ B -> A.  */
                  code = TREE_CODE (def_rhs1);
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (CONSTANT_CLASS_P (rhs2)
                       && CONSTANT_CLASS_P (def_rhs1))
                {
                  /* (CST +- A) +- CST -> CST +- A.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs1),
                                          def_rhs1, rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = def_code;
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs2;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
              else if (CONSTANT_CLASS_P (rhs2)
                       && CONSTANT_CLASS_P (def_rhs2))
                {
                  /* (A +- CST) +- CST -> A +- CST.  */
                  enum tree_code mix = (code == def_code)
                                       ? PLUS_EXPR : MINUS_EXPR;
                  tree cst = fold_binary (mix, TREE_TYPE (rhs1),
                                          def_rhs2, rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = def_code;
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = def_rhs1;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = cst;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
            }
          else if (def_code == BIT_NOT_EXPR && code == PLUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs2, 0))
                {
                  /* ~A + A -> -1.  */
                  rhs1 = build_all_ones_cst (TREE_TYPE (rhs2));
                  rhs2 = NULL_TREE;
                  code = TREE_CODE (rhs1);
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if ((TREE_CODE (TREE_TYPE (rhs2)) != COMPLEX_TYPE
                        && integer_onep (rhs2))
                       || (TREE_CODE (rhs2) == COMPLEX_CST
                           && integer_onep (TREE_REALPART (rhs2))
                           && integer_onep (TREE_IMAGPART (rhs2))))
                {
                  /* ~A + 1 -> -A.  */
                  code = NEGATE_EXPR;
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
            }
        }
    }
  if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          if (def_code == PLUS_EXPR
              || def_code == MINUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs1, 0)
                  && code == MINUS_EXPR)
                {
                  /* A - (A +- B) -> -+ B.  */
                  code = ((def_code == PLUS_EXPR)
                          ? NEGATE_EXPR : TREE_CODE (def_rhs2));
                  rhs1 = def_rhs2;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (operand_equal_p (def_rhs2, rhs1, 0)
                       && code != def_code)
                {
                  /* A +- (B +- A) -> +- B.  */
                  code = ((code == PLUS_EXPR)
                          ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (CONSTANT_CLASS_P (rhs1)
                       && CONSTANT_CLASS_P (def_rhs1))
                {
                  /* CST +- (CST +- A) -> CST +- A.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs2),
                                          rhs1, def_rhs1);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs2;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
              else if (CONSTANT_CLASS_P (rhs1)
                       && CONSTANT_CLASS_P (def_rhs2))
                {
                  /* CST +- (A +- CST) -> CST +- A.  */
                  tree cst = fold_binary (def_code == code
                                          ? PLUS_EXPR : MINUS_EXPR,
                                          TREE_TYPE (rhs2),
                                          rhs1, def_rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs1;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
            }
          else if (def_code == BIT_NOT_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              if (code == PLUS_EXPR
                  && operand_equal_p (def_rhs1, rhs1, 0))
                {
                  /* A + ~A -> -1.  */
                  rhs1 = build_all_ones_cst (TREE_TYPE (rhs1));
                  rhs2 = NULL_TREE;
                  code = TREE_CODE (rhs1);
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
            }
        }
    }
out:
  if (gimple_modified_p (stmt))
    {
      fold_stmt_inplace (gsi);
      update_stmt (stmt);
      if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
          && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
        return true;
    }

  return false;
}
/* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI.  Returns
   true if anything changed, false otherwise.  */

static bool
associate_pointerplus (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree ptr, rhs, algn;

  /* Pattern match
       tem = (sizetype) ptr;
       tem = tem & algn;
       tem = -tem;
       ... = ptr p+ tem;
     and produce the simpler and easier to analyze with respect to alignment
       ... = ptr & ~algn;  */
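  /* A user-level sketch of this pattern, assuming a 16-byte alignment
     mask:

       char *q = p - ((__UINTPTR_TYPE__) p & 15);

     produces exactly the negate-of-masked-value sequence above and is
     rewritten into the single statement q = p & ~15, from which the
     alignment of q is directly visible.  */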
  ptr = gimple_assign_rhs1 (stmt);
  rhs = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
    return false;
  rhs = gimple_assign_rhs1 (def_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
    return false;
  rhs = gimple_assign_rhs1 (def_stmt);
  algn = gimple_assign_rhs2 (def_stmt);
  if (TREE_CODE (rhs) != SSA_NAME
      || TREE_CODE (algn) != INTEGER_CST)
    return false;
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    return false;
  if (gimple_assign_rhs1 (def_stmt) != ptr)
    return false;

  algn = double_int_to_tree (TREE_TYPE (ptr), ~tree_to_double_int (algn));
  gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
  fold_stmt_inplace (gsi);
  update_stmt (stmt);

  return true;
}
/* Combine two conversions in a row for the second conversion at *GSI.
   Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
   run.  Else it returns 0.  */

static int
combine_conversions (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, lhs;
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum tree_code code2;

  gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
                       || code == FLOAT_EXPR
                       || code == FIX_TRUNC_EXPR);

  lhs = gimple_assign_lhs (stmt);
  op0 = gimple_assign_rhs1 (stmt);
  if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
    {
      gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
      update_stmt (stmt);
      return 1;
    }
  if (TREE_CODE (op0) != SSA_NAME)
    return 0;

  def_stmt = SSA_NAME_DEF_STMT (op0);
  if (!is_gimple_assign (def_stmt))
    return 0;

  code2 = gimple_assign_rhs_code (def_stmt);

  if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
    {
      tree defop0 = gimple_assign_rhs1 (def_stmt);
      tree type = TREE_TYPE (lhs);
      tree inside_type = TREE_TYPE (defop0);
      tree inter_type = TREE_TYPE (op0);
      int inside_int = INTEGRAL_TYPE_P (inside_type);
      int inside_ptr = POINTER_TYPE_P (inside_type);
      int inside_float = FLOAT_TYPE_P (inside_type);
      int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
      unsigned int inside_prec = TYPE_PRECISION (inside_type);
      int inside_unsignedp = TYPE_UNSIGNED (inside_type);
      int inter_int = INTEGRAL_TYPE_P (inter_type);
      int inter_ptr = POINTER_TYPE_P (inter_type);
      int inter_float = FLOAT_TYPE_P (inter_type);
      int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
      unsigned int inter_prec = TYPE_PRECISION (inter_type);
      int inter_unsignedp = TYPE_UNSIGNED (inter_type);
      int final_int = INTEGRAL_TYPE_P (type);
      int final_ptr = POINTER_TYPE_P (type);
      int final_float = FLOAT_TYPE_P (type);
      int final_vec = TREE_CODE (type) == VECTOR_TYPE;
      unsigned int final_prec = TYPE_PRECISION (type);
      int final_unsignedp = TYPE_UNSIGNED (type);
      /* Don't propagate ssa names that occur in abnormal phis.  */
      if (TREE_CODE (defop0) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
        return 0;

      /* In addition to the cases of two conversions in a row
         handled below, if we are converting something to its own
         type via an object of identical or wider precision, neither
         conversion is needed.  */
      if (useless_type_conversion_p (type, inside_type)
          && (((inter_int || inter_ptr) && final_int)
              || (inter_float && final_float))
          && inter_prec >= final_prec)
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
          gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
          update_stmt (stmt);
          return remove_prop_source_from_use (op0) ? 2 : 1;
        }
      /* Likewise, if the intermediate and initial types are either both
         float or both integer, we don't need the middle conversion if the
         former is wider than the latter and doesn't change the signedness
         (for integers).  Avoid this if the final type is a pointer since
         then we sometimes need the middle conversion.  Likewise if the
         final type has a precision not equal to the size of its mode.  */
      if (((inter_int && inside_int)
           || (inter_float && inside_float)
           || (inter_vec && inside_vec))
          && inter_prec >= inside_prec
          && (inter_float || inter_vec
              || inter_unsignedp == inside_unsignedp)
          && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
                && TYPE_MODE (type) == TYPE_MODE (inter_type))
          && ! final_ptr
          && (! final_vec || inter_prec == inside_prec))
        {
          gimple_assign_set_rhs1 (stmt, defop0);
          update_stmt (stmt);
          return remove_prop_source_from_use (op0) ? 2 : 1;
        }
      /* If we have a sign-extension of a zero-extended value, we can
         replace that by a single zero-extension.  Likewise if the
         final conversion does not change precision we can drop the
         intermediate conversion.  */
      if (inside_int && inter_int && final_int
          && ((inside_prec < inter_prec && inter_prec < final_prec
               && inside_unsignedp && !inter_unsignedp)
              || final_prec == inter_prec))
        {
          gimple_assign_set_rhs1 (stmt, defop0);
          update_stmt (stmt);
          return remove_prop_source_from_use (op0) ? 2 : 1;
        }
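      /* E.g. with "unsigned char c; short s = (short) c; int i = (int) s;"
         the inner zero-extension already determines the value, so the
         sign-extending intermediate conversion is dropped and i is
         computed as a single zero-extension of c.  */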
      /* Two conversions in a row are not needed unless:
         - some conversion is floating-point (overstrict for now), or
         - some conversion is a vector (overstrict for now), or
         - the intermediate type is narrower than both initial and
           final, or
         - the intermediate type and innermost type differ in signedness,
           and the outermost type is wider than the intermediate, or
         - the initial type is a pointer type and the precisions of the
           intermediate and final types differ, or
         - the final type is a pointer type and the precisions of the
           initial and intermediate types differ.  */
      if (! inside_float && ! inter_float && ! final_float
          && ! inside_vec && ! inter_vec && ! final_vec
          && (inter_prec >= inside_prec || inter_prec >= final_prec)
          && ! (inside_int && inter_int
                && inter_unsignedp != inside_unsignedp
                && inter_prec < final_prec)
          && ((inter_unsignedp && inter_prec > inside_prec)
              == (final_unsignedp && final_prec > inter_prec))
          && ! (inside_ptr && inter_prec != final_prec)
          && ! (final_ptr && inside_prec != inter_prec)
          && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
                && TYPE_MODE (type) == TYPE_MODE (inter_type)))
        {
          gimple_assign_set_rhs1 (stmt, defop0);
          update_stmt (stmt);
          return remove_prop_source_from_use (op0) ? 2 : 1;
        }
      /* A truncation to an unsigned type should be canonicalized as
         bitwise and of a mask.  */
      if (final_int && inter_int && inside_int
          && final_prec == inside_prec
          && final_prec > inter_prec
          && inter_unsignedp)
        {
          tree tem;
          tem = fold_build2 (BIT_AND_EXPR, inside_type,
                             defop0,
                             double_int_to_tree
                               (inside_type, double_int::mask (inter_prec)));
          if (!useless_type_conversion_p (type, inside_type))
            {
              tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
                                              GSI_SAME_STMT);
              gimple_assign_set_rhs1 (stmt, tem);
            }
          else
            gimple_assign_set_rhs_from_tree (gsi, tem);
          update_stmt (gsi_stmt (*gsi));
          return 1;
        }
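      /* E.g. "int y = (unsigned short) x;" for int x truncates and
         immediately widens again, so it is canonicalized here to
         "y = x & 0xffff;".  */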
      /* If we are converting an integer to a floating-point that can
         represent it exactly and back to an integer, we can skip the
         floating-point conversion.  */
      if (inside_int && inter_float && final_int &&
          (unsigned) significand_size (TYPE_MODE (inter_type))
          >= inside_prec - !inside_unsignedp)
        {
          if (useless_type_conversion_p (type, inside_type))
            {
              gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
              gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
              update_stmt (stmt);
              return remove_prop_source_from_use (op0) ? 2 : 1;
            }
          else
            {
              gimple_assign_set_rhs1 (stmt, defop0);
              gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
              update_stmt (stmt);
              return remove_prop_source_from_use (op0) ? 2 : 1;
            }
        }
    }

  return 0;
}
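/* E.g. "int j = (int) (double) i;" is simplified to "j = i;" because a
   double's significand can represent every 32-bit int exactly, so the
   round trip through floating point cannot change the value.  */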
/* Combine an element access with a shuffle.  Returns true if there were
   any changes made, else it returns false.  */

static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op0, op1, op2;
  tree elem_type;
  unsigned idx, n, size;
  enum tree_code code;

  op = gimple_assign_rhs1 (stmt);
  gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);

  op0 = TREE_OPERAND (op, 0);
  if (TREE_CODE (op0) != SSA_NAME
      || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
    return false;
  def_stmt = get_prop_source_stmt (op0, false, NULL);
  if (!def_stmt || !can_propagate_from (def_stmt))
    return false;

  op1 = TREE_OPERAND (op, 1);
  op2 = TREE_OPERAND (op, 2);
  code = gimple_assign_rhs_code (def_stmt);

  if (code == CONSTRUCTOR)
    {
      tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
                               gimple_assign_rhs1 (def_stmt), op1, op2);
      if (!tem || !valid_gimple_rhs_p (tem))
        return false;
      gimple_assign_set_rhs_from_tree (gsi, tem);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  elem_type = TREE_TYPE (TREE_TYPE (op0));
  if (TREE_TYPE (op) != elem_type)
    return false;

  size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
  n = TREE_INT_CST_LOW (op1) / size;
  if (n != 1)
    return false;
  idx = TREE_INT_CST_LOW (op2) / size;

  if (code == VEC_PERM_EXPR)
    {
      tree p, m, index, tem;
      unsigned nelts;
      m = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (m) != VECTOR_CST)
        return false;
      nelts = VECTOR_CST_NELTS (m);
      idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
      idx %= 2 * nelts;
      if (idx < nelts)
        p = gimple_assign_rhs1 (def_stmt);
      else
        {
          p = gimple_assign_rhs2 (def_stmt);
          idx -= nelts;
        }
      index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
      tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
                    unshare_expr (p), op1, index);
      gimple_assign_set_rhs1 (stmt, tem);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}
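/* As an example of the VEC_PERM_EXPR case, extracting lane 0 from

     v_1 = VEC_PERM_EXPR <a_2, b_3, { 2, 7, 0, 1 }>;

   reads mask element 0 (the value 2, i.e. lane 2 of the first input),
   so the access is rewritten into a BIT_FIELD_REF of lane 2 of a_2 and
   the shuffle itself may become dead.  */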
/* Determine whether applying the 2 permutations (mask1 then mask2)
   gives back one of the input.  */

static int
is_combined_permutation_identity (tree mask1, tree mask2)
{
  tree mask;
  unsigned int nelts, i, j;
  bool maybe_identity1 = true;
  bool maybe_identity2 = true;

  gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
                       && TREE_CODE (mask2) == VECTOR_CST);
  mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
  gcc_assert (TREE_CODE (mask) == VECTOR_CST);

  nelts = VECTOR_CST_NELTS (mask);
  for (i = 0; i < nelts; i++)
    {
      tree val = VECTOR_CST_ELT (mask, i);
      gcc_assert (TREE_CODE (val) == INTEGER_CST);
      j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
      if (j == i)
        maybe_identity2 = false;
      else if (j == i + nelts)
        maybe_identity1 = false;
      else
        return 0;
    }
  return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
}
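/* A worked example, for 4-element vectors: composing the two masks
   yields element J for each position I; if every J equals I (combined
   mask { 0, 1, 2, 3 }) the result is the first input and we return 1,
   if every J equals I + 4 ({ 4, 5, 6, 7 }) it is the second input and
   we return 2, and any mixture returns 0.  */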
/* Combine a shuffle with its arguments.  Returns 1 if there were any
   changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.  */

static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return 0;

      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
        return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
        return 0;
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
        return 0;
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
                          : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }
  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
    {
      tree opt;
      bool ret = false;
      if (op0 != op1)
        {
          if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
            return 0;

          if (TREE_CODE (op1) == VECTOR_CST)
            arg1 = op1;
          else if (TREE_CODE (op1) == SSA_NAME)
            {
              enum tree_code code2;

              gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
              if (!def_stmt2 || !can_propagate_from (def_stmt2))
                return 0;

              code2 = gimple_assign_rhs_code (def_stmt2);
              if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
                return 0;
              arg1 = gimple_assign_rhs1 (def_stmt2);
            }
          else
            return 0;
        }
      else
        {
          /* Already used twice in this statement.  */
          if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
            return 0;
          arg1 = arg0;
        }
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
      if (!opt
          || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
        return 0;
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
        ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
        ret |= remove_prop_source_from_use (op1);
      return ret ? 2 : 1;
    }

  return 0;
}
/* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */

static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op2, orig, type, elem_type;
  unsigned elem_size, nelts, i;
  enum tree_code code;
  constructor_elt *elt;
  unsigned char *sel;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  nelts = TYPE_VECTOR_SUBPARTS (type);
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  sel = XALLOCAVEC (unsigned char, nelts);
  orig = NULL;
  maybe_ident = true;
), i
, elt
)
3238 if (TREE_CODE (elt
->value
) != SSA_NAME
)
3240 def_stmt
= get_prop_source_stmt (elt
->value
, false, NULL
);
3243 code
= gimple_assign_rhs_code (def_stmt
);
3244 if (code
!= BIT_FIELD_REF
)
3246 op1
= gimple_assign_rhs1 (def_stmt
);
3247 ref
= TREE_OPERAND (op1
, 0);
3255 if (TREE_CODE (ref
) != SSA_NAME
)
3257 if (!useless_type_conversion_p (type
, TREE_TYPE (ref
)))
3261 if (TREE_INT_CST_LOW (TREE_OPERAND (op1
, 1)) != elem_size
)
3263 sel
[i
] = TREE_INT_CST_LOW (TREE_OPERAND (op1
, 2)) / elem_size
;
3264 if (sel
[i
] != i
) maybe_ident
= false;
3270 gimple_assign_set_rhs_from_tree (gsi
, orig
);
3273 tree mask_type
, *mask_elts
;
3275 if (!can_vec_perm_p (TYPE_MODE (type
), false, sel
))
3278 = build_vector_type (build_nonstandard_integer_type (elem_size
, 1),
3280 if (GET_MODE_CLASS (TYPE_MODE (mask_type
)) != MODE_VECTOR_INT
3281 || GET_MODE_SIZE (TYPE_MODE (mask_type
))
3282 != GET_MODE_SIZE (TYPE_MODE (type
)))
3284 mask_elts
= XALLOCAVEC (tree
, nelts
);
3285 for (i
= 0; i
< nelts
; i
++)
3286 mask_elts
[i
] = build_int_cst (TREE_TYPE (mask_type
), sel
[i
]);
3287 op2
= build_vector (mask_type
, mask_elts
);
3288 gimple_assign_set_rhs_with_ops_1 (gsi
, VEC_PERM_EXPR
, orig
, orig
, op2
);
3290 update_stmt (gsi_stmt (*gsi
));
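/* A sketch, for a four-element vector v_1 of type v4si: a constructor
   whose elements are BIT_FIELD_REFs <v_1, 32, 96>, <v_1, 32, 64>,
   <v_1, 32, 32> and <v_1, 32, 0> selects lanes { 3, 2, 1, 0 } of v_1
   and is rewritten to VEC_PERM_EXPR <v_1, v_1, { 3, 2, 1, 0 }>, while
   the identity selection { 0, 1, 2, 3 } becomes a plain copy of v_1.  */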
/* Main entry point for the forward propagation and statement combine
   optimizer.  */

static unsigned int
ssa_forward_propagate_and_combine (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* Apply forward propagation to all stmts in the basic-block.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple stmt = gsi_stmt (gsi);
          tree lhs, rhs;
          enum tree_code code;

          if (!is_gimple_assign (stmt))
            {
              gsi_next (&gsi);
              continue;
            }

          lhs = gimple_assign_lhs (stmt);
          rhs = gimple_assign_rhs1 (stmt);
          code = gimple_assign_rhs_code (stmt);
          if (TREE_CODE (lhs) != SSA_NAME
              || has_zero_uses (lhs))
            {
              gsi_next (&gsi);
              continue;
            }

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (code == ADDR_EXPR
              /* Handle pointer conversions on invariant addresses
                 as well, as this is valid gimple.  */
              || (CONVERT_EXPR_CODE_P (code)
                  && TREE_CODE (rhs) == ADDR_EXPR
                  && POINTER_TYPE_P (TREE_TYPE (lhs))))
            {
              tree base = get_base_address (TREE_OPERAND (rhs, 0));
              if ((!base
                   || !DECL_P (base)
                   || decl_address_invariant_p (base))
                  && !stmt_references_abnormal_ssa_name (stmt)
                  && forward_propagate_addr_expr (lhs, rhs))
                {
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
          else if (code == POINTER_PLUS_EXPR)
            {
              tree off = gimple_assign_rhs2 (stmt);
              if (TREE_CODE (off) == INTEGER_CST
                  && can_propagate_from (stmt)
                  && !simple_iv_increment_p (stmt)
                  /* ??? Better adjust the interface to that function
                     instead of building new trees here.  */
                  && forward_propagate_addr_expr
                       (lhs,
                        build1_loc (gimple_location (stmt),
                                    ADDR_EXPR, TREE_TYPE (rhs),
                                    fold_build2 (MEM_REF,
                                                 TREE_TYPE (TREE_TYPE (rhs)),
                                                 rhs,
                                                 fold_convert (ptr_type_node,
                                                               off)))))
                {
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else if (is_gimple_min_invariant (rhs))
                {
                  /* Make sure to fold &a[0] + off_1 here.  */
                  fold_stmt_inplace (&gsi);
                  update_stmt (stmt);
                  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
                    gsi_next (&gsi);
                }
              else
                gsi_next (&gsi);
            }
          else if (TREE_CODE_CLASS (code) == tcc_comparison)
            {
              if (forward_propagate_comparison (&gsi))
                cfg_changed = true;
            }
          else
            gsi_next (&gsi);
        }
      /* Combine stmts with the stmts defining their operands.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple stmt = gsi_stmt (gsi);
          bool changed = false;

          /* Mark stmt as potentially needing revisiting.  */
          gimple_set_plf (stmt, GF_PLF_1, false);

          switch (gimple_code (stmt))
            {
            case GIMPLE_ASSIGN:
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                enum tree_code code = gimple_assign_rhs_code (stmt);

                if ((code == BIT_NOT_EXPR
                     || code == NEGATE_EXPR)
                    && TREE_CODE (rhs1) == SSA_NAME)
                  changed = simplify_not_neg_expr (&gsi);
                else if (code == COND_EXPR
                         || code == VEC_COND_EXPR)
                  {
                    /* In this case the entire COND_EXPR is in rhs1. */
                    if (forward_propagate_into_cond (&gsi)
                        || combine_cond_exprs (&gsi))
                      {
                        changed = true;
                        stmt = gsi_stmt (gsi);
                      }
                  }
                else if (TREE_CODE_CLASS (code) == tcc_comparison)
                  {
                    int did_something;
                    did_something = forward_propagate_into_comparison (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    changed = did_something != 0;
                  }
                else if ((code == PLUS_EXPR
                          || code == BIT_IOR_EXPR
                          || code == BIT_XOR_EXPR)
                         && simplify_rotate (&gsi))
                  changed = true;
                else if (code == BIT_AND_EXPR
                         || code == BIT_IOR_EXPR
                         || code == BIT_XOR_EXPR)
                  changed = simplify_bitwise_binary (&gsi);
                else if (code == PLUS_EXPR
                         || code == MINUS_EXPR)
                  changed = associate_plusminus (&gsi);
                else if (code == POINTER_PLUS_EXPR)
                  changed = associate_pointerplus (&gsi);
                else if (CONVERT_EXPR_CODE_P (code)
                         || code == FLOAT_EXPR
                         || code == FIX_TRUNC_EXPR)
                  {
                    int did_something = combine_conversions (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;

                    /* If we have a narrowing conversion to an integral
                       type that is fed by a BIT_AND_EXPR, we might be
                       able to remove the BIT_AND_EXPR if it merely
                       masks off bits outside the final type (and nothing
                       else).  */
                    if (! did_something)
                      {
                        tree outer_type = TREE_TYPE (gimple_assign_lhs (stmt));
                        tree inner_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
                        if (INTEGRAL_TYPE_P (outer_type)
                            && INTEGRAL_TYPE_P (inner_type)
                            && (TYPE_PRECISION (outer_type)
                                <= TYPE_PRECISION (inner_type)))
                          did_something = simplify_conversion_from_bitmask (&gsi);
                      }

                    changed = did_something != 0;
                  }
                else if (code == VEC_PERM_EXPR)
                  {
                    int did_something = simplify_permutation (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    changed = did_something != 0;
                  }
                else if (code == BIT_FIELD_REF)
                  changed = simplify_bitfield_ref (&gsi);
                else if (code == CONSTRUCTOR
                         && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
                  changed = simplify_vector_constructor (&gsi);
                break;
              }

            case GIMPLE_SWITCH:
              changed = simplify_gimple_switch (stmt);
              break;

            case GIMPLE_COND:
              {
                int did_something;
                did_something = forward_propagate_into_gimple_cond (stmt);
                if (did_something == 2)
                  cfg_changed = true;
                changed = did_something != 0;
                break;
              }

            case GIMPLE_CALL:
              {
                tree callee = gimple_call_fndecl (stmt);
                if (callee != NULL_TREE
                    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
                  changed = simplify_builtin_call (&gsi, callee);
                break;
              }

            default:;
            }

          if (changed)
            {
              /* If the stmt changed then re-visit it and the statements
                 inserted before it.  */
              for (; !gsi_end_p (gsi); gsi_prev (&gsi))
                if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
                  break;
              if (gsi_end_p (gsi))
                gsi = gsi_start_bb (bb);
              else
                gsi_next (&gsi);
            }
          else
            {
              /* Stmt no longer needs to be revisited.  */
              gimple_set_plf (stmt, GF_PLF_1, true);
              gsi_next (&gsi);
            }
        }
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
static bool
gate_forwprop (void)
{
  return flag_tree_forwprop;
}

namespace {

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
};

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (ctxt_); }
  bool gate () { return gate_forwprop (); }
  unsigned int execute () { return ssa_forward_propagate_and_combine (); }

}; // class pass_forwprop

} // anon namespace

gimple_opt_pass *
make_pass_forwprop (gcc::context *ctxt)
{
  return new pass_forwprop (ctxt);
}