/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "flags.h"
#include "expr.h"
#include "cfgloop.h"
#include "optabs.h"
#include "tree-ssa-propagate.h"
/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.   It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);
/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL_TREE if there is no such one.
   This only returns destinations in a single-use chain.  FINAL_NAME_P
   if non-NULL is written to the ssa name that represents the use.  */

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}
/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL_TREE if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        return def_stmt;
      }
  } while (1);
}
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we can not apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}
/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      cfg_changed |= gimple_purge_dead_eh_edges (bb);
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}
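
/* For example, if the comparison at STMT is a_1 != 0 and a_1 is defined
   as a_1 = b_2 | 1, combine_cond_expr_cond can be asked to fold
   (b_2 | 1) != 0, which folds to the invariant 1 (illustrative; the
   exact simplifications performed depend on fold-const.c).  */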
/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operand.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, !single_use0_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}
/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}
/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
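
/* As an illustration of the canonicalization above, for a _Bool b_1

     if (b_1 == 0) goto <bb 3>; else goto <bb 4>;

   is rewritten by forward_propagate_into_gimple_cond to

     if (b_1 != 0) goto <bb 3>; else goto <bb 4>;

   with the true and false edge flags of the block exchanged, leaving
   the semantics unchanged (illustrative GIMPLE).  */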
/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool swap = false;

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               TREE_TYPE (cond),
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return false;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               def_code,
                               TREE_TYPE (cond),
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
      else if (code == COND_EXPR
               && ((def_code == BIT_NOT_EXPR
                    && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
                   || (def_code == BIT_XOR_EXPR
                       && integer_onep (gimple_assign_rhs2 (def_stmt)))))
        {
          tmp = gimple_assign_rhs1 (def_stmt);
          swap = true;
        }
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
                                  : integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
          if (swap)
            {
              tree t = gimple_assign_rhs2 (stmt);
              gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
              gimple_assign_set_rhs3 (stmt, t);
            }
        }
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}
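
/* For example (illustrative GIMPLE), forward_propagate_into_cond turns

     x_1 = b_2 ^ 1;
     r_3 = x_1 ? v_4 : v_5;

   into r_3 = b_2 ? v_5 : v_4, propagating b_2 into the COND_EXPR and
   swapping the arms to compensate for the logical negation.  */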
/* Propagate from the ssa name definition statements of COND_EXPR
   values in the rhs of statement STMT into the conditional arms
   if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
combine_cond_exprs (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree cond, val1, val2;
  bool changed = false;

  cond = gimple_assign_rhs1 (stmt);
  val1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (val1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val1);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
          gimple_assign_set_rhs2 (stmt, val1);
          changed = true;
        }
    }
  val2 = gimple_assign_rhs3 (stmt);
  if (TREE_CODE (val2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val2);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
          gimple_assign_set_rhs3 (stmt, val2);
          changed = true;
        }
    }
  if (operand_equal_p (val1, val2, 0))
    {
      gimple_assign_set_rhs_from_tree (gsi_p, val1);
      stmt = gsi_stmt (*gsi_p);
      changed = true;
    }

  if (changed)
    update_stmt (stmt);

  return changed;
}
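
/* For example (illustrative GIMPLE), combine_cond_exprs simplifies

     y_1 = c_2 ? v_3 : v_4;
     x_5 = c_2 ? y_1 : v_6;

   to x_5 = c_2 ? v_3 : v_6, and degrades the COND_EXPR to a plain
   copy if both arms end up equal.  */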
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
          || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
         do the propagation to the current statement.  In particular
         we can end up with a conversion needed for a non-invariant
         address which we cannot do in a single statement.  */
      if (!single_use_p
          || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
              && (!is_gimple_min_invariant (def_rhs)
                  || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                      && POINTER_TYPE_P (TREE_TYPE (def_rhs))
                      && (TYPE_PRECISION (TREE_TYPE (lhs))
                          > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
        return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
        gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
               && ((gimple_assign_lhs (use_stmt) == lhs
                    && useless_type_conversion_p
                         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
                   || types_compatible_p (TREE_TYPE (lhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
               /* Don't forward anything into clobber stmts if it would result
                  in the lhs no longer being a MEM_REF.  */
               && (!gimple_clobber_p (use_stmt)
                   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *lhsp = new_lhs;
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
               && ((gimple_assign_rhs1 (use_stmt) == rhs
                    && useless_type_conversion_p
                         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
                   || types_compatible_p (TREE_TYPE (rhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *rhsp = new_rhs;
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                              single_use_p);
      /* If the use has moved to a different statement adjust
         the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
        {
          update_stmt (use_stmt);
          use_stmt = gsi_stmt (gsi);
        }
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}
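
/* For example (illustrative GIMPLE), given

     ptr_1 = &a[0];
     x_2 = *ptr_1;

   forward_propagate_addr_expr rewrites the use to x_2 = a[0], after
   which ptr_1 and its defining statement become dead.  */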
/* Forward propagate the comparison defined in *DEFGSI like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  Advance DEFGSI to the next
   statement.  */

static bool
forward_propagate_comparison (gimple_stmt_iterator *defgsi)
{
  gimple stmt = gsi_stmt (*defgsi);
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    goto bailout;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    goto bailout;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    goto bailout;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
          && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        goto bailout;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    goto bailout;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* When we remove stmt now the iterator defgsi goes off it's current
     sequence, hence advance it now.  */
  gsi_next (defgsi);

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);

bailout:
  gsi_next (defgsi);
  return false;
}
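
/* For example (illustrative GIMPLE), forward_propagate_comparison turns

     cond_1 = x_2 < y_3;
     a_4 = ~cond_1;

   for a one-bit a_4 into a_4 = x_2 >= y_3, provided the comparison can
   be inverted (for floating point operands HONOR_NANS may make the
   inversion invalid, hence the ERROR_MARK check above).  */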
/* GSI_P points to a statement which performs a narrowing integral
   conversion.

   Look for cases like:

     t = x & c;
     y = (T) t;

   Turn them into:

     t = x & c;
     y = (T) x;

   If T is narrower than X's type and C merely masks off bits outside
   of (T) and nothing else.

   Normally we'd let DCE remove the dead statement.  But no DCE runs
   after the last forwprop/combine pass, so we remove the obviously
   dead code ourselves.

   Return TRUE if a change was made, FALSE otherwise.  */

static bool
simplify_conversion_from_bitmask (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

  /* See if the input for the conversion was set via a BIT_AND_EXPR and
     the only use of the BIT_AND_EXPR result is the conversion.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == BIT_AND_EXPR
      && has_single_use (gimple_assign_lhs (rhs_def_stmt)))
    {
      tree rhs_def_operand1 = gimple_assign_rhs1 (rhs_def_stmt);
      tree rhs_def_operand2 = gimple_assign_rhs2 (rhs_def_stmt);
      tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));

      /* Now verify suitability of the BIT_AND_EXPR's operands.
         The first must be an SSA_NAME that we can propagate and the
         second must be an integer constant that masks out all the
         bits outside the final result's type, but nothing else.  */
      if (TREE_CODE (rhs_def_operand1) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand1)
          && TREE_CODE (rhs_def_operand2) == INTEGER_CST
          && operand_equal_p (rhs_def_operand2,
                              build_low_bits_mask (TREE_TYPE (rhs_def_operand2),
                                                   TYPE_PRECISION (lhs_type)),
                              0))
        {
          /* This is an optimizable case.  Replace the source operand
             in the conversion with the first source operand of the
             BIT_AND_EXPR.  */
          gimple_assign_set_rhs1 (stmt, rhs_def_operand1);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);

          /* There is no DCE after the last forwprop pass.  It's
             easy to clean up the first order effects here.  */
          gimple_stmt_iterator si;
          si = gsi_for_stmt (rhs_def_stmt);
          gsi_remove (&si, true);
          release_defs (rhs_def_stmt);
          return true;
        }
    }

  return false;
}
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.

   Returns true when the statement was changed.  */

static bool
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
          return true;
        }
    }

  return false;
}
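
/* For example (illustrative GIMPLE), simplify_not_neg_expr turns

     x_1 = ~y_2;
     lhs_3 = ~x_1;

   into lhs_3 = y_2; NEGATE_EXPR chains are handled the same way.  */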
/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  vec<tree> labels;
  labels.create (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }

  labels.release ();
}
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}
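
/* For example (illustrative GIMPLE), for an unsigned char c_1

     x_2 = (unsigned int) c_1;
     switch (x_2) <...>

   simplify_gimple_switch replaces the index with c_1 and narrows the
   case labels to the range of unsigned char, since the zero extension
   cannot produce any of the removed values.  */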
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    double_int_to_tree (sizetype,
                                                        mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
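
/* For example (illustrative GIMPLE), with

     p1_1 = &a[2];
     p2_2 = p1_1 + 8;

   constant_pointer_difference (p1_1, p2_2) returns 8: the walk above
   expresses both pointers relative to a common base and subtracts the
   accumulated byte offsets.  */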
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!host_integerp (val2, 0)
              || !host_integerp (len2, 1))
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!host_integerp (len1, 1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              if (!host_integerp (off1, 1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_low_cst (off1, 1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !host_integerp (src1, 0))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !host_integerp (diff, 1)
              || tree_int_cst_lt (len1, diff))
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_low_cst (diff, 1);
          src_len += tree_low_cst (len2, 1);
          if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
            src_len = tree_low_cst (len1, 1);
          if (src_len > 64)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
                    tree_low_cst (len1, 1));
          else
            src_buf[0] = tree_low_cst (src1, 0);
          memset (src_buf + tree_low_cst (diff, 1),
                  tree_low_cst (val2, 0), tree_low_cst (len2, 1));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                release_ssa_name (lhs1);
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2,
                                      builtin_decl_explicit (BUILT_IN_MEMCPY));
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              release_defs (stmt1);
              update_stmt (stmt2);
              return true;
            }
        }
      break;
    default:
      break;
    }
  return false;
}
/* Checks if expression has type of one-bit precision, or is a known
   truth-valued expression.  */
static bool
truth_valued_ssa_name (tree name)
{
  gimple def;
  tree type = TREE_TYPE (name);

  if (!INTEGRAL_TYPE_P (type))
    return false;
  /* Don't check here for BOOLEAN_TYPE as the precision isn't
     necessarily one and so ~X is not equal to !X.  */
  if (TYPE_PRECISION (type) == 1)
    return true;
  def = SSA_NAME_DEF_STMT (name);
  if (is_gimple_assign (def))
    return truth_value_p (gimple_assign_rhs_code (def));
  return false;
}
/* Helper routine for simplify_bitwise_binary_1 function.
   Return for the SSA name NAME the expression X if it meets the condition
   NAME = !X.  Otherwise return NULL_TREE.
   Detected patterns for NAME = !X are:
     !X and X == 0 for X with integral type.
     X ^ 1, X != 1, or ~X for X with integral type with precision of one.  */
static tree
lookup_logical_inverted_value (tree name)
{
  tree op1, op2;
  enum tree_code code;
  gimple def;

  /* If name has non-integral type, or isn't a SSA_NAME, then
     return.  */
  if (TREE_CODE (name) != SSA_NAME
      || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
    return NULL_TREE;
  def = SSA_NAME_DEF_STMT (name);
  if (!is_gimple_assign (def))
    return NULL_TREE;

  code = gimple_assign_rhs_code (def);
  op1 = gimple_assign_rhs1 (def);
  op2 = NULL_TREE;

  /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
     If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return.  */
  if (code == EQ_EXPR || code == NE_EXPR
      || code == BIT_XOR_EXPR)
    op2 = gimple_assign_rhs2 (def);

  switch (code)
    {
    case BIT_NOT_EXPR:
      if (truth_valued_ssa_name (name))
        return op1;
      break;
    case EQ_EXPR:
      /* Check if we have X == 0 and X has an integral type.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_zerop (op2))
        return op1;
      break;
    case NE_EXPR:
      /* Check if we have X != 1 and X is truth-valued.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    case BIT_XOR_EXPR:
      /* Check if we have X ^ 1 and X is truth valued.  */
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    default:
      break;
    }

  return NULL_TREE;
}
/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
   operations CODE, if one operand has the logically inverted
   value of the other.  */
static tree
simplify_bitwise_binary_1 (enum tree_code code, tree type,
                           tree arg1, tree arg2)
{
  tree anot;

  /* If CODE isn't a bitwise binary operation, return NULL_TREE.  */
  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
      && code != BIT_XOR_EXPR)
    return NULL_TREE;

  /* First check if operands ARG1 and ARG2 are equal.  If so
     return NULL_TREE as this optimization is handled by fold_stmt.  */
  if (arg1 == arg2)
    return NULL_TREE;
  /* See if we have in arguments logical-not patterns.  */
  if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
       || anot != arg2)
      && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
          || anot != arg1))
    return NULL_TREE;

  /* X & !X -> 0.  */
  if (code == BIT_AND_EXPR)
    return fold_convert (type, integer_zero_node);
  /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued.  */
  if (truth_valued_ssa_name (anot))
    return fold_convert (type, integer_one_node);

  /* ??? Otherwise result is (X != 0 ? X : 1).  not handled.  */
  return NULL_TREE;
}
/* Given a ssa_name in NAME see if it was defined by an assignment and
   set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
   to the second operand on the rhs.  */

static void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
          && can_propagate_from (def))
        {
          code1 = gimple_assign_rhs_code (def);
          arg11 = gimple_assign_rhs1 (def);
          arg21 = gimple_assign_rhs2 (def);
          arg31 = gimple_assign_rhs3 (def);
        }
    }
  else if (grhs_class == GIMPLE_TERNARY_RHS
           || grhs_class == GIMPLE_BINARY_RHS
           || grhs_class == GIMPLE_UNARY_RHS
           || grhs_class == GIMPLE_SINGLE_RHS)
    extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
  /* Ignore arg3 currently.  */
}
/* Return true if a conversion of an operand from type FROM to type TO
   should be applied after performing the operation instead.  */

static bool
hoist_conversion_for_bitop_p (tree to, tree from)
{
  /* That's a good idea if the conversion widens the operand, thus
     after hoisting the conversion the operation will be narrower.  */
  if (TYPE_PRECISION (from) < TYPE_PRECISION (to))
    return true;

  /* It's also a good idea if the conversion is to a non-integer mode.  */
  if (GET_MODE_CLASS (TYPE_MODE (to)) != MODE_INT)
    return true;

  /* Or if the precision of TO is not the same as the precision
     of its mode.  */
  if (TYPE_PRECISION (to) != GET_MODE_PRECISION (TYPE_MODE (to)))
    return true;

  return false;
}
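
/* For example, hoisting pays off for (illustrative GIMPLE)

     w_1 = (int) c_2;
     r_3 = w_1 & 15;

   where c_2 is an unsigned char: performing the AND in the narrow type
   and converting the result afterwards needs only one char-wide
   operation (see the first transform in simplify_bitwise_binary).  */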
/* GSI points to a statement of the form

   result = OP0 CODE OP1

   Where OP0 and OP1 are single bit SSA_NAMEs and CODE is either
   BIT_AND_EXPR or BIT_IOR_EXPR.

   If OP0 is fed by a bitwise negation of another single bit SSA_NAME,
   then we can simplify the two statements into a single LT_EXPR or LE_EXPR
   when code is BIT_AND_EXPR and BIT_IOR_EXPR respectively.

   If a simplification is made, return TRUE, else return FALSE.  */
static bool
simplify_bitwise_binary_boolean (gimple_stmt_iterator *gsi,
                                 enum tree_code code,
                                 tree op0, tree op1)
{
  gimple op0_def_stmt = SSA_NAME_DEF_STMT (op0);

  if (!is_gimple_assign (op0_def_stmt)
      || (gimple_assign_rhs_code (op0_def_stmt) != BIT_NOT_EXPR))
    return false;

  tree x = gimple_assign_rhs1 (op0_def_stmt);
  if (TREE_CODE (x) == SSA_NAME
      && INTEGRAL_TYPE_P (TREE_TYPE (x))
      && TYPE_PRECISION (TREE_TYPE (x)) == 1
      && TYPE_UNSIGNED (TREE_TYPE (x)) == TYPE_UNSIGNED (TREE_TYPE (op1)))
    {
      enum tree_code newcode;

      gimple stmt = gsi_stmt (*gsi);
      gimple_assign_set_rhs1 (stmt, x);
      gimple_assign_set_rhs2 (stmt, op1);
      if (code == BIT_AND_EXPR)
        newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LT_EXPR : GT_EXPR;
      else
        newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LE_EXPR : GE_EXPR;
      gimple_assign_set_rhs_code (stmt, newcode);
      update_stmt (stmt);
      return true;
    }
  return false;
}
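
/* For example (illustrative GIMPLE), for one-bit unsigned x_1 and y_2

     t_3 = ~x_1;
     r_4 = t_3 & y_2;

   is simplified by simplify_bitwise_binary_boolean to r_4 = x_1 < y_2,
   which is true exactly when x_1 is 0 and y_2 is 1.  */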
1911 /* Simplify bitwise binary operations.
1912 Return true if a transformation applied, otherwise return false. */
1915 simplify_bitwise_binary (gimple_stmt_iterator
*gsi
)
1917 gimple stmt
= gsi_stmt (*gsi
);
1918 tree arg1
= gimple_assign_rhs1 (stmt
);
1919 tree arg2
= gimple_assign_rhs2 (stmt
);
1920 enum tree_code code
= gimple_assign_rhs_code (stmt
);
1922 tree def1_arg1
, def1_arg2
, def2_arg1
, def2_arg2
;
1923 enum tree_code def1_code
, def2_code
;
1925 defcodefor_name (arg1
, &def1_code
, &def1_arg1
, &def1_arg2
);
1926 defcodefor_name (arg2
, &def2_code
, &def2_arg1
, &def2_arg2
);
1928 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST))
1930 if (TREE_CODE (arg2
) == INTEGER_CST
1931 && CONVERT_EXPR_CODE_P (def1_code
)
1932 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1
), TREE_TYPE (def1_arg1
))
1933 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1
))
1934 && int_fits_type_p (arg2
, TREE_TYPE (def1_arg1
)))
1937 tree tem
= make_ssa_name (TREE_TYPE (def1_arg1
), NULL
);
1939 gimple_build_assign_with_ops (code
, tem
, def1_arg1
,
1940 fold_convert_loc (gimple_location (stmt
),
1941 TREE_TYPE (def1_arg1
),
1943 gimple_set_location (newop
, gimple_location (stmt
));
1944 gsi_insert_before (gsi
, newop
, GSI_SAME_STMT
);
1945 gimple_assign_set_rhs_with_ops_1 (gsi
, NOP_EXPR
,
1946 tem
, NULL_TREE
, NULL_TREE
);
1947 update_stmt (gsi_stmt (*gsi
));
1951 /* For bitwise binary operations apply operand conversions to the
1952 binary operation result instead of to the operands. This allows
1953 to combine successive conversions and bitwise binary operations. */
1954 if (CONVERT_EXPR_CODE_P (def1_code
)
1955 && CONVERT_EXPR_CODE_P (def2_code
)
1956 && types_compatible_p (TREE_TYPE (def1_arg1
), TREE_TYPE (def2_arg1
))
1957 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1
), TREE_TYPE (def1_arg1
)))
1960 tree tem
= make_ssa_name (TREE_TYPE (def1_arg1
), NULL
);
1961 newop
= gimple_build_assign_with_ops (code
, tem
, def1_arg1
, def2_arg1
);
1962 gimple_set_location (newop
, gimple_location (stmt
));
1963 gsi_insert_before (gsi
, newop
, GSI_SAME_STMT
);
1964 gimple_assign_set_rhs_with_ops_1 (gsi
, NOP_EXPR
,
1965 tem
, NULL_TREE
, NULL_TREE
);
1966 update_stmt (gsi_stmt (*gsi
));
  /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
  if (def1_code == def2_code
      && def1_code == BIT_AND_EXPR
      && operand_equal_for_phi_arg_p (def1_arg2,
                                      def2_arg2))
    {
      tree b = def1_arg2;
      tree a = def1_arg1;
      tree c = def2_arg1;
      tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
      /* If A OP0 C (this usually means C is the same as A) is 0
         then fold it down correctly. */
      if (integer_zerop (inner))
        {
          gimple_assign_set_rhs_from_tree (gsi, inner);
          update_stmt (stmt);
          return true;
        }
      /* If A OP0 C (this usually means C is the same as A) is a ssa_name
         then fold it down correctly. */
      else if (TREE_CODE (inner) == SSA_NAME)
        {
          tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
                                    inner, b);
          gimple_assign_set_rhs_from_tree (gsi, outer);
          update_stmt (stmt);
          return true;
        }
      else
        {
          gimple newop;
          tree tem;
          tem = make_ssa_name (TREE_TYPE (arg2), NULL);
          newop = gimple_build_assign_with_ops (code, tem, a, c);
          gimple_set_location (newop, gimple_location (stmt));
          /* Make sure to re-process the new stmt as it's walking upwards.  */
          gsi_insert_before (gsi, newop, GSI_NEW_STMT);
          gimple_assign_set_rhs1 (stmt, tem);
          gimple_assign_set_rhs2 (stmt, b);
          gimple_assign_set_rhs_code (stmt, def1_code);
          update_stmt (stmt);
          return true;
        }
    }
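  /* E.g. (a sketch): (a & b) ^ (c & b) becomes (a ^ c) & b; when a and c
     are the same name the inner fold collapses to 0 immediately.  */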
  /* (a | CST1) & CST2  ->  (a & CST2) | (CST1 & CST2).  */
  if (code == BIT_AND_EXPR
      && def1_code == BIT_IOR_EXPR
      && CONSTANT_CLASS_P (arg2)
      && CONSTANT_CLASS_P (def1_arg2))
    {
      tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
                              arg2, def1_arg2);
      tree tem;
      gimple newop;
      if (integer_zerop (cst))
        {
          gimple_assign_set_rhs1 (stmt, def1_arg1);
          update_stmt (stmt);
          return true;
        }
      tem = make_ssa_name (TREE_TYPE (arg2), NULL);
      newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
                                            tem, def1_arg1, arg2);
      gimple_set_location (newop, gimple_location (stmt));
      /* Make sure to re-process the new stmt as it's walking upwards.  */
      gsi_insert_before (gsi, newop, GSI_NEW_STMT);
      gimple_assign_set_rhs1 (stmt, tem);
      gimple_assign_set_rhs2 (stmt, cst);
      gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
      update_stmt (stmt);
      return true;
    }
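  /* For example (a sketch): (a | 0xff00) & 0x0ff0 becomes
     (a & 0x0ff0) | 0x0f00, and when CST1 & CST2 == 0 the result
     degenerates to a & CST2.  */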
  /* Combine successive equal operations with constants.  */
  if ((code == BIT_AND_EXPR
       || code == BIT_IOR_EXPR
       || code == BIT_XOR_EXPR)
      && def1_code == code
      && CONSTANT_CLASS_P (arg2)
      && CONSTANT_CLASS_P (def1_arg2))
    {
      tree cst = fold_build2 (code, TREE_TYPE (arg2),
                              arg2, def1_arg2);
      gimple_assign_set_rhs1 (stmt, def1_arg1);
      gimple_assign_set_rhs2 (stmt, cst);
      update_stmt (stmt);
      return true;
    }
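  /* E.g. (a sketch): (a & 0xf0) & 0x3c becomes a & 0x30, and likewise
     for | and ^ with their respective constant folds.  */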
  /* Canonicalize X ^ ~0 to ~X.  */
  if (code == BIT_XOR_EXPR
      && integer_all_onesp (arg2))
    {
      gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
      gcc_assert (gsi_stmt (*gsi) == stmt);
      update_stmt (stmt);
      return true;
    }
  /* Try simple folding for X op !X, and X op X.  */
  res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
  if (res != NULL_TREE)
    {
      gimple_assign_set_rhs_from_tree (gsi, res);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }
  if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
    {
      enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
      if (def1_code == ocode)
        {
          tree x = arg2;
          enum tree_code coden;
          tree a1, a2;
          /* ( X | Y) & X -> X */
          /* ( X & Y) | X -> X */
          if (x == def1_arg1
              || x == def1_arg2)
            {
              gimple_assign_set_rhs_from_tree (gsi, x);
              update_stmt (gsi_stmt (*gsi));
              return true;
            }

          defcodefor_name (def1_arg1, &coden, &a1, &a2);
          /* (~X | Y) & X -> X & Y */
          /* (~X & Y) | X -> X | Y */
          if (coden == BIT_NOT_EXPR && a1 == x)
            {
              gimple_assign_set_rhs_with_ops (gsi, code,
                                              x, def1_arg2);
              gcc_assert (gsi_stmt (*gsi) == stmt);
              update_stmt (stmt);
              return true;
            }
          defcodefor_name (def1_arg2, &coden, &a1, &a2);
          /* (Y | ~X) & X -> X & Y */
          /* (Y & ~X) | X -> X | Y */
          if (coden == BIT_NOT_EXPR && a1 == x)
            {
              gimple_assign_set_rhs_with_ops (gsi, code,
                                              x, def1_arg1);
              gcc_assert (gsi_stmt (*gsi) == stmt);
              update_stmt (stmt);
              return true;
            }
        }
      if (def2_code == ocode)
        {
          enum tree_code coden;
          tree a1;
          tree x = arg1;
          /* X & ( X | Y) -> X */
          /* X | ( X & Y) -> X */
          if (x == def2_arg1
              || x == def2_arg2)
            {
              gimple_assign_set_rhs_from_tree (gsi, x);
              update_stmt (gsi_stmt (*gsi));
              return true;
            }
          defcodefor_name (def2_arg1, &coden, &a1, NULL);
          /* (~X | Y) & X -> X & Y */
          /* (~X & Y) | X -> X | Y */
          if (coden == BIT_NOT_EXPR && a1 == x)
            {
              gimple_assign_set_rhs_with_ops (gsi, code,
                                              x, def2_arg2);
              gcc_assert (gsi_stmt (*gsi) == stmt);
              update_stmt (stmt);
              return true;
            }
          defcodefor_name (def2_arg2, &coden, &a1, NULL);
          /* (Y | ~X) & X -> X & Y */
          /* (Y & ~X) | X -> X | Y */
          if (coden == BIT_NOT_EXPR && a1 == x)
            {
              gimple_assign_set_rhs_with_ops (gsi, code,
                                              x, def2_arg1);
              gcc_assert (gsi_stmt (*gsi) == stmt);
              update_stmt (stmt);
              return true;
            }
        }
    }
  /* If arg1 and arg2 are booleans (or any single bit type)
     then try to simplify:

        (~X & Y) -> X < Y
        (X & ~Y) -> Y < X
        (~X | Y) -> X <= Y
        (X | ~Y) -> Y <= X

     But only do this if our result feeds into a comparison as
     this transformation is not always a win, particularly on
     targets with and-not instructions.  */
  if (TREE_CODE (arg1) == SSA_NAME
      && TREE_CODE (arg2) == SSA_NAME
      && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
      && TYPE_PRECISION (TREE_TYPE (arg1)) == 1
      && TYPE_PRECISION (TREE_TYPE (arg2)) == 1
      && (TYPE_UNSIGNED (TREE_TYPE (arg1))
          == TYPE_UNSIGNED (TREE_TYPE (arg2))))
    {
      use_operand_p use_p;
      gimple use_stmt;

      if (single_imm_use (gimple_assign_lhs (stmt), &use_p, &use_stmt))
        {
          if (gimple_code (use_stmt) == GIMPLE_COND
              && gimple_cond_lhs (use_stmt) == gimple_assign_lhs (stmt)
              && integer_zerop (gimple_cond_rhs (use_stmt))
              && gimple_cond_code (use_stmt) == NE_EXPR)
            {
              if (simplify_bitwise_binary_boolean (gsi, code, arg1, arg2))
                return true;
              if (simplify_bitwise_binary_boolean (gsi, code, arg2, arg1))
                return true;
            }
        }
    }
  return false;
}
/* Recognize rotation patterns.  Return true if a transformation
   applied, otherwise return false.

   We are looking for X with unsigned type T with bitsize B, OP being
   +, | or ^, some type T2 wider than T and
   (X << CNT1) OP (X >> CNT2)                          iff CNT1 + CNT2 == B
   ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))    iff CNT1 + CNT2 == B
   (X << Y) OP (X >> (B - Y))
   (X << (int) Y) OP (X >> (int) (B - Y))
   ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
   ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
   (X << Y) | (X >> ((-Y) & (B - 1)))
   (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   and transform these into:
   X <<< CNT1
   X <<< Y

   Note, in the patterns with T2 type, the type of OP operands
   might be even a signed type, but should have precision B.  */
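/* A user-level sketch of the last, always-defined variant above
   (assuming a 32-bit unsigned int):

     unsigned int
     rotl32 (unsigned int x, unsigned int y)
     {
       return (x << y) | (x >> ((-y) & 31));
     }

   which this function rewrites into a single rotate, x r<< y.  */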
static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg[2], rtype, rotcnt = NULL_TREE;
  tree def_arg1[2], def_arg2[2];
  enum tree_code def_code[2];
  tree lhs;
  int i;
  bool swapped_p = false;
  gimple g;

  arg[0] = gimple_assign_rhs1 (stmt);
  arg[1] = gimple_assign_rhs2 (stmt);
  rtype = TREE_TYPE (arg[0]);

  /* Only create rotates in complete modes.  Other cases are not
     expanded properly.  */
  if (!INTEGRAL_TYPE_P (rtype)
      || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
    return false;

  for (i = 0; i < 2; i++)
    defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
  /* Look through narrowing conversions.  */
  if (CONVERT_EXPR_CODE_P (def_code[0])
      && CONVERT_EXPR_CODE_P (def_code[1])
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
         == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
      && has_single_use (arg[0])
      && has_single_use (arg[1]))
    {
      for (i = 0; i < 2; i++)
        {
          arg[i] = def_arg1[i];
          defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
        }
    }

  /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
  for (i = 0; i < 2; i++)
    if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
      return false;
    else if (!has_single_use (arg[i]))
      return false;
  if (def_code[0] == def_code[1])
    return false;
  /* If we've looked through narrowing conversions before, look through
     widening conversions from unsigned type with the same precision
     as rtype here.  */
  if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    for (i = 0; i < 2; i++)
      {
        tree tem;
        enum tree_code code;
        defcodefor_name (def_arg1[i], &code, &tem, NULL);
        if (!CONVERT_EXPR_CODE_P (code)
            || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
            || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
          return false;
        def_arg1[i] = tem;
      }
  /* Both shifts have to use the same first operand.  */
  if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
    return false;
  if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    return false;

  /* CNT1 + CNT2 == B case above.  */
  if (host_integerp (def_arg2[0], 1)
      && host_integerp (def_arg2[1], 1)
      && (unsigned HOST_WIDE_INT) tree_low_cst (def_arg2[0], 1)
         + tree_low_cst (def_arg2[1], 1) == TYPE_PRECISION (rtype))
    rotcnt = def_arg2[0];
  else if (TREE_CODE (def_arg2[0]) != SSA_NAME
           || TREE_CODE (def_arg2[1]) != SSA_NAME)
    return false;
  else
    {
      tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
      enum tree_code cdef_code[2];
      /* Look through conversion of the shift count argument.
         The C/C++ FE cast any shift count argument to integer_type_node.
         The only problem might be if the shift count type maximum value
         is equal or smaller than number of bits in rtype.  */
      for (i = 0; i < 2; i++)
        {
          def_arg2_alt[i] = def_arg2[i];
          defcodefor_name (def_arg2[i], &cdef_code[i],
                           &cdef_arg1[i], &cdef_arg2[i]);
          if (CONVERT_EXPR_CODE_P (cdef_code[i])
              && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
              && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
                 > floor_log2 (TYPE_PRECISION (rtype))
              && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
                 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
            {
              def_arg2_alt[i] = cdef_arg1[i];
              defcodefor_name (def_arg2_alt[i], &cdef_code[i],
                               &cdef_arg1[i], &cdef_arg2[i]);
            }
        }
      for (i = 0; i < 2; i++)
        /* Check for one shift count being Y and the other B - Y,
           with optional casts.  */
        if (cdef_code[i] == MINUS_EXPR
            && host_integerp (cdef_arg1[i], 0)
            && tree_low_cst (cdef_arg1[i], 0) == TYPE_PRECISION (rtype)
            && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
          {
            tree tem;
            enum tree_code code;

            if (cdef_arg2[i] == def_arg2[1 - i]
                || cdef_arg2[i] == def_arg2_alt[1 - i])
              {
                rotcnt = cdef_arg2[i];
                break;
              }
            defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
                && (tem == def_arg2[1 - i]
                    || tem == def_arg2_alt[1 - i]))
              {
                rotcnt = tem;
                break;
              }
          }
        /* The above sequence isn't safe for Y being 0,
           because then one of the shifts triggers undefined behavior.
           This alternative is safe even for rotation count of 0.
           One shift count is Y and the other (-Y) & (B - 1).  */
        else if (cdef_code[i] == BIT_AND_EXPR
                 && host_integerp (cdef_arg2[i], 0)
                 && tree_low_cst (cdef_arg2[i], 0)
                    == TYPE_PRECISION (rtype) - 1
                 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
                 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
          {
            tree tem;
            enum tree_code code;

            defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
              defcodefor_name (tem, &code, &tem, NULL);

            if (code == NEGATE_EXPR)
              {
                if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
                  {
                    rotcnt = tem;
                    break;
                  }
                defcodefor_name (tem, &code, &tem, NULL);
                if (CONVERT_EXPR_CODE_P (code)
                    && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                    && TYPE_PRECISION (TREE_TYPE (tem))
                       > floor_log2 (TYPE_PRECISION (rtype))
                    && TYPE_PRECISION (TREE_TYPE (tem))
                       == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
                    && (tem == def_arg2[1 - i]
                        || tem == def_arg2_alt[1 - i]))
                  {
                    rotcnt = tem;
                    break;
                  }
              }
          }
      if (rotcnt == NULL_TREE)
        return false;
      swapped_p = i != 1;
    }

  if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
                                  TREE_TYPE (rotcnt)))
    {
      g = gimple_build_assign_with_ops (NOP_EXPR,
                                        make_ssa_name (TREE_TYPE (def_arg2[0]),
                                                       NULL),
                                        rotcnt, NULL_TREE);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      rotcnt = gimple_assign_lhs (g);
    }
  lhs = gimple_assign_lhs (stmt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
  g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
                                    ? LROTATE_EXPR : RROTATE_EXPR,
                                    lhs, def_arg1[0], rotcnt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    {
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
                                        lhs, NULL_TREE);
    }
  gsi_replace (gsi, g, false);
  return true;
}
/* Perform re-associations of the plus or minus statement STMT that are
   always permitted.  Returns true if the CFG was changed.  */

static bool
associate_plusminus (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool changed;
  /* We can't reassociate at all for saturating types.  */
  if (TYPE_SATURATING (TREE_TYPE (rhs1)))
    return false;

  /* First contract negates.  */
  do
    {
      changed = false;

      /* A +- (-B) -> A -+ B.  */
      if (TREE_CODE (rhs2) == SSA_NAME)
        {
          gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
          if (is_gimple_assign (def_stmt)
              && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
              && can_propagate_from (def_stmt))
            {
              code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
              gimple_assign_set_rhs_code (stmt, code);
              rhs2 = gimple_assign_rhs1 (def_stmt);
              gimple_assign_set_rhs2 (stmt, rhs2);
              gimple_set_modified (stmt, true);
              changed = true;
            }
        }

      /* (-A) + B -> B - A.  */
      if (TREE_CODE (rhs1) == SSA_NAME
          && code == PLUS_EXPR)
        {
          gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
          if (is_gimple_assign (def_stmt)
              && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
              && can_propagate_from (def_stmt))
            {
              code = MINUS_EXPR;
              gimple_assign_set_rhs_code (stmt, code);
              rhs1 = rhs2;
              gimple_assign_set_rhs1 (stmt, rhs1);
              rhs2 = gimple_assign_rhs1 (def_stmt);
              gimple_assign_set_rhs2 (stmt, rhs2);
              gimple_set_modified (stmt, true);
              changed = true;
            }
        }
    }
  while (changed);
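  /* Sketch of the two rules just applied: a - (-b) becomes a + b, and
     (-a) + b becomes b - a; the loop repeats because one rewrite can
     expose the other.  */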
  /* We can't reassociate floating-point or fixed-point plus or minus
     because of saturation to +-Inf.  */
  if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
      || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
    goto out;

  /* Second match patterns that allow contracting a plus-minus pair
     irrespective of overflow issues.

        (A +- B) - A       ->  +- B
        (A +- B) -+ B      ->  A
        (CST +- A) +- CST  ->  CST +- A
        (A +- CST) +- CST  ->  A +- CST
        ~A + A             ->  -1
        ~A + 1             ->  -A
        A - (A +- B)       ->  -+ B
        A +- (B +- A)      ->  +- B
        CST +- (CST +- A)  ->  CST +- A
        CST +- (A +- CST)  ->  CST +- A
        A + ~A             ->  -1

     via commutating the addition and contracting operations to zero
     by reassociation.  */
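  /* For example (a sketch): with
       tmp_1 = a_2 + b_3;  res_4 = tmp_1 - a_2;
     the first pattern contracts the pair and res_4 becomes b_3,
     independently of any overflow concerns.  */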
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          if (def_code == PLUS_EXPR
              || def_code == MINUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs2, 0)
                  && code == MINUS_EXPR)
                {
                  /* (A +- B) - A -> +- B.  */
                  code = ((def_code == PLUS_EXPR)
                          ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
                  rhs1 = def_rhs2;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (operand_equal_p (def_rhs2, rhs2, 0)
                       && code != def_code)
                {
                  /* (A +- B) -+ B -> A.  */
                  code = TREE_CODE (def_rhs1);
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (CONSTANT_CLASS_P (rhs2)
                       && CONSTANT_CLASS_P (def_rhs1))
                {
                  /* (CST +- A) +- CST -> CST +- A.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs1),
                                          def_rhs1, rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = def_code;
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs2;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
              else if (CONSTANT_CLASS_P (rhs2)
                       && CONSTANT_CLASS_P (def_rhs2))
                {
                  /* (A +- CST) +- CST -> A +- CST.  */
                  enum tree_code mix = (code == def_code)
                                       ? PLUS_EXPR : MINUS_EXPR;
                  tree cst = fold_binary (mix, TREE_TYPE (rhs1),
                                          def_rhs2, rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = def_code;
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = def_rhs1;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = cst;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
            }
          else if (def_code == BIT_NOT_EXPR && code == PLUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs2, 0))
                {
                  /* ~A + A -> -1.  */
                  rhs1 = build_all_ones_cst (TREE_TYPE (rhs2));
                  rhs2 = NULL_TREE;
                  code = TREE_CODE (rhs1);
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if ((TREE_CODE (TREE_TYPE (rhs2)) != COMPLEX_TYPE
                        && integer_onep (rhs2))
                       || (TREE_CODE (rhs2) == COMPLEX_CST
                           && integer_onep (TREE_REALPART (rhs2))
                           && integer_onep (TREE_IMAGPART (rhs2))))
                {
                  /* ~A + 1 -> -A.  */
                  code = NEGATE_EXPR;
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
            }
        }
    }
  if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          if (def_code == PLUS_EXPR
              || def_code == MINUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs1, 0)
                  && code == MINUS_EXPR)
                {
                  /* A - (A +- B) -> -+ B.  */
                  code = ((def_code == PLUS_EXPR)
                          ? NEGATE_EXPR : TREE_CODE (def_rhs2));
                  rhs1 = def_rhs2;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (operand_equal_p (def_rhs2, rhs1, 0)
                       && code != def_code)
                {
                  /* A +- (B +- A) -> +- B.  */
                  code = ((code == PLUS_EXPR)
                          ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (CONSTANT_CLASS_P (rhs1)
                       && CONSTANT_CLASS_P (def_rhs1))
                {
                  /* CST +- (CST +- A) -> CST +- A.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs2),
                                          rhs1, def_rhs1);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs2;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
              else if (CONSTANT_CLASS_P (rhs1)
                       && CONSTANT_CLASS_P (def_rhs2))
                {
                  /* CST +- (A +- CST) -> CST +- A.  */
                  tree cst = fold_binary (def_code == code
                                          ? PLUS_EXPR : MINUS_EXPR,
                                          TREE_TYPE (rhs2), rhs1, def_rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs1;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
            }
          else if (def_code == BIT_NOT_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              if (code == PLUS_EXPR
                  && operand_equal_p (def_rhs1, rhs1, 0))
                {
                  /* A + ~A -> -1.  */
                  rhs1 = build_all_ones_cst (TREE_TYPE (rhs1));
                  rhs2 = NULL_TREE;
                  code = TREE_CODE (rhs1);
                  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (*gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
            }
        }
    }

out:
  if (gimple_modified_p (stmt))
    {
      fold_stmt_inplace (gsi);
      update_stmt (stmt);
      if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
          && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
        return true;
    }

  return false;
}
/* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI.  Returns
   true if anything changed, false otherwise.  */

static bool
associate_pointerplus (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree ptr, rhs, algn;

  /* Pattern match
       tem = (sizetype) ptr;
       tem = tem & algn;
       tem = -tem;
       ... = ptr p+ tem;
     and produce the simpler and easier to analyze with respect to alignment
       ... = ptr & ~algn;  */
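  /* In source terms (a sketch): this is the align-downward idiom
       ptr2 = ptr - ((sizetype) ptr & (align - 1));
     with ALIGN a power of two, which becomes
       ptr2 = ptr & ~(align - 1);  */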
  ptr = gimple_assign_rhs1 (stmt);
  rhs = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
    return false;
  rhs = gimple_assign_rhs1 (def_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
    return false;
  rhs = gimple_assign_rhs1 (def_stmt);
  algn = gimple_assign_rhs2 (def_stmt);
  if (TREE_CODE (rhs) != SSA_NAME
      || TREE_CODE (algn) != INTEGER_CST)
    return false;
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    return false;
  if (gimple_assign_rhs1 (def_stmt) != ptr)
    return false;

  algn = double_int_to_tree (TREE_TYPE (ptr), ~tree_to_double_int (algn));
  gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
  fold_stmt_inplace (gsi);
  update_stmt (stmt);

  return true;
}
/* Combine two conversions in a row for the second conversion at *GSI.
   Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
   run.  Else it returns 0.  */
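/* Two sketches of cases handled below: a sign-extension of a
   zero-extension such as
     unsigned short u = ...;  long l = (long) (int) u;
   needs only the single zero-extension, while a truncate-and-widen
   pair like
     int i = ...;  int j = (int) (unsigned short) i;
   is canonicalized to  j = i & 0xffff;  */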
static int
combine_conversions (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, lhs;
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum tree_code code2;

  gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
                       || code == FLOAT_EXPR
                       || code == FIX_TRUNC_EXPR);

  lhs = gimple_assign_lhs (stmt);
  op0 = gimple_assign_rhs1 (stmt);
  if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
    {
      gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
      return 1;
    }

  if (TREE_CODE (op0) != SSA_NAME)
    return 0;

  def_stmt = SSA_NAME_DEF_STMT (op0);
  if (!is_gimple_assign (def_stmt))
    return 0;

  code2 = gimple_assign_rhs_code (def_stmt);

  if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
    {
      tree defop0 = gimple_assign_rhs1 (def_stmt);
      tree type = TREE_TYPE (lhs);
      tree inside_type = TREE_TYPE (defop0);
      tree inter_type = TREE_TYPE (op0);
      int inside_int = INTEGRAL_TYPE_P (inside_type);
      int inside_ptr = POINTER_TYPE_P (inside_type);
      int inside_float = FLOAT_TYPE_P (inside_type);
      int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
      unsigned int inside_prec = TYPE_PRECISION (inside_type);
      int inside_unsignedp = TYPE_UNSIGNED (inside_type);
      int inter_int = INTEGRAL_TYPE_P (inter_type);
      int inter_ptr = POINTER_TYPE_P (inter_type);
      int inter_float = FLOAT_TYPE_P (inter_type);
      int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
      unsigned int inter_prec = TYPE_PRECISION (inter_type);
      int inter_unsignedp = TYPE_UNSIGNED (inter_type);
      int final_int = INTEGRAL_TYPE_P (type);
      int final_ptr = POINTER_TYPE_P (type);
      int final_float = FLOAT_TYPE_P (type);
      int final_vec = TREE_CODE (type) == VECTOR_TYPE;
      unsigned int final_prec = TYPE_PRECISION (type);
      int final_unsignedp = TYPE_UNSIGNED (type);
      /* Don't propagate ssa names that occur in abnormal phis.  */
      if (TREE_CODE (defop0) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
        return 0;

      /* In addition to the cases of two conversions in a row
         handled below, if we are converting something to its own
         type via an object of identical or wider precision, neither
         conversion is needed.  */
      if (useless_type_conversion_p (type, inside_type)
          && (((inter_int || inter_ptr) && final_int)
              || (inter_float && final_float))
          && inter_prec >= final_prec)
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
          gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
          update_stmt (stmt);
          return remove_prop_source_from_use (op0) ? 2 : 1;
        }

      /* Likewise, if the intermediate and initial types are either both
         float or both integer, we don't need the middle conversion if the
         former is wider than the latter and doesn't change the signedness
         (for integers).  Avoid this if the final type is a pointer since
         then we sometimes need the middle conversion.  Likewise if the
         final type has a precision not equal to the size of its mode.  */
      if (((inter_int && inside_int)
           || (inter_float && inside_float)
           || (inter_vec && inside_vec))
          && inter_prec >= inside_prec
          && (inter_float || inter_vec
              || inter_unsignedp == inside_unsignedp)
          && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
                && TYPE_MODE (type) == TYPE_MODE (inter_type))
          && ! final_ptr
          && (! final_vec || inter_prec == inside_prec))
        {
          gimple_assign_set_rhs1 (stmt, defop0);
          update_stmt (stmt);
          return remove_prop_source_from_use (op0) ? 2 : 1;
        }

      /* If we have a sign-extension of a zero-extended value, we can
         replace that by a single zero-extension.  Likewise if the
         final conversion does not change precision we can drop the
         intermediate conversion.  */
      if (inside_int && inter_int && final_int
          && ((inside_prec < inter_prec && inter_prec < final_prec
               && inside_unsignedp && !inter_unsignedp)
              || final_prec == inter_prec))
        {
          gimple_assign_set_rhs1 (stmt, defop0);
          update_stmt (stmt);
          return remove_prop_source_from_use (op0) ? 2 : 1;
        }
      /* Two conversions in a row are not needed unless:
         - some conversion is floating-point (overstrict for now), or
         - some conversion is a vector (overstrict for now), or
         - the intermediate type is narrower than both initial and
           final, or
         - the intermediate type and innermost type differ in signedness,
           and the outermost type is wider than the intermediate, or
         - the initial type is a pointer type and the precisions of the
           intermediate and final types differ, or
         - the final type is a pointer type and the precisions of the
           initial and intermediate types differ.  */
      if (! inside_float && ! inter_float && ! final_float
          && ! inside_vec && ! inter_vec && ! final_vec
          && (inter_prec >= inside_prec || inter_prec >= final_prec)
          && ! (inside_int && inter_int
                && inter_unsignedp != inside_unsignedp
                && inter_prec < final_prec)
          && ((inter_unsignedp && inter_prec > inside_prec)
              == (final_unsignedp && final_prec > inter_prec))
          && ! (inside_ptr && inter_prec != final_prec)
          && ! (final_ptr && inside_prec != inter_prec)
          && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
                && TYPE_MODE (type) == TYPE_MODE (inter_type)))
        {
          gimple_assign_set_rhs1 (stmt, defop0);
          update_stmt (stmt);
          return remove_prop_source_from_use (op0) ? 2 : 1;
        }
      /* A truncation to an unsigned type should be canonicalized as
         bitwise and of a mask.  */
      if (final_int && inter_int && inside_int
          && final_prec == inside_prec
          && final_prec > inter_prec
          && inter_unsignedp)
        {
          tree tem;
          tem = fold_build2 (BIT_AND_EXPR, inside_type,
                             defop0,
                             double_int_to_tree
                               (inside_type, double_int::mask (inter_prec)));
          if (!useless_type_conversion_p (type, inside_type))
            {
              tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
                                              GSI_SAME_STMT);
              gimple_assign_set_rhs1 (stmt, tem);
            }
          else
            gimple_assign_set_rhs_from_tree (gsi, tem);
          update_stmt (gsi_stmt (*gsi));
          return 1;
        }
      /* If we are converting an integer to a floating-point that can
         represent it exactly and back to an integer, we can skip the
         floating-point conversion.  */
      if (inside_int && inter_float && final_int &&
          (unsigned) significand_size (TYPE_MODE (inter_type))
          >= inside_prec - !inside_unsignedp)
        {
          if (useless_type_conversion_p (type, inside_type))
            {
              gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
              gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
              update_stmt (stmt);
              return remove_prop_source_from_use (op0) ? 2 : 1;
            }
          else
            {
              gimple_assign_set_rhs1 (stmt, defop0);
              gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
              update_stmt (stmt);
              return remove_prop_source_from_use (op0) ? 2 : 1;
            }
        }
    }

  return 0;
}
/* Combine an element access with a shuffle.  Returns true if there were
   any changes made, else it returns false.  */
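/* Sketch (assuming 32-bit vector lanes): for
     v2 = VEC_PERM_EXPR <v0, v1, { 3, 0, 1, 2 }>;
     x = BIT_FIELD_REF <v2, 32, 0>;
   the selected element really is lane 3 of v0, so the access becomes
     x = BIT_FIELD_REF <v0, 32, 96>;  */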
static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op0, op1, op2;
  tree elem_type;
  unsigned idx, n, size;
  enum tree_code code;

  op = gimple_assign_rhs1 (stmt);
  gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);

  op0 = TREE_OPERAND (op, 0);
  if (TREE_CODE (op0) != SSA_NAME
      || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
    return false;

  def_stmt = get_prop_source_stmt (op0, false, NULL);
  if (!def_stmt || !can_propagate_from (def_stmt))
    return false;

  op1 = TREE_OPERAND (op, 1);
  op2 = TREE_OPERAND (op, 2);
  code = gimple_assign_rhs_code (def_stmt);

  if (code == CONSTRUCTOR)
    {
      tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
                               gimple_assign_rhs1 (def_stmt), op1, op2);
      if (!tem || !valid_gimple_rhs_p (tem))
        return false;
      gimple_assign_set_rhs_from_tree (gsi, tem);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  elem_type = TREE_TYPE (TREE_TYPE (op0));
  if (TREE_TYPE (op) != elem_type)
    return false;

  size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
  n = TREE_INT_CST_LOW (op1) / size;
  if (n != 1)
    return false;
  idx = TREE_INT_CST_LOW (op2) / size;

  if (code == VEC_PERM_EXPR)
    {
      tree p, m, index, tem;
      unsigned nelts;
      m = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (m) != VECTOR_CST)
        return false;
      nelts = VECTOR_CST_NELTS (m);
      idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
      idx %= 2 * nelts;
      if (idx < nelts)
        {
          p = gimple_assign_rhs1 (def_stmt);
        }
      else
        {
          p = gimple_assign_rhs2 (def_stmt);
          idx -= nelts;
        }
      index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
      tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
                    unshare_expr (p), op1, index);
      gimple_assign_set_rhs1 (stmt, tem);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}
/* Determine whether applying the 2 permutations (mask1 then mask2)
   gives back one of the inputs.  */
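/* E.g. with four lanes: mask1 = { 4, 5, 6, 7 } followed by
   mask2 = { 0, 1, 2, 3 } selects the second input unchanged, so 2 is
   returned; applying { 0, 1, 2, 3 } twice returns 1.  */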
static int
is_combined_permutation_identity (tree mask1, tree mask2)
{
  tree mask;
  unsigned int nelts, i, j;
  bool maybe_identity1 = true;
  bool maybe_identity2 = true;

  gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
                       && TREE_CODE (mask2) == VECTOR_CST);
  mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
  gcc_assert (TREE_CODE (mask) == VECTOR_CST);

  nelts = VECTOR_CST_NELTS (mask);
  for (i = 0; i < nelts; i++)
    {
      tree val = VECTOR_CST_ELT (mask, i);
      gcc_assert (TREE_CODE (val) == INTEGER_CST);
      j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
      if (j == i)
        maybe_identity2 = false;
      else if (j == i + nelts)
        maybe_identity1 = false;
      else
        return 0;
    }
  return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
}
/* Combine a shuffle with its arguments.  Returns 1 if there were any
   changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.  */
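/* Sketch: two chained shuffles
     t = VEC_PERM_EXPR <a, b, m1>;  r = VEC_PERM_EXPR <t, t, m2>;
   collapse to a plain copy r = a (or r = b) when the combined mask is
   an identity, and a shuffle of constants or constructors is folded
   into a new constant or constructor outright.  */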
static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return 0;

      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
        return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
        return 0;
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
        return 0;
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
                          : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }

  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
    {
      tree opt;
      bool ret = false;
      if (op0 != op1)
        {
          if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
            return 0;

          if (TREE_CODE (op1) == VECTOR_CST)
            arg1 = op1;
          else if (TREE_CODE (op1) == SSA_NAME)
            {
              enum tree_code code2;

              gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
              if (!def_stmt2 || !can_propagate_from (def_stmt2))
                return 0;

              code2 = gimple_assign_rhs_code (def_stmt2);
              if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
                return 0;
              arg1 = gimple_assign_rhs1 (def_stmt2);
            }
          else
            return 0;
        }
      else
        {
          /* Already used twice in this statement.  */
          if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
            return 0;
          arg1 = arg0;
        }
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
      if (!opt
          || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
        return 0;
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
        ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
        ret |= remove_prop_source_from_use (op1);
      return ret ? 2 : 1;
    }

  return 0;
}
/* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */
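/* Sketch (assuming 32-bit lanes): a constructor that merely gathers
   lanes of one vector,
     w = { BIT_FIELD_REF <v, 32, 32>, BIT_FIELD_REF <v, 32, 0>, ... };
   becomes  w = VEC_PERM_EXPR <v, v, { 1, 0, ... }>;  and when lane i
   always selects lane i it degenerates to the copy  w = v;  */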
static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op2, orig, type, elem_type;
  unsigned elem_size, nelts, i;
  enum tree_code code;
  constructor_elt *elt;
  unsigned char *sel;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  nelts = TYPE_VECTOR_SUBPARTS (type);
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  sel = XALLOCAVEC (unsigned char, nelts);
  orig = NULL;
  maybe_ident = true;
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;

      if (i >= nelts)
        return false;

      if (TREE_CODE (elt->value) != SSA_NAME)
        return false;
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      if (!def_stmt)
        return false;
      code = gimple_assign_rhs_code (def_stmt);
      if (code != BIT_FIELD_REF)
        return false;
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      if (orig)
        {
          if (ref != orig)
            return false;
        }
      else
        {
          if (TREE_CODE (ref) != SSA_NAME)
            return false;
          if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
            return false;
          orig = ref;
        }
      if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
        return false;
      sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
      if (sel[i] != i) maybe_ident = false;
    }
  if (i < nelts)
    return false;

  if (maybe_ident)
    gimple_assign_set_rhs_from_tree (gsi, orig);
  else
    {
      tree mask_type, *mask_elts;

      if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
        return false;
      mask_type
        = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
                             nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
          || GET_MODE_SIZE (TYPE_MODE (mask_type))
             != GET_MODE_SIZE (TYPE_MODE (type)))
        return false;
      mask_elts = XALLOCAVEC (tree, nelts);
      for (i = 0; i < nelts; i++)
        mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
      op2 = build_vector (mask_type, mask_elts);
      gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}
/* Main entry point for the forward propagation and statement combine
   optimizer.  */

static unsigned int
ssa_forward_propagate_and_combine (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* Apply forward propagation to all stmts in the basic-block.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple stmt = gsi_stmt (gsi);
          tree lhs, rhs;
          enum tree_code code;

          if (!is_gimple_assign (stmt))
            {
              gsi_next (&gsi);
              continue;
            }

          lhs = gimple_assign_lhs (stmt);
          rhs = gimple_assign_rhs1 (stmt);
          code = gimple_assign_rhs_code (stmt);
          if (TREE_CODE (lhs) != SSA_NAME
              || has_zero_uses (lhs))
            {
              gsi_next (&gsi);
              continue;
            }

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (code == ADDR_EXPR
              /* Handle pointer conversions on invariant addresses
                 as well, as this is valid gimple.  */
              || (CONVERT_EXPR_CODE_P (code)
                  && TREE_CODE (rhs) == ADDR_EXPR
                  && POINTER_TYPE_P (TREE_TYPE (lhs))))
            {
              tree base = get_base_address (TREE_OPERAND (rhs, 0));
              if ((!base
                   || !DECL_P (base)
                   || decl_address_invariant_p (base))
                  && !stmt_references_abnormal_ssa_name (stmt)
                  && forward_propagate_addr_expr (lhs, rhs))
                {
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
          else if (code == POINTER_PLUS_EXPR)
            {
              tree off = gimple_assign_rhs2 (stmt);
              if (TREE_CODE (off) == INTEGER_CST
                  && can_propagate_from (stmt)
                  && !simple_iv_increment_p (stmt)
                  /* ??? Better adjust the interface to that function
                     instead of building new trees here.  */
                  && forward_propagate_addr_expr
                       (lhs,
                        build1_loc (gimple_location (stmt),
                                    ADDR_EXPR, TREE_TYPE (rhs),
                                    fold_build2 (MEM_REF,
                                                 TREE_TYPE (TREE_TYPE (rhs)),
                                                 rhs,
                                                 fold_convert (ptr_type_node,
                                                               off)))))
                {
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else if (is_gimple_min_invariant (rhs))
                {
                  /* Make sure to fold &a[0] + off_1 here.  */
                  fold_stmt_inplace (&gsi);
                  update_stmt (stmt);
                  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
                    gsi_next (&gsi);
                }
              else
                gsi_next (&gsi);
            }
          else if (TREE_CODE_CLASS (code) == tcc_comparison)
            {
              if (forward_propagate_comparison (&gsi))
                cfg_changed = true;
            }
          else
            gsi_next (&gsi);
        }

      /* Combine stmts with the stmts defining their operands.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple stmt = gsi_stmt (gsi);
          bool changed = false;

          /* Mark stmt as potentially needing revisiting.  */
          gimple_set_plf (stmt, GF_PLF_1, false);

          switch (gimple_code (stmt))
            {
            case GIMPLE_ASSIGN:
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                enum tree_code code = gimple_assign_rhs_code (stmt);

                if ((code == BIT_NOT_EXPR
                     || code == NEGATE_EXPR)
                    && TREE_CODE (rhs1) == SSA_NAME)
                  changed = simplify_not_neg_expr (&gsi);
                else if (code == COND_EXPR
                         || code == VEC_COND_EXPR)
                  {
                    /* In this case the entire COND_EXPR is in rhs1. */
                    if (forward_propagate_into_cond (&gsi)
                        || combine_cond_exprs (&gsi))
                      {
                        changed = true;
                        stmt = gsi_stmt (gsi);
                      }
                  }
                else if (TREE_CODE_CLASS (code) == tcc_comparison)
                  {
                    int did_something;
                    did_something = forward_propagate_into_comparison (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    changed = did_something != 0;
                  }
                else if ((code == PLUS_EXPR
                          || code == BIT_IOR_EXPR
                          || code == BIT_XOR_EXPR)
                         && simplify_rotate (&gsi))
                  changed = true;
                else if (code == BIT_AND_EXPR
                         || code == BIT_IOR_EXPR
                         || code == BIT_XOR_EXPR)
                  changed = simplify_bitwise_binary (&gsi);
                else if (code == PLUS_EXPR
                         || code == MINUS_EXPR)
                  changed = associate_plusminus (&gsi);
                else if (code == POINTER_PLUS_EXPR)
                  changed = associate_pointerplus (&gsi);
                else if (CONVERT_EXPR_CODE_P (code)
                         || code == FLOAT_EXPR
                         || code == FIX_TRUNC_EXPR)
                  {
                    int did_something = combine_conversions (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;

                    /* If we have a narrowing conversion to an integral
                       type that is fed by a BIT_AND_EXPR, we might be
                       able to remove the BIT_AND_EXPR if it merely
                       masks off bits outside the final type (and nothing
                       else).  */
                    if (! did_something)
                      {
                        tree outer_type = TREE_TYPE (gimple_assign_lhs (stmt));
                        tree inner_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
                        if (INTEGRAL_TYPE_P (outer_type)
                            && INTEGRAL_TYPE_P (inner_type)
                            && (TYPE_PRECISION (outer_type)
                                <= TYPE_PRECISION (inner_type)))
                          did_something = simplify_conversion_from_bitmask (&gsi);
                      }

                    changed = did_something != 0;
                  }
                else if (code == VEC_PERM_EXPR)
                  {
                    int did_something = simplify_permutation (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    changed = did_something != 0;
                  }
                else if (code == BIT_FIELD_REF)
                  changed = simplify_bitfield_ref (&gsi);
                else if (code == CONSTRUCTOR
                         && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
                  changed = simplify_vector_constructor (&gsi);
                break;
              }

            case GIMPLE_SWITCH:
              changed = simplify_gimple_switch (stmt);
              break;

            case GIMPLE_COND:
              {
                int did_something;
                did_something = forward_propagate_into_gimple_cond (stmt);
                if (did_something == 2)
                  cfg_changed = true;
                changed = did_something != 0;
                break;
              }

            case GIMPLE_CALL:
              {
                tree callee = gimple_call_fndecl (stmt);
                if (callee != NULL_TREE
                    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
                  changed = simplify_builtin_call (&gsi, callee);
                break;
              }

            default:;
            }

          if (changed)
            {
              /* If the stmt changed then re-visit it and the statements
                 inserted before it.  */
              for (; !gsi_end_p (gsi); gsi_prev (&gsi))
                if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
                  break;
              if (gsi_end_p (gsi))
                gsi = gsi_start_bb (bb);
              else
                gsi_next (&gsi);
            }
          else
            {
              /* Stmt no longer needs to be revisited.  */
              gimple_set_plf (stmt, GF_PLF_1, true);
              gsi_next (&gsi);
            }
        }
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
static bool
gate_forwprop (void)
{
  return flag_tree_forwprop;
}

namespace {

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
};

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (m_ctxt); }
  bool gate () { return gate_forwprop (); }
  unsigned int execute () { return ssa_forward_propagate_and_combine (); }

}; // class pass_forwprop

} // anon namespace

gimple_opt_pass *
make_pass_forwprop (gcc::context *ctxt)
{
  return new pass_forwprop (ctxt);
}