gcc/tree-ssa-forwprop.cc
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "cfghooks.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "optabs-query.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stor-layout.h"
35 #include "gimple-iterator.h"
36 #include "gimple-fold.h"
37 #include "tree-eh.h"
38 #include "gimplify.h"
39 #include "gimplify-me.h"
40 #include "tree-cfg.h"
41 #include "expr.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa-propagate.h"
44 #include "tree-ssa-dom.h"
45 #include "tree-ssa-strlen.h"
46 #include "builtins.h"
47 #include "tree-cfgcleanup.h"
48 #include "cfganal.h"
49 #include "optabs-tree.h"
50 #include "tree-vector-builder.h"
51 #include "vec-perm-indices.h"
52 #include "internal-fn.h"
53 #include "cgraph.h"
54 #include "tree-ssa.h"
55 #include "gimple-range.h"
57 /* This pass propagates the RHS of assignment statements into use
58 sites of the LHS of the assignment. It's basically a specialized
59 form of tree combination. It is hoped all of this can disappear
60 when we have a generalized tree combiner.
62 One class of common cases we handle is forward propagating a single use
63 variable into a COND_EXPR.
65 bb0:
66 x = a COND b;
67 if (x) goto ... else goto ...
69 Will be transformed into:
71 bb0:
72 if (a COND b) goto ... else goto ...
74 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
76 Or (assuming c1 and c2 are constants):
78 bb0:
79 x = a + c1;
80 if (x EQ/NEQ c2) goto ... else goto ...
82 Will be transformed into:
84 bb0:
85 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
87 Similarly for x = a - c1.
89 Or
91 bb0:
92 x = !a
93 if (x) goto ... else goto ...
95 Will be transformed into:
97 bb0:
98 if (a == 0) goto ... else goto ...
100 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
101 For these cases, we propagate A into all, possibly more than one,
102 COND_EXPRs that use X.
104 Or
106 bb0:
107 x = (typecast) a
108 if (x) goto ... else goto ...
110 Will be transformed into:
112 bb0:
113 if (a != 0) goto ... else goto ...
115 (Assuming a is an integral type and x is a boolean or x is an
116 integral and a is a boolean.)
118 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
119 For these cases, we propagate A into all, possibly more than one,
120 COND_EXPRs that use X.
122 In addition to eliminating the variable and the statement which assigns
123 a value to the variable, we may be able to later thread the jump without
124 adding insane complexity in the dominator optimizer.
126 Also note these transformations can cascade. We handle this by having
127 a worklist of COND_EXPR statements to examine. As we make a change to
128 a statement, we put it back on the worklist to examine on the next
129 iteration of the main loop.
131 A second class of propagation opportunities arises for ADDR_EXPR
132 nodes.
134 ptr = &x->y->z;
135 res = *ptr;
137 Will get turned into
139 res = x->y->z;
142 ptr = (type1*)&type2var;
143 res = *ptr
145 Will get turned into (if type1 and type2 are the same size
146 and neither has volatile on them):
147 res = VIEW_CONVERT_EXPR<type1>(type2var)
149 Or
151 ptr = &x[0];
152 ptr2 = ptr + <constant>;
154 Will get turned into
156 ptr2 = &x[constant/elementsize];
158 Or
160 ptr = &x[0];
161 offset = index * element_size;
162 offset_p = (pointer) offset;
163 ptr2 = ptr + offset_p
165 Will get turned into:
167 ptr2 = &x[index];
170 ssa = (int) decl
171 res = ssa & 1
173 Provided that decl has known alignment >= 2, will get turned into
175 res = 0
177 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
178 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
179 {NOT_EXPR,NEG_EXPR}.
181 This will (of course) be extended as other needs arise. */
183 static bool forward_propagate_addr_expr (tree, tree, bool);
185 /* Set to true if we delete dead edges during the optimization. */
186 static bool cfg_changed;
188 static tree rhs_to_tree (tree type, gimple *stmt);
190 static bitmap to_purge;
192 /* Const-and-copy lattice. */
193 static vec<tree> lattice;
195 /* Set the lattice entry for NAME to VAL. */
196 static void
197 fwprop_set_lattice_val (tree name, tree val)
199 if (TREE_CODE (name) == SSA_NAME)
201 if (SSA_NAME_VERSION (name) >= lattice.length ())
203 lattice.reserve (num_ssa_names - lattice.length ());
204 lattice.quick_grow_cleared (num_ssa_names);
206 lattice[SSA_NAME_VERSION (name)] = val;
210 /* Invalidate the lattice entry for NAME, done when releasing SSA names. */
211 static void
212 fwprop_invalidate_lattice (tree name)
214 if (name
215 && TREE_CODE (name) == SSA_NAME
216 && SSA_NAME_VERSION (name) < lattice.length ())
217 lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
221 /* Get the statement we can propagate from into NAME skipping
222 trivial copies. Returns the statement which defines the
223 propagation source or NULL_TREE if there is no such one.
224 If SINGLE_USE_ONLY is set considers only sources which have
225 a single use chain up to NAME. If SINGLE_USE_P is non-null,
226 it is set to whether the chain to NAME is a single use chain
227 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
229 static gimple *
230 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
232 bool single_use = true;
234 do {
235 gimple *def_stmt = SSA_NAME_DEF_STMT (name);
237 if (!has_single_use (name))
239 single_use = false;
240 if (single_use_only)
241 return NULL;
244 /* If name is defined by a PHI node or is the default def, bail out. */
245 if (!is_gimple_assign (def_stmt))
246 return NULL;
248 /* If def_stmt is a simple copy, continue looking. */
249 if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
250 name = gimple_assign_rhs1 (def_stmt);
251 else
253 if (!single_use_only && single_use_p)
254 *single_use_p = single_use;
256 return def_stmt;
258 } while (1);
261 /* Checks if the destination ssa name in DEF_STMT can be used as
262 propagation source. Returns true if so, otherwise false. */
264 static bool
265 can_propagate_from (gimple *def_stmt)
267 gcc_assert (is_gimple_assign (def_stmt));
269 /* If the rhs has side-effects we cannot propagate from it. */
270 if (gimple_has_volatile_ops (def_stmt))
271 return false;
273 /* If the rhs is a load we cannot propagate from it. */
274 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
275 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
276 return false;
278 /* Constants can be always propagated. */
279 if (gimple_assign_single_p (def_stmt)
280 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
281 return true;
283 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
284 if (stmt_references_abnormal_ssa_name (def_stmt))
285 return false;
287 /* If the definition is a conversion of a pointer to a function type,
288 then we cannot apply optimizations as some targets require
289 function pointers to be canonicalized and in this case this
290 optimization could eliminate a necessary canonicalization. */
291 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
293 tree rhs = gimple_assign_rhs1 (def_stmt);
294 if (FUNCTION_POINTER_TYPE_P (TREE_TYPE (rhs)))
295 return false;
298 return true;
301 /* Remove a chain of dead statements starting at the definition of
302 NAME. The chain is linked via the first operand of the defining statements.
303 If NAME was replaced in its only use then this function can be used
304 to clean up dead stmts. The function handles already released SSA
305 names gracefully.
306 Returns true if cleanup-cfg has to run. */
308 static bool
309 remove_prop_source_from_use (tree name)
311 gimple_stmt_iterator gsi;
312 gimple *stmt;
313 bool cfg_changed = false;
315 do {
316 basic_block bb;
318 if (SSA_NAME_IN_FREE_LIST (name)
319 || SSA_NAME_IS_DEFAULT_DEF (name)
320 || !has_zero_uses (name))
321 return cfg_changed;
323 stmt = SSA_NAME_DEF_STMT (name);
324 if (gimple_code (stmt) == GIMPLE_PHI
325 || gimple_has_side_effects (stmt))
326 return cfg_changed;
328 bb = gimple_bb (stmt);
329 gsi = gsi_for_stmt (stmt);
330 unlink_stmt_vdef (stmt);
331 if (gsi_remove (&gsi, true))
332 bitmap_set_bit (to_purge, bb->index);
333 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
334 release_defs (stmt);
336 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
337 } while (name && TREE_CODE (name) == SSA_NAME);
339 return cfg_changed;
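/* A hypothetical illustration (SSA names are arbitrary): once the single
   use of t_2 in

     t_1 = a_3 > b_4;
     t_2 = t_1;
     if (t_2) ...

   has been replaced by the comparison itself, remove_prop_source_from_use
   (t_2) deletes the now-dead copy and then, following the rhs1 chain, the
   comparison definition as well.  */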
342 /* Return the rhs of a gassign *STMT in a form of a single tree,
343 converted to type TYPE.
345 This should disappear, but is needed so we can combine expressions and use
346 the fold() interfaces. Long term, we need to develop folding and combine
347 routines that deal with gimple exclusively. */
349 static tree
350 rhs_to_tree (tree type, gimple *stmt)
352 location_t loc = gimple_location (stmt);
353 enum tree_code code = gimple_assign_rhs_code (stmt);
354 switch (get_gimple_rhs_class (code))
356 case GIMPLE_TERNARY_RHS:
357 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
358 gimple_assign_rhs2 (stmt),
359 gimple_assign_rhs3 (stmt));
360 case GIMPLE_BINARY_RHS:
361 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
362 gimple_assign_rhs2 (stmt));
363 case GIMPLE_UNARY_RHS:
364 return build1 (code, type, gimple_assign_rhs1 (stmt));
365 case GIMPLE_SINGLE_RHS:
366 return gimple_assign_rhs1 (stmt);
367 default:
368 gcc_unreachable ();
372 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
373 the folded result in a form suitable for COND_EXPR_COND or
374 NULL_TREE, if there is no suitable simplified form. If
375 INVARIANT_ONLY is true only gimple_min_invariant results are
376 considered simplified. */
378 static tree
379 combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
380 tree op0, tree op1, bool invariant_only)
382 tree t;
384 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
386 fold_defer_overflow_warnings ();
387 t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
388 if (!t)
390 fold_undefer_overflow_warnings (false, NULL, 0);
391 return NULL_TREE;
394 /* Require that we got a boolean type out if we put one in. */
395 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
397 /* Canonicalize the combined condition for use in a COND_EXPR. */
398 t = canonicalize_cond_expr_cond (t);
400 /* Bail out if we required an invariant but didn't get one. */
401 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
403 fold_undefer_overflow_warnings (false, NULL, 0);
404 return NULL_TREE;
407 bool nowarn = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
408 fold_undefer_overflow_warnings (!nowarn, stmt, 0);
410 return t;
413 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
414 of its operand. Return a new comparison tree or NULL_TREE if there
415 were no simplifying combines. */
417 static tree
418 forward_propagate_into_comparison_1 (gimple *stmt,
419 enum tree_code code, tree type,
420 tree op0, tree op1)
422 tree tmp = NULL_TREE;
423 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
424 bool single_use0_p = false, single_use1_p = false;
426 /* For comparisons use the first operand, that is likely to
427 simplify comparisons against constants. */
428 if (TREE_CODE (op0) == SSA_NAME)
430 gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
431 if (def_stmt && can_propagate_from (def_stmt))
433 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
434 bool invariant_only_p = !single_use0_p;
436 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
438 /* Always combine comparisons or conversions from booleans. */
439 if (TREE_CODE (op1) == INTEGER_CST
440 && ((CONVERT_EXPR_CODE_P (def_code)
441 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
442 == BOOLEAN_TYPE)
443 || TREE_CODE_CLASS (def_code) == tcc_comparison))
444 invariant_only_p = false;
446 tmp = combine_cond_expr_cond (stmt, code, type,
447 rhs0, op1, invariant_only_p);
448 if (tmp)
449 return tmp;
453 /* If that wasn't successful, try the second operand. */
454 if (TREE_CODE (op1) == SSA_NAME)
456 gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
457 if (def_stmt && can_propagate_from (def_stmt))
459 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
460 tmp = combine_cond_expr_cond (stmt, code, type,
461 op0, rhs1, !single_use1_p);
462 if (tmp)
463 return tmp;
467 /* If that wasn't successful either, try both operands. */
468 if (rhs0 != NULL_TREE
469 && rhs1 != NULL_TREE)
470 tmp = combine_cond_expr_cond (stmt, code, type,
471 rhs0, rhs1,
472 !(single_use0_p && single_use1_p));
474 return tmp;
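/* A minimal sketch of the effect (hypothetical GIMPLE):

     t_1 = a_2 + 1;
     cmp_3 = t_1 == 5;

   combines, via the fold machinery, into

     cmp_3 = a_2 == 4;

   when t_1 has a single use; with multiple uses only invariant results
   are accepted.  */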
477 /* Propagate from the ssa name definition statements of the assignment
478 from a comparison at *GSI into the conditional if that simplifies it.
479 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
480 otherwise returns 0. */
482 static int
483 forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
485 gimple *stmt = gsi_stmt (*gsi);
486 tree tmp;
487 bool cfg_changed = false;
488 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
489 tree rhs1 = gimple_assign_rhs1 (stmt);
490 tree rhs2 = gimple_assign_rhs2 (stmt);
492 /* Combine the comparison with defining statements. */
493 tmp = forward_propagate_into_comparison_1 (stmt,
494 gimple_assign_rhs_code (stmt),
495 type, rhs1, rhs2);
496 if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
498 gimple_assign_set_rhs_from_tree (gsi, tmp);
499 fold_stmt (gsi);
500 update_stmt (gsi_stmt (*gsi));
502 if (TREE_CODE (rhs1) == SSA_NAME)
503 cfg_changed |= remove_prop_source_from_use (rhs1);
504 if (TREE_CODE (rhs2) == SSA_NAME)
505 cfg_changed |= remove_prop_source_from_use (rhs2);
506 return cfg_changed ? 2 : 1;
509 return 0;
512 /* Propagate from the ssa name definition statements of COND_EXPR
513 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
514 Returns zero if no statement was changed, one if there were
515 changes and two if cfg_cleanup needs to run. */
517 static int
518 forward_propagate_into_gimple_cond (gcond *stmt)
520 tree tmp;
521 enum tree_code code = gimple_cond_code (stmt);
522 bool cfg_changed = false;
523 tree rhs1 = gimple_cond_lhs (stmt);
524 tree rhs2 = gimple_cond_rhs (stmt);
526 /* We can do tree combining on SSA_NAME and comparison expressions. */
527 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
528 return 0;
530 tmp = forward_propagate_into_comparison_1 (stmt, code,
531 boolean_type_node,
532 rhs1, rhs2);
533 if (tmp
534 && is_gimple_condexpr_for_cond (tmp))
536 if (dump_file)
538 fprintf (dump_file, " Replaced '");
539 print_gimple_expr (dump_file, stmt, 0);
540 fprintf (dump_file, "' with '");
541 print_generic_expr (dump_file, tmp);
542 fprintf (dump_file, "'\n");
545 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
546 update_stmt (stmt);
548 if (TREE_CODE (rhs1) == SSA_NAME)
549 cfg_changed |= remove_prop_source_from_use (rhs1);
550 if (TREE_CODE (rhs2) == SSA_NAME)
551 cfg_changed |= remove_prop_source_from_use (rhs2);
552 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
555 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
556 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
557 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
558 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
559 && ((code == EQ_EXPR
560 && integer_zerop (rhs2))
561 || (code == NE_EXPR
562 && integer_onep (rhs2))))
564 basic_block bb = gimple_bb (stmt);
565 gimple_cond_set_code (stmt, NE_EXPR);
566 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
567 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
568 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
569 return 1;
572 return 0;
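/* For instance, a condition of the form

     if (b_1 == 0)     <-- b_1 of BOOLEAN_TYPE

   is canonicalized by the code above into

     if (b_1 != 0)

   with the true and false edge flags swapped, so later passes only need
   to recognize one form.  (Illustrative GIMPLE only.)  */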
575 /* We've just substituted an ADDR_EXPR into stmt. Update all the
576 relevant data structures to match. */
578 static void
579 tidy_after_forward_propagate_addr (gimple *stmt)
581 /* We may have turned a trapping insn into a non-trapping insn. */
582 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
583 bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
585 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
586 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
589 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
590 ADDR_EXPR <whatever>.
592 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
593 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
594 node or for recovery of array indexing from pointer arithmetic.
596 Return true if the propagation was successful (the propagation may
597 be only partially successful, yet things may still have been changed). */
599 static bool
600 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
601 gimple_stmt_iterator *use_stmt_gsi,
602 bool single_use_p)
604 tree lhs, rhs, rhs2, array_ref;
605 gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
606 enum tree_code rhs_code;
607 bool res = true;
609 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
611 lhs = gimple_assign_lhs (use_stmt);
612 rhs_code = gimple_assign_rhs_code (use_stmt);
613 rhs = gimple_assign_rhs1 (use_stmt);
615 /* Do not perform copy-propagation but recurse through copy chains. */
616 if (TREE_CODE (lhs) == SSA_NAME
617 && rhs_code == SSA_NAME)
618 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
620 /* The use statement could be a conversion. Recurse to the uses of the
621 lhs as copyprop does not copy through pointer to integer to pointer
622 conversions and FRE does not catch all cases either.
623 Treat the case of a single-use name and
624 a conversion to def_rhs type separate, though. */
625 if (TREE_CODE (lhs) == SSA_NAME
626 && CONVERT_EXPR_CODE_P (rhs_code))
628 /* If there is a point in a conversion chain where the types match
629 so we can remove a conversion re-materialize the address here
630 and stop. */
631 if (single_use_p
632 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
634 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
635 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
636 return true;
639 /* Else recurse if the conversion preserves the address value. */
640 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
641 || POINTER_TYPE_P (TREE_TYPE (lhs)))
642 && (TYPE_PRECISION (TREE_TYPE (lhs))
643 >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
644 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
646 return false;
649 /* If this isn't a conversion chain from this on we only can propagate
650 into compatible pointer contexts. */
651 if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
652 return false;
654 /* Propagate through constant pointer adjustments. */
655 if (TREE_CODE (lhs) == SSA_NAME
656 && rhs_code == POINTER_PLUS_EXPR
657 && rhs == name
658 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
660 tree new_def_rhs;
661 /* As we come here with non-invariant addresses in def_rhs we need
662 to make sure we can build a valid constant offsetted address
663 for further propagation. Simply rely on fold building that
664 and check after the fact. */
665 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
666 def_rhs,
667 fold_convert (ptr_type_node,
668 gimple_assign_rhs2 (use_stmt)));
669 if (TREE_CODE (new_def_rhs) == MEM_REF
670 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
671 return false;
672 new_def_rhs = build1 (ADDR_EXPR, TREE_TYPE (rhs), new_def_rhs);
674 /* Recurse. If we could propagate into all uses of lhs do not
675 bother to replace into the current use but just pretend we did. */
676 if (forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
677 return true;
679 if (useless_type_conversion_p (TREE_TYPE (lhs),
680 TREE_TYPE (new_def_rhs)))
681 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
682 new_def_rhs);
683 else if (is_gimple_min_invariant (new_def_rhs))
684 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
685 else
686 return false;
687 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
688 update_stmt (use_stmt);
689 return true;
692 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
693 ADDR_EXPR will not appear on the LHS. */
694 tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
695 while (handled_component_p (*lhsp))
696 lhsp = &TREE_OPERAND (*lhsp, 0);
697 lhs = *lhsp;
699 /* Now see if the LHS node is a MEM_REF using NAME. If so,
700 propagate the ADDR_EXPR into the use of NAME and fold the result. */
701 if (TREE_CODE (lhs) == MEM_REF
702 && TREE_OPERAND (lhs, 0) == name)
704 tree def_rhs_base;
705 poly_int64 def_rhs_offset;
706 /* If the address is invariant we can always fold it. */
707 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
708 &def_rhs_offset)))
710 poly_offset_int off = mem_ref_offset (lhs);
711 tree new_ptr;
712 off += def_rhs_offset;
713 if (TREE_CODE (def_rhs_base) == MEM_REF)
715 off += mem_ref_offset (def_rhs_base);
716 new_ptr = TREE_OPERAND (def_rhs_base, 0);
718 else
719 new_ptr = build_fold_addr_expr (def_rhs_base);
720 TREE_OPERAND (lhs, 0) = new_ptr;
721 TREE_OPERAND (lhs, 1)
722 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
723 tidy_after_forward_propagate_addr (use_stmt);
724 /* Continue propagating into the RHS if this was not the only use. */
725 if (single_use_p)
726 return true;
728 /* If the LHS is a plain dereference and the value type is the same as
729 that of the pointed-to type of the address we can put the
730 dereferenced address on the LHS preserving the original alias-type. */
731 else if (integer_zerop (TREE_OPERAND (lhs, 1))
732 && ((gimple_assign_lhs (use_stmt) == lhs
733 && useless_type_conversion_p
734 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
735 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
736 || types_compatible_p (TREE_TYPE (lhs),
737 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
738 /* Don't forward anything into clobber stmts if it would result
739 in the lhs no longer being a MEM_REF. */
740 && (!gimple_clobber_p (use_stmt)
741 || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
743 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
744 tree new_offset, new_base, saved, new_lhs;
745 while (handled_component_p (*def_rhs_basep))
746 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
747 saved = *def_rhs_basep;
748 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
750 new_base = TREE_OPERAND (*def_rhs_basep, 0);
751 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
752 TREE_OPERAND (*def_rhs_basep, 1));
754 else
756 new_base = build_fold_addr_expr (*def_rhs_basep);
757 new_offset = TREE_OPERAND (lhs, 1);
759 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
760 new_base, new_offset);
761 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
762 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
763 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
764 new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
765 *lhsp = new_lhs;
766 TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
767 TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
768 *def_rhs_basep = saved;
769 tidy_after_forward_propagate_addr (use_stmt);
770 /* Continue propagating into the RHS if this was not the
771 only use. */
772 if (single_use_p)
773 return true;
775 else
776 /* We can have a struct assignment dereferencing our name twice.
777 Note that we didn't propagate into the lhs to not falsely
778 claim we did when propagating into the rhs. */
779 res = false;
782 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
783 nodes from the RHS. */
784 tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
785 if (TREE_CODE (*rhsp) == ADDR_EXPR)
786 rhsp = &TREE_OPERAND (*rhsp, 0);
787 while (handled_component_p (*rhsp))
788 rhsp = &TREE_OPERAND (*rhsp, 0);
789 rhs = *rhsp;
791 /* Now see if the RHS node is a MEM_REF using NAME. If so,
792 propagate the ADDR_EXPR into the use of NAME and fold the result. */
793 if (TREE_CODE (rhs) == MEM_REF
794 && TREE_OPERAND (rhs, 0) == name)
796 tree def_rhs_base;
797 poly_int64 def_rhs_offset;
798 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
799 &def_rhs_offset)))
801 poly_offset_int off = mem_ref_offset (rhs);
802 tree new_ptr;
803 off += def_rhs_offset;
804 if (TREE_CODE (def_rhs_base) == MEM_REF)
806 off += mem_ref_offset (def_rhs_base);
807 new_ptr = TREE_OPERAND (def_rhs_base, 0);
809 else
810 new_ptr = build_fold_addr_expr (def_rhs_base);
811 TREE_OPERAND (rhs, 0) = new_ptr;
812 TREE_OPERAND (rhs, 1)
813 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
814 fold_stmt_inplace (use_stmt_gsi);
815 tidy_after_forward_propagate_addr (use_stmt);
816 return res;
818 /* If the RHS is a plain dereference and the value type is the same as
819 that of the pointed-to type of the address we can put the
820 dereferenced address on the RHS preserving the original alias-type. */
821 else if (integer_zerop (TREE_OPERAND (rhs, 1))
822 && ((gimple_assign_rhs1 (use_stmt) == rhs
823 && useless_type_conversion_p
824 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
825 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
826 || types_compatible_p (TREE_TYPE (rhs),
827 TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
829 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
830 tree new_offset, new_base, saved, new_rhs;
831 while (handled_component_p (*def_rhs_basep))
832 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
833 saved = *def_rhs_basep;
834 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
836 new_base = TREE_OPERAND (*def_rhs_basep, 0);
837 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
838 TREE_OPERAND (*def_rhs_basep, 1));
840 else
842 new_base = build_fold_addr_expr (*def_rhs_basep);
843 new_offset = TREE_OPERAND (rhs, 1);
845 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
846 new_base, new_offset);
847 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
848 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
849 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
850 new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
851 *rhsp = new_rhs;
852 TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
853 TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
854 *def_rhs_basep = saved;
855 fold_stmt_inplace (use_stmt_gsi);
856 tidy_after_forward_propagate_addr (use_stmt);
857 return res;
861 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
862 is nothing to do. */
863 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
864 || gimple_assign_rhs1 (use_stmt) != name)
865 return false;
867 /* The remaining cases are all for turning pointer arithmetic into
868 array indexing. They only apply when we have the address of
869 element zero in an array. If that is not the case then there
870 is nothing to do. */
871 array_ref = TREE_OPERAND (def_rhs, 0);
872 if ((TREE_CODE (array_ref) != ARRAY_REF
873 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
874 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
875 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
876 return false;
878 rhs2 = gimple_assign_rhs2 (use_stmt);
879 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
880 if (TREE_CODE (rhs2) == INTEGER_CST)
882 tree new_rhs = build1_loc (gimple_location (use_stmt),
883 ADDR_EXPR, TREE_TYPE (def_rhs),
884 fold_build2 (MEM_REF,
885 TREE_TYPE (TREE_TYPE (def_rhs)),
886 unshare_expr (def_rhs),
887 fold_convert (ptr_type_node,
888 rhs2)));
889 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
890 use_stmt = gsi_stmt (*use_stmt_gsi);
891 update_stmt (use_stmt);
892 tidy_after_forward_propagate_addr (use_stmt);
893 return true;
896 return false;
899 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
901 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
902 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
903 node or for recovery of array indexing from pointer arithmetic.
905 PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
906 the single use in the previous invocation. Pass true when calling
907 this as toplevel.
909 Returns true, if all uses have been propagated into. */
911 static bool
912 forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
914 imm_use_iterator iter;
915 gimple *use_stmt;
916 bool all = true;
917 bool single_use_p = parent_single_use_p && has_single_use (name);
919 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
921 bool result;
922 tree use_rhs;
924 /* If the use is not in a simple assignment statement, then
925 there is nothing we can do. */
926 if (!is_gimple_assign (use_stmt))
928 if (!is_gimple_debug (use_stmt))
929 all = false;
930 continue;
933 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
934 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
935 single_use_p);
936 /* If the use has moved to a different statement adjust
937 the update machinery for the old statement too. */
938 if (use_stmt != gsi_stmt (gsi))
940 update_stmt (use_stmt);
941 use_stmt = gsi_stmt (gsi);
943 update_stmt (use_stmt);
944 all &= result;
946 /* Remove intermediate now unused copy and conversion chains. */
947 use_rhs = gimple_assign_rhs1 (use_stmt);
948 if (result
949 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
950 && TREE_CODE (use_rhs) == SSA_NAME
951 && has_zero_uses (gimple_assign_lhs (use_stmt)))
953 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
954 fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
955 release_defs (use_stmt);
956 gsi_remove (&gsi, true);
960 return all && has_zero_uses (name);
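/* A sketch of the pointer-arithmetic case (hypothetical input, 4-byte
   ints assumed):

     ptr_1 = &a[0];
     ptr2_2 = ptr_1 + 8;

   is rewritten so that ptr2_2 takes the offsetted address directly
   (conceptually &a[2]), and the definition of ptr_1 can become dead once
   all of its uses have been propagated into.  */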
964 /* Helper function for simplify_gimple_switch. Remove case labels that
965 have values outside the range of the new type. */
967 static void
968 simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
970 unsigned int branch_num = gimple_switch_num_labels (stmt);
971 auto_vec<tree> labels (branch_num);
972 unsigned int i, len;
974 /* Collect the existing case labels in a VEC, and preprocess it as if
975 we are gimplifying a GENERIC SWITCH_EXPR. */
976 for (i = 1; i < branch_num; i++)
977 labels.quick_push (gimple_switch_label (stmt, i));
978 preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
980 /* If any labels were removed, replace the existing case labels
981 in the GIMPLE_SWITCH statement with the correct ones.
982 Note that the type updates were done in-place on the case labels,
983 so we only have to replace the case labels in the GIMPLE_SWITCH
984 if the number of labels changed. */
985 len = labels.length ();
986 if (len < branch_num - 1)
988 bitmap target_blocks;
989 edge_iterator ei;
990 edge e;
992 /* Corner case: *all* case labels have been removed as being
993 out-of-range for INDEX_TYPE. Push one label and let the
994 CFG cleanups deal with this further. */
995 if (len == 0)
997 tree label, elt;
999 label = CASE_LABEL (gimple_switch_default_label (stmt));
1000 elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
1001 labels.quick_push (elt);
1002 len = 1;
1005 for (i = 0; i < labels.length (); i++)
1006 gimple_switch_set_label (stmt, i + 1, labels[i]);
1007 for (i++ ; i < branch_num; i++)
1008 gimple_switch_set_label (stmt, i, NULL_TREE);
1009 gimple_switch_set_num_labels (stmt, len + 1);
1011 /* Cleanup any edges that are now dead. */
1012 target_blocks = BITMAP_ALLOC (NULL);
1013 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
1015 tree elt = gimple_switch_label (stmt, i);
1016 basic_block target = label_to_block (cfun, CASE_LABEL (elt));
1017 bitmap_set_bit (target_blocks, target->index);
1019 for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
1021 if (! bitmap_bit_p (target_blocks, e->dest->index))
1023 remove_edge (e);
1024 cfg_changed = true;
1025 free_dominance_info (CDI_DOMINATORS);
1027 else
1028 ei_next (&ei);
1030 BITMAP_FREE (target_blocks);
1034 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1035 the condition which we may be able to optimize better. */
1037 static bool
1038 simplify_gimple_switch (gswitch *stmt)
1040 /* The optimization that we really care about is removing unnecessary
1041 casts. That will let us do much better in propagating the inferred
1042 constant at the switch target. */
1043 tree cond = gimple_switch_index (stmt);
1044 if (TREE_CODE (cond) == SSA_NAME)
1046 gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
1047 if (gimple_assign_cast_p (def_stmt))
1049 tree def = gimple_assign_rhs1 (def_stmt);
1050 if (TREE_CODE (def) != SSA_NAME)
1051 return false;
1053 /* If we have an extension or sign-change that preserves the
1054 values we check against then we can copy the source value into
1055 the switch. */
1056 tree ti = TREE_TYPE (def);
1057 if (INTEGRAL_TYPE_P (ti)
1058 && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
1060 size_t n = gimple_switch_num_labels (stmt);
1061 tree min = NULL_TREE, max = NULL_TREE;
1062 if (n > 1)
1064 min = CASE_LOW (gimple_switch_label (stmt, 1));
1065 if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
1066 max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
1067 else
1068 max = CASE_LOW (gimple_switch_label (stmt, n - 1));
1070 if ((!min || int_fits_type_p (min, ti))
1071 && (!max || int_fits_type_p (max, ti)))
1073 gimple_switch_set_index (stmt, def);
1074 simplify_gimple_switch_label_vec (stmt, ti);
1075 update_stmt (stmt);
1076 return true;
1082 return false;
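/* A sketch of the cast removal (hypothetical input):

     _1 = (int) c_2;            <-- c_2 has type unsigned char
     switch (_1) { case 'a': ... case 'z': ... default: ... }

   All case values fit in unsigned char, so the switch index is replaced
   by c_2 and the conversion becomes dead.  */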
1085 /* For pointers p2 and p1 return p2 - p1 if the
1086 difference is known and constant, otherwise return NULL. */
1088 static tree
1089 constant_pointer_difference (tree p1, tree p2)
1091 int i, j;
1092 #define CPD_ITERATIONS 5
1093 tree exps[2][CPD_ITERATIONS];
1094 tree offs[2][CPD_ITERATIONS];
1095 int cnt[2];
1097 for (i = 0; i < 2; i++)
1099 tree p = i ? p1 : p2;
1100 tree off = size_zero_node;
1101 gimple *stmt;
1102 enum tree_code code;
1104 /* For each of p1 and p2 we need to iterate at least
1105 twice, to handle ADDR_EXPR directly in p1/p2,
1106 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1107 on definition's stmt RHS. Iterate a few extra times. */
1108 j = 0;
1111 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1112 break;
1113 if (TREE_CODE (p) == ADDR_EXPR)
1115 tree q = TREE_OPERAND (p, 0);
1116 poly_int64 offset;
1117 tree base = get_addr_base_and_unit_offset (q, &offset);
1118 if (base)
1120 q = base;
1121 if (maybe_ne (offset, 0))
1122 off = size_binop (PLUS_EXPR, off, size_int (offset));
1124 if (TREE_CODE (q) == MEM_REF
1125 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1127 p = TREE_OPERAND (q, 0);
1128 off = size_binop (PLUS_EXPR, off,
1129 wide_int_to_tree (sizetype,
1130 mem_ref_offset (q)));
1132 else
1134 exps[i][j] = q;
1135 offs[i][j++] = off;
1136 break;
1139 if (TREE_CODE (p) != SSA_NAME)
1140 break;
1141 exps[i][j] = p;
1142 offs[i][j++] = off;
1143 if (j == CPD_ITERATIONS)
1144 break;
1145 stmt = SSA_NAME_DEF_STMT (p);
1146 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1147 break;
1148 code = gimple_assign_rhs_code (stmt);
1149 if (code == POINTER_PLUS_EXPR)
1151 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1152 break;
1153 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1154 p = gimple_assign_rhs1 (stmt);
1156 else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
1157 p = gimple_assign_rhs1 (stmt);
1158 else
1159 break;
1161 while (1);
1162 cnt[i] = j;
1165 for (i = 0; i < cnt[0]; i++)
1166 for (j = 0; j < cnt[1]; j++)
1167 if (exps[0][i] == exps[1][j])
1168 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1170 return NULL_TREE;
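/* For example (hypothetical SSA names), with a char array buf and

     p_1 = &buf[2];
     q_2 = p_1 + 4;

   constant_pointer_difference (p_1, q_2) returns 4 (q_2 - p_1), while it
   returns NULL_TREE if the two pointers cannot be traced back to a common
   base within CPD_ITERATIONS steps.  */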
1173 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1174 Optimize
1175 memcpy (p, "abcd", 4);
1176 memset (p + 4, ' ', 3);
1177 into
1178 memcpy (p, "abcd   ", 7);
1179 call if the latter can be stored by pieces during expansion.
1181 Optimize
1182 memchr ("abcd", a, 4) == 0;
1183 or
1184 memchr ("abcd", a, 4) != 0;
1185 into
1186 (a == 'a' || a == 'b' || a == 'c' || a == 'd') == 0
1187 or
1188 (a == 'a' || a == 'b' || a == 'c' || a == 'd') != 0
1190 Also canonicalize __atomic_fetch_op (p, x, y) op x
1191 to __atomic_op_fetch (p, x, y) or
1192 __atomic_op_fetch (p, x, y) iop x
1193 to __atomic_fetch_op (p, x, y) when possible (also __sync). */
1195 static bool
1196 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1198 gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
1199 enum built_in_function other_atomic = END_BUILTINS;
1200 enum tree_code atomic_op = ERROR_MARK;
1201 tree vuse = gimple_vuse (stmt2);
1202 if (vuse == NULL)
1203 return false;
1204 stmt1 = SSA_NAME_DEF_STMT (vuse);
1206 tree res;
1208 switch (DECL_FUNCTION_CODE (callee2))
1210 case BUILT_IN_MEMCHR:
1211 if (gimple_call_num_args (stmt2) == 3
1212 && (res = gimple_call_lhs (stmt2)) != nullptr
1213 && use_in_zero_equality (res) != nullptr
1214 && CHAR_BIT == 8
1215 && BITS_PER_UNIT == 8)
1217 tree ptr = gimple_call_arg (stmt2, 0);
1218 if (TREE_CODE (ptr) != ADDR_EXPR
1219 || TREE_CODE (TREE_OPERAND (ptr, 0)) != STRING_CST)
1220 break;
1221 unsigned HOST_WIDE_INT slen
1222 = TREE_STRING_LENGTH (TREE_OPERAND (ptr, 0));
1223 /* It must be a non-empty string constant. */
1224 if (slen < 2)
1225 break;
1226 /* For -Os, only simplify strings with a single character. */
1227 if (!optimize_bb_for_speed_p (gimple_bb (stmt2))
1228 && slen > 2)
1229 break;
1230 tree size = gimple_call_arg (stmt2, 2);
1231 /* Size must be a constant which is <= UNITS_PER_WORD and
1232 <= the string length. */
1233 if (TREE_CODE (size) != INTEGER_CST)
1234 break;
1236 if (!tree_fits_uhwi_p (size))
1237 break;
1239 unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
1240 if (sz == 0 || sz > UNITS_PER_WORD || sz >= slen)
1241 break;
1243 tree ch = gimple_call_arg (stmt2, 1);
1244 location_t loc = gimple_location (stmt2);
1245 if (!useless_type_conversion_p (char_type_node,
1246 TREE_TYPE (ch)))
1247 ch = fold_convert_loc (loc, char_type_node, ch);
1248 const char *p = TREE_STRING_POINTER (TREE_OPERAND (ptr, 0));
1249 unsigned int isize = sz;
1250 tree *op = XALLOCAVEC (tree, isize);
1251 for (unsigned int i = 0; i < isize; i++)
1253 op[i] = build_int_cst (char_type_node, p[i]);
1254 op[i] = fold_build2_loc (loc, EQ_EXPR, boolean_type_node,
1255 op[i], ch);
1257 for (unsigned int i = isize - 1; i >= 1; i--)
1258 op[i - 1] = fold_convert_loc (loc, boolean_type_node,
1259 fold_build2_loc (loc,
1260 BIT_IOR_EXPR,
1261 boolean_type_node,
1262 op[i - 1],
1263 op[i]));
1264 res = fold_convert_loc (loc, TREE_TYPE (res), op[0]);
1265 gimplify_and_update_call_from_tree (gsi_p, res);
1266 return true;
1268 break;
1270 case BUILT_IN_MEMSET:
1271 if (gimple_call_num_args (stmt2) != 3
1272 || gimple_call_lhs (stmt2)
1273 || CHAR_BIT != 8
1274 || BITS_PER_UNIT != 8)
1275 break;
1276 else
1278 tree callee1;
1279 tree ptr1, src1, str1, off1, len1, lhs1;
1280 tree ptr2 = gimple_call_arg (stmt2, 0);
1281 tree val2 = gimple_call_arg (stmt2, 1);
1282 tree len2 = gimple_call_arg (stmt2, 2);
1283 tree diff, vdef, new_str_cst;
1284 gimple *use_stmt;
1285 unsigned int ptr1_align;
1286 unsigned HOST_WIDE_INT src_len;
1287 char *src_buf;
1288 use_operand_p use_p;
1290 if (!tree_fits_shwi_p (val2)
1291 || !tree_fits_uhwi_p (len2)
1292 || compare_tree_int (len2, 1024) == 1)
1293 break;
1294 if (is_gimple_call (stmt1))
1296 /* If first stmt is a call, it needs to be memcpy
1297 or mempcpy, with string literal as second argument and
1298 constant length. */
1299 callee1 = gimple_call_fndecl (stmt1);
1300 if (callee1 == NULL_TREE
1301 || !fndecl_built_in_p (callee1, BUILT_IN_NORMAL)
1302 || gimple_call_num_args (stmt1) != 3)
1303 break;
1304 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1305 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1306 break;
1307 ptr1 = gimple_call_arg (stmt1, 0);
1308 src1 = gimple_call_arg (stmt1, 1);
1309 len1 = gimple_call_arg (stmt1, 2);
1310 lhs1 = gimple_call_lhs (stmt1);
1311 if (!tree_fits_uhwi_p (len1))
1312 break;
1313 str1 = string_constant (src1, &off1, NULL, NULL);
1314 if (str1 == NULL_TREE)
1315 break;
1316 if (!tree_fits_uhwi_p (off1)
1317 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1318 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1319 - tree_to_uhwi (off1)) > 0
1320 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1321 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1322 != TYPE_MODE (char_type_node))
1323 break;
1325 else if (gimple_assign_single_p (stmt1))
1327 /* Otherwise look for length 1 memcpy optimized into
1328 assignment. */
1329 ptr1 = gimple_assign_lhs (stmt1);
1330 src1 = gimple_assign_rhs1 (stmt1);
1331 if (TREE_CODE (ptr1) != MEM_REF
1332 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1333 || !tree_fits_shwi_p (src1))
1334 break;
1335 ptr1 = build_fold_addr_expr (ptr1);
1336 STRIP_USELESS_TYPE_CONVERSION (ptr1);
1337 callee1 = NULL_TREE;
1338 len1 = size_one_node;
1339 lhs1 = NULL_TREE;
1340 off1 = size_zero_node;
1341 str1 = NULL_TREE;
1343 else
1344 break;
1346 diff = constant_pointer_difference (ptr1, ptr2);
1347 if (diff == NULL && lhs1 != NULL)
1349 diff = constant_pointer_difference (lhs1, ptr2);
1350 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1351 && diff != NULL)
1352 diff = size_binop (PLUS_EXPR, diff,
1353 fold_convert (sizetype, len1));
1355 /* If the difference between the second and first destination pointer
1356 is not constant, or is bigger than memcpy length, bail out. */
1357 if (diff == NULL
1358 || !tree_fits_uhwi_p (diff)
1359 || tree_int_cst_lt (len1, diff)
1360 || compare_tree_int (diff, 1024) == 1)
1361 break;
1363 /* Use maximum of difference plus memset length and memcpy length
1364 as the new memcpy length; if it is too big, bail out. */
1365 src_len = tree_to_uhwi (diff);
1366 src_len += tree_to_uhwi (len2);
1367 if (src_len < tree_to_uhwi (len1))
1368 src_len = tree_to_uhwi (len1);
1369 if (src_len > 1024)
1370 break;
1372 /* If the mempcpy value is used elsewhere, bail out, as mempcpy
1373 with a bigger length will return a different result. */
1374 if (lhs1 != NULL_TREE
1375 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1376 && (TREE_CODE (lhs1) != SSA_NAME
1377 || !single_imm_use (lhs1, &use_p, &use_stmt)
1378 || use_stmt != stmt2))
1379 break;
1381 /* If anything reads memory in between memcpy and memset
1382 call, the modified memcpy call might change it. */
1383 vdef = gimple_vdef (stmt1);
1384 if (vdef != NULL
1385 && (!single_imm_use (vdef, &use_p, &use_stmt)
1386 || use_stmt != stmt2))
1387 break;
1389 ptr1_align = get_pointer_alignment (ptr1);
1390 /* Construct the new source string literal. */
1391 src_buf = XALLOCAVEC (char, src_len + 1);
1392 if (callee1)
1393 memcpy (src_buf,
1394 TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
1395 tree_to_uhwi (len1));
1396 else
1397 src_buf[0] = tree_to_shwi (src1);
1398 memset (src_buf + tree_to_uhwi (diff),
1399 tree_to_shwi (val2), tree_to_uhwi (len2));
1400 src_buf[src_len] = '\0';
1401 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1402 handle embedded '\0's. */
1403 if (strlen (src_buf) != src_len)
1404 break;
1405 rtl_profile_for_bb (gimple_bb (stmt2));
1406 /* If the new memcpy wouldn't be emitted by storing the literal
1407 by pieces, this optimization might enlarge .rodata too much,
1408 as commonly used string literals couldn't be shared any
1409 longer. */
1410 if (!can_store_by_pieces (src_len,
1411 builtin_strncpy_read_str,
1412 src_buf, ptr1_align, false))
1413 break;
1415 new_str_cst = build_string_literal (src_len, src_buf);
1416 if (callee1)
1418 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1419 memset call. */
1420 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1421 gimple_call_set_lhs (stmt1, NULL_TREE);
1422 gimple_call_set_arg (stmt1, 1, new_str_cst);
1423 gimple_call_set_arg (stmt1, 2,
1424 build_int_cst (TREE_TYPE (len1), src_len));
1425 update_stmt (stmt1);
1426 unlink_stmt_vdef (stmt2);
1427 gsi_replace (gsi_p, gimple_build_nop (), false);
1428 fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
1429 release_defs (stmt2);
1430 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1432 fwprop_invalidate_lattice (lhs1);
1433 release_ssa_name (lhs1);
1435 return true;
1437 else
1439 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1440 assignment, remove STMT1 and change memset call into
1441 memcpy call. */
1442 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1444 if (!is_gimple_val (ptr1))
1445 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1446 true, GSI_SAME_STMT);
1447 tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
1448 gimple_call_set_fndecl (stmt2, fndecl);
1449 gimple_call_set_fntype (as_a <gcall *> (stmt2),
1450 TREE_TYPE (fndecl));
1451 gimple_call_set_arg (stmt2, 0, ptr1);
1452 gimple_call_set_arg (stmt2, 1, new_str_cst);
1453 gimple_call_set_arg (stmt2, 2,
1454 build_int_cst (TREE_TYPE (len2), src_len));
1455 unlink_stmt_vdef (stmt1);
1456 gsi_remove (&gsi, true);
1457 fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
1458 release_defs (stmt1);
1459 update_stmt (stmt2);
1460 return false;
1463 break;
1465 #define CASE_ATOMIC(NAME, OTHER, OP) \
1466 case BUILT_IN_##NAME##_1: \
1467 case BUILT_IN_##NAME##_2: \
1468 case BUILT_IN_##NAME##_4: \
1469 case BUILT_IN_##NAME##_8: \
1470 case BUILT_IN_##NAME##_16: \
1471 atomic_op = OP; \
1472 other_atomic \
1473 = (enum built_in_function) (BUILT_IN_##OTHER##_1 \
1474 + (DECL_FUNCTION_CODE (callee2) \
1475 - BUILT_IN_##NAME##_1)); \
1476 goto handle_atomic_fetch_op;
1478 CASE_ATOMIC (ATOMIC_FETCH_ADD, ATOMIC_ADD_FETCH, PLUS_EXPR)
1479 CASE_ATOMIC (ATOMIC_FETCH_SUB, ATOMIC_SUB_FETCH, MINUS_EXPR)
1480 CASE_ATOMIC (ATOMIC_FETCH_AND, ATOMIC_AND_FETCH, BIT_AND_EXPR)
1481 CASE_ATOMIC (ATOMIC_FETCH_XOR, ATOMIC_XOR_FETCH, BIT_XOR_EXPR)
1482 CASE_ATOMIC (ATOMIC_FETCH_OR, ATOMIC_OR_FETCH, BIT_IOR_EXPR)
1484 CASE_ATOMIC (SYNC_FETCH_AND_ADD, SYNC_ADD_AND_FETCH, PLUS_EXPR)
1485 CASE_ATOMIC (SYNC_FETCH_AND_SUB, SYNC_SUB_AND_FETCH, MINUS_EXPR)
1486 CASE_ATOMIC (SYNC_FETCH_AND_AND, SYNC_AND_AND_FETCH, BIT_AND_EXPR)
1487 CASE_ATOMIC (SYNC_FETCH_AND_XOR, SYNC_XOR_AND_FETCH, BIT_XOR_EXPR)
1488 CASE_ATOMIC (SYNC_FETCH_AND_OR, SYNC_OR_AND_FETCH, BIT_IOR_EXPR)
1490 CASE_ATOMIC (ATOMIC_ADD_FETCH, ATOMIC_FETCH_ADD, MINUS_EXPR)
1491 CASE_ATOMIC (ATOMIC_SUB_FETCH, ATOMIC_FETCH_SUB, PLUS_EXPR)
1492 CASE_ATOMIC (ATOMIC_XOR_FETCH, ATOMIC_FETCH_XOR, BIT_XOR_EXPR)
1494 CASE_ATOMIC (SYNC_ADD_AND_FETCH, SYNC_FETCH_AND_ADD, MINUS_EXPR)
1495 CASE_ATOMIC (SYNC_SUB_AND_FETCH, SYNC_FETCH_AND_SUB, PLUS_EXPR)
1496 CASE_ATOMIC (SYNC_XOR_AND_FETCH, SYNC_FETCH_AND_XOR, BIT_XOR_EXPR)
1498 #undef CASE_ATOMIC
1500 handle_atomic_fetch_op:
1501 if (gimple_call_num_args (stmt2) >= 2 && gimple_call_lhs (stmt2))
1503 tree lhs2 = gimple_call_lhs (stmt2), lhsc = lhs2;
1504 tree arg = gimple_call_arg (stmt2, 1);
1505 gimple *use_stmt, *cast_stmt = NULL;
1506 use_operand_p use_p;
1507 tree ndecl = builtin_decl_explicit (other_atomic);
1509 if (ndecl == NULL_TREE || !single_imm_use (lhs2, &use_p, &use_stmt))
1510 break;
1512 if (gimple_assign_cast_p (use_stmt))
1514 cast_stmt = use_stmt;
1515 lhsc = gimple_assign_lhs (cast_stmt);
1516 if (lhsc == NULL_TREE
1517 || !INTEGRAL_TYPE_P (TREE_TYPE (lhsc))
1518 || (TYPE_PRECISION (TREE_TYPE (lhsc))
1519 != TYPE_PRECISION (TREE_TYPE (lhs2)))
1520 || !single_imm_use (lhsc, &use_p, &use_stmt))
1522 use_stmt = cast_stmt;
1523 cast_stmt = NULL;
1524 lhsc = lhs2;
1528 bool ok = false;
1529 tree oarg = NULL_TREE;
1530 enum tree_code ccode = ERROR_MARK;
1531 tree crhs1 = NULL_TREE, crhs2 = NULL_TREE;
1532 if (is_gimple_assign (use_stmt)
1533 && gimple_assign_rhs_code (use_stmt) == atomic_op)
1535 if (gimple_assign_rhs1 (use_stmt) == lhsc)
1536 oarg = gimple_assign_rhs2 (use_stmt);
1537 else if (atomic_op != MINUS_EXPR)
1538 oarg = gimple_assign_rhs1 (use_stmt);
1540 else if (atomic_op == MINUS_EXPR
1541 && is_gimple_assign (use_stmt)
1542 && gimple_assign_rhs_code (use_stmt) == PLUS_EXPR
1543 && TREE_CODE (arg) == INTEGER_CST
1544 && (TREE_CODE (gimple_assign_rhs2 (use_stmt))
1545 == INTEGER_CST))
1547 tree a = fold_convert (TREE_TYPE (lhs2), arg);
1548 tree o = fold_convert (TREE_TYPE (lhs2),
1549 gimple_assign_rhs2 (use_stmt));
1550 if (wi::to_wide (a) == wi::neg (wi::to_wide (o)))
1551 ok = true;
1553 else if (atomic_op == BIT_AND_EXPR || atomic_op == BIT_IOR_EXPR)
1555 else if (gimple_code (use_stmt) == GIMPLE_COND)
1557 ccode = gimple_cond_code (use_stmt);
1558 crhs1 = gimple_cond_lhs (use_stmt);
1559 crhs2 = gimple_cond_rhs (use_stmt);
1561 else if (is_gimple_assign (use_stmt))
1563 if (gimple_assign_rhs_class (use_stmt) == GIMPLE_BINARY_RHS)
1565 ccode = gimple_assign_rhs_code (use_stmt);
1566 crhs1 = gimple_assign_rhs1 (use_stmt);
1567 crhs2 = gimple_assign_rhs2 (use_stmt);
1569 else if (gimple_assign_rhs_code (use_stmt) == COND_EXPR)
1571 tree cond = gimple_assign_rhs1 (use_stmt);
1572 if (COMPARISON_CLASS_P (cond))
1574 ccode = TREE_CODE (cond);
1575 crhs1 = TREE_OPERAND (cond, 0);
1576 crhs2 = TREE_OPERAND (cond, 1);
1580 if (ccode == EQ_EXPR || ccode == NE_EXPR)
1582 /* Deal with x - y == 0 or x ^ y == 0
1583 being optimized into x == y and x + cst == 0
1584 into x == -cst. */
1585 tree o = NULL_TREE;
1586 if (crhs1 == lhsc)
1587 o = crhs2;
1588 else if (crhs2 == lhsc)
1589 o = crhs1;
1590 if (o && atomic_op != PLUS_EXPR)
1591 oarg = o;
1592 else if (o
1593 && TREE_CODE (o) == INTEGER_CST
1594 && TREE_CODE (arg) == INTEGER_CST)
1596 tree a = fold_convert (TREE_TYPE (lhs2), arg);
1597 o = fold_convert (TREE_TYPE (lhs2), o);
1598 if (wi::to_wide (a) == wi::neg (wi::to_wide (o)))
1599 ok = true;
1602 if (oarg && !ok)
1604 if (operand_equal_p (arg, oarg, 0))
1605 ok = true;
1606 else if (TREE_CODE (arg) == SSA_NAME
1607 && TREE_CODE (oarg) == SSA_NAME)
1609 tree oarg2 = oarg;
1610 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (oarg)))
1612 gimple *g = SSA_NAME_DEF_STMT (oarg);
1613 oarg2 = gimple_assign_rhs1 (g);
1614 if (TREE_CODE (oarg2) != SSA_NAME
1615 || !INTEGRAL_TYPE_P (TREE_TYPE (oarg2))
1616 || (TYPE_PRECISION (TREE_TYPE (oarg2))
1617 != TYPE_PRECISION (TREE_TYPE (oarg))))
1618 oarg2 = oarg;
1620 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg)))
1622 gimple *g = SSA_NAME_DEF_STMT (arg);
1623 tree rhs1 = gimple_assign_rhs1 (g);
1624 /* Handle e.g.
1625 x.0_1 = (long unsigned int) x_4(D);
1626 _2 = __atomic_fetch_add_8 (&vlong, x.0_1, 0);
1627 _3 = (long int) _2;
1628 _7 = x_4(D) + _3; */
1629 if (rhs1 == oarg || rhs1 == oarg2)
1630 ok = true;
1631 /* Handle e.g.
1632 x.18_1 = (short unsigned int) x_5(D);
1633 _2 = (int) x.18_1;
1634 _3 = __atomic_fetch_xor_2 (&vshort, _2, 0);
1635 _4 = (short int) _3;
1636 _8 = x_5(D) ^ _4;
1637 This happens only for char/short. */
1638 else if (TREE_CODE (rhs1) == SSA_NAME
1639 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1640 && (TYPE_PRECISION (TREE_TYPE (rhs1))
1641 == TYPE_PRECISION (TREE_TYPE (lhs2))))
1643 g = SSA_NAME_DEF_STMT (rhs1);
1644 if (gimple_assign_cast_p (g)
1645 && (gimple_assign_rhs1 (g) == oarg
1646 || gimple_assign_rhs1 (g) == oarg2))
1647 ok = true;
1650 if (!ok && arg == oarg2)
1651 /* Handle e.g.
1652 _1 = __sync_fetch_and_add_4 (&v, x_5(D));
1653 _2 = (int) _1;
1654 x.0_3 = (int) x_5(D);
1655 _7 = _2 + x.0_3; */
1656 ok = true;
1660 if (ok)
1662 tree new_lhs = make_ssa_name (TREE_TYPE (lhs2));
1663 gimple_call_set_lhs (stmt2, new_lhs);
1664 gimple_call_set_fndecl (stmt2, ndecl);
1665 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1666 if (ccode == ERROR_MARK)
1667 gimple_assign_set_rhs_with_ops (&gsi, cast_stmt
1668 ? NOP_EXPR : SSA_NAME,
1669 new_lhs);
1670 else
1672 crhs1 = new_lhs;
1673 crhs2 = build_zero_cst (TREE_TYPE (lhs2));
1674 if (gimple_code (use_stmt) == GIMPLE_COND)
1676 gcond *cond_stmt = as_a <gcond *> (use_stmt);
1677 gimple_cond_set_lhs (cond_stmt, crhs1);
1678 gimple_cond_set_rhs (cond_stmt, crhs2);
1680 else if (gimple_assign_rhs_class (use_stmt)
1681 == GIMPLE_BINARY_RHS)
1683 gimple_assign_set_rhs1 (use_stmt, crhs1);
1684 gimple_assign_set_rhs2 (use_stmt, crhs2);
1686 else
1688 gcc_checking_assert (gimple_assign_rhs_code (use_stmt)
1689 == COND_EXPR);
1690 tree cond = build2 (ccode, boolean_type_node,
1691 crhs1, crhs2);
1692 gimple_assign_set_rhs1 (use_stmt, cond);
1695 update_stmt (use_stmt);
1696 if (atomic_op != BIT_AND_EXPR
1697 && atomic_op != BIT_IOR_EXPR
1698 && !stmt_ends_bb_p (stmt2))
1700 /* For the benefit of debug stmts, emit stmt(s) to set
1701 lhs2 to the value it had from the new builtin.
1702 E.g. if it was previously:
1703 lhs2 = __atomic_fetch_add_8 (ptr, arg, 0);
1704 emit:
1705 new_lhs = __atomic_add_fetch_8 (ptr, arg, 0);
1706 lhs2 = new_lhs - arg;
1707 We also keep cast_stmt if any in the IL for
1708 the same reasons.
1709 These stmts will be DCEd later and proper debug info
1710 will be emitted.
1711 This is only possible for reversible operations
1712 (+/-/^) and without -fnon-call-exceptions. */
1713 gsi = gsi_for_stmt (stmt2);
1714 tree type = TREE_TYPE (lhs2);
1715 if (TREE_CODE (arg) == INTEGER_CST)
1716 arg = fold_convert (type, arg);
1717 else if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
1719 tree narg = make_ssa_name (type);
1720 gimple *g = gimple_build_assign (narg, NOP_EXPR, arg);
1721 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1722 arg = narg;
1724 enum tree_code rcode;
1725 switch (atomic_op)
1727 case PLUS_EXPR: rcode = MINUS_EXPR; break;
1728 case MINUS_EXPR: rcode = PLUS_EXPR; break;
1729 case BIT_XOR_EXPR: rcode = atomic_op; break;
1730 default: gcc_unreachable ();
1732 gimple *g = gimple_build_assign (lhs2, rcode, new_lhs, arg);
1733 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1734 update_stmt (stmt2);
1736 else
1738 /* For e.g.
1739 lhs2 = __atomic_fetch_or_8 (ptr, arg, 0);
1740 after we change it to
1741 new_lhs = __atomic_or_fetch_8 (ptr, arg, 0);
1742 there is no way to find out the lhs2 value (i.e.
1743 what the atomic memory contained before the operation),
1744 values of some bits are lost. We have checked earlier
1745 that we don't have any non-debug users except for what
1746 we are already changing, so we need to reset the
1747 debug stmts and remove the cast_stmt if any. */
1748 imm_use_iterator iter;
1749 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs2)
1750 if (use_stmt != cast_stmt)
1752 gcc_assert (is_gimple_debug (use_stmt));
1753 gimple_debug_bind_reset_value (use_stmt);
1754 update_stmt (use_stmt);
1756 if (cast_stmt)
1758 gsi = gsi_for_stmt (cast_stmt);
1759 gsi_remove (&gsi, true);
1761 update_stmt (stmt2);
1762 release_ssa_name (lhs2);
1766 break;
1768 default:
1769 break;
1771 return false;
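/* Illustration of the atomic canonicalization above (hypothetical
   source-level view):

     tmp = __atomic_fetch_add (&v, n, __ATOMIC_SEQ_CST);
     if (tmp + n == 0) ...

   is rewritten to

     tmp2 = __atomic_add_fetch (&v, n, __ATOMIC_SEQ_CST);
     if (tmp2 == 0) ...

   so the separate addition disappears; the reverse rewriting is used when
   only the pre-operation value is actually needed.  */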
1774 /* Given a ssa_name in NAME see if it was defined by an assignment and
1775 set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
1776 to the second operand on the rhs. */
1778 static inline void
1779 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1781 gimple *def;
1782 enum tree_code code1;
1783 tree arg11;
1784 tree arg21;
1785 tree arg31;
1786 enum gimple_rhs_class grhs_class;
1788 code1 = TREE_CODE (name);
1789 arg11 = name;
1790 arg21 = NULL_TREE;
1791 arg31 = NULL_TREE;
1792 grhs_class = get_gimple_rhs_class (code1);
1794 if (code1 == SSA_NAME)
1796 def = SSA_NAME_DEF_STMT (name);
1798 if (def && is_gimple_assign (def)
1799 && can_propagate_from (def))
1801 code1 = gimple_assign_rhs_code (def);
1802 arg11 = gimple_assign_rhs1 (def);
1803 arg21 = gimple_assign_rhs2 (def);
1804 arg31 = gimple_assign_rhs3 (def);
1807 else if (grhs_class != GIMPLE_SINGLE_RHS)
1808 code1 = ERROR_MARK;
1810 *code = code1;
1811 *arg1 = arg11;
1812 if (arg2)
1813 *arg2 = arg21;
1814 if (arg31)
1815 *code = ERROR_MARK;
1819 /* Recognize rotation patterns. Return true if a transformation
1820 applied, otherwise return false.
1822 We are looking for X with unsigned type T with bitsize B, OP being
1823 +, | or ^, some type T2 wider than T. For:
1824 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
1825 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
1827 transform these into:
1828 X r<< CNT1
1830 Or for:
1831 (X << Y) OP (X >> (B - Y))
1832 (X << (int) Y) OP (X >> (int) (B - Y))
1833 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
1834 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
1835 (X << Y) | (X >> ((-Y) & (B - 1)))
1836 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
1837 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
1838 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
1840 transform these into (last 2 only if ranger can prove Y < B
1841 or Y = N * B):
1842 X r<< Y
1843 or
1844 X r<< (Y & (B - 1))
1845 The latter for the forms with T2 wider than T if ranger can't prove Y < B.
1847 Or for:
1848 (X << (Y & (B - 1))) | (X >> ((-Y) & (B - 1)))
1849 (X << (int) (Y & (B - 1))) | (X >> (int) ((-Y) & (B - 1)))
1850 ((T) ((T2) X << (Y & (B - 1)))) | ((T) ((T2) X >> ((-Y) & (B - 1))))
1851 ((T) ((T2) X << (int) (Y & (B - 1)))) \
1852 | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
1854 transform these into:
1855 X r<< (Y & (B - 1))
1857 Note, in the patterns with T2 type, the type of OP operands
1858 might even be a signed type, but should have precision B.
1859 Expressions with & (B - 1) should be recognized only if B is
1860 a power of 2. */
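/* As a minimal user-level sketch (the function name and the fixed 32-bit
   width are assumptions of the example, not taken from this file), one
   common source form matched here is

     unsigned int
     rotl32 (unsigned int x, unsigned int y)
     {
       return (x << y) | (x >> ((-y) & 31));
     }

   i.e. the (X << Y) | (X >> ((-Y) & (B - 1))) pattern with B == 32, which
   is well defined even for y == 0 and can become a single X r<< Y.  */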
1862 static bool
1863 simplify_rotate (gimple_stmt_iterator *gsi)
1865 gimple *stmt = gsi_stmt (*gsi);
1866 tree arg[2], rtype, rotcnt = NULL_TREE;
1867 tree def_arg1[2], def_arg2[2];
1868 enum tree_code def_code[2];
1869 tree lhs;
1870 int i;
1871 bool swapped_p = false;
1872 gimple *g;
1873 gimple *def_arg_stmt[2] = { NULL, NULL };
1874 int wider_prec = 0;
1875 bool add_masking = false;
1877 arg[0] = gimple_assign_rhs1 (stmt);
1878 arg[1] = gimple_assign_rhs2 (stmt);
1879 rtype = TREE_TYPE (arg[0]);
1881 /* Only create rotates in complete modes. Other cases are not
1882 expanded properly. */
1883 if (!INTEGRAL_TYPE_P (rtype)
1884 || !type_has_mode_precision_p (rtype))
1885 return false;
1887 for (i = 0; i < 2; i++)
1889 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1890 if (TREE_CODE (arg[i]) == SSA_NAME)
1891 def_arg_stmt[i] = SSA_NAME_DEF_STMT (arg[i]);
1894 /* Look through narrowing (or same precision) conversions. */
1895 if (CONVERT_EXPR_CODE_P (def_code[0])
1896 && CONVERT_EXPR_CODE_P (def_code[1])
1897 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
1898 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
1899 && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
1900 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
1901 && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) >= TYPE_PRECISION (rtype)
1902 && has_single_use (arg[0])
1903 && has_single_use (arg[1]))
1905 wider_prec = TYPE_PRECISION (TREE_TYPE (def_arg1[0]));
1906 for (i = 0; i < 2; i++)
1908 arg[i] = def_arg1[i];
1909 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1910 if (TREE_CODE (arg[i]) == SSA_NAME)
1911 def_arg_stmt[i] = SSA_NAME_DEF_STMT (arg[i]);
1914 else
1916 /* Handle signed rotate; the RSHIFT_EXPR has to be done
1917 in unsigned type but LSHIFT_EXPR could be signed. */
1918 i = (def_code[0] == LSHIFT_EXPR || def_code[0] == RSHIFT_EXPR);
1919 if (CONVERT_EXPR_CODE_P (def_code[i])
1920 && (def_code[1 - i] == LSHIFT_EXPR || def_code[1 - i] == RSHIFT_EXPR)
1921 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[i]))
1922 && TYPE_PRECISION (rtype) == TYPE_PRECISION (TREE_TYPE (def_arg1[i]))
1923 && has_single_use (arg[i]))
1925 arg[i] = def_arg1[i];
1926 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1927 if (TREE_CODE (arg[i]) == SSA_NAME)
1928 def_arg_stmt[i] = SSA_NAME_DEF_STMT (arg[i]);
1932 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
1933 for (i = 0; i < 2; i++)
1934 if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
1935 return false;
1936 else if (!has_single_use (arg[i]))
1937 return false;
1938 if (def_code[0] == def_code[1])
1939 return false;
1941 /* If we've looked through narrowing conversions before, look through
1942 widening conversions from unsigned type with the same precision
1943 as rtype here. */
1944 if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
1945 for (i = 0; i < 2; i++)
1947 tree tem;
1948 enum tree_code code;
1949 defcodefor_name (def_arg1[i], &code, &tem, NULL);
1950 if (!CONVERT_EXPR_CODE_P (code)
1951 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
1952 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
1953 return false;
1954 def_arg1[i] = tem;
1956 /* Both shifts have to use the same first operand. */
1957 if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
1958 || !types_compatible_p (TREE_TYPE (def_arg1[0]),
1959 TREE_TYPE (def_arg1[1])))
1961 if ((TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
1962 != TYPE_PRECISION (TREE_TYPE (def_arg1[1])))
1963 || (TYPE_UNSIGNED (TREE_TYPE (def_arg1[0]))
1964 == TYPE_UNSIGNED (TREE_TYPE (def_arg1[1]))))
1965 return false;
1967 /* Handle signed rotate; the RSHIFT_EXPR has to be done
1968 in unsigned type but LSHIFT_EXPR could be signed. */
1969 i = def_code[0] != RSHIFT_EXPR;
1970 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[i])))
1971 return false;
1973 tree tem;
1974 enum tree_code code;
1975 defcodefor_name (def_arg1[i], &code, &tem, NULL);
1976 if (!CONVERT_EXPR_CODE_P (code)
1977 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
1978 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
1979 return false;
1980 def_arg1[i] = tem;
1981 if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
1982 || !types_compatible_p (TREE_TYPE (def_arg1[0]),
1983 TREE_TYPE (def_arg1[1])))
1984 return false;
1986 else if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
1987 return false;
1989 /* CNT1 + CNT2 == B case above. */
1990 if (tree_fits_uhwi_p (def_arg2[0])
1991 && tree_fits_uhwi_p (def_arg2[1])
1992 && tree_to_uhwi (def_arg2[0])
1993 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
1994 rotcnt = def_arg2[0];
1995 else if (TREE_CODE (def_arg2[0]) != SSA_NAME
1996 || TREE_CODE (def_arg2[1]) != SSA_NAME)
1997 return false;
1998 else
2000 tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
2001 enum tree_code cdef_code[2];
2002 gimple *def_arg_alt_stmt[2] = { NULL, NULL };
2003 int check_range = 0;
2004 gimple *check_range_stmt = NULL;
2005 /* Look through conversion of the shift count argument.
2006 The C/C++ FE casts any shift count argument to integer_type_node.
2007 The only problem might be if the shift count type maximum value
2008 is equal to or smaller than the number of bits in rtype. */
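/* E.g. in hypothetical user code such as

     unsigned int rot (unsigned int x, unsigned char n)
     {
       return (x << n) | (x >> ((-n) & 31));
     }

   the shift counts reach GIMPLE only through integer promotions of N,
   which is why the conversions are looked through below.  */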
2009 for (i = 0; i < 2; i++)
2011 def_arg2_alt[i] = def_arg2[i];
2012 defcodefor_name (def_arg2[i], &cdef_code[i],
2013 &cdef_arg1[i], &cdef_arg2[i]);
2014 if (CONVERT_EXPR_CODE_P (cdef_code[i])
2015 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
2016 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2017 > floor_log2 (TYPE_PRECISION (rtype))
2018 && type_has_mode_precision_p (TREE_TYPE (cdef_arg1[i])))
2020 def_arg2_alt[i] = cdef_arg1[i];
2021 if (TREE_CODE (def_arg2[i]) == SSA_NAME)
2022 def_arg_alt_stmt[i] = SSA_NAME_DEF_STMT (def_arg2[i]);
2023 defcodefor_name (def_arg2_alt[i], &cdef_code[i],
2024 &cdef_arg1[i], &cdef_arg2[i]);
2026 else
2027 def_arg_alt_stmt[i] = def_arg_stmt[i];
2029 for (i = 0; i < 2; i++)
2030 /* Check for one shift count being Y and the other B - Y,
2031 with optional casts. */
2032 if (cdef_code[i] == MINUS_EXPR
2033 && tree_fits_shwi_p (cdef_arg1[i])
2034 && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
2035 && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
2037 tree tem;
2038 enum tree_code code;
2040 if (cdef_arg2[i] == def_arg2[1 - i]
2041 || cdef_arg2[i] == def_arg2_alt[1 - i])
2043 rotcnt = cdef_arg2[i];
2044 check_range = -1;
2045 if (cdef_arg2[i] == def_arg2[1 - i])
2046 check_range_stmt = def_arg_stmt[1 - i];
2047 else
2048 check_range_stmt = def_arg_alt_stmt[1 - i];
2049 break;
2051 defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
2052 if (CONVERT_EXPR_CODE_P (code)
2053 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2054 && TYPE_PRECISION (TREE_TYPE (tem))
2055 > floor_log2 (TYPE_PRECISION (rtype))
2056 && type_has_mode_precision_p (TREE_TYPE (tem))
2057 && (tem == def_arg2[1 - i]
2058 || tem == def_arg2_alt[1 - i]))
2060 rotcnt = tem;
2061 check_range = -1;
2062 if (tem == def_arg2[1 - i])
2063 check_range_stmt = def_arg_stmt[1 - i];
2064 else
2065 check_range_stmt = def_arg_alt_stmt[1 - i];
2066 break;
2069 /* The above sequence isn't safe for Y being 0,
2070 because then one of the shifts triggers undefined behavior.
2071 This alternative is safe even for a rotation count of 0.
2072 One shift count is Y and the other (-Y) & (B - 1).
2073 Or one shift count is Y & (B - 1) and the other (-Y) & (B - 1). */
2074 else if (cdef_code[i] == BIT_AND_EXPR
2075 && pow2p_hwi (TYPE_PRECISION (rtype))
2076 && tree_fits_shwi_p (cdef_arg2[i])
2077 && tree_to_shwi (cdef_arg2[i])
2078 == TYPE_PRECISION (rtype) - 1
2079 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
2080 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
2082 tree tem;
2083 enum tree_code code;
2085 defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
2086 if (CONVERT_EXPR_CODE_P (code)
2087 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2088 && TYPE_PRECISION (TREE_TYPE (tem))
2089 > floor_log2 (TYPE_PRECISION (rtype))
2090 && type_has_mode_precision_p (TREE_TYPE (tem)))
2091 defcodefor_name (tem, &code, &tem, NULL);
2093 if (code == NEGATE_EXPR)
2095 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
2097 rotcnt = tem;
2098 check_range = 1;
2099 if (tem == def_arg2[1 - i])
2100 check_range_stmt = def_arg_stmt[1 - i];
2101 else
2102 check_range_stmt = def_arg_alt_stmt[1 - i];
2103 break;
2105 tree tem2;
2106 defcodefor_name (tem, &code, &tem2, NULL);
2107 if (CONVERT_EXPR_CODE_P (code)
2108 && INTEGRAL_TYPE_P (TREE_TYPE (tem2))
2109 && TYPE_PRECISION (TREE_TYPE (tem2))
2110 > floor_log2 (TYPE_PRECISION (rtype))
2111 && type_has_mode_precision_p (TREE_TYPE (tem2)))
2113 if (tem2 == def_arg2[1 - i]
2114 || tem2 == def_arg2_alt[1 - i])
2116 rotcnt = tem2;
2117 check_range = 1;
2118 if (tem2 == def_arg2[1 - i])
2119 check_range_stmt = def_arg_stmt[1 - i];
2120 else
2121 check_range_stmt = def_arg_alt_stmt[1 - i];
2122 break;
2125 else
2126 tem2 = NULL_TREE;
2128 if (cdef_code[1 - i] == BIT_AND_EXPR
2129 && tree_fits_shwi_p (cdef_arg2[1 - i])
2130 && tree_to_shwi (cdef_arg2[1 - i])
2131 == TYPE_PRECISION (rtype) - 1
2132 && TREE_CODE (cdef_arg1[1 - i]) == SSA_NAME)
2134 if (tem == cdef_arg1[1 - i]
2135 || tem2 == cdef_arg1[1 - i])
2137 rotcnt = def_arg2[1 - i];
2138 break;
2140 tree tem3;
2141 defcodefor_name (cdef_arg1[1 - i], &code, &tem3, NULL);
2142 if (CONVERT_EXPR_CODE_P (code)
2143 && INTEGRAL_TYPE_P (TREE_TYPE (tem3))
2144 && TYPE_PRECISION (TREE_TYPE (tem3))
2145 > floor_log2 (TYPE_PRECISION (rtype))
2146 && type_has_mode_precision_p (TREE_TYPE (tem3)))
2148 if (tem == tem3 || tem2 == tem3)
2150 rotcnt = def_arg2[1 - i];
2151 break;
2157 if (check_range && wider_prec > TYPE_PRECISION (rtype))
2159 if (TREE_CODE (rotcnt) != SSA_NAME)
2160 return false;
2161 int_range_max r;
2162 range_query *q = get_range_query (cfun);
2163 if (q == get_global_range_query ())
2164 q = enable_ranger (cfun);
2165 if (!q->range_of_expr (r, rotcnt, check_range_stmt))
2167 if (check_range > 0)
2168 return false;
2169 r.set_varying (TREE_TYPE (rotcnt));
2171 int prec = TYPE_PRECISION (TREE_TYPE (rotcnt));
2172 signop sign = TYPE_SIGN (TREE_TYPE (rotcnt));
2173 wide_int min = wide_int::from (TYPE_PRECISION (rtype), prec, sign);
2174 wide_int max = wide_int::from (wider_prec - 1, prec, sign);
2175 if (check_range < 0)
2176 max = min;
2177 int_range<1> r2 (TREE_TYPE (rotcnt), min, max);
2178 r.intersect (r2);
2179 if (!r.undefined_p ())
2181 if (check_range > 0)
2183 int_range_max r3;
2184 for (int i = TYPE_PRECISION (rtype) + 1; i < wider_prec;
2185 i += TYPE_PRECISION (rtype))
2187 int j = i + TYPE_PRECISION (rtype) - 2;
2188 min = wide_int::from (i, prec, sign);
2189 max = wide_int::from (MIN (j, wider_prec - 1),
2190 prec, sign);
2191 int_range<1> r4 (TREE_TYPE (rotcnt), min, max);
2192 r3.union_ (r4);
2194 r.intersect (r3);
2195 if (!r.undefined_p ())
2196 return false;
2198 add_masking = true;
2201 if (rotcnt == NULL_TREE)
2202 return false;
2203 swapped_p = i != 1;
2206 if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
2207 TREE_TYPE (rotcnt)))
2209 g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
2210 NOP_EXPR, rotcnt);
2211 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2212 rotcnt = gimple_assign_lhs (g);
2214 if (add_masking)
2216 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rotcnt)),
2217 BIT_AND_EXPR, rotcnt,
2218 build_int_cst (TREE_TYPE (rotcnt),
2219 TYPE_PRECISION (rtype) - 1));
2220 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2221 rotcnt = gimple_assign_lhs (g);
2223 lhs = gimple_assign_lhs (stmt);
2224 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2225 lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
2226 g = gimple_build_assign (lhs,
2227 ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
2228 ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
2229 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2231 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2232 g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
2234 gsi_replace (gsi, g, false);
2235 return true;
2239 /* Check whether an array contains a valid ctz table. */
2240 static bool
2241 check_ctz_array (tree ctor, unsigned HOST_WIDE_INT mulc,
2242 HOST_WIDE_INT &zero_val, unsigned shift, unsigned bits)
2244 tree elt, idx;
2245 unsigned HOST_WIDE_INT i, mask;
2246 unsigned matched = 0;
2248 mask = ((HOST_WIDE_INT_1U << (bits - shift)) - 1) << shift;
2250 zero_val = 0;
2252 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), i, idx, elt)
2254 if (TREE_CODE (idx) != INTEGER_CST || TREE_CODE (elt) != INTEGER_CST)
2255 return false;
2256 if (i > bits * 2)
2257 return false;
2259 unsigned HOST_WIDE_INT index = tree_to_shwi (idx);
2260 HOST_WIDE_INT val = tree_to_shwi (elt);
2262 if (index == 0)
2264 zero_val = val;
2265 matched++;
2268 if (val >= 0 && val < bits && (((mulc << val) & mask) >> shift) == index)
2269 matched++;
2271 if (matched > bits)
2272 return true;
2275 return false;
2278 /* Check whether a string contains a valid ctz table. */
2279 static bool
2280 check_ctz_string (tree string, unsigned HOST_WIDE_INT mulc,
2281 HOST_WIDE_INT &zero_val, unsigned shift, unsigned bits)
2283 unsigned HOST_WIDE_INT len = TREE_STRING_LENGTH (string);
2284 unsigned HOST_WIDE_INT mask;
2285 unsigned matched = 0;
2286 const unsigned char *p = (const unsigned char *) TREE_STRING_POINTER (string);
2288 if (len < bits || len > bits * 2)
2289 return false;
2291 mask = ((HOST_WIDE_INT_1U << (bits - shift)) - 1) << shift;
2293 zero_val = p[0];
2295 for (unsigned i = 0; i < len; i++)
2296 if (p[i] < bits && (((mulc << p[i]) & mask) >> shift) == i)
2297 matched++;
2299 return matched == bits;
2302 /* Recognize count trailing zeroes idiom.
2303 The canonical form is array[((x & -x) * C) >> SHIFT] where C is a magic
2304 constant which when multiplied by a power of 2 creates a unique value
2305 in the top 5 or 6 bits. This is then indexed into a table which maps it
2306 to the number of trailing zeroes. Array[0] is returned so the caller can
2307 emit an appropriate sequence depending on whether ctz (0) is defined on
2308 the target. */
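/* A plausible source-level instance of the idiom (the function name is
   made up; the table and multiplier are the widely published 32-bit
   de Bruijn constants, shown here only as an illustration):

     static const unsigned char ctz_tab[32] = {
       0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8,
       31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9
     };

     unsigned int
     my_ctz32 (unsigned int x)
     {
       return ctz_tab[((x & -x) * 0x077CB531u) >> 27];
     }

   Here C is 0x077CB531, SHIFT is 27 (== 32 - 5), and the whole sequence
   can be replaced by an IFN_CTZ call when the target supports it.  */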
2309 static bool
2310 optimize_count_trailing_zeroes (tree array_ref, tree x, tree mulc,
2311 tree tshift, HOST_WIDE_INT &zero_val)
2313 tree type = TREE_TYPE (array_ref);
2314 tree array = TREE_OPERAND (array_ref, 0);
2316 gcc_assert (TREE_CODE (mulc) == INTEGER_CST);
2317 gcc_assert (TREE_CODE (tshift) == INTEGER_CST);
2319 tree input_type = TREE_TYPE (x);
2320 unsigned input_bits = tree_to_shwi (TYPE_SIZE (input_type));
2322 /* Check the array element type is not wider than 32 bits and the input is
2323 an unsigned 32-bit or 64-bit type. */
2324 if (TYPE_PRECISION (type) > 32 || !TYPE_UNSIGNED (input_type))
2325 return false;
2326 if (input_bits != 32 && input_bits != 64)
2327 return false;
2329 if (!direct_internal_fn_supported_p (IFN_CTZ, input_type, OPTIMIZE_FOR_BOTH))
2330 return false;
2332 /* Check the lower bound of the array is zero. */
2333 tree low = array_ref_low_bound (array_ref);
2334 if (!low || !integer_zerop (low))
2335 return false;
2337 unsigned shiftval = tree_to_shwi (tshift);
2339 /* Check the shift extracts the top 5..7 bits. */
2340 if (shiftval < input_bits - 7 || shiftval > input_bits - 5)
2341 return false;
2343 tree ctor = ctor_for_folding (array);
2344 if (!ctor)
2345 return false;
2347 unsigned HOST_WIDE_INT val = tree_to_uhwi (mulc);
2349 if (TREE_CODE (ctor) == CONSTRUCTOR)
2350 return check_ctz_array (ctor, val, zero_val, shiftval, input_bits);
2352 if (TREE_CODE (ctor) == STRING_CST
2353 && TYPE_PRECISION (type) == CHAR_TYPE_SIZE)
2354 return check_ctz_string (ctor, val, zero_val, shiftval, input_bits);
2356 return false;
2359 /* Match.pd function to match the ctz expression. */
2360 extern bool gimple_ctz_table_index (tree, tree *, tree (*)(tree));
2362 static bool
2363 simplify_count_trailing_zeroes (gimple_stmt_iterator *gsi)
2365 gimple *stmt = gsi_stmt (*gsi);
2366 tree array_ref = gimple_assign_rhs1 (stmt);
2367 tree res_ops[3];
2368 HOST_WIDE_INT zero_val;
2370 gcc_checking_assert (TREE_CODE (array_ref) == ARRAY_REF);
2372 if (!gimple_ctz_table_index (TREE_OPERAND (array_ref, 1), &res_ops[0], NULL))
2373 return false;
2375 if (optimize_count_trailing_zeroes (array_ref, res_ops[0],
2376 res_ops[1], res_ops[2], zero_val))
2378 tree type = TREE_TYPE (res_ops[0]);
2379 HOST_WIDE_INT ctz_val = 0;
2380 HOST_WIDE_INT type_size = tree_to_shwi (TYPE_SIZE (type));
2381 bool zero_ok
2382 = CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_INT_TYPE_MODE (type), ctz_val) == 2;
2384 /* If the input value can't be zero, don't special case ctz (0). */
2385 if (tree_expr_nonzero_p (res_ops[0]))
2387 zero_ok = true;
2388 zero_val = 0;
2389 ctz_val = 0;
2392 /* Skip if there is no value defined at zero, or if we can't easily
2393 return the correct value for zero. */
2394 if (!zero_ok)
2395 return false;
2396 if (zero_val != ctz_val && !(zero_val == 0 && ctz_val == type_size))
2397 return false;
2399 gimple_seq seq = NULL;
2400 gimple *g;
2401 gcall *call = gimple_build_call_internal (IFN_CTZ, 1, res_ops[0]);
2402 gimple_set_location (call, gimple_location (stmt));
2403 gimple_set_lhs (call, make_ssa_name (integer_type_node));
2404 gimple_seq_add_stmt (&seq, call);
2406 tree prev_lhs = gimple_call_lhs (call);
2408 /* Emit ctz (x) & 31 if ctz (0) is 32 but we need to return 0. */
2409 if (zero_val == 0 && ctz_val == type_size)
2411 g = gimple_build_assign (make_ssa_name (integer_type_node),
2412 BIT_AND_EXPR, prev_lhs,
2413 build_int_cst (integer_type_node,
2414 type_size - 1));
2415 gimple_set_location (g, gimple_location (stmt));
2416 gimple_seq_add_stmt (&seq, g);
2417 prev_lhs = gimple_assign_lhs (g);
2420 g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, prev_lhs);
2421 gimple_seq_add_stmt (&seq, g);
2422 gsi_replace_with_seq (gsi, seq, true);
2423 return true;
2426 return false;
2430 /* Combine an element access with a shuffle. Returns true if there were
2431 any changes made, else it returns false. */
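/* As a rough single-element illustration (SSA names and the 4 x 32-bit
   vectors are assumptions of the example):

     _3 = VEC_PERM_EXPR <v_1, w_2, { 4, 5, 6, 7 }>;
     x_4 = BIT_FIELD_REF <_3, 32, 64>;

   extracts element 2 of the permutation, which the mask maps to element 2
   of the second input, so this can become

     x_4 = BIT_FIELD_REF <w_2, 32, 64>;  */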
2433 static bool
2434 simplify_bitfield_ref (gimple_stmt_iterator *gsi)
2436 gimple *stmt = gsi_stmt (*gsi);
2437 gimple *def_stmt;
2438 tree op, op0, op1;
2439 tree elem_type, type;
2440 tree p, m, tem;
2441 unsigned HOST_WIDE_INT nelts, idx;
2442 poly_uint64 size, elem_size;
2443 enum tree_code code;
2445 op = gimple_assign_rhs1 (stmt);
2446 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
2448 op0 = TREE_OPERAND (op, 0);
2449 if (TREE_CODE (op0) != SSA_NAME
2450 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
2451 return false;
2453 def_stmt = get_prop_source_stmt (op0, false, NULL);
2454 if (!def_stmt || !can_propagate_from (def_stmt))
2455 return false;
2457 op1 = TREE_OPERAND (op, 1);
2458 code = gimple_assign_rhs_code (def_stmt);
2459 elem_type = TREE_TYPE (TREE_TYPE (op0));
2460 type = TREE_TYPE (op);
2461 /* Also handle vector type.
2462 i.e.
2463 _7 = VEC_PERM_EXPR <_1, _1, { 2, 3, 2, 3 }>;
2464 _11 = BIT_FIELD_REF <_7, 64, 0>;
2466 to
2468 _11 = BIT_FIELD_REF <_1, 64, 64>. */
2470 size = tree_to_poly_uint64 (TYPE_SIZE (type));
2471 if (maybe_ne (bit_field_size (op), size))
2472 return false;
2474 elem_size = tree_to_poly_uint64 (TYPE_SIZE (elem_type));
2475 if (code != VEC_PERM_EXPR
2476 || !constant_multiple_p (bit_field_offset (op), elem_size, &idx))
2477 return false;
2479 m = gimple_assign_rhs3 (def_stmt);
2480 if (TREE_CODE (m) != VECTOR_CST
2481 || !VECTOR_CST_NELTS (m).is_constant (&nelts))
2482 return false;
2484 /* One element. */
2485 if (known_eq (size, elem_size))
2486 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx)) % (2 * nelts);
2487 else
2489 unsigned HOST_WIDE_INT nelts_op;
2490 if (!constant_multiple_p (size, elem_size, &nelts_op)
2491 || !pow2p_hwi (nelts_op))
2492 return false;
2493 /* Clamp vec_perm_expr index. */
2494 unsigned start = TREE_INT_CST_LOW (vector_cst_elt (m, idx)) % (2 * nelts);
2495 unsigned end = TREE_INT_CST_LOW (vector_cst_elt (m, idx + nelts_op - 1))
2496 % (2 * nelts);
2497 /* Be in the same vector. */
2498 if ((start < nelts) != (end < nelts))
2499 return false;
2500 for (unsigned HOST_WIDE_INT i = 1; i != nelts_op; i++)
2502 /* Continuous area. */
2503 if (TREE_INT_CST_LOW (vector_cst_elt (m, idx + i)) % (2 * nelts) - 1
2504 != TREE_INT_CST_LOW (vector_cst_elt (m, idx + i - 1))
2505 % (2 * nelts))
2506 return false;
2508 /* Alignment not worse than before. */
2509 if (start % nelts_op)
2510 return false;
2511 idx = start;
2514 if (idx < nelts)
2515 p = gimple_assign_rhs1 (def_stmt);
2516 else
2518 p = gimple_assign_rhs2 (def_stmt);
2519 idx -= nelts;
2522 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
2523 p, op1, bitsize_int (idx * elem_size));
2524 gimple_assign_set_rhs1 (stmt, tem);
2525 fold_stmt (gsi);
2526 update_stmt (gsi_stmt (*gsi));
2527 return true;
2530 /* Determine whether applying the 2 permutations (mask1 then mask2)
2531 gives back one of the inputs. */
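/* For instance (made-up constant-length masks over 4-element vectors):

     v1_2 = VEC_PERM_EXPR <v0_1, v0_1, { 3, 2, 1, 0 }>;
     v2_3 = VEC_PERM_EXPR <v1_2, v1_2, { 3, 2, 1, 0 }>;

   reverses v0_1 twice; the combined mask is { 0, 1, 2, 3 }, so the
   function returns 1 and v2_3 can be replaced by v0_1.  */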
2533 static int
2534 is_combined_permutation_identity (tree mask1, tree mask2)
2536 tree mask;
2537 unsigned HOST_WIDE_INT nelts, i, j;
2538 bool maybe_identity1 = true;
2539 bool maybe_identity2 = true;
2541 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
2542 && TREE_CODE (mask2) == VECTOR_CST);
2544 /* For VLA masks, check for the following pattern:
2545 v1 = VEC_PERM_EXPR (v0, ..., mask1)
2546 v2 = VEC_PERM_EXPR (v1, ..., mask2)
2547 -->
2548 v2 = v0
2549 if mask1 == mask2 == {nelts - 1, nelts - 2, ...}. */
2551 if (operand_equal_p (mask1, mask2, 0)
2552 && !VECTOR_CST_NELTS (mask1).is_constant ())
2554 vec_perm_builder builder;
2555 if (tree_to_vec_perm_builder (&builder, mask1))
2557 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (mask1));
2558 vec_perm_indices sel (builder, 1, nelts);
2559 if (sel.series_p (0, 1, nelts - 1, -1))
2560 return 1;
2564 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
2565 if (mask == NULL_TREE || TREE_CODE (mask) != VECTOR_CST)
2566 return 0;
2568 if (!VECTOR_CST_NELTS (mask).is_constant (&nelts))
2569 return 0;
2570 for (i = 0; i < nelts; i++)
2572 tree val = VECTOR_CST_ELT (mask, i);
2573 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2574 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
2575 if (j == i)
2576 maybe_identity2 = false;
2577 else if (j == i + nelts)
2578 maybe_identity1 = false;
2579 else
2580 return 0;
2582 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
2585 /* Combine a shuffle with its arguments. Returns 1 if there were any
2586 changes made, 2 if cfg-cleanup needs to run. Else it returns 0. */
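/* One simple case handled below is a constant shuffle of a CONSTRUCTOR
   (SSA names and the 4-element vector are made up for the illustration):

     v_5 = {a_1, b_2, c_3, d_4};
     w_6 = VEC_PERM_EXPR <v_5, v_5, { 3, 2, 1, 0 }>;

   which can be folded into the reordered constructor

     w_6 = {d_4, c_3, b_2, a_1};

   leaving the original constructor dead when w_6 was its only use.  */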
2588 static int
2589 simplify_permutation (gimple_stmt_iterator *gsi)
2591 gimple *stmt = gsi_stmt (*gsi);
2592 gimple *def_stmt = NULL;
2593 tree op0, op1, op2, op3, arg0, arg1;
2594 enum tree_code code, code2 = ERROR_MARK;
2595 bool single_use_op0 = false;
2597 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
2599 op0 = gimple_assign_rhs1 (stmt);
2600 op1 = gimple_assign_rhs2 (stmt);
2601 op2 = gimple_assign_rhs3 (stmt);
2603 if (TREE_CODE (op2) != VECTOR_CST)
2604 return 0;
2606 if (TREE_CODE (op0) == VECTOR_CST)
2608 code = VECTOR_CST;
2609 arg0 = op0;
2611 else if (TREE_CODE (op0) == SSA_NAME)
2613 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
2614 if (!def_stmt)
2615 return 0;
2616 code = gimple_assign_rhs_code (def_stmt);
2617 if (code == VIEW_CONVERT_EXPR)
2619 tree rhs = gimple_assign_rhs1 (def_stmt);
2620 tree name = TREE_OPERAND (rhs, 0);
2621 if (TREE_CODE (name) != SSA_NAME)
2622 return 0;
2623 if (!has_single_use (name))
2624 single_use_op0 = false;
2625 /* Here we update the def_stmt through this VIEW_CONVERT_EXPR,
2626 but still keep the code to indicate it comes from
2627 VIEW_CONVERT_EXPR. */
2628 def_stmt = SSA_NAME_DEF_STMT (name);
2629 if (!def_stmt || !is_gimple_assign (def_stmt))
2630 return 0;
2631 if (gimple_assign_rhs_code (def_stmt) != CONSTRUCTOR)
2632 return 0;
2634 if (!can_propagate_from (def_stmt))
2635 return 0;
2636 arg0 = gimple_assign_rhs1 (def_stmt);
2638 else
2639 return 0;
2641 /* Two consecutive shuffles. */
2642 if (code == VEC_PERM_EXPR)
2644 tree orig;
2645 int ident;
2647 if (op0 != op1)
2648 return 0;
2649 op3 = gimple_assign_rhs3 (def_stmt);
2650 if (TREE_CODE (op3) != VECTOR_CST)
2651 return 0;
2652 ident = is_combined_permutation_identity (op3, op2);
2653 if (!ident)
2654 return 0;
2655 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
2656 : gimple_assign_rhs2 (def_stmt);
2657 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
2658 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
2659 gimple_set_num_ops (stmt, 2);
2660 update_stmt (stmt);
2661 return remove_prop_source_from_use (op0) ? 2 : 1;
2663 else if (code == CONSTRUCTOR
2664 || code == VECTOR_CST
2665 || code == VIEW_CONVERT_EXPR)
2667 if (op0 != op1)
2669 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
2670 return 0;
2672 if (TREE_CODE (op1) == VECTOR_CST)
2673 arg1 = op1;
2674 else if (TREE_CODE (op1) == SSA_NAME)
2676 gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
2677 if (!def_stmt2)
2678 return 0;
2679 code2 = gimple_assign_rhs_code (def_stmt2);
2680 if (code2 == VIEW_CONVERT_EXPR)
2682 tree rhs = gimple_assign_rhs1 (def_stmt2);
2683 tree name = TREE_OPERAND (rhs, 0);
2684 if (TREE_CODE (name) != SSA_NAME)
2685 return 0;
2686 if (!has_single_use (name))
2687 return 0;
2688 def_stmt2 = SSA_NAME_DEF_STMT (name);
2689 if (!def_stmt2 || !is_gimple_assign (def_stmt2))
2690 return 0;
2691 if (gimple_assign_rhs_code (def_stmt2) != CONSTRUCTOR)
2692 return 0;
2694 else if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
2695 return 0;
2696 if (!can_propagate_from (def_stmt2))
2697 return 0;
2698 arg1 = gimple_assign_rhs1 (def_stmt2);
2700 else
2701 return 0;
2703 else
2705 /* Already used twice in this statement. */
2706 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
2707 return 0;
2708 arg1 = arg0;
2711 /* If any VIEW_CONVERT_EXPRs were found while looking for the sources of
2712 the permutation operands, check whether the transform is valid and
2713 prepare the required new operands. */
2714 if (code == VIEW_CONVERT_EXPR || code2 == VIEW_CONVERT_EXPR)
2716 /* Figure out the target vector type to which operands should be
2717 converted. If both are CONSTRUCTOR, the types should be the
2718 same; otherwise use the type of the CONSTRUCTOR operand. */
2719 tree tgt_type = NULL_TREE;
2720 if (code == VIEW_CONVERT_EXPR)
2722 gcc_assert (gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR);
2723 code = CONSTRUCTOR;
2724 tgt_type = TREE_TYPE (arg0);
2726 if (code2 == VIEW_CONVERT_EXPR)
2728 tree arg1_type = TREE_TYPE (arg1);
2729 if (tgt_type == NULL_TREE)
2730 tgt_type = arg1_type;
2731 else if (tgt_type != arg1_type)
2732 return 0;
2735 if (!VECTOR_TYPE_P (tgt_type))
2736 return 0;
2737 tree op2_type = TREE_TYPE (op2);
2739 /* Figure out the shrink factor. */
2740 poly_uint64 tgt_units = TYPE_VECTOR_SUBPARTS (tgt_type);
2741 poly_uint64 op2_units = TYPE_VECTOR_SUBPARTS (op2_type);
2742 if (maybe_gt (tgt_units, op2_units))
2743 return 0;
2744 unsigned int factor;
2745 if (!constant_multiple_p (op2_units, tgt_units, &factor))
2746 return 0;
2748 /* Build the new permutation control vector as target vector. */
2749 vec_perm_builder builder;
2750 if (!tree_to_vec_perm_builder (&builder, op2))
2751 return 0;
2752 vec_perm_indices indices (builder, 2, op2_units);
2753 vec_perm_indices new_indices;
2754 if (new_indices.new_shrunk_vector (indices, factor))
2756 tree mask_type = tgt_type;
2757 if (!VECTOR_INTEGER_TYPE_P (mask_type))
2759 tree elem_type = TREE_TYPE (mask_type);
2760 unsigned elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
2761 tree int_type = build_nonstandard_integer_type (elem_size, 0);
2762 mask_type = build_vector_type (int_type, tgt_units);
2764 op2 = vec_perm_indices_to_tree (mask_type, new_indices);
2766 else
2767 return 0;
2769 /* Convert the VECTOR_CST to the appropriate vector type. */
2770 if (tgt_type != TREE_TYPE (arg0))
2771 arg0 = fold_build1 (VIEW_CONVERT_EXPR, tgt_type, arg0);
2772 else if (tgt_type != TREE_TYPE (arg1))
2773 arg1 = fold_build1 (VIEW_CONVERT_EXPR, tgt_type, arg1);
2776 /* Any VIEW_CONVERT_EXPR code should have been changed to CONSTRUCTOR by now. */
2777 gcc_assert (code == CONSTRUCTOR || code == VECTOR_CST);
2779 /* Shuffle of a constructor. */
2780 bool ret = false;
2781 tree res_type
2782 = build_vector_type (TREE_TYPE (TREE_TYPE (arg0)),
2783 TYPE_VECTOR_SUBPARTS (TREE_TYPE (op2)));
2784 tree opt = fold_ternary (VEC_PERM_EXPR, res_type, arg0, arg1, op2);
2785 if (!opt
2786 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
2787 return 0;
2788 /* If a VIEW_CONVERT_EXPR was found above, one explicit conversion is needed. */
2789 if (res_type != TREE_TYPE (op0))
2791 tree name = make_ssa_name (TREE_TYPE (opt));
2792 gimple *ass_stmt = gimple_build_assign (name, opt);
2793 gsi_insert_before (gsi, ass_stmt, GSI_SAME_STMT);
2794 opt = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op0), name);
2796 gimple_assign_set_rhs_from_tree (gsi, opt);
2797 update_stmt (gsi_stmt (*gsi));
2798 if (TREE_CODE (op0) == SSA_NAME)
2799 ret = remove_prop_source_from_use (op0);
2800 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
2801 ret |= remove_prop_source_from_use (op1);
2802 return ret ? 2 : 1;
2805 return 0;
2808 /* Get the BIT_FIELD_REF definition of VAL, if any, looking through
2809 conversions with code CONV_CODE, updating it if it is still ERROR_MARK.
2810 Return NULL_TREE if no such matching def was found. */
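/* For example (made-up SSA names; v_1 is assumed to be a vector of ints):
   if VAL is val_3 defined as

     _2 = BIT_FIELD_REF <v_1, 32, 64>;
     val_3 = (float) _2;

   the function returns the BIT_FIELD_REF and records FLOAT_EXPR in
   CONV_CODE, provided CONV_CODE was still ERROR_MARK or already
   FLOAT_EXPR.  */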
2812 static tree
2813 get_bit_field_ref_def (tree val, enum tree_code &conv_code)
2815 if (TREE_CODE (val) != SSA_NAME)
2816 return NULL_TREE;
2817 gimple *def_stmt = get_prop_source_stmt (val, false, NULL);
2818 if (!def_stmt)
2819 return NULL_TREE;
2820 enum tree_code code = gimple_assign_rhs_code (def_stmt);
2821 if (code == FLOAT_EXPR
2822 || code == FIX_TRUNC_EXPR
2823 || CONVERT_EXPR_CODE_P (code))
2825 tree op1 = gimple_assign_rhs1 (def_stmt);
2826 if (conv_code == ERROR_MARK)
2827 conv_code = code;
2828 else if (conv_code != code)
2829 return NULL_TREE;
2830 if (TREE_CODE (op1) != SSA_NAME)
2831 return NULL_TREE;
2832 def_stmt = SSA_NAME_DEF_STMT (op1);
2833 if (! is_gimple_assign (def_stmt))
2834 return NULL_TREE;
2835 code = gimple_assign_rhs_code (def_stmt);
2837 if (code != BIT_FIELD_REF)
2838 return NULL_TREE;
2839 return gimple_assign_rhs1 (def_stmt);
2842 /* Recognize a VEC_PERM_EXPR. Returns true if there were any changes. */
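/* A typical input (SSA names and the 4 x 32-bit vector type are made up
   for the illustration):

     _1 = BIT_FIELD_REF <v_4, 32, 96>;
     _2 = BIT_FIELD_REF <v_4, 32, 64>;
     _3 = BIT_FIELD_REF <v_4, 32, 32>;
     _5 = BIT_FIELD_REF <v_4, 32, 0>;
     x_6 = {_1, _2, _3, _5};

   where every element is extracted from the same source vector can be
   rewritten as a single permutation

     x_6 = VEC_PERM_EXPR <v_4, v_4, { 3, 2, 1, 0 }>;

   possibly combined with a conversion when the extracted elements were
   converted on the way.  */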
2844 static bool
2845 simplify_vector_constructor (gimple_stmt_iterator *gsi)
2847 gimple *stmt = gsi_stmt (*gsi);
2848 tree op, orig[2], type, elem_type;
2849 unsigned elem_size, i;
2850 unsigned HOST_WIDE_INT nelts;
2851 unsigned HOST_WIDE_INT refnelts;
2852 enum tree_code conv_code;
2853 constructor_elt *elt;
2855 op = gimple_assign_rhs1 (stmt);
2856 type = TREE_TYPE (op);
2857 gcc_checking_assert (TREE_CODE (op) == CONSTRUCTOR
2858 && TREE_CODE (type) == VECTOR_TYPE);
2860 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
2861 return false;
2862 elem_type = TREE_TYPE (type);
2863 elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
2865 orig[0] = NULL;
2866 orig[1] = NULL;
2867 conv_code = ERROR_MARK;
2868 bool maybe_ident = true;
2869 bool maybe_blend[2] = { true, true };
2870 tree one_constant = NULL_TREE;
2871 tree one_nonconstant = NULL_TREE;
2872 auto_vec<tree> constants;
2873 constants.safe_grow_cleared (nelts, true);
2874 auto_vec<std::pair<unsigned, unsigned>, 64> elts;
2875 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
2877 tree ref, op1;
2878 unsigned int elem;
2880 if (i >= nelts)
2881 return false;
2883 /* Look for elements extracted and possibly converted from
2884 another vector. */
2885 op1 = get_bit_field_ref_def (elt->value, conv_code);
2886 if (op1
2887 && TREE_CODE ((ref = TREE_OPERAND (op1, 0))) == SSA_NAME
2888 && VECTOR_TYPE_P (TREE_TYPE (ref))
2889 && useless_type_conversion_p (TREE_TYPE (op1),
2890 TREE_TYPE (TREE_TYPE (ref)))
2891 && constant_multiple_p (bit_field_offset (op1),
2892 bit_field_size (op1), &elem)
2893 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (ref)).is_constant (&refnelts))
2895 unsigned int j;
2896 for (j = 0; j < 2; ++j)
2898 if (!orig[j])
2900 if (j == 0
2901 || useless_type_conversion_p (TREE_TYPE (orig[0]),
2902 TREE_TYPE (ref)))
2903 break;
2905 else if (ref == orig[j])
2906 break;
2908 /* Found a suitable vector element. */
2909 if (j < 2)
2911 orig[j] = ref;
2912 if (elem != i || j != 0)
2913 maybe_ident = false;
2914 if (elem != i)
2915 maybe_blend[j] = false;
2916 elts.safe_push (std::make_pair (j, elem));
2917 continue;
2919 /* Else fallthru. */
2921 /* Handle elements not extracted from a vector.
2922 1. constants by permuting with constant vector
2923 2. a unique non-constant element by permuting with a splat vector */
2924 if (orig[1]
2925 && orig[1] != error_mark_node)
2926 return false;
2927 orig[1] = error_mark_node;
2928 if (CONSTANT_CLASS_P (elt->value))
2930 if (one_nonconstant)
2931 return false;
2932 if (!one_constant)
2933 one_constant = elt->value;
2934 constants[i] = elt->value;
2936 else
2938 if (one_constant)
2939 return false;
2940 if (!one_nonconstant)
2941 one_nonconstant = elt->value;
2942 else if (!operand_equal_p (one_nonconstant, elt->value, 0))
2943 return false;
2945 elts.safe_push (std::make_pair (1, i));
2946 maybe_ident = false;
2948 if (i < nelts)
2949 return false;
2951 if (! orig[0]
2952 || ! VECTOR_TYPE_P (TREE_TYPE (orig[0])))
2953 return false;
2954 refnelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig[0])).to_constant ();
2955 /* We currently do not handle larger destination vectors. */
2956 if (refnelts < nelts)
2957 return false;
2959 if (maybe_ident)
2961 tree conv_src_type
2962 = (nelts != refnelts
2963 ? (conv_code != ERROR_MARK
2964 ? build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])), nelts)
2965 : type)
2966 : TREE_TYPE (orig[0]));
2967 if (conv_code != ERROR_MARK
2968 && !supportable_convert_operation (conv_code, type, conv_src_type,
2969 &conv_code))
2971 /* Only a few targets implement direct conversion patterns, so try
2972 some simple special cases via VEC_[UN]PACK[_FLOAT]_LO_EXPR. */
2973 optab optab;
2974 tree halfvectype, dblvectype;
2975 enum tree_code unpack_op;
2977 if (!BYTES_BIG_ENDIAN)
2978 unpack_op = (FLOAT_TYPE_P (TREE_TYPE (type))
2979 ? VEC_UNPACK_FLOAT_LO_EXPR
2980 : VEC_UNPACK_LO_EXPR);
2981 else
2982 unpack_op = (FLOAT_TYPE_P (TREE_TYPE (type))
2983 ? VEC_UNPACK_FLOAT_HI_EXPR
2984 : VEC_UNPACK_HI_EXPR);
2986 /* Conversions between DFP and FP have no special tree code
2987 but we cannot handle those since all relevant vector conversion
2988 optabs only have a single mode. */
2989 if (CONVERT_EXPR_CODE_P (conv_code)
2990 && FLOAT_TYPE_P (TREE_TYPE (type))
2991 && (DECIMAL_FLOAT_TYPE_P (TREE_TYPE (type))
2992 != DECIMAL_FLOAT_TYPE_P (TREE_TYPE (conv_src_type))))
2993 return false;
2995 if (CONVERT_EXPR_CODE_P (conv_code)
2996 && (2 * TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig[0])))
2997 == TYPE_PRECISION (TREE_TYPE (type)))
2998 && mode_for_vector (as_a <scalar_mode>
2999 (TYPE_MODE (TREE_TYPE (TREE_TYPE (orig[0])))),
3000 nelts * 2).exists ()
3001 && (dblvectype
3002 = build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])),
3003 nelts * 2))
3004 /* Only use it for vector modes or for vector booleans
3005 represented as scalar bitmasks. See PR95528. */
3006 && (VECTOR_MODE_P (TYPE_MODE (dblvectype))
3007 || VECTOR_BOOLEAN_TYPE_P (dblvectype))
3008 && (optab = optab_for_tree_code (unpack_op,
3009 dblvectype,
3010 optab_default))
3011 && (optab_handler (optab, TYPE_MODE (dblvectype))
3012 != CODE_FOR_nothing))
3014 gimple_seq stmts = NULL;
3015 tree dbl;
3016 if (refnelts == nelts)
3018 /* ??? Paradoxical subregs don't exist, so insert into
3019 the lower half of a wider zero vector. */
3020 dbl = gimple_build (&stmts, BIT_INSERT_EXPR, dblvectype,
3021 build_zero_cst (dblvectype), orig[0],
3022 bitsize_zero_node);
3024 else if (refnelts == 2 * nelts)
3025 dbl = orig[0];
3026 else
3027 dbl = gimple_build (&stmts, BIT_FIELD_REF, dblvectype,
3028 orig[0], TYPE_SIZE (dblvectype),
3029 bitsize_zero_node);
3030 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3031 gimple_assign_set_rhs_with_ops (gsi, unpack_op, dbl);
3033 else if (CONVERT_EXPR_CODE_P (conv_code)
3034 && (TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig[0])))
3035 == 2 * TYPE_PRECISION (TREE_TYPE (type)))
3036 && mode_for_vector (as_a <scalar_mode>
3037 (TYPE_MODE
3038 (TREE_TYPE (TREE_TYPE (orig[0])))),
3039 nelts / 2).exists ()
3040 && (halfvectype
3041 = build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])),
3042 nelts / 2))
3043 /* Only use it for vector modes or for vector booleans
3044 represented as scalar bitmasks. See PR95528. */
3045 && (VECTOR_MODE_P (TYPE_MODE (halfvectype))
3046 || VECTOR_BOOLEAN_TYPE_P (halfvectype))
3047 && (optab = optab_for_tree_code (VEC_PACK_TRUNC_EXPR,
3048 halfvectype,
3049 optab_default))
3050 && (optab_handler (optab, TYPE_MODE (halfvectype))
3051 != CODE_FOR_nothing))
3053 gimple_seq stmts = NULL;
3054 tree low = gimple_build (&stmts, BIT_FIELD_REF, halfvectype,
3055 orig[0], TYPE_SIZE (halfvectype),
3056 bitsize_zero_node);
3057 tree hig = gimple_build (&stmts, BIT_FIELD_REF, halfvectype,
3058 orig[0], TYPE_SIZE (halfvectype),
3059 TYPE_SIZE (halfvectype));
3060 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3061 gimple_assign_set_rhs_with_ops (gsi, VEC_PACK_TRUNC_EXPR,
3062 low, hig);
3064 else
3065 return false;
3066 update_stmt (gsi_stmt (*gsi));
3067 return true;
3069 if (nelts != refnelts)
3071 gassign *lowpart
3072 = gimple_build_assign (make_ssa_name (conv_src_type),
3073 build3 (BIT_FIELD_REF, conv_src_type,
3074 orig[0], TYPE_SIZE (conv_src_type),
3075 bitsize_zero_node));
3076 gsi_insert_before (gsi, lowpart, GSI_SAME_STMT);
3077 orig[0] = gimple_assign_lhs (lowpart);
3079 if (conv_code == ERROR_MARK)
3081 tree src_type = TREE_TYPE (orig[0]);
3082 if (!useless_type_conversion_p (type, src_type))
3084 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type),
3085 TYPE_VECTOR_SUBPARTS (src_type))
3086 && useless_type_conversion_p (TREE_TYPE (type),
3087 TREE_TYPE (src_type)));
3088 tree rhs = build1 (VIEW_CONVERT_EXPR, type, orig[0]);
3089 orig[0] = make_ssa_name (type);
3090 gassign *assign = gimple_build_assign (orig[0], rhs);
3091 gsi_insert_before (gsi, assign, GSI_SAME_STMT);
3093 gimple_assign_set_rhs_from_tree (gsi, orig[0]);
3095 else
3096 gimple_assign_set_rhs_with_ops (gsi, conv_code, orig[0],
3097 NULL_TREE, NULL_TREE);
3099 else
3101 /* If we combine a vector with a non-vector, avoid cases where
3102 we would obviously end up with more GIMPLE stmts, namely when
3103 we would later not fold this to a single insert into the vector
3104 and we had a single extract originally. See PR92819. */
3105 if (nelts == 2
3106 && refnelts > 2
3107 && orig[1] == error_mark_node
3108 && !maybe_blend[0])
3109 return false;
3110 tree mask_type, perm_type, conv_src_type;
3111 perm_type = TREE_TYPE (orig[0]);
3112 conv_src_type = (nelts == refnelts
3113 ? perm_type
3114 : build_vector_type (TREE_TYPE (perm_type), nelts));
3115 if (conv_code != ERROR_MARK
3116 && !supportable_convert_operation (conv_code, type, conv_src_type,
3117 &conv_code))
3118 return false;
3120 /* Now that we know the number of elements of the source, build the
3121 permute vector.
3122 ??? When the second vector has constant values we can shuffle
3123 it and its source indexes to make the permutation supported.
3124 For now it mimics a blend. */
3125 vec_perm_builder sel (refnelts, refnelts, 1);
3126 bool all_same_p = true;
3127 for (i = 0; i < elts.length (); ++i)
3129 sel.quick_push (elts[i].second + elts[i].first * refnelts);
3130 all_same_p &= known_eq (sel[i], sel[0]);
3132 /* And fill the tail with "something"; the tail elements are don't-cares,
3133 and ideally we'd allow VEC_PERM to have a smaller destination
3134 vector. As a heuristic:
3136 (a) if what we have so far duplicates a single element, make the
3137 tail do the same
3139 (b) otherwise preserve a uniform orig[0]. This facilitates
3140 later pattern-matching of VEC_PERM_EXPR to a BIT_INSERT_EXPR. */
3141 for (; i < refnelts; ++i)
3142 sel.quick_push (all_same_p
3143 ? sel[0]
3144 : (elts[0].second == 0 && elts[0].first == 0
3145 ? 0 : refnelts) + i);
3146 vec_perm_indices indices (sel, orig[1] ? 2 : 1, refnelts);
3147 machine_mode vmode = TYPE_MODE (perm_type);
3148 if (!can_vec_perm_const_p (vmode, vmode, indices))
3149 return false;
3150 mask_type
3151 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
3152 refnelts);
3153 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
3154 || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type)),
3155 GET_MODE_SIZE (TYPE_MODE (perm_type))))
3156 return false;
3157 tree op2 = vec_perm_indices_to_tree (mask_type, indices);
3158 bool converted_orig1 = false;
3159 gimple_seq stmts = NULL;
3160 if (!orig[1])
3161 orig[1] = orig[0];
3162 else if (orig[1] == error_mark_node
3163 && one_nonconstant)
3165 /* ??? We can see if we can safely convert to the original
3166 element type. */
3167 converted_orig1 = conv_code != ERROR_MARK;
3168 orig[1] = gimple_build_vector_from_val (&stmts, UNKNOWN_LOCATION,
3169 converted_orig1
3170 ? type : perm_type,
3171 one_nonconstant);
3173 else if (orig[1] == error_mark_node)
3175 /* ??? See if we can convert the vector to the original type. */
3176 converted_orig1 = conv_code != ERROR_MARK;
3177 unsigned n = converted_orig1 ? nelts : refnelts;
3178 tree_vector_builder vec (converted_orig1
3179 ? type : perm_type, n, 1);
3180 for (unsigned i = 0; i < n; ++i)
3181 if (i < nelts && constants[i])
3182 vec.quick_push (constants[i]);
3183 else
3184 /* ??? Push a don't-care value. */
3185 vec.quick_push (one_constant);
3186 orig[1] = vec.build ();
3188 tree blend_op2 = NULL_TREE;
3189 if (converted_orig1)
3191 /* Make sure we can do a blend in the target type. */
3192 vec_perm_builder sel (nelts, nelts, 1);
3193 for (i = 0; i < elts.length (); ++i)
3194 sel.quick_push (elts[i].first
3195 ? elts[i].second + nelts : i);
3196 vec_perm_indices indices (sel, 2, nelts);
3197 machine_mode vmode = TYPE_MODE (type);
3198 if (!can_vec_perm_const_p (vmode, vmode, indices))
3199 return false;
3200 mask_type
3201 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
3202 nelts);
3203 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
3204 || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type)),
3205 GET_MODE_SIZE (TYPE_MODE (type))))
3206 return false;
3207 blend_op2 = vec_perm_indices_to_tree (mask_type, indices);
3209 tree orig1_for_perm
3210 = converted_orig1 ? build_zero_cst (perm_type) : orig[1];
3211 tree res = gimple_build (&stmts, VEC_PERM_EXPR, perm_type,
3212 orig[0], orig1_for_perm, op2);
3213 if (nelts != refnelts)
3214 res = gimple_build (&stmts, BIT_FIELD_REF,
3215 conv_code != ERROR_MARK ? conv_src_type : type,
3216 res, TYPE_SIZE (type), bitsize_zero_node);
3217 if (conv_code != ERROR_MARK)
3218 res = gimple_build (&stmts, conv_code, type, res);
3219 else if (!useless_type_conversion_p (type, TREE_TYPE (res)))
3221 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type),
3222 TYPE_VECTOR_SUBPARTS (perm_type))
3223 && useless_type_conversion_p (TREE_TYPE (type),
3224 TREE_TYPE (perm_type)));
3225 res = gimple_build (&stmts, VIEW_CONVERT_EXPR, type, res);
3227 /* Blend in the actual constant. */
3228 if (converted_orig1)
3229 res = gimple_build (&stmts, VEC_PERM_EXPR, type,
3230 res, orig[1], blend_op2);
3231 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3232 gimple_assign_set_rhs_with_ops (gsi, SSA_NAME, res);
3234 update_stmt (gsi_stmt (*gsi));
3235 return true;
3238 /* Prepare a TARGET_MEM_REF ref so that it can be subsetted as
3239 an lvalue. This splits out an address computation stmt before *GSI
3240 and returns a MEM_REF wrapping the address. */
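/* Informally (names and dump syntax simplified for the illustration), a
   reference such as

     TARGET_MEM_REF (base: p_1, index: i_2, step: 4)

   is handled by emitting the address computation separately,

     tem_3 = &TARGET_MEM_REF (base: p_1, index: i_2, step: 4);

   and returning a MEM_REF based on tem_3 that callers can then wrap in
   BIT_FIELD_REFs.  */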
3242 static tree
3243 prepare_target_mem_ref_lvalue (tree ref, gimple_stmt_iterator *gsi)
3245 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
3246 mark_addressable (TREE_OPERAND (TREE_OPERAND (ref, 0), 0));
3247 tree ptrtype = build_pointer_type (TREE_TYPE (ref));
3248 tree tem = make_ssa_name (ptrtype);
3249 gimple *new_stmt
3250 = gimple_build_assign (tem, build1 (ADDR_EXPR, TREE_TYPE (tem),
3251 unshare_expr (ref)));
3252 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3253 ref = build2_loc (EXPR_LOCATION (ref),
3254 MEM_REF, TREE_TYPE (ref), tem,
3255 build_int_cst (TREE_TYPE (TREE_OPERAND (ref, 1)), 0));
3256 return ref;
3259 /* Rewrite the vector load at *GSI to component-wise loads if the load
3260 is only used in BIT_FIELD_REF extractions, possibly with intermediate
3261 widening. */
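/* Schematically (made-up SSA names; a 4 x 32-bit load used only via
   element extraction):

     vect_1 = MEM <vector(4) int> [(int *)p_2];
     _3 = BIT_FIELD_REF <vect_1, 32, 0>;
     _4 = BIT_FIELD_REF <vect_1, 32, 32>;

   is rewritten so that each extraction wraps the memory reference
   directly,

     _3 = BIT_FIELD_REF <MEM <vector(4) int> [(int *)p_2], 32, 0>;
     _4 = BIT_FIELD_REF <MEM <vector(4) int> [(int *)p_2], 32, 32>;

   after which the vector load itself becomes dead and subsequent folding
   turns the wrapped references into ordinary scalar loads.  */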
3263 static void
3264 optimize_vector_load (gimple_stmt_iterator *gsi)
3266 gimple *stmt = gsi_stmt (*gsi);
3267 tree lhs = gimple_assign_lhs (stmt);
3268 tree rhs = gimple_assign_rhs1 (stmt);
3270 /* Gather BIT_FIELD_REFs to rewrite, looking through
3271 VEC_UNPACK_{LO,HI}_EXPR. */
3272 use_operand_p use_p;
3273 imm_use_iterator iter;
3274 bool rewrite = true;
3275 auto_vec<gimple *, 8> bf_stmts;
3276 auto_vec<tree, 8> worklist;
3277 worklist.quick_push (lhs);
3280 tree def = worklist.pop ();
3281 unsigned HOST_WIDE_INT def_eltsize
3282 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (def))));
3283 FOR_EACH_IMM_USE_FAST (use_p, iter, def)
3285 gimple *use_stmt = USE_STMT (use_p);
3286 if (is_gimple_debug (use_stmt))
3287 continue;
3288 if (!is_gimple_assign (use_stmt))
3290 rewrite = false;
3291 break;
3293 enum tree_code use_code = gimple_assign_rhs_code (use_stmt);
3294 tree use_rhs = gimple_assign_rhs1 (use_stmt);
3295 if (use_code == BIT_FIELD_REF
3296 && TREE_OPERAND (use_rhs, 0) == def
3297 /* If it is on the VEC_UNPACK_{HI,LO}_EXPR
3298 def we need to verify it is element aligned. */
3299 && (def == lhs
3300 || (known_eq (bit_field_size (use_rhs), def_eltsize)
3301 && constant_multiple_p (bit_field_offset (use_rhs),
3302 def_eltsize)
3303 /* We can simulate the VEC_UNPACK_{HI,LO}_EXPR
3304 via a NOP_EXPR only for integral types.
3305 ??? Support VEC_UNPACK_FLOAT_{HI,LO}_EXPR. */
3306 && INTEGRAL_TYPE_P (TREE_TYPE (use_rhs)))))
3308 bf_stmts.safe_push (use_stmt);
3309 continue;
3311 /* Walk through one level of VEC_UNPACK_{LO,HI}_EXPR. */
3312 if (def == lhs
3313 && (use_code == VEC_UNPACK_HI_EXPR
3314 || use_code == VEC_UNPACK_LO_EXPR)
3315 && use_rhs == lhs)
3317 worklist.safe_push (gimple_assign_lhs (use_stmt));
3318 continue;
3320 rewrite = false;
3321 break;
3323 if (!rewrite)
3324 break;
3326 while (!worklist.is_empty ());
3328 if (!rewrite)
3330 gsi_next (gsi);
3331 return;
3333 /* We now have all ultimate uses of the load to rewrite in bf_stmts. */
3335 /* Prepare the original ref to be wrapped in adjusted BIT_FIELD_REFs.
3336 For TARGET_MEM_REFs we have to separate the LEA from the reference. */
3337 tree load_rhs = rhs;
3338 if (TREE_CODE (load_rhs) == TARGET_MEM_REF)
3339 load_rhs = prepare_target_mem_ref_lvalue (load_rhs, gsi);
3341 /* Rewrite the BIT_FIELD_REFs to be actual loads, re-emitting them at
3342 the place of the original load. */
3343 for (gimple *use_stmt : bf_stmts)
3345 tree bfr = gimple_assign_rhs1 (use_stmt);
3346 tree new_rhs = unshare_expr (load_rhs);
3347 if (TREE_OPERAND (bfr, 0) != lhs)
3349 /* When the BIT_FIELD_REF is on the promoted vector we have to
3350 adjust it and emit a conversion afterwards. */
3351 gimple *def_stmt
3352 = SSA_NAME_DEF_STMT (TREE_OPERAND (bfr, 0));
3353 enum tree_code def_code
3354 = gimple_assign_rhs_code (def_stmt);
3356 /* The adjusted BIT_FIELD_REF is of the promotion source
3357 vector size and at half of the offset... */
3358 new_rhs = fold_build3 (BIT_FIELD_REF,
3359 TREE_TYPE (TREE_TYPE (lhs)),
3360 new_rhs,
3361 TYPE_SIZE (TREE_TYPE (TREE_TYPE (lhs))),
3362 size_binop (EXACT_DIV_EXPR,
3363 TREE_OPERAND (bfr, 2),
3364 bitsize_int (2)));
3365 /* ... and offsetted by half of the vector if VEC_UNPACK_HI_EXPR. */
3366 if (def_code == (!BYTES_BIG_ENDIAN
3367 ? VEC_UNPACK_HI_EXPR : VEC_UNPACK_LO_EXPR))
3368 TREE_OPERAND (new_rhs, 2)
3369 = size_binop (PLUS_EXPR, TREE_OPERAND (new_rhs, 2),
3370 size_binop (EXACT_DIV_EXPR,
3371 TYPE_SIZE (TREE_TYPE (lhs)),
3372 bitsize_int (2)));
3373 tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (lhs)));
3374 gimple *new_stmt = gimple_build_assign (tem, new_rhs);
3375 location_t loc = gimple_location (use_stmt);
3376 gimple_set_location (new_stmt, loc);
3377 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3378 /* Perform scalar promotion. */
3379 new_stmt = gimple_build_assign (gimple_assign_lhs (use_stmt),
3380 NOP_EXPR, tem);
3381 gimple_set_location (new_stmt, loc);
3382 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3384 else
3386 /* When the BIT_FIELD_REF is on the original load result
3387 we can just wrap that. */
3388 tree new_rhs = fold_build3 (BIT_FIELD_REF, TREE_TYPE (bfr),
3389 unshare_expr (load_rhs),
3390 TREE_OPERAND (bfr, 1),
3391 TREE_OPERAND (bfr, 2));
3392 gimple *new_stmt = gimple_build_assign (gimple_assign_lhs (use_stmt),
3393 new_rhs);
3394 location_t loc = gimple_location (use_stmt);
3395 gimple_set_location (new_stmt, loc);
3396 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3398 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
3399 unlink_stmt_vdef (use_stmt);
3400 gsi_remove (&gsi2, true);
3403 /* Finally get rid of the intermediate stmts. */
3404 gimple *use_stmt;
3405 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
3407 if (is_gimple_debug (use_stmt))
3409 if (gimple_debug_bind_p (use_stmt))
3411 gimple_debug_bind_reset_value (use_stmt);
3412 update_stmt (use_stmt);
3414 continue;
3416 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
3417 unlink_stmt_vdef (use_stmt);
3418 release_defs (use_stmt);
3419 gsi_remove (&gsi2, true);
3421 /* And the original load. */
3422 release_defs (stmt);
3423 gsi_remove (gsi, true);
3427 /* Primitive "lattice" function for gimple_simplify. */
3429 static tree
3430 fwprop_ssa_val (tree name)
3432 /* First valueize NAME. */
3433 if (TREE_CODE (name) == SSA_NAME
3434 && SSA_NAME_VERSION (name) < lattice.length ())
3436 tree val = lattice[SSA_NAME_VERSION (name)];
3437 if (val)
3438 name = val;
3440 /* We continue matching along SSA use-def edges for SSA names
3441 that are not single-use. Currently there are no patterns
3442 that would cause any issues with that. */
3443 return name;
3446 /* Main entry point for the forward propagation and statement combine
3447 optimizer. */
3449 namespace {
3451 const pass_data pass_data_forwprop =
3453 GIMPLE_PASS, /* type */
3454 "forwprop", /* name */
3455 OPTGROUP_NONE, /* optinfo_flags */
3456 TV_TREE_FORWPROP, /* tv_id */
3457 ( PROP_cfg | PROP_ssa ), /* properties_required */
3458 0, /* properties_provided */
3459 0, /* properties_destroyed */
3460 0, /* todo_flags_start */
3461 TODO_update_ssa, /* todo_flags_finish */
3464 class pass_forwprop : public gimple_opt_pass
3466 public:
3467 pass_forwprop (gcc::context *ctxt)
3468 : gimple_opt_pass (pass_data_forwprop, ctxt)
3471 /* opt_pass methods: */
3472 opt_pass * clone () final override { return new pass_forwprop (m_ctxt); }
3473 bool gate (function *) final override { return flag_tree_forwprop; }
3474 unsigned int execute (function *) final override;
3476 }; // class pass_forwprop
3478 unsigned int
3479 pass_forwprop::execute (function *fun)
3481 unsigned int todoflags = 0;
3483 cfg_changed = false;
3485 /* Combine stmts with the stmts defining their operands. Do that
3486 in an order that guarantees visiting SSA defs before SSA uses. */
3487 lattice.create (num_ssa_names);
3488 lattice.quick_grow_cleared (num_ssa_names);
3489 int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
3490 int postorder_num = pre_and_rev_post_order_compute_fn (fun, NULL,
3491 postorder, false);
3492 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fun));
3493 for (int i = 0; i < postorder_num; ++i)
3495 bb_to_rpo[postorder[i]] = i;
3496 edge_iterator ei;
3497 edge e;
3498 FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (fun, postorder[i])->succs)
3499 e->flags &= ~EDGE_EXECUTABLE;
3501 single_succ_edge (BASIC_BLOCK_FOR_FN (fun, ENTRY_BLOCK))->flags
3502 |= EDGE_EXECUTABLE;
3503 auto_vec<gimple *, 4> to_fixup;
3504 auto_vec<gimple *, 32> to_remove;
3505 to_purge = BITMAP_ALLOC (NULL);
3506 bitmap need_ab_cleanup = BITMAP_ALLOC (NULL);
3507 for (int i = 0; i < postorder_num; ++i)
3509 gimple_stmt_iterator gsi;
3510 basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
3511 edge_iterator ei;
3512 edge e;
3514 /* Skip processing blocks that are not executable. We could improve
3515 single_use tracking by at least unlinking uses from unreachable
3516 blocks but since blocks with uses are not processed in a
3517 meaningful order this is probably not worth it. */
3518 bool any = false;
3519 FOR_EACH_EDGE (e, ei, bb->preds)
3521 if ((e->flags & EDGE_EXECUTABLE)
3522 /* With dominators we could improve backedge handling
3523 when e->src is dominated by bb. But for irreducible
3524 regions we have to take all backedges conservatively.
3525 We can handle single-block cycles as we know the
3526 dominator relationship here. */
3527 || bb_to_rpo[e->src->index] > i)
3529 any = true;
3530 break;
3533 if (!any)
3534 continue;
3536 /* Record degenerate PHIs in the lattice. */
3537 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
3538 gsi_next (&si))
3540 gphi *phi = si.phi ();
3541 tree res = gimple_phi_result (phi);
3542 if (virtual_operand_p (res))
3543 continue;
3545 tree first = NULL_TREE;
3546 bool all_same = true;
3547 edge_iterator ei;
3548 edge e;
3549 FOR_EACH_EDGE (e, ei, bb->preds)
3551 /* Ignore non-executable forward edges. */
3552 if (!(e->flags & EDGE_EXECUTABLE))
3554 if (bb_to_rpo[e->src->index] < i)
3555 continue;
3556 /* Avoid equivalences from backedges - while we might
3557 be able to make irreducible regions reducible and
3558 thus turn a back edge into a forward edge, we do not
3559 want to deal with the intermediate SSA issues that
3560 this exposes. */
3561 all_same = false;
3563 tree use = PHI_ARG_DEF_FROM_EDGE (phi, e);
3564 if (use == res)
3565 /* The PHI result can also appear on a backedge, if so
3566 we can ignore this case for the purpose of determining
3567 the singular value. */
3569 else if (! first)
3570 first = use;
3571 else if (! operand_equal_p (first, use, 0))
3573 all_same = false;
3574 break;
3577 if (all_same)
3579 if (may_propagate_copy (res, first))
3580 to_remove.safe_push (phi);
3581 fwprop_set_lattice_val (res, first);
3585 /* Apply forward propagation to all stmts in the basic-block.
3586 Note we update GSI within the loop as necessary. */
3587 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
3589 gimple *stmt = gsi_stmt (gsi);
3590 tree lhs, rhs;
3591 enum tree_code code;
3593 if (!is_gimple_assign (stmt))
3595 gsi_next (&gsi);
3596 continue;
3599 lhs = gimple_assign_lhs (stmt);
3600 rhs = gimple_assign_rhs1 (stmt);
3601 code = gimple_assign_rhs_code (stmt);
3602 if (TREE_CODE (lhs) != SSA_NAME
3603 || has_zero_uses (lhs))
3605 gsi_next (&gsi);
3606 continue;
3609 /* If this statement sets an SSA_NAME to an address,
3610 try to propagate the address into the uses of the SSA_NAME. */
3611 if ((code == ADDR_EXPR
3612 /* Handle pointer conversions on invariant addresses
3613 as well, as this is valid gimple. */
3614 || (CONVERT_EXPR_CODE_P (code)
3615 && TREE_CODE (rhs) == ADDR_EXPR
3616 && POINTER_TYPE_P (TREE_TYPE (lhs))))
3617 && TREE_CODE (TREE_OPERAND (rhs, 0)) != TARGET_MEM_REF)
3619 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3620 if ((!base
3621 || !DECL_P (base)
3622 || decl_address_invariant_p (base))
3623 && !stmt_references_abnormal_ssa_name (stmt)
3624 && forward_propagate_addr_expr (lhs, rhs, true))
3626 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
3627 release_defs (stmt);
3628 gsi_remove (&gsi, true);
3630 else
3631 gsi_next (&gsi);
3633 else if (code == POINTER_PLUS_EXPR)
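	 /* A sketch of this case (names illustrative): for a constant offset
	      ptr_2 = ptr_1 + 4;
	    the offset is folded into an address expression, roughly
	    &MEM[ptr_1 + 4], which forward_propagate_addr_expr then tries to
	    propagate into the uses of ptr_2.  */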
3635 tree off = gimple_assign_rhs2 (stmt);
3636 if (TREE_CODE (off) == INTEGER_CST
3637 && can_propagate_from (stmt)
3638 && !simple_iv_increment_p (stmt)
3639 /* ??? Better adjust the interface to that function
3640 instead of building new trees here. */
3641 && forward_propagate_addr_expr
3642 (lhs,
3643 build1_loc (gimple_location (stmt),
3644 ADDR_EXPR, TREE_TYPE (rhs),
3645 fold_build2 (MEM_REF,
3646 TREE_TYPE (TREE_TYPE (rhs)),
3647 rhs,
3648 fold_convert (ptr_type_node,
3649 off))), true))
3651 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
3652 release_defs (stmt);
3653 gsi_remove (&gsi, true);
3655 else if (is_gimple_min_invariant (rhs))
3657 /* Make sure to fold &a[0] + off_1 here. */
3658 fold_stmt_inplace (&gsi);
3659 update_stmt (stmt);
3660 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
3661 gsi_next (&gsi);
3663 else
3664 gsi_next (&gsi);
3666 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
3667 && gimple_assign_load_p (stmt)
3668 && !gimple_has_volatile_ops (stmt)
3669 && (TREE_CODE (gimple_assign_rhs1 (stmt))
3670 != TARGET_MEM_REF)
3671 && !stmt_can_throw_internal (fun, stmt))
3673 /* Rewrite loads used only in real/imagpart extractions to
3674 component-wise loads. */
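	 /* Illustrative example (names made up):
	      tem_1 = mem;
	      x_2 = REALPART_EXPR <tem_1>;
	      y_3 = IMAGPART_EXPR <tem_1>;
	    becomes
	      x_2 = REALPART_EXPR <mem>;
	      y_3 = IMAGPART_EXPR <mem>;
	    when tem_1 has no other non-debug uses.  */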
3675 use_operand_p use_p;
3676 imm_use_iterator iter;
3677 bool rewrite = true;
3678 FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
3680 gimple *use_stmt = USE_STMT (use_p);
3681 if (is_gimple_debug (use_stmt))
3682 continue;
3683 if (!is_gimple_assign (use_stmt)
3684 || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
3685 && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR)
3686 || TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) != lhs)
3688 rewrite = false;
3689 break;
3692 if (rewrite)
3694 gimple *use_stmt;
3695 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
3697 if (is_gimple_debug (use_stmt))
3699 if (gimple_debug_bind_p (use_stmt))
3701 gimple_debug_bind_reset_value (use_stmt);
3702 update_stmt (use_stmt);
3704 continue;
3707 tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
3708 TREE_TYPE (TREE_TYPE (rhs)),
3709 unshare_expr (rhs));
3710 gimple *new_stmt
3711 = gimple_build_assign (gimple_assign_lhs (use_stmt),
3712 new_rhs);
3714 location_t loc = gimple_location (use_stmt);
3715 gimple_set_location (new_stmt, loc);
3716 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
3717 unlink_stmt_vdef (use_stmt);
3718 gsi_remove (&gsi2, true);
3720 gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
3723 release_defs (stmt);
3724 gsi_remove (&gsi, true);
3726 else
3727 gsi_next (&gsi);
3729 else if (TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE
3730 && (TYPE_MODE (TREE_TYPE (lhs)) == BLKmode
3731	 /* After vector lowering, rewrite all loads, but
3732	 initially do not, since this conflicts with the
3733	 vector CONSTRUCTOR to shuffle optimization.  */
3734 || (fun->curr_properties & PROP_gimple_lvec))
3735 && gimple_assign_load_p (stmt)
3736 && !gimple_has_volatile_ops (stmt)
3737 && !stmt_can_throw_internal (fun, stmt)
3738 && (!VAR_P (rhs) || !DECL_HARD_REGISTER (rhs)))
3739 optimize_vector_load (&gsi);
3741 else if (code == COMPLEX_EXPR)
3743 /* Rewrite stores of a single-use complex build expression
3744 to component-wise stores. */
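	 /* Illustrative example (names made up):
	      x_1 = COMPLEX_EXPR <a_2, b_3>;
	      mem = x_1;
	    becomes
	      REALPART_EXPR <mem> = a_2;
	      IMAGPART_EXPR <mem> = b_3;
	    when x_1 is used only in the store.  */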
3745 use_operand_p use_p;
3746 gimple *use_stmt, *def1, *def2;
3747 tree rhs2;
3748 if (single_imm_use (lhs, &use_p, &use_stmt)
3749 && gimple_store_p (use_stmt)
3750 && !gimple_has_volatile_ops (use_stmt)
3751 && is_gimple_assign (use_stmt)
3752 && (TREE_CODE (gimple_assign_lhs (use_stmt))
3753 != TARGET_MEM_REF))
3755 tree use_lhs = gimple_assign_lhs (use_stmt);
3756 if (auto_var_p (use_lhs))
3757 DECL_NOT_GIMPLE_REG_P (use_lhs) = 1;
3758 tree new_lhs = build1 (REALPART_EXPR,
3759 TREE_TYPE (TREE_TYPE (use_lhs)),
3760 unshare_expr (use_lhs));
3761 gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
3762 location_t loc = gimple_location (use_stmt);
3763 gimple_set_location (new_stmt, loc);
3764 gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
3765 gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (fun)));
3766 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
3767 gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
3768 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
3769 gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
3771 new_lhs = build1 (IMAGPART_EXPR,
3772 TREE_TYPE (TREE_TYPE (use_lhs)),
3773 unshare_expr (use_lhs));
3774 gimple_assign_set_lhs (use_stmt, new_lhs);
3775 gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
3776 update_stmt (use_stmt);
3778 release_defs (stmt);
3779 gsi_remove (&gsi, true);
3781 /* Rewrite a component-wise load of a complex to a complex
3782 load if the components are not used separately. */
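	 /* Illustrative example (names made up):
	      a_1 = REALPART_EXPR <mem>;
	      b_2 = IMAGPART_EXPR <mem>;
	      x_3 = COMPLEX_EXPR <a_1, b_2>;
	    becomes x_3 = mem when a_1 and b_2 have no other uses and both
	    loads see the same memory state.  */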
3783 else if (TREE_CODE (rhs) == SSA_NAME
3784 && has_single_use (rhs)
3785 && ((rhs2 = gimple_assign_rhs2 (stmt)), true)
3786 && TREE_CODE (rhs2) == SSA_NAME
3787 && has_single_use (rhs2)
3788 && (def1 = SSA_NAME_DEF_STMT (rhs),
3789 gimple_assign_load_p (def1))
3790 && (def2 = SSA_NAME_DEF_STMT (rhs2),
3791 gimple_assign_load_p (def2))
3792 && (gimple_vuse (def1) == gimple_vuse (def2))
3793 && !gimple_has_volatile_ops (def1)
3794 && !gimple_has_volatile_ops (def2)
3795 && !stmt_can_throw_internal (fun, def1)
3796 && !stmt_can_throw_internal (fun, def2)
3797 && gimple_assign_rhs_code (def1) == REALPART_EXPR
3798 && gimple_assign_rhs_code (def2) == IMAGPART_EXPR
3799 && operand_equal_p (TREE_OPERAND (gimple_assign_rhs1
3800 (def1), 0),
3801 TREE_OPERAND (gimple_assign_rhs1
3802 (def2), 0)))
3804 tree cl = TREE_OPERAND (gimple_assign_rhs1 (def1), 0);
3805 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (cl));
3806 gcc_assert (gsi_stmt (gsi) == stmt);
3807 gimple_set_vuse (stmt, gimple_vuse (def1));
3808 gimple_set_modified (stmt, true);
3809 gimple_stmt_iterator gsi2 = gsi_for_stmt (def1);
3810 gsi_remove (&gsi, false);
3811 gsi_insert_after (&gsi2, stmt, GSI_SAME_STMT);
3813 else
3814 gsi_next (&gsi);
3816 else if (code == CONSTRUCTOR
3817 && VECTOR_TYPE_P (TREE_TYPE (rhs))
3818 && TYPE_MODE (TREE_TYPE (rhs)) == BLKmode
3819 && CONSTRUCTOR_NELTS (rhs) > 0
3820 && (!VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
3821 || (TYPE_MODE (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
3822 != BLKmode)))
3824	 /* Rewrite stores of a single-use vector constructor
3825	 to component-wise stores if the mode isn't supported.  */
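	 /* Illustrative example (names made up), for a two-element vector:
	      v_1 = {a_2, b_3};
	      mem = v_1;
	    becomes
	      BIT_FIELD_REF <mem, eltsize, 0> = a_2;
	      BIT_FIELD_REF <mem, eltsize, eltsize> = b_3;
	    with trailing elements, if any, stored as zeros.  */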
3826 use_operand_p use_p;
3827 gimple *use_stmt;
3828 if (single_imm_use (lhs, &use_p, &use_stmt)
3829 && gimple_store_p (use_stmt)
3830 && !gimple_has_volatile_ops (use_stmt)
3831 && !stmt_can_throw_internal (fun, use_stmt)
3832 && is_gimple_assign (use_stmt))
3834 tree elt_t = TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value);
3835 unsigned HOST_WIDE_INT elt_w
3836 = tree_to_uhwi (TYPE_SIZE (elt_t));
3837 unsigned HOST_WIDE_INT n
3838 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs)));
3839 tree use_lhs = gimple_assign_lhs (use_stmt);
3840 if (auto_var_p (use_lhs))
3841 DECL_NOT_GIMPLE_REG_P (use_lhs) = 1;
3842 else if (TREE_CODE (use_lhs) == TARGET_MEM_REF)
3844 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
3845 use_lhs = prepare_target_mem_ref_lvalue (use_lhs, &gsi2);
3847 for (unsigned HOST_WIDE_INT bi = 0; bi < n; bi += elt_w)
3849 unsigned HOST_WIDE_INT ci = bi / elt_w;
3850 tree new_rhs;
3851 if (ci < CONSTRUCTOR_NELTS (rhs))
3852 new_rhs = CONSTRUCTOR_ELT (rhs, ci)->value;
3853 else
3854 new_rhs = build_zero_cst (elt_t);
3855 tree new_lhs = build3 (BIT_FIELD_REF,
3856 elt_t,
3857 unshare_expr (use_lhs),
3858 bitsize_int (elt_w),
3859 bitsize_int (bi));
3860 gimple *new_stmt = gimple_build_assign (new_lhs, new_rhs);
3861 location_t loc = gimple_location (use_stmt);
3862 gimple_set_location (new_stmt, loc);
3863 gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
3864 gimple_set_vdef (new_stmt,
3865 make_ssa_name (gimple_vop (fun)));
3866 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
3867 gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
3868 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
3869 gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
3871 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
3872 unlink_stmt_vdef (use_stmt);
3873 release_defs (use_stmt);
3874 gsi_remove (&gsi2, true);
3875 release_defs (stmt);
3876 gsi_remove (&gsi, true);
3878 else
3879 gsi_next (&gsi);
3881 else
3882 gsi_next (&gsi);
3885 /* Combine stmts with the stmts defining their operands.
3886 Note we update GSI within the loop as necessary. */
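	 /* A sketch of the kind of combination done here (names illustrative):
	    substituting lattice values and folding with fwprop_ssa_val as the
	    valueization hook can turn
	      tem_2 = a_1 < b_1;
	      if (tem_2 != 0)
	    into if (a_1 < b_1), possibly exposing further simplifications in
	    the following stmts.  */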
3887 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3889 gimple *stmt = gsi_stmt (gsi);
3891 /* Mark stmt as potentially needing revisiting. */
3892 gimple_set_plf (stmt, GF_PLF_1, false);
3894 bool can_make_abnormal_goto = (is_gimple_call (stmt)
3895 && stmt_can_make_abnormal_goto (stmt));
3897 /* Substitute from our lattice. We need to do so only once. */
3898 bool substituted_p = false;
3899 use_operand_p usep;
3900 ssa_op_iter iter;
3901 FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_USE)
3903 tree use = USE_FROM_PTR (usep);
3904 tree val = fwprop_ssa_val (use);
3905 if (val && val != use && may_propagate_copy (use, val))
3907 propagate_value (usep, val);
3908 substituted_p = true;
3911 if (substituted_p
3912 && is_gimple_assign (stmt)
3913 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
3914 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
3915 if (substituted_p
3916 && can_make_abnormal_goto
3917 && !stmt_can_make_abnormal_goto (stmt))
3918 bitmap_set_bit (need_ab_cleanup, bb->index);
3920 bool changed;
3923 gimple *orig_stmt = stmt = gsi_stmt (gsi);
3924 bool was_noreturn = (is_gimple_call (stmt)
3925 && gimple_call_noreturn_p (stmt));
3926 changed = false;
3928 if (fold_stmt (&gsi, fwprop_ssa_val))
3930 changed = true;
3931 stmt = gsi_stmt (gsi);
3932	 /* Clean up the CFG if we simplified a condition to
3933	 true or false.  */
3934 if (gcond *cond = dyn_cast <gcond *> (stmt))
3935 if (gimple_cond_true_p (cond)
3936 || gimple_cond_false_p (cond))
3937 cfg_changed = true;
3940 if (changed || substituted_p)
3942 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
3943 bitmap_set_bit (to_purge, bb->index);
3944 if (!was_noreturn
3945 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
3946 to_fixup.safe_push (stmt);
3947 update_stmt (stmt);
3948 substituted_p = false;
3951 switch (gimple_code (stmt))
3953 case GIMPLE_ASSIGN:
3955 tree rhs1 = gimple_assign_rhs1 (stmt);
3956 enum tree_code code = gimple_assign_rhs_code (stmt);
3958 if (TREE_CODE_CLASS (code) == tcc_comparison)
3960 int did_something;
3961 did_something = forward_propagate_into_comparison (&gsi);
3962 if (maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (gsi)))
3963 bitmap_set_bit (to_purge, bb->index);
3964 if (did_something == 2)
3965 cfg_changed = true;
3966 changed = did_something != 0;
3968 else if ((code == PLUS_EXPR
3969 || code == BIT_IOR_EXPR
3970 || code == BIT_XOR_EXPR)
3971 && simplify_rotate (&gsi))
3972 changed = true;
3973 else if (code == VEC_PERM_EXPR)
3975 int did_something = simplify_permutation (&gsi);
3976 if (did_something == 2)
3977 cfg_changed = true;
3978 changed = did_something != 0;
3980 else if (code == BIT_FIELD_REF)
3981 changed = simplify_bitfield_ref (&gsi);
3982 else if (code == CONSTRUCTOR
3983 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
3984 changed = simplify_vector_constructor (&gsi);
3985 else if (code == ARRAY_REF)
3986 changed = simplify_count_trailing_zeroes (&gsi);
3987 break;
3990 case GIMPLE_SWITCH:
3991 changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
3992 break;
3994 case GIMPLE_COND:
3996 int did_something = forward_propagate_into_gimple_cond
3997 (as_a <gcond *> (stmt));
3998 if (did_something == 2)
3999 cfg_changed = true;
4000 changed = did_something != 0;
4001 break;
4004 case GIMPLE_CALL:
4006 tree callee = gimple_call_fndecl (stmt);
4007 if (callee != NULL_TREE
4008 && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
4009 changed = simplify_builtin_call (&gsi, callee);
4010 break;
4013 default:;
4016 if (changed)
4018 /* If the stmt changed then re-visit it and the statements
4019 inserted before it. */
4020 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
4021 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
4022 break;
4023 if (gsi_end_p (gsi))
4024 gsi = gsi_start_bb (bb);
4025 else
4026 gsi_next (&gsi);
4029 while (changed);
4031 /* Stmt no longer needs to be revisited. */
4032 stmt = gsi_stmt (gsi);
4033 gcc_checking_assert (!gimple_plf (stmt, GF_PLF_1));
4034 gimple_set_plf (stmt, GF_PLF_1, true);
4036 /* Fill up the lattice. */
4037 if (gimple_assign_single_p (stmt))
4039 tree lhs = gimple_assign_lhs (stmt);
4040 tree rhs = gimple_assign_rhs1 (stmt);
4041 if (TREE_CODE (lhs) == SSA_NAME)
4043 tree val = lhs;
4044 if (TREE_CODE (rhs) == SSA_NAME)
4045 val = fwprop_ssa_val (rhs);
4046 else if (is_gimple_min_invariant (rhs))
4047 val = rhs;
4048	 /* If we can propagate the lattice value, mark the
4049	 stmt for removal.  */
4050 if (val != lhs
4051 && may_propagate_copy (lhs, val))
4052 to_remove.safe_push (stmt);
4053 fwprop_set_lattice_val (lhs, val);
4056 else if (gimple_nop_p (stmt))
4057 to_remove.safe_push (stmt);
4060 /* Substitute in destination PHI arguments. */
4061 FOR_EACH_EDGE (e, ei, bb->succs)
4062 for (gphi_iterator gsi = gsi_start_phis (e->dest);
4063 !gsi_end_p (gsi); gsi_next (&gsi))
4065 gphi *phi = gsi.phi ();
4066 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
4067 tree arg = USE_FROM_PTR (use_p);
4068 if (TREE_CODE (arg) != SSA_NAME
4069 || virtual_operand_p (arg))
4070 continue;
4071 tree val = fwprop_ssa_val (arg);
4072 if (val != arg
4073 && may_propagate_copy (arg, val))
4074 propagate_value (use_p, val);
4077	 /* Mark outgoing executable edges.  */
4078 if (edge e = find_taken_edge (bb, NULL))
4080 e->flags |= EDGE_EXECUTABLE;
4081 if (EDGE_COUNT (bb->succs) > 1)
4082 cfg_changed = true;
4084 else
4086 FOR_EACH_EDGE (e, ei, bb->succs)
4087 e->flags |= EDGE_EXECUTABLE;
4090 free (postorder);
4091 free (bb_to_rpo);
4092 lattice.release ();
4094 /* Remove stmts in reverse order to make debug stmt creation possible. */
4095	 while (!to_remove.is_empty ())
4097 gimple *stmt = to_remove.pop ();
4098	 /* For example, remove_prop_source_from_use can remove stmts queued
4099 for removal. Deal with this gracefully. */
4100 if (!gimple_bb (stmt))
4101 continue;
4102 if (dump_file && (dump_flags & TDF_DETAILS))
4104 fprintf (dump_file, "Removing dead stmt ");
4105 print_gimple_stmt (dump_file, stmt, 0);
4106 fprintf (dump_file, "\n");
4108 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4109 if (gimple_code (stmt) == GIMPLE_PHI)
4110 remove_phi_node (&gsi, true);
4111 else
4113 unlink_stmt_vdef (stmt);
4114 gsi_remove (&gsi, true);
4115 release_defs (stmt);
4119	 /* Fix up stmts that became noreturn calls.  This may require splitting
4120	 blocks and thus isn't possible during the walk.  Do this
4121	 in reverse order so we don't inadvertently remove a stmt we want to
4122	 fix up by visiting a dominating now-noreturn call first.  */
4123 while (!to_fixup.is_empty ())
4125 gimple *stmt = to_fixup.pop ();
4126	 if (dump_file && (dump_flags & TDF_DETAILS))
4128 fprintf (dump_file, "Fixing up noreturn call ");
4129 print_gimple_stmt (dump_file, stmt, 0);
4130 fprintf (dump_file, "\n");
4132 cfg_changed |= fixup_noreturn_call (stmt);
4135 cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
4136 cfg_changed |= gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
4137 BITMAP_FREE (to_purge);
4138 BITMAP_FREE (need_ab_cleanup);
4140 if (get_range_query (fun) != get_global_range_query ())
4141 disable_ranger (fun);
4143 if (cfg_changed)
4144 todoflags |= TODO_cleanup_cfg;
4146 return todoflags;
4149 } // anon namespace
4151 gimple_opt_pass *
4152 make_pass_forwprop (gcc::context *ctxt)
4154 return new pass_forwprop (ctxt);