gcc/tree-ssa-forwprop.cc
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004-2024 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "cfghooks.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "optabs-query.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stor-layout.h"
35 #include "gimple-iterator.h"
36 #include "gimple-fold.h"
37 #include "tree-eh.h"
38 #include "gimplify.h"
39 #include "gimplify-me.h"
40 #include "tree-cfg.h"
41 #include "expr.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa-propagate.h"
44 #include "tree-ssa-dom.h"
45 #include "tree-ssa-strlen.h"
46 #include "builtins.h"
47 #include "tree-cfgcleanup.h"
48 #include "cfganal.h"
49 #include "optabs-tree.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "tree-vector-builder.h"
53 #include "vec-perm-indices.h"
54 #include "internal-fn.h"
55 #include "cgraph.h"
56 #include "tree-ssa.h"
57 #include "gimple-range.h"
58 #include "tree-ssa-dce.h"
60 /* This pass propagates the RHS of assignment statements into use
61 sites of the LHS of the assignment. It's basically a specialized
62 form of tree combination. It is hoped all of this can disappear
63 when we have a generalized tree combiner.
65 One class of common cases we handle is forward propagating a single use
66 variable into a COND_EXPR.
68 bb0:
69 x = a COND b;
70 if (x) goto ... else goto ...
72 Will be transformed into:
74 bb0:
75 if (a COND b) goto ... else goto ...
77 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
79 Or (assuming c1 and c2 are constants):
81 bb0:
82 x = a + c1;
83 if (x EQ/NEQ c2) goto ... else goto ...
85 Will be transformed into:
87 bb0:
88 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
90 Similarly for x = a - c1.
94 bb0:
95 x = !a
96 if (x) goto ... else goto ...
98 Will be transformed into:
100 bb0:
101 if (a == 0) goto ... else goto ...
103 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
104 For these cases, we propagate A into all, possibly more than one,
105 COND_EXPRs that use X.
109 bb0:
110 x = (typecast) a
111 if (x) goto ... else goto ...
113 Will be transformed into:
115 bb0:
116 if (a != 0) goto ... else goto ...
118 (Assuming a is an integral type and x is a boolean or x is an
119 integral and a is a boolean.)
121 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
122 For these cases, we propagate A into all, possibly more than one,
123 COND_EXPRs that use X.
125 In addition to eliminating the variable and the statement which assigns
126 a value to the variable, we may be able to later thread the jump without
127 adding insane complexity in the dominator optimizer.
129 Also note these transformations can cascade. We handle this by having
130 a worklist of COND_EXPR statements to examine. As we make a change to
131 a statement, we put it back on the worklist to examine on the next
132 iteration of the main loop.
134 A second class of propagation opportunities arises for ADDR_EXPR
135 nodes.
137 ptr = &x->y->z;
138 res = *ptr;
140 Will get turned into
142 res = x->y->z;
145 ptr = (type1*)&type2var;
146 res = *ptr
148 Will get turned into (if type1 and type2 are the same size
149 and neither have volatile on them):
150 res = VIEW_CONVERT_EXPR<type1>(type2var)
154 ptr = &x[0];
155 ptr2 = ptr + <constant>;
157 Will get turned into
159 ptr2 = &x[constant/elementsize];
163 ptr = &x[0];
164 offset = index * element_size;
165 offset_p = (pointer) offset;
166 ptr2 = ptr + offset_p
168 Will get turned into:
170 ptr2 = &x[index];
173 ssa = (int) decl
174 res = ssa & 1
176 Provided that decl has known alignment >= 2, will get turned into
178 res = 0
180 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
181 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
182 {NOT_EXPR,NEG_EXPR}.
184 This will (of course) be extended as other needs arise. */
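/* As a concrete, purely illustrative example of the first class of
   transformations above (names and values are hypothetical), for C source
   along the lines of

     int f (int i)
     {
       int x = i + 4;
       if (x != 6)
         return 1;
       return 0;
     }

   the addition is forward propagated into the test, which then becomes the
   equivalent of "if (i != 2)", and the assignment to x becomes dead.  */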
186 static bool forward_propagate_addr_expr (tree, tree, bool);
188 /* Set to true if we delete dead edges during the optimization. */
189 static bool cfg_changed;
191 static tree rhs_to_tree (tree type, gimple *stmt);
193 static bitmap to_purge;
195 /* Const-and-copy lattice. */
196 static vec<tree> lattice;
198 /* Set the lattice entry for NAME to VAL. */
199 static void
200 fwprop_set_lattice_val (tree name, tree val)
202 if (TREE_CODE (name) == SSA_NAME)
204 if (SSA_NAME_VERSION (name) >= lattice.length ())
206 lattice.reserve (num_ssa_names - lattice.length ());
207 lattice.quick_grow_cleared (num_ssa_names);
209 lattice[SSA_NAME_VERSION (name)] = val;
213 /* Invalidate the lattice entry for NAME, done when releasing SSA names. */
214 static void
215 fwprop_invalidate_lattice (tree name)
217 if (name
218 && TREE_CODE (name) == SSA_NAME
219 && SSA_NAME_VERSION (name) < lattice.length ())
220 lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
224 /* Get the statement we can propagate from into NAME skipping
225 trivial copies. Returns the statement which defines the
226 propagation source or NULL_TREE if there is no such one.
227 If SINGLE_USE_ONLY is set considers only sources which have
228 a single use chain up to NAME. If SINGLE_USE_P is non-null,
229 it is set to whether the chain to NAME is a single use chain
230 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
232 static gimple *
233 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
235 bool single_use = true;
237 do {
238 gimple *def_stmt = SSA_NAME_DEF_STMT (name);
240 if (!has_single_use (name))
242 single_use = false;
243 if (single_use_only)
244 return NULL;
247 /* If name is defined by a PHI node or is the default def, bail out. */
248 if (!is_gimple_assign (def_stmt))
249 return NULL;
251 /* If def_stmt is a simple copy, continue looking. */
252 if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
253 name = gimple_assign_rhs1 (def_stmt);
254 else
256 if (!single_use_only && single_use_p)
257 *single_use_p = single_use;
259 return def_stmt;
261 } while (1);
264 /* Checks if the destination ssa name in DEF_STMT can be used as
265 propagation source. Returns true if so, otherwise false. */
267 static bool
268 can_propagate_from (gimple *def_stmt)
270 gcc_assert (is_gimple_assign (def_stmt));
272 /* If the rhs has side-effects we cannot propagate from it. */
273 if (gimple_has_volatile_ops (def_stmt))
274 return false;
276 /* If the rhs is a load we cannot propagate from it. */
277 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
278 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
279 return false;
281 /* Constants can be always propagated. */
282 if (gimple_assign_single_p (def_stmt)
283 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
284 return true;
286 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
287 if (stmt_references_abnormal_ssa_name (def_stmt))
288 return false;
290 /* If the definition is a conversion of a pointer to a function type,
291 then we cannot apply optimizations as some targets require
292 function pointers to be canonicalized and in this case this
293 optimization could eliminate a necessary canonicalization. */
294 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
296 tree rhs = gimple_assign_rhs1 (def_stmt);
297 if (FUNCTION_POINTER_TYPE_P (TREE_TYPE (rhs)))
298 return false;
301 return true;
304 /* Remove a chain of dead statements starting at the definition of
305 NAME. The chain is linked via the first operand of the defining statements.
306 If NAME was replaced in its only use then this function can be used
307 to clean up dead stmts. The function handles already released SSA
308 names gracefully.
309 Returns true if cleanup-cfg has to run. */
311 static bool
312 remove_prop_source_from_use (tree name)
314 gimple_stmt_iterator gsi;
315 gimple *stmt;
316 bool cfg_changed = false;
318 do {
319 basic_block bb;
321 if (SSA_NAME_IN_FREE_LIST (name)
322 || SSA_NAME_IS_DEFAULT_DEF (name)
323 || !has_zero_uses (name))
324 return cfg_changed;
326 stmt = SSA_NAME_DEF_STMT (name);
327 if (gimple_code (stmt) == GIMPLE_PHI
328 || gimple_has_side_effects (stmt))
329 return cfg_changed;
331 bb = gimple_bb (stmt);
332 gsi = gsi_for_stmt (stmt);
333 unlink_stmt_vdef (stmt);
334 if (gsi_remove (&gsi, true))
335 bitmap_set_bit (to_purge, bb->index);
336 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
337 release_defs (stmt);
339 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
340 } while (name && TREE_CODE (name) == SSA_NAME);
342 return cfg_changed;
345 /* Return the rhs of a gassign *STMT in a form of a single tree,
346 converted to type TYPE.
348 This should disappear, but is needed so we can combine expressions and use
349 the fold() interfaces. Long term, we need to develop folding and combine
 350    routines that deal with gimple exclusively.  */
352 static tree
353 rhs_to_tree (tree type, gimple *stmt)
355 location_t loc = gimple_location (stmt);
356 enum tree_code code = gimple_assign_rhs_code (stmt);
357 switch (get_gimple_rhs_class (code))
359 case GIMPLE_TERNARY_RHS:
360 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
361 gimple_assign_rhs2 (stmt),
362 gimple_assign_rhs3 (stmt));
363 case GIMPLE_BINARY_RHS:
364 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
365 gimple_assign_rhs2 (stmt));
366 case GIMPLE_UNARY_RHS:
367 return build1 (code, type, gimple_assign_rhs1 (stmt));
368 case GIMPLE_SINGLE_RHS:
369 return gimple_assign_rhs1 (stmt);
370 default:
371 gcc_unreachable ();
375 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
376 the folded result in a form suitable for COND_EXPR_COND or
377 NULL_TREE, if there is no suitable simplified form. If
378 INVARIANT_ONLY is true only gimple_min_invariant results are
379 considered simplified. */
381 static tree
382 combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
383 tree op0, tree op1, bool invariant_only)
385 tree t;
387 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
389 fold_defer_overflow_warnings ();
390 t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
391 if (!t)
393 fold_undefer_overflow_warnings (false, NULL, 0);
394 return NULL_TREE;
397 /* Require that we got a boolean type out if we put one in. */
398 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
400 /* Canonicalize the combined condition for use in a COND_EXPR. */
401 t = canonicalize_cond_expr_cond (t);
403 /* Bail out if we required an invariant but didn't get one. */
404 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
406 fold_undefer_overflow_warnings (false, NULL, 0);
407 return NULL_TREE;
410 bool nowarn = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
411 fold_undefer_overflow_warnings (!nowarn, stmt, 0);
413 return t;
416 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
417 of its operand. Return a new comparison tree or NULL_TREE if there
418 were no simplifying combines. */
420 static tree
421 forward_propagate_into_comparison_1 (gimple *stmt,
422 enum tree_code code, tree type,
423 tree op0, tree op1)
425 tree tmp = NULL_TREE;
426 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
427 bool single_use0_p = false, single_use1_p = false;
429 /* For comparisons use the first operand, that is likely to
430 simplify comparisons against constants. */
431 if (TREE_CODE (op0) == SSA_NAME)
433 gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
434 if (def_stmt && can_propagate_from (def_stmt))
436 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
437 bool invariant_only_p = !single_use0_p;
439 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
441 /* Always combine comparisons or conversions from booleans. */
442 if (TREE_CODE (op1) == INTEGER_CST
443 && ((CONVERT_EXPR_CODE_P (def_code)
444 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
445 == BOOLEAN_TYPE)
446 || TREE_CODE_CLASS (def_code) == tcc_comparison))
447 invariant_only_p = false;
449 tmp = combine_cond_expr_cond (stmt, code, type,
450 rhs0, op1, invariant_only_p);
451 if (tmp)
452 return tmp;
456 /* If that wasn't successful, try the second operand. */
457 if (TREE_CODE (op1) == SSA_NAME)
459 gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
460 if (def_stmt && can_propagate_from (def_stmt))
462 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
463 tmp = combine_cond_expr_cond (stmt, code, type,
464 op0, rhs1, !single_use1_p);
465 if (tmp)
466 return tmp;
470 /* If that wasn't successful either, try both operands. */
471 if (rhs0 != NULL_TREE
472 && rhs1 != NULL_TREE)
473 tmp = combine_cond_expr_cond (stmt, code, type,
474 rhs0, rhs1,
475 !(single_use0_p && single_use1_p));
477 return tmp;
480 /* Propagate from the ssa name definition statements of the assignment
481 from a comparison at *GSI into the conditional if that simplifies it.
482 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
483 otherwise returns 0. */
485 static int
486 forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
488 gimple *stmt = gsi_stmt (*gsi);
489 tree tmp;
490 bool cfg_changed = false;
491 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
492 tree rhs1 = gimple_assign_rhs1 (stmt);
493 tree rhs2 = gimple_assign_rhs2 (stmt);
495 /* Combine the comparison with defining statements. */
496 tmp = forward_propagate_into_comparison_1 (stmt,
497 gimple_assign_rhs_code (stmt),
498 type, rhs1, rhs2);
499 if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
501 gimple_assign_set_rhs_from_tree (gsi, tmp);
502 fold_stmt (gsi);
503 update_stmt (gsi_stmt (*gsi));
505 if (TREE_CODE (rhs1) == SSA_NAME)
506 cfg_changed |= remove_prop_source_from_use (rhs1);
507 if (TREE_CODE (rhs2) == SSA_NAME)
508 cfg_changed |= remove_prop_source_from_use (rhs2);
509 return cfg_changed ? 2 : 1;
512 return 0;
515 /* Propagate from the ssa name definition statements of COND_EXPR
516 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
517 Returns zero if no statement was changed, one if there were
518 changes and two if cfg_cleanup needs to run. */
520 static int
521 forward_propagate_into_gimple_cond (gcond *stmt)
523 tree tmp;
524 enum tree_code code = gimple_cond_code (stmt);
525 bool cfg_changed = false;
526 tree rhs1 = gimple_cond_lhs (stmt);
527 tree rhs2 = gimple_cond_rhs (stmt);
529 /* We can do tree combining on SSA_NAME and comparison expressions. */
530 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
531 return 0;
533 tmp = forward_propagate_into_comparison_1 (stmt, code,
534 boolean_type_node,
535 rhs1, rhs2);
536 if (tmp
537 && is_gimple_condexpr_for_cond (tmp))
539 if (dump_file)
541 fprintf (dump_file, " Replaced '");
542 print_gimple_expr (dump_file, stmt, 0);
543 fprintf (dump_file, "' with '");
544 print_generic_expr (dump_file, tmp);
545 fprintf (dump_file, "'\n");
548 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
549 update_stmt (stmt);
551 if (TREE_CODE (rhs1) == SSA_NAME)
552 cfg_changed |= remove_prop_source_from_use (rhs1);
553 if (TREE_CODE (rhs2) == SSA_NAME)
554 cfg_changed |= remove_prop_source_from_use (rhs2);
555 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
558 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
559 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
560 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
561 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
562 && ((code == EQ_EXPR
563 && integer_zerop (rhs2))
564 || (code == NE_EXPR
565 && integer_onep (rhs2))))
567 basic_block bb = gimple_bb (stmt);
568 gimple_cond_set_code (stmt, NE_EXPR);
569 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
570 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
571 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
572 return 1;
575 return 0;
578 /* We've just substituted an ADDR_EXPR into stmt. Update all the
579 relevant data structures to match. */
581 static void
582 tidy_after_forward_propagate_addr (gimple *stmt)
584 /* We may have turned a trapping insn into a non-trapping insn. */
585 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
586 bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
588 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
589 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
592 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
593 ADDR_EXPR <whatever>.
595 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
596 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
597 node or for recovery of array indexing from pointer arithmetic.
599 Return true if the propagation was successful (the propagation can
600 be not totally successful, yet things may have been changed). */
602 static bool
603 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
604 gimple_stmt_iterator *use_stmt_gsi,
605 bool single_use_p)
607 tree lhs, rhs, rhs2, array_ref;
608 gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
609 enum tree_code rhs_code;
610 bool res = true;
612 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
614 lhs = gimple_assign_lhs (use_stmt);
615 rhs_code = gimple_assign_rhs_code (use_stmt);
616 rhs = gimple_assign_rhs1 (use_stmt);
618 /* Do not perform copy-propagation but recurse through copy chains. */
619 if (TREE_CODE (lhs) == SSA_NAME
620 && rhs_code == SSA_NAME)
621 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
623 /* The use statement could be a conversion. Recurse to the uses of the
624 lhs as copyprop does not copy through pointer to integer to pointer
625 conversions and FRE does not catch all cases either.
626 Treat the case of a single-use name and
 627    a conversion to def_rhs type separately, though.  */
628 if (TREE_CODE (lhs) == SSA_NAME
629 && CONVERT_EXPR_CODE_P (rhs_code))
631 /* If there is a point in a conversion chain where the types match
632 so we can remove a conversion re-materialize the address here
633 and stop. */
634 if (single_use_p
635 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
637 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
638 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
639 return true;
642 /* Else recurse if the conversion preserves the address value. */
643 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
644 || POINTER_TYPE_P (TREE_TYPE (lhs)))
645 && (TYPE_PRECISION (TREE_TYPE (lhs))
646 >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
647 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
649 return false;
652 /* If this isn't a conversion chain from this on we only can propagate
653 into compatible pointer contexts. */
654 if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
655 return false;
657 /* Propagate through constant pointer adjustments. */
658 if (TREE_CODE (lhs) == SSA_NAME
659 && rhs_code == POINTER_PLUS_EXPR
660 && rhs == name
661 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
663 tree new_def_rhs;
664 /* As we come here with non-invariant addresses in def_rhs we need
665 to make sure we can build a valid constant offsetted address
666 for further propagation. Simply rely on fold building that
667 and check after the fact. */
668 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
669 def_rhs,
670 fold_convert (ptr_type_node,
671 gimple_assign_rhs2 (use_stmt)));
672 if (TREE_CODE (new_def_rhs) == MEM_REF
673 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
674 return false;
675 new_def_rhs = build1 (ADDR_EXPR, TREE_TYPE (rhs), new_def_rhs);
677 /* Recurse. If we could propagate into all uses of lhs do not
678 bother to replace into the current use but just pretend we did. */
679 if (forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
680 return true;
682 if (useless_type_conversion_p (TREE_TYPE (lhs),
683 TREE_TYPE (new_def_rhs)))
684 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
685 new_def_rhs);
686 else if (is_gimple_min_invariant (new_def_rhs))
687 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
688 else
689 return false;
690 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
691 update_stmt (use_stmt);
692 return true;
695 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
696 ADDR_EXPR will not appear on the LHS. */
697 tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
698 while (handled_component_p (*lhsp))
699 lhsp = &TREE_OPERAND (*lhsp, 0);
700 lhs = *lhsp;
702 /* Now see if the LHS node is a MEM_REF using NAME. If so,
703 propagate the ADDR_EXPR into the use of NAME and fold the result. */
704 if (TREE_CODE (lhs) == MEM_REF
705 && TREE_OPERAND (lhs, 0) == name)
707 tree def_rhs_base;
708 poly_int64 def_rhs_offset;
709 /* If the address is invariant we can always fold it. */
710 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
711 &def_rhs_offset)))
713 poly_offset_int off = mem_ref_offset (lhs);
714 tree new_ptr;
715 off += def_rhs_offset;
716 if (TREE_CODE (def_rhs_base) == MEM_REF)
718 off += mem_ref_offset (def_rhs_base);
719 new_ptr = TREE_OPERAND (def_rhs_base, 0);
721 else
722 new_ptr = build_fold_addr_expr (def_rhs_base);
723 TREE_OPERAND (lhs, 0) = new_ptr;
724 TREE_OPERAND (lhs, 1)
725 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
726 tidy_after_forward_propagate_addr (use_stmt);
727 /* Continue propagating into the RHS if this was not the only use. */
728 if (single_use_p)
729 return true;
731 /* If the LHS is a plain dereference and the value type is the same as
732 that of the pointed-to type of the address we can put the
733 dereferenced address on the LHS preserving the original alias-type. */
734 else if (integer_zerop (TREE_OPERAND (lhs, 1))
735 && ((gimple_assign_lhs (use_stmt) == lhs
736 && useless_type_conversion_p
737 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
738 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
739 || types_compatible_p (TREE_TYPE (lhs),
740 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
741 /* Don't forward anything into clobber stmts if it would result
742 in the lhs no longer being a MEM_REF. */
743 && (!gimple_clobber_p (use_stmt)
744 || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
746 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
747 tree new_offset, new_base, saved, new_lhs;
748 while (handled_component_p (*def_rhs_basep))
749 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
750 saved = *def_rhs_basep;
751 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
753 new_base = TREE_OPERAND (*def_rhs_basep, 0);
754 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
755 TREE_OPERAND (*def_rhs_basep, 1));
757 else
759 new_base = build_fold_addr_expr (*def_rhs_basep);
760 new_offset = TREE_OPERAND (lhs, 1);
762 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
763 new_base, new_offset);
764 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
765 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
766 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
767 new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
768 *lhsp = new_lhs;
769 TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
770 TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
771 *def_rhs_basep = saved;
772 tidy_after_forward_propagate_addr (use_stmt);
773 /* Continue propagating into the RHS if this was not the
774 only use. */
775 if (single_use_p)
776 return true;
778 else
779 /* We can have a struct assignment dereferencing our name twice.
780 Note that we didn't propagate into the lhs to not falsely
781 claim we did when propagating into the rhs. */
782 res = false;
785 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
786 nodes from the RHS. */
787 tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
788 if (TREE_CODE (*rhsp) == ADDR_EXPR)
789 rhsp = &TREE_OPERAND (*rhsp, 0);
790 while (handled_component_p (*rhsp))
791 rhsp = &TREE_OPERAND (*rhsp, 0);
792 rhs = *rhsp;
794 /* Now see if the RHS node is a MEM_REF using NAME. If so,
795 propagate the ADDR_EXPR into the use of NAME and fold the result. */
796 if (TREE_CODE (rhs) == MEM_REF
797 && TREE_OPERAND (rhs, 0) == name)
799 tree def_rhs_base;
800 poly_int64 def_rhs_offset;
801 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
802 &def_rhs_offset)))
804 poly_offset_int off = mem_ref_offset (rhs);
805 tree new_ptr;
806 off += def_rhs_offset;
807 if (TREE_CODE (def_rhs_base) == MEM_REF)
809 off += mem_ref_offset (def_rhs_base);
810 new_ptr = TREE_OPERAND (def_rhs_base, 0);
812 else
813 new_ptr = build_fold_addr_expr (def_rhs_base);
814 TREE_OPERAND (rhs, 0) = new_ptr;
815 TREE_OPERAND (rhs, 1)
816 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
817 fold_stmt_inplace (use_stmt_gsi);
818 tidy_after_forward_propagate_addr (use_stmt);
819 return res;
821 /* If the RHS is a plain dereference and the value type is the same as
822 that of the pointed-to type of the address we can put the
823 dereferenced address on the RHS preserving the original alias-type. */
824 else if (integer_zerop (TREE_OPERAND (rhs, 1))
825 && ((gimple_assign_rhs1 (use_stmt) == rhs
826 && useless_type_conversion_p
827 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
828 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
829 || types_compatible_p (TREE_TYPE (rhs),
830 TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
832 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
833 tree new_offset, new_base, saved, new_rhs;
834 while (handled_component_p (*def_rhs_basep))
835 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
836 saved = *def_rhs_basep;
837 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
839 new_base = TREE_OPERAND (*def_rhs_basep, 0);
840 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
841 TREE_OPERAND (*def_rhs_basep, 1));
843 else
845 new_base = build_fold_addr_expr (*def_rhs_basep);
846 new_offset = TREE_OPERAND (rhs, 1);
848 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
849 new_base, new_offset);
850 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
851 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
852 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
853 new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
854 *rhsp = new_rhs;
855 TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
856 TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
857 *def_rhs_basep = saved;
858 fold_stmt_inplace (use_stmt_gsi);
859 tidy_after_forward_propagate_addr (use_stmt);
860 return res;
864 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
865 is nothing to do. */
866 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
867 || gimple_assign_rhs1 (use_stmt) != name)
868 return false;
870 /* The remaining cases are all for turning pointer arithmetic into
871 array indexing. They only apply when we have the address of
872 element zero in an array. If that is not the case then there
873 is nothing to do. */
874 array_ref = TREE_OPERAND (def_rhs, 0);
875 if ((TREE_CODE (array_ref) != ARRAY_REF
876 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
877 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
878 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
879 return false;
881 rhs2 = gimple_assign_rhs2 (use_stmt);
882 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
883 if (TREE_CODE (rhs2) == INTEGER_CST)
885 tree new_rhs = build1_loc (gimple_location (use_stmt),
886 ADDR_EXPR, TREE_TYPE (def_rhs),
887 fold_build2 (MEM_REF,
888 TREE_TYPE (TREE_TYPE (def_rhs)),
889 unshare_expr (def_rhs),
890 fold_convert (ptr_type_node,
891 rhs2)));
892 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
893 use_stmt = gsi_stmt (*use_stmt_gsi);
894 update_stmt (use_stmt);
895 tidy_after_forward_propagate_addr (use_stmt);
896 return true;
899 return false;
902 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
904 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
905 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
906 node or for recovery of array indexing from pointer arithmetic.
908 PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
909 the single use in the previous invocation. Pass true when calling
910 this as toplevel.
912 Returns true, if all uses have been propagated into. */
914 static bool
915 forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
917 imm_use_iterator iter;
918 gimple *use_stmt;
919 bool all = true;
920 bool single_use_p = parent_single_use_p && has_single_use (name);
922 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
924 bool result;
925 tree use_rhs;
927 /* If the use is not in a simple assignment statement, then
928 there is nothing we can do. */
929 if (!is_gimple_assign (use_stmt))
931 if (!is_gimple_debug (use_stmt))
932 all = false;
933 continue;
936 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
937 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
938 single_use_p);
939 /* If the use has moved to a different statement adjust
940 the update machinery for the old statement too. */
941 if (use_stmt != gsi_stmt (gsi))
943 update_stmt (use_stmt);
944 use_stmt = gsi_stmt (gsi);
946 update_stmt (use_stmt);
947 all &= result;
949 /* Remove intermediate now unused copy and conversion chains. */
950 use_rhs = gimple_assign_rhs1 (use_stmt);
951 if (result
952 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
953 && TREE_CODE (use_rhs) == SSA_NAME
954 && has_zero_uses (gimple_assign_lhs (use_stmt)))
956 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
957 fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
958 release_defs (use_stmt);
959 gsi_remove (&gsi, true);
963 return all && has_zero_uses (name);
967 /* Helper function for simplify_gimple_switch. Remove case labels that
968 have values outside the range of the new type. */
970 static void
971 simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
973 unsigned int branch_num = gimple_switch_num_labels (stmt);
974 auto_vec<tree> labels (branch_num);
975 unsigned int i, len;
977 /* Collect the existing case labels in a VEC, and preprocess it as if
978 we are gimplifying a GENERIC SWITCH_EXPR. */
979 for (i = 1; i < branch_num; i++)
980 labels.quick_push (gimple_switch_label (stmt, i));
981 preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
983 /* If any labels were removed, replace the existing case labels
984 in the GIMPLE_SWITCH statement with the correct ones.
985 Note that the type updates were done in-place on the case labels,
986 so we only have to replace the case labels in the GIMPLE_SWITCH
987 if the number of labels changed. */
988 len = labels.length ();
989 if (len < branch_num - 1)
991 bitmap target_blocks;
992 edge_iterator ei;
993 edge e;
995 /* Corner case: *all* case labels have been removed as being
996 out-of-range for INDEX_TYPE. Push one label and let the
997 CFG cleanups deal with this further. */
998 if (len == 0)
1000 tree label, elt;
1002 label = CASE_LABEL (gimple_switch_default_label (stmt));
1003 elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
1004 labels.quick_push (elt);
1005 len = 1;
1008 for (i = 0; i < labels.length (); i++)
1009 gimple_switch_set_label (stmt, i + 1, labels[i]);
1010 for (i++ ; i < branch_num; i++)
1011 gimple_switch_set_label (stmt, i, NULL_TREE);
1012 gimple_switch_set_num_labels (stmt, len + 1);
1014 /* Cleanup any edges that are now dead. */
1015 target_blocks = BITMAP_ALLOC (NULL);
1016 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
1018 tree elt = gimple_switch_label (stmt, i);
1019 basic_block target = label_to_block (cfun, CASE_LABEL (elt));
1020 bitmap_set_bit (target_blocks, target->index);
1022 for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
1024 if (! bitmap_bit_p (target_blocks, e->dest->index))
1026 remove_edge (e);
1027 cfg_changed = true;
1028 free_dominance_info (CDI_DOMINATORS);
1030 else
1031 ei_next (&ei);
1033 BITMAP_FREE (target_blocks);
1037 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1038 the condition which we may be able to optimize better. */
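/* For instance (hypothetical source), given

     unsigned char c = ...;
     switch ((int) c) { case 1: ...; case 7: ...; default: ...; }

   all case values fit in unsigned char, so the cast is removed and the
   switch is performed on c directly.  */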
1040 static bool
1041 simplify_gimple_switch (gswitch *stmt)
1043 /* The optimization that we really care about is removing unnecessary
1044 casts. That will let us do much better in propagating the inferred
1045 constant at the switch target. */
1046 tree cond = gimple_switch_index (stmt);
1047 if (TREE_CODE (cond) == SSA_NAME)
1049 gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
1050 if (gimple_assign_cast_p (def_stmt))
1052 tree def = gimple_assign_rhs1 (def_stmt);
1053 if (TREE_CODE (def) != SSA_NAME)
1054 return false;
1056 /* If we have an extension or sign-change that preserves the
1057 values we check against then we can copy the source value into
1058 the switch. */
1059 tree ti = TREE_TYPE (def);
1060 if (INTEGRAL_TYPE_P (ti)
1061 && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
1063 size_t n = gimple_switch_num_labels (stmt);
1064 tree min = NULL_TREE, max = NULL_TREE;
1065 if (n > 1)
1067 min = CASE_LOW (gimple_switch_label (stmt, 1));
1068 if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
1069 max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
1070 else
1071 max = CASE_LOW (gimple_switch_label (stmt, n - 1));
1073 if ((!min || int_fits_type_p (min, ti))
1074 && (!max || int_fits_type_p (max, ti)))
1076 gimple_switch_set_index (stmt, def);
1077 simplify_gimple_switch_label_vec (stmt, ti);
1078 update_stmt (stmt);
1079 return true;
1085 return false;
1088 /* For pointers p2 and p1 return p2 - p1 if the
1089 difference is known and constant, otherwise return NULL. */
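/* E.g. (hypothetical), for p1 = &buf[1] and p2 = &buf[6] with buf an array
   of char, the result is the constant 5; if the two pointers cannot be
   traced back to a common base, NULL_TREE is returned.  */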
1091 static tree
1092 constant_pointer_difference (tree p1, tree p2)
1094 int i, j;
1095 #define CPD_ITERATIONS 5
1096 tree exps[2][CPD_ITERATIONS];
1097 tree offs[2][CPD_ITERATIONS];
1098 int cnt[2];
1100 for (i = 0; i < 2; i++)
1102 tree p = i ? p1 : p2;
1103 tree off = size_zero_node;
1104 gimple *stmt;
1105 enum tree_code code;
1107 /* For each of p1 and p2 we need to iterate at least
1108 twice, to handle ADDR_EXPR directly in p1/p2,
1109 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1110 on definition's stmt RHS. Iterate a few extra times. */
1111 j = 0;
1114 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1115 break;
1116 if (TREE_CODE (p) == ADDR_EXPR)
1118 tree q = TREE_OPERAND (p, 0);
1119 poly_int64 offset;
1120 tree base = get_addr_base_and_unit_offset (q, &offset);
1121 if (base)
1123 q = base;
1124 if (maybe_ne (offset, 0))
1125 off = size_binop (PLUS_EXPR, off, size_int (offset));
1127 if (TREE_CODE (q) == MEM_REF
1128 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1130 p = TREE_OPERAND (q, 0);
1131 off = size_binop (PLUS_EXPR, off,
1132 wide_int_to_tree (sizetype,
1133 mem_ref_offset (q)));
1135 else
1137 exps[i][j] = q;
1138 offs[i][j++] = off;
1139 break;
1142 if (TREE_CODE (p) != SSA_NAME)
1143 break;
1144 exps[i][j] = p;
1145 offs[i][j++] = off;
1146 if (j == CPD_ITERATIONS)
1147 break;
1148 stmt = SSA_NAME_DEF_STMT (p);
1149 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1150 break;
1151 code = gimple_assign_rhs_code (stmt);
1152 if (code == POINTER_PLUS_EXPR)
1154 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1155 break;
1156 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1157 p = gimple_assign_rhs1 (stmt);
1159 else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
1160 p = gimple_assign_rhs1 (stmt);
1161 else
1162 break;
1164 while (1);
1165 cnt[i] = j;
1168 for (i = 0; i < cnt[0]; i++)
1169 for (j = 0; j < cnt[1]; j++)
1170 if (exps[0][i] == exps[1][j])
1171 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1173 return NULL_TREE;
1176 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1177 Optimize
1178 memcpy (p, "abcd", 4);
1179 memset (p + 4, ' ', 3);
1180 into
1181 memcpy (p, "abcd ", 7);
1182 call if the latter can be stored by pieces during expansion.
1184 Optimize
1185    memchr ("abcd", a, 4) == 0;
1186    or
1187    memchr ("abcd", a, 4) != 0;
1188    into
1189    (a == 'a' || a == 'b' || a == 'c' || a == 'd') == 0
1190    or
1191    (a == 'a' || a == 'b' || a == 'c' || a == 'd') != 0
1193 Also canonicalize __atomic_fetch_op (p, x, y) op x
1194 to __atomic_op_fetch (p, x, y) or
1195 __atomic_op_fetch (p, x, y) iop x
1196 to __atomic_fetch_op (p, x, y) when possible (also __sync). */
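/* As a (hypothetical) example of the last canonicalization, C source like

     long r = __atomic_fetch_add (&v, x, __ATOMIC_SEQ_CST) + x;

   is rewritten to use __atomic_add_fetch (&v, x, __ATOMIC_SEQ_CST), so the
   separate addition disappears.  */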
1198 static bool
1199 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1201 gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
1202 enum built_in_function other_atomic = END_BUILTINS;
1203 enum tree_code atomic_op = ERROR_MARK;
1204 tree vuse = gimple_vuse (stmt2);
1205 if (vuse == NULL)
1206 return false;
1207 stmt1 = SSA_NAME_DEF_STMT (vuse);
1209 tree res;
1211 switch (DECL_FUNCTION_CODE (callee2))
1213 case BUILT_IN_MEMCHR:
1214 if (gimple_call_num_args (stmt2) == 3
1215 && (res = gimple_call_lhs (stmt2)) != nullptr
1216 && use_in_zero_equality (res) != nullptr
1217 && CHAR_BIT == 8
1218 && BITS_PER_UNIT == 8)
1220 tree ptr = gimple_call_arg (stmt2, 0);
1221 if (TREE_CODE (ptr) != ADDR_EXPR
1222 || TREE_CODE (TREE_OPERAND (ptr, 0)) != STRING_CST)
1223 break;
1224 unsigned HOST_WIDE_INT slen
1225 = TREE_STRING_LENGTH (TREE_OPERAND (ptr, 0));
1226 /* It must be a non-empty string constant. */
1227 if (slen < 2)
1228 break;
1229 /* For -Os, only simplify strings with a single character. */
1230 if (!optimize_bb_for_speed_p (gimple_bb (stmt2))
1231 && slen > 2)
1232 break;
1233 tree size = gimple_call_arg (stmt2, 2);
1234 /* Size must be a constant which is <= UNITS_PER_WORD and
1235 <= the string length. */
1236 if (TREE_CODE (size) != INTEGER_CST)
1237 break;
1239 if (!tree_fits_uhwi_p (size))
1240 break;
1242 unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
1243 if (sz == 0 || sz > UNITS_PER_WORD || sz >= slen)
1244 break;
1246 tree ch = gimple_call_arg (stmt2, 1);
1247 location_t loc = gimple_location (stmt2);
1248 if (!useless_type_conversion_p (char_type_node,
1249 TREE_TYPE (ch)))
1250 ch = fold_convert_loc (loc, char_type_node, ch);
1251 const char *p = TREE_STRING_POINTER (TREE_OPERAND (ptr, 0));
1252 unsigned int isize = sz;
1253 tree *op = XALLOCAVEC (tree, isize);
1254 for (unsigned int i = 0; i < isize; i++)
1256 op[i] = build_int_cst (char_type_node, p[i]);
1257 op[i] = fold_build2_loc (loc, EQ_EXPR, boolean_type_node,
1258 op[i], ch);
1260 for (unsigned int i = isize - 1; i >= 1; i--)
1261 op[i - 1] = fold_convert_loc (loc, boolean_type_node,
1262 fold_build2_loc (loc,
1263 BIT_IOR_EXPR,
1264 boolean_type_node,
1265 op[i - 1],
1266 op[i]));
1267 res = fold_convert_loc (loc, TREE_TYPE (res), op[0]);
1268 gimplify_and_update_call_from_tree (gsi_p, res);
1269 return true;
1271 break;
1273 case BUILT_IN_MEMSET:
1274 if (gimple_call_num_args (stmt2) != 3
1275 || gimple_call_lhs (stmt2)
1276 || CHAR_BIT != 8
1277 || BITS_PER_UNIT != 8)
1278 break;
1279 else
1281 tree callee1;
1282 tree ptr1, src1, str1, off1, len1, lhs1;
1283 tree ptr2 = gimple_call_arg (stmt2, 0);
1284 tree val2 = gimple_call_arg (stmt2, 1);
1285 tree len2 = gimple_call_arg (stmt2, 2);
1286 tree diff, vdef, new_str_cst;
1287 gimple *use_stmt;
1288 unsigned int ptr1_align;
1289 unsigned HOST_WIDE_INT src_len;
1290 char *src_buf;
1291 use_operand_p use_p;
1293 if (!tree_fits_shwi_p (val2)
1294 || !tree_fits_uhwi_p (len2)
1295 || compare_tree_int (len2, 1024) == 1)
1296 break;
1297 if (is_gimple_call (stmt1))
1299 /* If first stmt is a call, it needs to be memcpy
1300 or mempcpy, with string literal as second argument and
1301 constant length. */
1302 callee1 = gimple_call_fndecl (stmt1);
1303 if (callee1 == NULL_TREE
1304 || !fndecl_built_in_p (callee1, BUILT_IN_NORMAL)
1305 || gimple_call_num_args (stmt1) != 3)
1306 break;
1307 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1308 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1309 break;
1310 ptr1 = gimple_call_arg (stmt1, 0);
1311 src1 = gimple_call_arg (stmt1, 1);
1312 len1 = gimple_call_arg (stmt1, 2);
1313 lhs1 = gimple_call_lhs (stmt1);
1314 if (!tree_fits_uhwi_p (len1))
1315 break;
1316 str1 = string_constant (src1, &off1, NULL, NULL);
1317 if (str1 == NULL_TREE)
1318 break;
1319 if (!tree_fits_uhwi_p (off1)
1320 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1321 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1322 - tree_to_uhwi (off1)) > 0
1323 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1324 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1325 != TYPE_MODE (char_type_node))
1326 break;
1328 else if (gimple_assign_single_p (stmt1))
1330 /* Otherwise look for length 1 memcpy optimized into
1331 assignment. */
1332 ptr1 = gimple_assign_lhs (stmt1);
1333 src1 = gimple_assign_rhs1 (stmt1);
1334 if (TREE_CODE (ptr1) != MEM_REF
1335 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1336 || !tree_fits_shwi_p (src1))
1337 break;
1338 ptr1 = build_fold_addr_expr (ptr1);
1339 STRIP_USELESS_TYPE_CONVERSION (ptr1);
1340 callee1 = NULL_TREE;
1341 len1 = size_one_node;
1342 lhs1 = NULL_TREE;
1343 off1 = size_zero_node;
1344 str1 = NULL_TREE;
1346 else
1347 break;
1349 diff = constant_pointer_difference (ptr1, ptr2);
1350 if (diff == NULL && lhs1 != NULL)
1352 diff = constant_pointer_difference (lhs1, ptr2);
1353 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1354 && diff != NULL)
1355 diff = size_binop (PLUS_EXPR, diff,
1356 fold_convert (sizetype, len1));
1358 /* If the difference between the second and first destination pointer
1359 is not constant, or is bigger than memcpy length, bail out. */
1360 if (diff == NULL
1361 || !tree_fits_uhwi_p (diff)
1362 || tree_int_cst_lt (len1, diff)
1363 || compare_tree_int (diff, 1024) == 1)
1364 break;
1366 /* Use maximum of difference plus memset length and memcpy length
1367 as the new memcpy length, if it is too big, bail out. */
1368 src_len = tree_to_uhwi (diff);
1369 src_len += tree_to_uhwi (len2);
1370 if (src_len < tree_to_uhwi (len1))
1371 src_len = tree_to_uhwi (len1);
1372 if (src_len > 1024)
1373 break;
1375 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1376 with bigger length will return different result. */
1377 if (lhs1 != NULL_TREE
1378 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1379 && (TREE_CODE (lhs1) != SSA_NAME
1380 || !single_imm_use (lhs1, &use_p, &use_stmt)
1381 || use_stmt != stmt2))
1382 break;
1384 /* If anything reads memory in between memcpy and memset
1385 call, the modified memcpy call might change it. */
1386 vdef = gimple_vdef (stmt1);
1387 if (vdef != NULL
1388 && (!single_imm_use (vdef, &use_p, &use_stmt)
1389 || use_stmt != stmt2))
1390 break;
1392 ptr1_align = get_pointer_alignment (ptr1);
1393 /* Construct the new source string literal. */
1394 src_buf = XALLOCAVEC (char, src_len + 1);
1395 if (callee1)
1396 memcpy (src_buf,
1397 TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
1398 tree_to_uhwi (len1));
1399 else
1400 src_buf[0] = tree_to_shwi (src1);
1401 memset (src_buf + tree_to_uhwi (diff),
1402 tree_to_shwi (val2), tree_to_uhwi (len2));
1403 src_buf[src_len] = '\0';
1404 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1405 handle embedded '\0's. */
1406 if (strlen (src_buf) != src_len)
1407 break;
1408 rtl_profile_for_bb (gimple_bb (stmt2));
1409 /* If the new memcpy wouldn't be emitted by storing the literal
1410 by pieces, this optimization might enlarge .rodata too much,
1411 as commonly used string literals couldn't be shared any
1412 longer. */
1413 if (!can_store_by_pieces (src_len,
1414 builtin_strncpy_read_str,
1415 src_buf, ptr1_align, false))
1416 break;
1418 new_str_cst = build_string_literal (src_len, src_buf);
1419 if (callee1)
1421 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1422 memset call. */
1423 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1424 gimple_call_set_lhs (stmt1, NULL_TREE);
1425 gimple_call_set_arg (stmt1, 1, new_str_cst);
1426 gimple_call_set_arg (stmt1, 2,
1427 build_int_cst (TREE_TYPE (len1), src_len));
1428 update_stmt (stmt1);
1429 unlink_stmt_vdef (stmt2);
1430 gsi_replace (gsi_p, gimple_build_nop (), false);
1431 fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
1432 release_defs (stmt2);
1433 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1435 fwprop_invalidate_lattice (lhs1);
1436 release_ssa_name (lhs1);
1438 return true;
1440 else
1442 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1443 assignment, remove STMT1 and change memset call into
1444 memcpy call. */
1445 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1447 if (!is_gimple_val (ptr1))
1448 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1449 true, GSI_SAME_STMT);
1450 tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
1451 gimple_call_set_fndecl (stmt2, fndecl);
1452 gimple_call_set_fntype (as_a <gcall *> (stmt2),
1453 TREE_TYPE (fndecl));
1454 gimple_call_set_arg (stmt2, 0, ptr1);
1455 gimple_call_set_arg (stmt2, 1, new_str_cst);
1456 gimple_call_set_arg (stmt2, 2,
1457 build_int_cst (TREE_TYPE (len2), src_len));
1458 unlink_stmt_vdef (stmt1);
1459 gsi_remove (&gsi, true);
1460 fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
1461 release_defs (stmt1);
1462 update_stmt (stmt2);
1463 return false;
1466 break;
1468 #define CASE_ATOMIC(NAME, OTHER, OP) \
1469 case BUILT_IN_##NAME##_1: \
1470 case BUILT_IN_##NAME##_2: \
1471 case BUILT_IN_##NAME##_4: \
1472 case BUILT_IN_##NAME##_8: \
1473 case BUILT_IN_##NAME##_16: \
1474 atomic_op = OP; \
1475 other_atomic \
1476 = (enum built_in_function) (BUILT_IN_##OTHER##_1 \
1477 + (DECL_FUNCTION_CODE (callee2) \
1478 - BUILT_IN_##NAME##_1)); \
1479 goto handle_atomic_fetch_op;
1481 CASE_ATOMIC (ATOMIC_FETCH_ADD, ATOMIC_ADD_FETCH, PLUS_EXPR)
1482 CASE_ATOMIC (ATOMIC_FETCH_SUB, ATOMIC_SUB_FETCH, MINUS_EXPR)
1483 CASE_ATOMIC (ATOMIC_FETCH_AND, ATOMIC_AND_FETCH, BIT_AND_EXPR)
1484 CASE_ATOMIC (ATOMIC_FETCH_XOR, ATOMIC_XOR_FETCH, BIT_XOR_EXPR)
1485 CASE_ATOMIC (ATOMIC_FETCH_OR, ATOMIC_OR_FETCH, BIT_IOR_EXPR)
1487 CASE_ATOMIC (SYNC_FETCH_AND_ADD, SYNC_ADD_AND_FETCH, PLUS_EXPR)
1488 CASE_ATOMIC (SYNC_FETCH_AND_SUB, SYNC_SUB_AND_FETCH, MINUS_EXPR)
1489 CASE_ATOMIC (SYNC_FETCH_AND_AND, SYNC_AND_AND_FETCH, BIT_AND_EXPR)
1490 CASE_ATOMIC (SYNC_FETCH_AND_XOR, SYNC_XOR_AND_FETCH, BIT_XOR_EXPR)
1491 CASE_ATOMIC (SYNC_FETCH_AND_OR, SYNC_OR_AND_FETCH, BIT_IOR_EXPR)
1493 CASE_ATOMIC (ATOMIC_ADD_FETCH, ATOMIC_FETCH_ADD, MINUS_EXPR)
1494 CASE_ATOMIC (ATOMIC_SUB_FETCH, ATOMIC_FETCH_SUB, PLUS_EXPR)
1495 CASE_ATOMIC (ATOMIC_XOR_FETCH, ATOMIC_FETCH_XOR, BIT_XOR_EXPR)
1497 CASE_ATOMIC (SYNC_ADD_AND_FETCH, SYNC_FETCH_AND_ADD, MINUS_EXPR)
1498 CASE_ATOMIC (SYNC_SUB_AND_FETCH, SYNC_FETCH_AND_SUB, PLUS_EXPR)
1499 CASE_ATOMIC (SYNC_XOR_AND_FETCH, SYNC_FETCH_AND_XOR, BIT_XOR_EXPR)
1501 #undef CASE_ATOMIC
1503 handle_atomic_fetch_op:
1504 if (gimple_call_num_args (stmt2) >= 2 && gimple_call_lhs (stmt2))
1506 tree lhs2 = gimple_call_lhs (stmt2), lhsc = lhs2;
1507 tree arg = gimple_call_arg (stmt2, 1);
1508 gimple *use_stmt, *cast_stmt = NULL;
1509 use_operand_p use_p;
1510 tree ndecl = builtin_decl_explicit (other_atomic);
1512 if (ndecl == NULL_TREE || !single_imm_use (lhs2, &use_p, &use_stmt))
1513 break;
1515 if (gimple_assign_cast_p (use_stmt))
1517 cast_stmt = use_stmt;
1518 lhsc = gimple_assign_lhs (cast_stmt);
1519 if (lhsc == NULL_TREE
1520 || !INTEGRAL_TYPE_P (TREE_TYPE (lhsc))
1521 || (TYPE_PRECISION (TREE_TYPE (lhsc))
1522 != TYPE_PRECISION (TREE_TYPE (lhs2)))
1523 || !single_imm_use (lhsc, &use_p, &use_stmt))
1525 use_stmt = cast_stmt;
1526 cast_stmt = NULL;
1527 lhsc = lhs2;
1531 bool ok = false;
1532 tree oarg = NULL_TREE;
1533 enum tree_code ccode = ERROR_MARK;
1534 tree crhs1 = NULL_TREE, crhs2 = NULL_TREE;
1535 if (is_gimple_assign (use_stmt)
1536 && gimple_assign_rhs_code (use_stmt) == atomic_op)
1538 if (gimple_assign_rhs1 (use_stmt) == lhsc)
1539 oarg = gimple_assign_rhs2 (use_stmt);
1540 else if (atomic_op != MINUS_EXPR)
1541 oarg = gimple_assign_rhs1 (use_stmt);
1543 else if (atomic_op == MINUS_EXPR
1544 && is_gimple_assign (use_stmt)
1545 && gimple_assign_rhs_code (use_stmt) == PLUS_EXPR
1546 && TREE_CODE (arg) == INTEGER_CST
1547 && (TREE_CODE (gimple_assign_rhs2 (use_stmt))
1548 == INTEGER_CST))
1550 tree a = fold_convert (TREE_TYPE (lhs2), arg);
1551 tree o = fold_convert (TREE_TYPE (lhs2),
1552 gimple_assign_rhs2 (use_stmt));
1553 if (wi::to_wide (a) == wi::neg (wi::to_wide (o)))
1554 ok = true;
1556 else if (atomic_op == BIT_AND_EXPR || atomic_op == BIT_IOR_EXPR)
1558 else if (gimple_code (use_stmt) == GIMPLE_COND)
1560 ccode = gimple_cond_code (use_stmt);
1561 crhs1 = gimple_cond_lhs (use_stmt);
1562 crhs2 = gimple_cond_rhs (use_stmt);
1564 else if (is_gimple_assign (use_stmt))
1566 if (gimple_assign_rhs_class (use_stmt) == GIMPLE_BINARY_RHS)
1568 ccode = gimple_assign_rhs_code (use_stmt);
1569 crhs1 = gimple_assign_rhs1 (use_stmt);
1570 crhs2 = gimple_assign_rhs2 (use_stmt);
1572 else if (gimple_assign_rhs_code (use_stmt) == COND_EXPR)
1574 tree cond = gimple_assign_rhs1 (use_stmt);
1575 if (COMPARISON_CLASS_P (cond))
1577 ccode = TREE_CODE (cond);
1578 crhs1 = TREE_OPERAND (cond, 0);
1579 crhs2 = TREE_OPERAND (cond, 1);
1583 if (ccode == EQ_EXPR || ccode == NE_EXPR)
1585 /* Deal with x - y == 0 or x ^ y == 0
1586 being optimized into x == y and x + cst == 0
1587 into x == -cst. */
1588 tree o = NULL_TREE;
1589 if (crhs1 == lhsc)
1590 o = crhs2;
1591 else if (crhs2 == lhsc)
1592 o = crhs1;
1593 if (o && atomic_op != PLUS_EXPR)
1594 oarg = o;
1595 else if (o
1596 && TREE_CODE (o) == INTEGER_CST
1597 && TREE_CODE (arg) == INTEGER_CST)
1599 tree a = fold_convert (TREE_TYPE (lhs2), arg);
1600 o = fold_convert (TREE_TYPE (lhs2), o);
1601 if (wi::to_wide (a) == wi::neg (wi::to_wide (o)))
1602 ok = true;
1605 if (oarg && !ok)
1607 if (operand_equal_p (arg, oarg, 0))
1608 ok = true;
1609 else if (TREE_CODE (arg) == SSA_NAME
1610 && TREE_CODE (oarg) == SSA_NAME)
1612 tree oarg2 = oarg;
1613 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (oarg)))
1615 gimple *g = SSA_NAME_DEF_STMT (oarg);
1616 oarg2 = gimple_assign_rhs1 (g);
1617 if (TREE_CODE (oarg2) != SSA_NAME
1618 || !INTEGRAL_TYPE_P (TREE_TYPE (oarg2))
1619 || (TYPE_PRECISION (TREE_TYPE (oarg2))
1620 != TYPE_PRECISION (TREE_TYPE (oarg))))
1621 oarg2 = oarg;
1623 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg)))
1625 gimple *g = SSA_NAME_DEF_STMT (arg);
1626 tree rhs1 = gimple_assign_rhs1 (g);
1627 /* Handle e.g.
1628 x.0_1 = (long unsigned int) x_4(D);
1629 _2 = __atomic_fetch_add_8 (&vlong, x.0_1, 0);
1630 _3 = (long int) _2;
1631 _7 = x_4(D) + _3; */
1632 if (rhs1 == oarg || rhs1 == oarg2)
1633 ok = true;
1634 /* Handle e.g.
1635 x.18_1 = (short unsigned int) x_5(D);
1636 _2 = (int) x.18_1;
1637 _3 = __atomic_fetch_xor_2 (&vshort, _2, 0);
1638 _4 = (short int) _3;
1639 _8 = x_5(D) ^ _4;
1640 This happens only for char/short. */
1641 else if (TREE_CODE (rhs1) == SSA_NAME
1642 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1643 && (TYPE_PRECISION (TREE_TYPE (rhs1))
1644 == TYPE_PRECISION (TREE_TYPE (lhs2))))
1646 g = SSA_NAME_DEF_STMT (rhs1);
1647 if (gimple_assign_cast_p (g)
1648 && (gimple_assign_rhs1 (g) == oarg
1649 || gimple_assign_rhs1 (g) == oarg2))
1650 ok = true;
1653 if (!ok && arg == oarg2)
1654 /* Handle e.g.
1655 _1 = __sync_fetch_and_add_4 (&v, x_5(D));
1656 _2 = (int) _1;
1657 x.0_3 = (int) x_5(D);
1658 _7 = _2 + x.0_3; */
1659 ok = true;
1663 if (ok)
1665 tree new_lhs = make_ssa_name (TREE_TYPE (lhs2));
1666 gimple_call_set_lhs (stmt2, new_lhs);
1667 gimple_call_set_fndecl (stmt2, ndecl);
1668 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1669 if (ccode == ERROR_MARK)
1670 gimple_assign_set_rhs_with_ops (&gsi, cast_stmt
1671 ? NOP_EXPR : SSA_NAME,
1672 new_lhs);
1673 else
1675 crhs1 = new_lhs;
1676 crhs2 = build_zero_cst (TREE_TYPE (lhs2));
1677 if (gimple_code (use_stmt) == GIMPLE_COND)
1679 gcond *cond_stmt = as_a <gcond *> (use_stmt);
1680 gimple_cond_set_lhs (cond_stmt, crhs1);
1681 gimple_cond_set_rhs (cond_stmt, crhs2);
1683 else if (gimple_assign_rhs_class (use_stmt)
1684 == GIMPLE_BINARY_RHS)
1686 gimple_assign_set_rhs1 (use_stmt, crhs1);
1687 gimple_assign_set_rhs2 (use_stmt, crhs2);
1689 else
1691 gcc_checking_assert (gimple_assign_rhs_code (use_stmt)
1692 == COND_EXPR);
1693 tree cond = build2 (ccode, boolean_type_node,
1694 crhs1, crhs2);
1695 gimple_assign_set_rhs1 (use_stmt, cond);
1698 update_stmt (use_stmt);
1699 if (atomic_op != BIT_AND_EXPR
1700 && atomic_op != BIT_IOR_EXPR
1701 && !stmt_ends_bb_p (stmt2))
1703 /* For the benefit of debug stmts, emit stmt(s) to set
1704 lhs2 to the value it had from the new builtin.
1705 E.g. if it was previously:
1706 lhs2 = __atomic_fetch_add_8 (ptr, arg, 0);
1707 emit:
1708 new_lhs = __atomic_add_fetch_8 (ptr, arg, 0);
1709 lhs2 = new_lhs - arg;
1710 We also keep cast_stmt if any in the IL for
1711 the same reasons.
1712 These stmts will be DCEd later and proper debug info
1713 will be emitted.
1714 This is only possible for reversible operations
1715 (+/-/^) and without -fnon-call-exceptions. */
1716 gsi = gsi_for_stmt (stmt2);
1717 tree type = TREE_TYPE (lhs2);
1718 if (TREE_CODE (arg) == INTEGER_CST)
1719 arg = fold_convert (type, arg);
1720 else if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
1722 tree narg = make_ssa_name (type);
1723 gimple *g = gimple_build_assign (narg, NOP_EXPR, arg);
1724 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1725 arg = narg;
1727 enum tree_code rcode;
1728 switch (atomic_op)
1730 case PLUS_EXPR: rcode = MINUS_EXPR; break;
1731 case MINUS_EXPR: rcode = PLUS_EXPR; break;
1732 case BIT_XOR_EXPR: rcode = atomic_op; break;
1733 default: gcc_unreachable ();
1735 gimple *g = gimple_build_assign (lhs2, rcode, new_lhs, arg);
1736 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1737 update_stmt (stmt2);
1739 else
1741 /* For e.g.
1742 lhs2 = __atomic_fetch_or_8 (ptr, arg, 0);
1743 after we change it to
1744 new_lhs = __atomic_or_fetch_8 (ptr, arg, 0);
1745 there is no way to find out the lhs2 value (i.e.
1746 what the atomic memory contained before the operation),
1747 values of some bits are lost. We have checked earlier
1748 that we don't have any non-debug users except for what
1749 we are already changing, so we need to reset the
1750 debug stmts and remove the cast_stmt if any. */
1751 imm_use_iterator iter;
1752 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs2)
1753 if (use_stmt != cast_stmt)
1755 gcc_assert (is_gimple_debug (use_stmt));
1756 gimple_debug_bind_reset_value (use_stmt);
1757 update_stmt (use_stmt);
1759 if (cast_stmt)
1761 gsi = gsi_for_stmt (cast_stmt);
1762 gsi_remove (&gsi, true);
1764 update_stmt (stmt2);
1765 release_ssa_name (lhs2);
1769 break;
1771 default:
1772 break;
1774 return false;
1777 /* Given a ssa_name in NAME see if it was defined by an assignment and
1778 set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
1779 to the second operand on the rhs. */
1781 static inline void
1782 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1784 gimple *def;
1785 enum tree_code code1;
1786 tree arg11;
1787 tree arg21;
1788 tree arg31;
1789 enum gimple_rhs_class grhs_class;
1791 code1 = TREE_CODE (name);
1792 arg11 = name;
1793 arg21 = NULL_TREE;
1794 arg31 = NULL_TREE;
1795 grhs_class = get_gimple_rhs_class (code1);
1797 if (code1 == SSA_NAME)
1799 def = SSA_NAME_DEF_STMT (name);
1801 if (def && is_gimple_assign (def)
1802 && can_propagate_from (def))
1804 code1 = gimple_assign_rhs_code (def);
1805 arg11 = gimple_assign_rhs1 (def);
1806 arg21 = gimple_assign_rhs2 (def);
1807 arg31 = gimple_assign_rhs3 (def);
1810 else if (grhs_class != GIMPLE_SINGLE_RHS)
1811 code1 = ERROR_MARK;
1813 *code = code1;
1814 *arg1 = arg11;
1815 if (arg2)
1816 *arg2 = arg21;
1817 if (arg31)
1818 *code = ERROR_MARK;
1822 /* Recognize rotation patterns. Return true if a transformation
1823 applied, otherwise return false.
1825 We are looking for X with unsigned type T with bitsize B, OP being
1826 +, | or ^, some type T2 wider than T. For:
1827 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
1828 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
1830 transform these into:
1831 X r<< CNT1
1833 Or for:
1834 (X << Y) OP (X >> (B - Y))
1835 (X << (int) Y) OP (X >> (int) (B - Y))
1836 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
1837 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
1838 (X << Y) | (X >> ((-Y) & (B - 1)))
1839 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
1840 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
1841 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
1843 transform these into (last 2 only if ranger can prove Y < B
1844 or Y = N * B):
1845 X r<< Y
1847 X r<< (Y & (B - 1))
1848 The latter for the forms with T2 wider than T if ranger can't prove Y < B.
1850 Or for:
1851 (X << (Y & (B - 1))) | (X >> ((-Y) & (B - 1)))
1852 (X << (int) (Y & (B - 1))) | (X >> (int) ((-Y) & (B - 1)))
1853 ((T) ((T2) X << (Y & (B - 1)))) | ((T) ((T2) X >> ((-Y) & (B - 1))))
1854 ((T) ((T2) X << (int) (Y & (B - 1)))) \
1855 | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
1857 transform these into:
1858 X r<< (Y & (B - 1))
1860 Note, in the patterns with T2 type, the type of the OP operands
1861 might even be a signed type, but it should have precision B.
1862 Expressions with & (B - 1) should be recognized only if B is
1863 a power of 2. */
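/* Illustrative sketch (not part of the original sources): assuming a
   32-bit unsigned int, the safe C rotate idiom below is an instance of
   the (X << (Y & (B - 1))) | (X >> ((-Y) & (B - 1))) form above, with
   the hypothetical names rotl32, x and y standing in for user code:

     unsigned int
     rotl32 (unsigned int x, unsigned int y)
     {
       return (x << (y & 31)) | (x >> ((-y) & 31));
     }

   This is the kind of source that ends up as X r<< (Y & (B - 1)).  The
   simpler (x << y) | (x >> (32 - y)) spelling is recognized as well, but
   it invokes undefined behaviour for y == 0, which is why the (-y) & 31
   form is the canonical safe spelling.  */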
1865 static bool
1866 simplify_rotate (gimple_stmt_iterator *gsi)
1868 gimple *stmt = gsi_stmt (*gsi);
1869 tree arg[2], rtype, rotcnt = NULL_TREE;
1870 tree def_arg1[2], def_arg2[2];
1871 enum tree_code def_code[2];
1872 tree lhs;
1873 int i;
1874 bool swapped_p = false;
1875 gimple *g;
1876 gimple *def_arg_stmt[2] = { NULL, NULL };
1877 int wider_prec = 0;
1878 bool add_masking = false;
1880 arg[0] = gimple_assign_rhs1 (stmt);
1881 arg[1] = gimple_assign_rhs2 (stmt);
1882 rtype = TREE_TYPE (arg[0]);
1884 /* Only create rotates in complete modes. Other cases are not
1885 expanded properly. */
1886 if (!INTEGRAL_TYPE_P (rtype)
1887 || !type_has_mode_precision_p (rtype))
1888 return false;
1890 for (i = 0; i < 2; i++)
1892 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1893 if (TREE_CODE (arg[i]) == SSA_NAME)
1894 def_arg_stmt[i] = SSA_NAME_DEF_STMT (arg[i]);
1897 /* Look through narrowing (or same precision) conversions. */
1898 if (CONVERT_EXPR_CODE_P (def_code[0])
1899 && CONVERT_EXPR_CODE_P (def_code[1])
1900 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
1901 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
1902 && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
1903 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
1904 && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) >= TYPE_PRECISION (rtype)
1905 && has_single_use (arg[0])
1906 && has_single_use (arg[1]))
1908 wider_prec = TYPE_PRECISION (TREE_TYPE (def_arg1[0]));
1909 for (i = 0; i < 2; i++)
1911 arg[i] = def_arg1[i];
1912 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1913 if (TREE_CODE (arg[i]) == SSA_NAME)
1914 def_arg_stmt[i] = SSA_NAME_DEF_STMT (arg[i]);
1917 else
1919 /* Handle signed rotate; the RSHIFT_EXPR has to be done
1920 in unsigned type but LSHIFT_EXPR could be signed. */
1921 i = (def_code[0] == LSHIFT_EXPR || def_code[0] == RSHIFT_EXPR);
1922 if (CONVERT_EXPR_CODE_P (def_code[i])
1923 && (def_code[1 - i] == LSHIFT_EXPR || def_code[1 - i] == RSHIFT_EXPR)
1924 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[i]))
1925 && TYPE_PRECISION (rtype) == TYPE_PRECISION (TREE_TYPE (def_arg1[i]))
1926 && has_single_use (arg[i]))
1928 arg[i] = def_arg1[i];
1929 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1930 if (TREE_CODE (arg[i]) == SSA_NAME)
1931 def_arg_stmt[i] = SSA_NAME_DEF_STMT (arg[i]);
1935 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
1936 for (i = 0; i < 2; i++)
1937 if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
1938 return false;
1939 else if (!has_single_use (arg[i]))
1940 return false;
1941 if (def_code[0] == def_code[1])
1942 return false;
1944 /* If we've looked through narrowing conversions before, look through
1945 widening conversions from unsigned type with the same precision
1946 as rtype here. */
1947 if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
1948 for (i = 0; i < 2; i++)
1950 tree tem;
1951 enum tree_code code;
1952 defcodefor_name (def_arg1[i], &code, &tem, NULL);
1953 if (!CONVERT_EXPR_CODE_P (code)
1954 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
1955 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
1956 return false;
1957 def_arg1[i] = tem;
1959 /* Both shifts have to use the same first operand. */
1960 if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
1961 || !types_compatible_p (TREE_TYPE (def_arg1[0]),
1962 TREE_TYPE (def_arg1[1])))
1964 if ((TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
1965 != TYPE_PRECISION (TREE_TYPE (def_arg1[1])))
1966 || (TYPE_UNSIGNED (TREE_TYPE (def_arg1[0]))
1967 == TYPE_UNSIGNED (TREE_TYPE (def_arg1[1]))))
1968 return false;
1970 /* Handle signed rotate; the RSHIFT_EXPR has to be done
1971 in unsigned type but LSHIFT_EXPR could be signed. */
1972 i = def_code[0] != RSHIFT_EXPR;
1973 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[i])))
1974 return false;
1976 tree tem;
1977 enum tree_code code;
1978 defcodefor_name (def_arg1[i], &code, &tem, NULL);
1979 if (!CONVERT_EXPR_CODE_P (code)
1980 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
1981 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
1982 return false;
1983 def_arg1[i] = tem;
1984 if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
1985 || !types_compatible_p (TREE_TYPE (def_arg1[0]),
1986 TREE_TYPE (def_arg1[1])))
1987 return false;
1989 else if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
1990 return false;
1992 /* CNT1 + CNT2 == B case above. */
1993 if (tree_fits_uhwi_p (def_arg2[0])
1994 && tree_fits_uhwi_p (def_arg2[1])
1995 && tree_to_uhwi (def_arg2[0])
1996 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
1997 rotcnt = def_arg2[0];
1998 else if (TREE_CODE (def_arg2[0]) != SSA_NAME
1999 || TREE_CODE (def_arg2[1]) != SSA_NAME)
2000 return false;
2001 else
2003 tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
2004 enum tree_code cdef_code[2];
2005 gimple *def_arg_alt_stmt[2] = { NULL, NULL };
2006 int check_range = 0;
2007 gimple *check_range_stmt = NULL;
2008 /* Look through conversion of the shift count argument.
2009 The C/C++ FE casts any shift count argument to integer_type_node.
2010 The only problem might be if the shift count type maximum value
2011 is equal to or smaller than the number of bits in rtype.  */
2012 for (i = 0; i < 2; i++)
2014 def_arg2_alt[i] = def_arg2[i];
2015 defcodefor_name (def_arg2[i], &cdef_code[i],
2016 &cdef_arg1[i], &cdef_arg2[i]);
2017 if (CONVERT_EXPR_CODE_P (cdef_code[i])
2018 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
2019 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2020 > floor_log2 (TYPE_PRECISION (rtype))
2021 && type_has_mode_precision_p (TREE_TYPE (cdef_arg1[i])))
2023 def_arg2_alt[i] = cdef_arg1[i];
2024 if (TREE_CODE (def_arg2[i]) == SSA_NAME)
2025 def_arg_alt_stmt[i] = SSA_NAME_DEF_STMT (def_arg2[i]);
2026 defcodefor_name (def_arg2_alt[i], &cdef_code[i],
2027 &cdef_arg1[i], &cdef_arg2[i]);
2029 else
2030 def_arg_alt_stmt[i] = def_arg_stmt[i];
2032 for (i = 0; i < 2; i++)
2033 /* Check for one shift count being Y and the other B - Y,
2034 with optional casts. */
2035 if (cdef_code[i] == MINUS_EXPR
2036 && tree_fits_shwi_p (cdef_arg1[i])
2037 && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
2038 && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
2040 tree tem;
2041 enum tree_code code;
2043 if (cdef_arg2[i] == def_arg2[1 - i]
2044 || cdef_arg2[i] == def_arg2_alt[1 - i])
2046 rotcnt = cdef_arg2[i];
2047 check_range = -1;
2048 if (cdef_arg2[i] == def_arg2[1 - i])
2049 check_range_stmt = def_arg_stmt[1 - i];
2050 else
2051 check_range_stmt = def_arg_alt_stmt[1 - i];
2052 break;
2054 defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
2055 if (CONVERT_EXPR_CODE_P (code)
2056 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2057 && TYPE_PRECISION (TREE_TYPE (tem))
2058 > floor_log2 (TYPE_PRECISION (rtype))
2059 && type_has_mode_precision_p (TREE_TYPE (tem))
2060 && (tem == def_arg2[1 - i]
2061 || tem == def_arg2_alt[1 - i]))
2063 rotcnt = tem;
2064 check_range = -1;
2065 if (tem == def_arg2[1 - i])
2066 check_range_stmt = def_arg_stmt[1 - i];
2067 else
2068 check_range_stmt = def_arg_alt_stmt[1 - i];
2069 break;
2072 /* The above sequence isn't safe for Y being 0,
2073 because then one of the shifts triggers undefined behavior.
2074 This alternative is safe even for rotation count of 0.
2075 One shift count is Y and the other (-Y) & (B - 1).
2076 Or one shift count is Y & (B - 1) and the other (-Y) & (B - 1). */
2077 else if (cdef_code[i] == BIT_AND_EXPR
2078 && pow2p_hwi (TYPE_PRECISION (rtype))
2079 && tree_fits_shwi_p (cdef_arg2[i])
2080 && tree_to_shwi (cdef_arg2[i])
2081 == TYPE_PRECISION (rtype) - 1
2082 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
2083 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
2085 tree tem;
2086 enum tree_code code;
2088 defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
2089 if (CONVERT_EXPR_CODE_P (code)
2090 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2091 && TYPE_PRECISION (TREE_TYPE (tem))
2092 > floor_log2 (TYPE_PRECISION (rtype))
2093 && type_has_mode_precision_p (TREE_TYPE (tem)))
2094 defcodefor_name (tem, &code, &tem, NULL);
2096 if (code == NEGATE_EXPR)
2098 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
2100 rotcnt = tem;
2101 check_range = 1;
2102 if (tem == def_arg2[1 - i])
2103 check_range_stmt = def_arg_stmt[1 - i];
2104 else
2105 check_range_stmt = def_arg_alt_stmt[1 - i];
2106 break;
2108 tree tem2;
2109 defcodefor_name (tem, &code, &tem2, NULL);
2110 if (CONVERT_EXPR_CODE_P (code)
2111 && INTEGRAL_TYPE_P (TREE_TYPE (tem2))
2112 && TYPE_PRECISION (TREE_TYPE (tem2))
2113 > floor_log2 (TYPE_PRECISION (rtype))
2114 && type_has_mode_precision_p (TREE_TYPE (tem2)))
2116 if (tem2 == def_arg2[1 - i]
2117 || tem2 == def_arg2_alt[1 - i])
2119 rotcnt = tem2;
2120 check_range = 1;
2121 if (tem2 == def_arg2[1 - i])
2122 check_range_stmt = def_arg_stmt[1 - i];
2123 else
2124 check_range_stmt = def_arg_alt_stmt[1 - i];
2125 break;
2128 else
2129 tem2 = NULL_TREE;
2131 if (cdef_code[1 - i] == BIT_AND_EXPR
2132 && tree_fits_shwi_p (cdef_arg2[1 - i])
2133 && tree_to_shwi (cdef_arg2[1 - i])
2134 == TYPE_PRECISION (rtype) - 1
2135 && TREE_CODE (cdef_arg1[1 - i]) == SSA_NAME)
2137 if (tem == cdef_arg1[1 - i]
2138 || tem2 == cdef_arg1[1 - i])
2140 rotcnt = def_arg2[1 - i];
2141 break;
2143 tree tem3;
2144 defcodefor_name (cdef_arg1[1 - i], &code, &tem3, NULL);
2145 if (CONVERT_EXPR_CODE_P (code)
2146 && INTEGRAL_TYPE_P (TREE_TYPE (tem3))
2147 && TYPE_PRECISION (TREE_TYPE (tem3))
2148 > floor_log2 (TYPE_PRECISION (rtype))
2149 && type_has_mode_precision_p (TREE_TYPE (tem3)))
2151 if (tem == tem3 || tem2 == tem3)
2153 rotcnt = def_arg2[1 - i];
2154 break;
2160 if (check_range && wider_prec > TYPE_PRECISION (rtype))
2162 if (TREE_CODE (rotcnt) != SSA_NAME)
2163 return false;
2164 int_range_max r;
2165 range_query *q = get_range_query (cfun);
2166 if (q == get_global_range_query ())
2167 q = enable_ranger (cfun);
2168 if (!q->range_of_expr (r, rotcnt, check_range_stmt))
2170 if (check_range > 0)
2171 return false;
2172 r.set_varying (TREE_TYPE (rotcnt));
2174 int prec = TYPE_PRECISION (TREE_TYPE (rotcnt));
2175 signop sign = TYPE_SIGN (TREE_TYPE (rotcnt));
2176 wide_int min = wide_int::from (TYPE_PRECISION (rtype), prec, sign);
2177 wide_int max = wide_int::from (wider_prec - 1, prec, sign);
2178 if (check_range < 0)
2179 max = min;
2180 int_range<1> r2 (TREE_TYPE (rotcnt), min, max);
2181 r.intersect (r2);
2182 if (!r.undefined_p ())
2184 if (check_range > 0)
2186 int_range_max r3;
2187 for (int i = TYPE_PRECISION (rtype) + 1; i < wider_prec;
2188 i += TYPE_PRECISION (rtype))
2190 int j = i + TYPE_PRECISION (rtype) - 2;
2191 min = wide_int::from (i, prec, sign);
2192 max = wide_int::from (MIN (j, wider_prec - 1),
2193 prec, sign);
2194 int_range<1> r4 (TREE_TYPE (rotcnt), min, max);
2195 r3.union_ (r4);
2197 r.intersect (r3);
2198 if (!r.undefined_p ())
2199 return false;
2201 add_masking = true;
2204 if (rotcnt == NULL_TREE)
2205 return false;
2206 swapped_p = i != 1;
2209 if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
2210 TREE_TYPE (rotcnt)))
2212 g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
2213 NOP_EXPR, rotcnt);
2214 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2215 rotcnt = gimple_assign_lhs (g);
2217 if (add_masking)
2219 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rotcnt)),
2220 BIT_AND_EXPR, rotcnt,
2221 build_int_cst (TREE_TYPE (rotcnt),
2222 TYPE_PRECISION (rtype) - 1));
2223 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2224 rotcnt = gimple_assign_lhs (g);
2226 lhs = gimple_assign_lhs (stmt);
2227 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2228 lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
2229 g = gimple_build_assign (lhs,
2230 ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
2231 ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
2232 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2234 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2235 g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
2237 gsi_replace (gsi, g, false);
2238 return true;
2242 /* Check whether an array contains a valid ctz table. */
2243 static bool
2244 check_ctz_array (tree ctor, unsigned HOST_WIDE_INT mulc,
2245 HOST_WIDE_INT &zero_val, unsigned shift, unsigned bits)
2247 tree elt, idx;
2248 unsigned HOST_WIDE_INT i, mask;
2249 unsigned matched = 0;
2251 mask = ((HOST_WIDE_INT_1U << (bits - shift)) - 1) << shift;
2253 zero_val = 0;
2255 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), i, idx, elt)
2257 if (TREE_CODE (idx) != INTEGER_CST || TREE_CODE (elt) != INTEGER_CST)
2258 return false;
2259 if (i > bits * 2)
2260 return false;
2262 unsigned HOST_WIDE_INT index = tree_to_shwi (idx);
2263 HOST_WIDE_INT val = tree_to_shwi (elt);
2265 if (index == 0)
2267 zero_val = val;
2268 matched++;
2271 if (val >= 0 && val < bits && (((mulc << val) & mask) >> shift) == index)
2272 matched++;
2274 if (matched > bits)
2275 return true;
2278 return false;
2281 /* Check whether a string contains a valid ctz table. */
2282 static bool
2283 check_ctz_string (tree string, unsigned HOST_WIDE_INT mulc,
2284 HOST_WIDE_INT &zero_val, unsigned shift, unsigned bits)
2286 unsigned HOST_WIDE_INT len = TREE_STRING_LENGTH (string);
2287 unsigned HOST_WIDE_INT mask;
2288 unsigned matched = 0;
2289 const unsigned char *p = (const unsigned char *) TREE_STRING_POINTER (string);
2291 if (len < bits || len > bits * 2)
2292 return false;
2294 mask = ((HOST_WIDE_INT_1U << (bits - shift)) - 1) << shift;
2296 zero_val = p[0];
2298 for (unsigned i = 0; i < len; i++)
2299 if (p[i] < bits && (((mulc << p[i]) & mask) >> shift) == i)
2300 matched++;
2302 return matched == bits;
2305 /* Recognize count trailing zeroes idiom.
2306 The canonical form is array[((x & -x) * C) >> SHIFT] where C is a magic
2307 constant which when multiplied by a power of 2 creates a unique value
2308 in the top 5 to 7 bits. The result is then used to index a table which maps
2309 it to the number of trailing zeroes. Array[0] is returned so the caller can
2310 emit an appropriate sequence depending on whether ctz (0) is defined on
2311 the target. */
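/* Illustrative sketch (not from the original sources): the classic
   de Bruijn multiply-and-lookup ctz for a 32-bit input has exactly this
   shape; my_ctz and ctz_table are hypothetical names and the table is
   the standard one for the constant 0x077CB531:

     static const char ctz_table[32] =
       { 0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8,
         31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9 };

     int
     my_ctz (unsigned int x)
     {
       return ctz_table[((x & -x) * 0x077CB531u) >> 27];
     }

   Here C is 0x077CB531, SHIFT is 27 (extracting the top 5 bits) and
   Array[0] is 0, so the load can be replaced by an IFN_CTZ call when the
   target supports it and its ctz (0) behaviour is compatible.  */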
2312 static bool
2313 optimize_count_trailing_zeroes (tree array_ref, tree x, tree mulc,
2314 tree tshift, HOST_WIDE_INT &zero_val)
2316 tree type = TREE_TYPE (array_ref);
2317 tree array = TREE_OPERAND (array_ref, 0);
2319 gcc_assert (TREE_CODE (mulc) == INTEGER_CST);
2320 gcc_assert (TREE_CODE (tshift) == INTEGER_CST);
2322 tree input_type = TREE_TYPE (x);
2323 unsigned input_bits = tree_to_shwi (TYPE_SIZE (input_type));
2325 /* Check the array element type is not wider than 32 bits and the input is
2326 an unsigned 32-bit or 64-bit type. */
2327 if (TYPE_PRECISION (type) > 32 || !TYPE_UNSIGNED (input_type))
2328 return false;
2329 if (input_bits != 32 && input_bits != 64)
2330 return false;
2332 if (!direct_internal_fn_supported_p (IFN_CTZ, input_type, OPTIMIZE_FOR_BOTH))
2333 return false;
2335 /* Check the lower bound of the array is zero. */
2336 tree low = array_ref_low_bound (array_ref);
2337 if (!low || !integer_zerop (low))
2338 return false;
2340 unsigned shiftval = tree_to_shwi (tshift);
2342 /* Check the shift extracts the top 5..7 bits. */
2343 if (shiftval < input_bits - 7 || shiftval > input_bits - 5)
2344 return false;
2346 tree ctor = ctor_for_folding (array);
2347 if (!ctor)
2348 return false;
2350 unsigned HOST_WIDE_INT val = tree_to_uhwi (mulc);
2352 if (TREE_CODE (ctor) == CONSTRUCTOR)
2353 return check_ctz_array (ctor, val, zero_val, shiftval, input_bits);
2355 if (TREE_CODE (ctor) == STRING_CST
2356 && TYPE_PRECISION (type) == CHAR_TYPE_SIZE)
2357 return check_ctz_string (ctor, val, zero_val, shiftval, input_bits);
2359 return false;
2362 /* Match.pd function to match the ctz expression. */
2363 extern bool gimple_ctz_table_index (tree, tree *, tree (*)(tree));
2365 static bool
2366 simplify_count_trailing_zeroes (gimple_stmt_iterator *gsi)
2368 gimple *stmt = gsi_stmt (*gsi);
2369 tree array_ref = gimple_assign_rhs1 (stmt);
2370 tree res_ops[3];
2371 HOST_WIDE_INT zero_val;
2373 gcc_checking_assert (TREE_CODE (array_ref) == ARRAY_REF);
2375 if (!gimple_ctz_table_index (TREE_OPERAND (array_ref, 1), &res_ops[0], NULL))
2376 return false;
2378 if (optimize_count_trailing_zeroes (array_ref, res_ops[0],
2379 res_ops[1], res_ops[2], zero_val))
2381 tree type = TREE_TYPE (res_ops[0]);
2382 HOST_WIDE_INT ctz_val = 0;
2383 HOST_WIDE_INT type_size = tree_to_shwi (TYPE_SIZE (type));
2384 bool zero_ok
2385 = CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_INT_TYPE_MODE (type), ctz_val) == 2;
2386 int nargs = 2;
2388 /* If the input value can't be zero, don't special case ctz (0). */
2389 if (tree_expr_nonzero_p (res_ops[0]))
2391 zero_ok = true;
2392 zero_val = 0;
2393 ctz_val = 0;
2394 nargs = 1;
2397 /* Skip if there is no value defined at zero, or if we can't easily
2398 return the correct value for zero. */
2399 if (!zero_ok)
2400 return false;
2401 if (zero_val != ctz_val && !(zero_val == 0 && ctz_val == type_size))
2402 return false;
2404 gimple_seq seq = NULL;
2405 gimple *g;
2406 gcall *call
2407 = gimple_build_call_internal (IFN_CTZ, nargs, res_ops[0],
2408 nargs == 1 ? NULL_TREE
2409 : build_int_cst (integer_type_node,
2410 ctz_val));
2411 gimple_set_location (call, gimple_location (stmt));
2412 gimple_set_lhs (call, make_ssa_name (integer_type_node));
2413 gimple_seq_add_stmt (&seq, call);
2415 tree prev_lhs = gimple_call_lhs (call);
2417 /* Emit ctz (x) & 31 if ctz (0) is 32 but we need to return 0. */
2418 if (zero_val == 0 && ctz_val == type_size)
2420 g = gimple_build_assign (make_ssa_name (integer_type_node),
2421 BIT_AND_EXPR, prev_lhs,
2422 build_int_cst (integer_type_node,
2423 type_size - 1));
2424 gimple_set_location (g, gimple_location (stmt));
2425 gimple_seq_add_stmt (&seq, g);
2426 prev_lhs = gimple_assign_lhs (g);
2429 g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, prev_lhs);
2430 gimple_seq_add_stmt (&seq, g);
2431 gsi_replace_with_seq (gsi, seq, true);
2432 return true;
2435 return false;
2439 /* Combine an element access with a shuffle. Returns true if there were
2440 any changes made, else it returns false. */
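/* Illustrative sketch with made-up SSA names, assuming 4-element vectors
   of 32-bit elements: extracting element 0 of a permuted vector

     _7 = VEC_PERM_EXPR <_1, _2, { 5, 4, 7, 6 }>;
     _3 = BIT_FIELD_REF <_7, 32, 0>;

   can read the selected element directly from the permute input, since
   mask element 0 is 5, i.e. element 1 of the second operand:

     _3 = BIT_FIELD_REF <_2, 32, 32>;  */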
2442 static bool
2443 simplify_bitfield_ref (gimple_stmt_iterator *gsi)
2445 gimple *stmt = gsi_stmt (*gsi);
2446 gimple *def_stmt;
2447 tree op, op0, op1;
2448 tree elem_type, type;
2449 tree p, m, tem;
2450 unsigned HOST_WIDE_INT nelts, idx;
2451 poly_uint64 size, elem_size;
2452 enum tree_code code;
2454 op = gimple_assign_rhs1 (stmt);
2455 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
2457 op0 = TREE_OPERAND (op, 0);
2458 if (TREE_CODE (op0) != SSA_NAME
2459 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
2460 return false;
2462 def_stmt = get_prop_source_stmt (op0, false, NULL);
2463 if (!def_stmt || !can_propagate_from (def_stmt))
2464 return false;
2466 op1 = TREE_OPERAND (op, 1);
2467 code = gimple_assign_rhs_code (def_stmt);
2468 elem_type = TREE_TYPE (TREE_TYPE (op0));
2469 type = TREE_TYPE (op);
2470 /* Also handle vector type.
2471 i.e.
2472 _7 = VEC_PERM_EXPR <_1, _1, { 2, 3, 2, 3 }>;
2473 _11 = BIT_FIELD_REF <_7, 64, 0>;
to
2477 _11 = BIT_FIELD_REF <_1, 64, 64>. */
2479 size = tree_to_poly_uint64 (TYPE_SIZE (type));
2480 if (maybe_ne (bit_field_size (op), size))
2481 return false;
2483 elem_size = tree_to_poly_uint64 (TYPE_SIZE (elem_type));
2484 if (code != VEC_PERM_EXPR
2485 || !constant_multiple_p (bit_field_offset (op), elem_size, &idx))
2486 return false;
2488 m = gimple_assign_rhs3 (def_stmt);
2489 if (TREE_CODE (m) != VECTOR_CST
2490 || !VECTOR_CST_NELTS (m).is_constant (&nelts))
2491 return false;
2493 /* One element. */
2494 if (known_eq (size, elem_size))
2495 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx)) % (2 * nelts);
2496 else
2498 unsigned HOST_WIDE_INT nelts_op;
2499 if (!constant_multiple_p (size, elem_size, &nelts_op)
2500 || !pow2p_hwi (nelts_op))
2501 return false;
2502 /* Clamp vec_perm_expr index. */
2503 unsigned start = TREE_INT_CST_LOW (vector_cst_elt (m, idx)) % (2 * nelts);
2504 unsigned end = TREE_INT_CST_LOW (vector_cst_elt (m, idx + nelts_op - 1))
2505 % (2 * nelts);
2507 /* The first and last selected elements must be in the same input vector. */
2507 if ((start < nelts) != (end < nelts))
2508 return false;
2509 for (unsigned HOST_WIDE_INT i = 1; i != nelts_op; i++)
2511 /* Contiguous area. */
2512 if (TREE_INT_CST_LOW (vector_cst_elt (m, idx + i)) % (2 * nelts) - 1
2513 != TREE_INT_CST_LOW (vector_cst_elt (m, idx + i - 1))
2514 % (2 * nelts))
2515 return false;
2517 /* Alignment not worse than before. */
2518 if (start % nelts_op)
2519 return false;
2520 idx = start;
2523 if (idx < nelts)
2524 p = gimple_assign_rhs1 (def_stmt);
2525 else
2527 p = gimple_assign_rhs2 (def_stmt);
2528 idx -= nelts;
2531 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
2532 p, op1, bitsize_int (idx * elem_size));
2533 gimple_assign_set_rhs1 (stmt, tem);
2534 fold_stmt (gsi);
2535 update_stmt (gsi_stmt (*gsi));
2536 return true;
2539 /* Determine whether applying the 2 permutations (mask1 then mask2)
2540 gives back one of the input. */
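/* Illustrative sketch with made-up names, assuming 4-element vectors:
   with mask1 == mask2 == { 3, 2, 1, 0 } the two element reversals in

     v1 = VEC_PERM_EXPR <v0, v0, { 3, 2, 1, 0 }>;
     v2 = VEC_PERM_EXPR <v1, v1, { 3, 2, 1, 0 }>;

   cancel out, so this function returns 1 and the caller can replace the
   second permutation by a plain copy v2 = v0.  */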
2542 static int
2543 is_combined_permutation_identity (tree mask1, tree mask2)
2545 tree mask;
2546 unsigned HOST_WIDE_INT nelts, i, j;
2547 bool maybe_identity1 = true;
2548 bool maybe_identity2 = true;
2550 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
2551 && TREE_CODE (mask2) == VECTOR_CST);
2553 /* For VLA masks, check for the following pattern:
2554 v1 = VEC_PERM_EXPR (v0, ..., mask1)
2555 v2 = VEC_PERM_EXPR (v1, ..., mask2)
to
2557 v2 = v0
2558 if mask1 == mask2 == {nelts - 1, nelts - 2, ...}. */
2560 if (operand_equal_p (mask1, mask2, 0)
2561 && !VECTOR_CST_NELTS (mask1).is_constant ())
2563 vec_perm_builder builder;
2564 if (tree_to_vec_perm_builder (&builder, mask1))
2566 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (mask1));
2567 vec_perm_indices sel (builder, 1, nelts);
2568 if (sel.series_p (0, 1, nelts - 1, -1))
2569 return 1;
2573 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
2574 if (mask == NULL_TREE || TREE_CODE (mask) != VECTOR_CST)
2575 return 0;
2577 if (!VECTOR_CST_NELTS (mask).is_constant (&nelts))
2578 return 0;
2579 for (i = 0; i < nelts; i++)
2581 tree val = VECTOR_CST_ELT (mask, i);
2582 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2583 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
2584 if (j == i)
2585 maybe_identity2 = false;
2586 else if (j == i + nelts)
2587 maybe_identity1 = false;
2588 else
2589 return 0;
2591 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
2594 /* Combine a shuffle with its arguments. Returns 1 if there were any
2595 changes made, 2 if cfg-cleanup needs to run. Else it returns 0. */
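/* Illustrative sketch with made-up names: one of the cases handled below
   is a shuffle of a CONSTRUCTOR that is not used elsewhere, which folds
   into a reordered CONSTRUCTOR, e.g.

     v_1 = {a_2, b_3, c_4, d_5};
     v_6 = VEC_PERM_EXPR <v_1, v_1, { 3, 2, 1, 0 }>;

   becomes

     v_6 = {d_5, c_4, b_3, a_2};  */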
2597 static int
2598 simplify_permutation (gimple_stmt_iterator *gsi)
2600 gimple *stmt = gsi_stmt (*gsi);
2601 gimple *def_stmt = NULL;
2602 tree op0, op1, op2, op3, arg0, arg1;
2603 enum tree_code code, code2 = ERROR_MARK;
2604 bool single_use_op0 = false;
2606 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
2608 op0 = gimple_assign_rhs1 (stmt);
2609 op1 = gimple_assign_rhs2 (stmt);
2610 op2 = gimple_assign_rhs3 (stmt);
2612 if (TREE_CODE (op2) != VECTOR_CST)
2613 return 0;
2615 if (TREE_CODE (op0) == VECTOR_CST)
2617 code = VECTOR_CST;
2618 arg0 = op0;
2620 else if (TREE_CODE (op0) == SSA_NAME)
2622 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
2623 if (!def_stmt)
2624 return 0;
2625 code = gimple_assign_rhs_code (def_stmt);
2626 if (code == VIEW_CONVERT_EXPR)
2628 tree rhs = gimple_assign_rhs1 (def_stmt);
2629 tree name = TREE_OPERAND (rhs, 0);
2630 if (TREE_CODE (name) != SSA_NAME)
2631 return 0;
2632 if (!has_single_use (name))
2633 single_use_op0 = false;
2634 /* Here we update the def_stmt through this VIEW_CONVERT_EXPR,
2635 but still keep the code to indicate it comes from
2636 VIEW_CONVERT_EXPR. */
2637 def_stmt = SSA_NAME_DEF_STMT (name);
2638 if (!def_stmt || !is_gimple_assign (def_stmt))
2639 return 0;
2640 if (gimple_assign_rhs_code (def_stmt) != CONSTRUCTOR)
2641 return 0;
2643 if (!can_propagate_from (def_stmt))
2644 return 0;
2645 arg0 = gimple_assign_rhs1 (def_stmt);
2647 else
2648 return 0;
2650 /* Two consecutive shuffles. */
2651 if (code == VEC_PERM_EXPR)
2653 tree orig;
2654 int ident;
2656 if (op0 != op1)
2657 return 0;
2658 op3 = gimple_assign_rhs3 (def_stmt);
2659 if (TREE_CODE (op3) != VECTOR_CST)
2660 return 0;
2661 ident = is_combined_permutation_identity (op3, op2);
2662 if (!ident)
2663 return 0;
2664 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
2665 : gimple_assign_rhs2 (def_stmt);
2666 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
2667 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
2668 gimple_set_num_ops (stmt, 2);
2669 update_stmt (stmt);
2670 return remove_prop_source_from_use (op0) ? 2 : 1;
2672 else if (code == CONSTRUCTOR
2673 || code == VECTOR_CST
2674 || code == VIEW_CONVERT_EXPR)
2676 if (op0 != op1)
2678 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
2679 return 0;
2681 if (TREE_CODE (op1) == VECTOR_CST)
2682 arg1 = op1;
2683 else if (TREE_CODE (op1) == SSA_NAME)
2685 gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
2686 if (!def_stmt2)
2687 return 0;
2688 code2 = gimple_assign_rhs_code (def_stmt2);
2689 if (code2 == VIEW_CONVERT_EXPR)
2691 tree rhs = gimple_assign_rhs1 (def_stmt2);
2692 tree name = TREE_OPERAND (rhs, 0);
2693 if (TREE_CODE (name) != SSA_NAME)
2694 return 0;
2695 if (!has_single_use (name))
2696 return 0;
2697 def_stmt2 = SSA_NAME_DEF_STMT (name);
2698 if (!def_stmt2 || !is_gimple_assign (def_stmt2))
2699 return 0;
2700 if (gimple_assign_rhs_code (def_stmt2) != CONSTRUCTOR)
2701 return 0;
2703 else if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
2704 return 0;
2705 if (!can_propagate_from (def_stmt2))
2706 return 0;
2707 arg1 = gimple_assign_rhs1 (def_stmt2);
2709 else
2710 return 0;
2712 else
2714 /* Already used twice in this statement. */
2715 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
2716 return 0;
2717 arg1 = arg0;
2720 /* If there are any VIEW_CONVERT_EXPRs found when finding permutation
2721 operands source, check whether it's valid to transform and prepare
2722 the required new operands. */
2723 if (code == VIEW_CONVERT_EXPR || code2 == VIEW_CONVERT_EXPR)
2725 /* Figure out the target vector type to which operands should be
2726 converted. If both are CONSTRUCTOR, the types should be the
2727 same, otherwise use the type of the CONSTRUCTOR. */
2728 tree tgt_type = NULL_TREE;
2729 if (code == VIEW_CONVERT_EXPR)
2731 gcc_assert (gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR);
2732 code = CONSTRUCTOR;
2733 tgt_type = TREE_TYPE (arg0);
2735 if (code2 == VIEW_CONVERT_EXPR)
2737 tree arg1_type = TREE_TYPE (arg1);
2738 if (tgt_type == NULL_TREE)
2739 tgt_type = arg1_type;
2740 else if (tgt_type != arg1_type)
2741 return 0;
2744 if (!VECTOR_TYPE_P (tgt_type))
2745 return 0;
2746 tree op2_type = TREE_TYPE (op2);
2748 /* Figure out the shrink factor. */
2749 poly_uint64 tgt_units = TYPE_VECTOR_SUBPARTS (tgt_type);
2750 poly_uint64 op2_units = TYPE_VECTOR_SUBPARTS (op2_type);
2751 if (maybe_gt (tgt_units, op2_units))
2752 return 0;
2753 unsigned int factor;
2754 if (!constant_multiple_p (op2_units, tgt_units, &factor))
2755 return 0;
2757 /* Build the new permutation control vector as target vector. */
2758 vec_perm_builder builder;
2759 if (!tree_to_vec_perm_builder (&builder, op2))
2760 return 0;
2761 vec_perm_indices indices (builder, 2, op2_units);
2762 vec_perm_indices new_indices;
2763 if (new_indices.new_shrunk_vector (indices, factor))
2765 tree mask_type = tgt_type;
2766 if (!VECTOR_INTEGER_TYPE_P (mask_type))
2768 tree elem_type = TREE_TYPE (mask_type);
2769 unsigned elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
2770 tree int_type = build_nonstandard_integer_type (elem_size, 0);
2771 mask_type = build_vector_type (int_type, tgt_units);
2773 op2 = vec_perm_indices_to_tree (mask_type, new_indices);
2775 else
2776 return 0;
2778 /* Convert the VECTOR_CST to the appropriate vector type. */
2779 if (tgt_type != TREE_TYPE (arg0))
2780 arg0 = fold_build1 (VIEW_CONVERT_EXPR, tgt_type, arg0);
2781 else if (tgt_type != TREE_TYPE (arg1))
2782 arg1 = fold_build1 (VIEW_CONVERT_EXPR, tgt_type, arg1);
2785 /* Any VIEW_CONVERT_EXPR should have been updated to CONSTRUCTOR above. */
2786 gcc_assert (code == CONSTRUCTOR || code == VECTOR_CST);
2788 /* Shuffle of a constructor. */
2789 bool ret = false;
2790 tree res_type
2791 = build_vector_type (TREE_TYPE (TREE_TYPE (arg0)),
2792 TYPE_VECTOR_SUBPARTS (TREE_TYPE (op2)));
2793 tree opt = fold_ternary (VEC_PERM_EXPR, res_type, arg0, arg1, op2);
2794 if (!opt
2795 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
2796 return 0;
2797 /* We found a VIEW_CONVERT_EXPR earlier, so one explicit conversion is needed. */
2798 if (res_type != TREE_TYPE (op0))
2800 tree name = make_ssa_name (TREE_TYPE (opt));
2801 gimple *ass_stmt = gimple_build_assign (name, opt);
2802 gsi_insert_before (gsi, ass_stmt, GSI_SAME_STMT);
2803 opt = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op0), name);
2805 gimple_assign_set_rhs_from_tree (gsi, opt);
2806 update_stmt (gsi_stmt (*gsi));
2807 if (TREE_CODE (op0) == SSA_NAME)
2808 ret = remove_prop_source_from_use (op0);
2809 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
2810 ret |= remove_prop_source_from_use (op1);
2811 return ret ? 2 : 1;
2814 return 0;
2817 /* Get the BIT_FIELD_REF definition of VAL, if any, looking through
2818 conversions with code CONV_CODE or update it if still ERROR_MARK.
2819 Return NULL_TREE if no such matching def was found. */
2821 static tree
2822 get_bit_field_ref_def (tree val, enum tree_code &conv_code)
2824 if (TREE_CODE (val) != SSA_NAME)
2825 return NULL_TREE;
2826 gimple *def_stmt = get_prop_source_stmt (val, false, NULL);
2827 if (!def_stmt)
2828 return NULL_TREE;
2829 enum tree_code code = gimple_assign_rhs_code (def_stmt);
2830 if (code == FLOAT_EXPR
2831 || code == FIX_TRUNC_EXPR
2832 || CONVERT_EXPR_CODE_P (code))
2834 tree op1 = gimple_assign_rhs1 (def_stmt);
2835 if (conv_code == ERROR_MARK)
2836 conv_code = code;
2837 else if (conv_code != code)
2838 return NULL_TREE;
2839 if (TREE_CODE (op1) != SSA_NAME)
2840 return NULL_TREE;
2841 def_stmt = SSA_NAME_DEF_STMT (op1);
2842 if (! is_gimple_assign (def_stmt))
2843 return NULL_TREE;
2844 code = gimple_assign_rhs_code (def_stmt);
2846 if (code != BIT_FIELD_REF)
2847 return NULL_TREE;
2848 return gimple_assign_rhs1 (def_stmt);
2851 /* Recognize a VEC_PERM_EXPR. Returns true if there were any changes. */
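/* Illustrative sketch with made-up SSA names, assuming 4-element vectors
   of 32-bit elements: a CONSTRUCTOR whose elements are all extracted
   from the same source vector

     _1 = BIT_FIELD_REF <v_5, 32, 32>;
     _2 = BIT_FIELD_REF <v_5, 32, 0>;
     x_6 = {_1, _2, _1, _2};

   can be rewritten, provided the target supports the permutation, as

     x_6 = VEC_PERM_EXPR <v_5, v_5, { 1, 0, 1, 0 }>;  */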
2853 static bool
2854 simplify_vector_constructor (gimple_stmt_iterator *gsi)
2856 gimple *stmt = gsi_stmt (*gsi);
2857 tree op, orig[2], type, elem_type;
2858 unsigned elem_size, i;
2859 unsigned HOST_WIDE_INT nelts;
2860 unsigned HOST_WIDE_INT refnelts;
2861 enum tree_code conv_code;
2862 constructor_elt *elt;
2864 op = gimple_assign_rhs1 (stmt);
2865 type = TREE_TYPE (op);
2866 gcc_checking_assert (TREE_CODE (op) == CONSTRUCTOR
2867 && TREE_CODE (type) == VECTOR_TYPE);
2869 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
2870 return false;
2871 elem_type = TREE_TYPE (type);
2872 elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
2874 orig[0] = NULL;
2875 orig[1] = NULL;
2876 conv_code = ERROR_MARK;
2877 bool maybe_ident = true;
2878 bool maybe_blend[2] = { true, true };
2879 tree one_constant = NULL_TREE;
2880 tree one_nonconstant = NULL_TREE;
2881 auto_vec<tree> constants;
2882 constants.safe_grow_cleared (nelts, true);
2883 auto_vec<std::pair<unsigned, unsigned>, 64> elts;
2884 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
2886 tree ref, op1;
2887 unsigned int elem;
2889 if (i >= nelts)
2890 return false;
2892 /* Look for elements extracted and possibly converted from
2893 another vector. */
2894 op1 = get_bit_field_ref_def (elt->value, conv_code);
2895 if (op1
2896 && TREE_CODE ((ref = TREE_OPERAND (op1, 0))) == SSA_NAME
2897 && VECTOR_TYPE_P (TREE_TYPE (ref))
2898 && useless_type_conversion_p (TREE_TYPE (op1),
2899 TREE_TYPE (TREE_TYPE (ref)))
2900 && constant_multiple_p (bit_field_offset (op1),
2901 bit_field_size (op1), &elem)
2902 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (ref)).is_constant (&refnelts))
2904 unsigned int j;
2905 for (j = 0; j < 2; ++j)
2907 if (!orig[j])
2909 if (j == 0
2910 || useless_type_conversion_p (TREE_TYPE (orig[0]),
2911 TREE_TYPE (ref)))
2912 break;
2914 else if (ref == orig[j])
2915 break;
2917 /* Found a suitable vector element. */
2918 if (j < 2)
2920 orig[j] = ref;
2921 if (elem != i || j != 0)
2922 maybe_ident = false;
2923 if (elem != i)
2924 maybe_blend[j] = false;
2925 elts.safe_push (std::make_pair (j, elem));
2926 continue;
2928 /* Else fallthru. */
2930 /* Handle elements not extracted from a vector.
2931 1. constants by permuting with constant vector
2932 2. a unique non-constant element by permuting with a splat vector */
2933 if (orig[1]
2934 && orig[1] != error_mark_node)
2935 return false;
2936 orig[1] = error_mark_node;
2937 if (CONSTANT_CLASS_P (elt->value))
2939 if (one_nonconstant)
2940 return false;
2941 if (!one_constant)
2942 one_constant = elt->value;
2943 constants[i] = elt->value;
2945 else
2947 if (one_constant)
2948 return false;
2949 if (!one_nonconstant)
2950 one_nonconstant = elt->value;
2951 else if (!operand_equal_p (one_nonconstant, elt->value, 0))
2952 return false;
2954 elts.safe_push (std::make_pair (1, i));
2955 maybe_ident = false;
2957 if (i < nelts)
2958 return false;
2960 if (! orig[0]
2961 || ! VECTOR_TYPE_P (TREE_TYPE (orig[0])))
2962 return false;
2963 refnelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig[0])).to_constant ();
2964 /* We currently do not handle larger destination vectors. */
2965 if (refnelts < nelts)
2966 return false;
2968 if (maybe_ident)
2970 tree conv_src_type
2971 = (nelts != refnelts
2972 ? (conv_code != ERROR_MARK
2973 ? build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])), nelts)
2974 : type)
2975 : TREE_TYPE (orig[0]));
2976 if (conv_code != ERROR_MARK
2977 && !supportable_convert_operation (conv_code, type, conv_src_type,
2978 &conv_code))
2980 /* Only a few targets implement direct conversion patterns so try
2981 some simple special cases via VEC_[UN]PACK[_FLOAT]_LO_EXPR. */
2982 optab optab;
2983 insn_code icode;
2984 tree halfvectype, dblvectype;
2985 enum tree_code unpack_op;
2987 if (!BYTES_BIG_ENDIAN)
2988 unpack_op = (FLOAT_TYPE_P (TREE_TYPE (type))
2989 ? VEC_UNPACK_FLOAT_LO_EXPR
2990 : VEC_UNPACK_LO_EXPR);
2991 else
2992 unpack_op = (FLOAT_TYPE_P (TREE_TYPE (type))
2993 ? VEC_UNPACK_FLOAT_HI_EXPR
2994 : VEC_UNPACK_HI_EXPR);
2996 /* Conversions between DFP and FP have no special tree code
2997 but we cannot handle those since all relevant vector conversion
2998 optabs only have a single mode. */
2999 if (CONVERT_EXPR_CODE_P (conv_code)
3000 && FLOAT_TYPE_P (TREE_TYPE (type))
3001 && (DECIMAL_FLOAT_TYPE_P (TREE_TYPE (type))
3002 != DECIMAL_FLOAT_TYPE_P (TREE_TYPE (conv_src_type))))
3003 return false;
3005 if (CONVERT_EXPR_CODE_P (conv_code)
3006 && (2 * TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig[0])))
3007 == TYPE_PRECISION (TREE_TYPE (type)))
3008 && mode_for_vector (as_a <scalar_mode>
3009 (TYPE_MODE (TREE_TYPE (TREE_TYPE (orig[0])))),
3010 nelts * 2).exists ()
3011 && (dblvectype
3012 = build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])),
3013 nelts * 2))
3014 /* Only use it for vector modes or for vector booleans
3015 represented as scalar bitmasks. See PR95528. */
3016 && (VECTOR_MODE_P (TYPE_MODE (dblvectype))
3017 || VECTOR_BOOLEAN_TYPE_P (dblvectype))
3018 && (optab = optab_for_tree_code (unpack_op,
3019 dblvectype,
3020 optab_default))
3021 && ((icode = optab_handler (optab, TYPE_MODE (dblvectype)))
3022 != CODE_FOR_nothing)
3023 && (insn_data[icode].operand[0].mode == TYPE_MODE (type)))
3025 gimple_seq stmts = NULL;
3026 tree dbl;
3027 if (refnelts == nelts)
3029 /* ??? Paradoxical subregs don't exist, so insert into
3030 the lower half of a wider zero vector. */
3031 dbl = gimple_build (&stmts, BIT_INSERT_EXPR, dblvectype,
3032 build_zero_cst (dblvectype), orig[0],
3033 bitsize_zero_node);
3035 else if (refnelts == 2 * nelts)
3036 dbl = orig[0];
3037 else
3038 dbl = gimple_build (&stmts, BIT_FIELD_REF, dblvectype,
3039 orig[0], TYPE_SIZE (dblvectype),
3040 bitsize_zero_node);
3041 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3042 gimple_assign_set_rhs_with_ops (gsi, unpack_op, dbl);
3044 else if (CONVERT_EXPR_CODE_P (conv_code)
3045 && (TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig[0])))
3046 == 2 * TYPE_PRECISION (TREE_TYPE (type)))
3047 && mode_for_vector (as_a <scalar_mode>
3048 (TYPE_MODE
3049 (TREE_TYPE (TREE_TYPE (orig[0])))),
3050 nelts / 2).exists ()
3051 && (halfvectype
3052 = build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])),
3053 nelts / 2))
3054 /* Only use it for vector modes or for vector booleans
3055 represented as scalar bitmasks. See PR95528. */
3056 && (VECTOR_MODE_P (TYPE_MODE (halfvectype))
3057 || VECTOR_BOOLEAN_TYPE_P (halfvectype))
3058 && (optab = optab_for_tree_code (VEC_PACK_TRUNC_EXPR,
3059 halfvectype,
3060 optab_default))
3061 && ((icode = optab_handler (optab, TYPE_MODE (halfvectype)))
3062 != CODE_FOR_nothing)
3063 && (insn_data[icode].operand[0].mode == TYPE_MODE (type)))
3065 gimple_seq stmts = NULL;
3066 tree low = gimple_build (&stmts, BIT_FIELD_REF, halfvectype,
3067 orig[0], TYPE_SIZE (halfvectype),
3068 bitsize_zero_node);
3069 tree hig = gimple_build (&stmts, BIT_FIELD_REF, halfvectype,
3070 orig[0], TYPE_SIZE (halfvectype),
3071 TYPE_SIZE (halfvectype));
3072 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3073 gimple_assign_set_rhs_with_ops (gsi, VEC_PACK_TRUNC_EXPR,
3074 low, hig);
3076 else
3077 return false;
3078 update_stmt (gsi_stmt (*gsi));
3079 return true;
3081 if (nelts != refnelts)
3083 gassign *lowpart
3084 = gimple_build_assign (make_ssa_name (conv_src_type),
3085 build3 (BIT_FIELD_REF, conv_src_type,
3086 orig[0], TYPE_SIZE (conv_src_type),
3087 bitsize_zero_node));
3088 gsi_insert_before (gsi, lowpart, GSI_SAME_STMT);
3089 orig[0] = gimple_assign_lhs (lowpart);
3091 if (conv_code == ERROR_MARK)
3093 tree src_type = TREE_TYPE (orig[0]);
3094 if (!useless_type_conversion_p (type, src_type))
3096 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type),
3097 TYPE_VECTOR_SUBPARTS (src_type))
3098 && useless_type_conversion_p (TREE_TYPE (type),
3099 TREE_TYPE (src_type)));
3100 tree rhs = build1 (VIEW_CONVERT_EXPR, type, orig[0]);
3101 orig[0] = make_ssa_name (type);
3102 gassign *assign = gimple_build_assign (orig[0], rhs);
3103 gsi_insert_before (gsi, assign, GSI_SAME_STMT);
3105 gimple_assign_set_rhs_from_tree (gsi, orig[0]);
3107 else
3108 gimple_assign_set_rhs_with_ops (gsi, conv_code, orig[0],
3109 NULL_TREE, NULL_TREE);
3111 else
3113 /* If we combine a vector with a non-vector, avoid cases where
3114 we'll obviously end up with more GIMPLE stmts, i.e. when we
3115 won't later fold this to a single insert into the vector
3116 and we originally had a single extract. See PR92819. */
3117 if (nelts == 2
3118 && refnelts > 2
3119 && orig[1] == error_mark_node
3120 && !maybe_blend[0])
3121 return false;
3122 tree mask_type, perm_type, conv_src_type;
3123 perm_type = TREE_TYPE (orig[0]);
3124 conv_src_type = (nelts == refnelts
3125 ? perm_type
3126 : build_vector_type (TREE_TYPE (perm_type), nelts));
3127 if (conv_code != ERROR_MARK
3128 && !supportable_convert_operation (conv_code, type, conv_src_type,
3129 &conv_code))
3130 return false;
3132 /* Now that we know the number of elements of the source, build the
3133 permute vector.
3134 ??? When the second vector has constant values we can shuffle
3135 it and its source indexes to make the permutation supported.
3136 For now it mimics a blend. */
3137 vec_perm_builder sel (refnelts, refnelts, 1);
3138 bool all_same_p = true;
3139 for (i = 0; i < elts.length (); ++i)
3141 sel.quick_push (elts[i].second + elts[i].first * refnelts);
3142 all_same_p &= known_eq (sel[i], sel[0]);
3144 /* And fill the tail with "something". It's really a don't-care value,
3145 and ideally we'd allow VEC_PERM to have a smaller destination
3146 vector. As a heuristic:
3148 (a) if what we have so far duplicates a single element, make the
3149 tail do the same
3151 (b) otherwise preserve a uniform orig[0]. This facilitates
3152 later pattern-matching of VEC_PERM_EXPR to a BIT_INSERT_EXPR. */
3153 for (; i < refnelts; ++i)
3154 sel.quick_push (all_same_p
3155 ? sel[0]
3156 : (elts[0].second == 0 && elts[0].first == 0
3157 ? 0 : refnelts) + i);
3158 vec_perm_indices indices (sel, orig[1] ? 2 : 1, refnelts);
3159 machine_mode vmode = TYPE_MODE (perm_type);
3160 if (!can_vec_perm_const_p (vmode, vmode, indices))
3161 return false;
3162 mask_type
3163 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
3164 refnelts);
3165 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
3166 || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type)),
3167 GET_MODE_SIZE (TYPE_MODE (perm_type))))
3168 return false;
3169 tree op2 = vec_perm_indices_to_tree (mask_type, indices);
3170 bool converted_orig1 = false;
3171 gimple_seq stmts = NULL;
3172 if (!orig[1])
3173 orig[1] = orig[0];
3174 else if (orig[1] == error_mark_node
3175 && one_nonconstant)
3177 /* ??? We can see if we can safely convert to the original
3178 element type. */
3179 converted_orig1 = conv_code != ERROR_MARK;
3180 orig[1] = gimple_build_vector_from_val (&stmts, UNKNOWN_LOCATION,
3181 converted_orig1
3182 ? type : perm_type,
3183 one_nonconstant);
3185 else if (orig[1] == error_mark_node)
3187 /* ??? See if we can convert the vector to the original type. */
3188 converted_orig1 = conv_code != ERROR_MARK;
3189 unsigned n = converted_orig1 ? nelts : refnelts;
3190 tree_vector_builder vec (converted_orig1
3191 ? type : perm_type, n, 1);
3192 for (unsigned i = 0; i < n; ++i)
3193 if (i < nelts && constants[i])
3194 vec.quick_push (constants[i]);
3195 else
3196 /* ??? Push a don't-care value. */
3197 vec.quick_push (one_constant);
3198 orig[1] = vec.build ();
3200 tree blend_op2 = NULL_TREE;
3201 if (converted_orig1)
3203 /* Make sure we can do a blend in the target type. */
3204 vec_perm_builder sel (nelts, nelts, 1);
3205 for (i = 0; i < elts.length (); ++i)
3206 sel.quick_push (elts[i].first
3207 ? elts[i].second + nelts : i);
3208 vec_perm_indices indices (sel, 2, nelts);
3209 machine_mode vmode = TYPE_MODE (type);
3210 if (!can_vec_perm_const_p (vmode, vmode, indices))
3211 return false;
3212 mask_type
3213 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
3214 nelts);
3215 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
3216 || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type)),
3217 GET_MODE_SIZE (TYPE_MODE (type))))
3218 return false;
3219 blend_op2 = vec_perm_indices_to_tree (mask_type, indices);
3221 tree orig1_for_perm
3222 = converted_orig1 ? build_zero_cst (perm_type) : orig[1];
3223 tree res = gimple_build (&stmts, VEC_PERM_EXPR, perm_type,
3224 orig[0], orig1_for_perm, op2);
3225 if (nelts != refnelts)
3226 res = gimple_build (&stmts, BIT_FIELD_REF,
3227 conv_code != ERROR_MARK ? conv_src_type : type,
3228 res, TYPE_SIZE (type), bitsize_zero_node);
3229 if (conv_code != ERROR_MARK)
3230 res = gimple_build (&stmts, conv_code, type, res);
3231 else if (!useless_type_conversion_p (type, TREE_TYPE (res)))
3233 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type),
3234 TYPE_VECTOR_SUBPARTS (perm_type))
3235 && useless_type_conversion_p (TREE_TYPE (type),
3236 TREE_TYPE (perm_type)));
3237 res = gimple_build (&stmts, VIEW_CONVERT_EXPR, type, res);
3239 /* Blend in the actual constant. */
3240 if (converted_orig1)
3241 res = gimple_build (&stmts, VEC_PERM_EXPR, type,
3242 res, orig[1], blend_op2);
3243 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3244 gimple_assign_set_rhs_with_ops (gsi, SSA_NAME, res);
3246 update_stmt (gsi_stmt (*gsi));
3247 return true;
3250 /* Prepare a TARGET_MEM_REF ref so that it can be subsetted as
3251 lvalue. This splits out an address computation stmt before *GSI
3252 and returns a MEM_REF wrapping the address. */
3254 static tree
3255 prepare_target_mem_ref_lvalue (tree ref, gimple_stmt_iterator *gsi)
3257 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
3258 mark_addressable (TREE_OPERAND (TREE_OPERAND (ref, 0), 0));
3259 tree ptrtype = build_pointer_type (TREE_TYPE (ref));
3260 tree tem = make_ssa_name (ptrtype);
3261 gimple *new_stmt
3262 = gimple_build_assign (tem, build1 (ADDR_EXPR, TREE_TYPE (tem),
3263 unshare_expr (ref)));
3264 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3265 ref = build2_loc (EXPR_LOCATION (ref),
3266 MEM_REF, TREE_TYPE (ref), tem,
3267 build_int_cst (TREE_TYPE (TREE_OPERAND (ref, 1)), 0));
3268 return ref;
3271 /* Rewrite the vector load at *GSI to component-wise loads if the load
3272 is only used in BIT_FIELD_REF extractions with eventual intermediate
3273 widening. */
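/* Illustrative sketch with made-up SSA names: if a vector load is only
   consumed by element extractions, e.g.

     vect_1 = MEM <vector(4) float> [(float *)p_2];
     _3 = BIT_FIELD_REF <vect_1, 32, 0>;
     _4 = BIT_FIELD_REF <vect_1, 32, 96>;

   each extraction is re-emitted (roughly) as its own element-sized load
   wrapping the original reference, and the vector load goes away:

     _3 = BIT_FIELD_REF <MEM <vector(4) float> [(float *)p_2], 32, 0>;
     _4 = BIT_FIELD_REF <MEM <vector(4) float> [(float *)p_2], 32, 96>;  */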
3275 static void
3276 optimize_vector_load (gimple_stmt_iterator *gsi)
3278 gimple *stmt = gsi_stmt (*gsi);
3279 tree lhs = gimple_assign_lhs (stmt);
3280 tree rhs = gimple_assign_rhs1 (stmt);
3282 /* Gather BIT_FIELD_REFs to rewrite, looking through
3283 VEC_UNPACK_{LO,HI}_EXPR. */
3284 use_operand_p use_p;
3285 imm_use_iterator iter;
3286 bool rewrite = true;
3287 auto_vec<gimple *, 8> bf_stmts;
3288 auto_vec<tree, 8> worklist;
3289 worklist.quick_push (lhs);
3292 tree def = worklist.pop ();
3293 unsigned HOST_WIDE_INT def_eltsize
3294 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (def))));
3295 FOR_EACH_IMM_USE_FAST (use_p, iter, def)
3297 gimple *use_stmt = USE_STMT (use_p);
3298 if (is_gimple_debug (use_stmt))
3299 continue;
3300 if (!is_gimple_assign (use_stmt))
3302 rewrite = false;
3303 break;
3305 enum tree_code use_code = gimple_assign_rhs_code (use_stmt);
3306 tree use_rhs = gimple_assign_rhs1 (use_stmt);
3307 if (use_code == BIT_FIELD_REF
3308 && TREE_OPERAND (use_rhs, 0) == def
3309 /* If it's on the VEC_UNPACK_{HI,LO}_EXPR
3310 def we need to verify it is element aligned. */
3311 && (def == lhs
3312 || (known_eq (bit_field_size (use_rhs), def_eltsize)
3313 && constant_multiple_p (bit_field_offset (use_rhs),
3314 def_eltsize)
3315 /* We can simulate the VEC_UNPACK_{HI,LO}_EXPR
3316 via a NOP_EXPR only for integral types.
3317 ??? Support VEC_UNPACK_FLOAT_{HI,LO}_EXPR. */
3318 && INTEGRAL_TYPE_P (TREE_TYPE (use_rhs)))))
3320 bf_stmts.safe_push (use_stmt);
3321 continue;
3323 /* Walk through one level of VEC_UNPACK_{LO,HI}_EXPR. */
3324 if (def == lhs
3325 && (use_code == VEC_UNPACK_HI_EXPR
3326 || use_code == VEC_UNPACK_LO_EXPR)
3327 && use_rhs == lhs)
3329 worklist.safe_push (gimple_assign_lhs (use_stmt));
3330 continue;
3332 rewrite = false;
3333 break;
3335 if (!rewrite)
3336 break;
3338 while (!worklist.is_empty ());
3340 if (!rewrite)
3342 gsi_next (gsi);
3343 return;
3345 /* We now have all ultimate uses of the load to rewrite in bf_stmts. */
3347 /* Prepare the original ref to be wrapped in adjusted BIT_FIELD_REFs.
3348 For TARGET_MEM_REFs we have to separate the LEA from the reference. */
3349 tree load_rhs = rhs;
3350 if (TREE_CODE (load_rhs) == TARGET_MEM_REF)
3351 load_rhs = prepare_target_mem_ref_lvalue (load_rhs, gsi);
3353 /* Rewrite the BIT_FIELD_REFs to be actual loads, re-emitting them at
3354 the place of the original load. */
3355 for (gimple *use_stmt : bf_stmts)
3357 tree bfr = gimple_assign_rhs1 (use_stmt);
3358 tree new_rhs = unshare_expr (load_rhs);
3359 if (TREE_OPERAND (bfr, 0) != lhs)
3361 /* When the BIT_FIELD_REF is on the promoted vector we have to
3362 adjust it and emit a conversion afterwards. */
3363 gimple *def_stmt
3364 = SSA_NAME_DEF_STMT (TREE_OPERAND (bfr, 0));
3365 enum tree_code def_code
3366 = gimple_assign_rhs_code (def_stmt);
3368 /* The adjusted BIT_FIELD_REF is of the promotion source
3369 vector size and at half of the offset... */
3370 new_rhs = fold_build3 (BIT_FIELD_REF,
3371 TREE_TYPE (TREE_TYPE (lhs)),
3372 new_rhs,
3373 TYPE_SIZE (TREE_TYPE (TREE_TYPE (lhs))),
3374 size_binop (EXACT_DIV_EXPR,
3375 TREE_OPERAND (bfr, 2),
3376 bitsize_int (2)));
3377 /* ... and offsetted by half of the vector if VEC_UNPACK_HI_EXPR. */
3378 if (def_code == (!BYTES_BIG_ENDIAN
3379 ? VEC_UNPACK_HI_EXPR : VEC_UNPACK_LO_EXPR))
3380 TREE_OPERAND (new_rhs, 2)
3381 = size_binop (PLUS_EXPR, TREE_OPERAND (new_rhs, 2),
3382 size_binop (EXACT_DIV_EXPR,
3383 TYPE_SIZE (TREE_TYPE (lhs)),
3384 bitsize_int (2)));
3385 tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (lhs)));
3386 gimple *new_stmt = gimple_build_assign (tem, new_rhs);
3387 location_t loc = gimple_location (use_stmt);
3388 gimple_set_location (new_stmt, loc);
3389 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3390 /* Perform scalar promotion. */
3391 new_stmt = gimple_build_assign (gimple_assign_lhs (use_stmt),
3392 NOP_EXPR, tem);
3393 gimple_set_location (new_stmt, loc);
3394 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3396 else
3398 /* When the BIT_FIELD_REF is on the original load result
3399 we can just wrap that. */
3400 tree new_rhs = fold_build3 (BIT_FIELD_REF, TREE_TYPE (bfr),
3401 unshare_expr (load_rhs),
3402 TREE_OPERAND (bfr, 1),
3403 TREE_OPERAND (bfr, 2));
3404 gimple *new_stmt = gimple_build_assign (gimple_assign_lhs (use_stmt),
3405 new_rhs);
3406 location_t loc = gimple_location (use_stmt);
3407 gimple_set_location (new_stmt, loc);
3408 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3410 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
3411 unlink_stmt_vdef (use_stmt);
3412 gsi_remove (&gsi2, true);
3415 /* Finally get rid of the intermediate stmts. */
3416 gimple *use_stmt;
3417 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
3419 if (is_gimple_debug (use_stmt))
3421 if (gimple_debug_bind_p (use_stmt))
3423 gimple_debug_bind_reset_value (use_stmt);
3424 update_stmt (use_stmt);
3426 continue;
3428 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
3429 unlink_stmt_vdef (use_stmt);
3430 release_defs (use_stmt);
3431 gsi_remove (&gsi2, true);
3433 /* And the original load. */
3434 release_defs (stmt);
3435 gsi_remove (gsi, true);
3439 /* Primitive "lattice" function for gimple_simplify. */
3441 static tree
3442 fwprop_ssa_val (tree name)
3444 /* First valueize NAME. */
3445 if (TREE_CODE (name) == SSA_NAME
3446 && SSA_NAME_VERSION (name) < lattice.length ())
3448 tree val = lattice[SSA_NAME_VERSION (name)];
3449 if (val)
3450 name = val;
3452 /* We continue matching along SSA use-def edges for SSA names
3453 that are not single-use. Currently there are no patterns
3454 that would cause any issues with that. */
3455 return name;
3458 /* Main entry point for the forward propagation and statement combine
3459 optimizer. */
3461 namespace {
3463 const pass_data pass_data_forwprop =
3465 GIMPLE_PASS, /* type */
3466 "forwprop", /* name */
3467 OPTGROUP_NONE, /* optinfo_flags */
3468 TV_TREE_FORWPROP, /* tv_id */
3469 ( PROP_cfg | PROP_ssa ), /* properties_required */
3470 0, /* properties_provided */
3471 0, /* properties_destroyed */
3472 0, /* todo_flags_start */
3473 TODO_update_ssa, /* todo_flags_finish */
3476 class pass_forwprop : public gimple_opt_pass
3478 public:
3479 pass_forwprop (gcc::context *ctxt)
3480 : gimple_opt_pass (pass_data_forwprop, ctxt)
3483 /* opt_pass methods: */
3484 opt_pass * clone () final override { return new pass_forwprop (m_ctxt); }
3485 bool gate (function *) final override { return flag_tree_forwprop; }
3486 unsigned int execute (function *) final override;
3488 }; // class pass_forwprop
3490 unsigned int
3491 pass_forwprop::execute (function *fun)
3493 unsigned int todoflags = 0;
3495 cfg_changed = false;
3497 /* Combine stmts with the stmts defining their operands. Do that
3498 in an order that guarantees visiting SSA defs before SSA uses. */
3499 lattice.create (num_ssa_names);
3500 lattice.quick_grow_cleared (num_ssa_names);
3501 int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
3502 int postorder_num = pre_and_rev_post_order_compute_fn (fun, NULL,
3503 postorder, false);
3504 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fun));
3505 for (int i = 0; i < postorder_num; ++i)
3507 bb_to_rpo[postorder[i]] = i;
3508 edge_iterator ei;
3509 edge e;
3510 FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (fun, postorder[i])->succs)
3511 e->flags &= ~EDGE_EXECUTABLE;
3513 single_succ_edge (BASIC_BLOCK_FOR_FN (fun, ENTRY_BLOCK))->flags
3514 |= EDGE_EXECUTABLE;
3515 auto_vec<gimple *, 4> to_fixup;
3516 auto_vec<gimple *, 32> to_remove;
3517 auto_bitmap simple_dce_worklist;
3518 auto_bitmap need_ab_cleanup;
3519 to_purge = BITMAP_ALLOC (NULL);
3520 for (int i = 0; i < postorder_num; ++i)
3522 gimple_stmt_iterator gsi;
3523 basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
3524 edge_iterator ei;
3525 edge e;
3527 /* Skip processing non-executable blocks. We could improve
3528 single_use tracking by at least unlinking uses from unreachable
3529 blocks but since blocks with uses are not processed in a
3530 meaningful order this is probably not worth it. */
3531 bool any = false;
3532 FOR_EACH_EDGE (e, ei, bb->preds)
3534 if ((e->flags & EDGE_EXECUTABLE)
3535 /* With dominators we could improve backedge handling
3536 when e->src is dominated by bb. But for irreducible
3537 regions we have to take all backedges conservatively.
3538 We can handle single-block cycles as we know the
3539 dominator relationship here. */
3540 || bb_to_rpo[e->src->index] > i)
3542 any = true;
3543 break;
3546 if (!any)
3547 continue;
3549 /* Record degenerate PHIs in the lattice. */
3550 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
3551 gsi_next (&si))
3553 gphi *phi = si.phi ();
3554 tree res = gimple_phi_result (phi);
3555 if (virtual_operand_p (res))
3556 continue;
3558 tree first = NULL_TREE;
3559 bool all_same = true;
3560 edge_iterator ei;
3561 edge e;
3562 FOR_EACH_EDGE (e, ei, bb->preds)
3564 /* Ignore non-executable forward edges. */
3565 if (!(e->flags & EDGE_EXECUTABLE))
3567 if (bb_to_rpo[e->src->index] < i)
3568 continue;
3569 /* Avoid equivalences from backedges - while we might
3570 be able to make irreducible regions reducible and
3571 thus turn a back edge into a forward edge, we do not
3572 want to deal with the intermediate SSA issues that
3573 this exposes.  */
3574 all_same = false;
3576 tree use = PHI_ARG_DEF_FROM_EDGE (phi, e);
3577 if (use == res)
3578 /* The PHI result can also appear on a backedge, if so
3579 we can ignore this case for the purpose of determining
3580 the singular value. */
3582 else if (! first)
3583 first = use;
3584 else if (! operand_equal_p (first, use, 0))
3586 all_same = false;
3587 break;
3590 if (all_same)
3592 if (may_propagate_copy (res, first))
3593 to_remove.safe_push (phi);
3594 fwprop_set_lattice_val (res, first);
3598 /* Apply forward propagation to all stmts in the basic-block.
3599 Note we update GSI within the loop as necessary. */
3600 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
3602 gimple *stmt = gsi_stmt (gsi);
3603 tree lhs, rhs;
3604 enum tree_code code;
3606 if (!is_gimple_assign (stmt))
3608 gsi_next (&gsi);
3609 continue;
3612 lhs = gimple_assign_lhs (stmt);
3613 rhs = gimple_assign_rhs1 (stmt);
3614 code = gimple_assign_rhs_code (stmt);
3615 if (TREE_CODE (lhs) != SSA_NAME
3616 || has_zero_uses (lhs))
3618 gsi_next (&gsi);
3619 continue;
3622 /* If this statement sets an SSA_NAME to an address,
3623 try to propagate the address into the uses of the SSA_NAME. */
3624 if ((code == ADDR_EXPR
3625 /* Handle pointer conversions on invariant addresses
3626 as well, as this is valid gimple. */
3627 || (CONVERT_EXPR_CODE_P (code)
3628 && TREE_CODE (rhs) == ADDR_EXPR
3629 && POINTER_TYPE_P (TREE_TYPE (lhs))))
3630 && TREE_CODE (TREE_OPERAND (rhs, 0)) != TARGET_MEM_REF)
3632 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3633 if ((!base
3634 || !DECL_P (base)
3635 || decl_address_invariant_p (base))
3636 && !stmt_references_abnormal_ssa_name (stmt)
3637 && forward_propagate_addr_expr (lhs, rhs, true))
3639 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
3640 release_defs (stmt);
3641 gsi_remove (&gsi, true);
3643 else
3644 gsi_next (&gsi);
3646 else if (code == POINTER_PLUS_EXPR)
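/* A sketch of the intent (names are made up): a constant offset

     ptr_1 = p_2 + 8;
     x_3 = *ptr_1;

   is presented to forward_propagate_addr_expr as if it were
   ptr_1 = &MEM[p_2 + 8], so the dereference can be rewritten to

     x_3 = MEM[p_2 + 8];  */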
3648 tree off = gimple_assign_rhs2 (stmt);
3649 if (TREE_CODE (off) == INTEGER_CST
3650 && can_propagate_from (stmt)
3651 && !simple_iv_increment_p (stmt)
3652 /* ??? Better adjust the interface to that function
3653 instead of building new trees here. */
3654 && forward_propagate_addr_expr
3655 (lhs,
3656 build1_loc (gimple_location (stmt),
3657 ADDR_EXPR, TREE_TYPE (rhs),
3658 fold_build2 (MEM_REF,
3659 TREE_TYPE (TREE_TYPE (rhs)),
3660 rhs,
3661 fold_convert (ptr_type_node,
3662 off))), true))
3664 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
3665 release_defs (stmt);
3666 gsi_remove (&gsi, true);
3668 else if (is_gimple_min_invariant (rhs))
3670 /* Make sure to fold &a[0] + off_1 here. */
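/* E.g. ptr_1 = &a[0] + 4 may fold to ptr_1 = &a[1] when the array
   elements are 4 bytes wide (an illustrative example only).  */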
3671 fold_stmt_inplace (&gsi);
3672 update_stmt (stmt);
3673 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
3674 gsi_next (&gsi);
3676 else
3677 gsi_next (&gsi);
3679 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
3680 && gimple_assign_load_p (stmt)
3681 && !gimple_has_volatile_ops (stmt)
3682 && (TREE_CODE (gimple_assign_rhs1 (stmt))
3683 != TARGET_MEM_REF)
3684 && !stmt_can_throw_internal (fun, stmt))
3686 /* Rewrite loads used only in real/imagpart extractions to
3687 component-wise loads. */
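/* An illustrative sketch (made-up names):

     tem_1 = *p_2;
     r_3 = REALPART_EXPR <tem_1>;
     i_4 = IMAGPART_EXPR <tem_1>;

   becomes

     r_3 = REALPART_EXPR <*p_2>;
     i_4 = IMAGPART_EXPR <*p_2>;

   provided tem_1 has no other non-debug uses.  */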
3688 use_operand_p use_p;
3689 imm_use_iterator iter;
3690 bool rewrite = true;
3691 FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
3693 gimple *use_stmt = USE_STMT (use_p);
3694 if (is_gimple_debug (use_stmt))
3695 continue;
3696 if (!is_gimple_assign (use_stmt)
3697 || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
3698 && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR)
3699 || TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) != lhs)
3701 rewrite = false;
3702 break;
3705 if (rewrite)
3707 gimple *use_stmt;
3708 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
3710 if (is_gimple_debug (use_stmt))
3712 if (gimple_debug_bind_p (use_stmt))
3714 gimple_debug_bind_reset_value (use_stmt);
3715 update_stmt (use_stmt);
3717 continue;
3720 tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
3721 TREE_TYPE (TREE_TYPE (rhs)),
3722 unshare_expr (rhs));
3723 gimple *new_stmt
3724 = gimple_build_assign (gimple_assign_lhs (use_stmt),
3725 new_rhs);
3727 location_t loc = gimple_location (use_stmt);
3728 gimple_set_location (new_stmt, loc);
3729 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
3730 unlink_stmt_vdef (use_stmt);
3731 gsi_remove (&gsi2, true);
3733 gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
3736 release_defs (stmt);
3737 gsi_remove (&gsi, true);
3739 else
3740 gsi_next (&gsi);
3742 else if (TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE
3743 && (TYPE_MODE (TREE_TYPE (lhs)) == BLKmode
3744 /* After vector lowering rewrite all loads, but do not
3745 do so initially since this conflicts with the vector
3746 CONSTRUCTOR to shuffle optimization. */
3747 || (fun->curr_properties & PROP_gimple_lvec))
3748 && gimple_assign_load_p (stmt)
3749 && !gimple_has_volatile_ops (stmt)
3750 && !stmt_can_throw_internal (fun, stmt)
3751 && (!VAR_P (rhs) || !DECL_HARD_REGISTER (rhs)))
3752 optimize_vector_load (&gsi);
3754 else if (code == COMPLEX_EXPR)
3756 /* Rewrite stores of a single-use complex build expression
3757 to component-wise stores. */
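/* An illustrative sketch (made-up names):

     x_1 = COMPLEX_EXPR <a_2, b_3>;
     *p_4 = x_1;

   becomes

     REALPART_EXPR <*p_4> = a_2;
     IMAGPART_EXPR <*p_4> = b_3;

   when x_1 is used only by the store.  */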
3758 use_operand_p use_p;
3759 gimple *use_stmt, *def1, *def2;
3760 tree rhs2;
3761 if (single_imm_use (lhs, &use_p, &use_stmt)
3762 && gimple_store_p (use_stmt)
3763 && !gimple_has_volatile_ops (use_stmt)
3764 && is_gimple_assign (use_stmt)
3765 && (TREE_CODE (gimple_assign_lhs (use_stmt))
3766 != TARGET_MEM_REF))
3768 tree use_lhs = gimple_assign_lhs (use_stmt);
3769 if (auto_var_p (use_lhs))
3770 DECL_NOT_GIMPLE_REG_P (use_lhs) = 1;
3771 tree new_lhs = build1 (REALPART_EXPR,
3772 TREE_TYPE (TREE_TYPE (use_lhs)),
3773 unshare_expr (use_lhs));
3774 gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
3775 location_t loc = gimple_location (use_stmt);
3776 gimple_set_location (new_stmt, loc);
3777 gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
3778 gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (fun)));
3779 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
3780 gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
3781 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
3782 gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
3784 new_lhs = build1 (IMAGPART_EXPR,
3785 TREE_TYPE (TREE_TYPE (use_lhs)),
3786 unshare_expr (use_lhs));
3787 gimple_assign_set_lhs (use_stmt, new_lhs);
3788 gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
3789 update_stmt (use_stmt);
3791 release_defs (stmt);
3792 gsi_remove (&gsi, true);
3794 /* Rewrite a component-wise load of a complex to a complex
3795 load if the components are not used separately. */
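/* For example (illustrative, made-up names):

     a_1 = REALPART_EXPR <*p_2>;
     b_3 = IMAGPART_EXPR <*p_2>;
     x_4 = COMPLEX_EXPR <a_1, b_3>;

   becomes a single complex load

     x_4 = *p_2;

   when a_1 and b_3 have no other uses and both loads share the
   same VUSE.  */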
3796 else if (TREE_CODE (rhs) == SSA_NAME
3797 && has_single_use (rhs)
3798 && ((rhs2 = gimple_assign_rhs2 (stmt)), true)
3799 && TREE_CODE (rhs2) == SSA_NAME
3800 && has_single_use (rhs2)
3801 && (def1 = SSA_NAME_DEF_STMT (rhs),
3802 gimple_assign_load_p (def1))
3803 && (def2 = SSA_NAME_DEF_STMT (rhs2),
3804 gimple_assign_load_p (def2))
3805 && (gimple_vuse (def1) == gimple_vuse (def2))
3806 && !gimple_has_volatile_ops (def1)
3807 && !gimple_has_volatile_ops (def2)
3808 && !stmt_can_throw_internal (fun, def1)
3809 && !stmt_can_throw_internal (fun, def2)
3810 && gimple_assign_rhs_code (def1) == REALPART_EXPR
3811 && gimple_assign_rhs_code (def2) == IMAGPART_EXPR
3812 && operand_equal_p (TREE_OPERAND (gimple_assign_rhs1
3813 (def1), 0),
3814 TREE_OPERAND (gimple_assign_rhs1
3815 (def2), 0)))
3817 tree cl = TREE_OPERAND (gimple_assign_rhs1 (def1), 0);
3818 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (cl));
3819 gcc_assert (gsi_stmt (gsi) == stmt);
3820 gimple_set_vuse (stmt, gimple_vuse (def1));
3821 gimple_set_modified (stmt, true);
3822 gimple_stmt_iterator gsi2 = gsi_for_stmt (def1);
3823 gsi_remove (&gsi, false);
3824 gsi_insert_after (&gsi2, stmt, GSI_SAME_STMT);
3826 else
3827 gsi_next (&gsi);
3829 else if (code == CONSTRUCTOR
3830 && VECTOR_TYPE_P (TREE_TYPE (rhs))
3831 && TYPE_MODE (TREE_TYPE (rhs)) == BLKmode
3832 && CONSTRUCTOR_NELTS (rhs) > 0
3833 && (!VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
3834 || (TYPE_MODE (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
3835 != BLKmode)))
3837 /* Rewrite stores of a single-use vector constructor
3838 to component-wise stores if the mode isn't supported. */
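/* An illustrative sketch (made-up names; a four-element vector of
   32-bit elements whose mode is BLKmode):

     v_1 = {a_2, b_3, c_4, d_5};
     *p_6 = v_1;

   becomes four element-wise stores

     BIT_FIELD_REF <*p_6, 32, 0> = a_2;
     BIT_FIELD_REF <*p_6, 32, 32> = b_3;
     BIT_FIELD_REF <*p_6, 32, 64> = c_4;
     BIT_FIELD_REF <*p_6, 32, 96> = d_5;

   with any trailing elements not present in the constructor stored
   as zero.  */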
3839 use_operand_p use_p;
3840 gimple *use_stmt;
3841 if (single_imm_use (lhs, &use_p, &use_stmt)
3842 && gimple_store_p (use_stmt)
3843 && !gimple_has_volatile_ops (use_stmt)
3844 && !stmt_can_throw_internal (fun, use_stmt)
3845 && is_gimple_assign (use_stmt))
3847 tree elt_t = TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value);
3848 unsigned HOST_WIDE_INT elt_w
3849 = tree_to_uhwi (TYPE_SIZE (elt_t));
3850 unsigned HOST_WIDE_INT n
3851 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs)));
3852 tree use_lhs = gimple_assign_lhs (use_stmt);
3853 if (auto_var_p (use_lhs))
3854 DECL_NOT_GIMPLE_REG_P (use_lhs) = 1;
3855 else if (TREE_CODE (use_lhs) == TARGET_MEM_REF)
3857 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
3858 use_lhs = prepare_target_mem_ref_lvalue (use_lhs, &gsi2);
3860 for (unsigned HOST_WIDE_INT bi = 0; bi < n; bi += elt_w)
3862 unsigned HOST_WIDE_INT ci = bi / elt_w;
3863 tree new_rhs;
3864 if (ci < CONSTRUCTOR_NELTS (rhs))
3865 new_rhs = CONSTRUCTOR_ELT (rhs, ci)->value;
3866 else
3867 new_rhs = build_zero_cst (elt_t);
3868 tree new_lhs = build3 (BIT_FIELD_REF,
3869 elt_t,
3870 unshare_expr (use_lhs),
3871 bitsize_int (elt_w),
3872 bitsize_int (bi));
3873 gimple *new_stmt = gimple_build_assign (new_lhs, new_rhs);
3874 location_t loc = gimple_location (use_stmt);
3875 gimple_set_location (new_stmt, loc);
3876 gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
3877 gimple_set_vdef (new_stmt,
3878 make_ssa_name (gimple_vop (fun)));
3879 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
3880 gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
3881 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
3882 gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
3884 gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
3885 unlink_stmt_vdef (use_stmt);
3886 release_defs (use_stmt);
3887 gsi_remove (&gsi2, true);
3888 release_defs (stmt);
3889 gsi_remove (&gsi, true);
3891 else
3892 gsi_next (&gsi);
3894 else
3895 gsi_next (&gsi);
3898 /* Combine stmts with the stmts defining their operands.
3899 Note we update GSI within the loop as necessary. */
3900 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3902 gimple *stmt = gsi_stmt (gsi);
3904 /* Mark stmt as potentially needing revisiting. */
3905 gimple_set_plf (stmt, GF_PLF_1, false);
3907 bool can_make_abnormal_goto = (is_gimple_call (stmt)
3908 && stmt_can_make_abnormal_goto (stmt));
3910 /* Substitute from our lattice. We need to do so only once. */
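/* For instance, if the lattice recorded x_1 -> y_2 (made-up names),
   every use of x_1 in this stmt is replaced by y_2 and x_1 is
   queued on the simple-DCE worklist.  */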
3911 bool substituted_p = false;
3912 use_operand_p usep;
3913 ssa_op_iter iter;
3914 FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_USE)
3916 tree use = USE_FROM_PTR (usep);
3917 tree val = fwprop_ssa_val (use);
3918 if (val && val != use)
3920 bitmap_set_bit (simple_dce_worklist, SSA_NAME_VERSION (use));
3921 if (may_propagate_copy (use, val))
3923 propagate_value (usep, val);
3924 substituted_p = true;
3928 if (substituted_p
3929 && is_gimple_assign (stmt)
3930 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
3931 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
3932 if (substituted_p
3933 && can_make_abnormal_goto
3934 && !stmt_can_make_abnormal_goto (stmt))
3935 bitmap_set_bit (need_ab_cleanup, bb->index);
3937 bool changed;
3940 gimple *orig_stmt = stmt = gsi_stmt (gsi);
3941 bool was_noreturn = (is_gimple_call (stmt)
3942 && gimple_call_noreturn_p (stmt));
3943 changed = false;
3945 auto_vec<tree, 8> uses;
3946 FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_USE)
3947 if (uses.space (1))
3948 uses.quick_push (USE_FROM_PTR (usep));
3950 if (fold_stmt (&gsi, fwprop_ssa_val))
3952 changed = true;
3953 stmt = gsi_stmt (gsi);
3954 /* Clean up the CFG if we simplified a condition to
3955 true or false. */
3956 if (gcond *cond = dyn_cast <gcond *> (stmt))
3957 if (gimple_cond_true_p (cond)
3958 || gimple_cond_false_p (cond))
3959 cfg_changed = true;
3960 /* Queue old uses for simple DCE. */
3961 for (tree use : uses)
3962 if (TREE_CODE (use) == SSA_NAME
3963 && !SSA_NAME_IS_DEFAULT_DEF (use))
3964 bitmap_set_bit (simple_dce_worklist,
3965 SSA_NAME_VERSION (use));
3968 if (changed || substituted_p)
3970 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
3971 bitmap_set_bit (to_purge, bb->index);
3972 if (!was_noreturn
3973 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
3974 to_fixup.safe_push (stmt);
3975 update_stmt (stmt);
3976 substituted_p = false;
3979 switch (gimple_code (stmt))
3981 case GIMPLE_ASSIGN:
3983 tree rhs1 = gimple_assign_rhs1 (stmt);
3984 enum tree_code code = gimple_assign_rhs_code (stmt);
3986 if (TREE_CODE_CLASS (code) == tcc_comparison)
3988 int did_something;
3989 did_something = forward_propagate_into_comparison (&gsi);
3990 if (maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (gsi)))
3991 bitmap_set_bit (to_purge, bb->index);
3992 if (did_something == 2)
3993 cfg_changed = true;
3994 changed = did_something != 0;
3996 else if ((code == PLUS_EXPR
3997 || code == BIT_IOR_EXPR
3998 || code == BIT_XOR_EXPR)
3999 && simplify_rotate (&gsi))
4000 changed = true;
4001 else if (code == VEC_PERM_EXPR)
4003 int did_something = simplify_permutation (&gsi);
4004 if (did_something == 2)
4005 cfg_changed = true;
4006 changed = did_something != 0;
4008 else if (code == BIT_FIELD_REF)
4009 changed = simplify_bitfield_ref (&gsi);
4010 else if (code == CONSTRUCTOR
4011 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
4012 changed = simplify_vector_constructor (&gsi);
4013 else if (code == ARRAY_REF)
4014 changed = simplify_count_trailing_zeroes (&gsi);
4015 break;
4018 case GIMPLE_SWITCH:
4019 changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
4020 break;
4022 case GIMPLE_COND:
4024 int did_something = forward_propagate_into_gimple_cond
4025 (as_a <gcond *> (stmt));
4026 if (did_something == 2)
4027 cfg_changed = true;
4028 changed = did_something != 0;
4029 break;
4032 case GIMPLE_CALL:
4034 tree callee = gimple_call_fndecl (stmt);
4035 if (callee != NULL_TREE
4036 && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
4037 changed = simplify_builtin_call (&gsi, callee);
4038 break;
4041 default:;
4044 if (changed)
4046 /* If the stmt changed then re-visit it and the statements
4047 inserted before it. */
4048 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
4049 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
4050 break;
4051 if (gsi_end_p (gsi))
4052 gsi = gsi_start_bb (bb);
4053 else
4054 gsi_next (&gsi);
4057 while (changed);
4059 /* Stmt no longer needs to be revisited. */
4060 stmt = gsi_stmt (gsi);
4061 gcc_checking_assert (!gimple_plf (stmt, GF_PLF_1));
4062 gimple_set_plf (stmt, GF_PLF_1, true);
4064 /* Fill up the lattice. */
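/* A sketch with made-up names: after

     y_2 = z_3;
     x_1 = y_2;

   the lattice maps y_2 to z_3 and hence x_1 to z_3 as well, so
   later uses of x_1 are substituted with z_3 and both copies can be
   queued for removal.  */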
4065 if (gimple_assign_single_p (stmt))
4067 tree lhs = gimple_assign_lhs (stmt);
4068 tree rhs = gimple_assign_rhs1 (stmt);
4069 if (TREE_CODE (lhs) == SSA_NAME)
4071 tree val = lhs;
4072 if (TREE_CODE (rhs) == SSA_NAME)
4073 val = fwprop_ssa_val (rhs);
4074 else if (is_gimple_min_invariant (rhs))
4075 val = rhs;
4076 /* If we can propagate the lattice value, mark the
4077 stmt for removal. */
4078 if (val != lhs
4079 && may_propagate_copy (lhs, val))
4080 to_remove.safe_push (stmt);
4081 fwprop_set_lattice_val (lhs, val);
4084 else if (gimple_nop_p (stmt))
4085 to_remove.safe_push (stmt);
4088 /* Substitute in destination PHI arguments. */
4089 FOR_EACH_EDGE (e, ei, bb->succs)
4090 for (gphi_iterator gsi = gsi_start_phis (e->dest);
4091 !gsi_end_p (gsi); gsi_next (&gsi))
4093 gphi *phi = gsi.phi ();
4094 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
4095 tree arg = USE_FROM_PTR (use_p);
4096 if (TREE_CODE (arg) != SSA_NAME
4097 || virtual_operand_p (arg))
4098 continue;
4099 tree val = fwprop_ssa_val (arg);
4100 if (val != arg
4101 && may_propagate_copy (arg, val, !(e->flags & EDGE_ABNORMAL)))
4102 propagate_value (use_p, val);
4105 /* Mark outgoing executable edges. */
4106 if (edge e = find_taken_edge (bb, NULL))
4108 e->flags |= EDGE_EXECUTABLE;
4109 if (EDGE_COUNT (bb->succs) > 1)
4110 cfg_changed = true;
4112 else
4114 FOR_EACH_EDGE (e, ei, bb->succs)
4115 e->flags |= EDGE_EXECUTABLE;
4118 free (postorder);
4119 free (bb_to_rpo);
4120 lattice.release ();
4122 /* Remove stmts in reverse order to make debug stmt creation possible. */
4123 while (!to_remove.is_empty())
4125 gimple *stmt = to_remove.pop ();
4126 /* For example, remove_prop_source_from_use can remove stmts queued
4127 for removal. Deal with this gracefully. */
4128 if (!gimple_bb (stmt))
4129 continue;
4130 if (dump_file && (dump_flags & TDF_DETAILS))
4132 fprintf (dump_file, "Removing dead stmt ");
4133 print_gimple_stmt (dump_file, stmt, 0);
4134 fprintf (dump_file, "\n");
4136 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4137 if (gimple_code (stmt) == GIMPLE_PHI)
4138 remove_phi_node (&gsi, true);
4139 else
4141 unlink_stmt_vdef (stmt);
4142 gsi_remove (&gsi, true);
4143 release_defs (stmt);
4146 simple_dce_from_worklist (simple_dce_worklist, to_purge);
4148 /* Fix up stmts that became noreturn calls. This may require splitting
4149 blocks and thus isn't possible during the walk. Do this
4150 in reverse order so we don't inadvertently remove a stmt we want to
4151 fix up by visiting a dominating now-noreturn call first. */
4152 while (!to_fixup.is_empty ())
4154 gimple *stmt = to_fixup.pop ();
4155 if (dump_file && dump_flags & TDF_DETAILS)
4157 fprintf (dump_file, "Fixing up noreturn call ");
4158 print_gimple_stmt (dump_file, stmt, 0);
4159 fprintf (dump_file, "\n");
4161 cfg_changed |= fixup_noreturn_call (stmt);
4164 cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
4165 cfg_changed |= gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
4166 BITMAP_FREE (to_purge);
4168 if (get_range_query (fun) != get_global_range_query ())
4169 disable_ranger (fun);
4171 if (cfg_changed)
4172 todoflags |= TODO_cleanup_cfg;
4174 return todoflags;
4177 } // anon namespace
4179 gimple_opt_pass *
4180 make_pass_forwprop (gcc::context *ctxt)
4182 return new pass_forwprop (ctxt);