* tree.h (TYPE_OVERFLOW_SANITIZED): Define.
[official-gcc.git] / gcc / tree-ssa-forwprop.c
blobfeb825394565324cf7112cf669fe4726ae623294
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stor-layout.h"
26 #include "tm_p.h"
27 #include "predict.h"
28 #include "vec.h"
29 #include "hashtab.h"
30 #include "hash-set.h"
31 #include "machmode.h"
32 #include "hard-reg-set.h"
33 #include "input.h"
34 #include "function.h"
35 #include "dominance.h"
36 #include "cfg.h"
37 #include "basic-block.h"
38 #include "gimple-pretty-print.h"
39 #include "tree-ssa-alias.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimple-expr.h"
44 #include "is-a.h"
45 #include "gimple.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-ssa.h"
50 #include "tree-cfg.h"
51 #include "tree-phinodes.h"
52 #include "ssa-iterators.h"
53 #include "stringpool.h"
54 #include "tree-ssanames.h"
55 #include "expr.h"
56 #include "tree-dfa.h"
57 #include "tree-pass.h"
58 #include "langhooks.h"
59 #include "flags.h"
60 #include "diagnostic.h"
61 #include "expr.h"
62 #include "cfgloop.h"
63 #include "insn-codes.h"
64 #include "optabs.h"
65 #include "tree-ssa-propagate.h"
66 #include "tree-ssa-dom.h"
67 #include "builtins.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-into-ssa.h"
70 #include "cfganal.h"
72 /* This pass propagates the RHS of assignment statements into use
73 sites of the LHS of the assignment. It's basically a specialized
74 form of tree combination. It is hoped all of this can disappear
75 when we have a generalized tree combiner.
77 One class of common cases we handle is forward propagating a single use
78 variable into a COND_EXPR.
80 bb0:
81 x = a COND b;
82 if (x) goto ... else goto ...
84 Will be transformed into:
86 bb0:
87 if (a COND b) goto ... else goto ...
89 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
91 Or (assuming c1 and c2 are constants):
93 bb0:
94 x = a + c1;
95 if (x EQ/NEQ c2) goto ... else goto ...
97 Will be transformed into:
99 bb0:
100 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
102 Similarly for x = a - c1.
106 bb0:
107 x = !a
108 if (x) goto ... else goto ...
110 Will be transformed into:
112 bb0:
113 if (a == 0) goto ... else goto ...
115 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
116 For these cases, we propagate A into all, possibly more than one,
117 COND_EXPRs that use X.
121 bb0:
122 x = (typecast) a
123 if (x) goto ... else goto ...
125 Will be transformed into:
127 bb0:
128 if (a != 0) goto ... else goto ...
130 (Assuming a is an integral type and x is a boolean or x is an
131 integral and a is a boolean.)
133 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
134 For these cases, we propagate A into all, possibly more than one,
135 COND_EXPRs that use X.
137 In addition to eliminating the variable and the statement which assigns
138 a value to the variable, we may be able to later thread the jump without
139 adding insane complexity in the dominator optimizer.
141 Also note these transformations can cascade. We handle this by having
142 a worklist of COND_EXPR statements to examine. As we make a change to
143 a statement, we put it back on the worklist to examine on the next
144 iteration of the main loop.
146 A second class of propagation opportunities arises for ADDR_EXPR
147 nodes.
149 ptr = &x->y->z;
150 res = *ptr;
152 Will get turned into
154 res = x->y->z;
157 ptr = (type1*)&type2var;
158 res = *ptr
160 Will get turned into (if type1 and type2 are the same size
161 and neither have volatile on them):
162 res = VIEW_CONVERT_EXPR<type1>(type2var)
166 ptr = &x[0];
167 ptr2 = ptr + <constant>;
169 Will get turned into
171 ptr2 = &x[constant/elementsize];
175 ptr = &x[0];
176 offset = index * element_size;
177 offset_p = (pointer) offset;
178 ptr2 = ptr + offset_p
180 Will get turned into:
182 ptr2 = &x[index];
185 ssa = (int) decl
186 res = ssa & 1
188 Provided that decl has known alignment >= 2, will get turned into
190 res = 0
192 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
193 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
194 {NOT_EXPR,NEG_EXPR}.
196 This will (of course) be extended as other needs arise. */
198 static bool forward_propagate_addr_expr (tree, tree, bool);
200 /* Set to true if we delete dead edges during the optimization. */
201 static bool cfg_changed;
203 static tree rhs_to_tree (tree type, gimple stmt);
205 static bitmap to_purge;
207 /* Const-and-copy lattice. */
208 static vec<tree> lattice;
210 /* Set the lattice entry for NAME to VAL. */
211 static void
212 fwprop_set_lattice_val (tree name, tree val)
214 if (TREE_CODE (name) == SSA_NAME)
216 if (SSA_NAME_VERSION (name) >= lattice.length ())
218 lattice.reserve (num_ssa_names - lattice.length ());
219 lattice.quick_grow_cleared (num_ssa_names);
221 lattice[SSA_NAME_VERSION (name)] = val;
225 /* Invalidate the lattice entry for NAME, done when releasing SSA names. */
226 static void
227 fwprop_invalidate_lattice (tree name)
229 if (name
230 && TREE_CODE (name) == SSA_NAME
231 && SSA_NAME_VERSION (name) < lattice.length ())
232 lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
236 /* Get the next statement we can propagate NAME's value into skipping
237 trivial copies. Returns the statement that is suitable as a
238 propagation destination or NULL_TREE if there is no such one.
239 This only returns destinations in a single-use chain. FINAL_NAME_P
240 if non-NULL is written to the ssa name that represents the use. */
242 static gimple
243 get_prop_dest_stmt (tree name, tree *final_name_p)
245 use_operand_p use;
246 gimple use_stmt;
248 do {
249 /* If name has multiple uses, bail out. */
250 if (!single_imm_use (name, &use, &use_stmt))
251 return NULL;
253 /* If this is not a trivial copy, we found it. */
254 if (!gimple_assign_ssa_name_copy_p (use_stmt)
255 || gimple_assign_rhs1 (use_stmt) != name)
256 break;
258 /* Continue searching uses of the copy destination. */
259 name = gimple_assign_lhs (use_stmt);
260 } while (1);
262 if (final_name_p)
263 *final_name_p = name;
265 return use_stmt;
268 /* Get the statement we can propagate from into NAME skipping
269 trivial copies. Returns the statement which defines the
270 propagation source or NULL_TREE if there is no such one.
271 If SINGLE_USE_ONLY is set considers only sources which have
272 a single use chain up to NAME. If SINGLE_USE_P is non-null,
273 it is set to whether the chain to NAME is a single use chain
274 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
276 static gimple
277 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
279 bool single_use = true;
281 do {
282 gimple def_stmt = SSA_NAME_DEF_STMT (name);
284 if (!has_single_use (name))
286 single_use = false;
287 if (single_use_only)
288 return NULL;
291 /* If name is defined by a PHI node or is the default def, bail out. */
292 if (!is_gimple_assign (def_stmt))
293 return NULL;
295 /* If def_stmt is a simple copy, continue looking. */
296 if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
297 name = gimple_assign_rhs1 (def_stmt);
298 else
300 if (!single_use_only && single_use_p)
301 *single_use_p = single_use;
303 return def_stmt;
305 } while (1);
308 /* Checks if the destination ssa name in DEF_STMT can be used as
309 propagation source. Returns true if so, otherwise false. */
311 static bool
312 can_propagate_from (gimple def_stmt)
314 gcc_assert (is_gimple_assign (def_stmt));
316 /* If the rhs has side-effects we cannot propagate from it. */
317 if (gimple_has_volatile_ops (def_stmt))
318 return false;
320 /* If the rhs is a load we cannot propagate from it. */
321 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
322 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
323 return false;
325 /* Constants can be always propagated. */
326 if (gimple_assign_single_p (def_stmt)
327 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
328 return true;
330 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
331 if (stmt_references_abnormal_ssa_name (def_stmt))
332 return false;
334 /* If the definition is a conversion of a pointer to a function type,
335 then we can not apply optimizations as some targets require
336 function pointers to be canonicalized and in this case this
337 optimization could eliminate a necessary canonicalization. */
338 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
340 tree rhs = gimple_assign_rhs1 (def_stmt);
341 if (POINTER_TYPE_P (TREE_TYPE (rhs))
342 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
343 return false;
346 return true;
349 /* Remove a chain of dead statements starting at the definition of
350 NAME. The chain is linked via the first operand of the defining statements.
351 If NAME was replaced in its only use then this function can be used
352 to clean up dead stmts. The function handles already released SSA
353 names gracefully.
354 Returns true if cleanup-cfg has to run. */
356 static bool
357 remove_prop_source_from_use (tree name)
359 gimple_stmt_iterator gsi;
360 gimple stmt;
361 bool cfg_changed = false;
363 do {
364 basic_block bb;
366 if (SSA_NAME_IN_FREE_LIST (name)
367 || SSA_NAME_IS_DEFAULT_DEF (name)
368 || !has_zero_uses (name))
369 return cfg_changed;
371 stmt = SSA_NAME_DEF_STMT (name);
372 if (gimple_code (stmt) == GIMPLE_PHI
373 || gimple_has_side_effects (stmt))
374 return cfg_changed;
376 bb = gimple_bb (stmt);
377 gsi = gsi_for_stmt (stmt);
378 unlink_stmt_vdef (stmt);
379 if (gsi_remove (&gsi, true))
380 bitmap_set_bit (to_purge, bb->index);
381 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
382 release_defs (stmt);
384 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
385 } while (name && TREE_CODE (name) == SSA_NAME);
387 return cfg_changed;
390 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
391 converted to type TYPE.
393 This should disappear, but is needed so we can combine expressions and use
394 the fold() interfaces. Long term, we need to develop folding and combine
395 routines that deal with gimple exclusively . */
397 static tree
398 rhs_to_tree (tree type, gimple stmt)
400 location_t loc = gimple_location (stmt);
401 enum tree_code code = gimple_assign_rhs_code (stmt);
402 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
403 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
404 gimple_assign_rhs2 (stmt),
405 gimple_assign_rhs3 (stmt));
406 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
407 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
408 gimple_assign_rhs2 (stmt));
409 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
410 return build1 (code, type, gimple_assign_rhs1 (stmt));
411 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
412 return gimple_assign_rhs1 (stmt);
413 else
414 gcc_unreachable ();
417 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
418 the folded result in a form suitable for COND_EXPR_COND or
419 NULL_TREE, if there is no suitable simplified form. If
420 INVARIANT_ONLY is true only gimple_min_invariant results are
421 considered simplified. */
423 static tree
424 combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
425 tree op0, tree op1, bool invariant_only)
427 tree t;
429 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
431 fold_defer_overflow_warnings ();
432 t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
433 if (!t)
435 fold_undefer_overflow_warnings (false, NULL, 0);
436 return NULL_TREE;
439 /* Require that we got a boolean type out if we put one in. */
440 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
442 /* Canonicalize the combined condition for use in a COND_EXPR. */
443 t = canonicalize_cond_expr_cond (t);
445 /* Bail out if we required an invariant but didn't get one. */
446 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
448 fold_undefer_overflow_warnings (false, NULL, 0);
449 return NULL_TREE;
452 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);
454 return t;
457 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
458 of its operand. Return a new comparison tree or NULL_TREE if there
459 were no simplifying combines. */
461 static tree
462 forward_propagate_into_comparison_1 (gimple stmt,
463 enum tree_code code, tree type,
464 tree op0, tree op1)
466 tree tmp = NULL_TREE;
467 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
468 bool single_use0_p = false, single_use1_p = false;
470 /* For comparisons use the first operand, that is likely to
471 simplify comparisons against constants. */
472 if (TREE_CODE (op0) == SSA_NAME)
474 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
475 if (def_stmt && can_propagate_from (def_stmt))
477 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
478 tmp = combine_cond_expr_cond (stmt, code, type,
479 rhs0, op1, !single_use0_p);
480 if (tmp)
481 return tmp;
485 /* If that wasn't successful, try the second operand. */
486 if (TREE_CODE (op1) == SSA_NAME)
488 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
489 if (def_stmt && can_propagate_from (def_stmt))
491 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
492 tmp = combine_cond_expr_cond (stmt, code, type,
493 op0, rhs1, !single_use1_p);
494 if (tmp)
495 return tmp;
499 /* If that wasn't successful either, try both operands. */
500 if (rhs0 != NULL_TREE
501 && rhs1 != NULL_TREE)
502 tmp = combine_cond_expr_cond (stmt, code, type,
503 rhs0, rhs1,
504 !(single_use0_p && single_use1_p));
506 return tmp;
509 /* Propagate from the ssa name definition statements of the assignment
510 from a comparison at *GSI into the conditional if that simplifies it.
511 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
512 otherwise returns 0. */
514 static int
515 forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
517 gimple stmt = gsi_stmt (*gsi);
518 tree tmp;
519 bool cfg_changed = false;
520 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
521 tree rhs1 = gimple_assign_rhs1 (stmt);
522 tree rhs2 = gimple_assign_rhs2 (stmt);
524 /* Combine the comparison with defining statements. */
525 tmp = forward_propagate_into_comparison_1 (stmt,
526 gimple_assign_rhs_code (stmt),
527 type, rhs1, rhs2);
528 if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
530 gimple_assign_set_rhs_from_tree (gsi, tmp);
531 fold_stmt (gsi);
532 update_stmt (gsi_stmt (*gsi));
534 if (TREE_CODE (rhs1) == SSA_NAME)
535 cfg_changed |= remove_prop_source_from_use (rhs1);
536 if (TREE_CODE (rhs2) == SSA_NAME)
537 cfg_changed |= remove_prop_source_from_use (rhs2);
538 return cfg_changed ? 2 : 1;
541 return 0;
544 /* Propagate from the ssa name definition statements of COND_EXPR
545 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
546 Returns zero if no statement was changed, one if there were
547 changes and two if cfg_cleanup needs to run.
549 This must be kept in sync with forward_propagate_into_cond. */
551 static int
552 forward_propagate_into_gimple_cond (gimple stmt)
554 tree tmp;
555 enum tree_code code = gimple_cond_code (stmt);
556 bool cfg_changed = false;
557 tree rhs1 = gimple_cond_lhs (stmt);
558 tree rhs2 = gimple_cond_rhs (stmt);
560 /* We can do tree combining on SSA_NAME and comparison expressions. */
561 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
562 return 0;
564 tmp = forward_propagate_into_comparison_1 (stmt, code,
565 boolean_type_node,
566 rhs1, rhs2);
567 if (tmp)
569 if (dump_file && tmp)
571 fprintf (dump_file, " Replaced '");
572 print_gimple_expr (dump_file, stmt, 0, 0);
573 fprintf (dump_file, "' with '");
574 print_generic_expr (dump_file, tmp, 0);
575 fprintf (dump_file, "'\n");
578 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
579 update_stmt (stmt);
581 if (TREE_CODE (rhs1) == SSA_NAME)
582 cfg_changed |= remove_prop_source_from_use (rhs1);
583 if (TREE_CODE (rhs2) == SSA_NAME)
584 cfg_changed |= remove_prop_source_from_use (rhs2);
585 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
588 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
589 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
590 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
591 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
592 && ((code == EQ_EXPR
593 && integer_zerop (rhs2))
594 || (code == NE_EXPR
595 && integer_onep (rhs2))))
597 basic_block bb = gimple_bb (stmt);
598 gimple_cond_set_code (stmt, NE_EXPR);
599 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
600 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
601 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
602 return 1;
605 return 0;
609 /* Propagate from the ssa name definition statements of COND_EXPR
610 in the rhs of statement STMT into the conditional if that simplifies it.
611 Returns true zero if the stmt was changed. */
613 static bool
614 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
616 gimple stmt = gsi_stmt (*gsi_p);
617 tree tmp = NULL_TREE;
618 tree cond = gimple_assign_rhs1 (stmt);
619 enum tree_code code = gimple_assign_rhs_code (stmt);
621 /* We can do tree combining on SSA_NAME and comparison expressions. */
622 if (COMPARISON_CLASS_P (cond))
623 tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
624 TREE_TYPE (cond),
625 TREE_OPERAND (cond, 0),
626 TREE_OPERAND (cond, 1));
627 else if (TREE_CODE (cond) == SSA_NAME)
629 enum tree_code def_code;
630 tree name = cond;
631 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
632 if (!def_stmt || !can_propagate_from (def_stmt))
633 return 0;
635 def_code = gimple_assign_rhs_code (def_stmt);
636 if (TREE_CODE_CLASS (def_code) == tcc_comparison)
637 tmp = fold_build2_loc (gimple_location (def_stmt),
638 def_code,
639 TREE_TYPE (cond),
640 gimple_assign_rhs1 (def_stmt),
641 gimple_assign_rhs2 (def_stmt));
644 if (tmp
645 && is_gimple_condexpr (tmp))
647 if (dump_file && tmp)
649 fprintf (dump_file, " Replaced '");
650 print_generic_expr (dump_file, cond, 0);
651 fprintf (dump_file, "' with '");
652 print_generic_expr (dump_file, tmp, 0);
653 fprintf (dump_file, "'\n");
656 if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
657 : integer_onep (tmp))
658 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
659 else if (integer_zerop (tmp))
660 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
661 else
662 gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
663 stmt = gsi_stmt (*gsi_p);
664 update_stmt (stmt);
666 return true;
669 return 0;
672 /* We've just substituted an ADDR_EXPR into stmt. Update all the
673 relevant data structures to match. */
675 static void
676 tidy_after_forward_propagate_addr (gimple stmt)
678 /* We may have turned a trapping insn into a non-trapping insn. */
679 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
680 bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
682 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
683 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
686 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
687 ADDR_EXPR <whatever>.
689 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
690 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
691 node or for recovery of array indexing from pointer arithmetic.
693 Return true if the propagation was successful (the propagation can
694 be not totally successful, yet things may have been changed). */
696 static bool
697 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
698 gimple_stmt_iterator *use_stmt_gsi,
699 bool single_use_p)
701 tree lhs, rhs, rhs2, array_ref;
702 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
703 enum tree_code rhs_code;
704 bool res = true;
706 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
708 lhs = gimple_assign_lhs (use_stmt);
709 rhs_code = gimple_assign_rhs_code (use_stmt);
710 rhs = gimple_assign_rhs1 (use_stmt);
712 /* Do not perform copy-propagation but recurse through copy chains. */
713 if (TREE_CODE (lhs) == SSA_NAME
714 && rhs_code == SSA_NAME)
715 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
717 /* The use statement could be a conversion. Recurse to the uses of the
718 lhs as copyprop does not copy through pointer to integer to pointer
719 conversions and FRE does not catch all cases either.
720 Treat the case of a single-use name and
721 a conversion to def_rhs type separate, though. */
722 if (TREE_CODE (lhs) == SSA_NAME
723 && CONVERT_EXPR_CODE_P (rhs_code))
725 /* If there is a point in a conversion chain where the types match
726 so we can remove a conversion re-materialize the address here
727 and stop. */
728 if (single_use_p
729 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
731 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
732 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
733 return true;
736 /* Else recurse if the conversion preserves the address value. */
737 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
738 || POINTER_TYPE_P (TREE_TYPE (lhs)))
739 && (TYPE_PRECISION (TREE_TYPE (lhs))
740 >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
741 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
743 return false;
746 /* If this isn't a conversion chain from this on we only can propagate
747 into compatible pointer contexts. */
748 if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
749 return false;
751 /* Propagate through constant pointer adjustments. */
752 if (TREE_CODE (lhs) == SSA_NAME
753 && rhs_code == POINTER_PLUS_EXPR
754 && rhs == name
755 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
757 tree new_def_rhs;
758 /* As we come here with non-invariant addresses in def_rhs we need
759 to make sure we can build a valid constant offsetted address
760 for further propagation. Simply rely on fold building that
761 and check after the fact. */
762 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
763 def_rhs,
764 fold_convert (ptr_type_node,
765 gimple_assign_rhs2 (use_stmt)));
766 if (TREE_CODE (new_def_rhs) == MEM_REF
767 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
768 return false;
769 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
770 TREE_TYPE (rhs));
772 /* Recurse. If we could propagate into all uses of lhs do not
773 bother to replace into the current use but just pretend we did. */
774 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
775 && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
776 return true;
778 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
779 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
780 new_def_rhs, NULL_TREE);
781 else if (is_gimple_min_invariant (new_def_rhs))
782 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
783 new_def_rhs, NULL_TREE);
784 else
785 return false;
786 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
787 update_stmt (use_stmt);
788 return true;
791 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
792 ADDR_EXPR will not appear on the LHS. */
793 tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
794 while (handled_component_p (*lhsp))
795 lhsp = &TREE_OPERAND (*lhsp, 0);
796 lhs = *lhsp;
798 /* Now see if the LHS node is a MEM_REF using NAME. If so,
799 propagate the ADDR_EXPR into the use of NAME and fold the result. */
800 if (TREE_CODE (lhs) == MEM_REF
801 && TREE_OPERAND (lhs, 0) == name)
803 tree def_rhs_base;
804 HOST_WIDE_INT def_rhs_offset;
805 /* If the address is invariant we can always fold it. */
806 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
807 &def_rhs_offset)))
809 offset_int off = mem_ref_offset (lhs);
810 tree new_ptr;
811 off += def_rhs_offset;
812 if (TREE_CODE (def_rhs_base) == MEM_REF)
814 off += mem_ref_offset (def_rhs_base);
815 new_ptr = TREE_OPERAND (def_rhs_base, 0);
817 else
818 new_ptr = build_fold_addr_expr (def_rhs_base);
819 TREE_OPERAND (lhs, 0) = new_ptr;
820 TREE_OPERAND (lhs, 1)
821 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
822 tidy_after_forward_propagate_addr (use_stmt);
823 /* Continue propagating into the RHS if this was not the only use. */
824 if (single_use_p)
825 return true;
827 /* If the LHS is a plain dereference and the value type is the same as
828 that of the pointed-to type of the address we can put the
829 dereferenced address on the LHS preserving the original alias-type. */
830 else if (integer_zerop (TREE_OPERAND (lhs, 1))
831 && ((gimple_assign_lhs (use_stmt) == lhs
832 && useless_type_conversion_p
833 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
834 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
835 || types_compatible_p (TREE_TYPE (lhs),
836 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
837 /* Don't forward anything into clobber stmts if it would result
838 in the lhs no longer being a MEM_REF. */
839 && (!gimple_clobber_p (use_stmt)
840 || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
842 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
843 tree new_offset, new_base, saved, new_lhs;
844 while (handled_component_p (*def_rhs_basep))
845 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
846 saved = *def_rhs_basep;
847 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
849 new_base = TREE_OPERAND (*def_rhs_basep, 0);
850 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
851 TREE_OPERAND (*def_rhs_basep, 1));
853 else
855 new_base = build_fold_addr_expr (*def_rhs_basep);
856 new_offset = TREE_OPERAND (lhs, 1);
858 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
859 new_base, new_offset);
860 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
861 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
862 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
863 new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
864 *lhsp = new_lhs;
865 TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
866 TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
867 *def_rhs_basep = saved;
868 tidy_after_forward_propagate_addr (use_stmt);
869 /* Continue propagating into the RHS if this was not the
870 only use. */
871 if (single_use_p)
872 return true;
874 else
875 /* We can have a struct assignment dereferencing our name twice.
876 Note that we didn't propagate into the lhs to not falsely
877 claim we did when propagating into the rhs. */
878 res = false;
881 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
882 nodes from the RHS. */
883 tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
884 if (TREE_CODE (*rhsp) == ADDR_EXPR)
885 rhsp = &TREE_OPERAND (*rhsp, 0);
886 while (handled_component_p (*rhsp))
887 rhsp = &TREE_OPERAND (*rhsp, 0);
888 rhs = *rhsp;
890 /* Now see if the RHS node is a MEM_REF using NAME. If so,
891 propagate the ADDR_EXPR into the use of NAME and fold the result. */
892 if (TREE_CODE (rhs) == MEM_REF
893 && TREE_OPERAND (rhs, 0) == name)
895 tree def_rhs_base;
896 HOST_WIDE_INT def_rhs_offset;
897 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
898 &def_rhs_offset)))
900 offset_int off = mem_ref_offset (rhs);
901 tree new_ptr;
902 off += def_rhs_offset;
903 if (TREE_CODE (def_rhs_base) == MEM_REF)
905 off += mem_ref_offset (def_rhs_base);
906 new_ptr = TREE_OPERAND (def_rhs_base, 0);
908 else
909 new_ptr = build_fold_addr_expr (def_rhs_base);
910 TREE_OPERAND (rhs, 0) = new_ptr;
911 TREE_OPERAND (rhs, 1)
912 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
913 fold_stmt_inplace (use_stmt_gsi);
914 tidy_after_forward_propagate_addr (use_stmt);
915 return res;
917 /* If the RHS is a plain dereference and the value type is the same as
918 that of the pointed-to type of the address we can put the
919 dereferenced address on the RHS preserving the original alias-type. */
920 else if (integer_zerop (TREE_OPERAND (rhs, 1))
921 && ((gimple_assign_rhs1 (use_stmt) == rhs
922 && useless_type_conversion_p
923 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
924 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
925 || types_compatible_p (TREE_TYPE (rhs),
926 TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
928 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
929 tree new_offset, new_base, saved, new_rhs;
930 while (handled_component_p (*def_rhs_basep))
931 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
932 saved = *def_rhs_basep;
933 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
935 new_base = TREE_OPERAND (*def_rhs_basep, 0);
936 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
937 TREE_OPERAND (*def_rhs_basep, 1));
939 else
941 new_base = build_fold_addr_expr (*def_rhs_basep);
942 new_offset = TREE_OPERAND (rhs, 1);
944 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
945 new_base, new_offset);
946 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
947 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
948 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
949 new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
950 *rhsp = new_rhs;
951 TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
952 TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
953 *def_rhs_basep = saved;
954 fold_stmt_inplace (use_stmt_gsi);
955 tidy_after_forward_propagate_addr (use_stmt);
956 return res;
960 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
961 is nothing to do. */
962 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
963 || gimple_assign_rhs1 (use_stmt) != name)
964 return false;
966 /* The remaining cases are all for turning pointer arithmetic into
967 array indexing. They only apply when we have the address of
968 element zero in an array. If that is not the case then there
969 is nothing to do. */
970 array_ref = TREE_OPERAND (def_rhs, 0);
971 if ((TREE_CODE (array_ref) != ARRAY_REF
972 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
973 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
974 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
975 return false;
977 rhs2 = gimple_assign_rhs2 (use_stmt);
978 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
979 if (TREE_CODE (rhs2) == INTEGER_CST)
981 tree new_rhs = build1_loc (gimple_location (use_stmt),
982 ADDR_EXPR, TREE_TYPE (def_rhs),
983 fold_build2 (MEM_REF,
984 TREE_TYPE (TREE_TYPE (def_rhs)),
985 unshare_expr (def_rhs),
986 fold_convert (ptr_type_node,
987 rhs2)));
988 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
989 use_stmt = gsi_stmt (*use_stmt_gsi);
990 update_stmt (use_stmt);
991 tidy_after_forward_propagate_addr (use_stmt);
992 return true;
995 return false;
998 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
1000 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
1001 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1002 node or for recovery of array indexing from pointer arithmetic.
1004 PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
1005 the single use in the previous invocation. Pass true when calling
1006 this as toplevel.
1008 Returns true, if all uses have been propagated into. */
1010 static bool
1011 forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
1013 imm_use_iterator iter;
1014 gimple use_stmt;
1015 bool all = true;
1016 bool single_use_p = parent_single_use_p && has_single_use (name);
1018 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
1020 bool result;
1021 tree use_rhs;
1023 /* If the use is not in a simple assignment statement, then
1024 there is nothing we can do. */
1025 if (!is_gimple_assign (use_stmt))
1027 if (!is_gimple_debug (use_stmt))
1028 all = false;
1029 continue;
1032 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1033 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1034 single_use_p);
1035 /* If the use has moved to a different statement adjust
1036 the update machinery for the old statement too. */
1037 if (use_stmt != gsi_stmt (gsi))
1039 update_stmt (use_stmt);
1040 use_stmt = gsi_stmt (gsi);
1042 update_stmt (use_stmt);
1043 all &= result;
1045 /* Remove intermediate now unused copy and conversion chains. */
1046 use_rhs = gimple_assign_rhs1 (use_stmt);
1047 if (result
1048 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1049 && TREE_CODE (use_rhs) == SSA_NAME
1050 && has_zero_uses (gimple_assign_lhs (use_stmt)))
1052 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1053 fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
1054 release_defs (use_stmt);
1055 gsi_remove (&gsi, true);
1059 return all && has_zero_uses (name);
1063 /* Forward propagate the comparison defined in *DEFGSI like
1064 cond_1 = x CMP y to uses of the form
1065 a_1 = (T')cond_1
1066 a_1 = !cond_1
1067 a_1 = cond_1 != 0
1068 Returns true if stmt is now unused. Advance DEFGSI to the next
1069 statement. */
1071 static bool
1072 forward_propagate_comparison (gimple_stmt_iterator *defgsi)
1074 gimple stmt = gsi_stmt (*defgsi);
1075 tree name = gimple_assign_lhs (stmt);
1076 gimple use_stmt;
1077 tree tmp = NULL_TREE;
1078 gimple_stmt_iterator gsi;
1079 enum tree_code code;
1080 tree lhs;
1082 /* Don't propagate ssa names that occur in abnormal phis. */
1083 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1084 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1085 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1086 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1087 goto bailout;
1089 /* Do not un-cse comparisons. But propagate through copies. */
1090 use_stmt = get_prop_dest_stmt (name, &name);
1091 if (!use_stmt
1092 || !is_gimple_assign (use_stmt))
1093 goto bailout;
1095 code = gimple_assign_rhs_code (use_stmt);
1096 lhs = gimple_assign_lhs (use_stmt);
1097 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
1098 goto bailout;
1100 /* We can propagate the condition into a statement that
1101 computes the logical negation of the comparison result. */
1102 if ((code == BIT_NOT_EXPR
1103 && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
1104 || (code == BIT_XOR_EXPR
1105 && integer_onep (gimple_assign_rhs2 (use_stmt))))
1107 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
1108 bool nans = HONOR_NANS (TYPE_MODE (type));
1109 enum tree_code inv_code;
1110 inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1111 if (inv_code == ERROR_MARK)
1112 goto bailout;
1114 tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1115 gimple_assign_rhs2 (stmt));
1117 else
1118 goto bailout;
1120 gsi = gsi_for_stmt (use_stmt);
1121 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1122 use_stmt = gsi_stmt (gsi);
1123 update_stmt (use_stmt);
1125 if (dump_file && (dump_flags & TDF_DETAILS))
1127 fprintf (dump_file, " Replaced '");
1128 print_gimple_expr (dump_file, stmt, 0, dump_flags);
1129 fprintf (dump_file, "' with '");
1130 print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
1131 fprintf (dump_file, "'\n");
1134 /* When we remove stmt now the iterator defgsi goes off it's current
1135 sequence, hence advance it now. */
1136 gsi_next (defgsi);
1138 /* Remove defining statements. */
1139 return remove_prop_source_from_use (name);
1141 bailout:
1142 gsi_next (defgsi);
1143 return false;
1147 /* Helper function for simplify_gimple_switch. Remove case labels that
1148 have values outside the range of the new type. */
1150 static void
1151 simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
1153 unsigned int branch_num = gimple_switch_num_labels (stmt);
1154 auto_vec<tree> labels (branch_num);
1155 unsigned int i, len;
1157 /* Collect the existing case labels in a VEC, and preprocess it as if
1158 we are gimplifying a GENERIC SWITCH_EXPR. */
1159 for (i = 1; i < branch_num; i++)
1160 labels.quick_push (gimple_switch_label (stmt, i));
1161 preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
1163 /* If any labels were removed, replace the existing case labels
1164 in the GIMPLE_SWITCH statement with the correct ones.
1165 Note that the type updates were done in-place on the case labels,
1166 so we only have to replace the case labels in the GIMPLE_SWITCH
1167 if the number of labels changed. */
1168 len = labels.length ();
1169 if (len < branch_num - 1)
1171 bitmap target_blocks;
1172 edge_iterator ei;
1173 edge e;
1175 /* Corner case: *all* case labels have been removed as being
1176 out-of-range for INDEX_TYPE. Push one label and let the
1177 CFG cleanups deal with this further. */
1178 if (len == 0)
1180 tree label, elt;
1182 label = CASE_LABEL (gimple_switch_default_label (stmt));
1183 elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
1184 labels.quick_push (elt);
1185 len = 1;
1188 for (i = 0; i < labels.length (); i++)
1189 gimple_switch_set_label (stmt, i + 1, labels[i]);
1190 for (i++ ; i < branch_num; i++)
1191 gimple_switch_set_label (stmt, i, NULL_TREE);
1192 gimple_switch_set_num_labels (stmt, len + 1);
1194 /* Cleanup any edges that are now dead. */
1195 target_blocks = BITMAP_ALLOC (NULL);
1196 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
1198 tree elt = gimple_switch_label (stmt, i);
1199 basic_block target = label_to_block (CASE_LABEL (elt));
1200 bitmap_set_bit (target_blocks, target->index);
1202 for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
1204 if (! bitmap_bit_p (target_blocks, e->dest->index))
1206 remove_edge (e);
1207 cfg_changed = true;
1208 free_dominance_info (CDI_DOMINATORS);
1210 else
1211 ei_next (&ei);
1213 BITMAP_FREE (target_blocks);
1217 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1218 the condition which we may be able to optimize better. */
1220 static bool
1221 simplify_gimple_switch (gimple stmt)
1223 /* The optimization that we really care about is removing unnecessary
1224 casts. That will let us do much better in propagating the inferred
1225 constant at the switch target. */
1226 tree cond = gimple_switch_index (stmt);
1227 if (TREE_CODE (cond) == SSA_NAME)
1229 gimple def_stmt = SSA_NAME_DEF_STMT (cond);
1230 if (gimple_assign_cast_p (def_stmt))
1232 tree def = gimple_assign_rhs1 (def_stmt);
1233 if (TREE_CODE (def) != SSA_NAME)
1234 return false;
1236 /* If we have an extension or sign-change that preserves the
1237 values we check against then we can copy the source value into
1238 the switch. */
1239 tree ti = TREE_TYPE (def);
1240 if (INTEGRAL_TYPE_P (ti)
1241 && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
1243 size_t n = gimple_switch_num_labels (stmt);
1244 tree min = NULL_TREE, max = NULL_TREE;
1245 if (n > 1)
1247 min = CASE_LOW (gimple_switch_label (stmt, 1));
1248 if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
1249 max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
1250 else
1251 max = CASE_LOW (gimple_switch_label (stmt, n - 1));
1253 if ((!min || int_fits_type_p (min, ti))
1254 && (!max || int_fits_type_p (max, ti)))
1256 gimple_switch_set_index (stmt, def);
1257 simplify_gimple_switch_label_vec (stmt, ti);
1258 update_stmt (stmt);
1259 return true;
1265 return false;
1268 /* For pointers p2 and p1 return p2 - p1 if the
1269 difference is known and constant, otherwise return NULL. */
1271 static tree
1272 constant_pointer_difference (tree p1, tree p2)
1274 int i, j;
1275 #define CPD_ITERATIONS 5
1276 tree exps[2][CPD_ITERATIONS];
1277 tree offs[2][CPD_ITERATIONS];
1278 int cnt[2];
1280 for (i = 0; i < 2; i++)
1282 tree p = i ? p1 : p2;
1283 tree off = size_zero_node;
1284 gimple stmt;
1285 enum tree_code code;
1287 /* For each of p1 and p2 we need to iterate at least
1288 twice, to handle ADDR_EXPR directly in p1/p2,
1289 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1290 on definition's stmt RHS. Iterate a few extra times. */
1291 j = 0;
1294 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1295 break;
1296 if (TREE_CODE (p) == ADDR_EXPR)
1298 tree q = TREE_OPERAND (p, 0);
1299 HOST_WIDE_INT offset;
1300 tree base = get_addr_base_and_unit_offset (q, &offset);
1301 if (base)
1303 q = base;
1304 if (offset)
1305 off = size_binop (PLUS_EXPR, off, size_int (offset));
1307 if (TREE_CODE (q) == MEM_REF
1308 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1310 p = TREE_OPERAND (q, 0);
1311 off = size_binop (PLUS_EXPR, off,
1312 wide_int_to_tree (sizetype,
1313 mem_ref_offset (q)));
1315 else
1317 exps[i][j] = q;
1318 offs[i][j++] = off;
1319 break;
1322 if (TREE_CODE (p) != SSA_NAME)
1323 break;
1324 exps[i][j] = p;
1325 offs[i][j++] = off;
1326 if (j == CPD_ITERATIONS)
1327 break;
1328 stmt = SSA_NAME_DEF_STMT (p);
1329 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1330 break;
1331 code = gimple_assign_rhs_code (stmt);
1332 if (code == POINTER_PLUS_EXPR)
1334 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1335 break;
1336 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1337 p = gimple_assign_rhs1 (stmt);
1339 else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
1340 p = gimple_assign_rhs1 (stmt);
1341 else
1342 break;
1344 while (1);
1345 cnt[i] = j;
1348 for (i = 0; i < cnt[0]; i++)
1349 for (j = 0; j < cnt[1]; j++)
1350 if (exps[0][i] == exps[1][j])
1351 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1353 return NULL_TREE;
1356 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1357 Optimize
1358 memcpy (p, "abcd", 4);
1359 memset (p + 4, ' ', 3);
1360 into
1361 memcpy (p, "abcd ", 7);
1362 call if the latter can be stored by pieces during expansion. */
1364 static bool
1365 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1367 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1368 tree vuse = gimple_vuse (stmt2);
1369 if (vuse == NULL)
1370 return false;
1371 stmt1 = SSA_NAME_DEF_STMT (vuse);
1373 switch (DECL_FUNCTION_CODE (callee2))
1375 case BUILT_IN_MEMSET:
1376 if (gimple_call_num_args (stmt2) != 3
1377 || gimple_call_lhs (stmt2)
1378 || CHAR_BIT != 8
1379 || BITS_PER_UNIT != 8)
1380 break;
1381 else
1383 tree callee1;
1384 tree ptr1, src1, str1, off1, len1, lhs1;
1385 tree ptr2 = gimple_call_arg (stmt2, 0);
1386 tree val2 = gimple_call_arg (stmt2, 1);
1387 tree len2 = gimple_call_arg (stmt2, 2);
1388 tree diff, vdef, new_str_cst;
1389 gimple use_stmt;
1390 unsigned int ptr1_align;
1391 unsigned HOST_WIDE_INT src_len;
1392 char *src_buf;
1393 use_operand_p use_p;
1395 if (!tree_fits_shwi_p (val2)
1396 || !tree_fits_uhwi_p (len2))
1397 break;
1398 if (is_gimple_call (stmt1))
1400 /* If first stmt is a call, it needs to be memcpy
1401 or mempcpy, with string literal as second argument and
1402 constant length. */
1403 callee1 = gimple_call_fndecl (stmt1);
1404 if (callee1 == NULL_TREE
1405 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1406 || gimple_call_num_args (stmt1) != 3)
1407 break;
1408 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1409 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1410 break;
1411 ptr1 = gimple_call_arg (stmt1, 0);
1412 src1 = gimple_call_arg (stmt1, 1);
1413 len1 = gimple_call_arg (stmt1, 2);
1414 lhs1 = gimple_call_lhs (stmt1);
1415 if (!tree_fits_uhwi_p (len1))
1416 break;
1417 str1 = string_constant (src1, &off1);
1418 if (str1 == NULL_TREE)
1419 break;
1420 if (!tree_fits_uhwi_p (off1)
1421 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1422 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1423 - tree_to_uhwi (off1)) > 0
1424 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1425 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1426 != TYPE_MODE (char_type_node))
1427 break;
1429 else if (gimple_assign_single_p (stmt1))
1431 /* Otherwise look for length 1 memcpy optimized into
1432 assignment. */
1433 ptr1 = gimple_assign_lhs (stmt1);
1434 src1 = gimple_assign_rhs1 (stmt1);
1435 if (TREE_CODE (ptr1) != MEM_REF
1436 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1437 || !tree_fits_shwi_p (src1))
1438 break;
1439 ptr1 = build_fold_addr_expr (ptr1);
1440 callee1 = NULL_TREE;
1441 len1 = size_one_node;
1442 lhs1 = NULL_TREE;
1443 off1 = size_zero_node;
1444 str1 = NULL_TREE;
1446 else
1447 break;
1449 diff = constant_pointer_difference (ptr1, ptr2);
1450 if (diff == NULL && lhs1 != NULL)
1452 diff = constant_pointer_difference (lhs1, ptr2);
1453 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1454 && diff != NULL)
1455 diff = size_binop (PLUS_EXPR, diff,
1456 fold_convert (sizetype, len1));
1458 /* If the difference between the second and first destination pointer
1459 is not constant, or is bigger than memcpy length, bail out. */
1460 if (diff == NULL
1461 || !tree_fits_uhwi_p (diff)
1462 || tree_int_cst_lt (len1, diff))
1463 break;
1465 /* Use maximum of difference plus memset length and memcpy length
1466 as the new memcpy length, if it is too big, bail out. */
1467 src_len = tree_to_uhwi (diff);
1468 src_len += tree_to_uhwi (len2);
1469 if (src_len < tree_to_uhwi (len1))
1470 src_len = tree_to_uhwi (len1);
1471 if (src_len > 1024)
1472 break;
1474 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1475 with bigger length will return different result. */
1476 if (lhs1 != NULL_TREE
1477 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1478 && (TREE_CODE (lhs1) != SSA_NAME
1479 || !single_imm_use (lhs1, &use_p, &use_stmt)
1480 || use_stmt != stmt2))
1481 break;
1483 /* If anything reads memory in between memcpy and memset
1484 call, the modified memcpy call might change it. */
1485 vdef = gimple_vdef (stmt1);
1486 if (vdef != NULL
1487 && (!single_imm_use (vdef, &use_p, &use_stmt)
1488 || use_stmt != stmt2))
1489 break;
1491 ptr1_align = get_pointer_alignment (ptr1);
1492 /* Construct the new source string literal. */
1493 src_buf = XALLOCAVEC (char, src_len + 1);
1494 if (callee1)
1495 memcpy (src_buf,
1496 TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
1497 tree_to_uhwi (len1));
1498 else
1499 src_buf[0] = tree_to_shwi (src1);
1500 memset (src_buf + tree_to_uhwi (diff),
1501 tree_to_shwi (val2), tree_to_uhwi (len2));
1502 src_buf[src_len] = '\0';
1503 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1504 handle embedded '\0's. */
1505 if (strlen (src_buf) != src_len)
1506 break;
1507 rtl_profile_for_bb (gimple_bb (stmt2));
1508 /* If the new memcpy wouldn't be emitted by storing the literal
1509 by pieces, this optimization might enlarge .rodata too much,
1510 as commonly used string literals couldn't be shared any
1511 longer. */
1512 if (!can_store_by_pieces (src_len,
1513 builtin_strncpy_read_str,
1514 src_buf, ptr1_align, false))
1515 break;
1517 new_str_cst = build_string_literal (src_len, src_buf);
1518 if (callee1)
1520 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1521 memset call. */
1522 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1523 gimple_call_set_lhs (stmt1, NULL_TREE);
1524 gimple_call_set_arg (stmt1, 1, new_str_cst);
1525 gimple_call_set_arg (stmt1, 2,
1526 build_int_cst (TREE_TYPE (len1), src_len));
1527 update_stmt (stmt1);
1528 unlink_stmt_vdef (stmt2);
1529 gsi_remove (gsi_p, true);
1530 fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
1531 release_defs (stmt2);
1532 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1534 fwprop_invalidate_lattice (lhs1);
1535 release_ssa_name (lhs1);
1537 return true;
1539 else
1541 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1542 assignment, remove STMT1 and change memset call into
1543 memcpy call. */
1544 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1546 if (!is_gimple_val (ptr1))
1547 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1548 true, GSI_SAME_STMT);
1549 gimple_call_set_fndecl (stmt2,
1550 builtin_decl_explicit (BUILT_IN_MEMCPY));
1551 gimple_call_set_arg (stmt2, 0, ptr1);
1552 gimple_call_set_arg (stmt2, 1, new_str_cst);
1553 gimple_call_set_arg (stmt2, 2,
1554 build_int_cst (TREE_TYPE (len2), src_len));
1555 unlink_stmt_vdef (stmt1);
1556 gsi_remove (&gsi, true);
1557 fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
1558 release_defs (stmt1);
1559 update_stmt (stmt2);
1560 return false;
1563 break;
1564 default:
1565 break;
1567 return false;
1570 /* Given a ssa_name in NAME see if it was defined by an assignment and
1571 set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
1572 to the second operand on the rhs. */
1574 static inline void
1575 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1577 gimple def;
1578 enum tree_code code1;
1579 tree arg11;
1580 tree arg21;
1581 tree arg31;
1582 enum gimple_rhs_class grhs_class;
1584 code1 = TREE_CODE (name);
1585 arg11 = name;
1586 arg21 = NULL_TREE;
1587 grhs_class = get_gimple_rhs_class (code1);
1589 if (code1 == SSA_NAME)
1591 def = SSA_NAME_DEF_STMT (name);
1593 if (def && is_gimple_assign (def)
1594 && can_propagate_from (def))
1596 code1 = gimple_assign_rhs_code (def);
1597 arg11 = gimple_assign_rhs1 (def);
1598 arg21 = gimple_assign_rhs2 (def);
1599 arg31 = gimple_assign_rhs2 (def);
1602 else if (grhs_class == GIMPLE_TERNARY_RHS
1603 || GIMPLE_BINARY_RHS
1604 || GIMPLE_UNARY_RHS
1605 || GIMPLE_SINGLE_RHS)
1606 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1608 *code = code1;
1609 *arg1 = arg11;
1610 if (arg2)
1611 *arg2 = arg21;
1612 /* Ignore arg3 currently. */
1616 /* Recognize rotation patterns. Return true if a transformation
1617 applied, otherwise return false.
1619 We are looking for X with unsigned type T with bitsize B, OP being
1620 +, | or ^, some type T2 wider than T and
1621 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
1622 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
1623 (X << Y) OP (X >> (B - Y))
1624 (X << (int) Y) OP (X >> (int) (B - Y))
1625 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
1626 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
1627 (X << Y) | (X >> ((-Y) & (B - 1)))
1628 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
1629 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
1630 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
1632 and transform these into:
1633 X r<< CNT1
1634 X r<< Y
1636 Note, in the patterns with T2 type, the type of OP operands
1637 might be even a signed type, but should have precision B. */
1639 static bool
1640 simplify_rotate (gimple_stmt_iterator *gsi)
1642 gimple stmt = gsi_stmt (*gsi);
1643 tree arg[2], rtype, rotcnt = NULL_TREE;
1644 tree def_arg1[2], def_arg2[2];
1645 enum tree_code def_code[2];
1646 tree lhs;
1647 int i;
1648 bool swapped_p = false;
1649 gimple g;
1651 arg[0] = gimple_assign_rhs1 (stmt);
1652 arg[1] = gimple_assign_rhs2 (stmt);
1653 rtype = TREE_TYPE (arg[0]);
1655 /* Only create rotates in complete modes. Other cases are not
1656 expanded properly. */
1657 if (!INTEGRAL_TYPE_P (rtype)
1658 || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
1659 return false;
1661 for (i = 0; i < 2; i++)
1662 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1664 /* Look through narrowing conversions. */
1665 if (CONVERT_EXPR_CODE_P (def_code[0])
1666 && CONVERT_EXPR_CODE_P (def_code[1])
1667 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
1668 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
1669 && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
1670 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
1671 && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
1672 && has_single_use (arg[0])
1673 && has_single_use (arg[1]))
1675 for (i = 0; i < 2; i++)
1677 arg[i] = def_arg1[i];
1678 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1682 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
1683 for (i = 0; i < 2; i++)
1684 if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
1685 return false;
1686 else if (!has_single_use (arg[i]))
1687 return false;
1688 if (def_code[0] == def_code[1])
1689 return false;
1691 /* If we've looked through narrowing conversions before, look through
1692 widening conversions from unsigned type with the same precision
1693 as rtype here. */
1694 if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
1695 for (i = 0; i < 2; i++)
1697 tree tem;
1698 enum tree_code code;
1699 defcodefor_name (def_arg1[i], &code, &tem, NULL);
1700 if (!CONVERT_EXPR_CODE_P (code)
1701 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
1702 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
1703 return false;
1704 def_arg1[i] = tem;
1706 /* Both shifts have to use the same first operand. */
1707 if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
1708 return false;
1709 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
1710 return false;
1712 /* CNT1 + CNT2 == B case above. */
1713 if (tree_fits_uhwi_p (def_arg2[0])
1714 && tree_fits_uhwi_p (def_arg2[1])
1715 && tree_to_uhwi (def_arg2[0])
1716 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
1717 rotcnt = def_arg2[0];
1718 else if (TREE_CODE (def_arg2[0]) != SSA_NAME
1719 || TREE_CODE (def_arg2[1]) != SSA_NAME)
1720 return false;
1721 else
1723 tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
1724 enum tree_code cdef_code[2];
1725 /* Look through conversion of the shift count argument.
1726 The C/C++ FE cast any shift count argument to integer_type_node.
1727 The only problem might be if the shift count type maximum value
1728 is equal or smaller than number of bits in rtype. */
1729 for (i = 0; i < 2; i++)
1731 def_arg2_alt[i] = def_arg2[i];
1732 defcodefor_name (def_arg2[i], &cdef_code[i],
1733 &cdef_arg1[i], &cdef_arg2[i]);
1734 if (CONVERT_EXPR_CODE_P (cdef_code[i])
1735 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
1736 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
1737 > floor_log2 (TYPE_PRECISION (rtype))
1738 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
1739 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
1741 def_arg2_alt[i] = cdef_arg1[i];
1742 defcodefor_name (def_arg2_alt[i], &cdef_code[i],
1743 &cdef_arg1[i], &cdef_arg2[i]);
1746 for (i = 0; i < 2; i++)
1747 /* Check for one shift count being Y and the other B - Y,
1748 with optional casts. */
1749 if (cdef_code[i] == MINUS_EXPR
1750 && tree_fits_shwi_p (cdef_arg1[i])
1751 && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
1752 && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
1754 tree tem;
1755 enum tree_code code;
1757 if (cdef_arg2[i] == def_arg2[1 - i]
1758 || cdef_arg2[i] == def_arg2_alt[1 - i])
1760 rotcnt = cdef_arg2[i];
1761 break;
1763 defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
1764 if (CONVERT_EXPR_CODE_P (code)
1765 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1766 && TYPE_PRECISION (TREE_TYPE (tem))
1767 > floor_log2 (TYPE_PRECISION (rtype))
1768 && TYPE_PRECISION (TREE_TYPE (tem))
1769 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
1770 && (tem == def_arg2[1 - i]
1771 || tem == def_arg2_alt[1 - i]))
1773 rotcnt = tem;
1774 break;
1777 /* The above sequence isn't safe for Y being 0,
1778 because then one of the shifts triggers undefined behavior.
1779 This alternative is safe even for rotation count of 0.
1780 One shift count is Y and the other (-Y) & (B - 1). */
1781 else if (cdef_code[i] == BIT_AND_EXPR
1782 && tree_fits_shwi_p (cdef_arg2[i])
1783 && tree_to_shwi (cdef_arg2[i])
1784 == TYPE_PRECISION (rtype) - 1
1785 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
1786 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
1788 tree tem;
1789 enum tree_code code;
1791 defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
1792 if (CONVERT_EXPR_CODE_P (code)
1793 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1794 && TYPE_PRECISION (TREE_TYPE (tem))
1795 > floor_log2 (TYPE_PRECISION (rtype))
1796 && TYPE_PRECISION (TREE_TYPE (tem))
1797 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
1798 defcodefor_name (tem, &code, &tem, NULL);
1800 if (code == NEGATE_EXPR)
1802 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
1804 rotcnt = tem;
1805 break;
1807 defcodefor_name (tem, &code, &tem, NULL);
1808 if (CONVERT_EXPR_CODE_P (code)
1809 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1810 && TYPE_PRECISION (TREE_TYPE (tem))
1811 > floor_log2 (TYPE_PRECISION (rtype))
1812 && TYPE_PRECISION (TREE_TYPE (tem))
1813 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
1814 && (tem == def_arg2[1 - i]
1815 || tem == def_arg2_alt[1 - i]))
1817 rotcnt = tem;
1818 break;
1822 if (rotcnt == NULL_TREE)
1823 return false;
1824 swapped_p = i != 1;
1827 if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
1828 TREE_TYPE (rotcnt)))
1830 g = gimple_build_assign_with_ops (NOP_EXPR,
1831 make_ssa_name (TREE_TYPE (def_arg2[0]),
1832 NULL),
1833 rotcnt, NULL_TREE);
1834 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1835 rotcnt = gimple_assign_lhs (g);
1837 lhs = gimple_assign_lhs (stmt);
1838 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
1839 lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
1840 g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
1841 ? LROTATE_EXPR : RROTATE_EXPR,
1842 lhs, def_arg1[0], rotcnt);
1843 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
1845 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1846 g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
1847 lhs, NULL_TREE);
1849 gsi_replace (gsi, g, false);
1850 return true;
1853 /* Combine an element access with a shuffle. Returns true if there were
1854 any changes made, else it returns false. */
1856 static bool
1857 simplify_bitfield_ref (gimple_stmt_iterator *gsi)
1859 gimple stmt = gsi_stmt (*gsi);
1860 gimple def_stmt;
1861 tree op, op0, op1, op2;
1862 tree elem_type;
1863 unsigned idx, n, size;
1864 enum tree_code code;
1866 op = gimple_assign_rhs1 (stmt);
1867 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
1869 op0 = TREE_OPERAND (op, 0);
1870 if (TREE_CODE (op0) != SSA_NAME
1871 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
1872 return false;
1874 def_stmt = get_prop_source_stmt (op0, false, NULL);
1875 if (!def_stmt || !can_propagate_from (def_stmt))
1876 return false;
1878 op1 = TREE_OPERAND (op, 1);
1879 op2 = TREE_OPERAND (op, 2);
1880 code = gimple_assign_rhs_code (def_stmt);
1882 if (code == CONSTRUCTOR)
1884 tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
1885 gimple_assign_rhs1 (def_stmt), op1, op2);
1886 if (!tem || !valid_gimple_rhs_p (tem))
1887 return false;
1888 gimple_assign_set_rhs_from_tree (gsi, tem);
1889 update_stmt (gsi_stmt (*gsi));
1890 return true;
1893 elem_type = TREE_TYPE (TREE_TYPE (op0));
1894 if (TREE_TYPE (op) != elem_type)
1895 return false;
1897 size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
1898 n = TREE_INT_CST_LOW (op1) / size;
1899 if (n != 1)
1900 return false;
1901 idx = TREE_INT_CST_LOW (op2) / size;
1903 if (code == VEC_PERM_EXPR)
1905 tree p, m, index, tem;
1906 unsigned nelts;
1907 m = gimple_assign_rhs3 (def_stmt);
1908 if (TREE_CODE (m) != VECTOR_CST)
1909 return false;
1910 nelts = VECTOR_CST_NELTS (m);
1911 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
1912 idx %= 2 * nelts;
1913 if (idx < nelts)
1915 p = gimple_assign_rhs1 (def_stmt);
1917 else
1919 p = gimple_assign_rhs2 (def_stmt);
1920 idx -= nelts;
1922 index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
1923 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
1924 unshare_expr (p), op1, index);
1925 gimple_assign_set_rhs1 (stmt, tem);
1926 fold_stmt (gsi);
1927 update_stmt (gsi_stmt (*gsi));
1928 return true;
1931 return false;
1934 /* Determine whether applying the 2 permutations (mask1 then mask2)
1935 gives back one of the input. */
1937 static int
1938 is_combined_permutation_identity (tree mask1, tree mask2)
1940 tree mask;
1941 unsigned int nelts, i, j;
1942 bool maybe_identity1 = true;
1943 bool maybe_identity2 = true;
1945 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
1946 && TREE_CODE (mask2) == VECTOR_CST);
1947 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
1948 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
1950 nelts = VECTOR_CST_NELTS (mask);
1951 for (i = 0; i < nelts; i++)
1953 tree val = VECTOR_CST_ELT (mask, i);
1954 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1955 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
1956 if (j == i)
1957 maybe_identity2 = false;
1958 else if (j == i + nelts)
1959 maybe_identity1 = false;
1960 else
1961 return 0;
1963 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
1966 /* Combine a shuffle with its arguments. Returns 1 if there were any
1967 changes made, 2 if cfg-cleanup needs to run. Else it returns 0. */
1969 static int
1970 simplify_permutation (gimple_stmt_iterator *gsi)
1972 gimple stmt = gsi_stmt (*gsi);
1973 gimple def_stmt;
1974 tree op0, op1, op2, op3, arg0, arg1;
1975 enum tree_code code;
1976 bool single_use_op0 = false;
1978 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
1980 op0 = gimple_assign_rhs1 (stmt);
1981 op1 = gimple_assign_rhs2 (stmt);
1982 op2 = gimple_assign_rhs3 (stmt);
1984 if (TREE_CODE (op2) != VECTOR_CST)
1985 return 0;
1987 if (TREE_CODE (op0) == VECTOR_CST)
1989 code = VECTOR_CST;
1990 arg0 = op0;
1992 else if (TREE_CODE (op0) == SSA_NAME)
1994 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
1995 if (!def_stmt || !can_propagate_from (def_stmt))
1996 return 0;
1998 code = gimple_assign_rhs_code (def_stmt);
1999 arg0 = gimple_assign_rhs1 (def_stmt);
2001 else
2002 return 0;
2004 /* Two consecutive shuffles. */
2005 if (code == VEC_PERM_EXPR)
2007 tree orig;
2008 int ident;
2010 if (op0 != op1)
2011 return 0;
2012 op3 = gimple_assign_rhs3 (def_stmt);
2013 if (TREE_CODE (op3) != VECTOR_CST)
2014 return 0;
2015 ident = is_combined_permutation_identity (op3, op2);
2016 if (!ident)
2017 return 0;
2018 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
2019 : gimple_assign_rhs2 (def_stmt);
2020 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
2021 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
2022 gimple_set_num_ops (stmt, 2);
2023 update_stmt (stmt);
2024 return remove_prop_source_from_use (op0) ? 2 : 1;
2027 /* Shuffle of a constructor. */
2028 else if (code == CONSTRUCTOR || code == VECTOR_CST)
2030 tree opt;
2031 bool ret = false;
2032 if (op0 != op1)
2034 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
2035 return 0;
2037 if (TREE_CODE (op1) == VECTOR_CST)
2038 arg1 = op1;
2039 else if (TREE_CODE (op1) == SSA_NAME)
2041 enum tree_code code2;
2043 gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
2044 if (!def_stmt2 || !can_propagate_from (def_stmt2))
2045 return 0;
2047 code2 = gimple_assign_rhs_code (def_stmt2);
2048 if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
2049 return 0;
2050 arg1 = gimple_assign_rhs1 (def_stmt2);
2052 else
2053 return 0;
2055 else
2057 /* Already used twice in this statement. */
2058 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
2059 return 0;
2060 arg1 = arg0;
2062 opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
2063 if (!opt
2064 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
2065 return 0;
2066 gimple_assign_set_rhs_from_tree (gsi, opt);
2067 update_stmt (gsi_stmt (*gsi));
2068 if (TREE_CODE (op0) == SSA_NAME)
2069 ret = remove_prop_source_from_use (op0);
2070 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
2071 ret |= remove_prop_source_from_use (op1);
2072 return ret ? 2 : 1;
2075 return 0;
2078 /* Recognize a VEC_PERM_EXPR. Returns true if there were any changes. */
2080 static bool
2081 simplify_vector_constructor (gimple_stmt_iterator *gsi)
2083 gimple stmt = gsi_stmt (*gsi);
2084 gimple def_stmt;
2085 tree op, op2, orig, type, elem_type;
2086 unsigned elem_size, nelts, i;
2087 enum tree_code code;
2088 constructor_elt *elt;
2089 unsigned char *sel;
2090 bool maybe_ident;
2092 gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);
2094 op = gimple_assign_rhs1 (stmt);
2095 type = TREE_TYPE (op);
2096 gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);
2098 nelts = TYPE_VECTOR_SUBPARTS (type);
2099 elem_type = TREE_TYPE (type);
2100 elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
2102 sel = XALLOCAVEC (unsigned char, nelts);
2103 orig = NULL;
2104 maybe_ident = true;
2105 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
2107 tree ref, op1;
2109 if (i >= nelts)
2110 return false;
2112 if (TREE_CODE (elt->value) != SSA_NAME)
2113 return false;
2114 def_stmt = get_prop_source_stmt (elt->value, false, NULL);
2115 if (!def_stmt)
2116 return false;
2117 code = gimple_assign_rhs_code (def_stmt);
2118 if (code != BIT_FIELD_REF)
2119 return false;
2120 op1 = gimple_assign_rhs1 (def_stmt);
2121 ref = TREE_OPERAND (op1, 0);
2122 if (orig)
2124 if (ref != orig)
2125 return false;
2127 else
2129 if (TREE_CODE (ref) != SSA_NAME)
2130 return false;
2131 if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
2132 return false;
2133 orig = ref;
2135 if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
2136 return false;
2137 sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
2138 if (sel[i] != i) maybe_ident = false;
2140 if (i < nelts)
2141 return false;
2143 if (maybe_ident)
2144 gimple_assign_set_rhs_from_tree (gsi, orig);
2145 else
2147 tree mask_type, *mask_elts;
2149 if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
2150 return false;
2151 mask_type
2152 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
2153 nelts);
2154 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
2155 || GET_MODE_SIZE (TYPE_MODE (mask_type))
2156 != GET_MODE_SIZE (TYPE_MODE (type)))
2157 return false;
2158 mask_elts = XALLOCAVEC (tree, nelts);
2159 for (i = 0; i < nelts; i++)
2160 mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
2161 op2 = build_vector (mask_type, mask_elts);
2162 gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
2164 update_stmt (gsi_stmt (*gsi));
2165 return true;
2169 /* Primitive "lattice" function for gimple_simplify. */
2171 static tree
2172 fwprop_ssa_val (tree name)
2174 /* First valueize NAME. */
2175 if (TREE_CODE (name) == SSA_NAME
2176 && SSA_NAME_VERSION (name) < lattice.length ())
2178 tree val = lattice[SSA_NAME_VERSION (name)];
2179 if (val)
2180 name = val;
2182 /* We continue matching along SSA use-def edges for SSA names
2183 that are not single-use. Currently there are no patterns
2184 that would cause any issues with that. */
2185 return name;
2188 /* Main entry point for the forward propagation and statement combine
2189 optimizer. */
2191 namespace {
2193 const pass_data pass_data_forwprop =
2195 GIMPLE_PASS, /* type */
2196 "forwprop", /* name */
2197 OPTGROUP_NONE, /* optinfo_flags */
2198 TV_TREE_FORWPROP, /* tv_id */
2199 ( PROP_cfg | PROP_ssa ), /* properties_required */
2200 0, /* properties_provided */
2201 0, /* properties_destroyed */
2202 0, /* todo_flags_start */
2203 TODO_update_ssa, /* todo_flags_finish */
2206 class pass_forwprop : public gimple_opt_pass
2208 public:
2209 pass_forwprop (gcc::context *ctxt)
2210 : gimple_opt_pass (pass_data_forwprop, ctxt)
2213 /* opt_pass methods: */
2214 opt_pass * clone () { return new pass_forwprop (m_ctxt); }
2215 virtual bool gate (function *) { return flag_tree_forwprop; }
2216 virtual unsigned int execute (function *);
2218 }; // class pass_forwprop
2220 unsigned int
2221 pass_forwprop::execute (function *fun)
2223 unsigned int todoflags = 0;
2225 cfg_changed = false;
2227 /* Combine stmts with the stmts defining their operands. Do that
2228 in an order that guarantees visiting SSA defs before SSA uses. */
2229 lattice.create (num_ssa_names);
2230 lattice.quick_grow_cleared (num_ssa_names);
2231 int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
2232 int postorder_num = inverted_post_order_compute (postorder);
2233 to_purge = BITMAP_ALLOC (NULL);
2234 for (int i = 0; i < postorder_num; ++i)
2236 gimple_stmt_iterator gsi;
2237 basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
2239 /* Apply forward propagation to all stmts in the basic-block.
2240 Note we update GSI within the loop as necessary. */
2241 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2243 gimple stmt = gsi_stmt (gsi);
2244 tree lhs, rhs;
2245 enum tree_code code;
2247 if (!is_gimple_assign (stmt))
2249 gsi_next (&gsi);
2250 continue;
2253 lhs = gimple_assign_lhs (stmt);
2254 rhs = gimple_assign_rhs1 (stmt);
2255 code = gimple_assign_rhs_code (stmt);
2256 if (TREE_CODE (lhs) != SSA_NAME
2257 || has_zero_uses (lhs))
2259 gsi_next (&gsi);
2260 continue;
2263 /* If this statement sets an SSA_NAME to an address,
2264 try to propagate the address into the uses of the SSA_NAME. */
2265 if (code == ADDR_EXPR
2266 /* Handle pointer conversions on invariant addresses
2267 as well, as this is valid gimple. */
2268 || (CONVERT_EXPR_CODE_P (code)
2269 && TREE_CODE (rhs) == ADDR_EXPR
2270 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2272 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2273 if ((!base
2274 || !DECL_P (base)
2275 || decl_address_invariant_p (base))
2276 && !stmt_references_abnormal_ssa_name (stmt)
2277 && forward_propagate_addr_expr (lhs, rhs, true))
2279 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
2280 release_defs (stmt);
2281 gsi_remove (&gsi, true);
2283 else
2284 gsi_next (&gsi);
2286 else if (code == POINTER_PLUS_EXPR)
2288 tree off = gimple_assign_rhs2 (stmt);
2289 if (TREE_CODE (off) == INTEGER_CST
2290 && can_propagate_from (stmt)
2291 && !simple_iv_increment_p (stmt)
2292 /* ??? Better adjust the interface to that function
2293 instead of building new trees here. */
2294 && forward_propagate_addr_expr
2295 (lhs,
2296 build1_loc (gimple_location (stmt),
2297 ADDR_EXPR, TREE_TYPE (rhs),
2298 fold_build2 (MEM_REF,
2299 TREE_TYPE (TREE_TYPE (rhs)),
2300 rhs,
2301 fold_convert (ptr_type_node,
2302 off))), true))
2304 fwprop_invalidate_lattice (gimple_get_lhs (stmt));
2305 release_defs (stmt);
2306 gsi_remove (&gsi, true);
2308 else if (is_gimple_min_invariant (rhs))
2310 /* Make sure to fold &a[0] + off_1 here. */
2311 fold_stmt_inplace (&gsi);
2312 update_stmt (stmt);
2313 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2314 gsi_next (&gsi);
2316 else
2317 gsi_next (&gsi);
2319 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2321 if (forward_propagate_comparison (&gsi))
2322 cfg_changed = true;
2324 else
2325 gsi_next (&gsi);
2328 /* Combine stmts with the stmts defining their operands.
2329 Note we update GSI within the loop as necessary. */
2330 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2332 gimple stmt = gsi_stmt (gsi);
2333 gimple orig_stmt = stmt;
2334 bool changed = false;
2336 /* Mark stmt as potentially needing revisiting. */
2337 gimple_set_plf (stmt, GF_PLF_1, false);
2339 if (fold_stmt (&gsi, fwprop_ssa_val))
2341 changed = true;
2342 stmt = gsi_stmt (gsi);
2343 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2344 bitmap_set_bit (to_purge, bb->index);
2345 /* Cleanup the CFG if we simplified a condition to
2346 true or false. */
2347 if (gimple_code (stmt) == GIMPLE_COND
2348 && (gimple_cond_true_p (stmt)
2349 || gimple_cond_false_p (stmt)))
2350 cfg_changed = true;
2351 update_stmt (stmt);
2354 switch (gimple_code (stmt))
2356 case GIMPLE_ASSIGN:
2358 tree rhs1 = gimple_assign_rhs1 (stmt);
2359 enum tree_code code = gimple_assign_rhs_code (stmt);
2361 if (code == COND_EXPR
2362 || code == VEC_COND_EXPR)
2364 /* In this case the entire COND_EXPR is in rhs1. */
2365 if (forward_propagate_into_cond (&gsi))
2367 changed = true;
2368 stmt = gsi_stmt (gsi);
2371 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2373 int did_something;
2374 did_something = forward_propagate_into_comparison (&gsi);
2375 if (did_something == 2)
2376 cfg_changed = true;
2377 changed = did_something != 0;
2379 else if ((code == PLUS_EXPR
2380 || code == BIT_IOR_EXPR
2381 || code == BIT_XOR_EXPR)
2382 && simplify_rotate (&gsi))
2383 changed = true;
2384 else if (code == VEC_PERM_EXPR)
2386 int did_something = simplify_permutation (&gsi);
2387 if (did_something == 2)
2388 cfg_changed = true;
2389 changed = did_something != 0;
2391 else if (code == BIT_FIELD_REF)
2392 changed = simplify_bitfield_ref (&gsi);
2393 else if (code == CONSTRUCTOR
2394 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
2395 changed = simplify_vector_constructor (&gsi);
2396 break;
2399 case GIMPLE_SWITCH:
2400 changed = simplify_gimple_switch (stmt);
2401 break;
2403 case GIMPLE_COND:
2405 int did_something;
2406 did_something = forward_propagate_into_gimple_cond (stmt);
2407 if (did_something == 2)
2408 cfg_changed = true;
2409 changed = did_something != 0;
2410 break;
2413 case GIMPLE_CALL:
2415 tree callee = gimple_call_fndecl (stmt);
2416 if (callee != NULL_TREE
2417 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2418 changed = simplify_builtin_call (&gsi, callee);
2419 break;
2422 default:;
2425 if (changed)
2427 /* If the stmt changed then re-visit it and the statements
2428 inserted before it. */
2429 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2430 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
2431 break;
2432 if (gsi_end_p (gsi))
2433 gsi = gsi_start_bb (bb);
2434 else
2435 gsi_next (&gsi);
2437 else
2439 /* Stmt no longer needs to be revisited. */
2440 gimple_set_plf (stmt, GF_PLF_1, true);
2442 /* Fill up the lattice. */
2443 if (gimple_assign_single_p (stmt))
2445 tree lhs = gimple_assign_lhs (stmt);
2446 tree rhs = gimple_assign_rhs1 (stmt);
2447 if (TREE_CODE (lhs) == SSA_NAME)
2449 tree val = lhs;
2450 if (TREE_CODE (rhs) == SSA_NAME)
2451 val = fwprop_ssa_val (rhs);
2452 else if (is_gimple_min_invariant (rhs))
2453 val = rhs;
2454 fwprop_set_lattice_val (lhs, val);
2458 gsi_next (&gsi);
2462 free (postorder);
2463 lattice.release ();
2465 cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
2466 BITMAP_FREE (to_purge);
2468 if (cfg_changed)
2469 todoflags |= TODO_cleanup_cfg;
2471 return todoflags;
2474 } // anon namespace
2476 gimple_opt_pass *
2477 make_pass_forwprop (gcc::context *ctxt)
2479 return new pass_forwprop (ctxt);