Replace enum gfc_try with bool type.
[official-gcc.git] / gcc / tree-ssa-forwprop.c
blobac930c68755ad26b459e6b744c7a064ed733c2ff
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "tm_p.h"
26 #include "basic-block.h"
27 #include "gimple-pretty-print.h"
28 #include "tree-flow.h"
29 #include "tree-pass.h"
30 #include "langhooks.h"
31 #include "flags.h"
32 #include "gimple.h"
33 #include "expr.h"
34 #include "cfgloop.h"
35 #include "optabs.h"
36 #include "tree-ssa-propagate.h"
38 /* This pass propagates the RHS of assignment statements into use
39 sites of the LHS of the assignment. It's basically a specialized
40 form of tree combination. It is hoped all of this can disappear
41 when we have a generalized tree combiner.
43 One class of common cases we handle is forward propagating a single use
44 variable into a COND_EXPR.
46 bb0:
47 x = a COND b;
48 if (x) goto ... else goto ...
50 Will be transformed into:
52 bb0:
53 if (a COND b) goto ... else goto ...
55 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
57 Or (assuming c1 and c2 are constants):
59 bb0:
60 x = a + c1;
61 if (x EQ/NEQ c2) goto ... else goto ...
63 Will be transformed into:
65 bb0:
66 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
68 Similarly for x = a - c1.
72 bb0:
73 x = !a
74 if (x) goto ... else goto ...
76 Will be transformed into:
78 bb0:
79 if (a == 0) goto ... else goto ...
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
82 For these cases, we propagate A into all, possibly more than one,
83 COND_EXPRs that use X.
87 bb0:
88 x = (typecast) a
89 if (x) goto ... else goto ...
91 Will be transformed into:
93 bb0:
94 if (a != 0) goto ... else goto ...
96 (Assuming a is an integral type and x is a boolean or x is an
97 integral and a is a boolean.)
99 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
100 For these cases, we propagate A into all, possibly more than one,
101 COND_EXPRs that use X.
103 In addition to eliminating the variable and the statement which assigns
104 a value to the variable, we may be able to later thread the jump without
105 adding insane complexity in the dominator optimizer.
107 Also note these transformations can cascade. We handle this by having
108 a worklist of COND_EXPR statements to examine. As we make a change to
109 a statement, we put it back on the worklist to examine on the next
110 iteration of the main loop.
112 A second class of propagation opportunities arises for ADDR_EXPR
113 nodes.
115 ptr = &x->y->z;
116 res = *ptr;
118 Will get turned into
120 res = x->y->z;
123 ptr = (type1*)&type2var;
124 res = *ptr
126 Will get turned into (if type1 and type2 are the same size
127 and neither have volatile on them):
128 res = VIEW_CONVERT_EXPR<type1>(type2var)
132 ptr = &x[0];
133 ptr2 = ptr + <constant>;
135 Will get turned into
137 ptr2 = &x[constant/elementsize];
141 ptr = &x[0];
142 offset = index * element_size;
143 offset_p = (pointer) offset;
144 ptr2 = ptr + offset_p
146 Will get turned into:
148 ptr2 = &x[index];
151 ssa = (int) decl
152 res = ssa & 1
154 Provided that decl has known alignment >= 2, will get turned into
156 res = 0
158 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
159 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
160 {NOT_EXPR,NEG_EXPR}.
162 This will (of course) be extended as other needs arise. */
164 static bool forward_propagate_addr_expr (tree name, tree rhs);
166 /* Set to true if we delete dead edges during the optimization. */
167 static bool cfg_changed;
169 static tree rhs_to_tree (tree type, gimple stmt);
171 /* Get the next statement we can propagate NAME's value into skipping
172 trivial copies. Returns the statement that is suitable as a
173 propagation destination or NULL_TREE if there is no such one.
174 This only returns destinations in a single-use chain. FINAL_NAME_P
175 if non-NULL is written to the ssa name that represents the use. */
177 static gimple
178 get_prop_dest_stmt (tree name, tree *final_name_p)
180 use_operand_p use;
181 gimple use_stmt;
183 do {
184 /* If name has multiple uses, bail out. */
185 if (!single_imm_use (name, &use, &use_stmt))
186 return NULL;
188 /* If this is not a trivial copy, we found it. */
189 if (!gimple_assign_ssa_name_copy_p (use_stmt)
190 || gimple_assign_rhs1 (use_stmt) != name)
191 break;
193 /* Continue searching uses of the copy destination. */
194 name = gimple_assign_lhs (use_stmt);
195 } while (1);
197 if (final_name_p)
198 *final_name_p = name;
200 return use_stmt;
203 /* Get the statement we can propagate from into NAME skipping
204 trivial copies. Returns the statement which defines the
205 propagation source or NULL_TREE if there is no such one.
206 If SINGLE_USE_ONLY is set considers only sources which have
207 a single use chain up to NAME. If SINGLE_USE_P is non-null,
208 it is set to whether the chain to NAME is a single use chain
209 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
211 static gimple
212 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
214 bool single_use = true;
216 do {
217 gimple def_stmt = SSA_NAME_DEF_STMT (name);
219 if (!has_single_use (name))
221 single_use = false;
222 if (single_use_only)
223 return NULL;
226 /* If name is defined by a PHI node or is the default def, bail out. */
227 if (!is_gimple_assign (def_stmt))
228 return NULL;
230 /* If def_stmt is a simple copy, continue looking. */
231 if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
232 name = gimple_assign_rhs1 (def_stmt);
233 else
235 if (!single_use_only && single_use_p)
236 *single_use_p = single_use;
238 return def_stmt;
240 } while (1);
243 /* Checks if the destination ssa name in DEF_STMT can be used as
244 propagation source. Returns true if so, otherwise false. */
246 static bool
247 can_propagate_from (gimple def_stmt)
249 gcc_assert (is_gimple_assign (def_stmt));
251 /* If the rhs has side-effects we cannot propagate from it. */
252 if (gimple_has_volatile_ops (def_stmt))
253 return false;
255 /* If the rhs is a load we cannot propagate from it. */
256 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
257 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
258 return false;
260 /* Constants can be always propagated. */
261 if (gimple_assign_single_p (def_stmt)
262 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
263 return true;
265 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
266 if (stmt_references_abnormal_ssa_name (def_stmt))
267 return false;
269 /* If the definition is a conversion of a pointer to a function type,
270 then we can not apply optimizations as some targets require
271 function pointers to be canonicalized and in this case this
272 optimization could eliminate a necessary canonicalization. */
273 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
275 tree rhs = gimple_assign_rhs1 (def_stmt);
276 if (POINTER_TYPE_P (TREE_TYPE (rhs))
277 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
278 return false;
281 return true;
284 /* Remove a chain of dead statements starting at the definition of
285 NAME. The chain is linked via the first operand of the defining statements.
286 If NAME was replaced in its only use then this function can be used
287 to clean up dead stmts. The function handles already released SSA
288 names gracefully.
289 Returns true if cleanup-cfg has to run. */
291 static bool
292 remove_prop_source_from_use (tree name)
294 gimple_stmt_iterator gsi;
295 gimple stmt;
296 bool cfg_changed = false;
298 do {
299 basic_block bb;
301 if (SSA_NAME_IN_FREE_LIST (name)
302 || SSA_NAME_IS_DEFAULT_DEF (name)
303 || !has_zero_uses (name))
304 return cfg_changed;
306 stmt = SSA_NAME_DEF_STMT (name);
307 if (gimple_code (stmt) == GIMPLE_PHI
308 || gimple_has_side_effects (stmt))
309 return cfg_changed;
311 bb = gimple_bb (stmt);
312 gsi = gsi_for_stmt (stmt);
313 unlink_stmt_vdef (stmt);
314 if (gsi_remove (&gsi, true))
315 cfg_changed |= gimple_purge_dead_eh_edges (bb);
316 release_defs (stmt);
318 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
319 } while (name && TREE_CODE (name) == SSA_NAME);
321 return cfg_changed;
324 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
325 converted to type TYPE.
327 This should disappear, but is needed so we can combine expressions and use
328 the fold() interfaces. Long term, we need to develop folding and combine
329 routines that deal with gimple exclusively . */
331 static tree
332 rhs_to_tree (tree type, gimple stmt)
334 location_t loc = gimple_location (stmt);
335 enum tree_code code = gimple_assign_rhs_code (stmt);
336 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
337 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
338 gimple_assign_rhs2 (stmt),
339 gimple_assign_rhs3 (stmt));
340 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
341 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
342 gimple_assign_rhs2 (stmt));
343 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
344 return build1 (code, type, gimple_assign_rhs1 (stmt));
345 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
346 return gimple_assign_rhs1 (stmt);
347 else
348 gcc_unreachable ();
351 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
352 the folded result in a form suitable for COND_EXPR_COND or
353 NULL_TREE, if there is no suitable simplified form. If
354 INVARIANT_ONLY is true only gimple_min_invariant results are
355 considered simplified. */
357 static tree
358 combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
359 tree op0, tree op1, bool invariant_only)
361 tree t;
363 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
365 fold_defer_overflow_warnings ();
366 t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
367 if (!t)
369 fold_undefer_overflow_warnings (false, NULL, 0);
370 return NULL_TREE;
373 /* Require that we got a boolean type out if we put one in. */
374 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
376 /* Canonicalize the combined condition for use in a COND_EXPR. */
377 t = canonicalize_cond_expr_cond (t);
379 /* Bail out if we required an invariant but didn't get one. */
380 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
382 fold_undefer_overflow_warnings (false, NULL, 0);
383 return NULL_TREE;
386 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);
388 return t;
391 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
392 of its operand. Return a new comparison tree or NULL_TREE if there
393 were no simplifying combines. */
395 static tree
396 forward_propagate_into_comparison_1 (gimple stmt,
397 enum tree_code code, tree type,
398 tree op0, tree op1)
400 tree tmp = NULL_TREE;
401 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
402 bool single_use0_p = false, single_use1_p = false;
404 /* For comparisons use the first operand, that is likely to
405 simplify comparisons against constants. */
406 if (TREE_CODE (op0) == SSA_NAME)
408 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
409 if (def_stmt && can_propagate_from (def_stmt))
411 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
412 tmp = combine_cond_expr_cond (stmt, code, type,
413 rhs0, op1, !single_use0_p);
414 if (tmp)
415 return tmp;
419 /* If that wasn't successful, try the second operand. */
420 if (TREE_CODE (op1) == SSA_NAME)
422 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
423 if (def_stmt && can_propagate_from (def_stmt))
425 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
426 tmp = combine_cond_expr_cond (stmt, code, type,
427 op0, rhs1, !single_use1_p);
428 if (tmp)
429 return tmp;
433 /* If that wasn't successful either, try both operands. */
434 if (rhs0 != NULL_TREE
435 && rhs1 != NULL_TREE)
436 tmp = combine_cond_expr_cond (stmt, code, type,
437 rhs0, rhs1,
438 !(single_use0_p && single_use1_p));
440 return tmp;
443 /* Propagate from the ssa name definition statements of the assignment
444 from a comparison at *GSI into the conditional if that simplifies it.
445 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
446 otherwise returns 0. */
448 static int
449 forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
451 gimple stmt = gsi_stmt (*gsi);
452 tree tmp;
453 bool cfg_changed = false;
454 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
455 tree rhs1 = gimple_assign_rhs1 (stmt);
456 tree rhs2 = gimple_assign_rhs2 (stmt);
458 /* Combine the comparison with defining statements. */
459 tmp = forward_propagate_into_comparison_1 (stmt,
460 gimple_assign_rhs_code (stmt),
461 type, rhs1, rhs2);
462 if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
464 gimple_assign_set_rhs_from_tree (gsi, tmp);
465 fold_stmt (gsi);
466 update_stmt (gsi_stmt (*gsi));
468 if (TREE_CODE (rhs1) == SSA_NAME)
469 cfg_changed |= remove_prop_source_from_use (rhs1);
470 if (TREE_CODE (rhs2) == SSA_NAME)
471 cfg_changed |= remove_prop_source_from_use (rhs2);
472 return cfg_changed ? 2 : 1;
475 return 0;
478 /* Propagate from the ssa name definition statements of COND_EXPR
479 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
480 Returns zero if no statement was changed, one if there were
481 changes and two if cfg_cleanup needs to run.
483 This must be kept in sync with forward_propagate_into_cond. */
485 static int
486 forward_propagate_into_gimple_cond (gimple stmt)
488 tree tmp;
489 enum tree_code code = gimple_cond_code (stmt);
490 bool cfg_changed = false;
491 tree rhs1 = gimple_cond_lhs (stmt);
492 tree rhs2 = gimple_cond_rhs (stmt);
494 /* We can do tree combining on SSA_NAME and comparison expressions. */
495 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
496 return 0;
498 tmp = forward_propagate_into_comparison_1 (stmt, code,
499 boolean_type_node,
500 rhs1, rhs2);
501 if (tmp)
503 if (dump_file && tmp)
505 fprintf (dump_file, " Replaced '");
506 print_gimple_expr (dump_file, stmt, 0, 0);
507 fprintf (dump_file, "' with '");
508 print_generic_expr (dump_file, tmp, 0);
509 fprintf (dump_file, "'\n");
512 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
513 update_stmt (stmt);
515 if (TREE_CODE (rhs1) == SSA_NAME)
516 cfg_changed |= remove_prop_source_from_use (rhs1);
517 if (TREE_CODE (rhs2) == SSA_NAME)
518 cfg_changed |= remove_prop_source_from_use (rhs2);
519 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
522 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
523 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
524 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
525 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
526 && ((code == EQ_EXPR
527 && integer_zerop (rhs2))
528 || (code == NE_EXPR
529 && integer_onep (rhs2))))
531 basic_block bb = gimple_bb (stmt);
532 gimple_cond_set_code (stmt, NE_EXPR);
533 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
534 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
535 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
536 return 1;
539 return 0;
543 /* Propagate from the ssa name definition statements of COND_EXPR
544 in the rhs of statement STMT into the conditional if that simplifies it.
545 Returns true if the stmt was changed.
547 static bool
548 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
550 gimple stmt = gsi_stmt (*gsi_p);
551 tree tmp = NULL_TREE;
552 tree cond = gimple_assign_rhs1 (stmt);
553 enum tree_code code = gimple_assign_rhs_code (stmt);
554 bool swap = false;
556 /* We can do tree combining on SSA_NAME and comparison expressions. */
557 if (COMPARISON_CLASS_P (cond))
558 tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
559 TREE_TYPE (cond),
560 TREE_OPERAND (cond, 0),
561 TREE_OPERAND (cond, 1));
562 else if (TREE_CODE (cond) == SSA_NAME)
564 enum tree_code def_code;
565 tree name = cond;
566 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
567 if (!def_stmt || !can_propagate_from (def_stmt))
568 return 0;
570 def_code = gimple_assign_rhs_code (def_stmt);
571 if (TREE_CODE_CLASS (def_code) == tcc_comparison)
572 tmp = fold_build2_loc (gimple_location (def_stmt),
573 def_code,
574 TREE_TYPE (cond),
575 gimple_assign_rhs1 (def_stmt),
576 gimple_assign_rhs2 (def_stmt));
577 else if (code == COND_EXPR
578 && ((def_code == BIT_NOT_EXPR
579 && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
580 || (def_code == BIT_XOR_EXPR
581 && integer_onep (gimple_assign_rhs2 (def_stmt)))))
583 tmp = gimple_assign_rhs1 (def_stmt);
584 swap = true;
588 if (tmp
589 && is_gimple_condexpr (tmp))
591 if (dump_file && tmp)
593 fprintf (dump_file, " Replaced '");
594 print_generic_expr (dump_file, cond, 0);
595 fprintf (dump_file, "' with '");
596 print_generic_expr (dump_file, tmp, 0);
597 fprintf (dump_file, "'\n");
600 if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
601 : integer_onep (tmp))
602 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
603 else if (integer_zerop (tmp))
604 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
605 else
607 gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
608 if (swap)
610 tree t = gimple_assign_rhs2 (stmt);
611 gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
612 gimple_assign_set_rhs3 (stmt, t);
615 stmt = gsi_stmt (*gsi_p);
616 update_stmt (stmt);
618 return true;
621 return 0;
624 /* Propagate from the ssa name definition statements of COND_EXPR
625 values in the rhs of statement STMT into the conditional arms
626 if that simplifies it.
627 Returns true if the stmt was changed. */
629 static bool
630 combine_cond_exprs (gimple_stmt_iterator *gsi_p)
632 gimple stmt = gsi_stmt (*gsi_p);
633 tree cond, val1, val2;
634 bool changed = false;
636 cond = gimple_assign_rhs1 (stmt);
637 val1 = gimple_assign_rhs2 (stmt);
638 if (TREE_CODE (val1) == SSA_NAME)
640 gimple def_stmt = SSA_NAME_DEF_STMT (val1);
641 if (is_gimple_assign (def_stmt)
642 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
643 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
645 val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
646 gimple_assign_set_rhs2 (stmt, val1);
647 changed = true;
650 val2 = gimple_assign_rhs3 (stmt);
651 if (TREE_CODE (val2) == SSA_NAME)
653 gimple def_stmt = SSA_NAME_DEF_STMT (val2);
654 if (is_gimple_assign (def_stmt)
655 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
656 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
658 val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
659 gimple_assign_set_rhs3 (stmt, val2);
660 changed = true;
663 if (operand_equal_p (val1, val2, 0))
665 gimple_assign_set_rhs_from_tree (gsi_p, val1);
666 stmt = gsi_stmt (*gsi_p);
667 changed = true;
670 if (changed)
671 update_stmt (stmt);
673 return changed;
676 /* We've just substituted an ADDR_EXPR into stmt. Update all the
677 relevant data structures to match. */
679 static void
680 tidy_after_forward_propagate_addr (gimple stmt)
682 /* We may have turned a trapping insn into a non-trapping insn. */
683 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
684 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
685 cfg_changed = true;
687 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
688 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
691 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
692 ADDR_EXPR <whatever>.
694 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
695 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
696 node or for recovery of array indexing from pointer arithmetic.
698 Return true if the propagation was successful (the propagation can
699 be not totally successful, yet things may have been changed). */
701 static bool
702 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
703 gimple_stmt_iterator *use_stmt_gsi,
704 bool single_use_p)
706 tree lhs, rhs, rhs2, array_ref;
707 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
708 enum tree_code rhs_code;
/* RES is the return value: it stays true unless the LHS dereference
   below could not be rewritten (see the "struct assignment" case).  */
709 bool res = true;
711 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
713 lhs = gimple_assign_lhs (use_stmt);
714 rhs_code = gimple_assign_rhs_code (use_stmt);
715 rhs = gimple_assign_rhs1 (use_stmt);
717 /* Trivial cases. The use statement could be a trivial copy or a
718 useless conversion. Recurse to the uses of the lhs as copyprop does
719 not copy through different variant pointers and FRE does not catch
720 all useless conversions. Treat the case of a single-use name and
721 a conversion to def_rhs type separate, though. */
722 if (TREE_CODE (lhs) == SSA_NAME
723 && ((rhs_code == SSA_NAME && rhs == name)
724 || CONVERT_EXPR_CODE_P (rhs_code)))
726 /* Only recurse if we don't deal with a single use or we cannot
727 do the propagation to the current statement. In particular
728 we can end up with a conversion needed for a non-invariant
729 address which we cannot do in a single statement. */
730 if (!single_use_p
731 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
732 && (!is_gimple_min_invariant (def_rhs)
733 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
734 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
735 && (TYPE_PRECISION (TREE_TYPE (lhs))
736 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
737 return forward_propagate_addr_expr (lhs, def_rhs);
/* Single-use copy/conversion: substitute DEF_RHS directly into this
   statement, keeping a NOP_EXPR when the types do not agree.  */
739 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
740 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
741 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
742 else
743 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
744 return true;
747 /* Propagate through constant pointer adjustments. */
748 if (TREE_CODE (lhs) == SSA_NAME
749 && rhs_code == POINTER_PLUS_EXPR
750 && rhs == name
751 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
753 tree new_def_rhs;
754 /* As we come here with non-invariant addresses in def_rhs we need
755 to make sure we can build a valid constant offsetted address
756 for further propagation. Simply rely on fold building that
757 and check after the fact. */
758 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
759 def_rhs,
760 fold_convert (ptr_type_node,
761 gimple_assign_rhs2 (use_stmt)));
762 if (TREE_CODE (new_def_rhs) == MEM_REF
763 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
764 return false;
765 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
766 TREE_TYPE (rhs));
768 /* Recurse. If we could propagate into all uses of lhs do not
769 bother to replace into the current use but just pretend we did. */
770 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
771 && forward_propagate_addr_expr (lhs, new_def_rhs))
772 return true;
774 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
775 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
776 new_def_rhs, NULL_TREE);
777 else if (is_gimple_min_invariant (new_def_rhs))
778 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
779 new_def_rhs, NULL_TREE);
780 else
781 return false;
782 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
783 update_stmt (use_stmt);
784 return true;
787 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
788 ADDR_EXPR will not appear on the LHS. */
789 lhs = gimple_assign_lhs (use_stmt);
790 while (handled_component_p (lhs))
791 lhs = TREE_OPERAND (lhs, 0);
793 /* Now see if the LHS node is a MEM_REF using NAME. If so,
794 propagate the ADDR_EXPR into the use of NAME and fold the result. */
795 if (TREE_CODE (lhs) == MEM_REF
796 && TREE_OPERAND (lhs, 0) == name)
798 tree def_rhs_base;
799 HOST_WIDE_INT def_rhs_offset;
800 /* If the address is invariant we can always fold it. */
801 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
802 &def_rhs_offset)))
804 double_int off = mem_ref_offset (lhs);
805 tree new_ptr;
806 off += double_int::from_shwi (def_rhs_offset);
807 if (TREE_CODE (def_rhs_base) == MEM_REF)
809 off += mem_ref_offset (def_rhs_base);
810 new_ptr = TREE_OPERAND (def_rhs_base, 0);
812 else
813 new_ptr = build_fold_addr_expr (def_rhs_base);
/* Rewrite the MEM_REF in place: new base pointer plus accumulated
   constant offset.  */
814 TREE_OPERAND (lhs, 0) = new_ptr;
815 TREE_OPERAND (lhs, 1)
816 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
817 tidy_after_forward_propagate_addr (use_stmt);
818 /* Continue propagating into the RHS if this was not the only use. */
819 if (single_use_p)
820 return true;
822 /* If the LHS is a plain dereference and the value type is the same as
823 that of the pointed-to type of the address we can put the
824 dereferenced address on the LHS preserving the original alias-type. */
825 else if (gimple_assign_lhs (use_stmt) == lhs
826 && integer_zerop (TREE_OPERAND (lhs, 1))
827 && useless_type_conversion_p
828 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
829 TREE_TYPE (gimple_assign_rhs1 (use_stmt)))
830 /* Don't forward anything into clobber stmts if it would result
831 in the lhs no longer being a MEM_REF. */
832 && (!gimple_clobber_p (use_stmt)
833 || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
835 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
836 tree new_offset, new_base, saved, new_lhs;
837 while (handled_component_p (*def_rhs_basep))
838 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
/* Temporarily splice a rebuilt MEM_REF into DEF_RHS so that
   unshare_expr below copies the rewritten form; SAVED restores
   the shared tree afterwards.  */
839 saved = *def_rhs_basep;
840 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
842 new_base = TREE_OPERAND (*def_rhs_basep, 0);
843 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
844 TREE_OPERAND (*def_rhs_basep, 1));
846 else
848 new_base = build_fold_addr_expr (*def_rhs_basep);
849 new_offset = TREE_OPERAND (lhs, 1);
851 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
852 new_base, new_offset);
853 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
854 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
855 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
856 new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
857 gimple_assign_set_lhs (use_stmt, new_lhs);
858 TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
859 TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
860 *def_rhs_basep = saved;
861 tidy_after_forward_propagate_addr (use_stmt);
862 /* Continue propagating into the RHS if this was not the
863 only use. */
864 if (single_use_p)
865 return true;
867 else
868 /* We can have a struct assignment dereferencing our name twice.
869 Note that we didn't propagate into the lhs to not falsely
870 claim we did when propagating into the rhs. */
871 res = false;
874 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
875 nodes from the RHS. */
876 rhs = gimple_assign_rhs1 (use_stmt);
877 if (TREE_CODE (rhs) == ADDR_EXPR)
878 rhs = TREE_OPERAND (rhs, 0);
879 while (handled_component_p (rhs))
880 rhs = TREE_OPERAND (rhs, 0);
882 /* Now see if the RHS node is a MEM_REF using NAME. If so,
883 propagate the ADDR_EXPR into the use of NAME and fold the result. */
884 if (TREE_CODE (rhs) == MEM_REF
885 && TREE_OPERAND (rhs, 0) == name)
887 tree def_rhs_base;
888 HOST_WIDE_INT def_rhs_offset;
889 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
890 &def_rhs_offset)))
892 double_int off = mem_ref_offset (rhs);
893 tree new_ptr;
894 off += double_int::from_shwi (def_rhs_offset);
895 if (TREE_CODE (def_rhs_base) == MEM_REF)
897 off += mem_ref_offset (def_rhs_base);
898 new_ptr = TREE_OPERAND (def_rhs_base, 0);
900 else
901 new_ptr = build_fold_addr_expr (def_rhs_base);
902 TREE_OPERAND (rhs, 0) = new_ptr;
903 TREE_OPERAND (rhs, 1)
904 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
905 fold_stmt_inplace (use_stmt_gsi);
906 tidy_after_forward_propagate_addr (use_stmt);
907 return res;
909 /* If the RHS is a plain dereference and the value type is the same as
910 that of the pointed-to type of the address we can put the
911 dereferenced address on the RHS preserving the original alias-type. */
912 else if (gimple_assign_rhs1 (use_stmt) == rhs
913 && integer_zerop (TREE_OPERAND (rhs, 1))
914 && useless_type_conversion_p
915 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
916 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
918 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
919 tree new_offset, new_base, saved, new_rhs;
920 while (handled_component_p (*def_rhs_basep))
921 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
/* Same splice-unshare-restore dance as for the LHS case above.  */
922 saved = *def_rhs_basep;
923 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
925 new_base = TREE_OPERAND (*def_rhs_basep, 0);
926 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
927 TREE_OPERAND (*def_rhs_basep, 1));
929 else
931 new_base = build_fold_addr_expr (*def_rhs_basep);
932 new_offset = TREE_OPERAND (rhs, 1);
934 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
935 new_base, new_offset);
936 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
937 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
938 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
939 new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
940 gimple_assign_set_rhs1 (use_stmt, new_rhs);
941 TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
942 TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
943 *def_rhs_basep = saved;
944 fold_stmt_inplace (use_stmt_gsi);
945 tidy_after_forward_propagate_addr (use_stmt);
946 return res;
950 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
951 is nothing to do. */
952 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
953 || gimple_assign_rhs1 (use_stmt) != name)
954 return false;
956 /* The remaining cases are all for turning pointer arithmetic into
957 array indexing. They only apply when we have the address of
958 element zero in an array. If that is not the case then there
959 is nothing to do. */
960 array_ref = TREE_OPERAND (def_rhs, 0);
961 if ((TREE_CODE (array_ref) != ARRAY_REF
962 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
963 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
964 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
965 return false;
967 rhs2 = gimple_assign_rhs2 (use_stmt);
968 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
969 if (TREE_CODE (rhs2) == INTEGER_CST)
971 tree new_rhs = build1_loc (gimple_location (use_stmt),
972 ADDR_EXPR, TREE_TYPE (def_rhs),
973 fold_build2 (MEM_REF,
974 TREE_TYPE (TREE_TYPE (def_rhs)),
975 unshare_expr (def_rhs),
976 fold_convert (ptr_type_node,
977 rhs2)));
978 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
979 use_stmt = gsi_stmt (*use_stmt_gsi);
980 update_stmt (use_stmt);
981 tidy_after_forward_propagate_addr (use_stmt);
982 return true;
985 return false;
988 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
990 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
991 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
992 node or for recovery of array indexing from pointer arithmetic.
993 Returns true, if all uses have been propagated into. */
995 static bool
996 forward_propagate_addr_expr (tree name, tree rhs)
998 int stmt_loop_depth = bb_loop_depth (gimple_bb (SSA_NAME_DEF_STMT (name)));
999 imm_use_iterator iter;
1000 gimple use_stmt;
/* ALL is true while every (non-debug) use has been propagated into.  */
1001 bool all = true;
1002 bool single_use_p = has_single_use (name);
1004 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
1006 bool result;
1007 tree use_rhs;
1009 /* If the use is not in a simple assignment statement, then
1010 there is nothing we can do. */
1011 if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
/* Debug statements do not count against full propagation.  */
1013 if (!is_gimple_debug (use_stmt))
1014 all = false;
1015 continue;
1018 /* If the use is in a deeper loop nest, then we do not want
1019 to propagate non-invariant ADDR_EXPRs into the loop as that
1020 is likely adding expression evaluations into the loop. */
1021 if (bb_loop_depth (gimple_bb (use_stmt)) > stmt_loop_depth
1022 && !is_gimple_min_invariant (rhs))
1024 all = false;
1025 continue;
1029 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1030 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1031 single_use_p);
1032 /* If the use has moved to a different statement adjust
1033 the update machinery for the old statement too. */
1034 if (use_stmt != gsi_stmt (gsi))
1036 update_stmt (use_stmt);
1037 use_stmt = gsi_stmt (gsi);
1040 update_stmt (use_stmt);
1042 all &= result;
1044 /* Remove intermediate now unused copy and conversion chains. */
1045 use_rhs = gimple_assign_rhs1 (use_stmt);
1046 if (result
1047 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1048 && TREE_CODE (use_rhs) == SSA_NAME
1049 && has_zero_uses (gimple_assign_lhs (use_stmt)))
1051 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt)
1052 release_defs (use_stmt);
1053 gsi_remove (&gsi, true);
/* Fully propagated only if every use was handled and NAME itself
   is now dead.  */
1057 return all && has_zero_uses (name);
1061 /* Forward propagate the comparison defined in *DEFGSI like
1062 cond_1 = x CMP y to uses of the form
1063 a_1 = (T')cond_1
1064 a_1 = !cond_1
1065 a_1 = cond_1 != 0
1066 Returns true if stmt is now unused. Advance DEFGSI to the next
1067 statement. */
1069 static bool
1070 forward_propagate_comparison (gimple_stmt_iterator *defgsi)
1072 gimple stmt = gsi_stmt (*defgsi);
1073 tree name = gimple_assign_lhs (stmt);
1074 gimple use_stmt;
1075 tree tmp = NULL_TREE;
1076 gimple_stmt_iterator gsi;
1077 enum tree_code code;
1078 tree lhs;
1080 /* Don't propagate ssa names that occur in abnormal phis. */
1081 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1082 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1083 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1084 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1085 goto bailout;
1087 /* Do not un-cse comparisons. But propagate through copies. */
1088 use_stmt = get_prop_dest_stmt (name, &name);
1089 if (!use_stmt
1090 || !is_gimple_assign (use_stmt))
1091 goto bailout;
1093 code = gimple_assign_rhs_code (use_stmt);
1094 lhs = gimple_assign_lhs (use_stmt);
/* The result of the use must be integral for the rewrites below.  */
1095 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
1096 goto bailout;
1098 /* We can propagate the condition into a statement that
1099 computes the logical negation of the comparison result. */
1100 if ((code == BIT_NOT_EXPR
1101 && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
1102 || (code == BIT_XOR_EXPR
1103 && integer_onep (gimple_assign_rhs2 (use_stmt))))
1105 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
/* Whether NaNs must be honored decides if the comparison is invertible.  */
1106 bool nans = HONOR_NANS (TYPE_MODE (type));
1107 enum tree_code inv_code;
1108 inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
/* ERROR_MARK means the comparison has no usable inverse.  */
1109 if (inv_code == ERROR_MARK)
1110 goto bailout;
1112 tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1113 gimple_assign_rhs2 (stmt));
1115 else
1116 goto bailout;
1118 gsi = gsi_for_stmt (use_stmt);
1119 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1120 use_stmt = gsi_stmt (gsi);
1121 update_stmt (use_stmt);
1123 if (dump_file && (dump_flags & TDF_DETAILS))
1125 fprintf (dump_file, " Replaced '");
1126 print_gimple_expr (dump_file, stmt, 0, dump_flags);
1127 fprintf (dump_file, "' with '");
1128 print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
1129 fprintf (dump_file, "'\n");
1132 /* When we remove stmt now the iterator defgsi goes off its current
1133 sequence, hence advance it now. */
1134 gsi_next (defgsi);
1136 /* Remove defining statements. */
1137 return remove_prop_source_from_use (name);
1139 bailout:
/* DEFGSI must be advanced on every path; the caller relies on it.  */
1140 gsi_next (defgsi);
1141 return false;
1145 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1146 If so, we can change STMT into lhs = y which can later be copy
1147 propagated. Similarly for negation.
1149 This could trivially be formulated as a forward propagation
1150 to immediate uses. However, we already had an implementation
1151 from DOM which used backward propagation via the use-def links.
1153 It turns out that backward propagation is actually faster as
1154 there's less work to do for each NOT/NEG expression we find.
1155 Backwards propagation needs to look at the statement in a single
1156 backlink. Forward propagation needs to look at potentially more
1157 than one forward link.
1159 Returns true when the statement was changed. */
1161 static bool
1162 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1164 gimple stmt = gsi_stmt (*gsi_p);
1165 tree rhs = gimple_assign_rhs1 (stmt);
1166 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1168 /* See if the RHS_DEF_STMT has the same form as our statement. */
1169 if (is_gimple_assign (rhs_def_stmt)
1170 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1172 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1174 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1175 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1176 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1178 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1179 stmt = gsi_stmt (*gsi_p);
1180 update_stmt (stmt);
1181 return true;
1185 return false;
1188 /* Helper function for simplify_gimple_switch. Remove case labels that
1189 have values outside the range of the new type. */
1191 static void
1192 simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
1194 unsigned int branch_num = gimple_switch_num_labels (stmt);
1195 vec<tree> labels;
1196 labels.create (branch_num);
1197 unsigned int i, len;
1199 /* Collect the existing case labels in a VEC, and preprocess it as if
1200 we are gimplifying a GENERIC SWITCH_EXPR. */
/* Label 0 is the default label and is deliberately skipped here.  */
1201 for (i = 1; i < branch_num; i++)
1202 labels.quick_push (gimple_switch_label (stmt, i));
1203 preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
1205 /* If any labels were removed, replace the existing case labels
1206 in the GIMPLE_SWITCH statement with the correct ones.
1207 Note that the type updates were done in-place on the case labels,
1208 so we only have to replace the case labels in the GIMPLE_SWITCH
1209 if the number of labels changed. */
1210 len = labels.length ();
1211 if (len < branch_num - 1)
1213 bitmap target_blocks;
1214 edge_iterator ei;
1215 edge e;
1217 /* Corner case: *all* case labels have been removed as being
1218 out-of-range for INDEX_TYPE. Push one label and let the
1219 CFG cleanups deal with this further. */
1220 if (len == 0)
1222 tree label, elt;
1224 label = CASE_LABEL (gimple_switch_default_label (stmt));
1225 elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
1226 labels.quick_push (elt);
1227 len = 1;
1230 for (i = 0; i < labels.length (); i++)
1231 gimple_switch_set_label (stmt, i + 1, labels[i]);
/* Clear the now-unused trailing label slots before shrinking.  */
1232 for (i++ ; i < branch_num; i++)
1233 gimple_switch_set_label (stmt, i, NULL_TREE);
1234 gimple_switch_set_num_labels (stmt, len + 1);
1236 /* Cleanup any edges that are now dead. */
1237 target_blocks = BITMAP_ALLOC (NULL);
1238 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
1240 tree elt = gimple_switch_label (stmt, i);
1241 basic_block target = label_to_block (CASE_LABEL (elt));
1242 bitmap_set_bit (target_blocks, target->index);
1244 for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
1246 if (! bitmap_bit_p (target_blocks, e->dest->index))
1248 remove_edge (e);
/* Removing an edge invalidates dominance info; record the CFG change.  */
1249 cfg_changed = true;
1250 free_dominance_info (CDI_DOMINATORS);
1252 else
1253 ei_next (&ei);
1255 BITMAP_FREE (target_blocks);
1258 labels.release ();
1261 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1262 the condition which we may be able to optimize better. */
1264 static bool
1265 simplify_gimple_switch (gimple stmt)
1267 tree cond = gimple_switch_index (stmt);
1268 tree def, to, ti;
1269 gimple def_stmt;
1271 /* The optimization that we really care about is removing unnecessary
1272 casts. That will let us do much better in propagating the inferred
1273 constant at the switch target. */
1274 if (TREE_CODE (cond) == SSA_NAME)
1276 def_stmt = SSA_NAME_DEF_STMT (cond);
1277 if (is_gimple_assign (def_stmt))
1279 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1281 int need_precision;
1282 bool fail;
1284 def = gimple_assign_rhs1 (def_stmt);
1286 to = TREE_TYPE (cond);
1287 ti = TREE_TYPE (def);
1289 /* If we have an extension that preserves value, then we
1290 can copy the source value into the switch. */
1292 need_precision = TYPE_PRECISION (ti);
1293 fail = false;
1294 if (! INTEGRAL_TYPE_P (ti))
1295 fail = true;
1296 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
1297 fail = true;
1298 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1299 need_precision += 1;
1300 if (TYPE_PRECISION (to) < need_precision)
1301 fail = true;
1303 if (!fail)
1305 gimple_switch_set_index (stmt, def);
1306 simplify_gimple_switch_label_vec (stmt, ti);
1307 update_stmt (stmt);
1308 return true;
1314 return false;
1317 /* For pointers p2 and p1 return p2 - p1 if the
1318 difference is known and constant, otherwise return NULL. */
1320 static tree
1321 constant_pointer_difference (tree p1, tree p2)
1323 int i, j;
/* Maximum number of (base, offset) pairs recorded per input pointer.  */
1324 #define CPD_ITERATIONS 5
1325 tree exps[2][CPD_ITERATIONS];
1326 tree offs[2][CPD_ITERATIONS];
1327 int cnt[2];
1329 for (i = 0; i < 2; i++)
1331 tree p = i ? p1 : p2;
/* Byte offset accumulated while walking from the original pointer
   down to the base recorded in EXPS.  */
1332 tree off = size_zero_node;
1333 gimple stmt;
1334 enum tree_code code;
1336 /* For each of p1 and p2 we need to iterate at least
1337 twice, to handle ADDR_EXPR directly in p1/p2,
1338 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1339 on definition's stmt RHS. Iterate a few extra times. */
1340 j = 0;
1343 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1344 break;
1345 if (TREE_CODE (p) == ADDR_EXPR)
1347 tree q = TREE_OPERAND (p, 0);
1348 HOST_WIDE_INT offset;
1349 tree base = get_addr_base_and_unit_offset (q, &offset);
1350 if (base)
1352 q = base;
1353 if (offset)
1354 off = size_binop (PLUS_EXPR, off, size_int (offset));
/* &MEM[ssa + cst] contributes its SSA base plus the constant offset.  */
1356 if (TREE_CODE (q) == MEM_REF
1357 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1359 p = TREE_OPERAND (q, 0);
1360 off = size_binop (PLUS_EXPR, off,
1361 double_int_to_tree (sizetype,
1362 mem_ref_offset (q)));
1364 else
1366 exps[i][j] = q;
1367 offs[i][j++] = off;
1368 break;
1371 if (TREE_CODE (p) != SSA_NAME)
1372 break;
/* Record this SSA pointer together with the offset seen so far.  */
1373 exps[i][j] = p;
1374 offs[i][j++] = off;
1375 if (j == CPD_ITERATIONS)
1376 break;
/* Follow pointer arithmetic and conversions to an earlier base.  */
1377 stmt = SSA_NAME_DEF_STMT (p);
1378 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1379 break;
1380 code = gimple_assign_rhs_code (stmt);
1381 if (code == POINTER_PLUS_EXPR)
1383 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1384 break;
1385 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1386 p = gimple_assign_rhs1 (stmt);
1388 else if (code == ADDR_EXPR || code == NOP_EXPR)
1389 p = gimple_assign_rhs1 (stmt);
1390 else
1391 break;
1393 while (1);
1394 cnt[i] = j;
/* If the two chains share a common base, the difference of the
   recorded offsets is the constant pointer difference.  */
1397 for (i = 0; i < cnt[0]; i++)
1398 for (j = 0; j < cnt[1]; j++)
1399 if (exps[0][i] == exps[1][j])
1400 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1402 return NULL_TREE;
1405 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1406 Optimize
1407 memcpy (p, "abcd", 4);
1408 memset (p + 4, ' ', 3);
1409 into
1410 memcpy (p, "abcd ", 7);
1411 call if the latter can be stored by pieces during expansion. */
1413 static bool
1414 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1416 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
/* STMT1 is the store STMT2 sees through its virtual operand.  */
1417 tree vuse = gimple_vuse (stmt2);
1418 if (vuse == NULL)
1419 return false;
1420 stmt1 = SSA_NAME_DEF_STMT (vuse);
1422 switch (DECL_FUNCTION_CODE (callee2))
1424 case BUILT_IN_MEMSET:
1425 if (gimple_call_num_args (stmt2) != 3
1426 || gimple_call_lhs (stmt2)
1427 || CHAR_BIT != 8
1428 || BITS_PER_UNIT != 8)
1429 break;
1430 else
1432 tree callee1;
1433 tree ptr1, src1, str1, off1, len1, lhs1;
1434 tree ptr2 = gimple_call_arg (stmt2, 0);
1435 tree val2 = gimple_call_arg (stmt2, 1);
1436 tree len2 = gimple_call_arg (stmt2, 2);
1437 tree diff, vdef, new_str_cst;
1438 gimple use_stmt;
1439 unsigned int ptr1_align;
1440 unsigned HOST_WIDE_INT src_len;
1441 char *src_buf;
1442 use_operand_p use_p;
/* The memset value and length must both be compile-time constants.  */
1444 if (!host_integerp (val2, 0)
1445 || !host_integerp (len2, 1))
1446 break;
1447 if (is_gimple_call (stmt1))
1449 /* If first stmt is a call, it needs to be memcpy
1450 or mempcpy, with string literal as second argument and
1451 constant length. */
1452 callee1 = gimple_call_fndecl (stmt1);
1453 if (callee1 == NULL_TREE
1454 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1455 || gimple_call_num_args (stmt1) != 3)
1456 break;
1457 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1458 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1459 break;
1460 ptr1 = gimple_call_arg (stmt1, 0);
1461 src1 = gimple_call_arg (stmt1, 1);
1462 len1 = gimple_call_arg (stmt1, 2);
1463 lhs1 = gimple_call_lhs (stmt1);
1464 if (!host_integerp (len1, 1))
1465 break;
1466 str1 = string_constant (src1, &off1);
1467 if (str1 == NULL_TREE)
1468 break;
/* The literal at OFF1 must cover LEN1 bytes of char-mode elements.  */
1469 if (!host_integerp (off1, 1)
1470 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1471 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1472 - tree_low_cst (off1, 1)) > 0
1473 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1474 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1475 != TYPE_MODE (char_type_node))
1476 break;
1478 else if (gimple_assign_single_p (stmt1))
1480 /* Otherwise look for length 1 memcpy optimized into
1481 assignment. */
1482 ptr1 = gimple_assign_lhs (stmt1);
1483 src1 = gimple_assign_rhs1 (stmt1);
1484 if (TREE_CODE (ptr1) != MEM_REF
1485 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1486 || !host_integerp (src1, 0))
1487 break;
/* Normalize the single-byte store so it looks like memcpy (ptr1, ..., 1).  */
1488 ptr1 = build_fold_addr_expr (ptr1);
1489 callee1 = NULL_TREE;
1490 len1 = size_one_node;
1491 lhs1 = NULL_TREE;
1492 off1 = size_zero_node;
1493 str1 = NULL_TREE;
1495 else
1496 break;
1498 diff = constant_pointer_difference (ptr1, ptr2);
/* For mempcpy, LHS1 points just past the copied bytes; retry
   computing the difference relative to it.  */
1499 if (diff == NULL && lhs1 != NULL)
1501 diff = constant_pointer_difference (lhs1, ptr2);
1502 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1503 && diff != NULL)
1504 diff = size_binop (PLUS_EXPR, diff,
1505 fold_convert (sizetype, len1));
1507 /* If the difference between the second and first destination pointer
1508 is not constant, or is bigger than memcpy length, bail out. */
1509 if (diff == NULL
1510 || !host_integerp (diff, 1)
1511 || tree_int_cst_lt (len1, diff))
1512 break;
1514 /* Use maximum of difference plus memset length and memcpy length
1515 as the new memcpy length, if it is too big, bail out. */
1516 src_len = tree_low_cst (diff, 1);
1517 src_len += tree_low_cst (len2, 1);
1518 if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
1519 src_len = tree_low_cst (len1, 1);
1520 if (src_len > 1024)
1521 break;
1523 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1524 with bigger length will return different result. */
1525 if (lhs1 != NULL_TREE
1526 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1527 && (TREE_CODE (lhs1) != SSA_NAME
1528 || !single_imm_use (lhs1, &use_p, &use_stmt)
1529 || use_stmt != stmt2))
1530 break;
1532 /* If anything reads memory in between memcpy and memset
1533 call, the modified memcpy call might change it. */
1534 vdef = gimple_vdef (stmt1);
1535 if (vdef != NULL
1536 && (!single_imm_use (vdef, &use_p, &use_stmt)
1537 || use_stmt != stmt2))
1538 break;
1540 ptr1_align = get_pointer_alignment (ptr1);
1541 /* Construct the new source string literal. */
1542 src_buf = XALLOCAVEC (char, src_len + 1);
1543 if (callee1)
1544 memcpy (src_buf,
1545 TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
1546 tree_low_cst (len1, 1));
1547 else
1548 src_buf[0] = tree_low_cst (src1, 0);
1549 memset (src_buf + tree_low_cst (diff, 1),
1550 tree_low_cst (val2, 0), tree_low_cst (len2, 1));
1551 src_buf[src_len] = '\0';
1552 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1553 handle embedded '\0's. */
1554 if (strlen (src_buf) != src_len)
1555 break;
/* Cost the store-by-pieces using the profile of STMT2's block.  */
1556 rtl_profile_for_bb (gimple_bb (stmt2));
1557 /* If the new memcpy wouldn't be emitted by storing the literal
1558 by pieces, this optimization might enlarge .rodata too much,
1559 as commonly used string literals couldn't be shared any
1560 longer. */
1561 if (!can_store_by_pieces (src_len,
1562 builtin_strncpy_read_str,
1563 src_buf, ptr1_align, false))
1564 break;
1566 new_str_cst = build_string_literal (src_len, src_buf);
1567 if (callee1)
1569 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1570 memset call. */
1571 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1572 gimple_call_set_lhs (stmt1, NULL_TREE);
1573 gimple_call_set_arg (stmt1, 1, new_str_cst);
1574 gimple_call_set_arg (stmt1, 2,
1575 build_int_cst (TREE_TYPE (len1), src_len));
1576 update_stmt (stmt1);
1577 unlink_stmt_vdef (stmt2);
1578 gsi_remove (gsi_p, true);
1579 release_defs (stmt2);
1580 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1581 release_ssa_name (lhs1);
/* True: the statement at *GSI_P was removed.  */
1582 return true;
1584 else
1586 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1587 assignment, remove STMT1 and change memset call into
1588 memcpy call. */
1589 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1591 if (!is_gimple_val (ptr1))
1592 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1593 true, GSI_SAME_STMT);
1594 gimple_call_set_fndecl (stmt2,
1595 builtin_decl_explicit (BUILT_IN_MEMCPY));
1596 gimple_call_set_arg (stmt2, 0, ptr1);
1597 gimple_call_set_arg (stmt2, 1, new_str_cst);
1598 gimple_call_set_arg (stmt2, 2,
1599 build_int_cst (TREE_TYPE (len2), src_len));
1600 unlink_stmt_vdef (stmt1);
1601 gsi_remove (&gsi, true);
1602 release_defs (stmt1);
1603 update_stmt (stmt2);
/* False: STMT2 was transformed in place, not removed.  */
1604 return false;
1607 break;
1608 default:
1609 break;
1611 return false;
1614 /* Checks if expression has type of one-bit precision, or is a known
1615 truth-valued expression. */
1616 static bool
1617 truth_valued_ssa_name (tree name)
1619 gimple def;
1620 tree type = TREE_TYPE (name);
1622 if (!INTEGRAL_TYPE_P (type))
1623 return false;
1624 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1625 necessarily one and so ~X is not equal to !X. */
1626 if (TYPE_PRECISION (type) == 1)
1627 return true;
1628 def = SSA_NAME_DEF_STMT (name);
1629 if (is_gimple_assign (def))
1630 return truth_value_p (gimple_assign_rhs_code (def));
1631 return false;
1634 /* Helper routine for simplify_bitwise_binary_1 function.
1635 Return for the SSA name NAME the expression X if it meets condition
1636 NAME = !X. Otherwise return NULL_TREE.
1637 Detected patterns for NAME = !X are:
1638 !X and X == 0 for X with integral type.
1639 X ^ 1, X != 1, or ~X for X with integral type with precision of one. */
1640 static tree
1641 lookup_logical_inverted_value (tree name)
1643 tree op1, op2;
1644 enum tree_code code;
1645 gimple def;
1647 /* If name has none-intergal type, or isn't a SSA_NAME, then
1648 return. */
1649 if (TREE_CODE (name) != SSA_NAME
1650 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1651 return NULL_TREE;
1652 def = SSA_NAME_DEF_STMT (name);
1653 if (!is_gimple_assign (def))
1654 return NULL_TREE;
1656 code = gimple_assign_rhs_code (def);
1657 op1 = gimple_assign_rhs1 (def);
1658 op2 = NULL_TREE;
1660 /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
1661 If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return. */
1662 if (code == EQ_EXPR || code == NE_EXPR
1663 || code == BIT_XOR_EXPR)
1664 op2 = gimple_assign_rhs2 (def);
1666 switch (code)
1668 case BIT_NOT_EXPR:
1669 if (truth_valued_ssa_name (name))
1670 return op1;
1671 break;
1672 case EQ_EXPR:
1673 /* Check if we have X == 0 and X has an integral type. */
1674 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1675 break;
1676 if (integer_zerop (op2))
1677 return op1;
1678 break;
1679 case NE_EXPR:
1680 /* Check if we have X != 1 and X is a truth-valued. */
1681 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1682 break;
1683 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1684 return op1;
1685 break;
1686 case BIT_XOR_EXPR:
1687 /* Check if we have X ^ 1 and X is truth valued. */
1688 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1689 return op1;
1690 break;
1691 default:
1692 break;
1695 return NULL_TREE;
1698 /* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
1699 operations CODE, if one operand has the logically inverted
1700 value of the other. */
1701 static tree
1702 simplify_bitwise_binary_1 (enum tree_code code, tree type,
1703 tree arg1, tree arg2)
1705 tree anot;
1707 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1708 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1709 && code != BIT_XOR_EXPR)
1710 return NULL_TREE;
1712 /* First check if operands ARG1 and ARG2 are equal. If so
1713 return NULL_TREE as this optimization is handled fold_stmt. */
1714 if (arg1 == arg2)
1715 return NULL_TREE;
1716 /* See if we have in arguments logical-not patterns. */
1717 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1718 || anot != arg2)
1719 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
1720 || anot != arg1))
1721 return NULL_TREE;
1723 /* X & !X -> 0. */
1724 if (code == BIT_AND_EXPR)
1725 return fold_convert (type, integer_zero_node);
1726 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1727 if (truth_valued_ssa_name (anot))
1728 return fold_convert (type, integer_one_node);
1730 /* ??? Otherwise result is (X != 0 ? X : 1). not handled. */
1731 return NULL_TREE;
1734 /* Given a ssa_name in NAME see if it was defined by an assignment and
1735 set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
1736 to the second operand on the rhs. */
1738 static inline void
1739 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1741 gimple def;
1742 enum tree_code code1;
1743 tree arg11;
1744 tree arg21;
1745 tree arg31;
1746 enum gimple_rhs_class grhs_class;
1748 code1 = TREE_CODE (name);
1749 arg11 = name;
1750 arg21 = NULL_TREE;
1751 grhs_class = get_gimple_rhs_class (code1);
1753 if (code1 == SSA_NAME)
1755 def = SSA_NAME_DEF_STMT (name);
1757 if (def && is_gimple_assign (def)
1758 && can_propagate_from (def))
1760 code1 = gimple_assign_rhs_code (def);
1761 arg11 = gimple_assign_rhs1 (def);
1762 arg21 = gimple_assign_rhs2 (def);
1763 arg31 = gimple_assign_rhs2 (def);
1766 else if (grhs_class == GIMPLE_TERNARY_RHS
1767 || GIMPLE_BINARY_RHS
1768 || GIMPLE_UNARY_RHS
1769 || GIMPLE_SINGLE_RHS)
1770 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1772 *code = code1;
1773 *arg1 = arg11;
1774 if (arg2)
1775 *arg2 = arg21;
1776 /* Ignore arg3 currently. */
1779 /* Return true if a conversion of an operand from type FROM to type TO
1780 should be applied after performing the operation instead. */
1782 static bool
1783 hoist_conversion_for_bitop_p (tree to, tree from)
1785 /* That's a good idea if the conversion widens the operand, thus
1786 after hoisting the conversion the operation will be narrower. */
1787 if (TYPE_PRECISION (from) < TYPE_PRECISION (to))
1788 return true;
1790 /* It's also a good idea if the conversion is to a non-integer mode. */
1791 if (GET_MODE_CLASS (TYPE_MODE (to)) != MODE_INT)
1792 return true;
1794 /* Or if the precision of TO is not the same as the precision
1795 of its mode. */
1796 if (TYPE_PRECISION (to) != GET_MODE_PRECISION (TYPE_MODE (to)))
1797 return true;
1799 return false;
1802 /* Simplify bitwise binary operations.
1803 Return true if a transformation applied, otherwise return false. */
1805 static bool
1806 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1808 gimple stmt = gsi_stmt (*gsi);
1809 tree arg1 = gimple_assign_rhs1 (stmt);
1810 tree arg2 = gimple_assign_rhs2 (stmt);
1811 enum tree_code code = gimple_assign_rhs_code (stmt);
1812 tree res;
1813 tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
1814 enum tree_code def1_code, def2_code;
/* Look through the definitions of both operands for combinable patterns.  */
1816 defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
1817 defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);
1819 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST))
1820 when profitable. */
1821 if (TREE_CODE (arg2) == INTEGER_CST
1822 && CONVERT_EXPR_CODE_P (def1_code)
1823 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1))
1824 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1825 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1827 gimple newop;
1828 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1829 newop =
1830 gimple_build_assign_with_ops (code, tem, def1_arg1,
1831 fold_convert_loc (gimple_location (stmt),
1832 TREE_TYPE (def1_arg1),
1833 arg2));
1834 gimple_set_location (newop, gimple_location (stmt));
1835 gsi_insert_before (gsi, newop, GSI_SAME_STMT)
1836 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1837 tem, NULL_TREE, NULL_TREE);
1838 update_stmt (gsi_stmt (*gsi));
1839 return true;
1842 /* For bitwise binary operations apply operand conversions to the
1843 binary operation result instead of to the operands. This allows
1844 to combine successive conversions and bitwise binary operations. */
1845 if (CONVERT_EXPR_CODE_P (def1_code)
1846 && CONVERT_EXPR_CODE_P (def2_code)
1847 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1848 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1)))
1850 gimple newop;
1851 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1852 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1853 gimple_set_location (newop, gimple_location (stmt));
1854 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1855 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1856 tem, NULL_TREE, NULL_TREE);
1857 update_stmt (gsi_stmt (*gsi));
1858 return true;
1862 /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
1863 if (def1_code == def2_code
1864 && def1_code == BIT_AND_EXPR
1865 && operand_equal_for_phi_arg_p (def1_arg2,
1866 def2_arg2))
1868 tree b = def1_arg2;
1869 tree a = def1_arg1;
1870 tree c = def2_arg1;
1871 tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
1872 /* If A OP0 C (this usually means C is the same as A) is 0
1873 then fold it down correctly. */
1874 if (integer_zerop (inner))
1876 gimple_assign_set_rhs_from_tree (gsi, inner);
1877 update_stmt (stmt);
1878 return true;
1880 /* If A OP0 C (this usually means C is the same as A) is a ssa_name
1881 then fold it down correctly. */
1882 else if (TREE_CODE (inner) == SSA_NAME)
1884 tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
1885 inner, b);
1886 gimple_assign_set_rhs_from_tree (gsi, outer);
1887 update_stmt (stmt);
1888 return true;
1890 else
1892 gimple newop;
1893 tree tem;
1894 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
1895 newop = gimple_build_assign_with_ops (code, tem, a, c);
1896 gimple_set_location (newop, gimple_location (stmt));
1897 /* Make sure to re-process the new stmt as it's walking upwards. */
1898 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
1899 gimple_assign_set_rhs1 (stmt, tem);
1900 gimple_assign_set_rhs2 (stmt, b);
1901 gimple_assign_set_rhs_code (stmt, def1_code);
1902 update_stmt (stmt);
1903 return true;
1907 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
1908 if (code == BIT_AND_EXPR
1909 && def1_code == BIT_IOR_EXPR
1910 && TREE_CODE (arg2) == INTEGER_CST
1911 && TREE_CODE (def1_arg2) == INTEGER_CST)
1913 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
1914 arg2, def1_arg2);
1915 tree tem;
1916 gimple newop;
/* CST1 & CST2 == 0 means the IOR'd constant is masked away entirely.  */
1917 if (integer_zerop (cst))
1919 gimple_assign_set_rhs1 (stmt, def1_arg1);
1920 update_stmt (stmt);
1921 return true;
1923 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
1924 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
1925 tem, def1_arg1, arg2);
1926 gimple_set_location (newop, gimple_location (stmt));
1927 /* Make sure to re-process the new stmt as it's walking upwards. */
1928 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
1929 gimple_assign_set_rhs1 (stmt, tem);
1930 gimple_assign_set_rhs2 (stmt, cst);
1931 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
1932 update_stmt (stmt);
1933 return true;
1936 /* Combine successive equal operations with constants. */
1937 if ((code == BIT_AND_EXPR
1938 || code == BIT_IOR_EXPR
1939 || code == BIT_XOR_EXPR)
1940 && def1_code == code
1941 && TREE_CODE (arg2) == INTEGER_CST
1942 && TREE_CODE (def1_arg2) == INTEGER_CST)
1944 tree cst = fold_build2 (code, TREE_TYPE (arg2),
1945 arg2, def1_arg2);
1946 gimple_assign_set_rhs1 (stmt, def1_arg1);
1947 gimple_assign_set_rhs2 (stmt, cst);
1948 update_stmt (stmt);
1949 return true;
1952 /* Canonicalize X ^ ~0 to ~X. */
1953 if (code == BIT_XOR_EXPR
1954 && TREE_CODE (arg2) == INTEGER_CST
1955 && integer_all_onesp (arg2))
1957 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
1958 gcc_assert (gsi_stmt (*gsi) == stmt);
1959 update_stmt (stmt);
1960 return true;
1963 /* Try simple folding for X op !X, and X op X. */
1964 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
1965 if (res != NULL_TREE)
1967 gimple_assign_set_rhs_from_tree (gsi, res);
1968 update_stmt (gsi_stmt (*gsi));
1969 return true;
1972 if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
/* OCODE is the dual operation; the absorption laws pair AND with IOR.  */
1974 enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
1975 if (def1_code == ocode)
1977 tree x = arg2;
1978 enum tree_code coden;
1979 tree a1, a2;
1980 /* ( X | Y) & X -> X */
1981 /* ( X & Y) | X -> X */
1982 if (x == def1_arg1
1983 || x == def1_arg2)
1985 gimple_assign_set_rhs_from_tree (gsi, x);
1986 update_stmt (gsi_stmt (*gsi));
1987 return true;
1990 defcodefor_name (def1_arg1, &coden, &a1, &a2);
1991 /* (~X | Y) & X -> X & Y */
1992 /* (~X & Y) | X -> X | Y */
1993 if (coden == BIT_NOT_EXPR && a1 == x)
1995 gimple_assign_set_rhs_with_ops (gsi, code,
1996 x, def1_arg2);
1997 gcc_assert (gsi_stmt (*gsi) == stmt);
1998 update_stmt (stmt);
1999 return true;
2001 defcodefor_name (def1_arg2, &coden, &a1, &a2);
2002 /* (Y | ~X) & X -> X & Y */
2003 /* (Y & ~X) | X -> X | Y */
2004 if (coden == BIT_NOT_EXPR && a1 == x)
2006 gimple_assign_set_rhs_with_ops (gsi, code,
2007 x, def1_arg1);
2008 gcc_assert (gsi_stmt (*gsi) == stmt);
2009 update_stmt (stmt);
2010 return true;
/* Same absorption patterns, mirrored: the compound operand on the right.  */
2013 if (def2_code == ocode)
2015 enum tree_code coden;
2016 tree a1;
2017 tree x = arg1;
2018 /* X & ( X | Y) -> X */
2019 /* X | ( X & Y) -> X */
2020 if (x == def2_arg1
2021 || x == def2_arg2)
2023 gimple_assign_set_rhs_from_tree (gsi, x);
2024 update_stmt (gsi_stmt (*gsi));
2025 return true;
2027 defcodefor_name (def2_arg1, &coden, &a1, NULL);
2028 /* (~X | Y) & X -> X & Y */
2029 /* (~X & Y) | X -> X | Y */
2030 if (coden == BIT_NOT_EXPR && a1 == x)
2032 gimple_assign_set_rhs_with_ops (gsi, code,
2033 x, def2_arg2);
2034 gcc_assert (gsi_stmt (*gsi) == stmt);
2035 update_stmt (stmt);
2036 return true;
2038 defcodefor_name (def2_arg2, &coden, &a1, NULL);
2039 /* (Y | ~X) & X -> X & Y */
2040 /* (Y & ~X) | X -> X | Y */
2041 if (coden == BIT_NOT_EXPR && a1 == x)
2043 gimple_assign_set_rhs_with_ops (gsi, code,
2044 x, def2_arg1);
2045 gcc_assert (gsi_stmt (*gsi) == stmt);
2046 update_stmt (stmt);
2047 return true;
2052 return false;
2056 /* Perform re-associations of the plus or minus statement STMT that are
2057 always permitted. Returns true if the CFG was changed. */
2059 static bool
2060 associate_plusminus (gimple_stmt_iterator *gsi)
2062 gimple stmt = gsi_stmt (*gsi);
2063 tree rhs1 = gimple_assign_rhs1 (stmt);
2064 tree rhs2 = gimple_assign_rhs2 (stmt);
2065 enum tree_code code = gimple_assign_rhs_code (stmt);
2066 bool changed;
2068 /* We can't reassociate at all for saturating types. */
2069 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
2070 return false;
2072 /* First contract negates. */
2075 changed = false;
2077 /* A +- (-B) -> A -+ B. */
2078 if (TREE_CODE (rhs2) == SSA_NAME)
2080 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2081 if (is_gimple_assign (def_stmt)
2082 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2083 && can_propagate_from (def_stmt))
2085 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
2086 gimple_assign_set_rhs_code (stmt, code);
2087 rhs2 = gimple_assign_rhs1 (def_stmt);
2088 gimple_assign_set_rhs2 (stmt, rhs2);
2089 gimple_set_modified (stmt, true);
2090 changed = true;
2094 /* (-A) + B -> B - A. */
2095 if (TREE_CODE (rhs1) == SSA_NAME
2096 && code == PLUS_EXPR)
2098 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2099 if (is_gimple_assign (def_stmt)
2100 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2101 && can_propagate_from (def_stmt))
2103 code = MINUS_EXPR;
2104 gimple_assign_set_rhs_code (stmt, code);
2105 rhs1 = rhs2;
2106 gimple_assign_set_rhs1 (stmt, rhs1);
2107 rhs2 = gimple_assign_rhs1 (def_stmt);
2108 gimple_assign_set_rhs2 (stmt, rhs2);
2109 gimple_set_modified (stmt, true);
2110 changed = true;
2114 while (changed);
2116 /* We can't reassociate floating-point or fixed-point plus or minus
2117 because of saturation to +-Inf. */
2118 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
2119 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
2120 goto out;
2122 /* Second match patterns that allow contracting a plus-minus pair
2123 irrespective of overflow issues.
2125 (A +- B) - A -> +- B
2126 (A +- B) -+ B -> A
2127 (CST +- A) +- CST -> CST +- A
2128 (A + CST) +- CST -> A + CST
2129 ~A + A -> -1
2130 ~A + 1 -> -A
2131 A - (A +- B) -> -+ B
2132 A +- (B +- A) -> +- B
2133 CST +- (CST +- A) -> CST +- A
2134 CST +- (A +- CST) -> CST +- A
2135 A + ~A -> -1
2137 via commutating the addition and contracting operations to zero
2138 by reassociation. */
2140 if (TREE_CODE (rhs1) == SSA_NAME)
2142 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2143 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2145 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2146 if (def_code == PLUS_EXPR
2147 || def_code == MINUS_EXPR)
2149 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2150 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2151 if (operand_equal_p (def_rhs1, rhs2, 0)
2152 && code == MINUS_EXPR)
2154 /* (A +- B) - A -> +- B. */
2155 code = ((def_code == PLUS_EXPR)
2156 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
2157 rhs1 = def_rhs2;
2158 rhs2 = NULL_TREE;
2159 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2160 gcc_assert (gsi_stmt (*gsi) == stmt);
2161 gimple_set_modified (stmt, true);
2163 else if (operand_equal_p (def_rhs2, rhs2, 0)
2164 && code != def_code)
2166 /* (A +- B) -+ B -> A. */
2167 code = TREE_CODE (def_rhs1);
2168 rhs1 = def_rhs1;
2169 rhs2 = NULL_TREE;
2170 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2171 gcc_assert (gsi_stmt (*gsi) == stmt);
2172 gimple_set_modified (stmt, true);
2174 else if (TREE_CODE (rhs2) == INTEGER_CST
2175 && TREE_CODE (def_rhs1) == INTEGER_CST)
2177 /* (CST +- A) +- CST -> CST +- A. */
2178 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2179 def_rhs1, rhs2);
2180 if (cst && !TREE_OVERFLOW (cst))
2182 code = def_code;
2183 gimple_assign_set_rhs_code (stmt, code);
2184 rhs1 = cst;
2185 gimple_assign_set_rhs1 (stmt, rhs1);
2186 rhs2 = def_rhs2;
2187 gimple_assign_set_rhs2 (stmt, rhs2);
2188 gimple_set_modified (stmt, true);
2191 else if (TREE_CODE (rhs2) == INTEGER_CST
2192 && TREE_CODE (def_rhs2) == INTEGER_CST
2193 && def_code == PLUS_EXPR)
2195 /* (A + CST) +- CST -> A + CST. */
2196 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2197 def_rhs2, rhs2);
2198 if (cst && !TREE_OVERFLOW (cst))
2200 code = PLUS_EXPR;
2201 gimple_assign_set_rhs_code (stmt, code);
2202 rhs1 = def_rhs1;
2203 gimple_assign_set_rhs1 (stmt, rhs1);
2204 rhs2 = cst;
2205 gimple_assign_set_rhs2 (stmt, rhs2);
2206 gimple_set_modified (stmt, true);
2210 else if (def_code == BIT_NOT_EXPR
2211 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
2213 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2214 if (code == PLUS_EXPR
2215 && operand_equal_p (def_rhs1, rhs2, 0))
2217 /* ~A + A -> -1. */
2218 code = INTEGER_CST;
2219 rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
2220 rhs2 = NULL_TREE;
2221 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2222 gcc_assert (gsi_stmt (*gsi) == stmt);
2223 gimple_set_modified (stmt, true);
2225 else if (code == PLUS_EXPR
2226 && integer_onep (rhs1))
2228 /* ~A + 1 -> -A. */
2229 code = NEGATE_EXPR;
2230 rhs1 = def_rhs1;
2231 rhs2 = NULL_TREE;
2232 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2233 gcc_assert (gsi_stmt (*gsi) == stmt);
2234 gimple_set_modified (stmt, true);
2240 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2242 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2243 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2245 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2246 if (def_code == PLUS_EXPR
2247 || def_code == MINUS_EXPR)
2249 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2250 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2251 if (operand_equal_p (def_rhs1, rhs1, 0)
2252 && code == MINUS_EXPR)
2254 /* A - (A +- B) -> -+ B. */
2255 code = ((def_code == PLUS_EXPR)
2256 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2257 rhs1 = def_rhs2;
2258 rhs2 = NULL_TREE;
2259 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2260 gcc_assert (gsi_stmt (*gsi) == stmt);
2261 gimple_set_modified (stmt, true);
2263 else if (operand_equal_p (def_rhs2, rhs1, 0)
2264 && code != def_code)
2266 /* A +- (B +- A) -> +- B. */
2267 code = ((code == PLUS_EXPR)
2268 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2269 rhs1 = def_rhs1;
2270 rhs2 = NULL_TREE;
2271 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2272 gcc_assert (gsi_stmt (*gsi) == stmt);
2273 gimple_set_modified (stmt, true);
2275 else if (TREE_CODE (rhs1) == INTEGER_CST
2276 && TREE_CODE (def_rhs1) == INTEGER_CST)
2278 /* CST +- (CST +- A) -> CST +- A. */
2279 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2280 rhs1, def_rhs1);
2281 if (cst && !TREE_OVERFLOW (cst))
2283 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2284 gimple_assign_set_rhs_code (stmt, code);
2285 rhs1 = cst;
2286 gimple_assign_set_rhs1 (stmt, rhs1);
2287 rhs2 = def_rhs2;
2288 gimple_assign_set_rhs2 (stmt, rhs2);
2289 gimple_set_modified (stmt, true);
2292 else if (TREE_CODE (rhs1) == INTEGER_CST
2293 && TREE_CODE (def_rhs2) == INTEGER_CST)
2295 /* CST +- (A +- CST) -> CST +- A. */
2296 tree cst = fold_binary (def_code == code
2297 ? PLUS_EXPR : MINUS_EXPR,
2298 TREE_TYPE (rhs2),
2299 rhs1, def_rhs2);
2300 if (cst && !TREE_OVERFLOW (cst))
2302 rhs1 = cst;
2303 gimple_assign_set_rhs1 (stmt, rhs1);
2304 rhs2 = def_rhs1;
2305 gimple_assign_set_rhs2 (stmt, rhs2);
2306 gimple_set_modified (stmt, true);
2310 else if (def_code == BIT_NOT_EXPR
2311 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
2313 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2314 if (code == PLUS_EXPR
2315 && operand_equal_p (def_rhs1, rhs1, 0))
2317 /* A + ~A -> -1. */
2318 code = INTEGER_CST;
2319 rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
2320 rhs2 = NULL_TREE;
2321 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2322 gcc_assert (gsi_stmt (*gsi) == stmt);
2323 gimple_set_modified (stmt, true);
2329 out:
2330 if (gimple_modified_p (stmt))
2332 fold_stmt_inplace (gsi);
2333 update_stmt (stmt);
2334 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2335 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2336 return true;
2339 return false;
2342 /* Associate operands of a POINTER_PLUS_EXPR assignmen at *GSI. Returns
2343 true if anything changed, false otherwise. */
2345 static bool
2346 associate_pointerplus (gimple_stmt_iterator *gsi)
2348 gimple stmt = gsi_stmt (*gsi);
2349 gimple def_stmt;
2350 tree ptr, rhs, algn;
2352 /* Pattern match
2353 tem = (sizetype) ptr;
2354 tem = tem & algn;
2355 tem = -tem;
2356 ... = ptr p+ tem;
2357 and produce the simpler and easier to analyze with respect to alignment
2358 ... = ptr & ~algn; */
2359 ptr = gimple_assign_rhs1 (stmt);
2360 rhs = gimple_assign_rhs2 (stmt);
2361 if (TREE_CODE (rhs) != SSA_NAME)
2362 return false;
2363 def_stmt = SSA_NAME_DEF_STMT (rhs);
2364 if (!is_gimple_assign (def_stmt)
2365 || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
2366 return false;
2367 rhs = gimple_assign_rhs1 (def_stmt);
2368 if (TREE_CODE (rhs) != SSA_NAME)
2369 return false;
2370 def_stmt = SSA_NAME_DEF_STMT (rhs);
2371 if (!is_gimple_assign (def_stmt)
2372 || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
2373 return false;
2374 rhs = gimple_assign_rhs1 (def_stmt);
2375 algn = gimple_assign_rhs2 (def_stmt);
2376 if (TREE_CODE (rhs) != SSA_NAME
2377 || TREE_CODE (algn) != INTEGER_CST)
2378 return false;
2379 def_stmt = SSA_NAME_DEF_STMT (rhs);
2380 if (!is_gimple_assign (def_stmt)
2381 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2382 return false;
2383 if (gimple_assign_rhs1 (def_stmt) != ptr)
2384 return false;
2386 algn = double_int_to_tree (TREE_TYPE (ptr), ~tree_to_double_int (algn));
2387 gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
2388 fold_stmt_inplace (gsi);
2389 update_stmt (stmt);
2391 return true;
2394 /* Combine two conversions in a row for the second conversion at *GSI.
2395 Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
2396 run. Else it returns 0. */
2398 static int
2399 combine_conversions (gimple_stmt_iterator *gsi)
2401 gimple stmt = gsi_stmt (*gsi);
2402 gimple def_stmt;
2403 tree op0, lhs;
2404 enum tree_code code = gimple_assign_rhs_code (stmt);
2405 enum tree_code code2;
2407 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2408 || code == FLOAT_EXPR
2409 || code == FIX_TRUNC_EXPR);
2411 lhs = gimple_assign_lhs (stmt);
2412 op0 = gimple_assign_rhs1 (stmt);
2413 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2415 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
2416 return 1;
2419 if (TREE_CODE (op0) != SSA_NAME)
2420 return 0;
2422 def_stmt = SSA_NAME_DEF_STMT (op0);
2423 if (!is_gimple_assign (def_stmt))
2424 return 0;
2426 code2 = gimple_assign_rhs_code (def_stmt);
2428 if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
2430 tree defop0 = gimple_assign_rhs1 (def_stmt);
2431 tree type = TREE_TYPE (lhs);
2432 tree inside_type = TREE_TYPE (defop0);
2433 tree inter_type = TREE_TYPE (op0);
2434 int inside_int = INTEGRAL_TYPE_P (inside_type);
2435 int inside_ptr = POINTER_TYPE_P (inside_type);
2436 int inside_float = FLOAT_TYPE_P (inside_type);
2437 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2438 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2439 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2440 int inter_int = INTEGRAL_TYPE_P (inter_type);
2441 int inter_ptr = POINTER_TYPE_P (inter_type);
2442 int inter_float = FLOAT_TYPE_P (inter_type);
2443 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2444 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2445 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2446 int final_int = INTEGRAL_TYPE_P (type);
2447 int final_ptr = POINTER_TYPE_P (type);
2448 int final_float = FLOAT_TYPE_P (type);
2449 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2450 unsigned int final_prec = TYPE_PRECISION (type);
2451 int final_unsignedp = TYPE_UNSIGNED (type);
2453 /* Don't propagate ssa names that occur in abnormal phis. */
2454 if (TREE_CODE (defop0) == SSA_NAME
2455 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
2456 return 0;
2458 /* In addition to the cases of two conversions in a row
2459 handled below, if we are converting something to its own
2460 type via an object of identical or wider precision, neither
2461 conversion is needed. */
2462 if (useless_type_conversion_p (type, inside_type)
2463 && (((inter_int || inter_ptr) && final_int)
2464 || (inter_float && final_float))
2465 && inter_prec >= final_prec)
2467 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2468 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2469 update_stmt (stmt);
2470 return remove_prop_source_from_use (op0) ? 2 : 1;
2473 /* Likewise, if the intermediate and initial types are either both
2474 float or both integer, we don't need the middle conversion if the
2475 former is wider than the latter and doesn't change the signedness
2476 (for integers). Avoid this if the final type is a pointer since
2477 then we sometimes need the middle conversion. Likewise if the
2478 final type has a precision not equal to the size of its mode. */
2479 if (((inter_int && inside_int)
2480 || (inter_float && inside_float)
2481 || (inter_vec && inside_vec))
2482 && inter_prec >= inside_prec
2483 && (inter_float || inter_vec
2484 || inter_unsignedp == inside_unsignedp)
2485 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
2486 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2487 && ! final_ptr
2488 && (! final_vec || inter_prec == inside_prec))
2490 gimple_assign_set_rhs1 (stmt, defop0);
2491 update_stmt (stmt);
2492 return remove_prop_source_from_use (op0) ? 2 : 1;
2495 /* If we have a sign-extension of a zero-extended value, we can
2496 replace that by a single zero-extension. Likewise if the
2497 final conversion does not change precision we can drop the
2498 intermediate conversion. */
2499 if (inside_int && inter_int && final_int
2500 && ((inside_prec < inter_prec && inter_prec < final_prec
2501 && inside_unsignedp && !inter_unsignedp)
2502 || final_prec == inter_prec))
2504 gimple_assign_set_rhs1 (stmt, defop0);
2505 update_stmt (stmt);
2506 return remove_prop_source_from_use (op0) ? 2 : 1;
2509 /* Two conversions in a row are not needed unless:
2510 - some conversion is floating-point (overstrict for now), or
2511 - some conversion is a vector (overstrict for now), or
2512 - the intermediate type is narrower than both initial and
2513 final, or
2514 - the intermediate type and innermost type differ in signedness,
2515 and the outermost type is wider than the intermediate, or
2516 - the initial type is a pointer type and the precisions of the
2517 intermediate and final types differ, or
2518 - the final type is a pointer type and the precisions of the
2519 initial and intermediate types differ. */
2520 if (! inside_float && ! inter_float && ! final_float
2521 && ! inside_vec && ! inter_vec && ! final_vec
2522 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2523 && ! (inside_int && inter_int
2524 && inter_unsignedp != inside_unsignedp
2525 && inter_prec < final_prec)
2526 && ((inter_unsignedp && inter_prec > inside_prec)
2527 == (final_unsignedp && final_prec > inter_prec))
2528 && ! (inside_ptr && inter_prec != final_prec)
2529 && ! (final_ptr && inside_prec != inter_prec)
2530 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
2531 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2533 gimple_assign_set_rhs1 (stmt, defop0);
2534 update_stmt (stmt);
2535 return remove_prop_source_from_use (op0) ? 2 : 1;
2538 /* A truncation to an unsigned type should be canonicalized as
2539 bitwise and of a mask. */
2540 if (final_int && inter_int && inside_int
2541 && final_prec == inside_prec
2542 && final_prec > inter_prec
2543 && inter_unsignedp)
2545 tree tem;
2546 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2547 defop0,
2548 double_int_to_tree
2549 (inside_type, double_int::mask (inter_prec)));
2550 if (!useless_type_conversion_p (type, inside_type))
2552 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2553 GSI_SAME_STMT);
2554 gimple_assign_set_rhs1 (stmt, tem);
2556 else
2557 gimple_assign_set_rhs_from_tree (gsi, tem);
2558 update_stmt (gsi_stmt (*gsi));
2559 return 1;
2562 /* If we are converting an integer to a floating-point that can
2563 represent it exactly and back to an integer, we can skip the
2564 floating-point conversion. */
2565 if (inside_int && inter_float && final_int &&
2566 (unsigned) significand_size (TYPE_MODE (inter_type))
2567 >= inside_prec - !inside_unsignedp)
2569 if (useless_type_conversion_p (type, inside_type))
2571 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2572 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2573 update_stmt (stmt);
2574 return remove_prop_source_from_use (op0) ? 2 : 1;
2576 else
2578 gimple_assign_set_rhs1 (stmt, defop0);
2579 gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
2580 update_stmt (stmt);
2581 return remove_prop_source_from_use (op0) ? 2 : 1;
2586 return 0;
2589 /* Combine an element access with a shuffle. Returns true if there were
2590 any changes made, else it returns false. */
2592 static bool
2593 simplify_bitfield_ref (gimple_stmt_iterator *gsi)
2595 gimple stmt = gsi_stmt (*gsi);
2596 gimple def_stmt;
2597 tree op, op0, op1, op2;
2598 tree elem_type;
2599 unsigned idx, n, size;
2600 enum tree_code code;
2602 op = gimple_assign_rhs1 (stmt);
2603 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
2605 op0 = TREE_OPERAND (op, 0);
2606 if (TREE_CODE (op0) != SSA_NAME
2607 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
2608 return false;
2610 def_stmt = get_prop_source_stmt (op0, false, NULL);
2611 if (!def_stmt || !can_propagate_from (def_stmt))
2612 return false;
2614 op1 = TREE_OPERAND (op, 1);
2615 op2 = TREE_OPERAND (op, 2);
2616 code = gimple_assign_rhs_code (def_stmt);
2618 if (code == CONSTRUCTOR)
2620 tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
2621 gimple_assign_rhs1 (def_stmt), op1, op2);
2622 if (!tem || !valid_gimple_rhs_p (tem))
2623 return false;
2624 gimple_assign_set_rhs_from_tree (gsi, tem);
2625 update_stmt (gsi_stmt (*gsi));
2626 return true;
2629 elem_type = TREE_TYPE (TREE_TYPE (op0));
2630 if (TREE_TYPE (op) != elem_type)
2631 return false;
2633 size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
2634 n = TREE_INT_CST_LOW (op1) / size;
2635 if (n != 1)
2636 return false;
2637 idx = TREE_INT_CST_LOW (op2) / size;
2639 if (code == VEC_PERM_EXPR)
2641 tree p, m, index, tem;
2642 unsigned nelts;
2643 m = gimple_assign_rhs3 (def_stmt);
2644 if (TREE_CODE (m) != VECTOR_CST)
2645 return false;
2646 nelts = VECTOR_CST_NELTS (m);
2647 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
2648 idx %= 2 * nelts;
2649 if (idx < nelts)
2651 p = gimple_assign_rhs1 (def_stmt);
2653 else
2655 p = gimple_assign_rhs2 (def_stmt);
2656 idx -= nelts;
2658 index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
2659 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
2660 unshare_expr (p), op1, index);
2661 gimple_assign_set_rhs1 (stmt, tem);
2662 fold_stmt (gsi);
2663 update_stmt (gsi_stmt (*gsi));
2664 return true;
2667 return false;
2670 /* Determine whether applying the 2 permutations (mask1 then mask2)
2671 gives back one of the input. */
2673 static int
2674 is_combined_permutation_identity (tree mask1, tree mask2)
2676 tree mask;
2677 unsigned int nelts, i, j;
2678 bool maybe_identity1 = true;
2679 bool maybe_identity2 = true;
2681 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
2682 && TREE_CODE (mask2) == VECTOR_CST);
2683 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
2684 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
2686 nelts = VECTOR_CST_NELTS (mask);
2687 for (i = 0; i < nelts; i++)
2689 tree val = VECTOR_CST_ELT (mask, i);
2690 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2691 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
2692 if (j == i)
2693 maybe_identity2 = false;
2694 else if (j == i + nelts)
2695 maybe_identity1 = false;
2696 else
2697 return 0;
2699 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
2702 /* Combine a shuffle with its arguments. Returns 1 if there were any
2703 changes made, 2 if cfg-cleanup needs to run. Else it returns 0. */
2705 static int
2706 simplify_permutation (gimple_stmt_iterator *gsi)
2708 gimple stmt = gsi_stmt (*gsi);
2709 gimple def_stmt;
2710 tree op0, op1, op2, op3, arg0, arg1;
2711 enum tree_code code;
2712 bool single_use_op0 = false;
2714 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
2716 op0 = gimple_assign_rhs1 (stmt);
2717 op1 = gimple_assign_rhs2 (stmt);
2718 op2 = gimple_assign_rhs3 (stmt);
2720 if (TREE_CODE (op2) != VECTOR_CST)
2721 return 0;
2723 if (TREE_CODE (op0) == VECTOR_CST)
2725 code = VECTOR_CST;
2726 arg0 = op0;
2728 else if (TREE_CODE (op0) == SSA_NAME)
2730 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
2731 if (!def_stmt || !can_propagate_from (def_stmt))
2732 return 0;
2734 code = gimple_assign_rhs_code (def_stmt);
2735 arg0 = gimple_assign_rhs1 (def_stmt);
2737 else
2738 return 0;
2740 /* Two consecutive shuffles. */
2741 if (code == VEC_PERM_EXPR)
2743 tree orig;
2744 int ident;
2746 if (op0 != op1)
2747 return 0;
2748 op3 = gimple_assign_rhs3 (def_stmt);
2749 if (TREE_CODE (op3) != VECTOR_CST)
2750 return 0;
2751 ident = is_combined_permutation_identity (op3, op2);
2752 if (!ident)
2753 return 0;
2754 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
2755 : gimple_assign_rhs2 (def_stmt);
2756 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
2757 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
2758 gimple_set_num_ops (stmt, 2);
2759 update_stmt (stmt);
2760 return remove_prop_source_from_use (op0) ? 2 : 1;
2763 /* Shuffle of a constructor. */
2764 else if (code == CONSTRUCTOR || code == VECTOR_CST)
2766 tree opt;
2767 bool ret = false;
2768 if (op0 != op1)
2770 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
2771 return 0;
2773 if (TREE_CODE (op1) == VECTOR_CST)
2774 arg1 = op1;
2775 else if (TREE_CODE (op1) == SSA_NAME)
2777 enum tree_code code2;
2779 gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
2780 if (!def_stmt2 || !can_propagate_from (def_stmt2))
2781 return 0;
2783 code2 = gimple_assign_rhs_code (def_stmt2);
2784 if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
2785 return 0;
2786 arg1 = gimple_assign_rhs1 (def_stmt2);
2788 else
2789 return 0;
2791 else
2793 /* Already used twice in this statement. */
2794 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
2795 return 0;
2796 arg1 = arg0;
2798 opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE(op0), arg0, arg1, op2);
2799 if (!opt
2800 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE(opt) != VECTOR_CST))
2801 return 0;
2802 gimple_assign_set_rhs_from_tree (gsi, opt);
2803 update_stmt (gsi_stmt (*gsi));
2804 if (TREE_CODE (op0) == SSA_NAME)
2805 ret = remove_prop_source_from_use (op0);
2806 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
2807 ret |= remove_prop_source_from_use (op1);
2808 return ret ? 2 : 1;
2811 return 0;
2814 /* Recognize a VEC_PERM_EXPR. Returns true if there were any changes. */
2816 static bool
2817 simplify_vector_constructor (gimple_stmt_iterator *gsi)
2819 gimple stmt = gsi_stmt (*gsi);
2820 gimple def_stmt;
2821 tree op, op2, orig, type, elem_type;
2822 unsigned elem_size, nelts, i;
2823 enum tree_code code;
2824 constructor_elt *elt;
2825 unsigned char *sel;
2826 bool maybe_ident;
2828 gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);
2830 op = gimple_assign_rhs1 (stmt);
2831 type = TREE_TYPE (op);
2832 gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);
2834 nelts = TYPE_VECTOR_SUBPARTS (type);
2835 elem_type = TREE_TYPE (type);
2836 elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
2838 sel = XALLOCAVEC (unsigned char, nelts);
2839 orig = NULL;
2840 maybe_ident = true;
2841 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
2843 tree ref, op1;
2845 if (i >= nelts)
2846 return false;
2848 if (TREE_CODE (elt->value) != SSA_NAME)
2849 return false;
2850 def_stmt = get_prop_source_stmt (elt->value, false, NULL);
2851 if (!def_stmt)
2852 return false;
2853 code = gimple_assign_rhs_code (def_stmt);
2854 if (code != BIT_FIELD_REF)
2855 return false;
2856 op1 = gimple_assign_rhs1 (def_stmt);
2857 ref = TREE_OPERAND (op1, 0);
2858 if (orig)
2860 if (ref != orig)
2861 return false;
2863 else
2865 if (TREE_CODE (ref) != SSA_NAME)
2866 return false;
2867 if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
2868 return false;
2869 orig = ref;
2871 if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
2872 return false;
2873 sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
2874 if (sel[i] != i) maybe_ident = false;
2876 if (i < nelts)
2877 return false;
2879 if (maybe_ident)
2880 gimple_assign_set_rhs_from_tree (gsi, orig);
2881 else
2883 tree mask_type, *mask_elts;
2885 if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
2886 return false;
2887 mask_type
2888 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
2889 nelts);
2890 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
2891 || GET_MODE_SIZE (TYPE_MODE (mask_type))
2892 != GET_MODE_SIZE (TYPE_MODE (type)))
2893 return false;
2894 mask_elts = XALLOCAVEC (tree, nelts);
2895 for (i = 0; i < nelts; i++)
2896 mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
2897 op2 = build_vector (mask_type, mask_elts);
2898 gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
2900 update_stmt (gsi_stmt (*gsi));
2901 return true;
2904 /* Main entry point for the forward propagation and statement combine
2905 optimizer. */
2907 static unsigned int
2908 ssa_forward_propagate_and_combine (void)
2910 basic_block bb;
2911 unsigned int todoflags = 0;
2913 cfg_changed = false;
2915 FOR_EACH_BB (bb)
2917 gimple_stmt_iterator gsi;
2919 /* Apply forward propagation to all stmts in the basic-block.
2920 Note we update GSI within the loop as necessary. */
2921 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2923 gimple stmt = gsi_stmt (gsi);
2924 tree lhs, rhs;
2925 enum tree_code code;
2927 if (!is_gimple_assign (stmt))
2929 gsi_next (&gsi);
2930 continue;
2933 lhs = gimple_assign_lhs (stmt);
2934 rhs = gimple_assign_rhs1 (stmt);
2935 code = gimple_assign_rhs_code (stmt);
2936 if (TREE_CODE (lhs) != SSA_NAME
2937 || has_zero_uses (lhs))
2939 gsi_next (&gsi);
2940 continue;
2943 /* If this statement sets an SSA_NAME to an address,
2944 try to propagate the address into the uses of the SSA_NAME. */
2945 if (code == ADDR_EXPR
2946 /* Handle pointer conversions on invariant addresses
2947 as well, as this is valid gimple. */
2948 || (CONVERT_EXPR_CODE_P (code)
2949 && TREE_CODE (rhs) == ADDR_EXPR
2950 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2952 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2953 if ((!base
2954 || !DECL_P (base)
2955 || decl_address_invariant_p (base))
2956 && !stmt_references_abnormal_ssa_name (stmt)
2957 && forward_propagate_addr_expr (lhs, rhs))
2959 release_defs (stmt);
2960 gsi_remove (&gsi, true);
2962 else
2963 gsi_next (&gsi);
2965 else if (code == POINTER_PLUS_EXPR)
2967 tree off = gimple_assign_rhs2 (stmt);
2968 if (TREE_CODE (off) == INTEGER_CST
2969 && can_propagate_from (stmt)
2970 && !simple_iv_increment_p (stmt)
2971 /* ??? Better adjust the interface to that function
2972 instead of building new trees here. */
2973 && forward_propagate_addr_expr
2974 (lhs,
2975 build1_loc (gimple_location (stmt),
2976 ADDR_EXPR, TREE_TYPE (rhs),
2977 fold_build2 (MEM_REF,
2978 TREE_TYPE (TREE_TYPE (rhs)),
2979 rhs,
2980 fold_convert (ptr_type_node,
2981 off)))))
2983 release_defs (stmt);
2984 gsi_remove (&gsi, true);
2986 else if (is_gimple_min_invariant (rhs))
2988 /* Make sure to fold &a[0] + off_1 here. */
2989 fold_stmt_inplace (&gsi);
2990 update_stmt (stmt);
2991 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2992 gsi_next (&gsi);
2994 else
2995 gsi_next (&gsi);
2997 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2999 if (forward_propagate_comparison (&gsi))
3000 cfg_changed = true;
3002 else
3003 gsi_next (&gsi);
3006 /* Combine stmts with the stmts defining their operands.
3007 Note we update GSI within the loop as necessary. */
3008 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
3010 gimple stmt = gsi_stmt (gsi);
3011 bool changed = false;
3013 /* Mark stmt as potentially needing revisiting. */
3014 gimple_set_plf (stmt, GF_PLF_1, false);
3016 switch (gimple_code (stmt))
3018 case GIMPLE_ASSIGN:
3020 tree rhs1 = gimple_assign_rhs1 (stmt);
3021 enum tree_code code = gimple_assign_rhs_code (stmt);
3023 if ((code == BIT_NOT_EXPR
3024 || code == NEGATE_EXPR)
3025 && TREE_CODE (rhs1) == SSA_NAME)
3026 changed = simplify_not_neg_expr (&gsi);
3027 else if (code == COND_EXPR
3028 || code == VEC_COND_EXPR)
3030 /* In this case the entire COND_EXPR is in rhs1. */
3031 if (forward_propagate_into_cond (&gsi)
3032 || combine_cond_exprs (&gsi))
3034 changed = true;
3035 stmt = gsi_stmt (gsi);
3038 else if (TREE_CODE_CLASS (code) == tcc_comparison)
3040 int did_something;
3041 did_something = forward_propagate_into_comparison (&gsi);
3042 if (did_something == 2)
3043 cfg_changed = true;
3044 changed = did_something != 0;
3046 else if (code == BIT_AND_EXPR
3047 || code == BIT_IOR_EXPR
3048 || code == BIT_XOR_EXPR)
3049 changed = simplify_bitwise_binary (&gsi);
3050 else if (code == PLUS_EXPR
3051 || code == MINUS_EXPR)
3052 changed = associate_plusminus (&gsi);
3053 else if (code == POINTER_PLUS_EXPR)
3054 changed = associate_pointerplus (&gsi);
3055 else if (CONVERT_EXPR_CODE_P (code)
3056 || code == FLOAT_EXPR
3057 || code == FIX_TRUNC_EXPR)
3059 int did_something = combine_conversions (&gsi);
3060 if (did_something == 2)
3061 cfg_changed = true;
3062 changed = did_something != 0;
3064 else if (code == VEC_PERM_EXPR)
3066 int did_something = simplify_permutation (&gsi);
3067 if (did_something == 2)
3068 cfg_changed = true;
3069 changed = did_something != 0;
3071 else if (code == BIT_FIELD_REF)
3072 changed = simplify_bitfield_ref (&gsi);
3073 else if (code == CONSTRUCTOR
3074 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
3075 changed = simplify_vector_constructor (&gsi);
3076 break;
3079 case GIMPLE_SWITCH:
3080 changed = simplify_gimple_switch (stmt);
3081 break;
3083 case GIMPLE_COND:
3085 int did_something;
3086 did_something = forward_propagate_into_gimple_cond (stmt);
3087 if (did_something == 2)
3088 cfg_changed = true;
3089 changed = did_something != 0;
3090 break;
3093 case GIMPLE_CALL:
3095 tree callee = gimple_call_fndecl (stmt);
3096 if (callee != NULL_TREE
3097 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
3098 changed = simplify_builtin_call (&gsi, callee);
3099 break;
3102 default:;
3105 if (changed)
3107 /* If the stmt changed then re-visit it and the statements
3108 inserted before it. */
3109 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3110 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
3111 break;
3112 if (gsi_end_p (gsi))
3113 gsi = gsi_start_bb (bb);
3114 else
3115 gsi_next (&gsi);
3117 else
3119 /* Stmt no longer needs to be revisited. */
3120 gimple_set_plf (stmt, GF_PLF_1, true);
3121 gsi_next (&gsi);
3126 if (cfg_changed)
3127 todoflags |= TODO_cleanup_cfg;
3129 return todoflags;
3133 static bool
3134 gate_forwprop (void)
3136 return flag_tree_forwprop;
3139 struct gimple_opt_pass pass_forwprop =
3142 GIMPLE_PASS,
3143 "forwprop", /* name */
3144 OPTGROUP_NONE, /* optinfo_flags */
3145 gate_forwprop, /* gate */
3146 ssa_forward_propagate_and_combine, /* execute */
3147 NULL, /* sub */
3148 NULL, /* next */
3149 0, /* static_pass_number */
3150 TV_TREE_FORWPROP, /* tv_id */
3151 PROP_cfg | PROP_ssa, /* properties_required */
3152 0, /* properties_provided */
3153 0, /* properties_destroyed */
3154 0, /* todo_flags_start */
3155 TODO_update_ssa
3156 | TODO_verify_ssa /* todo_flags_finish */