1 /* Reassociation for trees.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "basic-block.h"
28 #include "tree-pretty-print.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "tree-iterator.h"
36 #include "tree-pass.h"
37 #include "alloc-pool.h"
38 #include "vec.h"
39 #include "langhooks.h"
40 #include "pointer-set.h"
41 #include "cfgloop.h"
42 #include "flags.h"
43 #include "target.h"
44 #include "params.h"
45 #include "diagnostic-core.h"
47 /* This is a simple global reassociation pass. It is, in part, based
48 on the LLVM pass of the same name (they do some things more/less
49 than we do, in different orders, etc.).
51 It consists of five steps:
53 1. Breaking up subtract operations into addition + negate, where
54 it would promote the reassociation of adds.
56 2. Left linearization of the expression trees, so that (A+B)+(C+D)
57 becomes (((A+B)+C)+D), which is easier for us to rewrite later.
58 During linearization, we place the operands of the binary
59 expressions into a vector of operand_entry_t
61 3. Optimization of the operand lists, eliminating things like a +
62 -a, a & a, etc.
64 4. Rewrite the expression trees we linearized and optimized so
65 they are in proper rank order.
67 5. Repropagate negates, as nothing else will clean them up ATM.
69 A bit of theory on #4, since nobody seems to write anything down
70 about why it makes sense to do it the way they do it:
72 We could do this much nicer theoretically, but don't (for reasons
73 explained after how to do it theoretically nice :P).
75 In order to promote the most redundancy elimination, you want
76 binary expressions whose operands are the same rank (or
77 preferably, the same value) exposed to the redundancy eliminator,
78 for possible elimination.
80 So the way to do this if we really cared, is to build the new op
81 tree from the leaves to the roots, merging as you go, and putting the
82 new op on the end of the worklist, until you are left with one
83 thing on the worklist.
85 IE if you have to rewrite the following set of operands (listed with
86 rank in parentheses), with opcode PLUS_EXPR:
88 a (1), b (1), c (1), d (2), e (2)
91 We start with our merge worklist empty, and the ops list with all of
92 those on it.
94 You want to first merge all leaves of the same rank, as much as
95 possible.
97 So first build a binary op of
99 mergetmp = a + b, and put "mergetmp" on the merge worklist.
101 Because there is no three operand form of PLUS_EXPR, c is not going to
102 be exposed to redundancy elimination as a rank 1 operand.
104 So you might as well throw it on the merge worklist (you could also
105 consider it to now be a rank two operand, and merge it with d and e,
106 but in this case, you then have evicted e from a binary op. So at
107 least in this situation, you can't win.)
109 Then build a binary op of d + e
110 mergetmp2 = d + e
112 and put mergetmp2 on the merge worklist.
114 so merge worklist = {mergetmp, c, mergetmp2}
116 Continue building binary ops of these operations until you have only
117 one operation left on the worklist.
119 So we have
121 build binary op
122 mergetmp3 = mergetmp + c
124 worklist = {mergetmp2, mergetmp3}
126 mergetmp4 = mergetmp2 + mergetmp3
128 worklist = {mergetmp4}
130 because we have one operation left, we can now just set the original
131 statement equal to the result of that operation.
133 This will at least expose a + b and d + e to redundancy elimination
134 as binary operations.
136 For extra points, you can reuse the old statements to build the
137 mergetmps, since you shouldn't run out.
139 So why don't we do this?
141 Because it's expensive, and rarely will help. Most trees we are
142 reassociating have 3 or fewer ops. If they have 2 ops, they already
143 will be written into a nice single binary op. If you have 3 ops, a
144 single simple check suffices to tell you whether the first two are of the
145 same rank. If so, you know to order it
147 mergetmp = op1 + op2
148 newstmt = mergetmp + op3
150 instead of
151 mergetmp = op2 + op3
152 newstmt = mergetmp + op1
154 If all three are of the same rank, you can't expose them all in a
155 single binary operator anyway, so the above is *still* the best you
156 can do.
158 Thus, this is what we do. When we have three ops left, we check to see
159 what order to put them in, and call it a day. As a nod to vector sum
160 reduction, we check if any of the ops are really a phi node that is a
161 destructive update for the associating op, and keep the destructive
162 update together for vector sum reduction recognition. */
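/* A minimal, purely illustrative sketch of the steps above (hypothetical
   input, not code from this file).  Suppose the pass sees:

       t = a - b;
       u = c + t;
       v = u + b;

   Step 1 rewrites the subtract as an addition of a negate:

       nb = -b;  t = a + nb;  u = c + t;  v = u + b;

   Step 2 linearizes v's chain into the operand list { a, nb, c, b }
   (in some order).
   Step 3 cancels b against nb (the "a + -a" case), leaving { a, c }.
   Step 4 rewrites the remaining operands back into statements in rank
   order, so v ends up computed as a + c.
   Step 5 would turn any surviving negates back into subtractions; in
   this example none are left.  */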
165 /* Statistics */
166 static struct
168 int linearized;
169 int constants_eliminated;
170 int ops_eliminated;
171 int rewritten;
172 } reassociate_stats;
174 /* Operator, rank pair. */
175 typedef struct operand_entry
177 unsigned int rank;
178 int id;
179 tree op;
180 } *operand_entry_t;
182 static alloc_pool operand_entry_pool;
184 /* This is used to assign a unique ID to each struct operand_entry
185 so that qsort results are identical on different hosts. */
186 static int next_operand_entry_id;
188 /* Starting rank number for a given basic block, so that we can rank
189 operations using unmovable instructions in that BB based on the bb
190 depth. */
191 static long *bb_rank;
193 /* Operand->rank hashtable. */
194 static struct pointer_map_t *operand_rank;
196 /* Forward decls. */
197 static long get_rank (tree);
200 /* Bias amount for loop-carried phis. We want this to be larger than
201 the depth of any reassociation tree we can see, but not larger than
202 the rank difference between two blocks. */
203 #define PHI_LOOP_BIAS (1 << 15)
205 /* Rank assigned to a phi statement. If STMT is a loop-carried phi of
206 an innermost loop, and the phi has only a single use which is inside
207 the loop, then the rank is the block rank of the loop latch plus an
208 extra bias for the loop-carried dependence. This causes expressions
209 calculated into an accumulator variable to be independent for each
210 iteration of the loop. If STMT is some other phi, the rank is the
211 block rank of its containing block. */
212 static long
213 phi_rank (gimple stmt)
215 basic_block bb = gimple_bb (stmt);
216 struct loop *father = bb->loop_father;
217 tree res;
218 unsigned i;
219 use_operand_p use;
220 gimple use_stmt;
222 /* We only care about real loops (those with a latch). */
223 if (!father->latch)
224 return bb_rank[bb->index];
226 /* Interesting phis must be in headers of innermost loops. */
227 if (bb != father->header
228 || father->inner)
229 return bb_rank[bb->index];
231 /* Ignore virtual SSA_NAMEs. */
232 res = gimple_phi_result (stmt);
233 if (!is_gimple_reg (SSA_NAME_VAR (res)))
234 return bb_rank[bb->index];
236 /* The phi definition must have a single use, and that use must be
237 within the loop. Otherwise this isn't an accumulator pattern. */
238 if (!single_imm_use (res, &use, &use_stmt)
239 || gimple_bb (use_stmt)->loop_father != father)
240 return bb_rank[bb->index];
242 /* Look for phi arguments from within the loop. If found, bias this phi. */
243 for (i = 0; i < gimple_phi_num_args (stmt); i++)
245 tree arg = gimple_phi_arg_def (stmt, i);
246 if (TREE_CODE (arg) == SSA_NAME
247 && !SSA_NAME_IS_DEFAULT_DEF (arg))
249 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
250 if (gimple_bb (def_stmt)->loop_father == father)
251 return bb_rank[father->latch->index] + PHI_LOOP_BIAS;
255 /* Must be an uninteresting phi. */
256 return bb_rank[bb->index];
259 /* If EXP is an SSA_NAME defined by a PHI statement that represents a
260 loop-carried dependence of an innermost loop, return TRUE; else
261 return FALSE. */
262 static bool
263 loop_carried_phi (tree exp)
265 gimple phi_stmt;
266 long block_rank;
268 if (TREE_CODE (exp) != SSA_NAME
269 || SSA_NAME_IS_DEFAULT_DEF (exp))
270 return false;
272 phi_stmt = SSA_NAME_DEF_STMT (exp);
274 if (gimple_code (SSA_NAME_DEF_STMT (exp)) != GIMPLE_PHI)
275 return false;
277 /* Non-loop-carried phis have block rank. Loop-carried phis have
278 an additional bias added in. If this phi doesn't have block rank,
279 it's biased and should not be propagated. */
280 block_rank = bb_rank[gimple_bb (phi_stmt)->index];
282 if (phi_rank (phi_stmt) != block_rank)
283 return true;
285 return false;
288 /* Return the maximum of RANK and the rank that should be propagated
289 from expression OP. For most operands, this is just the rank of OP.
290 For loop-carried phis, the value is zero to avoid undoing the bias
291 in favor of the phi. */
292 static long
293 propagate_rank (long rank, tree op)
295 long op_rank;
297 if (loop_carried_phi (op))
298 return rank;
300 op_rank = get_rank (op);
302 return MAX (rank, op_rank);
305 /* Look up the operand rank structure for expression E. */
307 static inline long
308 find_operand_rank (tree e)
310 void **slot = pointer_map_contains (operand_rank, e);
311 return slot ? (long) (intptr_t) *slot : -1;
314 /* Insert {E,RANK} into the operand rank hashtable. */
316 static inline void
317 insert_operand_rank (tree e, long rank)
319 void **slot;
320 gcc_assert (rank > 0);
321 slot = pointer_map_insert (operand_rank, e);
322 gcc_assert (!*slot);
323 *slot = (void *) (intptr_t) rank;
326 /* Given an expression E, return the rank of the expression. */
328 static long
329 get_rank (tree e)
331 /* Constants have rank 0. */
332 if (is_gimple_min_invariant (e))
333 return 0;
335 /* SSA_NAME's have the rank of the expression they are the result of.
337 For globals and uninitialized values, the rank is 0.
338 For function arguments, use the pre-setup rank.
339 For PHI nodes, stores, asm statements, etc, we use the rank of
340 the BB.
341 For simple operations, the rank is the maximum rank of any of
342 its operands, or the bb_rank, whichever is less.
343 I make no claims that this is optimal; however, it gives good
344 results. */
346 /* We make an exception to the normal ranking system to break
347 dependences of accumulator variables in loops. Suppose we
348 have a simple one-block loop containing:
350 x_1 = phi(x_0, x_2)
351 b = a + x_1
352 c = b + d
353 x_2 = c + e
355 As shown, each iteration of the calculation into x is fully
356 dependent upon the iteration before it. We would prefer to
357 see this in the form:
359 x_1 = phi(x_0, x_2)
360 b = a + d
361 c = b + e
362 x_2 = c + x_1
364 If the loop is unrolled, the calculations of b and c from
365 different iterations can be interleaved.
367 To obtain this result during reassociation, we bias the rank
368 of the phi definition x_1 upward, when it is recognized as an
369 accumulator pattern. The artificial rank causes it to be
370 added last, providing the desired independence. */
372 if (TREE_CODE (e) == SSA_NAME)
374 gimple stmt;
375 long rank;
376 int i, n;
377 tree op;
379 if (TREE_CODE (SSA_NAME_VAR (e)) == PARM_DECL
380 && SSA_NAME_IS_DEFAULT_DEF (e))
381 return find_operand_rank (e);
383 stmt = SSA_NAME_DEF_STMT (e);
384 if (gimple_bb (stmt) == NULL)
385 return 0;
387 if (gimple_code (stmt) == GIMPLE_PHI)
388 return phi_rank (stmt);
390 if (!is_gimple_assign (stmt)
391 || gimple_vdef (stmt))
392 return bb_rank[gimple_bb (stmt)->index];
394 /* If we already have a rank for this expression, use that. */
395 rank = find_operand_rank (e);
396 if (rank != -1)
397 return rank;
399 /* Otherwise, find the maximum rank for the operands. As an
400 exception, remove the bias from loop-carried phis when propagating
401 the rank so that dependent operations are not also biased. */
402 rank = 0;
403 if (gimple_assign_single_p (stmt))
405 tree rhs = gimple_assign_rhs1 (stmt);
406 n = TREE_OPERAND_LENGTH (rhs);
407 if (n == 0)
408 rank = propagate_rank (rank, rhs);
409 else
411 for (i = 0; i < n; i++)
413 op = TREE_OPERAND (rhs, i);
415 if (op != NULL_TREE)
416 rank = propagate_rank (rank, op);
420 else
422 n = gimple_num_ops (stmt);
423 for (i = 1; i < n; i++)
425 op = gimple_op (stmt, i);
426 gcc_assert (op);
427 rank = propagate_rank (rank, op);
431 if (dump_file && (dump_flags & TDF_DETAILS))
433 fprintf (dump_file, "Rank for ");
434 print_generic_expr (dump_file, e, 0);
435 fprintf (dump_file, " is %ld\n", (rank + 1));
438 /* Note the rank in the hashtable so we don't recompute it. */
439 insert_operand_rank (e, (rank + 1));
440 return (rank + 1);
443 /* Globals, etc, are rank 0 */
444 return 0;
447 DEF_VEC_P(operand_entry_t);
448 DEF_VEC_ALLOC_P(operand_entry_t, heap);
450 /* We want integer ones to end up last no matter what, since they are
451 the ones we can do the most with. */
452 #define INTEGER_CONST_TYPE 1 << 3
453 #define FLOAT_CONST_TYPE 1 << 2
454 #define OTHER_CONST_TYPE 1 << 1
456 /* Classify an invariant tree into integer, float, or other, so that
457 we can sort them to be near other constants of the same type. */
458 static inline int
459 constant_type (tree t)
461 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
462 return INTEGER_CONST_TYPE;
463 else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (t)))
464 return FLOAT_CONST_TYPE;
465 else
466 return OTHER_CONST_TYPE;
469 /* qsort comparison function to sort operand entries PA and PB by rank
470 so that the sorted array is ordered by rank in decreasing order. */
471 static int
472 sort_by_operand_rank (const void *pa, const void *pb)
474 const operand_entry_t oea = *(const operand_entry_t *)pa;
475 const operand_entry_t oeb = *(const operand_entry_t *)pb;
477 /* It's nicer for optimize_expression if constants that are likely
478 to fold when added/multiplied/whatever are put next to each
479 other. Since all constants have rank 0, order them by type. */
480 if (oeb->rank == 0 && oea->rank == 0)
482 if (constant_type (oeb->op) != constant_type (oea->op))
483 return constant_type (oeb->op) - constant_type (oea->op);
484 else
485 /* To make sorting result stable, we use unique IDs to determine
486 order. */
487 return oeb->id - oea->id;
490 /* Lastly, make sure the versions that are the same go next to each
491 other. We use SSA_NAME_VERSION because it's stable. */
492 if ((oeb->rank - oea->rank == 0)
493 && TREE_CODE (oea->op) == SSA_NAME
494 && TREE_CODE (oeb->op) == SSA_NAME)
496 if (SSA_NAME_VERSION (oeb->op) != SSA_NAME_VERSION (oea->op))
497 return SSA_NAME_VERSION (oeb->op) - SSA_NAME_VERSION (oea->op);
498 else
499 return oeb->id - oea->id;
502 if (oeb->rank != oea->rank)
503 return oeb->rank - oea->rank;
504 else
505 return oeb->id - oea->id;
508 /* Add an operand entry to *OPS for the tree operand OP. */
510 static void
511 add_to_ops_vec (VEC(operand_entry_t, heap) **ops, tree op)
513 operand_entry_t oe = (operand_entry_t) pool_alloc (operand_entry_pool);
515 oe->op = op;
516 oe->rank = get_rank (op);
517 oe->id = next_operand_entry_id++;
518 VEC_safe_push (operand_entry_t, heap, *ops, oe);
521 /* Return true if STMT is a reassociable operation containing a binary
522 operation with tree code CODE, and is inside LOOP. */
524 static bool
525 is_reassociable_op (gimple stmt, enum tree_code code, struct loop *loop)
527 basic_block bb = gimple_bb (stmt);
529 if (gimple_bb (stmt) == NULL)
530 return false;
532 if (!flow_bb_inside_loop_p (loop, bb))
533 return false;
535 if (is_gimple_assign (stmt)
536 && gimple_assign_rhs_code (stmt) == code
537 && has_single_use (gimple_assign_lhs (stmt)))
538 return true;
540 return false;
544 /* Given NAME, if NAME is defined by a unary operation OPCODE, return the
545 operand of that operation. Otherwise, return NULL_TREE. */
547 static tree
548 get_unary_op (tree name, enum tree_code opcode)
550 gimple stmt = SSA_NAME_DEF_STMT (name);
552 if (!is_gimple_assign (stmt))
553 return NULL_TREE;
555 if (gimple_assign_rhs_code (stmt) == opcode)
556 return gimple_assign_rhs1 (stmt);
557 return NULL_TREE;
560 /* If CURR and LAST are a pair of ops that OPCODE allows us to
561 eliminate through equivalences, do so, remove them from OPS, and
562 return true. Otherwise, return false. */
564 static bool
565 eliminate_duplicate_pair (enum tree_code opcode,
566 VEC (operand_entry_t, heap) **ops,
567 bool *all_done,
568 unsigned int i,
569 operand_entry_t curr,
570 operand_entry_t last)
573 /* If we have two of the same op, and the opcode is &, |, min, or max,
574 we can eliminate one of them.
575 If we have two of the same op, and the opcode is ^, we can
576 eliminate both of them. */
578 if (last && last->op == curr->op)
580 switch (opcode)
582 case MAX_EXPR:
583 case MIN_EXPR:
584 case BIT_IOR_EXPR:
585 case BIT_AND_EXPR:
586 if (dump_file && (dump_flags & TDF_DETAILS))
588 fprintf (dump_file, "Equivalence: ");
589 print_generic_expr (dump_file, curr->op, 0);
590 fprintf (dump_file, " [&|minmax] ");
591 print_generic_expr (dump_file, last->op, 0);
592 fprintf (dump_file, " -> ");
593 print_generic_stmt (dump_file, last->op, 0);
596 VEC_ordered_remove (operand_entry_t, *ops, i);
597 reassociate_stats.ops_eliminated ++;
599 return true;
601 case BIT_XOR_EXPR:
602 if (dump_file && (dump_flags & TDF_DETAILS))
604 fprintf (dump_file, "Equivalence: ");
605 print_generic_expr (dump_file, curr->op, 0);
606 fprintf (dump_file, " ^ ");
607 print_generic_expr (dump_file, last->op, 0);
608 fprintf (dump_file, " -> nothing\n");
611 reassociate_stats.ops_eliminated += 2;
613 if (VEC_length (operand_entry_t, *ops) == 2)
615 VEC_free (operand_entry_t, heap, *ops);
616 *ops = NULL;
617 add_to_ops_vec (ops, build_zero_cst (TREE_TYPE (last->op)));
618 *all_done = true;
620 else
622 VEC_ordered_remove (operand_entry_t, *ops, i-1);
623 VEC_ordered_remove (operand_entry_t, *ops, i-1);
626 return true;
628 default:
629 break;
632 return false;
635 static VEC(tree, heap) *plus_negates;
637 /* If OPCODE is PLUS_EXPR and CURR->OP is a negate expression or a bitwise not
638 expression, look in OPS for a corresponding positive operation to cancel
639 it out. If we find one, remove the other from OPS, replace
640 OPS[CURRINDEX] with 0 or -1, respectively, and return true. Otherwise,
641 return false. */
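/* For instance (illustrative only): with OPS = { x, y, t } where
   t = -x, the x/t pair cancels and OPS ends up as { y, 0 }; with
   t = ~x instead, the pair folds to all ones and OPS ends up as
   { y, -1 }, because x + ~x == -1 in two's complement arithmetic.  */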
643 static bool
644 eliminate_plus_minus_pair (enum tree_code opcode,
645 VEC (operand_entry_t, heap) **ops,
646 unsigned int currindex,
647 operand_entry_t curr)
649 tree negateop;
650 tree notop;
651 unsigned int i;
652 operand_entry_t oe;
654 if (opcode != PLUS_EXPR || TREE_CODE (curr->op) != SSA_NAME)
655 return false;
657 negateop = get_unary_op (curr->op, NEGATE_EXPR);
658 notop = get_unary_op (curr->op, BIT_NOT_EXPR);
659 if (negateop == NULL_TREE && notop == NULL_TREE)
660 return false;
662 /* Any non-negated version will have a rank that is one less than
663 the current rank. So once we hit those ranks, if we don't find
664 one, we can stop. */
666 for (i = currindex + 1;
667 VEC_iterate (operand_entry_t, *ops, i, oe)
668 && oe->rank >= curr->rank - 1 ;
669 i++)
671 if (oe->op == negateop)
674 if (dump_file && (dump_flags & TDF_DETAILS))
676 fprintf (dump_file, "Equivalence: ");
677 print_generic_expr (dump_file, negateop, 0);
678 fprintf (dump_file, " + -");
679 print_generic_expr (dump_file, oe->op, 0);
680 fprintf (dump_file, " -> 0\n");
683 VEC_ordered_remove (operand_entry_t, *ops, i);
684 add_to_ops_vec (ops, build_zero_cst (TREE_TYPE (oe->op)));
685 VEC_ordered_remove (operand_entry_t, *ops, currindex);
686 reassociate_stats.ops_eliminated ++;
688 return true;
690 else if (oe->op == notop)
692 tree op_type = TREE_TYPE (oe->op);
694 if (dump_file && (dump_flags & TDF_DETAILS))
696 fprintf (dump_file, "Equivalence: ");
697 print_generic_expr (dump_file, notop, 0);
698 fprintf (dump_file, " + ~");
699 print_generic_expr (dump_file, oe->op, 0);
700 fprintf (dump_file, " -> -1\n");
703 VEC_ordered_remove (operand_entry_t, *ops, i);
704 add_to_ops_vec (ops, build_int_cst_type (op_type, -1));
705 VEC_ordered_remove (operand_entry_t, *ops, currindex);
706 reassociate_stats.ops_eliminated ++;
708 return true;
712 /* CURR->OP is a negate expr in a plus expr: save it for later
713 inspection in repropagate_negates(). */
714 if (negateop != NULL_TREE)
715 VEC_safe_push (tree, heap, plus_negates, curr->op);
717 return false;
720 /* If OPCODE is BIT_IOR_EXPR or BIT_AND_EXPR, and CURR->OP is really a
721 bitwise not expression, look in OPS for a corresponding operand to
722 cancel it out. If we find one, remove the other from OPS, replace
723 OPS[CURRINDEX] with 0 (for &) or -1 (for |), and return true. Otherwise, return
724 false. */
726 static bool
727 eliminate_not_pairs (enum tree_code opcode,
728 VEC (operand_entry_t, heap) **ops,
729 unsigned int currindex,
730 operand_entry_t curr)
732 tree notop;
733 unsigned int i;
734 operand_entry_t oe;
736 if ((opcode != BIT_IOR_EXPR && opcode != BIT_AND_EXPR)
737 || TREE_CODE (curr->op) != SSA_NAME)
738 return false;
740 notop = get_unary_op (curr->op, BIT_NOT_EXPR);
741 if (notop == NULL_TREE)
742 return false;
744 /* Any non-not version will have a rank that is one less than
745 the current rank. So once we hit those ranks, if we don't find
746 one, we can stop. */
748 for (i = currindex + 1;
749 VEC_iterate (operand_entry_t, *ops, i, oe)
750 && oe->rank >= curr->rank - 1;
751 i++)
753 if (oe->op == notop)
755 if (dump_file && (dump_flags & TDF_DETAILS))
757 fprintf (dump_file, "Equivalence: ");
758 print_generic_expr (dump_file, notop, 0);
759 if (opcode == BIT_AND_EXPR)
760 fprintf (dump_file, " & ~");
761 else if (opcode == BIT_IOR_EXPR)
762 fprintf (dump_file, " | ~");
763 print_generic_expr (dump_file, oe->op, 0);
764 if (opcode == BIT_AND_EXPR)
765 fprintf (dump_file, " -> 0\n");
766 else if (opcode == BIT_IOR_EXPR)
767 fprintf (dump_file, " -> -1\n");
770 if (opcode == BIT_AND_EXPR)
771 oe->op = build_zero_cst (TREE_TYPE (oe->op));
772 else if (opcode == BIT_IOR_EXPR)
773 oe->op = build_low_bits_mask (TREE_TYPE (oe->op),
774 TYPE_PRECISION (TREE_TYPE (oe->op)));
776 reassociate_stats.ops_eliminated
777 += VEC_length (operand_entry_t, *ops) - 1;
778 VEC_free (operand_entry_t, heap, *ops);
779 *ops = NULL;
780 VEC_safe_push (operand_entry_t, heap, *ops, oe);
781 return true;
785 return false;
788 /* Use constant value that may be present in OPS to try to eliminate
789 operands. Note that this function is only really used when we've
790 eliminated ops for other reasons, or merged constants. Across
791 single statements, fold already does all of this, plus more. There
792 is little point in duplicating logic, so I've only included the
793 identities that I could ever construct testcases to trigger. */
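/* The identities handled below are, roughly (an illustration only; see the
   code for the exact guards on floating-point types):

     x & 0  -> 0        x & -1 -> x
     x | -1 -> -1       x | 0  -> x
     x * 0  -> 0        x * 1  -> x
     x + 0  -> x        x ^ 0  -> x        x - 0 -> x  */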
795 static void
796 eliminate_using_constants (enum tree_code opcode,
797 VEC(operand_entry_t, heap) **ops)
799 operand_entry_t oelast = VEC_last (operand_entry_t, *ops);
800 tree type = TREE_TYPE (oelast->op);
802 if (oelast->rank == 0
803 && (INTEGRAL_TYPE_P (type) || FLOAT_TYPE_P (type)))
805 switch (opcode)
807 case BIT_AND_EXPR:
808 if (integer_zerop (oelast->op))
810 if (VEC_length (operand_entry_t, *ops) != 1)
812 if (dump_file && (dump_flags & TDF_DETAILS))
813 fprintf (dump_file, "Found & 0, removing all other ops\n");
815 reassociate_stats.ops_eliminated
816 += VEC_length (operand_entry_t, *ops) - 1;
818 VEC_free (operand_entry_t, heap, *ops);
819 *ops = NULL;
820 VEC_safe_push (operand_entry_t, heap, *ops, oelast);
821 return;
824 else if (integer_all_onesp (oelast->op))
826 if (VEC_length (operand_entry_t, *ops) != 1)
828 if (dump_file && (dump_flags & TDF_DETAILS))
829 fprintf (dump_file, "Found & -1, removing\n");
830 VEC_pop (operand_entry_t, *ops);
831 reassociate_stats.ops_eliminated++;
834 break;
835 case BIT_IOR_EXPR:
836 if (integer_all_onesp (oelast->op))
838 if (VEC_length (operand_entry_t, *ops) != 1)
840 if (dump_file && (dump_flags & TDF_DETAILS))
841 fprintf (dump_file, "Found | -1, removing all other ops\n");
843 reassociate_stats.ops_eliminated
844 += VEC_length (operand_entry_t, *ops) - 1;
846 VEC_free (operand_entry_t, heap, *ops);
847 *ops = NULL;
848 VEC_safe_push (operand_entry_t, heap, *ops, oelast);
849 return;
852 else if (integer_zerop (oelast->op))
854 if (VEC_length (operand_entry_t, *ops) != 1)
856 if (dump_file && (dump_flags & TDF_DETAILS))
857 fprintf (dump_file, "Found | 0, removing\n");
858 VEC_pop (operand_entry_t, *ops);
859 reassociate_stats.ops_eliminated++;
862 break;
863 case MULT_EXPR:
864 if (integer_zerop (oelast->op)
865 || (FLOAT_TYPE_P (type)
866 && !HONOR_NANS (TYPE_MODE (type))
867 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
868 && real_zerop (oelast->op)))
870 if (VEC_length (operand_entry_t, *ops) != 1)
872 if (dump_file && (dump_flags & TDF_DETAILS))
873 fprintf (dump_file, "Found * 0, removing all other ops\n");
875 reassociate_stats.ops_eliminated
876 += VEC_length (operand_entry_t, *ops) - 1;
877 VEC_free (operand_entry_t, heap, *ops);
878 *ops = NULL;
879 VEC_safe_push (operand_entry_t, heap, *ops, oelast);
880 return;
883 else if (integer_onep (oelast->op)
884 || (FLOAT_TYPE_P (type)
885 && !HONOR_SNANS (TYPE_MODE (type))
886 && real_onep (oelast->op)))
888 if (VEC_length (operand_entry_t, *ops) != 1)
890 if (dump_file && (dump_flags & TDF_DETAILS))
891 fprintf (dump_file, "Found * 1, removing\n");
892 VEC_pop (operand_entry_t, *ops);
893 reassociate_stats.ops_eliminated++;
894 return;
897 break;
898 case BIT_XOR_EXPR:
899 case PLUS_EXPR:
900 case MINUS_EXPR:
901 if (integer_zerop (oelast->op)
902 || (FLOAT_TYPE_P (type)
903 && (opcode == PLUS_EXPR || opcode == MINUS_EXPR)
904 && fold_real_zero_addition_p (type, oelast->op,
905 opcode == MINUS_EXPR)))
907 if (VEC_length (operand_entry_t, *ops) != 1)
909 if (dump_file && (dump_flags & TDF_DETAILS))
910 fprintf (dump_file, "Found [|^+] 0, removing\n");
911 VEC_pop (operand_entry_t, *ops);
912 reassociate_stats.ops_eliminated++;
913 return;
916 break;
917 default:
918 break;
924 static void linearize_expr_tree (VEC(operand_entry_t, heap) **, gimple,
925 bool, bool);
927 /* Structure for tracking and counting operands. */
928 typedef struct oecount_s {
929 int cnt;
930 int id;
931 enum tree_code oecode;
932 tree op;
933 } oecount;
935 DEF_VEC_O(oecount);
936 DEF_VEC_ALLOC_O(oecount,heap);
938 /* The heap for the oecount hashtable and the sorted list of operands. */
939 static VEC (oecount, heap) *cvec;
941 /* Hash function for oecount. */
943 static hashval_t
944 oecount_hash (const void *p)
946 const oecount *c = VEC_index (oecount, cvec, (size_t)p - 42);
947 return htab_hash_pointer (c->op) ^ (hashval_t)c->oecode;
950 /* Comparison function for oecount. */
952 static int
953 oecount_eq (const void *p1, const void *p2)
955 const oecount *c1 = VEC_index (oecount, cvec, (size_t)p1 - 42);
956 const oecount *c2 = VEC_index (oecount, cvec, (size_t)p2 - 42);
957 return (c1->oecode == c2->oecode
958 && c1->op == c2->op);
961 /* Comparison function for qsort sorting oecount elements by count. */
963 static int
964 oecount_cmp (const void *p1, const void *p2)
966 const oecount *c1 = (const oecount *)p1;
967 const oecount *c2 = (const oecount *)p2;
968 if (c1->cnt != c2->cnt)
969 return c1->cnt - c2->cnt;
970 else
971 /* If counts are identical, use unique IDs to stabilize qsort. */
972 return c1->id - c2->id;
975 /* Walks the linear chain with result *DEF searching for an operation
976 with operand OP and code OPCODE, removing it from the chain. *DEF
977 is updated if there is only one operand but no operation left. */
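/* Illustrative example (hypothetical SSA names): given the chain

       t1 = x * y;  t2 = t1 * z;  *def = t2;

   calling zero_one_operation (&def, MULT_EXPR, y) removes the
   multiplication by y: x is propagated into t2's use of t1, the
   statement defining t1 is deleted, and t2 becomes x * z.  */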
979 static void
980 zero_one_operation (tree *def, enum tree_code opcode, tree op)
982 gimple stmt = SSA_NAME_DEF_STMT (*def);
986 tree name = gimple_assign_rhs1 (stmt);
988 /* If this is the operation we are looking for and one of the operands
989 is ours, simply propagate the other operand into the stmt's
990 single use. */
991 if (gimple_assign_rhs_code (stmt) == opcode
992 && (name == op
993 || gimple_assign_rhs2 (stmt) == op))
995 gimple use_stmt;
996 use_operand_p use;
997 gimple_stmt_iterator gsi;
998 if (name == op)
999 name = gimple_assign_rhs2 (stmt);
1000 gcc_assert (has_single_use (gimple_assign_lhs (stmt)));
1001 single_imm_use (gimple_assign_lhs (stmt), &use, &use_stmt);
1002 if (gimple_assign_lhs (stmt) == *def)
1003 *def = name;
1004 SET_USE (use, name);
1005 if (TREE_CODE (name) != SSA_NAME)
1006 update_stmt (use_stmt);
1007 gsi = gsi_for_stmt (stmt);
1008 gsi_remove (&gsi, true);
1009 release_defs (stmt);
1010 return;
1013 /* Continue walking the chain. */
1014 gcc_assert (name != op
1015 && TREE_CODE (name) == SSA_NAME);
1016 stmt = SSA_NAME_DEF_STMT (name);
1018 while (1);
1021 /* Builds one statement performing OP1 OPCODE OP2 using TMPVAR for
1022 the result. Places the statement after the definition of either
1023 OP1 or OP2. Returns the new statement. */
1025 static gimple
1026 build_and_add_sum (tree tmpvar, tree op1, tree op2, enum tree_code opcode)
1028 gimple op1def = NULL, op2def = NULL;
1029 gimple_stmt_iterator gsi;
1030 tree op;
1031 gimple sum;
1033 /* Create the addition statement. */
1034 sum = gimple_build_assign_with_ops (opcode, tmpvar, op1, op2);
1035 op = make_ssa_name (tmpvar, sum);
1036 gimple_assign_set_lhs (sum, op);
1038 /* Find an insertion place and insert. */
1039 if (TREE_CODE (op1) == SSA_NAME)
1040 op1def = SSA_NAME_DEF_STMT (op1);
1041 if (TREE_CODE (op2) == SSA_NAME)
1042 op2def = SSA_NAME_DEF_STMT (op2);
1043 if ((!op1def || gimple_nop_p (op1def))
1044 && (!op2def || gimple_nop_p (op2def)))
1046 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
1047 gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
1049 else if ((!op1def || gimple_nop_p (op1def))
1050 || (op2def && !gimple_nop_p (op2def)
1051 && stmt_dominates_stmt_p (op1def, op2def)))
1053 if (gimple_code (op2def) == GIMPLE_PHI)
1055 gsi = gsi_after_labels (gimple_bb (op2def));
1056 gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
1058 else
1060 if (!stmt_ends_bb_p (op2def))
1062 gsi = gsi_for_stmt (op2def);
1063 gsi_insert_after (&gsi, sum, GSI_NEW_STMT);
1065 else
1067 edge e;
1068 edge_iterator ei;
1070 FOR_EACH_EDGE (e, ei, gimple_bb (op2def)->succs)
1071 if (e->flags & EDGE_FALLTHRU)
1072 gsi_insert_on_edge_immediate (e, sum);
1076 else
1078 if (gimple_code (op1def) == GIMPLE_PHI)
1080 gsi = gsi_after_labels (gimple_bb (op1def));
1081 gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
1083 else
1085 if (!stmt_ends_bb_p (op1def))
1087 gsi = gsi_for_stmt (op1def);
1088 gsi_insert_after (&gsi, sum, GSI_NEW_STMT);
1090 else
1092 edge e;
1093 edge_iterator ei;
1095 FOR_EACH_EDGE (e, ei, gimple_bb (op1def)->succs)
1096 if (e->flags & EDGE_FALLTHRU)
1097 gsi_insert_on_edge_immediate (e, sum);
1101 update_stmt (sum);
1103 return sum;
1106 /* Perform un-distribution of divisions and multiplications.
1107 A * X + B * X is transformed into (A + B) * X and A / X + B / X
1108 to (A + B) / X for real X.
1110 The algorithm is organized as follows.
1112 - First we walk the addition chain *OPS looking for summands that
1113 are defined by a multiplication or a real division. This results
1114 in the candidates bitmap with relevant indices into *OPS.
1116 - Second we build the chains of multiplications or divisions for
1117 these candidates, counting the number of occurrences of (operand, code)
1118 pairs in all of the candidates' chains.
1120 - Third we sort the (operand, code) pairs by number of occurrences and
1121 process them starting with the pair with the most uses.
1123 * For each such pair we walk the candidates again to build a
1124 second candidate bitmap noting all multiplication/division chains
1125 that have at least one occurrence of (operand, code).
1127 * We build an alternate addition chain only covering these
1128 candidates with one (operand, code) operation removed from their
1129 multiplication/division chain.
1131 * The first candidate gets replaced by the alternate addition chain
1132 multiplied/divided by the operand.
1134 * All candidate chains get disabled for further processing and
1135 processing of (operand, code) pairs continues.
1137 The alternate addition chains built are re-processed by the main
1138 reassociation algorithm which allows optimizing a * x * y + b * y * x
1139 to (a + b) * x * y in one invocation of the reassociation pass. */
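/* A small worked illustration of the algorithm above (hypothetical
   operands).  For the addition chain  a*x + b*x + c :

     - candidates defined by MULT_EXPR:          { a*x, b*x }
     - (operand, code) occurrence counts:        (x, *) = 2,
                                                 (a, *) = 1, (b, *) = 1
     - the most frequent pair (x, *) selects both candidates; removing x
       from their chains leaves a and b, which are summed into  t = a + b
     - the first candidate is replaced by  t * x  and the second by 0, so
       the outer chain becomes  t*x + 0 + c  and the zero is cleaned up by
       the normal operand-list optimization afterwards.  */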
1141 static bool
1142 undistribute_ops_list (enum tree_code opcode,
1143 VEC (operand_entry_t, heap) **ops, struct loop *loop)
1145 unsigned int length = VEC_length (operand_entry_t, *ops);
1146 operand_entry_t oe1;
1147 unsigned i, j;
1148 sbitmap candidates, candidates2;
1149 unsigned nr_candidates, nr_candidates2;
1150 sbitmap_iterator sbi0;
1151 VEC (operand_entry_t, heap) **subops;
1152 htab_t ctable;
1153 bool changed = false;
1154 int next_oecount_id = 0;
1156 if (length <= 1
1157 || opcode != PLUS_EXPR)
1158 return false;
1160 /* Build a list of candidates to process. */
1161 candidates = sbitmap_alloc (length);
1162 sbitmap_zero (candidates);
1163 nr_candidates = 0;
1164 FOR_EACH_VEC_ELT (operand_entry_t, *ops, i, oe1)
1166 enum tree_code dcode;
1167 gimple oe1def;
1169 if (TREE_CODE (oe1->op) != SSA_NAME)
1170 continue;
1171 oe1def = SSA_NAME_DEF_STMT (oe1->op);
1172 if (!is_gimple_assign (oe1def))
1173 continue;
1174 dcode = gimple_assign_rhs_code (oe1def);
1175 if ((dcode != MULT_EXPR
1176 && dcode != RDIV_EXPR)
1177 || !is_reassociable_op (oe1def, dcode, loop))
1178 continue;
1180 SET_BIT (candidates, i);
1181 nr_candidates++;
1184 if (nr_candidates < 2)
1186 sbitmap_free (candidates);
1187 return false;
1190 if (dump_file && (dump_flags & TDF_DETAILS))
1192 fprintf (dump_file, "searching for un-distribute opportunities ");
1193 print_generic_expr (dump_file,
1194 VEC_index (operand_entry_t, *ops,
1195 sbitmap_first_set_bit (candidates))->op, 0);
1196 fprintf (dump_file, " %d\n", nr_candidates);
1199 /* Build linearized sub-operand lists and the counting table. */
1200 cvec = NULL;
1201 ctable = htab_create (15, oecount_hash, oecount_eq, NULL);
1202 subops = XCNEWVEC (VEC (operand_entry_t, heap) *,
1203 VEC_length (operand_entry_t, *ops));
1204 EXECUTE_IF_SET_IN_SBITMAP (candidates, 0, i, sbi0)
1206 gimple oedef;
1207 enum tree_code oecode;
1208 unsigned j;
1210 oedef = SSA_NAME_DEF_STMT (VEC_index (operand_entry_t, *ops, i)->op);
1211 oecode = gimple_assign_rhs_code (oedef);
1212 linearize_expr_tree (&subops[i], oedef,
1213 associative_tree_code (oecode), false);
1215 FOR_EACH_VEC_ELT (operand_entry_t, subops[i], j, oe1)
1217 oecount c;
1218 void **slot;
1219 size_t idx;
1220 c.oecode = oecode;
1221 c.cnt = 1;
1222 c.id = next_oecount_id++;
1223 c.op = oe1->op;
1224 VEC_safe_push (oecount, heap, cvec, &c);
1225 idx = VEC_length (oecount, cvec) + 41;
1226 slot = htab_find_slot (ctable, (void *)idx, INSERT);
1227 if (!*slot)
1229 *slot = (void *)idx;
1231 else
1233 VEC_pop (oecount, cvec);
1234 VEC_index (oecount, cvec, (size_t)*slot - 42)->cnt++;
1238 htab_delete (ctable);
1240 /* Sort the counting table. */
1241 VEC_qsort (oecount, cvec, oecount_cmp);
1243 if (dump_file && (dump_flags & TDF_DETAILS))
1245 oecount *c;
1246 fprintf (dump_file, "Candidates:\n");
1247 FOR_EACH_VEC_ELT (oecount, cvec, j, c)
1249 fprintf (dump_file, " %u %s: ", c->cnt,
1250 c->oecode == MULT_EXPR
1251 ? "*" : c->oecode == RDIV_EXPR ? "/" : "?");
1252 print_generic_expr (dump_file, c->op, 0);
1253 fprintf (dump_file, "\n");
1257 /* Process the (operand, code) pairs in decreasing order of occurrence count. */
1258 candidates2 = sbitmap_alloc (length);
1259 while (!VEC_empty (oecount, cvec))
1261 oecount *c = VEC_last (oecount, cvec);
1262 if (c->cnt < 2)
1263 break;
1265 /* Now collect the operands in the outer chain that contain
1266 the common operand in their inner chain. */
1267 sbitmap_zero (candidates2);
1268 nr_candidates2 = 0;
1269 EXECUTE_IF_SET_IN_SBITMAP (candidates, 0, i, sbi0)
1271 gimple oedef;
1272 enum tree_code oecode;
1273 unsigned j;
1274 tree op = VEC_index (operand_entry_t, *ops, i)->op;
1276 /* If we undistributed in this chain already this may be
1277 a constant. */
1278 if (TREE_CODE (op) != SSA_NAME)
1279 continue;
1281 oedef = SSA_NAME_DEF_STMT (op);
1282 oecode = gimple_assign_rhs_code (oedef);
1283 if (oecode != c->oecode)
1284 continue;
1286 FOR_EACH_VEC_ELT (operand_entry_t, subops[i], j, oe1)
1288 if (oe1->op == c->op)
1290 SET_BIT (candidates2, i);
1291 ++nr_candidates2;
1292 break;
1297 if (nr_candidates2 >= 2)
1299 operand_entry_t oe1, oe2;
1300 tree tmpvar;
1301 gimple prod;
1302 int first = sbitmap_first_set_bit (candidates2);
1304 /* Build the new addition chain. */
1305 oe1 = VEC_index (operand_entry_t, *ops, first);
1306 if (dump_file && (dump_flags & TDF_DETAILS))
1308 fprintf (dump_file, "Building (");
1309 print_generic_expr (dump_file, oe1->op, 0);
1311 tmpvar = create_tmp_reg (TREE_TYPE (oe1->op), NULL);
1312 add_referenced_var (tmpvar);
1313 zero_one_operation (&oe1->op, c->oecode, c->op);
1314 EXECUTE_IF_SET_IN_SBITMAP (candidates2, first+1, i, sbi0)
1316 gimple sum;
1317 oe2 = VEC_index (operand_entry_t, *ops, i);
1318 if (dump_file && (dump_flags & TDF_DETAILS))
1320 fprintf (dump_file, " + ");
1321 print_generic_expr (dump_file, oe2->op, 0);
1323 zero_one_operation (&oe2->op, c->oecode, c->op);
1324 sum = build_and_add_sum (tmpvar, oe1->op, oe2->op, opcode);
1325 oe2->op = build_zero_cst (TREE_TYPE (oe2->op));
1326 oe2->rank = 0;
1327 oe1->op = gimple_get_lhs (sum);
1330 /* Apply the multiplication/division. */
1331 prod = build_and_add_sum (tmpvar, oe1->op, c->op, c->oecode);
1332 if (dump_file && (dump_flags & TDF_DETAILS))
1334 fprintf (dump_file, ") %s ", c->oecode == MULT_EXPR ? "*" : "/");
1335 print_generic_expr (dump_file, c->op, 0);
1336 fprintf (dump_file, "\n");
1339 /* Record it in the addition chain and disable further
1340 undistribution with this op. */
1341 oe1->op = gimple_assign_lhs (prod);
1342 oe1->rank = get_rank (oe1->op);
1343 VEC_free (operand_entry_t, heap, subops[first]);
1345 changed = true;
1348 VEC_pop (oecount, cvec);
1351 for (i = 0; i < VEC_length (operand_entry_t, *ops); ++i)
1352 VEC_free (operand_entry_t, heap, subops[i]);
1353 free (subops);
1354 VEC_free (oecount, heap, cvec);
1355 sbitmap_free (candidates);
1356 sbitmap_free (candidates2);
1358 return changed;
1361 /* If OPCODE is BIT_IOR_EXPR or BIT_AND_EXPR and CURR is a comparison
1362 expression, examine the other OPS to see if any of them are comparisons
1363 of the same values, which we may be able to combine or eliminate.
1364 For example, we can rewrite (a < b) | (a == b) as (a <= b). */
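/* A second illustrative case (hypothetical operands): for BIT_AND_EXPR,
   (a < b) & (a > b) folds to the constant 0; the code below then drops
   the subsumed comparison and appends the constant to the end of OPS so
   that the usual constant handling can finish the job.  */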
1366 static bool
1367 eliminate_redundant_comparison (enum tree_code opcode,
1368 VEC (operand_entry_t, heap) **ops,
1369 unsigned int currindex,
1370 operand_entry_t curr)
1372 tree op1, op2;
1373 enum tree_code lcode, rcode;
1374 gimple def1, def2;
1375 int i;
1376 operand_entry_t oe;
1378 if (opcode != BIT_IOR_EXPR && opcode != BIT_AND_EXPR)
1379 return false;
1381 /* Check that CURR is a comparison. */
1382 if (TREE_CODE (curr->op) != SSA_NAME)
1383 return false;
1384 def1 = SSA_NAME_DEF_STMT (curr->op);
1385 if (!is_gimple_assign (def1))
1386 return false;
1387 lcode = gimple_assign_rhs_code (def1);
1388 if (TREE_CODE_CLASS (lcode) != tcc_comparison)
1389 return false;
1390 op1 = gimple_assign_rhs1 (def1);
1391 op2 = gimple_assign_rhs2 (def1);
1393 /* Now look for a similar comparison in the remaining OPS. */
1394 for (i = currindex + 1;
1395 VEC_iterate (operand_entry_t, *ops, i, oe);
1396 i++)
1398 tree t;
1400 if (TREE_CODE (oe->op) != SSA_NAME)
1401 continue;
1402 def2 = SSA_NAME_DEF_STMT (oe->op);
1403 if (!is_gimple_assign (def2))
1404 continue;
1405 rcode = gimple_assign_rhs_code (def2);
1406 if (TREE_CODE_CLASS (rcode) != tcc_comparison)
1407 continue;
1409 /* If we got here, we have a match. See if we can combine the
1410 two comparisons. */
1411 if (opcode == BIT_IOR_EXPR)
1412 t = maybe_fold_or_comparisons (lcode, op1, op2,
1413 rcode, gimple_assign_rhs1 (def2),
1414 gimple_assign_rhs2 (def2));
1415 else
1416 t = maybe_fold_and_comparisons (lcode, op1, op2,
1417 rcode, gimple_assign_rhs1 (def2),
1418 gimple_assign_rhs2 (def2));
1419 if (!t)
1420 continue;
1422 /* maybe_fold_and_comparisons and maybe_fold_or_comparisons
1423 always give us a boolean_type_node value back. If the original
1424 BIT_AND_EXPR or BIT_IOR_EXPR was of a wider integer type,
1425 we need to convert. */
1426 if (!useless_type_conversion_p (TREE_TYPE (curr->op), TREE_TYPE (t)))
1427 t = fold_convert (TREE_TYPE (curr->op), t);
1429 if (TREE_CODE (t) != INTEGER_CST
1430 && !operand_equal_p (t, curr->op, 0))
1432 enum tree_code subcode;
1433 tree newop1, newop2;
1434 if (!COMPARISON_CLASS_P (t))
1435 continue;
1436 extract_ops_from_tree (t, &subcode, &newop1, &newop2);
1437 STRIP_USELESS_TYPE_CONVERSION (newop1);
1438 STRIP_USELESS_TYPE_CONVERSION (newop2);
1439 if (!is_gimple_val (newop1) || !is_gimple_val (newop2))
1440 continue;
1443 if (dump_file && (dump_flags & TDF_DETAILS))
1445 fprintf (dump_file, "Equivalence: ");
1446 print_generic_expr (dump_file, curr->op, 0);
1447 fprintf (dump_file, " %s ", op_symbol_code (opcode));
1448 print_generic_expr (dump_file, oe->op, 0);
1449 fprintf (dump_file, " -> ");
1450 print_generic_expr (dump_file, t, 0);
1451 fprintf (dump_file, "\n");
1454 /* Now we can delete oe, as it has been subsumed by the new combined
1455 expression t. */
1456 VEC_ordered_remove (operand_entry_t, *ops, i);
1457 reassociate_stats.ops_eliminated ++;
1459 /* If t is the same as curr->op, we're done. Otherwise we must
1460 replace curr->op with t. Special case is if we got a constant
1461 back, in which case we add it to the end instead of in place of
1462 the current entry. */
1463 if (TREE_CODE (t) == INTEGER_CST)
1465 VEC_ordered_remove (operand_entry_t, *ops, currindex);
1466 add_to_ops_vec (ops, t);
1468 else if (!operand_equal_p (t, curr->op, 0))
1470 tree tmpvar;
1471 gimple sum;
1472 enum tree_code subcode;
1473 tree newop1;
1474 tree newop2;
1475 gcc_assert (COMPARISON_CLASS_P (t));
1476 tmpvar = create_tmp_var (TREE_TYPE (t), NULL);
1477 add_referenced_var (tmpvar);
1478 extract_ops_from_tree (t, &subcode, &newop1, &newop2);
1479 STRIP_USELESS_TYPE_CONVERSION (newop1);
1480 STRIP_USELESS_TYPE_CONVERSION (newop2);
1481 gcc_checking_assert (is_gimple_val (newop1)
1482 && is_gimple_val (newop2));
1483 sum = build_and_add_sum (tmpvar, newop1, newop2, subcode);
1484 curr->op = gimple_get_lhs (sum);
1486 return true;
1489 return false;
1492 /* Perform various identities and other optimizations on the list of
1493 operand entries, stored in OPS. The tree code for the binary
1494 operation between all the operands is OPCODE. */
1496 static void
1497 optimize_ops_list (enum tree_code opcode,
1498 VEC (operand_entry_t, heap) **ops)
1500 unsigned int length = VEC_length (operand_entry_t, *ops);
1501 unsigned int i;
1502 operand_entry_t oe;
1503 operand_entry_t oelast = NULL;
1504 bool iterate = false;
1506 if (length == 1)
1507 return;
1509 oelast = VEC_last (operand_entry_t, *ops);
1511 /* If the last two are constants, pop the constants off, merge them
1512 and try the next two. */
1513 if (oelast->rank == 0 && is_gimple_min_invariant (oelast->op))
1515 operand_entry_t oelm1 = VEC_index (operand_entry_t, *ops, length - 2);
1517 if (oelm1->rank == 0
1518 && is_gimple_min_invariant (oelm1->op)
1519 && useless_type_conversion_p (TREE_TYPE (oelm1->op),
1520 TREE_TYPE (oelast->op)))
1522 tree folded = fold_binary (opcode, TREE_TYPE (oelm1->op),
1523 oelm1->op, oelast->op);
1525 if (folded && is_gimple_min_invariant (folded))
1527 if (dump_file && (dump_flags & TDF_DETAILS))
1528 fprintf (dump_file, "Merging constants\n");
1530 VEC_pop (operand_entry_t, *ops);
1531 VEC_pop (operand_entry_t, *ops);
1533 add_to_ops_vec (ops, folded);
1534 reassociate_stats.constants_eliminated++;
1536 optimize_ops_list (opcode, ops);
1537 return;
1542 eliminate_using_constants (opcode, ops);
1543 oelast = NULL;
1545 for (i = 0; VEC_iterate (operand_entry_t, *ops, i, oe);)
1547 bool done = false;
1549 if (eliminate_not_pairs (opcode, ops, i, oe))
1550 return;
1551 if (eliminate_duplicate_pair (opcode, ops, &done, i, oe, oelast)
1552 || (!done && eliminate_plus_minus_pair (opcode, ops, i, oe))
1553 || (!done && eliminate_redundant_comparison (opcode, ops, i, oe)))
1555 if (done)
1556 return;
1557 iterate = true;
1558 oelast = NULL;
1559 continue;
1561 oelast = oe;
1562 i++;
1565 length = VEC_length (operand_entry_t, *ops);
1566 oelast = VEC_last (operand_entry_t, *ops);
1568 if (iterate)
1569 optimize_ops_list (opcode, ops);
1572 /* The following functions are subroutines to optimize_range_tests and allow
1573 it to try to change a logical combination of comparisons into a range
1574 test.
1576 For example, both
1577 X == 2 || X == 5 || X == 3 || X == 4
1578 and
1579 X >= 2 && X <= 5
1580 are converted to
1581 (unsigned) (X - 2) <= 3
1583 For more information see comments above fold_test_range in fold-const.c,
1584 this implementation is for GIMPLE. */
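/* Sketch of how the first example above is handled (hypothetical SSA
   names): each comparison X == N becomes a range_entry with the
   singleton range [N, N]; after sorting, merge_ranges combines
   [2,2], [3,3], [4,4] and [5,5] into the single range [2,5], and
   update_range_test then emits the equivalent of
   (unsigned) (X - 2) <= 3 via build_range_check.  */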
1586 struct range_entry
1588 tree exp;
1589 tree low;
1590 tree high;
1591 bool in_p;
1592 bool strict_overflow_p;
1593 unsigned int idx, next;
1596 /* This is similar to make_range in fold-const.c, but on top of
1597 GIMPLE instead of trees. */
1599 static void
1600 init_range_entry (struct range_entry *r, tree exp)
1602 int in_p;
1603 tree low, high;
1604 bool is_bool, strict_overflow_p;
1606 r->exp = NULL_TREE;
1607 r->in_p = false;
1608 r->strict_overflow_p = false;
1609 r->low = NULL_TREE;
1610 r->high = NULL_TREE;
1611 if (TREE_CODE (exp) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (exp)))
1612 return;
1614 /* Start with simply saying "EXP != 0" and then look at the code of EXP
1615 and see if we can refine the range. Some of the cases below may not
1616 happen, but it doesn't seem worth worrying about this. We "continue"
1617 the outer loop when we've changed something; otherwise we "break"
1618 the switch, which will "break" the while. */
1619 low = build_int_cst (TREE_TYPE (exp), 0);
1620 high = low;
1621 in_p = 0;
1622 strict_overflow_p = false;
1623 is_bool = false;
1624 if (TYPE_PRECISION (TREE_TYPE (exp)) == 1)
1626 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
1627 is_bool = true;
1628 else
1629 return;
1631 else if (TREE_CODE (TREE_TYPE (exp)) == BOOLEAN_TYPE)
1632 is_bool = true;
1634 while (1)
1636 gimple stmt;
1637 enum tree_code code;
1638 tree arg0, arg1, exp_type;
1639 tree nexp;
1640 location_t loc;
1642 if (TREE_CODE (exp) != SSA_NAME)
1643 break;
1645 stmt = SSA_NAME_DEF_STMT (exp);
1646 if (!is_gimple_assign (stmt))
1647 break;
1649 code = gimple_assign_rhs_code (stmt);
1650 arg0 = gimple_assign_rhs1 (stmt);
1651 if (TREE_CODE (arg0) != SSA_NAME)
1652 break;
1653 arg1 = gimple_assign_rhs2 (stmt);
1654 exp_type = TREE_TYPE (exp);
1655 loc = gimple_location (stmt);
1656 switch (code)
1658 case BIT_NOT_EXPR:
1659 if (TREE_CODE (TREE_TYPE (exp)) == BOOLEAN_TYPE)
1661 in_p = !in_p;
1662 exp = arg0;
1663 continue;
1665 break;
1666 case SSA_NAME:
1667 exp = arg0;
1668 continue;
1669 CASE_CONVERT:
1670 if (is_bool)
1671 goto do_default;
1672 if (TYPE_PRECISION (TREE_TYPE (arg0)) == 1)
1674 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
1675 is_bool = true;
1676 else
1677 return;
1679 else if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE)
1680 is_bool = true;
1681 goto do_default;
1682 case EQ_EXPR:
1683 case NE_EXPR:
1684 case LT_EXPR:
1685 case LE_EXPR:
1686 case GE_EXPR:
1687 case GT_EXPR:
1688 is_bool = true;
1689 /* FALLTHRU */
1690 default:
1691 if (!is_bool)
1692 return;
1693 do_default:
1694 nexp = make_range_step (loc, code, arg0, arg1, exp_type,
1695 &low, &high, &in_p,
1696 &strict_overflow_p);
1697 if (nexp != NULL_TREE)
1699 exp = nexp;
1700 gcc_assert (TREE_CODE (exp) == SSA_NAME);
1701 continue;
1703 break;
1705 break;
1707 if (is_bool)
1709 r->exp = exp;
1710 r->in_p = in_p;
1711 r->low = low;
1712 r->high = high;
1713 r->strict_overflow_p = strict_overflow_p;
1717 /* Comparison function for qsort. Sort entries
1718 without SSA_NAME exp first, then with SSA_NAMEs sorted
1719 by increasing SSA_NAME_VERSION, and for the same SSA_NAMEs
1720 by increasing ->low and if ->low is the same, by increasing
1721 ->high. ->low == NULL_TREE means minimum, ->high == NULL_TREE
1722 maximum. */
1724 static int
1725 range_entry_cmp (const void *a, const void *b)
1727 const struct range_entry *p = (const struct range_entry *) a;
1728 const struct range_entry *q = (const struct range_entry *) b;
1730 if (p->exp != NULL_TREE && TREE_CODE (p->exp) == SSA_NAME)
1732 if (q->exp != NULL_TREE && TREE_CODE (q->exp) == SSA_NAME)
1734 /* Group range_entries for the same SSA_NAME together. */
1735 if (SSA_NAME_VERSION (p->exp) < SSA_NAME_VERSION (q->exp))
1736 return -1;
1737 else if (SSA_NAME_VERSION (p->exp) > SSA_NAME_VERSION (q->exp))
1738 return 1;
1739 /* If ->low is different, NULL low goes first, then by
1740 ascending low. */
1741 if (p->low != NULL_TREE)
1743 if (q->low != NULL_TREE)
1745 tree tem = fold_binary (LT_EXPR, boolean_type_node,
1746 p->low, q->low);
1747 if (tem && integer_onep (tem))
1748 return -1;
1749 tem = fold_binary (GT_EXPR, boolean_type_node,
1750 p->low, q->low);
1751 if (tem && integer_onep (tem))
1752 return 1;
1754 else
1755 return 1;
1757 else if (q->low != NULL_TREE)
1758 return -1;
1759 /* If ->high is different, NULL high goes last, before that by
1760 ascending high. */
1761 if (p->high != NULL_TREE)
1763 if (q->high != NULL_TREE)
1765 tree tem = fold_binary (LT_EXPR, boolean_type_node,
1766 p->high, q->high);
1767 if (tem && integer_onep (tem))
1768 return -1;
1769 tem = fold_binary (GT_EXPR, boolean_type_node,
1770 p->high, q->high);
1771 if (tem && integer_onep (tem))
1772 return 1;
1774 else
1775 return -1;
1777 else if (p->high != NULL_TREE)
1778 return 1;
1779 /* If both ranges are the same, sort below by ascending idx. */
1781 else
1782 return 1;
1784 else if (q->exp != NULL_TREE && TREE_CODE (q->exp) == SSA_NAME)
1785 return -1;
1787 if (p->idx < q->idx)
1788 return -1;
1789 else
1791 gcc_checking_assert (p->idx > q->idx);
1792 return 1;
1796 /* Helper routine of optimize_range_tests.
1797 [EXP, IN_P, LOW, HIGH, STRICT_OVERFLOW_P] is a merged range for
1798 RANGE and OTHERRANGE through OTHERRANGE + COUNT - 1 ranges,
1799 OPCODE and OPS are arguments of optimize_range_tests. Return
1800 true if the range merge has been successful. */
1802 static bool
1803 update_range_test (struct range_entry *range, struct range_entry *otherrange,
1804 unsigned int count, enum tree_code opcode,
1805 VEC (operand_entry_t, heap) **ops, tree exp, bool in_p,
1806 tree low, tree high, bool strict_overflow_p)
1808 tree op = VEC_index (operand_entry_t, *ops, range->idx)->op;
1809 location_t loc = gimple_location (SSA_NAME_DEF_STMT (op));
1810 tree tem = build_range_check (loc, TREE_TYPE (op), exp, in_p, low, high);
1811 enum warn_strict_overflow_code wc = WARN_STRICT_OVERFLOW_COMPARISON;
1812 gimple_stmt_iterator gsi;
1814 if (tem == NULL_TREE)
1815 return false;
1817 if (strict_overflow_p && issue_strict_overflow_warning (wc))
1818 warning_at (loc, OPT_Wstrict_overflow,
1819 "assuming signed overflow does not occur "
1820 "when simplifying range test");
1822 if (dump_file && (dump_flags & TDF_DETAILS))
1824 struct range_entry *r;
1825 fprintf (dump_file, "Optimizing range tests ");
1826 print_generic_expr (dump_file, range->exp, 0);
1827 fprintf (dump_file, " %c[", range->in_p ? '+' : '-');
1828 print_generic_expr (dump_file, range->low, 0);
1829 fprintf (dump_file, ", ");
1830 print_generic_expr (dump_file, range->high, 0);
1831 fprintf (dump_file, "]");
1832 for (r = otherrange; r < otherrange + count; r++)
1834 fprintf (dump_file, " and %c[", r->in_p ? '+' : '-');
1835 print_generic_expr (dump_file, r->low, 0);
1836 fprintf (dump_file, ", ");
1837 print_generic_expr (dump_file, r->high, 0);
1838 fprintf (dump_file, "]");
1840 fprintf (dump_file, "\n into ");
1841 print_generic_expr (dump_file, tem, 0);
1842 fprintf (dump_file, "\n");
1845 if (opcode == BIT_IOR_EXPR)
1846 tem = invert_truthvalue_loc (loc, tem);
1848 tem = fold_convert_loc (loc, TREE_TYPE (op), tem);
1849 gsi = gsi_for_stmt (SSA_NAME_DEF_STMT (op));
1850 tem = force_gimple_operand_gsi (&gsi, tem, true, NULL_TREE, true,
1851 GSI_SAME_STMT);
1853 VEC_index (operand_entry_t, *ops, range->idx)->op = tem;
1854 range->exp = exp;
1855 range->low = low;
1856 range->high = high;
1857 range->in_p = in_p;
1858 range->strict_overflow_p = false;
1860 for (range = otherrange; range < otherrange + count; range++)
1862 VEC_index (operand_entry_t, *ops, range->idx)->op = error_mark_node;
1863 range->exp = NULL_TREE;
1865 return true;
1868 /* Optimize range tests, similarly to how fold_range_test optimizes
1869 them on trees. The tree code for the binary
1870 operation between all the operands is OPCODE. */
1872 static void
1873 optimize_range_tests (enum tree_code opcode,
1874 VEC (operand_entry_t, heap) **ops)
1876 unsigned int length = VEC_length (operand_entry_t, *ops), i, j, first;
1877 operand_entry_t oe;
1878 struct range_entry *ranges;
1879 bool any_changes = false;
1881 if (length == 1)
1882 return;
1884 ranges = XNEWVEC (struct range_entry, length);
1885 for (i = 0; i < length; i++)
1887 ranges[i].idx = i;
1888 init_range_entry (ranges + i, VEC_index (operand_entry_t, *ops, i)->op);
1889 /* For | invert it now, we will invert it again before emitting
1890 the optimized expression. */
1891 if (opcode == BIT_IOR_EXPR)
1892 ranges[i].in_p = !ranges[i].in_p;
1895 qsort (ranges, length, sizeof (*ranges), range_entry_cmp);
1896 for (i = 0; i < length; i++)
1897 if (ranges[i].exp != NULL_TREE && TREE_CODE (ranges[i].exp) == SSA_NAME)
1898 break;
1900 /* Try to merge ranges. */
1901 for (first = i; i < length; i++)
1903 tree low = ranges[i].low;
1904 tree high = ranges[i].high;
1905 int in_p = ranges[i].in_p;
1906 bool strict_overflow_p = ranges[i].strict_overflow_p;
1907 int update_fail_count = 0;
1909 for (j = i + 1; j < length; j++)
1911 if (ranges[i].exp != ranges[j].exp)
1912 break;
1913 if (!merge_ranges (&in_p, &low, &high, in_p, low, high,
1914 ranges[j].in_p, ranges[j].low, ranges[j].high))
1915 break;
1916 strict_overflow_p |= ranges[j].strict_overflow_p;
1919 if (j == i + 1)
1920 continue;
1922 if (update_range_test (ranges + i, ranges + i + 1, j - i - 1, opcode,
1923 ops, ranges[i].exp, in_p, low, high,
1924 strict_overflow_p))
1926 i = j - 1;
1927 any_changes = true;
1929 /* Avoid quadratic complexity if all merge_ranges calls would succeed,
1930 while update_range_test would fail. */
1931 else if (update_fail_count == 64)
1932 i = j - 1;
1933 else
1934 ++update_fail_count;
1937 /* Optimize X == CST1 || X == CST2
1938 if popcount (CST1 ^ CST2) == 1 into
1939 (X & ~(CST1 ^ CST2)) == (CST1 & ~(CST1 ^ CST2)).
1940 Similarly for ranges. E.g.
1941 X != 2 && X != 3 && X != 10 && X != 11
1942 will be transformed by the above loop into
1943 (X - 2U) <= 1U && (X - 10U) <= 1U
1944 and this loop can transform that into
1945 ((X & ~8) - 2U) <= 1U. */
1946 for (i = first; i < length; i++)
1948 tree lowi, highi, lowj, highj, type, lowxor, highxor, tem, exp;
1950 if (ranges[i].exp == NULL_TREE || ranges[i].in_p)
1951 continue;
1952 type = TREE_TYPE (ranges[i].exp);
1953 if (!INTEGRAL_TYPE_P (type))
1954 continue;
1955 lowi = ranges[i].low;
1956 if (lowi == NULL_TREE)
1957 lowi = TYPE_MIN_VALUE (type);
1958 highi = ranges[i].high;
1959 if (highi == NULL_TREE)
1960 continue;
1961 for (j = i + 1; j < length && j < i + 64; j++)
1963 if (ranges[j].exp == NULL_TREE)
1964 continue;
1965 if (ranges[i].exp != ranges[j].exp)
1966 break;
1967 if (ranges[j].in_p)
1968 continue;
1969 lowj = ranges[j].low;
1970 if (lowj == NULL_TREE)
1971 continue;
1972 highj = ranges[j].high;
1973 if (highj == NULL_TREE)
1974 highj = TYPE_MAX_VALUE (type);
1975 tem = fold_binary (GT_EXPR, boolean_type_node,
1976 lowj, highi);
1977 if (tem == NULL_TREE || !integer_onep (tem))
1978 continue;
1979 lowxor = fold_binary (BIT_XOR_EXPR, type, lowi, lowj);
1980 if (lowxor == NULL_TREE || TREE_CODE (lowxor) != INTEGER_CST)
1981 continue;
1982 gcc_checking_assert (!integer_zerop (lowxor));
1983 tem = fold_binary (MINUS_EXPR, type, lowxor,
1984 build_int_cst (type, 1));
1985 if (tem == NULL_TREE)
1986 continue;
1987 tem = fold_binary (BIT_AND_EXPR, type, lowxor, tem);
1988 if (tem == NULL_TREE || !integer_zerop (tem))
1989 continue;
1990 highxor = fold_binary (BIT_XOR_EXPR, type, highi, highj);
1991 if (!tree_int_cst_equal (lowxor, highxor))
1992 continue;
1993 tem = fold_build1 (BIT_NOT_EXPR, type, lowxor);
1994 exp = fold_build2 (BIT_AND_EXPR, type, ranges[i].exp, tem);
1995 lowj = fold_build2 (BIT_AND_EXPR, type, lowi, tem);
1996 highj = fold_build2 (BIT_AND_EXPR, type, highi, tem);
1997 if (update_range_test (ranges + i, ranges + j, 1, opcode, ops, exp,
1998 ranges[i].in_p, lowj, highj,
1999 ranges[i].strict_overflow_p
2000 || ranges[j].strict_overflow_p))
2002 any_changes = true;
2003 break;
2008 if (any_changes)
2010 j = 0;
2011 FOR_EACH_VEC_ELT (operand_entry_t, *ops, i, oe)
2013 if (oe->op == error_mark_node)
2014 continue;
2015 else if (i != j)
2016 VEC_replace (operand_entry_t, *ops, j, oe);
2017 j++;
2019 VEC_truncate (operand_entry_t, *ops, j);
2022 XDELETEVEC (ranges);
2025 /* Return true if OPERAND is defined by a PHI node which uses the LHS
2026 of STMT in its operands. This is also known as a "destructive
2027 update" operation. */
2029 static bool
2030 is_phi_for_stmt (gimple stmt, tree operand)
2032 gimple def_stmt;
2033 tree lhs;
2034 use_operand_p arg_p;
2035 ssa_op_iter i;
2037 if (TREE_CODE (operand) != SSA_NAME)
2038 return false;
2040 lhs = gimple_assign_lhs (stmt);
2042 def_stmt = SSA_NAME_DEF_STMT (operand);
2043 if (gimple_code (def_stmt) != GIMPLE_PHI)
2044 return false;
2046 FOR_EACH_PHI_ARG (arg_p, def_stmt, i, SSA_OP_USE)
2047 if (lhs == USE_FROM_PTR (arg_p))
2048 return true;
2049 return false;
2052 /* Remove def stmt of VAR if VAR has zero uses and recurse
2053 on rhs1 operand if so. */
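/* An illustrative sketch (hypothetical names): after a rewrite leaves

     t1_4 = a_1 + b_2;   <-- visited, t1_4 no longer used
     t2_5 = t1_4 + c_3;  <-- visited, t2_5 no longer used

   calling remove_visited_stmt_chain (t2_5) deletes the definition of t2_5
   and then follows rhs1 to delete the now-dead definition of t1_4.  */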
2055 static void
2056 remove_visited_stmt_chain (tree var)
2058 gimple stmt;
2059 gimple_stmt_iterator gsi;
2061 while (1)
2063 if (TREE_CODE (var) != SSA_NAME || !has_zero_uses (var))
2064 return;
2065 stmt = SSA_NAME_DEF_STMT (var);
2066 if (!is_gimple_assign (stmt)
2067 || !gimple_visited_p (stmt))
2068 return;
2069 var = gimple_assign_rhs1 (stmt);
2070 gsi = gsi_for_stmt (stmt);
2071 gsi_remove (&gsi, true);
2072 release_defs (stmt);
2076 /* This function checks three consecutive operands in
2077 the passed operands vector OPS starting from OPINDEX and
2078 swaps two operands if it is profitable for the binary operation
2079 consuming the OPINDEX + 1 and OPINDEX + 2 operands.
2081 We pair ops with the same rank if possible.
2083 The alternative we try is to see if STMT is a destructive
2084 update style statement, which is like:
2085 b = phi (a, ...)
2086 a = c + b;
2087 In that case, we want to use the destructive update form to
2088 expose the possible vectorizer sum reduction opportunity.
2089 In that case, the third operand will be the phi node. This
2090 check is not performed if STMT is null.
2092 We could, of course, try to be better as noted above, and do a
2093 lot of work to try to find these opportunities in >3 operand
2094 cases, but it is unlikely to be worth it. */
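/* A worked sketch of the rank heuristic (assumed example ranks): if
   ops[opindex .. opindex + 2] have ranks {5, 5, 7}, the first and third
   entries are swapped so that the two rank-5 operands land at OPINDEX + 1
   and OPINDEX + 2 and get paired by the same binary operation.  */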
2096 static void
2097 swap_ops_for_binary_stmt (VEC(operand_entry_t, heap) * ops,
2098 unsigned int opindex, gimple stmt)
2100 operand_entry_t oe1, oe2, oe3;
2102 oe1 = VEC_index (operand_entry_t, ops, opindex);
2103 oe2 = VEC_index (operand_entry_t, ops, opindex + 1);
2104 oe3 = VEC_index (operand_entry_t, ops, opindex + 2);
2106 if ((oe1->rank == oe2->rank
2107 && oe2->rank != oe3->rank)
2108 || (stmt && is_phi_for_stmt (stmt, oe3->op)
2109 && !is_phi_for_stmt (stmt, oe1->op)
2110 && !is_phi_for_stmt (stmt, oe2->op)))
2112 struct operand_entry temp = *oe3;
2113 oe3->op = oe1->op;
2114 oe3->rank = oe1->rank;
2115 oe1->op = temp.op;
2116 oe1->rank = temp.rank;
2118 else if ((oe1->rank == oe3->rank
2119 && oe2->rank != oe3->rank)
2120 || (stmt && is_phi_for_stmt (stmt, oe2->op)
2121 && !is_phi_for_stmt (stmt, oe1->op)
2122 && !is_phi_for_stmt (stmt, oe3->op)))
2124 struct operand_entry temp = *oe2;
2125 oe2->op = oe1->op;
2126 oe2->rank = oe1->rank;
2127 oe1->op = temp.op;
2128 oe1->rank = temp.rank;
2132 /* Assign UIDs to statements in basic block BB. */
2134 static void
2135 assign_uids (basic_block bb)
2137 unsigned uid = 0;
2138 gimple_stmt_iterator gsi;
2139 /* First assign uids to phis. */
2140 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2142 gimple stmt = gsi_stmt (gsi);
2143 gimple_set_uid (stmt, uid++);
2146 /* Then assign uids to stmts. */
2147 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2149 gimple stmt = gsi_stmt (gsi);
2150 gimple_set_uid (stmt, uid++);
2154 /* For each operand in OPS, find the basic block that contains the statement
2155 which defines the operand. For all such basic blocks, assign UIDs. */
2157 static void
2158 assign_uids_in_relevant_bbs (VEC(operand_entry_t, heap) * ops)
2160 operand_entry_t oe;
2161 int i;
2162 struct pointer_set_t *seen_bbs = pointer_set_create ();
2164 for (i = 0; VEC_iterate (operand_entry_t, ops, i, oe); i++)
2166 gimple def_stmt;
2167 basic_block bb;
2168 if (TREE_CODE (oe->op) != SSA_NAME)
2169 continue;
2170 def_stmt = SSA_NAME_DEF_STMT (oe->op);
2171 bb = gimple_bb (def_stmt);
2172 if (!pointer_set_contains (seen_bbs, bb))
2174 assign_uids (bb);
2175 pointer_set_insert (seen_bbs, bb);
2178 pointer_set_destroy (seen_bbs);
2181 /* Ensure that operands in the OPS vector starting from the OPINDEXth entry are live
2182 at STMT. This is accomplished by moving STMT if needed. */
2184 static void
2185 ensure_ops_are_available (gimple stmt, VEC(operand_entry_t, heap) * ops,
2186 int opindex)
2188 int i;
2189 int len = VEC_length (operand_entry_t, ops);
2190 gimple insert_stmt = stmt;
2191 basic_block insert_bb = gimple_bb (stmt);
2192 gimple_stmt_iterator gsi_insert, gsistmt;
2193 for (i = opindex; i < len; i++)
2195 operand_entry_t oe = VEC_index (operand_entry_t, ops, i);
2196 gimple def_stmt;
2197 basic_block def_bb;
2198 /* Ignore constants and operands with default definitions. */
2199 if (TREE_CODE (oe->op) != SSA_NAME
2200 || SSA_NAME_IS_DEFAULT_DEF (oe->op))
2201 continue;
2202 def_stmt = SSA_NAME_DEF_STMT (oe->op);
2203 def_bb = gimple_bb (def_stmt);
2204 if (def_bb != insert_bb
2205 && !dominated_by_p (CDI_DOMINATORS, insert_bb, def_bb))
2207 insert_bb = def_bb;
2208 insert_stmt = def_stmt;
2210 else if (def_bb == insert_bb
2211 && gimple_uid (insert_stmt) < gimple_uid (def_stmt))
2212 insert_stmt = def_stmt;
2214 if (insert_stmt == stmt)
2215 return;
2216 gsistmt = gsi_for_stmt (stmt);
2217 /* If INSERT_STMT is a phi node, then do not insert just after that statement.
2218 Instead, find the first non-label gimple statement in BB and insert before
2219 that. */
2220 if (gimple_code (insert_stmt) == GIMPLE_PHI)
2222 gsi_insert = gsi_after_labels (insert_bb);
2223 gsi_move_before (&gsistmt, &gsi_insert);
2225 /* Statements marked for throw can not be in the middle of a basic block. So
2226 we can not insert a statement (not marked for throw) immediately after. */
2227 else if (lookup_stmt_eh_lp (insert_stmt) > 0
2228 && stmt_can_throw_internal (insert_stmt))
2230 edge e, succ_edge = NULL;
2231 edge_iterator ei;
2233 /* There should be exactly one normal edge out of the basic block. */
2234 FOR_EACH_EDGE (e, ei, insert_bb->succs)
2236 if (!(e->flags & EDGE_COMPLEX))
2238 gcc_assert (succ_edge == NULL);
2239 succ_edge = e;
2242 /* Insert STMT at the beginning of the successor basic block. */
2243 insert_bb = succ_edge->dest;
2244 gsi_insert = gsi_after_labels (insert_bb);
2245 gsi_move_before (&gsistmt, &gsi_insert);
2247 else
2249 gsi_insert = gsi_for_stmt (insert_stmt);
2250 gsi_move_after (&gsistmt, &gsi_insert);
2255 /* Recursively rewrite our linearized statements so that the operators
2256 match those in OPS[OPINDEX], putting the computation in rank
2257 order. */
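/* An illustrative sketch (hypothetical GIMPLE names): for a linearized chain

     t1_1 = _x + _y;
     t2_2 = t1_1 + _z;
     stmt: r_3 = t2_2 + _w;

   and a four-entry OPS vector, the recursion below rewrites the deepest
   statement to combine OPS[2] and OPS[3], the middle one to add OPS[1],
   and STMT itself to add OPS[0], so the chain recomputes the operands in
   the order they appear in OPS.  */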
2259 static void
2260 rewrite_expr_tree (gimple stmt, unsigned int opindex,
2261 VEC(operand_entry_t, heap) * ops, bool moved)
2263 tree rhs1 = gimple_assign_rhs1 (stmt);
2264 tree rhs2 = gimple_assign_rhs2 (stmt);
2265 operand_entry_t oe;
2267 /* The final recursion case for this function is that you have
2268 exactly two operations left.
2269 If we had exactly one op in the entire list to start with, we
2270 would have never called this function, and the tail recursion
2271 rewrites them one at a time. */
2272 if (opindex + 2 == VEC_length (operand_entry_t, ops))
2274 operand_entry_t oe1, oe2;
2276 oe1 = VEC_index (operand_entry_t, ops, opindex);
2277 oe2 = VEC_index (operand_entry_t, ops, opindex + 1);
2279 if (rhs1 != oe1->op || rhs2 != oe2->op)
2281 if (dump_file && (dump_flags & TDF_DETAILS))
2283 fprintf (dump_file, "Transforming ");
2284 print_gimple_stmt (dump_file, stmt, 0, 0);
2287 gimple_assign_set_rhs1 (stmt, oe1->op);
2288 gimple_assign_set_rhs2 (stmt, oe2->op);
2289 update_stmt (stmt);
2290 if (rhs1 != oe1->op && rhs1 != oe2->op)
2291 remove_visited_stmt_chain (rhs1);
2293 if (dump_file && (dump_flags & TDF_DETAILS))
2295 fprintf (dump_file, " into ");
2296 print_gimple_stmt (dump_file, stmt, 0, 0);
2300 return;
2303 /* If we hit here, we should have 3 or more ops left. */
2304 gcc_assert (opindex + 2 < VEC_length (operand_entry_t, ops));
2306 /* Rewrite the next operator. */
2307 oe = VEC_index (operand_entry_t, ops, opindex);
2309 if (oe->op != rhs2)
2311 if (!moved)
2313 gimple stmt1 = stmt;
2314 unsigned int count, i = 1;
2316 count = VEC_length (operand_entry_t, ops) - opindex - 2;
2317 while (i <= count)
2319 stmt1 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt1));
2320 /* Ensure that STMT1 is moved to a place where all operands in
2321 OPS[opindex + i...] are available. */
2322 ensure_ops_are_available (stmt1, ops, opindex + i);
2323 i++;
2325 moved = true;
2328 if (dump_file && (dump_flags & TDF_DETAILS))
2330 fprintf (dump_file, "Transforming ");
2331 print_gimple_stmt (dump_file, stmt, 0, 0);
2334 gimple_assign_set_rhs2 (stmt, oe->op);
2335 update_stmt (stmt);
2337 if (dump_file && (dump_flags & TDF_DETAILS))
2339 fprintf (dump_file, " into ");
2340 print_gimple_stmt (dump_file, stmt, 0, 0);
2343 /* Recurse on the LHS of the binary operator, which is guaranteed to
2344 be the non-leaf side. */
2345 rewrite_expr_tree (SSA_NAME_DEF_STMT (rhs1), opindex + 1, ops, moved);
2348 /* Find out how many cycles we need to compute the statement chain.
2349 OPS_NUM holds the number of statements in the chain. CPU_WIDTH is the
2350 maximum number of independent statements we may execute per cycle. */
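/* A worked example (assumed inputs): for ops_num = 10 and cpu_width = 2 we
   first spend 10 / (2 * 2) = 2 cycles reducing by cpu_width operands per
   cycle, leaving 10 - 2 * 2 = 6 operands; 6 is not a power of two, so
   floor_log2 (6) + 1 = 3 further cycles are needed, 5 in total.  */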
2352 static int
2353 get_required_cycles (int ops_num, int cpu_width)
2355 int res;
2356 int elog;
2357 unsigned int rest;
2359 /* While we have more than 2 * cpu_width operands
2360 we may reduce the number of operands by cpu_width
2361 per cycle. */
2362 res = ops_num / (2 * cpu_width);
2364 /* The remaining operand count may be halved each cycle
2365 until we are left with only one operand. */
2366 rest = (unsigned)(ops_num - res * cpu_width);
2367 elog = exact_log2 (rest);
2368 if (elog >= 0)
2369 res += elog;
2370 else
2371 res += floor_log2 (rest) + 1;
2373 return res;
2376 /* Returns an optimal number of registers to use for computation of
2377 given statements. */
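/* A worked example (assumed target width of 4): for 6 operands,
   get_required_cycles (6, 4) and get_required_cycles (6, 2) both yield 3
   cycles, so the binary search below settles on width 2, reducing register
   pressure without losing any parallelism.  */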
2379 static int
2380 get_reassociation_width (int ops_num, enum tree_code opc,
2381 enum machine_mode mode)
2383 int param_width = PARAM_VALUE (PARAM_TREE_REASSOC_WIDTH);
2384 int width;
2385 int width_min;
2386 int cycles_best;
2388 if (param_width > 0)
2389 width = param_width;
2390 else
2391 width = targetm.sched.reassociation_width (opc, mode);
2393 if (width == 1)
2394 return width;
2396 /* Get the minimal time required for sequence computation. */
2397 cycles_best = get_required_cycles (ops_num, width);
2399 /* Check whether we can use a smaller width and still compute the
2400 sequence in the same number of cycles. That lets us reduce register usage.
2401 get_required_cycles is monotonically increasing with lower width
2402 so we can perform a binary search for the minimal width that still
2403 results in the optimal cycle count. */
2404 width_min = 1;
2405 while (width > width_min)
2407 int width_mid = (width + width_min) / 2;
2409 if (get_required_cycles (ops_num, width_mid) == cycles_best)
2410 width = width_mid;
2411 else if (width_min < width_mid)
2412 width_min = width_mid;
2413 else
2414 break;
2417 return width;
2420 /* Recursively rewrite our linearized statements so that the operators
2421 match those in OPS[OPINDEX], putting the computation in rank
2422 order and trying to allow operations to be executed in
2423 parallel. */
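/* An illustrative sketch (assumed width of 2, hypothetical operands):
   instead of the serial chain ((a + b) + c) + d, the code below can emit

     t1 = a + b;
     t2 = c + d;
     r  = t1 + t2;

   so that t1 and t2 may be computed in the same cycle.  */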
2425 static void
2426 rewrite_expr_tree_parallel (gimple stmt, int width,
2427 VEC(operand_entry_t, heap) * ops)
2429 enum tree_code opcode = gimple_assign_rhs_code (stmt);
2430 int op_num = VEC_length (operand_entry_t, ops);
2431 int stmt_num = op_num - 1;
2432 gimple *stmts = XALLOCAVEC (gimple, stmt_num);
2433 int op_index = op_num - 1;
2434 int stmt_index = 0;
2435 int ready_stmts_end = 0;
2436 int i = 0;
2437 tree last_rhs1 = gimple_assign_rhs1 (stmt);
2438 tree lhs_var;
2440 /* We start expression rewriting from the top statements.
2441 So, in this loop we create a full list of statements
2442 we will work with. */
2443 stmts[stmt_num - 1] = stmt;
2444 for (i = stmt_num - 2; i >= 0; i--)
2445 stmts[i] = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmts[i+1]));
2447 lhs_var = create_tmp_reg (TREE_TYPE (last_rhs1), NULL);
2448 add_referenced_var (lhs_var);
2450 for (i = 0; i < stmt_num; i++)
2452 tree op1, op2;
2454 /* Determine whether we should use results of
2455 already handled statements or not. */
2456 if (ready_stmts_end == 0
2457 && (i - stmt_index >= width || op_index < 1))
2458 ready_stmts_end = i;
2460 /* Now we choose operands for the next statement. A nonzero
2461 value in ready_stmts_end means that we should use
2462 the results of already generated statements as new operands. */
2463 if (ready_stmts_end > 0)
2465 op1 = gimple_assign_lhs (stmts[stmt_index++]);
2466 if (ready_stmts_end > stmt_index)
2467 op2 = gimple_assign_lhs (stmts[stmt_index++]);
2468 else if (op_index >= 0)
2469 op2 = VEC_index (operand_entry_t, ops, op_index--)->op;
2470 else
2472 gcc_assert (stmt_index < i);
2473 op2 = gimple_assign_lhs (stmts[stmt_index++]);
2476 if (stmt_index >= ready_stmts_end)
2477 ready_stmts_end = 0;
2479 else
2481 if (op_index > 1)
2482 swap_ops_for_binary_stmt (ops, op_index - 2, NULL);
2483 op2 = VEC_index (operand_entry_t, ops, op_index--)->op;
2484 op1 = VEC_index (operand_entry_t, ops, op_index--)->op;
2487 /* If we are emitting the last statement, put the remaining
2488 operands into it. This also
2489 breaks the loop. */
2490 if (op_index < 0 && stmt_index == i)
2491 i = stmt_num - 1;
2493 if (dump_file && (dump_flags & TDF_DETAILS))
2495 fprintf (dump_file, "Transforming ");
2496 print_gimple_stmt (dump_file, stmts[i], 0, 0);
2499 /* We keep the original statement only for the last one. All
2500 others are recreated. */
2501 if (i == stmt_num - 1)
2503 gimple_assign_set_rhs1 (stmts[i], op1);
2504 gimple_assign_set_rhs2 (stmts[i], op2);
2505 update_stmt (stmts[i]);
2507 else
2508 stmts[i] = build_and_add_sum (lhs_var, op1, op2, opcode);
2510 if (dump_file && (dump_flags & TDF_DETAILS))
2512 fprintf (dump_file, " into ");
2513 print_gimple_stmt (dump_file, stmts[i], 0, 0);
2517 remove_visited_stmt_chain (last_rhs1);
2520 /* Transform STMT, which is really (A + B) + (C + D), into the left
2521 linear form, ((A + B) + C) + D.
2522 Recurse on D if necessary. */
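/* An illustrative sketch (hypothetical GIMPLE names): given

     x_1 = A + B;
     y_2 = C + D;
     stmt: z_3 = x_1 + y_2;

   the statement defining y_2 is moved before STMT and the operands are
   rewired so that the two statements form a single left-linear chain
   rooted at STMT.  */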
2524 static void
2525 linearize_expr (gimple stmt)
2527 gimple_stmt_iterator gsinow, gsirhs;
2528 gimple binlhs = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
2529 gimple binrhs = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
2530 enum tree_code rhscode = gimple_assign_rhs_code (stmt);
2531 gimple newbinrhs = NULL;
2532 struct loop *loop = loop_containing_stmt (stmt);
2534 gcc_assert (is_reassociable_op (binlhs, rhscode, loop)
2535 && is_reassociable_op (binrhs, rhscode, loop));
2537 gsinow = gsi_for_stmt (stmt);
2538 gsirhs = gsi_for_stmt (binrhs);
2539 gsi_move_before (&gsirhs, &gsinow);
2541 gimple_assign_set_rhs2 (stmt, gimple_assign_rhs1 (binrhs));
2542 gimple_assign_set_rhs1 (binrhs, gimple_assign_lhs (binlhs));
2543 gimple_assign_set_rhs1 (stmt, gimple_assign_lhs (binrhs));
2545 if (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
2546 newbinrhs = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
2548 if (dump_file && (dump_flags & TDF_DETAILS))
2550 fprintf (dump_file, "Linearized: ");
2551 print_gimple_stmt (dump_file, stmt, 0, 0);
2554 reassociate_stats.linearized++;
2555 update_stmt (binrhs);
2556 update_stmt (binlhs);
2557 update_stmt (stmt);
2559 gimple_set_visited (stmt, true);
2560 gimple_set_visited (binlhs, true);
2561 gimple_set_visited (binrhs, true);
2563 /* Tail recurse on the new rhs if it still needs reassociation. */
2564 if (newbinrhs && is_reassociable_op (newbinrhs, rhscode, loop))
2565 /* ??? This should probably be linearize_expr (newbinrhs) but I don't
2566 want to change the algorithm while converting to tuples. */
2567 linearize_expr (stmt);
2570 /* If LHS has a single immediate use that is a GIMPLE_ASSIGN statement, return
2571 it. Otherwise, return NULL. */
2573 static gimple
2574 get_single_immediate_use (tree lhs)
2576 use_operand_p immuse;
2577 gimple immusestmt;
2579 if (TREE_CODE (lhs) == SSA_NAME
2580 && single_imm_use (lhs, &immuse, &immusestmt)
2581 && is_gimple_assign (immusestmt))
2582 return immusestmt;
2584 return NULL;
2587 /* Recursively negate the value of TONEGATE, and return the SSA_NAME
2588 representing the negated value. Insertions of any necessary
2589 instructions go before GSI.
2590 This function is recursive in that, if you hand it "a_5" as the
2591 value to negate, and a_5 is defined by "a_5 = b_3 + b_4", it will
2592 transform b_3 + b_4 into a_5 = -b_3 + -b_4. */
2594 static tree
2595 negate_value (tree tonegate, gimple_stmt_iterator *gsi)
2597 gimple negatedefstmt = NULL;
2598 tree resultofnegate;
2600 /* If we are trying to negate a name, defined by an add, negate the
2601 add operands instead. */
2602 if (TREE_CODE (tonegate) == SSA_NAME)
2603 negatedefstmt = SSA_NAME_DEF_STMT (tonegate);
2604 if (TREE_CODE (tonegate) == SSA_NAME
2605 && is_gimple_assign (negatedefstmt)
2606 && TREE_CODE (gimple_assign_lhs (negatedefstmt)) == SSA_NAME
2607 && has_single_use (gimple_assign_lhs (negatedefstmt))
2608 && gimple_assign_rhs_code (negatedefstmt) == PLUS_EXPR)
2610 gimple_stmt_iterator gsi;
2611 tree rhs1 = gimple_assign_rhs1 (negatedefstmt);
2612 tree rhs2 = gimple_assign_rhs2 (negatedefstmt);
2614 gsi = gsi_for_stmt (negatedefstmt);
2615 rhs1 = negate_value (rhs1, &gsi);
2616 gimple_assign_set_rhs1 (negatedefstmt, rhs1);
2618 gsi = gsi_for_stmt (negatedefstmt);
2619 rhs2 = negate_value (rhs2, &gsi);
2620 gimple_assign_set_rhs2 (negatedefstmt, rhs2);
2622 update_stmt (negatedefstmt);
2623 return gimple_assign_lhs (negatedefstmt);
2626 tonegate = fold_build1 (NEGATE_EXPR, TREE_TYPE (tonegate), tonegate);
2627 resultofnegate = force_gimple_operand_gsi (gsi, tonegate, true,
2628 NULL_TREE, true, GSI_SAME_STMT);
2629 return resultofnegate;
2632 /* Return true if we should break up the subtract in STMT into an add
2633 with negate. This is true when the subtract operands are really
2634 adds, or the subtract itself is used in an add expression. In
2635 either case, breaking up the subtract into an add with negate
2636 exposes the adds to reassociation. */
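/* An illustrative sketch (assumed source-level example):

     unsigned f (unsigned a, unsigned b, unsigned c) { return (a - b) + c; }

   Here the subtract feeds an addition, so it is broken up into a + -b and
   the whole expression becomes one reassociable chain a + -b + c.  */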
2638 static bool
2639 should_break_up_subtract (gimple stmt)
2641 tree lhs = gimple_assign_lhs (stmt);
2642 tree binlhs = gimple_assign_rhs1 (stmt);
2643 tree binrhs = gimple_assign_rhs2 (stmt);
2644 gimple immusestmt;
2645 struct loop *loop = loop_containing_stmt (stmt);
2647 if (TREE_CODE (binlhs) == SSA_NAME
2648 && is_reassociable_op (SSA_NAME_DEF_STMT (binlhs), PLUS_EXPR, loop))
2649 return true;
2651 if (TREE_CODE (binrhs) == SSA_NAME
2652 && is_reassociable_op (SSA_NAME_DEF_STMT (binrhs), PLUS_EXPR, loop))
2653 return true;
2655 if (TREE_CODE (lhs) == SSA_NAME
2656 && (immusestmt = get_single_immediate_use (lhs))
2657 && is_gimple_assign (immusestmt)
2658 && (gimple_assign_rhs_code (immusestmt) == PLUS_EXPR
2659 || gimple_assign_rhs_code (immusestmt) == MULT_EXPR))
2660 return true;
2661 return false;
2664 /* Transform STMT from A - B into A + -B. */
2666 static void
2667 break_up_subtract (gimple stmt, gimple_stmt_iterator *gsip)
2669 tree rhs1 = gimple_assign_rhs1 (stmt);
2670 tree rhs2 = gimple_assign_rhs2 (stmt);
2672 if (dump_file && (dump_flags & TDF_DETAILS))
2674 fprintf (dump_file, "Breaking up subtract ");
2675 print_gimple_stmt (dump_file, stmt, 0, 0);
2678 rhs2 = negate_value (rhs2, gsip);
2679 gimple_assign_set_rhs_with_ops (gsip, PLUS_EXPR, rhs1, rhs2);
2680 update_stmt (stmt);
2683 /* Recursively linearize a binary expression that is the RHS of STMT.
2684 Place the operands of the expression tree in the vector named OPS. */
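/* An illustrative sketch (hypothetical GIMPLE names): for

     t1_1 = a_2 + b_3;
     t2_4 = t1_1 + c_5;
     stmt: r_6 = t2_4 + d_7;

   the recursion walks down the rhs1 chain and OPS ends up holding the leaf
   operands a_2, b_3, c_5 and d_7 (they are sorted by rank afterwards).  */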
2686 static void
2687 linearize_expr_tree (VEC(operand_entry_t, heap) **ops, gimple stmt,
2688 bool is_associative, bool set_visited)
2690 tree binlhs = gimple_assign_rhs1 (stmt);
2691 tree binrhs = gimple_assign_rhs2 (stmt);
2692 gimple binlhsdef, binrhsdef;
2693 bool binlhsisreassoc = false;
2694 bool binrhsisreassoc = false;
2695 enum tree_code rhscode = gimple_assign_rhs_code (stmt);
2696 struct loop *loop = loop_containing_stmt (stmt);
2698 if (set_visited)
2699 gimple_set_visited (stmt, true);
2701 if (TREE_CODE (binlhs) == SSA_NAME)
2703 binlhsdef = SSA_NAME_DEF_STMT (binlhs);
2704 binlhsisreassoc = (is_reassociable_op (binlhsdef, rhscode, loop)
2705 && !stmt_could_throw_p (binlhsdef));
2708 if (TREE_CODE (binrhs) == SSA_NAME)
2710 binrhsdef = SSA_NAME_DEF_STMT (binrhs);
2711 binrhsisreassoc = (is_reassociable_op (binrhsdef, rhscode, loop)
2712 && !stmt_could_throw_p (binrhsdef));
2715 /* If the LHS is not reassociable, but the RHS is, we need to swap
2716 them. If neither is reassociable, there is nothing we can do, so
2717 just put them in the ops vector. If the LHS is reassociable,
2718 linearize it. If both are reassociable, then linearize the RHS
2719 and the LHS. */
2721 if (!binlhsisreassoc)
2723 tree temp;
2725 /* If this is not an associative operation, like division, give up. */
2726 if (!is_associative)
2728 add_to_ops_vec (ops, binrhs);
2729 return;
2732 if (!binrhsisreassoc)
2734 add_to_ops_vec (ops, binrhs);
2735 add_to_ops_vec (ops, binlhs);
2736 return;
2739 if (dump_file && (dump_flags & TDF_DETAILS))
2741 fprintf (dump_file, "swapping operands of ");
2742 print_gimple_stmt (dump_file, stmt, 0, 0);
2745 swap_tree_operands (stmt,
2746 gimple_assign_rhs1_ptr (stmt),
2747 gimple_assign_rhs2_ptr (stmt));
2748 update_stmt (stmt);
2750 if (dump_file && (dump_flags & TDF_DETAILS))
2752 fprintf (dump_file, " is now ");
2753 print_gimple_stmt (dump_file, stmt, 0, 0);
2756 /* We want to make it so the lhs is always the reassociative op,
2757 so swap. */
2758 temp = binlhs;
2759 binlhs = binrhs;
2760 binrhs = temp;
2762 else if (binrhsisreassoc)
2764 linearize_expr (stmt);
2765 binlhs = gimple_assign_rhs1 (stmt);
2766 binrhs = gimple_assign_rhs2 (stmt);
2769 gcc_assert (TREE_CODE (binrhs) != SSA_NAME
2770 || !is_reassociable_op (SSA_NAME_DEF_STMT (binrhs),
2771 rhscode, loop));
2772 linearize_expr_tree (ops, SSA_NAME_DEF_STMT (binlhs),
2773 is_associative, set_visited);
2774 add_to_ops_vec (ops, binrhs);
2777 /* Repropagate the negates back into subtracts, since no other pass
2778 currently does it. */
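/* An illustrative sketch of the simplest case (hypothetical names):

     x_1 = -a_2;
     y_3 = b_4 + x_1;

   becomes y_3 = b_4 - a_2, as handled by the PLUS_EXPR case below.  */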
2780 static void
2781 repropagate_negates (void)
2783 unsigned int i = 0;
2784 tree negate;
2786 FOR_EACH_VEC_ELT (tree, plus_negates, i, negate)
2788 gimple user = get_single_immediate_use (negate);
2790 if (!user || !is_gimple_assign (user))
2791 continue;
2793 /* The negate operand can be either operand of a PLUS_EXPR
2794 (it can be the LHS if the RHS is a constant for example).
2796 Force the negate operand to the RHS of the PLUS_EXPR, then
2797 transform the PLUS_EXPR into a MINUS_EXPR. */
2798 if (gimple_assign_rhs_code (user) == PLUS_EXPR)
2800 /* If the negated operand appears on the LHS of the
2801 PLUS_EXPR, exchange the operands of the PLUS_EXPR
2802 to force the negated operand to the RHS of the PLUS_EXPR. */
2803 if (gimple_assign_rhs1 (user) == negate)
2805 swap_tree_operands (user,
2806 gimple_assign_rhs1_ptr (user),
2807 gimple_assign_rhs2_ptr (user));
2810 /* Now transform the PLUS_EXPR into a MINUS_EXPR and replace
2811 the RHS of the PLUS_EXPR with the operand of the NEGATE_EXPR. */
2812 if (gimple_assign_rhs2 (user) == negate)
2814 tree rhs1 = gimple_assign_rhs1 (user);
2815 tree rhs2 = get_unary_op (negate, NEGATE_EXPR);
2816 gimple_stmt_iterator gsi = gsi_for_stmt (user);
2817 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, rhs1, rhs2);
2818 update_stmt (user);
2821 else if (gimple_assign_rhs_code (user) == MINUS_EXPR)
2823 if (gimple_assign_rhs1 (user) == negate)
2825 /* We have
2826 x = -a
2827 y = x - b
2828 which we transform into
2829 x = a + b
2830 y = -x .
2831 This pushes down the negate which we possibly can merge
2832 into some other operation, hence insert it into the
2833 plus_negates vector. */
2834 gimple feed = SSA_NAME_DEF_STMT (negate);
2835 tree a = gimple_assign_rhs1 (feed);
2836 tree rhs2 = gimple_assign_rhs2 (user);
2837 gimple_stmt_iterator gsi = gsi_for_stmt (feed), gsi2;
2838 gimple_replace_lhs (feed, negate);
2839 gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, a, rhs2);
2840 update_stmt (gsi_stmt (gsi));
2841 gsi2 = gsi_for_stmt (user);
2842 gimple_assign_set_rhs_with_ops (&gsi2, NEGATE_EXPR, negate, NULL);
2843 update_stmt (gsi_stmt (gsi2));
2844 gsi_move_before (&gsi, &gsi2);
2845 VEC_safe_push (tree, heap, plus_negates,
2846 gimple_assign_lhs (gsi_stmt (gsi2)));
2848 else
2850 /* Transform "x = -a; y = b - x" into "y = b + a", getting
2851 rid of one operation. */
2852 gimple feed = SSA_NAME_DEF_STMT (negate);
2853 tree a = gimple_assign_rhs1 (feed);
2854 tree rhs1 = gimple_assign_rhs1 (user);
2855 gimple_stmt_iterator gsi = gsi_for_stmt (user);
2856 gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, rhs1, a);
2857 update_stmt (gsi_stmt (gsi));
2863 /* Returns true if OP is of a type for which we can do reassociation.
2864 That is for integral or non-saturating fixed-point types, and for
2865 floating-point types when -fassociative-math is enabled. */
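/* For illustration (assumptions about default flags): wrapping unsigned
   integers qualify, plain signed int does not unless -fwrapv is given, and
   float/double qualify only when -fassociative-math is in effect.  */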
2867 static bool
2868 can_reassociate_p (tree op)
2870 tree type = TREE_TYPE (op);
2871 if ((INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
2872 || NON_SAT_FIXED_POINT_TYPE_P (type)
2873 || (flag_associative_math && FLOAT_TYPE_P (type)))
2874 return true;
2875 return false;
2878 /* Break up subtract operations in block BB.
2880 We do this top down because we don't know whether the subtract is
2881 part of a possible chain of reassociation except at the top.
2883 IE given
2884 d = f + g
2885 c = a + e
2886 b = c - d
2887 q = b - r
2888 k = t - q
2890 we want to break up k = t - q, but we won't until we've transformed q
2891 = b - r, which won't be broken up until we transform b = c - d.
2893 En passant, clear the GIMPLE visited flag on every statement. */
2895 static void
2896 break_up_subtract_bb (basic_block bb)
2898 gimple_stmt_iterator gsi;
2899 basic_block son;
2901 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2903 gimple stmt = gsi_stmt (gsi);
2904 gimple_set_visited (stmt, false);
2906 if (!is_gimple_assign (stmt)
2907 || !can_reassociate_p (gimple_assign_lhs (stmt)))
2908 continue;
2910 /* Look for simple gimple subtract operations. */
2911 if (gimple_assign_rhs_code (stmt) == MINUS_EXPR)
2913 if (!can_reassociate_p (gimple_assign_rhs1 (stmt))
2914 || !can_reassociate_p (gimple_assign_rhs2 (stmt)))
2915 continue;
2917 /* Check for a subtract used only in an addition. If this
2918 is the case, transform it into add of a negate for better
2919 reassociation. IE transform C = A-B into C = A + -B if C
2920 is only used in an addition. */
2921 if (should_break_up_subtract (stmt))
2922 break_up_subtract (stmt, &gsi);
2924 else if (gimple_assign_rhs_code (stmt) == NEGATE_EXPR
2925 && can_reassociate_p (gimple_assign_rhs1 (stmt)))
2926 VEC_safe_push (tree, heap, plus_negates, gimple_assign_lhs (stmt));
2928 for (son = first_dom_son (CDI_DOMINATORS, bb);
2929 son;
2930 son = next_dom_son (CDI_DOMINATORS, son))
2931 break_up_subtract_bb (son);
2934 /* Reassociate expressions in basic block BB and, recursively, in its
2935 children in the post-dominator tree. */
2937 static void
2938 reassociate_bb (basic_block bb)
2940 gimple_stmt_iterator gsi;
2941 basic_block son;
2943 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
2945 gimple stmt = gsi_stmt (gsi);
2947 if (is_gimple_assign (stmt)
2948 && !stmt_could_throw_p (stmt))
2950 tree lhs, rhs1, rhs2;
2951 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
2953 /* If this is not a gimple binary expression, there is
2954 nothing for us to do with it. */
2955 if (get_gimple_rhs_class (rhs_code) != GIMPLE_BINARY_RHS)
2956 continue;
2958 /* If this was part of an already processed statement,
2959 we don't need to touch it again. */
2960 if (gimple_visited_p (stmt))
2962 /* This statement might have become dead because of previous
2963 reassociations. */
2964 if (has_zero_uses (gimple_get_lhs (stmt)))
2966 gsi_remove (&gsi, true);
2967 release_defs (stmt);
2968 /* We might have removed the last stmt above, which
2969 leaves the iterator at the end of the sequence.
2970 In that case, reset it to the last stmt of the block;
2971 if that is still the end of the sequence (we removed
2972 the only remaining statement), we need to bail out. */
2974 if (gsi_end_p (gsi))
2976 gsi = gsi_last_bb (bb);
2977 if (gsi_end_p (gsi))
2978 break;
2981 continue;
2984 lhs = gimple_assign_lhs (stmt);
2985 rhs1 = gimple_assign_rhs1 (stmt);
2986 rhs2 = gimple_assign_rhs2 (stmt);
2988 /* For non-bit or min/max operations we can't associate
2989 all types. Verify that here. */
2990 if (rhs_code != BIT_IOR_EXPR
2991 && rhs_code != BIT_AND_EXPR
2992 && rhs_code != BIT_XOR_EXPR
2993 && rhs_code != MIN_EXPR
2994 && rhs_code != MAX_EXPR
2995 && (!can_reassociate_p (lhs)
2996 || !can_reassociate_p (rhs1)
2997 || !can_reassociate_p (rhs2)))
2998 continue;
3000 if (associative_tree_code (rhs_code))
3002 VEC(operand_entry_t, heap) *ops = NULL;
3004 /* There may be no immediate uses left by the time we
3005 get here because we may have eliminated them all. */
3006 if (TREE_CODE (lhs) == SSA_NAME && has_zero_uses (lhs))
3007 continue;
3009 gimple_set_visited (stmt, true);
3010 linearize_expr_tree (&ops, stmt, true, true);
3011 VEC_qsort (operand_entry_t, ops, sort_by_operand_rank);
3012 optimize_ops_list (rhs_code, &ops);
3013 if (undistribute_ops_list (rhs_code, &ops,
3014 loop_containing_stmt (stmt)))
3016 VEC_qsort (operand_entry_t, ops, sort_by_operand_rank);
3017 optimize_ops_list (rhs_code, &ops);
3020 if (rhs_code == BIT_IOR_EXPR || rhs_code == BIT_AND_EXPR)
3021 optimize_range_tests (rhs_code, &ops);
3023 if (VEC_length (operand_entry_t, ops) == 1)
3025 if (dump_file && (dump_flags & TDF_DETAILS))
3027 fprintf (dump_file, "Transforming ");
3028 print_gimple_stmt (dump_file, stmt, 0, 0);
3031 rhs1 = gimple_assign_rhs1 (stmt);
3032 gimple_assign_set_rhs_from_tree (&gsi,
3033 VEC_last (operand_entry_t,
3034 ops)->op);
3035 update_stmt (stmt);
3036 remove_visited_stmt_chain (rhs1);
3038 if (dump_file && (dump_flags & TDF_DETAILS))
3040 fprintf (dump_file, " into ");
3041 print_gimple_stmt (dump_file, stmt, 0, 0);
3044 else
3046 enum machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
3047 int ops_num = VEC_length (operand_entry_t, ops);
3048 int width = get_reassociation_width (ops_num, rhs_code, mode);
3050 if (dump_file && (dump_flags & TDF_DETAILS))
3051 fprintf (dump_file,
3052 "Width = %d was chosen for reassociation\n", width);
3054 if (width > 1
3055 && VEC_length (operand_entry_t, ops) > 3)
3056 rewrite_expr_tree_parallel (stmt, width, ops);
3057 else
3059 /* When there are three operands left, we want
3060 to make sure the ones that get the double
3061 binary op are chosen wisely. */
3062 int len = VEC_length (operand_entry_t, ops);
3063 if (len >= 3)
3064 swap_ops_for_binary_stmt (ops, len - 3, stmt);
3066 assign_uids_in_relevant_bbs (ops);
3067 rewrite_expr_tree (stmt, 0, ops, false);
3072 VEC_free (operand_entry_t, heap, ops);
3076 for (son = first_dom_son (CDI_POST_DOMINATORS, bb);
3077 son;
3078 son = next_dom_son (CDI_POST_DOMINATORS, son))
3079 reassociate_bb (son);
3082 void dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops);
3083 void debug_ops_vector (VEC (operand_entry_t, heap) *ops);
3085 /* Dump the operand entry vector OPS to FILE. */
3087 void
3088 dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops)
3090 operand_entry_t oe;
3091 unsigned int i;
3093 FOR_EACH_VEC_ELT (operand_entry_t, ops, i, oe)
3095 fprintf (file, "Op %d -> rank: %d, tree: ", i, oe->rank);
3096 print_generic_expr (file, oe->op, 0);
3100 /* Dump the operand entry vector OPS to STDERR. */
3102 DEBUG_FUNCTION void
3103 debug_ops_vector (VEC (operand_entry_t, heap) *ops)
3105 dump_ops_vector (stderr, ops);
3108 static void
3109 do_reassoc (void)
3111 break_up_subtract_bb (ENTRY_BLOCK_PTR);
3112 reassociate_bb (EXIT_BLOCK_PTR);
3115 /* Initialize the reassociation pass. */
3117 static void
3118 init_reassoc (void)
3120 int i;
3121 long rank = 2;
3122 tree param;
3123 int *bbs = XNEWVEC (int, last_basic_block + 1);
3125 /* Find the loops, so that we can prevent moving calculations in
3126 them. */
3127 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
3129 memset (&reassociate_stats, 0, sizeof (reassociate_stats));
3131 operand_entry_pool = create_alloc_pool ("operand entry pool",
3132 sizeof (struct operand_entry), 30);
3133 next_operand_entry_id = 0;
3135 /* Reverse RPO (Reverse Post Order) will give us something where
3136 deeper loops come later. */
3137 pre_and_rev_post_order_compute (NULL, bbs, false);
3138 bb_rank = XCNEWVEC (long, last_basic_block + 1);
3139 operand_rank = pointer_map_create ();
3141 /* Give each argument a distinct rank. */
3142 for (param = DECL_ARGUMENTS (current_function_decl);
3143 param;
3144 param = DECL_CHAIN (param))
3146 if (gimple_default_def (cfun, param) != NULL)
3148 tree def = gimple_default_def (cfun, param);
3149 insert_operand_rank (def, ++rank);
3153 /* Give the chain decl a distinct rank. */
3154 if (cfun->static_chain_decl != NULL)
3156 tree def = gimple_default_def (cfun, cfun->static_chain_decl);
3157 if (def != NULL)
3158 insert_operand_rank (def, ++rank);
3161 /* Set up rank for each BB */
3162 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
3163 bb_rank[bbs[i]] = ++rank << 16;
3165 free (bbs);
3166 calculate_dominance_info (CDI_POST_DOMINATORS);
3167 plus_negates = NULL;
3170 /* Cleanup after the reassociation pass, and print stats if
3171 requested. */
3173 static void
3174 fini_reassoc (void)
3176 statistics_counter_event (cfun, "Linearized",
3177 reassociate_stats.linearized);
3178 statistics_counter_event (cfun, "Constants eliminated",
3179 reassociate_stats.constants_eliminated);
3180 statistics_counter_event (cfun, "Ops eliminated",
3181 reassociate_stats.ops_eliminated);
3182 statistics_counter_event (cfun, "Statements rewritten",
3183 reassociate_stats.rewritten);
3185 pointer_map_destroy (operand_rank);
3186 free_alloc_pool (operand_entry_pool);
3187 free (bb_rank);
3188 VEC_free (tree, heap, plus_negates);
3189 free_dominance_info (CDI_POST_DOMINATORS);
3190 loop_optimizer_finalize ();
3193 /* Gate and execute functions for Reassociation. */
3195 static unsigned int
3196 execute_reassoc (void)
3198 init_reassoc ();
3200 do_reassoc ();
3201 repropagate_negates ();
3203 fini_reassoc ();
3204 return 0;
3207 static bool
3208 gate_tree_ssa_reassoc (void)
3210 return flag_tree_reassoc != 0;
3213 struct gimple_opt_pass pass_reassoc =
3216 GIMPLE_PASS,
3217 "reassoc", /* name */
3218 gate_tree_ssa_reassoc, /* gate */
3219 execute_reassoc, /* execute */
3220 NULL, /* sub */
3221 NULL, /* next */
3222 0, /* static_pass_number */
3223 TV_TREE_REASSOC, /* tv_id */
3224 PROP_cfg | PROP_ssa, /* properties_required */
3225 0, /* properties_provided */
3226 0, /* properties_destroyed */
3227 0, /* todo_flags_start */
3228 TODO_verify_ssa
3229 | TODO_verify_flow
3230 | TODO_ggc_collect /* todo_flags_finish */