/* Reassociation for trees.
   Copyright (C) 2005 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "errors.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "vec.h"
#include "langhooks.h"
#include "pointer-set.h"
/* This is a simple global reassociation pass.  It is, in part, based
   on the LLVM pass of the same name (they do some things more/less
   than we do, in different orders, etc).

   It consists of five steps:

   1. Breaking up subtract operations into addition + negate, where
   it would promote the reassociation of adds.

   2. Left linearization of the expression trees, so that (A+B)+(C+D)
   becomes (((A+B)+C)+D), which is easier for us to rewrite later.
   During linearization, we place the operands of the binary
   expressions into a vector of operand_entry_t.

   3. Optimization of the operand lists, eliminating things like a +
   -a, a & a, etc.

   4. Rewrite the expression trees we linearized and optimized so
   they are in proper rank order.

   5. Repropagate negates, as nothing else will clean it up ATM.

   A bit of theory on #4, since nobody seems to write anything down
   about why it makes sense to do it the way they do it:

   We could do this much nicer theoretically, but don't (for reasons
   explained after how to do it theoretically nice :P).

   In order to promote the most redundancy elimination, you want
   binary expressions whose operands are the same rank (or
   preferably, the same value) exposed to the redundancy eliminator,
   for possible elimination.

   So the way to do this if we really cared, is to build the new op
   tree from the leaves to the roots, merging as you go, and putting the
   new op on the end of the worklist, until you are left with one
   thing on the worklist.

   IE if you have to rewrite the following set of operands (listed with
   rank in parentheses), with opcode PLUS_EXPR:

   a (1), b (1), c (1), d (2), e (2)

   We start with our merge worklist empty, and the ops list with all of
   those on it.

   You want to first merge all leaves of the same rank, as much as
   possible.

   So first build a binary op of

   mergetmp = a + b, and put "mergetmp" on the merge worklist.

   Because there is no three operand form of PLUS_EXPR, c is not going to
   be exposed to redundancy elimination as a rank 1 operand.

   So you might as well throw it on the merge worklist (you could also
   consider it to now be a rank two operand, and merge it with d and e,
   but in this case, you then have evicted e from a binary op.  So at
   least in this situation, you can't win.)

   Then build a binary op of d + e
   mergetmp2 = d + e

   and put mergetmp2 on the merge worklist.

   so merge worklist = {mergetmp, c, mergetmp2}

   Continue building binary ops of these operations until you have only
   one operation left on the worklist.

   So we have

   build binary op
   mergetmp3 = mergetmp + c

   worklist = {mergetmp2, mergetmp3}

   mergetmp4 = mergetmp2 + mergetmp3

   worklist = {mergetmp4}

   Because we have one operation left, we can now just set the original
   statement equal to the result of that operation.

   This will at least expose a + b and d + e to redundancy elimination
   as binary operations.

   For extra points, you can reuse the old statements to build the
   mergetmps, since you shouldn't run out.

   So why don't we do this?

   Because it's expensive, and rarely will help.  Most trees we are
   reassociating have 3 or fewer ops.  If they have 2 ops, they already
   will be written into a nice single binary op.  If you have 3 ops, a
   single simple check suffices to tell you whether the first two are of
   the same rank.  If so, you know to order it

   mergetmp = op1 + op2
   newstmt = mergetmp + op3

   instead of
   mergetmp = op2 + op3
   newstmt = mergetmp + op1

   If all three are of the same rank, you can't expose them all in a
   single binary operator anyway, so the above is *still* the best you
   can do.

   Thus, this is what we do.  When we have three ops left, we check to see
   what order to put them in, and call it a day.  As a nod to vector sum
   reduction, we check if any of the ops are really a phi node that is a
   destructive update for the associating op, and keep the destructive
   update together for vector sum reduction recognition.  */
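/* Illustrative sketch of the destructive update case (added for
   exposition; the SSA names below are invented):

     # sum_1 = PHI <sum_3(latch), 0(preheader)>
     ...
     sum_3 = x_4 + y_5 + sum_1;

   Here sum_1 is defined by a PHI that uses sum_3, i.e. a destructive
   update of the accumulator.  When the last three operands are
   rewritten, sum_1 is kept as a direct operand of the statement
   defining sum_3, preserving the "a = c + b" shape that vector sum
   reduction recognition looks for.  */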
/* Statistics */
static struct
{
  int linearized;
  int constants_eliminated;
  int ops_eliminated;
  int rewritten;
} reassociate_stats;
/* Operator, rank pair.  */
typedef struct operand_entry
{
  unsigned int rank;
  tree op;
} *operand_entry_t;

static alloc_pool operand_entry_pool;
/* Starting rank number for a given basic block, so that we can rank
   operations using unmovable instructions in that BB based on the bb
   depth.  */
static long *bb_rank;

/* Operand->rank hashtable.  */
static struct pointer_map_t *operand_rank;
/* Look up the operand rank structure for expression E.  */

static inline long
find_operand_rank (tree e)
{
  void **slot = pointer_map_contains (operand_rank, e);
  return slot ? (long) *slot : -1;
}
/* Insert {E,RANK} into the operand rank hashtable.  */

static inline void
insert_operand_rank (tree e, long rank)
{
  void **slot;
  gcc_assert (rank > 0);
  slot = pointer_map_insert (operand_rank, e);
  gcc_assert (!*slot);
  *slot = (void *) rank;
}
/* Given an expression E, return the rank of the expression.  */

static long
get_rank (tree e)
{
  /* Constants have rank 0.  */
  if (is_gimple_min_invariant (e))
    return 0;

  /* SSA_NAME's have the rank of the expression they are the result
     of.
     For globals and uninitialized values, the rank is 0.
     For function arguments, use the pre-setup rank.
     For PHI nodes, stores, asm statements, etc, we use the rank of
     the BB.
     For simple operations, the rank is the maximum rank of any of
     its operands, or the bb_rank, whichever is less.
     I make no claims that this is optimal, however, it gives good
     results.  */

  if (TREE_CODE (e) == SSA_NAME)
    {
      tree stmt;
      tree rhs;
      long rank, maxrank;
      int i;
      int n;

      if (TREE_CODE (SSA_NAME_VAR (e)) == PARM_DECL
	  && SSA_NAME_IS_DEFAULT_DEF (e))
	return find_operand_rank (e);

      stmt = SSA_NAME_DEF_STMT (e);
      if (bb_for_stmt (stmt) == NULL)
	return 0;

      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
	  || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
	return bb_rank[bb_for_stmt (stmt)->index];

      /* If we already have a rank for this expression, use that.  */
      rank = find_operand_rank (e);
      if (rank != -1)
	return rank;

      /* Otherwise, find the maximum rank for the operands, or the bb
	 rank, whichever is less.  */
      rank = 0;
      maxrank = bb_rank[bb_for_stmt (stmt)->index];
      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
      n = TREE_OPERAND_LENGTH (rhs);
      if (n == 0)
	rank = MAX (rank, get_rank (rhs));
      else
	{
	  for (i = 0;
	       i < n && TREE_OPERAND (rhs, i) && rank != maxrank;
	       i++)
	    rank = MAX (rank, get_rank (TREE_OPERAND (rhs, i)));
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Rank for ");
	  print_generic_expr (dump_file, e, 0);
	  fprintf (dump_file, " is %ld\n", (rank + 1));
	}

      /* Note the rank in the hashtable so we don't recompute it.  */
      insert_operand_rank (e, (rank + 1));
      return (rank + 1);
    }

  /* Globals, etc, are rank 0.  */
  return 0;
}
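/* Worked example (added for exposition; the names and numbers are
   invented): if parameters a_1 and b_2 were pre-assigned ranks 3 and
   4 by init_reassoc, then

     x_3 = a_1 + b_2;   gets rank MAX (3, 4) + 1 = 5
     y_4 = x_3 + a_1;   gets rank MAX (5, 3) + 1 = 6

   so values computed later along a dependence chain get strictly
   higher ranks, which is what sort_by_operand_rank relies on.  */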
DEF_VEC_P(operand_entry_t);
DEF_VEC_ALLOC_P(operand_entry_t, heap);

/* We want integer ones to end up last no matter what, since they are
   the ones we can do the most with.  */
#define INTEGER_CONST_TYPE 1 << 3
#define FLOAT_CONST_TYPE 1 << 2
#define OTHER_CONST_TYPE 1 << 1
/* Classify an invariant tree into integer, float, or other, so that
   we can sort them to be near other constants of the same type.  */

static inline int
constant_type (tree t)
{
  if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
    return INTEGER_CONST_TYPE;
  else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (t)))
    return FLOAT_CONST_TYPE;
  else
    return OTHER_CONST_TYPE;
}
/* qsort comparison function to sort operand entries PA and PB by rank
   so that the sorted array is ordered by rank in decreasing order.  */

static int
sort_by_operand_rank (const void *pa, const void *pb)
{
  const operand_entry_t oea = *(const operand_entry_t *)pa;
  const operand_entry_t oeb = *(const operand_entry_t *)pb;

  /* It's nicer for optimize_ops_list if constants that are likely
     to fold when added/multiplied/whatever are put next to each
     other.  Since all constants have rank 0, order them by type.  */
  if (oeb->rank == 0 && oea->rank == 0)
    return constant_type (oeb->op) - constant_type (oea->op);

  /* Lastly, make sure the versions that are the same go next to each
     other.  We use SSA_NAME_VERSION because it's stable.  */
  if ((oeb->rank - oea->rank == 0)
      && TREE_CODE (oea->op) == SSA_NAME
      && TREE_CODE (oeb->op) == SSA_NAME)
    return SSA_NAME_VERSION (oeb->op) - SSA_NAME_VERSION (oea->op);

  return oeb->rank - oea->rank;
}
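/* For example (invented entries; all of one type, as in any real ops
   list): { c_9 (rank 6), b_2 (rank 3), a_1 (rank 3), 4 (rank 0),
   7 (rank 0) } is already sorted: decreasing rank, equal-rank entries
   grouped by SSA_NAME_VERSION so duplicate names become adjacent, and
   the rank-0 constants at the end where optimize_ops_list can merge
   them.  */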
/* Add an operand entry to *OPS for the tree operand OP.  */

static void
add_to_ops_vec (VEC(operand_entry_t, heap) **ops, tree op)
{
  operand_entry_t oe = pool_alloc (operand_entry_pool);

  oe->op = op;
  oe->rank = get_rank (op);
  VEC_safe_push (operand_entry_t, heap, *ops, oe);
}
/* Return true if STMT is a reassociable operation containing a binary
   operation with tree code CODE.  */

static bool
is_reassociable_op (tree stmt, enum tree_code code)
{
  if (!IS_EMPTY_STMT (stmt)
      && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
      && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == code
      && has_single_use (GIMPLE_STMT_OPERAND (stmt, 0)))
    return true;
  return false;
}
/* Given NAME, if NAME is defined by a unary operation OPCODE, return
   the operand of that unary operation.  Otherwise, return
   NULL_TREE.  */

static tree
get_unary_op (tree name, enum tree_code opcode)
{
  tree stmt = SSA_NAME_DEF_STMT (name);
  tree rhs;

  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
    return NULL_TREE;

  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
  if (TREE_CODE (rhs) == opcode)
    return TREE_OPERAND (rhs, 0);
  return NULL_TREE;
}
/* If CURR and LAST are a pair of ops that OPCODE allows us to
   eliminate through equivalences, do so, remove them from OPS, and
   return true.  Otherwise, return false.  */

static bool
eliminate_duplicate_pair (enum tree_code opcode,
			  VEC (operand_entry_t, heap) **ops,
			  bool *all_done,
			  unsigned int i,
			  operand_entry_t curr,
			  operand_entry_t last)
{
  /* If we have two of the same op, and the opcode is &, |, min, or
     max, we can eliminate one of them.
     If we have two of the same op, and the opcode is ^, we can
     eliminate both of them.  */

  if (last && last->op == curr->op)
    {
      switch (opcode)
	{
	case MAX_EXPR:
	case MIN_EXPR:
	case BIT_IOR_EXPR:
	case BIT_AND_EXPR:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Equivalence: ");
	      print_generic_expr (dump_file, curr->op, 0);
	      fprintf (dump_file, " [&|minmax] ");
	      print_generic_expr (dump_file, last->op, 0);
	      fprintf (dump_file, " -> ");
	      print_generic_stmt (dump_file, last->op, 0);
	    }

	  VEC_ordered_remove (operand_entry_t, *ops, i);
	  reassociate_stats.ops_eliminated++;

	  return true;

	case BIT_XOR_EXPR:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Equivalence: ");
	      print_generic_expr (dump_file, curr->op, 0);
	      fprintf (dump_file, " ^ ");
	      print_generic_expr (dump_file, last->op, 0);
	      fprintf (dump_file, " -> nothing\n");
	    }

	  reassociate_stats.ops_eliminated += 2;

	  if (VEC_length (operand_entry_t, *ops) == 2)
	    {
	      VEC_free (operand_entry_t, heap, *ops);
	      *ops = NULL;
	      add_to_ops_vec (ops, fold_convert (TREE_TYPE (last->op),
						 integer_zero_node));
	      *all_done = true;
	    }
	  else
	    {
	      VEC_ordered_remove (operand_entry_t, *ops, i-1);
	      VEC_ordered_remove (operand_entry_t, *ops, i-1);
	    }

	  return true;

	default:
	  break;
	}
    }
  return false;
}
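/* For example (invented names): with opcode BIT_AND_EXPR the sorted
   list { b_3, a_1, a_1 } has a duplicate pair, and a_1 & a_1 == a_1,
   so one copy is removed; with BIT_XOR_EXPR both copies go away,
   since a_1 ^ a_1 == 0 and 0 is the xor identity (a list of exactly
   { a_1, a_1 } collapses to the single constant 0).  */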
/* If OPCODE is PLUS_EXPR and CURR->OP is really a negate expression,
   look in OPS for a corresponding positive operation to cancel it
   out.  If we find one, remove both from OPS, push a 0 in their
   place, and return true.  Otherwise, return false.  */

static bool
eliminate_plus_minus_pair (enum tree_code opcode,
			   VEC (operand_entry_t, heap) **ops,
			   unsigned int currindex,
			   operand_entry_t curr)
{
  tree negateop;
  unsigned int i;
  operand_entry_t oe;

  if (opcode != PLUS_EXPR || TREE_CODE (curr->op) != SSA_NAME)
    return false;

  negateop = get_unary_op (curr->op, NEGATE_EXPR);
  if (negateop == NULL_TREE)
    return false;

  /* Any non-negated version will have a rank that is one less than
     the current rank.  So once we hit those ranks, if we don't find
     one, we can stop.  */

  for (i = currindex + 1;
       VEC_iterate (operand_entry_t, *ops, i, oe)
       && oe->rank >= curr->rank - 1;
       i++)
    {
      if (oe->op == negateop)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Equivalence: ");
	      print_generic_expr (dump_file, negateop, 0);
	      fprintf (dump_file, " + -");
	      print_generic_expr (dump_file, oe->op, 0);
	      fprintf (dump_file, " -> 0\n");
	    }

	  VEC_ordered_remove (operand_entry_t, *ops, i);
	  add_to_ops_vec (ops, fold_convert (TREE_TYPE (oe->op),
					     integer_zero_node));
	  VEC_ordered_remove (operand_entry_t, *ops, currindex);
	  reassociate_stats.ops_eliminated++;

	  return true;
	}
    }

  return false;
}
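/* For example (invented names): if t_4 = -c_3, a PLUS_EXPR list
   { a_1, t_4, c_3 } contains a cancelling pair: c_3 is removed, t_4
   is replaced by a zero pushed onto the list, leaving { a_1, 0 }, and
   the zero is later dropped by eliminate_using_constants.  */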
/* If OPCODE is BIT_IOR_EXPR or BIT_AND_EXPR, and CURR->OP is really a
   bitwise not expression, look in OPS for a corresponding operand to
   cancel it out.  If we find one, the whole list collapses to the
   absorbing element (0 for AND, all ones for IOR); replace OPS with
   that single constant and return true.  Otherwise, return false.  */

static bool
eliminate_not_pairs (enum tree_code opcode,
		     VEC (operand_entry_t, heap) **ops,
		     unsigned int currindex,
		     operand_entry_t curr)
{
  tree notop;
  unsigned int i;
  operand_entry_t oe;

  if ((opcode != BIT_IOR_EXPR && opcode != BIT_AND_EXPR)
      || TREE_CODE (curr->op) != SSA_NAME)
    return false;

  notop = get_unary_op (curr->op, BIT_NOT_EXPR);
  if (notop == NULL_TREE)
    return false;

  /* Any non-not version will have a rank that is one less than
     the current rank.  So once we hit those ranks, if we don't find
     one, we can stop.  */

  for (i = currindex + 1;
       VEC_iterate (operand_entry_t, *ops, i, oe)
       && oe->rank >= curr->rank - 1;
       i++)
    {
      if (oe->op == notop)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Equivalence: ");
	      print_generic_expr (dump_file, notop, 0);
	      if (opcode == BIT_AND_EXPR)
		fprintf (dump_file, " & ~");
	      else if (opcode == BIT_IOR_EXPR)
		fprintf (dump_file, " | ~");
	      print_generic_expr (dump_file, oe->op, 0);
	      if (opcode == BIT_AND_EXPR)
		fprintf (dump_file, " -> 0\n");
	      else if (opcode == BIT_IOR_EXPR)
		fprintf (dump_file, " -> -1\n");
	    }

	  if (opcode == BIT_AND_EXPR)
	    oe->op = fold_convert (TREE_TYPE (oe->op), integer_zero_node);
	  else if (opcode == BIT_IOR_EXPR)
	    oe->op = build_low_bits_mask (TREE_TYPE (oe->op),
					  TYPE_PRECISION (TREE_TYPE (oe->op)));

	  reassociate_stats.ops_eliminated
	    += VEC_length (operand_entry_t, *ops) - 1;
	  VEC_free (operand_entry_t, heap, *ops);
	  *ops = NULL;
	  VEC_safe_push (operand_entry_t, heap, *ops, oe);
	  return true;
	}
    }

  return false;
}
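/* For example (invented names): if t_2 = ~a_1, a BIT_AND_EXPR list
   { b_3, t_2, a_1 } contains t_2 & a_1 == 0, so the entire list
   collapses to { 0 }; under BIT_IOR_EXPR the same pair yields all
   ones, so the list collapses to { -1 }.  */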
/* Use the constant value that may be present in OPS to try to
   eliminate operands.  Note that this function is only really used
   when we've eliminated ops for other reasons, or merged constants.
   Across single statements, fold already does all of this, plus more.
   There is little point in duplicating logic, so I've only included
   the identities that I could ever construct testcases to
   trigger.  */

static void
eliminate_using_constants (enum tree_code opcode,
			   VEC(operand_entry_t, heap) **ops)
{
  operand_entry_t oelast = VEC_last (operand_entry_t, *ops);

  if (oelast->rank == 0 && INTEGRAL_TYPE_P (TREE_TYPE (oelast->op)))
    {
      switch (opcode)
	{
	case BIT_AND_EXPR:
	  if (integer_zerop (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found & 0, removing all other ops\n");

		  reassociate_stats.ops_eliminated
		    += VEC_length (operand_entry_t, *ops) - 1;

		  VEC_free (operand_entry_t, heap, *ops);
		  *ops = NULL;
		  VEC_safe_push (operand_entry_t, heap, *ops, oelast);
		  return;
		}
	    }
	  else if (integer_all_onesp (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found & -1, removing\n");
		  VEC_pop (operand_entry_t, *ops);
		  reassociate_stats.ops_eliminated++;
		}
	    }
	  break;
	case BIT_IOR_EXPR:
	  if (integer_all_onesp (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found | -1, removing all other ops\n");

		  reassociate_stats.ops_eliminated
		    += VEC_length (operand_entry_t, *ops) - 1;

		  VEC_free (operand_entry_t, heap, *ops);
		  *ops = NULL;
		  VEC_safe_push (operand_entry_t, heap, *ops, oelast);
		  return;
		}
	    }
	  else if (integer_zerop (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found | 0, removing\n");
		  VEC_pop (operand_entry_t, *ops);
		  reassociate_stats.ops_eliminated++;
		}
	    }
	  break;
	case MULT_EXPR:
	  if (integer_zerop (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found * 0, removing all other ops\n");

		  reassociate_stats.ops_eliminated
		    += VEC_length (operand_entry_t, *ops) - 1;
		  VEC_free (operand_entry_t, heap, *ops);
		  *ops = NULL;
		  VEC_safe_push (operand_entry_t, heap, *ops, oelast);
		  return;
		}
	    }
	  else if (integer_onep (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found * 1, removing\n");
		  VEC_pop (operand_entry_t, *ops);
		  reassociate_stats.ops_eliminated++;
		  return;
		}
	    }
	  break;
	case BIT_XOR_EXPR:
	case PLUS_EXPR:
	case MINUS_EXPR:
	  if (integer_zerop (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found [|^+] 0, removing\n");
		  VEC_pop (operand_entry_t, *ops);
		  reassociate_stats.ops_eliminated++;
		  return;
		}
	    }
	  break;
	default:
	  break;
	}
    }
}
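/* For example: a MULT_EXPR list { a_1, b_2, 0 } collapses to { 0 };
   { a_1, b_2, 1 } just drops the 1; a BIT_AND_EXPR list { a_1, -1 }
   drops the -1; and a PLUS_EXPR list { a_1, 0 } drops the 0.  (Added
   for exposition; the names are invented.)  */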
/* Perform various identities and other optimizations on the list of
   operand entries, stored in OPS.  The tree code for the binary
   operation between all the operands is OPCODE.  */

static void
optimize_ops_list (enum tree_code opcode,
		   VEC (operand_entry_t, heap) **ops)
{
  unsigned int length = VEC_length (operand_entry_t, *ops);
  unsigned int i;
  operand_entry_t oe;
  operand_entry_t oelast = NULL;
  bool iterate = false;

  if (length == 1)
    return;

  oelast = VEC_last (operand_entry_t, *ops);

  /* If the last two are constants, pop the constants off, merge them
     and try the next two.  */
  if (oelast->rank == 0 && is_gimple_min_invariant (oelast->op))
    {
      operand_entry_t oelm1 = VEC_index (operand_entry_t, *ops, length - 2);

      if (oelm1->rank == 0
	  && is_gimple_min_invariant (oelm1->op)
	  && lang_hooks.types_compatible_p (TREE_TYPE (oelm1->op),
					    TREE_TYPE (oelast->op)))
	{
	  tree folded = fold_binary (opcode, TREE_TYPE (oelm1->op),
				     oelm1->op, oelast->op);

	  if (folded && is_gimple_min_invariant (folded))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "Merging constants\n");

	      VEC_pop (operand_entry_t, *ops);
	      VEC_pop (operand_entry_t, *ops);

	      add_to_ops_vec (ops, folded);
	      reassociate_stats.constants_eliminated++;

	      optimize_ops_list (opcode, ops);
	      return;
	    }
	}
    }

  eliminate_using_constants (opcode, ops);
  oelast = NULL;

  for (i = 0; VEC_iterate (operand_entry_t, *ops, i, oe);)
    {
      bool done = false;

      if (eliminate_not_pairs (opcode, ops, i, oe))
	return;
      if (eliminate_duplicate_pair (opcode, ops, &done, i, oe, oelast)
	  || (!done && eliminate_plus_minus_pair (opcode, ops, i, oe)))
	{
	  if (done)
	    return;
	  iterate = true;
	  oelast = NULL;
	  continue;
	}
      oelast = oe;
      i++;
    }

  length = VEC_length (operand_entry_t, *ops);
  oelast = VEC_last (operand_entry_t, *ops);

  if (iterate)
    optimize_ops_list (opcode, ops);
}
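/* For example: the sorted PLUS_EXPR ops vector for
   w_4 = a_1 + 3 + b_2 + 7 is { a_1, b_2, 3, 7 }; the two trailing
   constants fold into 10, the list becomes { a_1, b_2, 10 }, and the
   function recurses in case more folding or elimination has become
   possible.  (Added for exposition; the names are invented.)  */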
/* Return true if OPERAND is defined by a PHI node which uses the LHS
   of STMT in its operands.  This is also known as a "destructive
   update" operation.  */

static bool
is_phi_for_stmt (tree stmt, tree operand)
{
  tree def_stmt;
  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
  use_operand_p arg_p;
  ssa_op_iter i;

  if (TREE_CODE (operand) != SSA_NAME)
    return false;

  def_stmt = SSA_NAME_DEF_STMT (operand);
  if (TREE_CODE (def_stmt) != PHI_NODE)
    return false;

  FOR_EACH_PHI_ARG (arg_p, def_stmt, i, SSA_OP_USE)
    if (lhs == USE_FROM_PTR (arg_p))
      return true;
  return false;
}
/* Recursively rewrite our linearized statements so that the operators
   match those in OPS[OPINDEX], putting the computation in rank
   order.  */

static void
rewrite_expr_tree (tree stmt, unsigned int opindex,
		   VEC(operand_entry_t, heap) *ops)
{
  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
  operand_entry_t oe;

  /* If we have three operands left, then we want to make sure the two
     that share the inner binary op are the ones with the same rank.

     The alternative we try is to see if this is a destructive
     update style statement, which is like:
     b = phi (a, ...)
     a = c + b;
     In that case, we want to use the destructive update form to
     expose the possible vectorizer sum reduction opportunity.
     In that case, the third operand will be the phi node.

     We could, of course, try to be better as noted above, and do a
     lot of work to try to find these opportunities in >3 operand
     cases, but it is unlikely to be worth it.  */
  if (opindex + 3 == VEC_length (operand_entry_t, ops))
    {
      operand_entry_t oe1, oe2, oe3;

      oe1 = VEC_index (operand_entry_t, ops, opindex);
      oe2 = VEC_index (operand_entry_t, ops, opindex + 1);
      oe3 = VEC_index (operand_entry_t, ops, opindex + 2);

      if ((oe1->rank == oe2->rank
	   && oe2->rank != oe3->rank)
	  || (is_phi_for_stmt (stmt, oe3->op)
	      && !is_phi_for_stmt (stmt, oe1->op)
	      && !is_phi_for_stmt (stmt, oe2->op)))
	{
	  struct operand_entry temp = *oe3;
	  oe3->op = oe1->op;
	  oe3->rank = oe1->rank;
	  oe1->op = temp.op;
	  oe1->rank = temp.rank;
	}
    }

  /* The final recursion case for this function is that you have
     exactly two operations left.
     If we had exactly one op in the entire list to start with, we
     would have never called this function, and the tail recursion
     rewrites them one at a time.  */
  if (opindex + 2 == VEC_length (operand_entry_t, ops))
    {
      operand_entry_t oe1, oe2;

      oe1 = VEC_index (operand_entry_t, ops, opindex);
      oe2 = VEC_index (operand_entry_t, ops, opindex + 1);

      if (TREE_OPERAND (rhs, 0) != oe1->op
	  || TREE_OPERAND (rhs, 1) != oe2->op)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Transforming ");
	      print_generic_expr (dump_file, rhs, 0);
	    }

	  TREE_OPERAND (rhs, 0) = oe1->op;
	  TREE_OPERAND (rhs, 1) = oe2->op;
	  update_stmt (stmt);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " into ");
	      print_generic_stmt (dump_file, rhs, 0);
	    }
	}
      return;
    }

  /* If we hit here, we should have 3 or more ops left.  */
  gcc_assert (opindex + 2 < VEC_length (operand_entry_t, ops));

  /* Rewrite the next operator.  */
  oe = VEC_index (operand_entry_t, ops, opindex);

  if (oe->op != TREE_OPERAND (rhs, 1))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Transforming ");
	  print_generic_expr (dump_file, rhs, 0);
	}

      TREE_OPERAND (rhs, 1) = oe->op;
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, " into ");
	  print_generic_stmt (dump_file, rhs, 0);
	}
    }
  /* Recurse on the LHS of the binary operator, which is guaranteed to
     be the non-leaf side.  */
  rewrite_expr_tree (SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0)),
		     opindex + 1, ops);
}
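/* Worked example (added for exposition; names and ranks invented):
   suppose the remaining ops are { a_1 (rank 1), b_2 (rank 1),
   c_3 (rank 2) }.  a_1 and b_2 have equal rank, so a_1 and c_3 are
   swapped and the rewrite produces

     t_4 = b_2 + a_1;
     lhs = t_4 + c_3;

   exposing the equal-rank pair a_1 + b_2 as its own binary operation
   for redundancy elimination.  */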
/* Transform STMT, which is really (A + B) + (C + D), into the left
   linear form, ((A+B)+C)+D.
   Recurse on D if necessary.  */

static void
linearize_expr (tree stmt)
{
  block_stmt_iterator bsinow, bsirhs;
  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
  enum tree_code rhscode = TREE_CODE (rhs);
  tree binrhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));
  tree binlhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
  tree newbinrhs = NULL_TREE;

  gcc_assert (is_reassociable_op (binlhs, TREE_CODE (rhs))
	      && is_reassociable_op (binrhs, TREE_CODE (rhs)));

  bsinow = bsi_for_stmt (stmt);
  bsirhs = bsi_for_stmt (binrhs);
  bsi_move_before (&bsirhs, &bsinow);

  TREE_OPERAND (rhs, 1) = TREE_OPERAND (GIMPLE_STMT_OPERAND (binrhs, 1), 0);
  if (TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME)
    newbinrhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));
  TREE_OPERAND (GIMPLE_STMT_OPERAND (binrhs, 1), 0)
    = GIMPLE_STMT_OPERAND (binlhs, 0);
  TREE_OPERAND (rhs, 0) = GIMPLE_STMT_OPERAND (binrhs, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Linearized: ");
      print_generic_stmt (dump_file, rhs, 0);
    }

  reassociate_stats.linearized++;
  update_stmt (binrhs);
  update_stmt (binlhs);
  update_stmt (stmt);
  TREE_VISITED (binrhs) = 1;
  TREE_VISITED (binlhs) = 1;
  TREE_VISITED (stmt) = 1;

  /* Tail recurse on the new rhs if it still needs reassociation.  */
  if (newbinrhs && is_reassociable_op (newbinrhs, rhscode))
    linearize_expr (stmt);
}
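/* For example (invented names): given

     t_3 = a_1 + b_2;
     t_4 = c_6 + d_7;
     e_5 = t_3 + t_4;

   the definition of t_4 is moved up and re-pointed so that each
   addition has at most one non-leaf operand:

     t_3 = a_1 + b_2;
     t_4 = t_3 + d_7;
     e_5 = t_4 + c_6;

   which is the left-linear form ((a_1 + b_2) + d_7) + c_6.  */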
/* If LHS has a single immediate use that is a GIMPLE_MODIFY_STMT, return
   it.  Otherwise, return NULL.  */

static tree
get_single_immediate_use (tree lhs)
{
  use_operand_p immuse;
  tree immusestmt;

  if (TREE_CODE (lhs) == SSA_NAME
      && single_imm_use (lhs, &immuse, &immusestmt))
    {
      if (TREE_CODE (immusestmt) == RETURN_EXPR)
	immusestmt = TREE_OPERAND (immusestmt, 0);
      if (TREE_CODE (immusestmt) == GIMPLE_MODIFY_STMT)
	return immusestmt;
    }
  return NULL_TREE;
}
static VEC(tree, heap) *broken_up_subtracts;

/* Recursively negate the value of TONEGATE, and return the SSA_NAME
   representing the negated value.  Insertions of any necessary
   instructions go before BSI.
   This function is recursive in that, if you hand it "a_5" as the
   value to negate, and a_5 is defined by "a_5 = b_3 + b_4", it will
   transform b_3 + b_4 into a_5 = -b_3 + -b_4.  */

static tree
negate_value (tree tonegate, block_stmt_iterator *bsi)
{
  tree negatedef = tonegate;
  tree resultofnegate;

  if (TREE_CODE (tonegate) == SSA_NAME)
    negatedef = SSA_NAME_DEF_STMT (tonegate);

  /* If we are trying to negate a name, defined by an add, negate the
     add operands instead.  */
  if (TREE_CODE (tonegate) == SSA_NAME
      && TREE_CODE (negatedef) == GIMPLE_MODIFY_STMT
      && TREE_CODE (GIMPLE_STMT_OPERAND (negatedef, 0)) == SSA_NAME
      && has_single_use (GIMPLE_STMT_OPERAND (negatedef, 0))
      && TREE_CODE (GIMPLE_STMT_OPERAND (negatedef, 1)) == PLUS_EXPR)
    {
      block_stmt_iterator bsi;
      tree binop = GIMPLE_STMT_OPERAND (negatedef, 1);

      bsi = bsi_for_stmt (negatedef);
      TREE_OPERAND (binop, 0) = negate_value (TREE_OPERAND (binop, 0),
					      &bsi);
      bsi = bsi_for_stmt (negatedef);
      TREE_OPERAND (binop, 1) = negate_value (TREE_OPERAND (binop, 1),
					      &bsi);
      update_stmt (negatedef);
      return GIMPLE_STMT_OPERAND (negatedef, 0);
    }

  tonegate = fold_build1 (NEGATE_EXPR, TREE_TYPE (tonegate), tonegate);
  resultofnegate = force_gimple_operand_bsi (bsi, tonegate, true,
					     NULL_TREE);
  VEC_safe_push (tree, heap, broken_up_subtracts, resultofnegate);
  return resultofnegate;
}
/* Return true if we should break up the subtract in STMT into an add
   with negate.  This is true when the subtract operands are really
   adds, or the subtract itself is used in an add expression.  In
   either case, breaking up the subtract into an add with negate
   exposes the adds to reassociation.  */

static bool
should_break_up_subtract (tree stmt)
{
  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
  tree binlhs = TREE_OPERAND (rhs, 0);
  tree binrhs = TREE_OPERAND (rhs, 1);
  tree immusestmt;

  if (TREE_CODE (binlhs) == SSA_NAME
      && is_reassociable_op (SSA_NAME_DEF_STMT (binlhs), PLUS_EXPR))
    return true;

  if (TREE_CODE (binrhs) == SSA_NAME
      && is_reassociable_op (SSA_NAME_DEF_STMT (binrhs), PLUS_EXPR))
    return true;

  if (TREE_CODE (lhs) == SSA_NAME
      && (immusestmt = get_single_immediate_use (lhs))
      && TREE_CODE (GIMPLE_STMT_OPERAND (immusestmt, 1)) == PLUS_EXPR)
    return true;
  return false;
}
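/* For example (invented names): in

     c_3 = a_1 - b_2;
     e_5 = c_3 + d_4;

   the subtract feeds only an addition, so it is broken up into
   c_3 = a_1 + t_6 with a new t_6 = -b_2, exposing the whole chain to
   reassociation as a single PLUS_EXPR list.  */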
/* Transform STMT from A - B into A + -B.  */

static void
break_up_subtract (tree stmt, block_stmt_iterator *bsi)
{
  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Breaking up subtract ");
      print_generic_stmt (dump_file, stmt, 0);
    }

  TREE_SET_CODE (GIMPLE_STMT_OPERAND (stmt, 1), PLUS_EXPR);
  TREE_OPERAND (rhs, 1) = negate_value (TREE_OPERAND (rhs, 1), bsi);

  update_stmt (stmt);
}
/* Recursively linearize a binary expression that is the RHS of STMT.
   Place the operands of the expression tree in the vector named OPS.  */

static void
linearize_expr_tree (VEC(operand_entry_t, heap) **ops, tree stmt)
{
  block_stmt_iterator bsinow, bsilhs;
  tree rhs = GENERIC_TREE_OPERAND (stmt, 1);
  tree binrhs = TREE_OPERAND (rhs, 1);
  tree binlhs = TREE_OPERAND (rhs, 0);
  tree binlhsdef, binrhsdef;
  bool binlhsisreassoc = false;
  bool binrhsisreassoc = false;
  enum tree_code rhscode = TREE_CODE (rhs);

  TREE_VISITED (stmt) = 1;

  if (TREE_CODE (binlhs) == SSA_NAME)
    {
      binlhsdef = SSA_NAME_DEF_STMT (binlhs);
      binlhsisreassoc = is_reassociable_op (binlhsdef, rhscode);
    }

  if (TREE_CODE (binrhs) == SSA_NAME)
    {
      binrhsdef = SSA_NAME_DEF_STMT (binrhs);
      binrhsisreassoc = is_reassociable_op (binrhsdef, rhscode);
    }

  /* If the LHS is not reassociable, but the RHS is, we need to swap
     them.  If neither is reassociable, there is nothing we can do, so
     just put them in the ops vector.  If the LHS is reassociable,
     linearize it.  If both are reassociable, then linearize the RHS
     and the LHS.  */

  if (!binlhsisreassoc)
    {
      tree temp;

      if (!binrhsisreassoc)
	{
	  add_to_ops_vec (ops, binrhs);
	  add_to_ops_vec (ops, binlhs);
	  return;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "swapping operands of ");
	  print_generic_expr (dump_file, stmt, 0);
	}

      swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
			  &TREE_OPERAND (rhs, 1));
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, " is now ");
	  print_generic_stmt (dump_file, stmt, 0);
	}

      /* We want to make it so the lhs is always the reassociative op,
	 so swap.  */
      temp = binlhs;
      binlhs = binrhs;
      binrhs = temp;
    }
  else if (binrhsisreassoc)
    {
      linearize_expr (stmt);
      gcc_assert (rhs == GIMPLE_STMT_OPERAND (stmt, 1));
      binlhs = TREE_OPERAND (rhs, 0);
      binrhs = TREE_OPERAND (rhs, 1);
    }

  gcc_assert (TREE_CODE (binrhs) != SSA_NAME
	      || !is_reassociable_op (SSA_NAME_DEF_STMT (binrhs), rhscode));
  bsinow = bsi_for_stmt (stmt);
  bsilhs = bsi_for_stmt (SSA_NAME_DEF_STMT (binlhs));
  bsi_move_before (&bsilhs, &bsinow);
  linearize_expr_tree (ops, SSA_NAME_DEF_STMT (binlhs));
  add_to_ops_vec (ops, binrhs);
}
/* Repropagate the negates back into subtracts, since no other pass
   currently does it.  */

static void
repropagate_negates (void)
{
  unsigned int i = 0;
  tree negate;

  for (i = 0; VEC_iterate (tree, broken_up_subtracts, i, negate); i++)
    {
      tree user = get_single_immediate_use (negate);

      /* The negate operand can be either operand of a PLUS_EXPR
	 (it can be the LHS if the RHS is a constant for example).

	 Force the negate operand to the RHS of the PLUS_EXPR, then
	 transform the PLUS_EXPR into a MINUS_EXPR.  */
      if (user
	  && TREE_CODE (user) == GIMPLE_MODIFY_STMT
	  && TREE_CODE (GIMPLE_STMT_OPERAND (user, 1)) == PLUS_EXPR)
	{
	  tree rhs = GIMPLE_STMT_OPERAND (user, 1);

	  /* If the negated operand appears on the LHS of the
	     PLUS_EXPR, exchange the operands of the PLUS_EXPR
	     to force the negated operand to the RHS of the PLUS_EXPR.  */
	  if (TREE_OPERAND (GIMPLE_STMT_OPERAND (user, 1), 0) == negate)
	    {
	      tree temp = TREE_OPERAND (rhs, 0);
	      TREE_OPERAND (rhs, 0) = TREE_OPERAND (rhs, 1);
	      TREE_OPERAND (rhs, 1) = temp;
	    }

	  /* Now transform the PLUS_EXPR into a MINUS_EXPR and replace
	     the RHS of the PLUS_EXPR with the operand of the NEGATE_EXPR.  */
	  if (TREE_OPERAND (GIMPLE_STMT_OPERAND (user, 1), 1) == negate)
	    {
	      TREE_SET_CODE (rhs, MINUS_EXPR);
	      TREE_OPERAND (rhs, 1) = get_unary_op (negate, NEGATE_EXPR);
	      update_stmt (user);
	    }
	}
    }
}
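/* For example: if break_up_subtract_bb produced t_6 = -b_2 and, after
   reassociation, t_6 survives with the single use u_5 = a_1 + t_6,
   the addition is rewritten back to u_5 = a_1 - b_2.  (Added for
   exposition; names invented.)  */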
/* Break up subtract operations in block BB.

   We do this top down because we don't know whether the subtract is
   part of a possible chain of reassociation except at the top.

   IE given
   d = f + g
   c = a + e
   b = c - d
   q = b - r
   k = t - q

   we want to break up k = t - q, but we won't until we've transformed q
   = b - r, which won't be broken up until we transform b = c - d.  */

static void
break_up_subtract_bb (basic_block bb)
{
  block_stmt_iterator bsi;
  basic_block son;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);

      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
	{
	  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
	  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

	  TREE_VISITED (stmt) = 0;
	  /* We reassociate only integral types, unless unsafe math
	     optimizations also permit floating-point types.  */
	  if ((!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	       || !INTEGRAL_TYPE_P (TREE_TYPE (rhs)))
	      && (!SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs))
		  || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs))
		  || !flag_unsafe_math_optimizations))
	    continue;

	  /* Check for a subtract used only in an addition.  If this
	     is the case, transform it into add of a negate for better
	     reassociation.  IE transform C = A - B into C = A + -B if C
	     is only used in an addition.  */
	  if (TREE_CODE (rhs) == MINUS_EXPR)
	    if (should_break_up_subtract (stmt))
	      break_up_subtract (stmt, &bsi);
	}
    }
  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    break_up_subtract_bb (son);
}
/* Reassociate expressions in basic block BB and its post-dominator
   children.  */

static void
reassociate_bb (basic_block bb)
{
  block_stmt_iterator bsi;
  basic_block son;

  for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
    {
      tree stmt = bsi_stmt (bsi);

      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
	{
	  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
	  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

	  /* If this was part of an already processed tree, we don't
	     need to touch it again.  */
	  if (TREE_VISITED (stmt))
	    continue;

	  /* We reassociate only integral types, unless unsafe math
	     optimizations also permit floating-point types.  */
	  if ((!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	       || !INTEGRAL_TYPE_P (TREE_TYPE (rhs)))
	      && (!SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs))
		  || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs))
		  || !flag_unsafe_math_optimizations))
	    continue;

	  if (associative_tree_code (TREE_CODE (rhs)))
	    {
	      VEC(operand_entry_t, heap) *ops = NULL;

	      /* There may be no immediate uses left by the time we
		 get here because we may have eliminated them all.  */
	      if (TREE_CODE (lhs) == SSA_NAME && has_zero_uses (lhs))
		continue;

	      TREE_VISITED (stmt) = 1;
	      linearize_expr_tree (&ops, stmt);
	      qsort (VEC_address (operand_entry_t, ops),
		     VEC_length (operand_entry_t, ops),
		     sizeof (operand_entry_t),
		     sort_by_operand_rank);
	      optimize_ops_list (TREE_CODE (rhs), &ops);

	      if (VEC_length (operand_entry_t, ops) == 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Transforming ");
		      print_generic_expr (dump_file, rhs, 0);
		    }
		  GIMPLE_STMT_OPERAND (stmt, 1)
		    = VEC_last (operand_entry_t, ops)->op;
		  update_stmt (stmt);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, " into ");
		      print_generic_stmt (dump_file,
					  GIMPLE_STMT_OPERAND (stmt, 1), 0);
		    }
		}
	      else
		{
		  rewrite_expr_tree (stmt, 0, ops);
		}

	      VEC_free (operand_entry_t, heap, ops);
	    }
	}
    }
  for (son = first_dom_son (CDI_POST_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_POST_DOMINATORS, son))
    reassociate_bb (son);
}
void dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops);
void debug_ops_vector (VEC (operand_entry_t, heap) *ops);

/* Dump the operand entry vector OPS to FILE.  */

void
dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops)
{
  operand_entry_t oe;
  unsigned int i;

  for (i = 0; VEC_iterate (operand_entry_t, ops, i, oe); i++)
    {
      fprintf (file, "Op %d -> rank: %d, tree: ", i, oe->rank);
      print_generic_stmt (file, oe->op, 0);
    }
}

/* Dump the operand entry vector OPS to STDERR.  */

void
debug_ops_vector (VEC (operand_entry_t, heap) *ops)
{
  dump_ops_vector (stderr, ops);
}
static void
do_reassoc (void)
{
  break_up_subtract_bb (ENTRY_BLOCK_PTR);
  reassociate_bb (EXIT_BLOCK_PTR);
}
/* Initialize the reassociation pass.  */

static void
init_reassoc (void)
{
  int i;
  long rank = 2;
  tree param;
  int *bbs = XNEWVEC (int, last_basic_block + 1);

  memset (&reassociate_stats, 0, sizeof (reassociate_stats));

  operand_entry_pool = create_alloc_pool ("operand entry pool",
					  sizeof (struct operand_entry), 30);

  /* Reverse RPO (Reverse Post Order) will give us something where
     deeper loops come later.  */
  pre_and_rev_post_order_compute (NULL, bbs, false);
  bb_rank = XCNEWVEC (long, last_basic_block + 1);
  operand_rank = pointer_map_create ();

  /* Give each argument a distinct rank.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
	{
	  tree def = gimple_default_def (cfun, param);
	  insert_operand_rank (def, ++rank);
	}
    }

  /* Give the chain decl a distinct rank.  */
  if (cfun->static_chain_decl != NULL)
    {
      tree def = gimple_default_def (cfun, cfun->static_chain_decl);
      if (def != NULL)
	insert_operand_rank (def, ++rank);
    }

  /* Set up rank for each BB.  */
  for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
    bb_rank[bbs[i]] = ++rank << 16;

  free (bbs);
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);
  broken_up_subtracts = NULL;
}
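/* Rank numbering sketch (exposition only): with rank starting at 2,
   parameter default definitions get 3, 4, ..., and each basic block
   in reverse post order then gets (++rank) << 16.  Shifting by 16
   leaves room below each bb_rank for the +1 increments that get_rank
   adds along dependence chains inside a block, so values computed in
   later (deeper) blocks always outrank values from earlier blocks.  */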
/* Cleanup after the reassociation pass, and print stats if
   requested.  */

static void
fini_reassoc (void)
{
  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Reassociation stats:\n");
      fprintf (dump_file, "Linearized: %d\n",
	       reassociate_stats.linearized);
      fprintf (dump_file, "Constants eliminated: %d\n",
	       reassociate_stats.constants_eliminated);
      fprintf (dump_file, "Ops eliminated: %d\n",
	       reassociate_stats.ops_eliminated);
      fprintf (dump_file, "Statements rewritten: %d\n",
	       reassociate_stats.rewritten);
    }

  pointer_map_destroy (operand_rank);
  free_alloc_pool (operand_entry_pool);
  free (bb_rank);
  VEC_free (tree, heap, broken_up_subtracts);
  free_dominance_info (CDI_POST_DOMINATORS);
}
/* Gate and execute functions for Reassociation.  */

static unsigned int
execute_reassoc (void)
{
  init_reassoc ();

  do_reassoc ();
  repropagate_negates ();

  fini_reassoc ();
  return 0;
}
struct tree_opt_pass pass_reassoc =
{
  "reassoc",				/* name */
  NULL,					/* gate */
  execute_reassoc,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_REASSOC,			/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
  0					/* letter */
};