/* Reassociation for trees.
   Copyright (C) 2005 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "errors.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "vec.h"
#include "langhooks.h"
#include "pointer-set.h"

/*  This is a simple global reassociation pass.  It is, in part, based
    on the LLVM pass of the same name (they do some things more/less
    than we do, in different orders, etc).

    It consists of five steps:

    1. Breaking up subtract operations into addition + negate, where
    it would promote the reassociation of adds.

    2. Left linearization of the expression trees, so that (A+B)+(C+D)
    becomes (((A+B)+C)+D), which is easier for us to rewrite later.
    During linearization, we place the operands of the binary
    expressions into a vector of operand_entry_t.

    3. Optimization of the operand lists, eliminating things like a +
    -a, a & a, etc.

    4. Rewrite the expression trees we linearized and optimized so
    they are in proper rank order.

    5. Repropagate negates, as nothing else currently cleans them up.

    A bit of theory on #4, since nobody seems to write anything down
    about why it makes sense to do it the way they do it:

    We could do this much nicer theoretically, but don't (for reasons
    explained after how to do it theoretically nice :P).

    In order to promote the most redundancy elimination, you want
    binary expressions whose operands are the same rank (or
    preferably, the same value) exposed to the redundancy eliminator,
    for possible elimination.

    So the way to do this if we really cared, is to build the new op
    tree from the leaves to the roots, merging as you go, and putting the
    new op on the end of the worklist, until you are left with one
    thing on the worklist.

    IE if you have to rewrite the following set of operands (listed with
    rank in parentheses), with opcode PLUS_EXPR:

    a (1),  b (1),  c (1),  d (2), e (2)

    We start with our merge worklist empty, and the ops list with all of
    those on it.

    You want to first merge all leaves of the same rank, as much as
    possible.

    So first build a binary op of

    mergetmp = a + b, and put "mergetmp" on the merge worklist.

    Because there is no three operand form of PLUS_EXPR, c is not going to
    be exposed to redundancy elimination as a rank 1 operand.

    So you might as well throw it on the merge worklist (you could also
    consider it to now be a rank two operand, and merge it with d and e,
    but in this case, you then have evicted e from a binary op.  So at
    least in this situation, you can't win.)

    Then build a binary op of d + e
    mergetmp2 = d + e

    and put mergetmp2 on the merge worklist.

    so merge worklist = {mergetmp, c, mergetmp2}

    Continue building binary ops of these operations until you have only
    one operation left on the worklist.

    So we have

    build binary op
    mergetmp3 = mergetmp + c

    worklist = {mergetmp2, mergetmp3}

    mergetmp4 = mergetmp2 + mergetmp3

    worklist = {mergetmp4}

    Because we have one operation left, we can now just set the original
    statement equal to the result of that operation.

    This will at least expose a + b and d + e to redundancy elimination
    as binary operations.

    For extra points, you can reuse the old statements to build the
    mergetmps, since you shouldn't run out.

    So why don't we do this?

    Because it's expensive, and rarely will help.  Most trees we are
    reassociating have 3 or fewer ops.  If they have 2 ops, they already
    will be written into a nice single binary op.  If you have 3 ops, a
    single simple check suffices to tell you whether the first two are of
    the same rank.  If so, you know to order it

    mergetmp = op1 + op2
    newstmt = mergetmp + op3

    instead of
    mergetmp = op2 + op3
    newstmt = mergetmp + op1

    If all three are of the same rank, you can't expose them all in a
    single binary operator anyway, so the above is *still* the best you
    can do.

    Thus, this is what we do.  When we have three ops left, we check to see
    what order to put them in, and call it a day.  As a nod to vector sum
    reduction, we check if any of the ops are really a phi node that is a
    destructive update for the associating op, and keep the destructive
    update together for vector sum reduction recognition.  */
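
/* A worked example of the whole pass (illustrative, not from the
   original sources): for x = a & b & c & a, linearization collects
   {a, b, c, a}, sorting groups the two a's together, operand-list
   optimization drops the duplicate (a & a == a), and rewriting emits
   (a & b) & c in rank order.  */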

/* Statistics */
static struct
{
  int linearized;
  int constants_eliminated;
  int ops_eliminated;
  int rewritten;
} reassociate_stats;

/* Operator, rank pair.  */
typedef struct operand_entry
{
  unsigned int rank;
  tree op;
} *operand_entry_t;

static alloc_pool operand_entry_pool;

/* Starting rank number for a given basic block, so that we can rank
   operations using unmovable instructions in that BB based on the bb
   depth.  */
static long *bb_rank;

/* Operand->rank hashtable.  */
static struct pointer_map_t *operand_rank;

/* Look up the operand rank structure for expression E.  */

static inline long
find_operand_rank (tree e)
{
  void **slot = pointer_map_contains (operand_rank, e);
  return slot ? (long) *slot : -1;
}

/* Insert {E,RANK} into the operand rank hashtable.  */

static inline void
insert_operand_rank (tree e, long rank)
{
  void **slot;
  gcc_assert (rank > 0);
  slot = pointer_map_insert (operand_rank, e);
  gcc_assert (!*slot);
  *slot = (void *) rank;
}

/* Given an expression E, return the rank of the expression.  */

static long
get_rank (tree e)
{
  /* Constants have rank 0.  */
  if (is_gimple_min_invariant (e))
    return 0;

  /* SSA_NAME's have the rank of the expression they are the result
     of.
     For globals and uninitialized values, the rank is 0.
     For function arguments, use the pre-setup rank.
     For PHI nodes, stores, asm statements, etc, we use the rank of
     the BB.
     For simple operations, the rank is the maximum rank of any of
     its operands, or the bb_rank, whichever is less.
     I make no claims that this is optimal, however, it gives good
     results.  */
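
  /* For example (illustrative), in a block whose base rank is
     bb_rank[bb]:

	y_3 = PHI <...>		rank = bb_rank[bb]
	x_1 = a_2 + b_3		rank = MAX (rank (a_2), rank (b_3)) + 1
	z_4 = x_1 * 4		rank = rank (x_1) + 1, constants being rank 0.  */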

  if (TREE_CODE (e) == SSA_NAME)
    {
      tree stmt;
      tree rhs;
      long rank, maxrank;
      int i;

      if (TREE_CODE (SSA_NAME_VAR (e)) == PARM_DECL
	  && SSA_NAME_IS_DEFAULT_DEF (e))
	return find_operand_rank (e);

      stmt = SSA_NAME_DEF_STMT (e);
      if (bb_for_stmt (stmt) == NULL)
	return 0;

      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
	  || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
	return bb_rank[bb_for_stmt (stmt)->index];

      /* If we already have a rank for this expression, use that.  */
      rank = find_operand_rank (e);
      if (rank != -1)
	return rank;

      /* Otherwise, find the maximum rank for the operands, or the bb
	 rank, whichever is less.  */
      rank = 0;
      maxrank = bb_rank[bb_for_stmt (stmt)->index];
      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
      if (TREE_CODE_LENGTH (TREE_CODE (rhs)) == 0)
	rank = MAX (rank, get_rank (rhs));
      else
	{
	  for (i = 0;
	       i < TREE_CODE_LENGTH (TREE_CODE (rhs))
	       && TREE_OPERAND (rhs, i)
	       && rank != maxrank;
	       i++)
	    rank = MAX (rank, get_rank (TREE_OPERAND (rhs, i)));
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Rank for ");
	  print_generic_expr (dump_file, e, 0);
	  fprintf (dump_file, " is %ld\n", (rank + 1));
	}

      /* Note the rank in the hashtable so we don't recompute it.  */
      insert_operand_rank (e, (rank + 1));
      return (rank + 1);
    }

  /* Globals, etc, are rank 0.  */
  return 0;
}

DEF_VEC_P(operand_entry_t);
DEF_VEC_ALLOC_P(operand_entry_t, heap);

/* We want integer ones to end up last no matter what, since they are
   the ones we can do the most with.  */
#define INTEGER_CONST_TYPE (1 << 3)
#define FLOAT_CONST_TYPE (1 << 2)
#define OTHER_CONST_TYPE (1 << 1)

/* Classify an invariant tree into integer, float, or other, so that
   we can sort them to be near other constants of the same type.  */
static inline int
constant_type (tree t)
{
  if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
    return INTEGER_CONST_TYPE;
  else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (t)))
    return FLOAT_CONST_TYPE;
  else
    return OTHER_CONST_TYPE;
}

/* qsort comparison function to sort operand entries PA and PB by rank
   so that the sorted array is ordered by rank in decreasing order.  */
static int
sort_by_operand_rank (const void *pa, const void *pb)
{
  const operand_entry_t oea = *(const operand_entry_t *)pa;
  const operand_entry_t oeb = *(const operand_entry_t *)pb;

  /* It's nicer for optimize_ops_list if constants that are likely
     to fold when added/multiplied/whatever are put next to each
     other.  Since all constants have rank 0, order them by type.  */
  if (oeb->rank == 0 && oea->rank == 0)
    return constant_type (oeb->op) - constant_type (oea->op);

  /* Lastly, make sure the versions that are the same go next to each
     other.  We use SSA_NAME_VERSION because it's stable.  */
  if ((oeb->rank - oea->rank == 0)
      && TREE_CODE (oea->op) == SSA_NAME
      && TREE_CODE (oeb->op) == SSA_NAME)
    return SSA_NAME_VERSION (oeb->op) - SSA_NAME_VERSION (oea->op);

  return oeb->rank - oea->rank;
}
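
/* As an illustration, operand entries with ranks
   {a_5 (3), b_4 (2), c_2 (2), 7 (0), 3.0 (0)} sort to
   {a_5, b_4, c_2, 7, 3.0}: decreasing rank, equal ranks grouped by
   SSA version, and the rank-0 constants last, ordered by type so
   that foldable constants end up adjacent.  */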

/* Add an operand entry to *OPS for the tree operand OP.  */

static void
add_to_ops_vec (VEC(operand_entry_t, heap) **ops, tree op)
{
  operand_entry_t oe = pool_alloc (operand_entry_pool);

  oe->op = op;
  oe->rank = get_rank (op);
  VEC_safe_push (operand_entry_t, heap, *ops, oe);
}

/* Return true if STMT is a reassociable operation containing a binary
   operation with tree code CODE.  */

static bool
is_reassociable_op (tree stmt, enum tree_code code)
{
  if (!IS_EMPTY_STMT (stmt)
      && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
      && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == code
      && has_single_use (GIMPLE_STMT_OPERAND (stmt, 0)))
    return true;
  return false;
}

/* Given NAME, if NAME is defined by a unary operation OPCODE, return
   the operand of that operation.  Otherwise, return NULL_TREE.  */

static tree
get_unary_op (tree name, enum tree_code opcode)
{
  tree stmt = SSA_NAME_DEF_STMT (name);
  tree rhs;

  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
    return NULL_TREE;

  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
  if (TREE_CODE (rhs) == opcode)
    return TREE_OPERAND (rhs, 0);
  return NULL_TREE;
}

/* If CURR and LAST are a pair of ops that OPCODE allows us to
   eliminate through equivalences, do so, remove them from OPS, and
   return true.  Otherwise, return false.  */

static bool
eliminate_duplicate_pair (enum tree_code opcode,
			  VEC (operand_entry_t, heap) **ops,
			  bool *all_done,
			  unsigned int i,
			  operand_entry_t curr,
			  operand_entry_t last)
{
  /* If we have two of the same op, and the opcode is &, |, min, or
     max, we can eliminate one of them.
     If we have two of the same op, and the opcode is ^, we can
     eliminate both of them.  */

  if (last && last->op == curr->op)
    {
      switch (opcode)
	{
	case MAX_EXPR:
	case MIN_EXPR:
	case BIT_IOR_EXPR:
	case BIT_AND_EXPR:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Equivalence: ");
	      print_generic_expr (dump_file, curr->op, 0);
	      fprintf (dump_file, " [&|minmax] ");
	      print_generic_expr (dump_file, last->op, 0);
	      fprintf (dump_file, " -> ");
	      print_generic_stmt (dump_file, last->op, 0);
	    }

	  VEC_ordered_remove (operand_entry_t, *ops, i);
	  reassociate_stats.ops_eliminated++;

	  return true;

	case BIT_XOR_EXPR:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Equivalence: ");
	      print_generic_expr (dump_file, curr->op, 0);
	      fprintf (dump_file, " ^ ");
	      print_generic_expr (dump_file, last->op, 0);
	      fprintf (dump_file, " -> nothing\n");
	    }

	  reassociate_stats.ops_eliminated += 2;

	  if (VEC_length (operand_entry_t, *ops) == 2)
	    {
	      VEC_free (operand_entry_t, heap, *ops);
	      *ops = NULL;
	      add_to_ops_vec (ops, fold_convert (TREE_TYPE (last->op),
						 integer_zero_node));
	      *all_done = true;
	    }
	  else
	    {
	      VEC_ordered_remove (operand_entry_t, *ops, i - 1);
	      VEC_ordered_remove (operand_entry_t, *ops, i - 1);
	    }

	  return true;

	default:
	  break;
	}
    }
  return false;
}

/* If OPCODE is PLUS_EXPR and CURR->OP is really a negate expression,
   look in OPS for a corresponding positive operation to cancel it
   out.  If we find one, remove the other from OPS, replace
   OPS[CURRINDEX] with 0, and return true.  Otherwise, return
   false.  */

static bool
eliminate_plus_minus_pair (enum tree_code opcode,
			   VEC (operand_entry_t, heap) **ops,
			   unsigned int currindex,
			   operand_entry_t curr)
{
  tree negateop;
  unsigned int i;
  operand_entry_t oe;

  if (opcode != PLUS_EXPR || TREE_CODE (curr->op) != SSA_NAME)
    return false;

  negateop = get_unary_op (curr->op, NEGATE_EXPR);
  if (negateop == NULL_TREE)
    return false;

  /* Any non-negated version will have a rank that is one less than
     the current rank.  So once we hit those ranks, if we don't find
     one, we can stop.  */

  for (i = currindex + 1;
       VEC_iterate (operand_entry_t, *ops, i, oe)
       && oe->rank >= curr->rank - 1;
       i++)
    {
      if (oe->op == negateop)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Equivalence: ");
	      print_generic_expr (dump_file, negateop, 0);
	      fprintf (dump_file, " + -");
	      print_generic_expr (dump_file, oe->op, 0);
	      fprintf (dump_file, " -> 0\n");
	    }

	  VEC_ordered_remove (operand_entry_t, *ops, i);
	  add_to_ops_vec (ops, fold_convert (TREE_TYPE (oe->op),
					     integer_zero_node));
	  VEC_ordered_remove (operand_entry_t, *ops, currindex);
	  reassociate_stats.ops_eliminated++;

	  return true;
	}
    }

  return false;
}
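
/* As an illustration, given t_2 = -a_1 and the PLUS_EXPR operand list
   {t_2, a_1, b_3}, the t_2/a_1 pair cancels: both are removed and a
   constant 0 is appended, leaving {b_3, 0} for later constant
   elimination.  */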

/* If OPCODE is BIT_IOR_EXPR or BIT_AND_EXPR, and CURR->OP is really a
   bitwise not expression, look in OPS for a corresponding operand to
   cancel it out.  If we find one, the whole expression collapses to a
   single value (0 for BIT_AND_EXPR, all ones for BIT_IOR_EXPR);
   replace OPS with that value and return true.  Otherwise, return
   false.  */

static bool
eliminate_not_pairs (enum tree_code opcode,
		     VEC (operand_entry_t, heap) **ops,
		     unsigned int currindex,
		     operand_entry_t curr)
{
  tree notop;
  unsigned int i;
  operand_entry_t oe;

  if ((opcode != BIT_IOR_EXPR && opcode != BIT_AND_EXPR)
      || TREE_CODE (curr->op) != SSA_NAME)
    return false;

  notop = get_unary_op (curr->op, BIT_NOT_EXPR);
  if (notop == NULL_TREE)
    return false;

  /* Any non-not version will have a rank that is one less than
     the current rank.  So once we hit those ranks, if we don't find
     one, we can stop.  */

  for (i = currindex + 1;
       VEC_iterate (operand_entry_t, *ops, i, oe)
       && oe->rank >= curr->rank - 1;
       i++)
    {
      if (oe->op == notop)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Equivalence: ");
	      print_generic_expr (dump_file, notop, 0);
	      if (opcode == BIT_AND_EXPR)
		fprintf (dump_file, " & ~");
	      else if (opcode == BIT_IOR_EXPR)
		fprintf (dump_file, " | ~");
	      print_generic_expr (dump_file, oe->op, 0);
	      if (opcode == BIT_AND_EXPR)
		fprintf (dump_file, " -> 0\n");
	      else if (opcode == BIT_IOR_EXPR)
		fprintf (dump_file, " -> -1\n");
	    }

	  if (opcode == BIT_AND_EXPR)
	    oe->op = fold_convert (TREE_TYPE (oe->op), integer_zero_node);
	  else if (opcode == BIT_IOR_EXPR)
	    oe->op = build_low_bits_mask (TREE_TYPE (oe->op),
					  TYPE_PRECISION (TREE_TYPE (oe->op)));

	  reassociate_stats.ops_eliminated
	    += VEC_length (operand_entry_t, *ops) - 1;
	  VEC_free (operand_entry_t, heap, *ops);
	  *ops = NULL;
	  VEC_safe_push (operand_entry_t, heap, *ops, oe);
	  return true;
	}
    }

  return false;
}

/* Use constant value that may be present in OPS to try to eliminate
   operands.  Note that this function is only really used when we've
   eliminated ops for other reasons, or merged constants.  Across
   single statements, fold already does all of this, plus more.  There
   is little point in duplicating logic, so I've only included the
   identities that I could ever construct testcases to trigger.  */

static void
eliminate_using_constants (enum tree_code opcode,
			   VEC(operand_entry_t, heap) **ops)
{
  operand_entry_t oelast = VEC_last (operand_entry_t, *ops);

  if (oelast->rank == 0 && INTEGRAL_TYPE_P (TREE_TYPE (oelast->op)))
    {
      switch (opcode)
	{
	case BIT_AND_EXPR:
	  if (integer_zerop (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found & 0, removing all other ops\n");

		  reassociate_stats.ops_eliminated
		    += VEC_length (operand_entry_t, *ops) - 1;

		  VEC_free (operand_entry_t, heap, *ops);
		  *ops = NULL;
		  VEC_safe_push (operand_entry_t, heap, *ops, oelast);
		  return;
		}
	    }
	  else if (integer_all_onesp (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found & -1, removing\n");
		  VEC_pop (operand_entry_t, *ops);
		  reassociate_stats.ops_eliminated++;
		}
	    }
	  break;
	case BIT_IOR_EXPR:
	  if (integer_all_onesp (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found | -1, removing all other ops\n");

		  reassociate_stats.ops_eliminated
		    += VEC_length (operand_entry_t, *ops) - 1;

		  VEC_free (operand_entry_t, heap, *ops);
		  *ops = NULL;
		  VEC_safe_push (operand_entry_t, heap, *ops, oelast);
		  return;
		}
	    }
	  else if (integer_zerop (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found | 0, removing\n");
		  VEC_pop (operand_entry_t, *ops);
		  reassociate_stats.ops_eliminated++;
		}
	    }
	  break;
	case MULT_EXPR:
	  if (integer_zerop (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found * 0, removing all other ops\n");

		  reassociate_stats.ops_eliminated
		    += VEC_length (operand_entry_t, *ops) - 1;
		  VEC_free (operand_entry_t, heap, *ops);
		  *ops = NULL;
		  VEC_safe_push (operand_entry_t, heap, *ops, oelast);
		  return;
		}
	    }
	  else if (integer_onep (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found * 1, removing\n");
		  VEC_pop (operand_entry_t, *ops);
		  reassociate_stats.ops_eliminated++;
		  return;
		}
	    }
	  break;
	case BIT_XOR_EXPR:
	case PLUS_EXPR:
	case MINUS_EXPR:
	  if (integer_zerop (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found [|^+] 0, removing\n");
		  VEC_pop (operand_entry_t, *ops);
		  reassociate_stats.ops_eliminated++;
		  return;
		}
	    }
	  break;
	default:
	  break;
	}
    }
}

/* Perform various identities and other optimizations on the list of
   operand entries, stored in OPS.  The tree code for the binary
   operation between all the operands is OPCODE.  */

static void
optimize_ops_list (enum tree_code opcode,
		   VEC (operand_entry_t, heap) **ops)
{
  unsigned int length = VEC_length (operand_entry_t, *ops);
  unsigned int i;
  operand_entry_t oe;
  operand_entry_t oelast = NULL;
  bool iterate = false;

  if (length == 1)
    return;

  oelast = VEC_last (operand_entry_t, *ops);

  /* If the last two are constants, pop the constants off, merge them
     and try the next two.  */
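  /* For instance (illustrative), a PLUS_EXPR list {a_1, 3, 4} becomes
     {a_1, 7}, and on the recursive call no further merging applies.  */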
  if (oelast->rank == 0 && is_gimple_min_invariant (oelast->op))
    {
      operand_entry_t oelm1 = VEC_index (operand_entry_t, *ops, length - 2);

      if (oelm1->rank == 0
	  && is_gimple_min_invariant (oelm1->op)
	  && lang_hooks.types_compatible_p (TREE_TYPE (oelm1->op),
					    TREE_TYPE (oelast->op)))
	{
	  tree folded = fold_binary (opcode, TREE_TYPE (oelm1->op),
				     oelm1->op, oelast->op);

	  if (folded && is_gimple_min_invariant (folded))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "Merging constants\n");

	      VEC_pop (operand_entry_t, *ops);
	      VEC_pop (operand_entry_t, *ops);

	      add_to_ops_vec (ops, folded);
	      reassociate_stats.constants_eliminated++;

	      optimize_ops_list (opcode, ops);
	      return;
	    }
	}
    }

  eliminate_using_constants (opcode, ops);
  oelast = NULL;

  for (i = 0; VEC_iterate (operand_entry_t, *ops, i, oe);)
    {
      bool done = false;

      if (eliminate_not_pairs (opcode, ops, i, oe))
	return;
      if (eliminate_duplicate_pair (opcode, ops, &done, i, oe, oelast)
	  || (!done && eliminate_plus_minus_pair (opcode, ops, i, oe)))
	{
	  if (done)
	    return;
	  iterate = true;
	  oelast = NULL;
	  continue;
	}
      oelast = oe;
      i++;
    }

  length = VEC_length (operand_entry_t, *ops);
  oelast = VEC_last (operand_entry_t, *ops);

  if (iterate)
    optimize_ops_list (opcode, ops);
}

/* Return true if OPERAND is defined by a PHI node which uses the LHS
   of STMT in its operands.  This is also known as a "destructive
   update" operation.  */

static bool
is_phi_for_stmt (tree stmt, tree operand)
{
  tree def_stmt;
  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
  use_operand_p arg_p;
  ssa_op_iter i;

  if (TREE_CODE (operand) != SSA_NAME)
    return false;

  def_stmt = SSA_NAME_DEF_STMT (operand);
  if (TREE_CODE (def_stmt) != PHI_NODE)
    return false;

  FOR_EACH_PHI_ARG (arg_p, def_stmt, i, SSA_OP_USE)
    if (lhs == USE_FROM_PTR (arg_p))
      return true;
  return false;
}

/* Recursively rewrite our linearized statements so that the operators
   match those in OPS[OPINDEX], putting the computation in rank
   order.  */

static void
rewrite_expr_tree (tree stmt, unsigned int opindex,
		   VEC(operand_entry_t, heap) *ops)
{
  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
  operand_entry_t oe;

  /* If we have three operands left, then we want to make sure the
     ones that get the double binary op are the ones with the same
     rank.

     The alternative we try is to see if this is a destructive
     update style statement, which is like:
     b = phi (a, ...)
     a = c + b;
     In that case, we want to use the destructive update form to
     expose the possible vectorizer sum reduction opportunity.
     In that case, the third operand will be the phi node.

     We could, of course, try to be better as noted above, and do a
     lot of work to try to find these opportunities in >3 operand
     cases, but it is unlikely to be worth it.  */
  if (opindex + 3 == VEC_length (operand_entry_t, ops))
    {
      operand_entry_t oe1, oe2, oe3;

      oe1 = VEC_index (operand_entry_t, ops, opindex);
      oe2 = VEC_index (operand_entry_t, ops, opindex + 1);
      oe3 = VEC_index (operand_entry_t, ops, opindex + 2);

      if ((oe1->rank == oe2->rank
	   && oe2->rank != oe3->rank)
	  || (is_phi_for_stmt (stmt, oe3->op)
	      && !is_phi_for_stmt (stmt, oe1->op)
	      && !is_phi_for_stmt (stmt, oe2->op)))
	{
	  struct operand_entry temp = *oe3;
	  oe3->op = oe1->op;
	  oe3->rank = oe1->rank;
	  oe1->op = temp.op;
	  oe1->rank = temp.rank;
	}
    }

  /* The final recursion case for this function is that you have
     exactly two operations left.
     If we had exactly one op in the entire list to start with, we
     would have never called this function, and the tail recursion
     rewrites them one at a time.  */
  if (opindex + 2 == VEC_length (operand_entry_t, ops))
    {
      operand_entry_t oe1, oe2;

      oe1 = VEC_index (operand_entry_t, ops, opindex);
      oe2 = VEC_index (operand_entry_t, ops, opindex + 1);

      if (TREE_OPERAND (rhs, 0) != oe1->op
	  || TREE_OPERAND (rhs, 1) != oe2->op)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Transforming ");
	      print_generic_expr (dump_file, rhs, 0);
	    }

	  TREE_OPERAND (rhs, 0) = oe1->op;
	  TREE_OPERAND (rhs, 1) = oe2->op;
	  update_stmt (stmt);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " into ");
	      print_generic_stmt (dump_file, rhs, 0);
	    }
	}
      return;
    }

  /* If we hit here, we should have 3 or more ops left.  */
  gcc_assert (opindex + 2 < VEC_length (operand_entry_t, ops));

  /* Rewrite the next operator.  */
  oe = VEC_index (operand_entry_t, ops, opindex);

  if (oe->op != TREE_OPERAND (rhs, 1))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Transforming ");
	  print_generic_expr (dump_file, rhs, 0);
	}

      TREE_OPERAND (rhs, 1) = oe->op;
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, " into ");
	  print_generic_stmt (dump_file, rhs, 0);
	}
    }
  /* Recurse on the LHS of the binary operator, which is guaranteed to
     be the non-leaf side.  */
  rewrite_expr_tree (SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0)),
		     opindex + 1, ops);
}

/* Transform STMT, which is really (A + B) + (C + D) into the left
   linear form, ((A+B)+C)+D.
   Recurse on D if necessary.  */

static void
linearize_expr (tree stmt)
{
  block_stmt_iterator bsinow, bsirhs;
  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
  enum tree_code rhscode = TREE_CODE (rhs);
  tree binrhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));
  tree binlhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
  tree newbinrhs = NULL_TREE;

  gcc_assert (is_reassociable_op (binlhs, TREE_CODE (rhs))
	      && is_reassociable_op (binrhs, TREE_CODE (rhs)));

  bsinow = bsi_for_stmt (stmt);
  bsirhs = bsi_for_stmt (binrhs);
  bsi_move_before (&bsirhs, &bsinow);

  TREE_OPERAND (rhs, 1) = TREE_OPERAND (GIMPLE_STMT_OPERAND (binrhs, 1), 0);
  if (TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME)
    newbinrhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));
  TREE_OPERAND (GIMPLE_STMT_OPERAND (binrhs, 1), 0)
    = GIMPLE_STMT_OPERAND (binlhs, 0);
  TREE_OPERAND (rhs, 0) = GIMPLE_STMT_OPERAND (binrhs, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Linearized: ");
      print_generic_stmt (dump_file, rhs, 0);
    }

  reassociate_stats.linearized++;
  update_stmt (binrhs);
  update_stmt (binlhs);
  update_stmt (stmt);
  TREE_VISITED (binrhs) = 1;
  TREE_VISITED (binlhs) = 1;
  TREE_VISITED (stmt) = 1;

  /* Tail recurse on the new rhs if it still needs reassociation.  */
  if (newbinrhs && is_reassociable_op (newbinrhs, rhscode))
    linearize_expr (stmt);
}

/* If LHS has a single immediate use that is a GIMPLE_MODIFY_STMT, return
   it.  Otherwise, return NULL.  */

static tree
get_single_immediate_use (tree lhs)
{
  use_operand_p immuse;
  tree immusestmt;

  if (TREE_CODE (lhs) == SSA_NAME
      && single_imm_use (lhs, &immuse, &immusestmt))
    {
      if (TREE_CODE (immusestmt) == RETURN_EXPR)
	immusestmt = TREE_OPERAND (immusestmt, 0);
      if (TREE_CODE (immusestmt) == GIMPLE_MODIFY_STMT)
	return immusestmt;
    }
  return NULL_TREE;
}

static VEC(tree, heap) *broken_up_subtracts;

/* Recursively negate the value of TONEGATE, and return the SSA_NAME
   representing the negated value.  Insertions of any necessary
   instructions go before BSI.
   This function is recursive in that, if you hand it "a_5" as the
   value to negate, and a_5 is defined by "a_5 = b_3 + b_4", it will
   transform b_3 + b_4 into a_5 = -b_3 + -b_4.  */

static tree
negate_value (tree tonegate, block_stmt_iterator *bsi)
{
  tree negatedef = tonegate;
  tree resultofnegate;

  if (TREE_CODE (tonegate) == SSA_NAME)
    negatedef = SSA_NAME_DEF_STMT (tonegate);

  /* If we are trying to negate a name, defined by an add, negate the
     add operands instead.  */
  if (TREE_CODE (tonegate) == SSA_NAME
      && TREE_CODE (negatedef) == GIMPLE_MODIFY_STMT
      && TREE_CODE (GIMPLE_STMT_OPERAND (negatedef, 0)) == SSA_NAME
      && has_single_use (GIMPLE_STMT_OPERAND (negatedef, 0))
      && TREE_CODE (GIMPLE_STMT_OPERAND (negatedef, 1)) == PLUS_EXPR)
    {
      block_stmt_iterator bsi;
      tree binop = GIMPLE_STMT_OPERAND (negatedef, 1);

      bsi = bsi_for_stmt (negatedef);
      TREE_OPERAND (binop, 0) = negate_value (TREE_OPERAND (binop, 0),
					      &bsi);
      bsi = bsi_for_stmt (negatedef);
      TREE_OPERAND (binop, 1) = negate_value (TREE_OPERAND (binop, 1),
					      &bsi);
      update_stmt (negatedef);
      return GIMPLE_STMT_OPERAND (negatedef, 0);
    }

  tonegate = fold_build1 (NEGATE_EXPR, TREE_TYPE (tonegate), tonegate);
  resultofnegate = force_gimple_operand_bsi (bsi, tonegate, true,
					     NULL_TREE);
  VEC_safe_push (tree, heap, broken_up_subtracts, resultofnegate);
  return resultofnegate;
}

/* Return true if we should break up the subtract in STMT into an add
   with negate.  This is true when the subtract operands are really
   adds, or the subtract itself is used in an add expression.  In
   either case, breaking up the subtract into an add with negate
   exposes the adds to reassociation.  */

static bool
should_break_up_subtract (tree stmt)
{
  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
  tree binlhs = TREE_OPERAND (rhs, 0);
  tree binrhs = TREE_OPERAND (rhs, 1);
  tree immusestmt;

  if (TREE_CODE (binlhs) == SSA_NAME
      && is_reassociable_op (SSA_NAME_DEF_STMT (binlhs), PLUS_EXPR))
    return true;

  if (TREE_CODE (binrhs) == SSA_NAME
      && is_reassociable_op (SSA_NAME_DEF_STMT (binrhs), PLUS_EXPR))
    return true;

  if (TREE_CODE (lhs) == SSA_NAME
      && (immusestmt = get_single_immediate_use (lhs))
      && TREE_CODE (GIMPLE_STMT_OPERAND (immusestmt, 1)) == PLUS_EXPR)
    return true;
  return false;
}

/* Transform STMT from A - B into A + -B.  */

static void
break_up_subtract (tree stmt, block_stmt_iterator *bsi)
{
  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Breaking up subtract ");
      print_generic_stmt (dump_file, stmt, 0);
    }

  TREE_SET_CODE (GIMPLE_STMT_OPERAND (stmt, 1), PLUS_EXPR);
  TREE_OPERAND (rhs, 1) = negate_value (TREE_OPERAND (rhs, 1), bsi);

  update_stmt (stmt);
}

/* Recursively linearize a binary expression that is the RHS of STMT.
   Place the operands of the expression tree in the vector named OPS.  */
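
/* For example (illustrative), for x_5 = (a_1 + b_2) + (c_3 + d_4) the
   RHS is first rewritten into the left-linear form
   ((a_1 + b_2) + c_3) + d_4, after which OPS holds
   {a_1, b_2, c_3, d_4}.  */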

static void
linearize_expr_tree (VEC(operand_entry_t, heap) **ops, tree stmt)
{
  block_stmt_iterator bsinow, bsilhs;
  tree rhs = GENERIC_TREE_OPERAND (stmt, 1);
  tree binrhs = TREE_OPERAND (rhs, 1);
  tree binlhs = TREE_OPERAND (rhs, 0);
  tree binlhsdef, binrhsdef;
  bool binlhsisreassoc = false;
  bool binrhsisreassoc = false;
  enum tree_code rhscode = TREE_CODE (rhs);

  TREE_VISITED (stmt) = 1;

  if (TREE_CODE (binlhs) == SSA_NAME)
    {
      binlhsdef = SSA_NAME_DEF_STMT (binlhs);
      binlhsisreassoc = is_reassociable_op (binlhsdef, rhscode);
    }

  if (TREE_CODE (binrhs) == SSA_NAME)
    {
      binrhsdef = SSA_NAME_DEF_STMT (binrhs);
      binrhsisreassoc = is_reassociable_op (binrhsdef, rhscode);
    }

  /* If the LHS is not reassociable, but the RHS is, we need to swap
     them.  If neither is reassociable, there is nothing we can do, so
     just put them in the ops vector.  If the LHS is reassociable,
     linearize it.  If both are reassociable, then linearize the RHS
     and the LHS.  */

  if (!binlhsisreassoc)
    {
      tree temp;

      if (!binrhsisreassoc)
	{
	  add_to_ops_vec (ops, binrhs);
	  add_to_ops_vec (ops, binlhs);
	  return;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "swapping operands of ");
	  print_generic_expr (dump_file, stmt, 0);
	}

      swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
			  &TREE_OPERAND (rhs, 1));
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, " is now ");
	  print_generic_stmt (dump_file, stmt, 0);
	}

      /* We want to make it so the lhs is always the reassociative op,
	 so swap.  */
      temp = binlhs;
      binlhs = binrhs;
      binrhs = temp;
    }
  else if (binrhsisreassoc)
    {
      linearize_expr (stmt);
      gcc_assert (rhs == GIMPLE_STMT_OPERAND (stmt, 1));
      binlhs = TREE_OPERAND (rhs, 0);
      binrhs = TREE_OPERAND (rhs, 1);
    }

  gcc_assert (TREE_CODE (binrhs) != SSA_NAME
	      || !is_reassociable_op (SSA_NAME_DEF_STMT (binrhs), rhscode));
  bsinow = bsi_for_stmt (stmt);
  bsilhs = bsi_for_stmt (SSA_NAME_DEF_STMT (binlhs));
  bsi_move_before (&bsilhs, &bsinow);
  linearize_expr_tree (ops, SSA_NAME_DEF_STMT (binlhs));
  add_to_ops_vec (ops, binrhs);
}

/* Repropagate the negates back into subtracts, since no other pass
   currently does it.  */
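
/* For example (illustrative), if break_up_subtract produced
   t_3 = -b_1 and its only use is x_4 = a_2 + t_3, we rewrite that use
   as x_4 = a_2 - b_1.  */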

static void
repropagate_negates (void)
{
  unsigned int i = 0;
  tree negate;

  for (i = 0; VEC_iterate (tree, broken_up_subtracts, i, negate); i++)
    {
      tree user = get_single_immediate_use (negate);

      /* The negate operand can be either operand of a PLUS_EXPR
	 (it can be the LHS if the RHS is a constant for example).

	 Force the negate operand to the RHS of the PLUS_EXPR, then
	 transform the PLUS_EXPR into a MINUS_EXPR.  */
      if (user
	  && TREE_CODE (user) == GIMPLE_MODIFY_STMT
	  && TREE_CODE (GIMPLE_STMT_OPERAND (user, 1)) == PLUS_EXPR)
	{
	  tree rhs = GIMPLE_STMT_OPERAND (user, 1);

	  /* If the negated operand appears on the LHS of the
	     PLUS_EXPR, exchange the operands of the PLUS_EXPR
	     to force the negated operand to the RHS of the PLUS_EXPR.  */
	  if (TREE_OPERAND (GIMPLE_STMT_OPERAND (user, 1), 0) == negate)
	    {
	      tree temp = TREE_OPERAND (rhs, 0);
	      TREE_OPERAND (rhs, 0) = TREE_OPERAND (rhs, 1);
	      TREE_OPERAND (rhs, 1) = temp;
	    }

	  /* Now transform the PLUS_EXPR into a MINUS_EXPR and replace
	     the RHS of the PLUS_EXPR with the operand of the NEGATE_EXPR.  */
	  if (TREE_OPERAND (GIMPLE_STMT_OPERAND (user, 1), 1) == negate)
	    {
	      TREE_SET_CODE (rhs, MINUS_EXPR);
	      TREE_OPERAND (rhs, 1) = get_unary_op (negate, NEGATE_EXPR);
	      update_stmt (user);
	    }
	}
    }
}

/* Break up subtract operations in block BB.

   We do this top down because we don't know whether the subtract is
   part of a possible chain of reassociation except at the top.

   IE given
   d = f + g
   c = a + e
   b = c - d
   q = b - r
   k = t - q

   we want to break up k = t - q, but we won't until we've transformed q
   = b - r, which won't be broken up until we transform b = c - d.  */

static void
break_up_subtract_bb (basic_block bb)
{
  block_stmt_iterator bsi;
  basic_block son;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);

      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
	{
	  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
	  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

	  TREE_VISITED (stmt) = 0;
	  /* Reassociation is only valid for integral types, or for
	     floating-point types when unsafe math optimizations are
	     enabled.  */
	  if ((!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	       || !INTEGRAL_TYPE_P (TREE_TYPE (rhs)))
	      && (!SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs))
		  || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs))
		  || !flag_unsafe_math_optimizations))
	    continue;

	  /* Check for a subtract used only in an addition.  If this
	     is the case, transform it into add of a negate for better
	     reassociation.  IE transform C = A-B into C = A + -B if C
	     is only used in an addition.  */
	  if (TREE_CODE (rhs) == MINUS_EXPR)
	    if (should_break_up_subtract (stmt))
	      break_up_subtract (stmt, &bsi);
	}
    }
  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    break_up_subtract_bb (son);
}

/* Reassociate expressions in basic block BB and its children in the
   post-dominator tree.  */

static void
reassociate_bb (basic_block bb)
{
  block_stmt_iterator bsi;
  basic_block son;

  for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
    {
      tree stmt = bsi_stmt (bsi);

      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
	{
	  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
	  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

	  /* If this was part of an already processed tree, we don't
	     need to touch it again.  */
	  if (TREE_VISITED (stmt))
	    continue;

	  /* Reassociation is only valid for integral types, or for
	     floating-point types when unsafe math optimizations are
	     enabled.  */
	  if ((!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	       || !INTEGRAL_TYPE_P (TREE_TYPE (rhs)))
	      && (!SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs))
		  || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs))
		  || !flag_unsafe_math_optimizations))
	    continue;

	  if (associative_tree_code (TREE_CODE (rhs)))
	    {
	      VEC(operand_entry_t, heap) *ops = NULL;

	      /* There may be no immediate uses left by the time we
		 get here because we may have eliminated them all.  */
	      if (TREE_CODE (lhs) == SSA_NAME && has_zero_uses (lhs))
		continue;

	      TREE_VISITED (stmt) = 1;
	      linearize_expr_tree (&ops, stmt);
	      qsort (VEC_address (operand_entry_t, ops),
		     VEC_length (operand_entry_t, ops),
		     sizeof (operand_entry_t),
		     sort_by_operand_rank);
	      optimize_ops_list (TREE_CODE (rhs), &ops);

	      if (VEC_length (operand_entry_t, ops) == 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Transforming ");
		      print_generic_expr (dump_file, rhs, 0);
		    }
		  GIMPLE_STMT_OPERAND (stmt, 1)
		    = VEC_last (operand_entry_t, ops)->op;
		  update_stmt (stmt);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, " into ");
		      print_generic_stmt (dump_file,
					  GIMPLE_STMT_OPERAND (stmt, 1), 0);
		    }
		}
	      else
		{
		  rewrite_expr_tree (stmt, 0, ops);
		}

	      VEC_free (operand_entry_t, heap, ops);
	    }
	}
    }
  for (son = first_dom_son (CDI_POST_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_POST_DOMINATORS, son))
    reassociate_bb (son);
}

void dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops);
void debug_ops_vector (VEC (operand_entry_t, heap) *ops);

/* Dump the operand entry vector OPS to FILE.  */

void
dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops)
{
  operand_entry_t oe;
  unsigned int i;

  for (i = 0; VEC_iterate (operand_entry_t, ops, i, oe); i++)
    {
      fprintf (file, "Op %d -> rank: %d, tree: ", i, oe->rank);
      print_generic_stmt (file, oe->op, 0);
    }
}

/* Dump the operand entry vector OPS to STDERR.  */

void
debug_ops_vector (VEC (operand_entry_t, heap) *ops)
{
  dump_ops_vector (stderr, ops);
}

static void
do_reassoc (void)
{
  break_up_subtract_bb (ENTRY_BLOCK_PTR);
  reassociate_bb (EXIT_BLOCK_PTR);
}

/* Initialize the reassociation pass.  */

static void
init_reassoc (void)
{
  int i;
  long rank = 2;
  tree param;
  int *bbs = XNEWVEC (int, last_basic_block + 1);

  memset (&reassociate_stats, 0, sizeof (reassociate_stats));

  operand_entry_pool = create_alloc_pool ("operand entry pool",
					  sizeof (struct operand_entry), 30);

  /* Reverse RPO (Reverse Post Order) will give us something where
     deeper loops come later.  */
  pre_and_rev_post_order_compute (NULL, bbs, false);
  bb_rank = XCNEWVEC (long, last_basic_block + 1);
  operand_rank = pointer_map_create ();

  /* Give each argument a distinct rank.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
	{
	  tree def = gimple_default_def (cfun, param);
	  insert_operand_rank (def, ++rank);
	}
    }

  /* Give the chain decl a distinct rank.  */
  if (cfun->static_chain_decl != NULL)
    {
      tree def = gimple_default_def (cfun, cfun->static_chain_decl);
      if (def != NULL)
	insert_operand_rank (def, ++rank);
    }

  /* Set up rank for each BB.  */
  for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
    bb_rank[bbs[i]] = ++rank << 16;
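
  /* The shift by 16 is, as far as I can tell, headroom: it leaves the
     low bits of each block's base rank free, so the ranks that
     get_rank hands out to expressions within a block cannot run into
     the base rank of a later block.  */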

  free (bbs);
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);
  broken_up_subtracts = NULL;
}

/* Cleanup after the reassociation pass, and print stats if
   requested.  */

static void
fini_reassoc (void)
{
  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Reassociation stats:\n");
      fprintf (dump_file, "Linearized: %d\n",
	       reassociate_stats.linearized);
      fprintf (dump_file, "Constants eliminated: %d\n",
	       reassociate_stats.constants_eliminated);
      fprintf (dump_file, "Ops eliminated: %d\n",
	       reassociate_stats.ops_eliminated);
      fprintf (dump_file, "Statements rewritten: %d\n",
	       reassociate_stats.rewritten);
    }

  pointer_map_destroy (operand_rank);
  free_alloc_pool (operand_entry_pool);
  free (bb_rank);
  VEC_free (tree, heap, broken_up_subtracts);
  free_dominance_info (CDI_POST_DOMINATORS);
}

/* Gate and execute functions for Reassociation.  */

static unsigned int
execute_reassoc (void)
{
  init_reassoc ();

  do_reassoc ();
  repropagate_negates ();

  fini_reassoc ();
  return 0;
}

struct tree_opt_pass pass_reassoc =
{
  "reassoc",				/* name */
  NULL,					/* gate */
  execute_reassoc,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_REASSOC,			/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
  0					/* letter */
};