Merge from mainline
[official-gcc.git] / gcc / tree-ssa-reassoc.c
blobc36293f6937d7a3f0293314ab8897223f56bc10d
1 /* Reassociation for trees.
2 Copyright (C) 2005 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "errors.h"
27 #include "ggc.h"
28 #include "tree.h"
29 #include "basic-block.h"
30 #include "diagnostic.h"
31 #include "tree-inline.h"
32 #include "tree-flow.h"
33 #include "tree-gimple.h"
34 #include "tree-dump.h"
35 #include "timevar.h"
36 #include "tree-iterator.h"
37 #include "tree-pass.h"
38 #include "alloc-pool.h"
39 #include "vec.h"
40 #include "langhooks.h"
42 /* This is a simple global reassociation pass. It is, in part, based
43 on the LLVM pass of the same name (They do some things more/less
44 than we do, in different orders, etc).
46 It consists of five steps:
48 1. Breaking up subtract operations into addition + negate, where
49 it would promote the reassociation of adds.
51 2. Left linearization of the expression trees, so that (A+B)+(C+D)
52 becomes (((A+B)+C)+D), which is easier for us to rewrite later.
53 During linearization, we place the operands of the binary
54 expressions into a vector of operand_entry_t
56 3. Optimization of the operand lists, eliminating things like a +
57 -a, a & a, etc.
59 4. Rewrite the expression trees we linearized and optimized so
60 they are in proper rank order.
62 5. Repropagate negates, as nothing else will clean it up ATM.
64 A bit of theory on #4, since nobody seems to write anything down
65 about why it makes sense to do it the way they do it:
67 We could do this much nicer theoretically, but don't (for reasons
68 explained after how to do it theoretically nice :P).
70 In order to promote the most redundancy elimination, you want
71 binary expressions whose operands are the same rank (or
72 preferably, the same value) exposed to the redundancy eliminator,
73 for possible elimination.
75 So the way to do this if we really cared, is to build the new op
76 tree from the leaves to the roots, merging as you go, and putting the
77 new op on the end of the worklist, until you are left with one
78 thing on the worklist.
80 IE if you have to rewrite the following set of operands (listed with
81 rank in parentheses), with opcode PLUS_EXPR:
83 a (1), b (1), c (1), d (2), e (2)
86 We start with our merge worklist empty, and the ops list with all of
87 those on it.
89 You want to first merge all leaves of the same rank, as much as
90 possible.
92 So first build a binary op of
94 mergetmp = a + b, and put "mergetmp" on the merge worklist.
96 Because there is no three operand form of PLUS_EXPR, c is not going to
97 be exposed to redundancy elimination as a rank 1 operand.
99 So you might as well throw it on the merge worklist (you could also
100 consider it to now be a rank two operand, and merge it with d and e,
101 but in this case, you then have evicted e from a binary op. So at
102 least in this situation, you can't win.)
104 Then build a binary op of d + e
105 mergetmp2 = d + e
107 and put mergetmp2 on the merge worklist.
109 so merge worklist = {mergetmp, c, mergetmp2}
111 Continue building binary ops of these operations until you have only
112 one operation left on the worklist.
114 So we have
116 build binary op
117 mergetmp3 = mergetmp + c
119 worklist = {mergetmp2, mergetmp3}
121 mergetmp4 = mergetmp2 + mergetmp3
123 worklist = {mergetmp4}
125 because we have one operation left, we can now just set the original
126 statement equal to the result of that operation.
128 This will at least expose a + b and d + e to redundancy elimination
129 as binary operations.
131 For extra points, you can reuse the old statements to build the
132 mergetmps, since you shouldn't run out.
134 So why don't we do this?
136 Because it's expensive, and rarely will help. Most trees we are
137 reassociating have 3 or less ops. If they have 2 ops, they already
138 will be written into a nice single binary op. If you have 3 ops, a
139 single simple check suffices to tell you whether the first two are of the
140 same rank. If so, you know to order it
142 mergetmp = op1 + op2
143 newstmt = mergetmp + op3
145 instead of
146 mergetmp = op2 + op3
147 newstmt = mergetmp + op1
149 If all three are of the same rank, you can't expose them all in a
150 single binary operator anyway, so the above is *still* the best you
151 can do.
153 Thus, this is what we do. When we have three ops left, we check to see
154 what order to put them in, and call it a day. As a nod to vector sum
155 reduction, we check if any of the ops are really a phi node that is a
156 destructive update for the associating op, and keep the destructive
157 update together for vector sum reduction recognition. */
/* Pass-wide statistics, accumulated as we go and dumped when the
   pass finishes.  */
static struct
{
  int linearized;		/* Expression trees flattened to left-linear form.  */
  int constants_eliminated;	/* Constant pairs folded together.  */
  int ops_eliminated;		/* Operands removed through identities.  */
  int rewritten;		/* Statements rewritten into rank order.  */
} reassociate_stats;
169 /* Operator, rank pair. */
170 typedef struct operand_entry
172 unsigned int rank;
173 tree op;
174 } *operand_entry_t;
176 static alloc_pool operand_entry_pool;
179 /* Starting rank number for a given basic block, so that we can rank
180 operations using unmovable instructions in that BB based on the bb
181 depth. */
182 static unsigned int *bb_rank;
184 /* Operand->rank hashtable. */
185 static htab_t operand_rank;
188 /* Look up the operand rank structure for expression E. */
190 static operand_entry_t
191 find_operand_rank (tree e)
193 void **slot;
194 struct operand_entry vrd;
196 vrd.op = e;
197 slot = htab_find_slot (operand_rank, &vrd, NO_INSERT);
198 if (!slot)
199 return NULL;
200 return ((operand_entry_t) *slot);
203 /* Insert {E,RANK} into the operand rank hashtable. */
205 static void
206 insert_operand_rank (tree e, unsigned int rank)
208 void **slot;
209 operand_entry_t new_pair = pool_alloc (operand_entry_pool);
211 new_pair->op = e;
212 new_pair->rank = rank;
213 slot = htab_find_slot (operand_rank, new_pair, INSERT);
214 gcc_assert (*slot == NULL);
215 *slot = new_pair;
218 /* Return the hash value for a operand rank structure */
220 static hashval_t
221 operand_entry_hash (const void *p)
223 const operand_entry_t vr = (operand_entry_t) p;
224 return iterative_hash_expr (vr->op, 0);
227 /* Return true if two operand rank structures are equal. */
229 static int
230 operand_entry_eq (const void *p1, const void *p2)
232 const operand_entry_t vr1 = (operand_entry_t) p1;
233 const operand_entry_t vr2 = (operand_entry_t) p2;
234 return vr1->op == vr2->op;
237 /* Given an expression E, return the rank of the expression. */
239 static unsigned int
240 get_rank (tree e)
242 operand_entry_t vr;
244 /* Constants have rank 0. */
245 if (is_gimple_min_invariant (e))
246 return 0;
248 /* SSA_NAME's have the rank of the expression they are the result
250 For globals and uninitialized values, the rank is 0.
251 For function arguments, use the pre-setup rank.
252 For PHI nodes, stores, asm statements, etc, we use the rank of
253 the BB.
254 For simple operations, the rank is the maximum rank of any of
255 its operands, or the bb_rank, whichever is less.
256 I make no claims that this is optimal, however, it gives good
257 results. */
259 if (TREE_CODE (e) == SSA_NAME)
261 tree stmt;
262 tree rhs;
263 unsigned int rank, maxrank;
264 int i;
266 if (TREE_CODE (SSA_NAME_VAR (e)) == PARM_DECL
267 && e == default_def (SSA_NAME_VAR (e)))
268 return find_operand_rank (e)->rank;
270 stmt = SSA_NAME_DEF_STMT (e);
271 if (bb_for_stmt (stmt) == NULL)
272 return 0;
274 if (TREE_CODE (stmt) != MODIFY_EXPR
275 || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
276 return bb_rank[bb_for_stmt (stmt)->index];
278 /* If we already have a rank for this expression, use that. */
279 vr = find_operand_rank (e);
280 if (vr)
281 return vr->rank;
283 /* Otherwise, find the maximum rank for the operands, or the bb
284 rank, whichever is less. */
285 rank = 0;
286 maxrank = bb_rank[bb_for_stmt(stmt)->index];
287 rhs = TREE_OPERAND (stmt, 1);
288 if (TREE_CODE_LENGTH (TREE_CODE (rhs)) == 0)
289 rank = MAX (rank, get_rank (rhs));
290 else
292 for (i = 0;
293 i < TREE_CODE_LENGTH (TREE_CODE (rhs))
294 && TREE_OPERAND (rhs, i)
295 && rank != maxrank;
296 i++)
297 rank = MAX(rank, get_rank (TREE_OPERAND (rhs, i)));
300 if (dump_file && (dump_flags & TDF_DETAILS))
302 fprintf (dump_file, "Rank for ");
303 print_generic_expr (dump_file, e, 0);
304 fprintf (dump_file, " is %d\n", (rank + 1));
307 /* Note the rank in the hashtable so we don't recompute it. */
308 insert_operand_rank (e, (rank + 1));
309 return (rank + 1);
312 /* Globals, etc, are rank 0 */
313 return 0;
316 DEF_VEC_P(operand_entry_t);
317 DEF_VEC_ALLOC_P(operand_entry_t, heap);
/* We want integer ones to end up last no matter what, since they are
   the ones we can do the most with.  The values are distinct bit
   flags, ordered so constant_type () differences sort integers after
   floats after everything else.  Bodies are parenthesized so the
   macros expand safely inside larger expressions.  */
#define INTEGER_CONST_TYPE (1 << 3)
#define FLOAT_CONST_TYPE (1 << 2)
#define OTHER_CONST_TYPE (1 << 1)
325 /* Classify an invariant tree into integer, float, or other, so that
326 we can sort them to be near other constants of the same type. */
327 static inline int
328 constant_type (tree t)
330 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
331 return INTEGER_CONST_TYPE;
332 else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (t)))
333 return FLOAT_CONST_TYPE;
334 else
335 return OTHER_CONST_TYPE;
338 /* qsort comparison function to sort operand entries PA and PB by rank
339 so that the sorted array is ordered by rank in decreasing order. */
340 static int
341 sort_by_operand_rank (const void *pa, const void *pb)
343 const operand_entry_t oea = *(const operand_entry_t *)pa;
344 const operand_entry_t oeb = *(const operand_entry_t *)pb;
346 /* It's nicer for optimize_expression if constants that are likely
347 to fold when added/multiplied//whatever are put next to each
348 other. Since all constants have rank 0, order them by type. */
349 if (oeb->rank == 0 && oea->rank == 0)
350 return constant_type (oeb->op) - constant_type (oea->op);
352 /* Lastly, make sure the versions that are the same go next to each
353 other. We use SSA_NAME_VERSION because it's stable. */
354 if ((oeb->rank - oea->rank == 0)
355 && TREE_CODE (oea->op) == SSA_NAME
356 && TREE_CODE (oeb->op) == SSA_NAME)
357 return SSA_NAME_VERSION (oeb->op) - SSA_NAME_VERSION (oea->op);
359 return oeb->rank - oea->rank;
362 /* Add an operand entry to *OPS for the tree operand OP. */
364 static void
365 add_to_ops_vec (VEC(operand_entry_t, heap) **ops, tree op)
367 operand_entry_t oe = pool_alloc (operand_entry_pool);
369 oe->op = op;
370 oe->rank = get_rank (op);
371 VEC_safe_push (operand_entry_t, heap, *ops, oe);
374 /* Return true if STMT is reassociable operation containing a binary
375 operation with tree code CODE. */
377 static bool
378 is_reassociable_op (tree stmt, enum tree_code code)
380 if (!IS_EMPTY_STMT (stmt)
381 && TREE_CODE (stmt) == MODIFY_EXPR
382 && TREE_CODE (TREE_OPERAND (stmt, 1)) == code
383 && has_single_use (TREE_OPERAND (stmt, 0)))
384 return true;
385 return false;
389 /* Given NAME, if NAME is defined by a unary operation OPCODE, return the
390 operand of the negate operation. Otherwise, return NULL. */
392 static tree
393 get_unary_op (tree name, enum tree_code opcode)
395 tree stmt = SSA_NAME_DEF_STMT (name);
396 tree rhs;
398 if (TREE_CODE (stmt) != MODIFY_EXPR)
399 return NULL_TREE;
401 rhs = TREE_OPERAND (stmt, 1);
402 if (TREE_CODE (rhs) == opcode)
403 return TREE_OPERAND (rhs, 0);
404 return NULL_TREE;
407 /* If CURR and LAST are a pair of ops that OPCODE allows us to
408 eliminate through equivalences, do so, remove them from OPS, and
409 return true. Otherwise, return false. */
411 static bool
412 eliminate_duplicate_pair (enum tree_code opcode,
413 VEC (operand_entry_t, heap) **ops,
414 bool *all_done,
415 unsigned int i,
416 operand_entry_t curr,
417 operand_entry_t last)
420 /* If we have two of the same op, and the opcode is & or |, we can
421 eliminate one of them.
422 If we have two of the same op, and the opcode is ^, we can
423 eliminate both of them. */
425 if (last && last->op == curr->op)
427 switch (opcode)
429 case BIT_IOR_EXPR:
430 case BIT_AND_EXPR:
431 if (dump_file && (dump_flags & TDF_DETAILS))
433 fprintf (dump_file, "Equivalence: ");
434 print_generic_expr (dump_file, curr->op, 0);
435 fprintf (dump_file, " [&|] ");
436 print_generic_expr (dump_file, last->op, 0);
437 fprintf (dump_file, " -> ");
438 print_generic_stmt (dump_file, last->op, 0);
441 VEC_ordered_remove (operand_entry_t, *ops, i);
442 reassociate_stats.ops_eliminated ++;
444 return true;
446 case BIT_XOR_EXPR:
447 if (dump_file && (dump_flags & TDF_DETAILS))
449 fprintf (dump_file, "Equivalence: ");
450 print_generic_expr (dump_file, curr->op, 0);
451 fprintf (dump_file, " ^ ");
452 print_generic_expr (dump_file, last->op, 0);
453 fprintf (dump_file, " -> nothing\n");
456 reassociate_stats.ops_eliminated += 2;
458 if (VEC_length (operand_entry_t, *ops) == 2)
460 VEC_free (operand_entry_t, heap, *ops);
461 *ops = NULL;
462 add_to_ops_vec (ops, fold_convert (TREE_TYPE (last->op),
463 integer_zero_node));
464 *all_done = true;
466 else
468 VEC_ordered_remove (operand_entry_t, *ops, i-1);
469 VEC_ordered_remove (operand_entry_t, *ops, i-1);
472 return true;
474 default:
475 break;
478 return false;
481 /* If OPCODE is PLUS_EXPR, CURR->OP is really a negate expression,
482 look in OPS for a corresponding positive operation to cancel it
483 out. If we find one, remove the other from OPS, replace
484 OPS[CURRINDEX] with 0, and return true. Otherwise, return
485 false. */
487 static bool
488 eliminate_plus_minus_pair (enum tree_code opcode,
489 VEC (operand_entry_t, heap) **ops,
490 unsigned int currindex,
491 operand_entry_t curr)
493 tree negateop;
494 unsigned int i;
495 operand_entry_t oe;
497 if (opcode != PLUS_EXPR || TREE_CODE (curr->op) != SSA_NAME)
498 return false;
500 negateop = get_unary_op (curr->op, NEGATE_EXPR);
501 if (negateop == NULL_TREE)
502 return false;
504 /* Any non-negated version will have a rank that is one less than
505 the current rank. So once we hit those ranks, if we don't find
506 one, we can stop. */
508 for (i = currindex + 1;
509 VEC_iterate (operand_entry_t, *ops, i, oe)
510 && oe->rank >= curr->rank - 1 ;
511 i++)
513 if (oe->op == negateop)
516 if (dump_file && (dump_flags & TDF_DETAILS))
518 fprintf (dump_file, "Equivalence: ");
519 print_generic_expr (dump_file, negateop, 0);
520 fprintf (dump_file, " + -");
521 print_generic_expr (dump_file, oe->op, 0);
522 fprintf (dump_file, " -> 0\n");
525 VEC_ordered_remove (operand_entry_t, *ops, i);
526 add_to_ops_vec (ops, fold_convert(TREE_TYPE (oe->op),
527 integer_zero_node));
528 VEC_ordered_remove (operand_entry_t, *ops, currindex);
529 reassociate_stats.ops_eliminated ++;
531 return true;
535 return false;
538 /* If OPCODE is BIT_IOR_EXPR, BIT_AND_EXPR, and, CURR->OP is really a
539 bitwise not expression, look in OPS for a corresponding operand to
540 cancel it out. If we find one, remove the other from OPS, replace
541 OPS[CURRINDEX] with 0, and return true. Otherwise, return
542 false. */
544 static bool
545 eliminate_not_pairs (enum tree_code opcode,
546 VEC (operand_entry_t, heap) **ops,
547 unsigned int currindex,
548 operand_entry_t curr)
550 tree notop;
551 unsigned int i;
552 operand_entry_t oe;
554 if ((opcode != BIT_IOR_EXPR && opcode != BIT_AND_EXPR)
555 || TREE_CODE (curr->op) != SSA_NAME)
556 return false;
558 notop = get_unary_op (curr->op, BIT_NOT_EXPR);
559 if (notop == NULL_TREE)
560 return false;
562 /* Any non-not version will have a rank that is one less than
563 the current rank. So once we hit those ranks, if we don't find
564 one, we can stop. */
566 for (i = currindex + 1;
567 VEC_iterate (operand_entry_t, *ops, i, oe)
568 && oe->rank >= curr->rank - 1;
569 i++)
571 if (oe->op == notop)
573 if (dump_file && (dump_flags & TDF_DETAILS))
575 fprintf (dump_file, "Equivalence: ");
576 print_generic_expr (dump_file, notop, 0);
577 if (opcode == BIT_AND_EXPR)
578 fprintf (dump_file, " & ~");
579 else if (opcode == BIT_IOR_EXPR)
580 fprintf (dump_file, " | ~");
581 print_generic_expr (dump_file, oe->op, 0);
582 if (opcode == BIT_AND_EXPR)
583 fprintf (dump_file, " -> 0\n");
584 else if (opcode == BIT_IOR_EXPR)
585 fprintf (dump_file, " -> -1\n");
588 if (opcode == BIT_AND_EXPR)
589 oe->op = fold_convert (TREE_TYPE (oe->op), integer_zero_node);
590 else if (opcode == BIT_IOR_EXPR)
591 oe->op = build_low_bits_mask (TREE_TYPE (oe->op),
592 TYPE_PRECISION (TREE_TYPE (oe->op)));
594 reassociate_stats.ops_eliminated
595 += VEC_length (operand_entry_t, *ops) - 1;
596 VEC_free (operand_entry_t, heap, *ops);
597 *ops = NULL;
598 VEC_safe_push (operand_entry_t, heap, *ops, oe);
599 return true;
603 return false;
606 /* Use constant value that may be present in OPS to try to eliminate
607 operands. Note that this function is only really used when we've
608 eliminated ops for other reasons, or merged constants. Across
609 single statements, fold already does all of this, plus more. There
610 is little point in duplicating logic, so I've only included the
611 identities that I could ever construct testcases to trigger. */
613 static void
614 eliminate_using_constants (enum tree_code opcode,
615 VEC(operand_entry_t, heap) **ops)
617 operand_entry_t oelast = VEC_last (operand_entry_t, *ops);
619 if (oelast->rank == 0 && INTEGRAL_TYPE_P (TREE_TYPE (oelast->op)))
621 switch (opcode)
623 case BIT_AND_EXPR:
624 if (integer_zerop (oelast->op))
626 if (VEC_length (operand_entry_t, *ops) != 1)
628 if (dump_file && (dump_flags & TDF_DETAILS))
629 fprintf (dump_file, "Found & 0, removing all other ops\n");
631 reassociate_stats.ops_eliminated
632 += VEC_length (operand_entry_t, *ops) - 1;
634 VEC_free (operand_entry_t, heap, *ops);
635 *ops = NULL;
636 VEC_safe_push (operand_entry_t, heap, *ops, oelast);
637 return;
640 else if (integer_all_onesp (oelast->op))
642 if (VEC_length (operand_entry_t, *ops) != 1)
644 if (dump_file && (dump_flags & TDF_DETAILS))
645 fprintf (dump_file, "Found & -1, removing\n");
646 VEC_pop (operand_entry_t, *ops);
647 reassociate_stats.ops_eliminated++;
650 break;
651 case BIT_IOR_EXPR:
652 if (integer_all_onesp (oelast->op))
654 if (VEC_length (operand_entry_t, *ops) != 1)
656 if (dump_file && (dump_flags & TDF_DETAILS))
657 fprintf (dump_file, "Found | -1, removing all other ops\n");
659 reassociate_stats.ops_eliminated
660 += VEC_length (operand_entry_t, *ops) - 1;
662 VEC_free (operand_entry_t, heap, *ops);
663 *ops = NULL;
664 VEC_safe_push (operand_entry_t, heap, *ops, oelast);
665 return;
668 else if (integer_zerop (oelast->op))
670 if (VEC_length (operand_entry_t, *ops) != 1)
672 if (dump_file && (dump_flags & TDF_DETAILS))
673 fprintf (dump_file, "Found | 0, removing\n");
674 VEC_pop (operand_entry_t, *ops);
675 reassociate_stats.ops_eliminated++;
678 break;
679 case MULT_EXPR:
680 if (integer_zerop (oelast->op))
682 if (VEC_length (operand_entry_t, *ops) != 1)
684 if (dump_file && (dump_flags & TDF_DETAILS))
685 fprintf (dump_file, "Found * 0, removing all other ops\n");
687 reassociate_stats.ops_eliminated
688 += VEC_length (operand_entry_t, *ops) - 1;
689 VEC_free (operand_entry_t, heap, *ops);
690 *ops = NULL;
691 VEC_safe_push (operand_entry_t, heap, *ops, oelast);
692 return;
695 else if (integer_onep (oelast->op))
697 if (VEC_length (operand_entry_t, *ops) != 1)
699 if (dump_file && (dump_flags & TDF_DETAILS))
700 fprintf (dump_file, "Found * 1, removing\n");
701 VEC_pop (operand_entry_t, *ops);
702 reassociate_stats.ops_eliminated++;
703 return;
706 break;
707 case BIT_XOR_EXPR:
708 case PLUS_EXPR:
709 case MINUS_EXPR:
710 if (integer_zerop (oelast->op))
712 if (VEC_length (operand_entry_t, *ops) != 1)
714 if (dump_file && (dump_flags & TDF_DETAILS))
715 fprintf (dump_file, "Found [|^+] 0, removing\n");
716 VEC_pop (operand_entry_t, *ops);
717 reassociate_stats.ops_eliminated++;
718 return;
721 break;
722 default:
723 break;
728 /* Perform various identities and other optimizations on the list of
729 operand entries, stored in OPS. The tree code for the binary
730 operation between all the operands is OPCODE. */
732 static void
733 optimize_ops_list (enum tree_code opcode,
734 VEC (operand_entry_t, heap) **ops)
736 unsigned int length = VEC_length (operand_entry_t, *ops);
737 unsigned int i;
738 operand_entry_t oe;
739 operand_entry_t oelast = NULL;
740 bool iterate = false;
742 if (length == 1)
743 return;
745 oelast = VEC_last (operand_entry_t, *ops);
747 /* If the last two are constants, pop the constants off, merge them
748 and try the next two. */
749 if (oelast->rank == 0 && is_gimple_min_invariant (oelast->op))
751 operand_entry_t oelm1 = VEC_index (operand_entry_t, *ops, length - 2);
753 if (oelm1->rank == 0
754 && is_gimple_min_invariant (oelm1->op)
755 && lang_hooks.types_compatible_p (TREE_TYPE (oelm1->op),
756 TREE_TYPE (oelast->op)))
758 tree folded = fold_binary (opcode, TREE_TYPE (oelm1->op),
759 oelm1->op, oelast->op);
761 if (folded && is_gimple_min_invariant (folded))
763 if (dump_file && (dump_flags & TDF_DETAILS))
764 fprintf (dump_file, "Merging constants\n");
766 VEC_pop (operand_entry_t, *ops);
767 VEC_pop (operand_entry_t, *ops);
769 add_to_ops_vec (ops, folded);
770 reassociate_stats.constants_eliminated++;
772 optimize_ops_list (opcode, ops);
773 return;
778 eliminate_using_constants (opcode, ops);
779 oelast = NULL;
781 for (i = 0; VEC_iterate (operand_entry_t, *ops, i, oe);)
783 bool done = false;
785 if (eliminate_not_pairs (opcode, ops, i, oe))
786 return;
787 if (eliminate_duplicate_pair (opcode, ops, &done, i, oe, oelast)
788 || (!done && eliminate_plus_minus_pair (opcode, ops, i, oe)))
790 if (done)
791 return;
792 iterate = true;
793 oelast = NULL;
794 continue;
796 oelast = oe;
797 i++;
800 length = VEC_length (operand_entry_t, *ops);
801 oelast = VEC_last (operand_entry_t, *ops);
803 if (iterate)
804 optimize_ops_list (opcode, ops);
807 /* Return true if OPERAND is defined by a PHI node which uses the LHS
808 of STMT in it's operands. This is also known as a "destructive
809 update" operation. */
811 static bool
812 is_phi_for_stmt (tree stmt, tree operand)
814 tree def_stmt;
815 tree lhs = TREE_OPERAND (stmt, 0);
816 use_operand_p arg_p;
817 ssa_op_iter i;
819 if (TREE_CODE (operand) != SSA_NAME)
820 return false;
822 def_stmt = SSA_NAME_DEF_STMT (operand);
823 if (TREE_CODE (def_stmt) != PHI_NODE)
824 return false;
826 FOR_EACH_PHI_ARG (arg_p, def_stmt, i, SSA_OP_USE)
827 if (lhs == USE_FROM_PTR (arg_p))
828 return true;
829 return false;
832 /* Recursively rewrite our linearized statements so that the operators
833 match those in OPS[OPINDEX], putting the computation in rank
834 order. */
836 static void
837 rewrite_expr_tree (tree stmt, unsigned int opindex,
838 VEC(operand_entry_t, heap) * ops)
840 tree rhs = TREE_OPERAND (stmt, 1);
841 operand_entry_t oe;
843 /* If we have three operands left, then we want to make sure the one
844 that gets the double binary op are the ones with the same rank.
846 The alternative we try is to see if this is a destructive
847 update style statement, which is like:
848 b = phi (a, ...)
849 a = c + b;
850 In that case, we want to use the destructive update form to
851 expose the possible vectorizer sum reduction opportunity.
852 In that case, the third operand will be the phi node.
854 We could, of course, try to be better as noted above, and do a
855 lot of work to try to find these opportunities in >3 operand
856 cases, but it is unlikely to be worth it. */
857 if (opindex + 3 == VEC_length (operand_entry_t, ops))
859 operand_entry_t oe1, oe2, oe3;
861 oe1 = VEC_index (operand_entry_t, ops, opindex);
862 oe2 = VEC_index (operand_entry_t, ops, opindex + 1);
863 oe3 = VEC_index (operand_entry_t, ops, opindex + 2);
865 if ((oe1->rank == oe2->rank
866 && oe2->rank != oe3->rank)
867 || (is_phi_for_stmt (stmt, oe3->op)
868 && !is_phi_for_stmt (stmt, oe1->op)
869 && !is_phi_for_stmt (stmt, oe2->op)))
871 struct operand_entry temp = *oe3;
872 oe3->op = oe1->op;
873 oe3->rank = oe1->rank;
874 oe1->op = temp.op;
875 oe1->rank= temp.rank;
879 /* The final recursion case for this function is that you have
880 exactly two operations left.
881 If we had one exactly one op in the entire list to start with, we
882 would have never called this function, and the tail recursion
883 rewrites them one at a time. */
884 if (opindex + 2 == VEC_length (operand_entry_t, ops))
886 operand_entry_t oe1, oe2;
888 oe1 = VEC_index (operand_entry_t, ops, opindex);
889 oe2 = VEC_index (operand_entry_t, ops, opindex + 1);
891 if (TREE_OPERAND (rhs, 0) != oe1->op
892 || TREE_OPERAND (rhs, 1) != oe2->op)
895 if (dump_file && (dump_flags & TDF_DETAILS))
897 fprintf (dump_file, "Transforming ");
898 print_generic_expr (dump_file, rhs, 0);
901 TREE_OPERAND (rhs, 0) = oe1->op;
902 TREE_OPERAND (rhs, 1) = oe2->op;
903 update_stmt (stmt);
905 if (dump_file && (dump_flags & TDF_DETAILS))
907 fprintf (dump_file, " into ");
908 print_generic_stmt (dump_file, rhs, 0);
912 return;
915 /* If we hit here, we should have 3 or more ops left. */
916 gcc_assert (opindex + 2 < VEC_length (operand_entry_t, ops));
918 /* Rewrite the next operator. */
919 oe = VEC_index (operand_entry_t, ops, opindex);
921 if (oe->op != TREE_OPERAND (rhs, 1))
924 if (dump_file && (dump_flags & TDF_DETAILS))
926 fprintf (dump_file, "Transforming ");
927 print_generic_expr (dump_file, rhs, 0);
930 TREE_OPERAND (rhs, 1) = oe->op;
931 update_stmt (stmt);
933 if (dump_file && (dump_flags & TDF_DETAILS))
935 fprintf (dump_file, " into ");
936 print_generic_stmt (dump_file, rhs, 0);
939 /* Recurse on the LHS of the binary operator, which is guaranteed to
940 be the non-leaf side. */
941 rewrite_expr_tree (SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0)),
942 opindex + 1, ops);
945 /* Transform STMT, which is really (A +B) + (C + D) into the left
946 linear form, ((A+B)+C)+D.
947 Recurse on D if necessary. */
949 static void
950 linearize_expr (tree stmt)
952 block_stmt_iterator bsinow, bsirhs;
953 tree rhs = TREE_OPERAND (stmt, 1);
954 enum tree_code rhscode = TREE_CODE (rhs);
955 tree binrhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));
956 tree binlhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
957 tree newbinrhs = NULL_TREE;
959 gcc_assert (is_reassociable_op (binlhs, TREE_CODE (rhs))
960 && is_reassociable_op (binrhs, TREE_CODE (rhs)));
962 bsinow = bsi_for_stmt (stmt);
963 bsirhs = bsi_for_stmt (binrhs);
964 bsi_move_before (&bsirhs, &bsinow);
966 TREE_OPERAND (rhs, 1) = TREE_OPERAND (TREE_OPERAND (binrhs, 1), 0);
967 if (TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME)
968 newbinrhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));
969 TREE_OPERAND (TREE_OPERAND (binrhs, 1), 0) = TREE_OPERAND (binlhs, 0);
970 TREE_OPERAND (rhs, 0) = TREE_OPERAND (binrhs, 0);
972 if (dump_file && (dump_flags & TDF_DETAILS))
974 fprintf (dump_file, "Linearized: ");
975 print_generic_stmt (dump_file, rhs, 0);
978 reassociate_stats.linearized++;
979 update_stmt (binrhs);
980 update_stmt (binlhs);
981 update_stmt (stmt);
982 TREE_VISITED (binrhs) = 1;
983 TREE_VISITED (binlhs) = 1;
984 TREE_VISITED (stmt) = 1;
986 /* Tail recurse on the new rhs if it still needs reassociation. */
987 if (newbinrhs && is_reassociable_op (newbinrhs, rhscode))
988 linearize_expr (stmt);
992 /* If LHS has a single immediate use that is a MODIFY_EXPR, return
993 it. Otherwise, return NULL. */
995 static tree
996 get_single_immediate_use (tree lhs)
998 use_operand_p immuse;
999 tree immusestmt;
1001 if (TREE_CODE (lhs) == SSA_NAME
1002 && single_imm_use (lhs, &immuse, &immusestmt))
1004 if (TREE_CODE (immusestmt) == RETURN_EXPR)
1005 immusestmt = TREE_OPERAND (immusestmt, 0);
1006 if (TREE_CODE (immusestmt) == MODIFY_EXPR)
1007 return immusestmt;
1009 return NULL_TREE;
1011 static VEC(tree, heap) *broken_up_subtracts;
1014 /* Recursively negate the value of TONEGATE, and return the SSA_NAME
1015 representing the negated value. Insertions of any necessary
1016 instructions go before BSI.
1017 This function is recursive in that, if you hand it "a_5" as the
1018 value to negate, and a_5 is defined by "a_5 = b_3 + b_4", it will
1019 transform b_3 + b_4 into a_5 = -b_3 + -b_4. */
1021 static tree
1022 negate_value (tree tonegate, block_stmt_iterator *bsi)
1024 tree negatedef = tonegate;
1025 tree resultofnegate;
1027 if (TREE_CODE (tonegate) == SSA_NAME)
1028 negatedef = SSA_NAME_DEF_STMT (tonegate);
1030 /* If we are trying to negate a name, defined by an add, negate the
1031 add operands instead. */
1032 if (TREE_CODE (tonegate) == SSA_NAME
1033 && TREE_CODE (negatedef) == MODIFY_EXPR
1034 && TREE_CODE (TREE_OPERAND (negatedef, 0)) == SSA_NAME
1035 && num_imm_uses (TREE_OPERAND (negatedef, 0)) == 1
1036 && TREE_CODE (TREE_OPERAND (negatedef, 1)) == PLUS_EXPR)
1038 block_stmt_iterator bsi;
1039 tree binop = TREE_OPERAND (negatedef, 1);
1041 bsi = bsi_for_stmt (negatedef);
1042 TREE_OPERAND (binop, 0) = negate_value (TREE_OPERAND (binop, 0),
1043 &bsi);
1044 bsi = bsi_for_stmt (negatedef);
1045 TREE_OPERAND (binop, 1) = negate_value (TREE_OPERAND (binop, 1),
1046 &bsi);
1047 update_stmt (negatedef);
1048 return TREE_OPERAND (negatedef, 0);
1051 tonegate = fold_build1 (NEGATE_EXPR, TREE_TYPE (tonegate), tonegate);
1052 resultofnegate = force_gimple_operand_bsi (bsi, tonegate, true,
1053 NULL_TREE);
1054 VEC_safe_push (tree, heap, broken_up_subtracts, resultofnegate);
1055 return resultofnegate;
1059 /* Return true if we should break up the subtract in STMT into an add
1060 with negate. This is true when we the subtract operands are really
1061 adds, or the subtract itself is used in an add expression. In
1062 either case, breaking up the subtract into an add with negate
1063 exposes the adds to reassociation. */
1065 static bool
1066 should_break_up_subtract (tree stmt)
1069 tree lhs = TREE_OPERAND (stmt, 0);
1070 tree rhs = TREE_OPERAND (stmt, 1);
1071 tree binlhs = TREE_OPERAND (rhs, 0);
1072 tree binrhs = TREE_OPERAND (rhs, 1);
1073 tree immusestmt;
1075 if (TREE_CODE (binlhs) == SSA_NAME
1076 && is_reassociable_op (SSA_NAME_DEF_STMT (binlhs), PLUS_EXPR))
1077 return true;
1079 if (TREE_CODE (binrhs) == SSA_NAME
1080 && is_reassociable_op (SSA_NAME_DEF_STMT (binrhs), PLUS_EXPR))
1081 return true;
1083 if (TREE_CODE (lhs) == SSA_NAME
1084 && (immusestmt = get_single_immediate_use (lhs))
1085 && TREE_CODE (TREE_OPERAND (immusestmt, 1)) == PLUS_EXPR)
1086 return true;
1087 return false;
1091 /* Transform STMT from A - B into A + -B. */
1093 static void
1094 break_up_subtract (tree stmt, block_stmt_iterator *bsi)
1096 tree rhs = TREE_OPERAND (stmt, 1);
1098 if (dump_file && (dump_flags & TDF_DETAILS))
1100 fprintf (dump_file, "Breaking up subtract ");
1101 print_generic_stmt (dump_file, stmt, 0);
1104 TREE_SET_CODE (TREE_OPERAND (stmt, 1), PLUS_EXPR);
1105 TREE_OPERAND (rhs, 1) = negate_value (TREE_OPERAND (rhs, 1), bsi);
1107 update_stmt (stmt);
1110 /* Recursively linearize a binary expression that is the RHS of STMT.
1111 Place the operands of the expression tree in the vector named OPS. */
1113 static void
1114 linearize_expr_tree (VEC(operand_entry_t, heap) **ops, tree stmt)
1116 block_stmt_iterator bsinow, bsilhs;
1117 tree rhs = TREE_OPERAND (stmt, 1);
1118 tree binrhs = TREE_OPERAND (rhs, 1);
1119 tree binlhs = TREE_OPERAND (rhs, 0);
1120 tree binlhsdef, binrhsdef;
1121 bool binlhsisreassoc = false;
1122 bool binrhsisreassoc = false;
1123 enum tree_code rhscode = TREE_CODE (rhs);
1125 TREE_VISITED (stmt) = 1;
1127 if (TREE_CODE (binlhs) == SSA_NAME)
1129 binlhsdef = SSA_NAME_DEF_STMT (binlhs);
1130 binlhsisreassoc = is_reassociable_op (binlhsdef, rhscode);
1133 if (TREE_CODE (binrhs) == SSA_NAME)
1135 binrhsdef = SSA_NAME_DEF_STMT (binrhs);
1136 binrhsisreassoc = is_reassociable_op (binrhsdef, rhscode);
1139 /* If the LHS is not reassociable, but the RHS is, we need to swap
1140 them. If neither is reassociable, there is nothing we can do, so
1141 just put them in the ops vector. If the LHS is reassociable,
1142 linearize it. If both are reassociable, then linearize the RHS
1143 and the LHS. */
1145 if (!binlhsisreassoc)
1147 tree temp;
1149 if (!binrhsisreassoc)
1151 add_to_ops_vec (ops, binrhs);
1152 add_to_ops_vec (ops, binlhs);
1153 return;
1156 if (dump_file && (dump_flags & TDF_DETAILS))
1158 fprintf (dump_file, "swapping operands of ");
1159 print_generic_expr (dump_file, stmt, 0);
1162 swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
1163 &TREE_OPERAND (rhs, 1));
1164 update_stmt (stmt);
1166 if (dump_file && (dump_flags & TDF_DETAILS))
1168 fprintf (dump_file, " is now ");
1169 print_generic_stmt (dump_file, stmt, 0);
1172 /* We want to make it so the lhs is always the reassociative op,
1173 so swap. */
1174 temp = binlhs;
1175 binlhs = binrhs;
1176 binrhs = temp;
1178 else if (binrhsisreassoc)
1180 linearize_expr (stmt);
1181 gcc_assert (rhs == TREE_OPERAND (stmt, 1));
1182 binlhs = TREE_OPERAND (rhs, 0);
1183 binrhs = TREE_OPERAND (rhs, 1);
1186 gcc_assert (TREE_CODE (binrhs) != SSA_NAME
1187 || !is_reassociable_op (SSA_NAME_DEF_STMT (binrhs), rhscode));
1188 bsinow = bsi_for_stmt (stmt);
1189 bsilhs = bsi_for_stmt (SSA_NAME_DEF_STMT (binlhs));
1190 bsi_move_before (&bsilhs, &bsinow);
1191 linearize_expr_tree (ops, SSA_NAME_DEF_STMT (binlhs));
1192 add_to_ops_vec (ops, binrhs);
1195 /* Repropagate the negates back into subtracts, since no other pass
1196 currently does it. */
1198 static void
1199 repropagate_negates (void)
1201 unsigned int i = 0;
1202 tree negate;
1204 for (i = 0; VEC_iterate (tree, broken_up_subtracts, i, negate); i++)
1206 tree user = get_single_immediate_use (negate);
1208 /* The negate operand can be either operand of a PLUS_EXPR
1209 (it can be the LHS if the RHS is a constant for example).
1211 Force the negate operand to the RHS of the PLUS_EXPR, then
1212 transform the PLUS_EXPR into a MINUS_EXPR. */
1213 if (user
1214 && TREE_CODE (user) == MODIFY_EXPR
1215 && TREE_CODE (TREE_OPERAND (user, 1)) == PLUS_EXPR)
1217 tree rhs = TREE_OPERAND (user, 1);
1219 /* If the negated operand appears on the LHS of the
1220 PLUS_EXPR, exchange the operands of the PLUS_EXPR
1221 to force the negated operand to the RHS of the PLUS_EXPR. */
1222 if (TREE_OPERAND (TREE_OPERAND (user, 1), 0) == negate)
1224 tree temp = TREE_OPERAND (rhs, 0);
1225 TREE_OPERAND (rhs, 0) = TREE_OPERAND (rhs, 1);
1226 TREE_OPERAND (rhs, 1) = temp;
1229 /* Now transform the PLUS_EXPR into a MINUS_EXPR and replace
1230 the RHS of the PLUS_EXPR with the operand of the NEGATE_EXPR. */
1231 if (TREE_OPERAND (TREE_OPERAND (user, 1), 1) == negate)
1233 TREE_SET_CODE (rhs, MINUS_EXPR);
1234 TREE_OPERAND (rhs, 1) = get_unary_op (negate, NEGATE_EXPR);
1235 update_stmt (user);
1241 /* Break up subtract operations in block BB.
1243 We do this top down because we don't know whether the subtract is
1244 part of a possible chain of reassociation except at the top.
1246 IE given
1247 d = f + g
1248 c = a + e
1249 b = c - d
1250 q = b - r
1251 k = t - q
1253 we want to break up k = t - q, but we won't until we've transformed q
1254 = b - r, which won't be broken up until we transform b = c - d. */
1256 static void
1257 break_up_subtract_bb (basic_block bb)
1259 block_stmt_iterator bsi;
1260 basic_block son;
1262 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1264 tree stmt = bsi_stmt (bsi);
1266 if (TREE_CODE (stmt) == MODIFY_EXPR)
1268 tree lhs = TREE_OPERAND (stmt, 0);
1269 tree rhs = TREE_OPERAND (stmt, 1);
1271 TREE_VISITED (stmt) = 0;
1272 /* If unsafe math optimizations we can do reassociation for
1273 non-integral types. */
1274 if ((!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1275 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs)))
1276 && (!SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs))
1277 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE(lhs))
1278 || !flag_unsafe_math_optimizations))
1279 continue;
1281 /* Check for a subtract used only in an addition. If this
1282 is the case, transform it into add of a negate for better
1283 reassociation. IE transform C = A-B into C = A + -B if C
1284 is only used in an addition. */
1285 if (TREE_CODE (rhs) == MINUS_EXPR)
1286 if (should_break_up_subtract (stmt))
1287 break_up_subtract (stmt, &bsi);
1290 for (son = first_dom_son (CDI_DOMINATORS, bb);
1291 son;
1292 son = next_dom_son (CDI_DOMINATORS, son))
1293 break_up_subtract_bb (son);
1296 /* Reassociate expressions in basic block BB and its post-dominator as
1297 children. */
1299 static void
1300 reassociate_bb (basic_block bb)
1302 block_stmt_iterator bsi;
1303 basic_block son;
1305 for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
1307 tree stmt = bsi_stmt (bsi);
1309 if (TREE_CODE (stmt) == MODIFY_EXPR)
1311 tree lhs = TREE_OPERAND (stmt, 0);
1312 tree rhs = TREE_OPERAND (stmt, 1);
1314 /* If this was part of an already processed tree, we don't
1315 need to touch it again. */
1316 if (TREE_VISITED (stmt))
1317 continue;
1319 /* If unsafe math optimizations we can do reassociation for
1320 non-integral types. */
1321 if ((!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1322 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs)))
1323 && (!SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs))
1324 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE(lhs))
1325 || !flag_unsafe_math_optimizations))
1326 continue;
1328 if (associative_tree_code (TREE_CODE (rhs)))
1330 VEC(operand_entry_t, heap) *ops = NULL;
1332 /* There may be no immediate uses left by the time we
1333 get here because we may have eliminated them all. */
1334 if (TREE_CODE (lhs) == SSA_NAME && num_imm_uses (lhs) == 0)
1335 continue;
1337 TREE_VISITED (stmt) = 1;
1338 linearize_expr_tree (&ops, stmt);
1339 qsort (VEC_address (operand_entry_t, ops),
1340 VEC_length (operand_entry_t, ops),
1341 sizeof (operand_entry_t),
1342 sort_by_operand_rank);
1343 optimize_ops_list (TREE_CODE (rhs), &ops);
1345 if (VEC_length (operand_entry_t, ops) == 1)
1347 if (dump_file && (dump_flags & TDF_DETAILS))
1349 fprintf (dump_file, "Transforming ");
1350 print_generic_expr (dump_file, rhs, 0);
1352 TREE_OPERAND (stmt, 1) = VEC_last (operand_entry_t, ops)->op;
1353 update_stmt (stmt);
1355 if (dump_file && (dump_flags & TDF_DETAILS))
1357 fprintf (dump_file, " into ");
1358 print_generic_stmt (dump_file,
1359 TREE_OPERAND (stmt, 1), 0);
1362 else
1364 rewrite_expr_tree (stmt, 0, ops);
1367 VEC_free (operand_entry_t, heap, ops);
1371 for (son = first_dom_son (CDI_POST_DOMINATORS, bb);
1372 son;
1373 son = next_dom_son (CDI_POST_DOMINATORS, son))
1374 reassociate_bb (son);
1377 void dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops);
1378 void debug_ops_vector (VEC (operand_entry_t, heap) *ops);
1380 /* Dump the operand entry vector OPS to FILE. */
1382 void
1383 dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops)
1385 operand_entry_t oe;
1386 unsigned int i;
1388 for (i = 0; VEC_iterate (operand_entry_t, ops, i, oe); i++)
1390 fprintf (file, "Op %d -> rank: %d, tree: ", i, oe->rank);
1391 print_generic_stmt (file, oe->op, 0);
1395 /* Dump the operand entry vector OPS to STDERR. */
1397 void
1398 debug_ops_vector (VEC (operand_entry_t, heap) *ops)
1400 dump_ops_vector (stderr, ops);
1403 static void
1404 do_reassoc (void)
1406 break_up_subtract_bb (ENTRY_BLOCK_PTR);
1407 reassociate_bb (EXIT_BLOCK_PTR);
1410 /* Initialize the reassociation pass. */
1412 static void
1413 init_reassoc (void)
1415 int i;
1416 unsigned int rank = 2;
1417 tree param;
1418 int *bbs = XNEWVEC (int, last_basic_block + 1);
1420 memset (&reassociate_stats, 0, sizeof (reassociate_stats));
1422 operand_entry_pool = create_alloc_pool ("operand entry pool",
1423 sizeof (struct operand_entry), 30);
1425 /* Reverse RPO (Reverse Post Order) will give us something where
1426 deeper loops come later. */
1427 pre_and_rev_post_order_compute (NULL, bbs, false);
1428 bb_rank = XCNEWVEC (unsigned int, last_basic_block + 1);
1430 operand_rank = htab_create (511, operand_entry_hash,
1431 operand_entry_eq, 0);
1433 /* Give each argument a distinct rank. */
1434 for (param = DECL_ARGUMENTS (current_function_decl);
1435 param;
1436 param = TREE_CHAIN (param))
1438 if (default_def (param) != NULL)
1440 tree def = default_def (param);
1441 insert_operand_rank (def, ++rank);
1445 /* Give the chain decl a distinct rank. */
1446 if (cfun->static_chain_decl != NULL)
1448 tree def = default_def (cfun->static_chain_decl);
1449 if (def != NULL)
1450 insert_operand_rank (def, ++rank);
1453 /* Set up rank for each BB */
1454 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
1455 bb_rank[bbs[i]] = ++rank << 16;
1457 free (bbs);
1458 calculate_dominance_info (CDI_DOMINATORS);
1459 calculate_dominance_info (CDI_POST_DOMINATORS);
1460 broken_up_subtracts = NULL;
1463 /* Cleanup after the reassociation pass, and print stats if
1464 requested. */
1466 static void
1467 fini_reassoc (void)
1470 if (dump_file && (dump_flags & TDF_STATS))
1472 fprintf (dump_file, "Reassociation stats:\n");
1473 fprintf (dump_file, "Linearized: %d\n",
1474 reassociate_stats.linearized);
1475 fprintf (dump_file, "Constants eliminated: %d\n",
1476 reassociate_stats.constants_eliminated);
1477 fprintf (dump_file, "Ops eliminated: %d\n",
1478 reassociate_stats.ops_eliminated);
1479 fprintf (dump_file, "Statements rewritten: %d\n",
1480 reassociate_stats.rewritten);
1482 htab_delete (operand_rank);
1484 free_alloc_pool (operand_entry_pool);
1485 free (bb_rank);
1486 VEC_free (tree, heap, broken_up_subtracts);
1487 free_dominance_info (CDI_POST_DOMINATORS);
/* Gate and execute functions for Reassociation.  */

static void
execute_reassoc (void)
{
  init_reassoc ();

  /* Run the transformation, then fold leftover negates back into
     subtracts.  */
  do_reassoc ();
  repropagate_negates ();

  fini_reassoc ();
}
1503 struct tree_opt_pass pass_reassoc =
1505 "reassoc", /* name */
1506 NULL, /* gate */
1507 execute_reassoc, /* execute */
1508 NULL, /* sub */
1509 NULL, /* next */
1510 0, /* static_pass_number */
1511 TV_TREE_REASSOC, /* tv_id */
1512 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
1513 0, /* properties_provided */
1514 0, /* properties_destroyed */
1515 0, /* todo_flags_start */
1516 TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
1517 0 /* letter */