Add files that I missed when importing NaCl changes earlier
[gcc/nacl-gcc.git] / gcc / tree-ssa-reassoc.c
blob52b5909ff78f87fe3b3eb065d06b25a4cf3e87f0
1 /* Reassociation for trees.
2 Copyright (C) 2005, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "errors.h"
26 #include "ggc.h"
27 #include "tree.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "tree-gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "tree-iterator.h"
36 #include "tree-pass.h"
37 #include "alloc-pool.h"
38 #include "vec.h"
39 #include "langhooks.h"
41 /* This is a simple global reassociation pass. It is, in part, based
42 on the LLVM pass of the same name (They do some things more/less
43 than we do, in different orders, etc).
45 It consists of five steps:
47 1. Breaking up subtract operations into addition + negate, where
48 it would promote the reassociation of adds.
50 2. Left linearization of the expression trees, so that (A+B)+(C+D)
51 becomes (((A+B)+C)+D), which is easier for us to rewrite later.
52 During linearization, we place the operands of the binary
53 expressions into a vector of operand_entry_t
55 3. Optimization of the operand lists, eliminating things like a +
56 -a, a & a, etc.
58 4. Rewrite the expression trees we linearized and optimized so
59 they are in proper rank order.
61 5. Repropagate negates, as nothing else will clean it up ATM.
63 A bit of theory on #4, since nobody seems to write anything down
64 about why it makes sense to do it the way they do it:
66 We could do this much nicer theoretically, but don't (for reasons
67 explained after how to do it theoretically nice :P).
69 In order to promote the most redundancy elimination, you want
70 binary expressions whose operands are the same rank (or
71 preferably, the same value) exposed to the redundancy eliminator,
72 for possible elimination.
74 So the way to do this if we really cared, is to build the new op
75 tree from the leaves to the roots, merging as you go, and putting the
76 new op on the end of the worklist, until you are left with one
77 thing on the worklist.
79 IE if you have to rewrite the following set of operands (listed with
80 rank in parentheses), with opcode PLUS_EXPR:
82 a (1), b (1), c (1), d (2), e (2)
85 We start with our merge worklist empty, and the ops list with all of
86 those on it.
88 You want to first merge all leaves of the same rank, as much as
89 possible.
91 So first build a binary op of
93 mergetmp = a + b, and put "mergetmp" on the merge worklist.
95 Because there is no three operand form of PLUS_EXPR, c is not going to
96 be exposed to redundancy elimination as a rank 1 operand.
98 So you might as well throw it on the merge worklist (you could also
99 consider it to now be a rank two operand, and merge it with d and e,
100 but in this case, you then have evicted e from a binary op. So at
101 least in this situation, you can't win.)
103 Then build a binary op of d + e
104 mergetmp2 = d + e
106 and put mergetmp2 on the merge worklist.
108 so merge worklist = {mergetmp, c, mergetmp2}
110 Continue building binary ops of these operations until you have only
111 one operation left on the worklist.
113 So we have
115 build binary op
116 mergetmp3 = mergetmp + c
118 worklist = {mergetmp2, mergetmp3}
120 mergetmp4 = mergetmp2 + mergetmp3
122 worklist = {mergetmp4}
124 because we have one operation left, we can now just set the original
125 statement equal to the result of that operation.
127 This will at least expose a + b and d + e to redundancy elimination
128 as binary operations.
130 For extra points, you can reuse the old statements to build the
131 mergetmps, since you shouldn't run out.
133 So why don't we do this?
135 Because it's expensive, and rarely will help. Most trees we are
136 reassociating have 3 or less ops. If they have 2 ops, they already
137 will be written into a nice single binary op. If you have 3 ops, a
138 single simple check suffices to tell you whether the first two are of the
139 same rank. If so, you know to order it
141 mergetmp = op1 + op2
142 newstmt = mergetmp + op3
144 instead of
145 mergetmp = op2 + op3
146 newstmt = mergetmp + op1
148 If all three are of the same rank, you can't expose them all in a
149 single binary operator anyway, so the above is *still* the best you
150 can do.
152 Thus, this is what we do. When we have three ops left, we check to see
153 what order to put them in, and call it a day. As a nod to vector sum
reduction, we check if any of the ops are really a phi node that is a
155 destructive update for the associating op, and keep the destructive
156 update together for vector sum reduction recognition. */
/* Statistics gathered by the pass, dumped when details are requested.  */
static struct
{
  int linearized;		/* Expressions rewritten into left-linear form.  */
  int constants_eliminated;	/* Trailing constant pairs folded together.  */
  int ops_eliminated;		/* Operands removed through identities.  */
  int rewritten;		/* Expression trees rewritten in rank order.  */
} reassociate_stats;
168 /* Operator, rank pair. */
169 typedef struct operand_entry
171 unsigned int rank;
172 tree op;
173 } *operand_entry_t;
175 static alloc_pool operand_entry_pool;
178 /* Starting rank number for a given basic block, so that we can rank
179 operations using unmovable instructions in that BB based on the bb
180 depth. */
181 static unsigned int *bb_rank;
183 /* Operand->rank hashtable. */
184 static htab_t operand_rank;
187 /* Look up the operand rank structure for expression E. */
189 static operand_entry_t
190 find_operand_rank (tree e)
192 void **slot;
193 struct operand_entry vrd;
195 vrd.op = e;
196 slot = htab_find_slot (operand_rank, &vrd, NO_INSERT);
197 if (!slot)
198 return NULL;
199 return ((operand_entry_t) *slot);
202 /* Insert {E,RANK} into the operand rank hashtable. */
204 static void
205 insert_operand_rank (tree e, unsigned int rank)
207 void **slot;
208 operand_entry_t new_pair = pool_alloc (operand_entry_pool);
210 new_pair->op = e;
211 new_pair->rank = rank;
212 slot = htab_find_slot (operand_rank, new_pair, INSERT);
213 gcc_assert (*slot == NULL);
214 *slot = new_pair;
217 /* Return the hash value for a operand rank structure */
219 static hashval_t
220 operand_entry_hash (const void *p)
222 const operand_entry_t vr = (operand_entry_t) p;
223 return iterative_hash_expr (vr->op, 0);
226 /* Return true if two operand rank structures are equal. */
228 static int
229 operand_entry_eq (const void *p1, const void *p2)
231 const operand_entry_t vr1 = (operand_entry_t) p1;
232 const operand_entry_t vr2 = (operand_entry_t) p2;
233 return vr1->op == vr2->op;
236 /* Given an expression E, return the rank of the expression. */
238 static unsigned int
239 get_rank (tree e)
241 operand_entry_t vr;
243 /* Constants have rank 0. */
244 if (is_gimple_min_invariant (e))
245 return 0;
247 /* SSA_NAME's have the rank of the expression they are the result
249 For globals and uninitialized values, the rank is 0.
250 For function arguments, use the pre-setup rank.
251 For PHI nodes, stores, asm statements, etc, we use the rank of
252 the BB.
253 For simple operations, the rank is the maximum rank of any of
254 its operands, or the bb_rank, whichever is less.
255 I make no claims that this is optimal, however, it gives good
256 results. */
258 if (TREE_CODE (e) == SSA_NAME)
260 tree stmt;
261 tree rhs;
262 unsigned int rank, maxrank;
263 int i;
265 if (TREE_CODE (SSA_NAME_VAR (e)) == PARM_DECL
266 && e == default_def (SSA_NAME_VAR (e)))
267 return find_operand_rank (e)->rank;
269 stmt = SSA_NAME_DEF_STMT (e);
270 if (bb_for_stmt (stmt) == NULL)
271 return 0;
273 if (TREE_CODE (stmt) != MODIFY_EXPR
274 || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
275 return bb_rank[bb_for_stmt (stmt)->index];
277 /* If we already have a rank for this expression, use that. */
278 vr = find_operand_rank (e);
279 if (vr)
280 return vr->rank;
282 /* Otherwise, find the maximum rank for the operands, or the bb
283 rank, whichever is less. */
284 rank = 0;
285 maxrank = bb_rank[bb_for_stmt(stmt)->index];
286 rhs = TREE_OPERAND (stmt, 1);
287 if (TREE_CODE_LENGTH (TREE_CODE (rhs)) == 0)
288 rank = MAX (rank, get_rank (rhs));
289 else
291 for (i = 0;
292 i < TREE_CODE_LENGTH (TREE_CODE (rhs))
293 && TREE_OPERAND (rhs, i)
294 && rank != maxrank;
295 i++)
296 rank = MAX(rank, get_rank (TREE_OPERAND (rhs, i)));
299 if (dump_file && (dump_flags & TDF_DETAILS))
301 fprintf (dump_file, "Rank for ");
302 print_generic_expr (dump_file, e, 0);
303 fprintf (dump_file, " is %d\n", (rank + 1));
306 /* Note the rank in the hashtable so we don't recompute it. */
307 insert_operand_rank (e, (rank + 1));
308 return (rank + 1);
311 /* Globals, etc, are rank 0 */
312 return 0;
/* Declare a heap-allocated vector of operand_entry_t pointers.  */
DEF_VEC_P(operand_entry_t);
DEF_VEC_ALLOC_P(operand_entry_t, heap);
/* Constant-type classification codes.  We want integer constants to
   end up last no matter what, since they are the ones we can do the
   most with; the comparator subtracts these codes, so larger means
   "sorts later".  The expansions are parenthesized so the macros are
   safe inside larger expressions (an unparenthesized `1 << 3' would
   bind incorrectly in e.g. `x - INTEGER_CONST_TYPE').  */
#define INTEGER_CONST_TYPE (1 << 3)
#define FLOAT_CONST_TYPE (1 << 2)
#define OTHER_CONST_TYPE (1 << 1)
324 /* Classify an invariant tree into integer, float, or other, so that
325 we can sort them to be near other constants of the same type. */
326 static inline int
327 constant_type (tree t)
329 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
330 return INTEGER_CONST_TYPE;
331 else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (t)))
332 return FLOAT_CONST_TYPE;
333 else
334 return OTHER_CONST_TYPE;
337 /* qsort comparison function to sort operand entries PA and PB by rank
338 so that the sorted array is ordered by rank in decreasing order. */
339 static int
340 sort_by_operand_rank (const void *pa, const void *pb)
342 const operand_entry_t oea = *(const operand_entry_t *)pa;
343 const operand_entry_t oeb = *(const operand_entry_t *)pb;
345 /* It's nicer for optimize_expression if constants that are likely
346 to fold when added/multiplied//whatever are put next to each
347 other. Since all constants have rank 0, order them by type. */
348 if (oeb->rank == 0 && oea->rank == 0)
349 return constant_type (oeb->op) - constant_type (oea->op);
351 /* Lastly, make sure the versions that are the same go next to each
352 other. We use SSA_NAME_VERSION because it's stable. */
353 if ((oeb->rank - oea->rank == 0)
354 && TREE_CODE (oea->op) == SSA_NAME
355 && TREE_CODE (oeb->op) == SSA_NAME)
356 return SSA_NAME_VERSION (oeb->op) - SSA_NAME_VERSION (oea->op);
358 return oeb->rank - oea->rank;
361 /* Add an operand entry to *OPS for the tree operand OP. */
363 static void
364 add_to_ops_vec (VEC(operand_entry_t, heap) **ops, tree op)
366 operand_entry_t oe = pool_alloc (operand_entry_pool);
368 oe->op = op;
369 oe->rank = get_rank (op);
370 VEC_safe_push (operand_entry_t, heap, *ops, oe);
373 /* Return true if STMT is reassociable operation containing a binary
374 operation with tree code CODE. */
376 static bool
377 is_reassociable_op (tree stmt, enum tree_code code)
379 if (!IS_EMPTY_STMT (stmt)
380 && TREE_CODE (stmt) == MODIFY_EXPR
381 && TREE_CODE (TREE_OPERAND (stmt, 1)) == code
382 && has_single_use (TREE_OPERAND (stmt, 0)))
383 return true;
384 return false;
388 /* Given NAME, if NAME is defined by a unary operation OPCODE, return the
389 operand of the negate operation. Otherwise, return NULL. */
391 static tree
392 get_unary_op (tree name, enum tree_code opcode)
394 tree stmt = SSA_NAME_DEF_STMT (name);
395 tree rhs;
397 if (TREE_CODE (stmt) != MODIFY_EXPR)
398 return NULL_TREE;
400 rhs = TREE_OPERAND (stmt, 1);
401 if (TREE_CODE (rhs) == opcode)
402 return TREE_OPERAND (rhs, 0);
403 return NULL_TREE;
406 /* If CURR and LAST are a pair of ops that OPCODE allows us to
407 eliminate through equivalences, do so, remove them from OPS, and
408 return true. Otherwise, return false. */
410 static bool
411 eliminate_duplicate_pair (enum tree_code opcode,
412 VEC (operand_entry_t, heap) **ops,
413 bool *all_done,
414 unsigned int i,
415 operand_entry_t curr,
416 operand_entry_t last)
419 /* If we have two of the same op, and the opcode is & |, min, or max,
420 we can eliminate one of them.
421 If we have two of the same op, and the opcode is ^, we can
422 eliminate both of them. */
424 if (last && last->op == curr->op)
426 switch (opcode)
428 case MAX_EXPR:
429 case MIN_EXPR:
430 case BIT_IOR_EXPR:
431 case BIT_AND_EXPR:
432 if (dump_file && (dump_flags & TDF_DETAILS))
434 fprintf (dump_file, "Equivalence: ");
435 print_generic_expr (dump_file, curr->op, 0);
436 fprintf (dump_file, " [&|minmax] ");
437 print_generic_expr (dump_file, last->op, 0);
438 fprintf (dump_file, " -> ");
439 print_generic_stmt (dump_file, last->op, 0);
442 VEC_ordered_remove (operand_entry_t, *ops, i);
443 reassociate_stats.ops_eliminated ++;
445 return true;
447 case BIT_XOR_EXPR:
448 if (dump_file && (dump_flags & TDF_DETAILS))
450 fprintf (dump_file, "Equivalence: ");
451 print_generic_expr (dump_file, curr->op, 0);
452 fprintf (dump_file, " ^ ");
453 print_generic_expr (dump_file, last->op, 0);
454 fprintf (dump_file, " -> nothing\n");
457 reassociate_stats.ops_eliminated += 2;
459 if (VEC_length (operand_entry_t, *ops) == 2)
461 VEC_free (operand_entry_t, heap, *ops);
462 *ops = NULL;
463 add_to_ops_vec (ops, fold_convert (TREE_TYPE (last->op),
464 integer_zero_node));
465 *all_done = true;
467 else
469 VEC_ordered_remove (operand_entry_t, *ops, i-1);
470 VEC_ordered_remove (operand_entry_t, *ops, i-1);
473 return true;
475 default:
476 break;
479 return false;
482 /* If OPCODE is PLUS_EXPR, CURR->OP is really a negate expression,
483 look in OPS for a corresponding positive operation to cancel it
484 out. If we find one, remove the other from OPS, replace
485 OPS[CURRINDEX] with 0, and return true. Otherwise, return
486 false. */
488 static bool
489 eliminate_plus_minus_pair (enum tree_code opcode,
490 VEC (operand_entry_t, heap) **ops,
491 unsigned int currindex,
492 operand_entry_t curr)
494 tree negateop;
495 unsigned int i;
496 operand_entry_t oe;
498 if (opcode != PLUS_EXPR || TREE_CODE (curr->op) != SSA_NAME)
499 return false;
501 negateop = get_unary_op (curr->op, NEGATE_EXPR);
502 if (negateop == NULL_TREE)
503 return false;
505 /* Any non-negated version will have a rank that is one less than
506 the current rank. So once we hit those ranks, if we don't find
507 one, we can stop. */
509 for (i = currindex + 1;
510 VEC_iterate (operand_entry_t, *ops, i, oe)
511 && oe->rank >= curr->rank - 1 ;
512 i++)
514 if (oe->op == negateop)
517 if (dump_file && (dump_flags & TDF_DETAILS))
519 fprintf (dump_file, "Equivalence: ");
520 print_generic_expr (dump_file, negateop, 0);
521 fprintf (dump_file, " + -");
522 print_generic_expr (dump_file, oe->op, 0);
523 fprintf (dump_file, " -> 0\n");
526 VEC_ordered_remove (operand_entry_t, *ops, i);
527 add_to_ops_vec (ops, fold_convert(TREE_TYPE (oe->op),
528 integer_zero_node));
529 VEC_ordered_remove (operand_entry_t, *ops, currindex);
530 reassociate_stats.ops_eliminated ++;
532 return true;
536 return false;
539 /* If OPCODE is BIT_IOR_EXPR, BIT_AND_EXPR, and, CURR->OP is really a
540 bitwise not expression, look in OPS for a corresponding operand to
541 cancel it out. If we find one, remove the other from OPS, replace
542 OPS[CURRINDEX] with 0, and return true. Otherwise, return
543 false. */
545 static bool
546 eliminate_not_pairs (enum tree_code opcode,
547 VEC (operand_entry_t, heap) **ops,
548 unsigned int currindex,
549 operand_entry_t curr)
551 tree notop;
552 unsigned int i;
553 operand_entry_t oe;
555 if ((opcode != BIT_IOR_EXPR && opcode != BIT_AND_EXPR)
556 || TREE_CODE (curr->op) != SSA_NAME)
557 return false;
559 notop = get_unary_op (curr->op, BIT_NOT_EXPR);
560 if (notop == NULL_TREE)
561 return false;
563 /* Any non-not version will have a rank that is one less than
564 the current rank. So once we hit those ranks, if we don't find
565 one, we can stop. */
567 for (i = currindex + 1;
568 VEC_iterate (operand_entry_t, *ops, i, oe)
569 && oe->rank >= curr->rank - 1;
570 i++)
572 if (oe->op == notop)
574 if (dump_file && (dump_flags & TDF_DETAILS))
576 fprintf (dump_file, "Equivalence: ");
577 print_generic_expr (dump_file, notop, 0);
578 if (opcode == BIT_AND_EXPR)
579 fprintf (dump_file, " & ~");
580 else if (opcode == BIT_IOR_EXPR)
581 fprintf (dump_file, " | ~");
582 print_generic_expr (dump_file, oe->op, 0);
583 if (opcode == BIT_AND_EXPR)
584 fprintf (dump_file, " -> 0\n");
585 else if (opcode == BIT_IOR_EXPR)
586 fprintf (dump_file, " -> -1\n");
589 if (opcode == BIT_AND_EXPR)
590 oe->op = fold_convert (TREE_TYPE (oe->op), integer_zero_node);
591 else if (opcode == BIT_IOR_EXPR)
592 oe->op = build_low_bits_mask (TREE_TYPE (oe->op),
593 TYPE_PRECISION (TREE_TYPE (oe->op)));
595 reassociate_stats.ops_eliminated
596 += VEC_length (operand_entry_t, *ops) - 1;
597 VEC_free (operand_entry_t, heap, *ops);
598 *ops = NULL;
599 VEC_safe_push (operand_entry_t, heap, *ops, oe);
600 return true;
604 return false;
607 /* Use constant value that may be present in OPS to try to eliminate
608 operands. Note that this function is only really used when we've
609 eliminated ops for other reasons, or merged constants. Across
610 single statements, fold already does all of this, plus more. There
611 is little point in duplicating logic, so I've only included the
612 identities that I could ever construct testcases to trigger. */
614 static void
615 eliminate_using_constants (enum tree_code opcode,
616 VEC(operand_entry_t, heap) **ops)
618 operand_entry_t oelast = VEC_last (operand_entry_t, *ops);
620 if (oelast->rank == 0 && INTEGRAL_TYPE_P (TREE_TYPE (oelast->op)))
622 switch (opcode)
624 case BIT_AND_EXPR:
625 if (integer_zerop (oelast->op))
627 if (VEC_length (operand_entry_t, *ops) != 1)
629 if (dump_file && (dump_flags & TDF_DETAILS))
630 fprintf (dump_file, "Found & 0, removing all other ops\n");
632 reassociate_stats.ops_eliminated
633 += VEC_length (operand_entry_t, *ops) - 1;
635 VEC_free (operand_entry_t, heap, *ops);
636 *ops = NULL;
637 VEC_safe_push (operand_entry_t, heap, *ops, oelast);
638 return;
641 else if (integer_all_onesp (oelast->op))
643 if (VEC_length (operand_entry_t, *ops) != 1)
645 if (dump_file && (dump_flags & TDF_DETAILS))
646 fprintf (dump_file, "Found & -1, removing\n");
647 VEC_pop (operand_entry_t, *ops);
648 reassociate_stats.ops_eliminated++;
651 break;
652 case BIT_IOR_EXPR:
653 if (integer_all_onesp (oelast->op))
655 if (VEC_length (operand_entry_t, *ops) != 1)
657 if (dump_file && (dump_flags & TDF_DETAILS))
658 fprintf (dump_file, "Found | -1, removing all other ops\n");
660 reassociate_stats.ops_eliminated
661 += VEC_length (operand_entry_t, *ops) - 1;
663 VEC_free (operand_entry_t, heap, *ops);
664 *ops = NULL;
665 VEC_safe_push (operand_entry_t, heap, *ops, oelast);
666 return;
669 else if (integer_zerop (oelast->op))
671 if (VEC_length (operand_entry_t, *ops) != 1)
673 if (dump_file && (dump_flags & TDF_DETAILS))
674 fprintf (dump_file, "Found | 0, removing\n");
675 VEC_pop (operand_entry_t, *ops);
676 reassociate_stats.ops_eliminated++;
679 break;
680 case MULT_EXPR:
681 if (integer_zerop (oelast->op))
683 if (VEC_length (operand_entry_t, *ops) != 1)
685 if (dump_file && (dump_flags & TDF_DETAILS))
686 fprintf (dump_file, "Found * 0, removing all other ops\n");
688 reassociate_stats.ops_eliminated
689 += VEC_length (operand_entry_t, *ops) - 1;
690 VEC_free (operand_entry_t, heap, *ops);
691 *ops = NULL;
692 VEC_safe_push (operand_entry_t, heap, *ops, oelast);
693 return;
696 else if (integer_onep (oelast->op))
698 if (VEC_length (operand_entry_t, *ops) != 1)
700 if (dump_file && (dump_flags & TDF_DETAILS))
701 fprintf (dump_file, "Found * 1, removing\n");
702 VEC_pop (operand_entry_t, *ops);
703 reassociate_stats.ops_eliminated++;
704 return;
707 break;
708 case BIT_XOR_EXPR:
709 case PLUS_EXPR:
710 case MINUS_EXPR:
711 if (integer_zerop (oelast->op))
713 if (VEC_length (operand_entry_t, *ops) != 1)
715 if (dump_file && (dump_flags & TDF_DETAILS))
716 fprintf (dump_file, "Found [|^+] 0, removing\n");
717 VEC_pop (operand_entry_t, *ops);
718 reassociate_stats.ops_eliminated++;
719 return;
722 break;
723 default:
724 break;
729 /* Perform various identities and other optimizations on the list of
730 operand entries, stored in OPS. The tree code for the binary
731 operation between all the operands is OPCODE. */
733 static void
734 optimize_ops_list (enum tree_code opcode,
735 VEC (operand_entry_t, heap) **ops)
737 unsigned int length = VEC_length (operand_entry_t, *ops);
738 unsigned int i;
739 operand_entry_t oe;
740 operand_entry_t oelast = NULL;
741 bool iterate = false;
743 if (length == 1)
744 return;
746 oelast = VEC_last (operand_entry_t, *ops);
748 /* If the last two are constants, pop the constants off, merge them
749 and try the next two. */
750 if (oelast->rank == 0 && is_gimple_min_invariant (oelast->op))
752 operand_entry_t oelm1 = VEC_index (operand_entry_t, *ops, length - 2);
754 if (oelm1->rank == 0
755 && is_gimple_min_invariant (oelm1->op)
756 && lang_hooks.types_compatible_p (TREE_TYPE (oelm1->op),
757 TREE_TYPE (oelast->op)))
759 tree folded = fold_binary (opcode, TREE_TYPE (oelm1->op),
760 oelm1->op, oelast->op);
762 if (folded && is_gimple_min_invariant (folded))
764 if (dump_file && (dump_flags & TDF_DETAILS))
765 fprintf (dump_file, "Merging constants\n");
767 VEC_pop (operand_entry_t, *ops);
768 VEC_pop (operand_entry_t, *ops);
770 add_to_ops_vec (ops, folded);
771 reassociate_stats.constants_eliminated++;
773 optimize_ops_list (opcode, ops);
774 return;
779 eliminate_using_constants (opcode, ops);
780 oelast = NULL;
782 for (i = 0; VEC_iterate (operand_entry_t, *ops, i, oe);)
784 bool done = false;
786 if (eliminate_not_pairs (opcode, ops, i, oe))
787 return;
788 if (eliminate_duplicate_pair (opcode, ops, &done, i, oe, oelast)
789 || (!done && eliminate_plus_minus_pair (opcode, ops, i, oe)))
791 if (done)
792 return;
793 iterate = true;
794 oelast = NULL;
795 continue;
797 oelast = oe;
798 i++;
801 length = VEC_length (operand_entry_t, *ops);
802 oelast = VEC_last (operand_entry_t, *ops);
804 if (iterate)
805 optimize_ops_list (opcode, ops);
808 /* Return true if OPERAND is defined by a PHI node which uses the LHS
809 of STMT in it's operands. This is also known as a "destructive
810 update" operation. */
812 static bool
813 is_phi_for_stmt (tree stmt, tree operand)
815 tree def_stmt;
816 tree lhs = TREE_OPERAND (stmt, 0);
817 use_operand_p arg_p;
818 ssa_op_iter i;
820 if (TREE_CODE (operand) != SSA_NAME)
821 return false;
823 def_stmt = SSA_NAME_DEF_STMT (operand);
824 if (TREE_CODE (def_stmt) != PHI_NODE)
825 return false;
827 FOR_EACH_PHI_ARG (arg_p, def_stmt, i, SSA_OP_USE)
828 if (lhs == USE_FROM_PTR (arg_p))
829 return true;
830 return false;
833 /* Recursively rewrite our linearized statements so that the operators
834 match those in OPS[OPINDEX], putting the computation in rank
835 order. */
837 static void
838 rewrite_expr_tree (tree stmt, unsigned int opindex,
839 VEC(operand_entry_t, heap) * ops)
841 tree rhs = TREE_OPERAND (stmt, 1);
842 operand_entry_t oe;
844 /* If we have three operands left, then we want to make sure the one
845 that gets the double binary op are the ones with the same rank.
847 The alternative we try is to see if this is a destructive
848 update style statement, which is like:
849 b = phi (a, ...)
850 a = c + b;
851 In that case, we want to use the destructive update form to
852 expose the possible vectorizer sum reduction opportunity.
853 In that case, the third operand will be the phi node.
855 We could, of course, try to be better as noted above, and do a
856 lot of work to try to find these opportunities in >3 operand
857 cases, but it is unlikely to be worth it. */
858 if (opindex + 3 == VEC_length (operand_entry_t, ops))
860 operand_entry_t oe1, oe2, oe3;
862 oe1 = VEC_index (operand_entry_t, ops, opindex);
863 oe2 = VEC_index (operand_entry_t, ops, opindex + 1);
864 oe3 = VEC_index (operand_entry_t, ops, opindex + 2);
866 if ((oe1->rank == oe2->rank
867 && oe2->rank != oe3->rank)
868 || (is_phi_for_stmt (stmt, oe3->op)
869 && !is_phi_for_stmt (stmt, oe1->op)
870 && !is_phi_for_stmt (stmt, oe2->op)))
872 struct operand_entry temp = *oe3;
873 oe3->op = oe1->op;
874 oe3->rank = oe1->rank;
875 oe1->op = temp.op;
876 oe1->rank= temp.rank;
880 /* The final recursion case for this function is that you have
881 exactly two operations left.
882 If we had one exactly one op in the entire list to start with, we
883 would have never called this function, and the tail recursion
884 rewrites them one at a time. */
885 if (opindex + 2 == VEC_length (operand_entry_t, ops))
887 operand_entry_t oe1, oe2;
889 oe1 = VEC_index (operand_entry_t, ops, opindex);
890 oe2 = VEC_index (operand_entry_t, ops, opindex + 1);
892 if (TREE_OPERAND (rhs, 0) != oe1->op
893 || TREE_OPERAND (rhs, 1) != oe2->op)
896 if (dump_file && (dump_flags & TDF_DETAILS))
898 fprintf (dump_file, "Transforming ");
899 print_generic_expr (dump_file, rhs, 0);
902 TREE_OPERAND (rhs, 0) = oe1->op;
903 TREE_OPERAND (rhs, 1) = oe2->op;
904 update_stmt (stmt);
906 if (dump_file && (dump_flags & TDF_DETAILS))
908 fprintf (dump_file, " into ");
909 print_generic_stmt (dump_file, rhs, 0);
913 return;
916 /* If we hit here, we should have 3 or more ops left. */
917 gcc_assert (opindex + 2 < VEC_length (operand_entry_t, ops));
919 /* Rewrite the next operator. */
920 oe = VEC_index (operand_entry_t, ops, opindex);
922 if (oe->op != TREE_OPERAND (rhs, 1))
925 if (dump_file && (dump_flags & TDF_DETAILS))
927 fprintf (dump_file, "Transforming ");
928 print_generic_expr (dump_file, rhs, 0);
931 TREE_OPERAND (rhs, 1) = oe->op;
932 update_stmt (stmt);
934 if (dump_file && (dump_flags & TDF_DETAILS))
936 fprintf (dump_file, " into ");
937 print_generic_stmt (dump_file, rhs, 0);
940 /* Recurse on the LHS of the binary operator, which is guaranteed to
941 be the non-leaf side. */
942 rewrite_expr_tree (SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0)),
943 opindex + 1, ops);
946 /* Transform STMT, which is really (A +B) + (C + D) into the left
947 linear form, ((A+B)+C)+D.
948 Recurse on D if necessary. */
950 static void
951 linearize_expr (tree stmt)
953 block_stmt_iterator bsinow, bsirhs;
954 tree rhs = TREE_OPERAND (stmt, 1);
955 enum tree_code rhscode = TREE_CODE (rhs);
956 tree binrhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));
957 tree binlhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
958 tree newbinrhs = NULL_TREE;
960 gcc_assert (is_reassociable_op (binlhs, TREE_CODE (rhs))
961 && is_reassociable_op (binrhs, TREE_CODE (rhs)));
963 bsinow = bsi_for_stmt (stmt);
964 bsirhs = bsi_for_stmt (binrhs);
965 bsi_move_before (&bsirhs, &bsinow);
967 TREE_OPERAND (rhs, 1) = TREE_OPERAND (TREE_OPERAND (binrhs, 1), 0);
968 if (TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME)
969 newbinrhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));
970 TREE_OPERAND (TREE_OPERAND (binrhs, 1), 0) = TREE_OPERAND (binlhs, 0);
971 TREE_OPERAND (rhs, 0) = TREE_OPERAND (binrhs, 0);
973 if (dump_file && (dump_flags & TDF_DETAILS))
975 fprintf (dump_file, "Linearized: ");
976 print_generic_stmt (dump_file, rhs, 0);
979 reassociate_stats.linearized++;
980 update_stmt (binrhs);
981 update_stmt (binlhs);
982 update_stmt (stmt);
983 TREE_VISITED (binrhs) = 1;
984 TREE_VISITED (binlhs) = 1;
985 TREE_VISITED (stmt) = 1;
987 /* Tail recurse on the new rhs if it still needs reassociation. */
988 if (newbinrhs && is_reassociable_op (newbinrhs, rhscode))
989 linearize_expr (stmt);
993 /* If LHS has a single immediate use that is a MODIFY_EXPR, return
994 it. Otherwise, return NULL. */
996 static tree
997 get_single_immediate_use (tree lhs)
999 use_operand_p immuse;
1000 tree immusestmt;
1002 if (TREE_CODE (lhs) == SSA_NAME
1003 && single_imm_use (lhs, &immuse, &immusestmt))
1005 if (TREE_CODE (immusestmt) == RETURN_EXPR)
1006 immusestmt = TREE_OPERAND (immusestmt, 0);
1007 if (TREE_CODE (immusestmt) == MODIFY_EXPR)
1008 return immusestmt;
1010 return NULL_TREE;
1012 static VEC(tree, heap) *broken_up_subtracts;
1015 /* Recursively negate the value of TONEGATE, and return the SSA_NAME
1016 representing the negated value. Insertions of any necessary
1017 instructions go before BSI.
1018 This function is recursive in that, if you hand it "a_5" as the
1019 value to negate, and a_5 is defined by "a_5 = b_3 + b_4", it will
1020 transform b_3 + b_4 into a_5 = -b_3 + -b_4. */
1022 static tree
1023 negate_value (tree tonegate, block_stmt_iterator *bsi)
1025 tree negatedef = tonegate;
1026 tree resultofnegate;
1028 if (TREE_CODE (tonegate) == SSA_NAME)
1029 negatedef = SSA_NAME_DEF_STMT (tonegate);
1031 /* If we are trying to negate a name, defined by an add, negate the
1032 add operands instead. */
1033 if (TREE_CODE (tonegate) == SSA_NAME
1034 && TREE_CODE (negatedef) == MODIFY_EXPR
1035 && TREE_CODE (TREE_OPERAND (negatedef, 0)) == SSA_NAME
1036 && has_single_use (TREE_OPERAND (negatedef, 0))
1037 && TREE_CODE (TREE_OPERAND (negatedef, 1)) == PLUS_EXPR)
1039 block_stmt_iterator bsi;
1040 tree binop = TREE_OPERAND (negatedef, 1);
1042 bsi = bsi_for_stmt (negatedef);
1043 TREE_OPERAND (binop, 0) = negate_value (TREE_OPERAND (binop, 0),
1044 &bsi);
1045 bsi = bsi_for_stmt (negatedef);
1046 TREE_OPERAND (binop, 1) = negate_value (TREE_OPERAND (binop, 1),
1047 &bsi);
1048 update_stmt (negatedef);
1049 return TREE_OPERAND (negatedef, 0);
1052 tonegate = fold_build1 (NEGATE_EXPR, TREE_TYPE (tonegate), tonegate);
1053 resultofnegate = force_gimple_operand_bsi (bsi, tonegate, true,
1054 NULL_TREE);
1055 VEC_safe_push (tree, heap, broken_up_subtracts, resultofnegate);
1056 return resultofnegate;
1060 /* Return true if we should break up the subtract in STMT into an add
1061 with negate. This is true when we the subtract operands are really
1062 adds, or the subtract itself is used in an add expression. In
1063 either case, breaking up the subtract into an add with negate
1064 exposes the adds to reassociation. */
1066 static bool
1067 should_break_up_subtract (tree stmt)
1070 tree lhs = TREE_OPERAND (stmt, 0);
1071 tree rhs = TREE_OPERAND (stmt, 1);
1072 tree binlhs = TREE_OPERAND (rhs, 0);
1073 tree binrhs = TREE_OPERAND (rhs, 1);
1074 tree immusestmt;
1076 if (TREE_CODE (binlhs) == SSA_NAME
1077 && is_reassociable_op (SSA_NAME_DEF_STMT (binlhs), PLUS_EXPR))
1078 return true;
1080 if (TREE_CODE (binrhs) == SSA_NAME
1081 && is_reassociable_op (SSA_NAME_DEF_STMT (binrhs), PLUS_EXPR))
1082 return true;
1084 if (TREE_CODE (lhs) == SSA_NAME
1085 && (immusestmt = get_single_immediate_use (lhs))
1086 && TREE_CODE (TREE_OPERAND (immusestmt, 1)) == PLUS_EXPR)
1087 return true;
1088 return false;
1092 /* Transform STMT from A - B into A + -B. */
1094 static void
1095 break_up_subtract (tree stmt, block_stmt_iterator *bsi)
1097 tree rhs = TREE_OPERAND (stmt, 1);
1099 if (dump_file && (dump_flags & TDF_DETAILS))
1101 fprintf (dump_file, "Breaking up subtract ");
1102 print_generic_stmt (dump_file, stmt, 0);
1105 TREE_SET_CODE (TREE_OPERAND (stmt, 1), PLUS_EXPR);
1106 TREE_OPERAND (rhs, 1) = negate_value (TREE_OPERAND (rhs, 1), bsi);
1108 update_stmt (stmt);
1111 /* Recursively linearize a binary expression that is the RHS of STMT.
1112 Place the operands of the expression tree in the vector named OPS. */
1114 static void
1115 linearize_expr_tree (VEC(operand_entry_t, heap) **ops, tree stmt)
1117 block_stmt_iterator bsinow, bsilhs;
1118 tree rhs = TREE_OPERAND (stmt, 1);
1119 tree binrhs = TREE_OPERAND (rhs, 1);
1120 tree binlhs = TREE_OPERAND (rhs, 0);
1121 tree binlhsdef, binrhsdef;
1122 bool binlhsisreassoc = false;
1123 bool binrhsisreassoc = false;
1124 enum tree_code rhscode = TREE_CODE (rhs);
1126 TREE_VISITED (stmt) = 1;
1128 if (TREE_CODE (binlhs) == SSA_NAME)
1130 binlhsdef = SSA_NAME_DEF_STMT (binlhs);
1131 binlhsisreassoc = is_reassociable_op (binlhsdef, rhscode);
1134 if (TREE_CODE (binrhs) == SSA_NAME)
1136 binrhsdef = SSA_NAME_DEF_STMT (binrhs);
1137 binrhsisreassoc = is_reassociable_op (binrhsdef, rhscode);
1140 /* If the LHS is not reassociable, but the RHS is, we need to swap
1141 them. If neither is reassociable, there is nothing we can do, so
1142 just put them in the ops vector. If the LHS is reassociable,
1143 linearize it. If both are reassociable, then linearize the RHS
1144 and the LHS. */
1146 if (!binlhsisreassoc)
1148 tree temp;
1150 if (!binrhsisreassoc)
1152 add_to_ops_vec (ops, binrhs);
1153 add_to_ops_vec (ops, binlhs);
1154 return;
1157 if (dump_file && (dump_flags & TDF_DETAILS))
1159 fprintf (dump_file, "swapping operands of ");
1160 print_generic_expr (dump_file, stmt, 0);
1163 swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
1164 &TREE_OPERAND (rhs, 1));
1165 update_stmt (stmt);
1167 if (dump_file && (dump_flags & TDF_DETAILS))
1169 fprintf (dump_file, " is now ");
1170 print_generic_stmt (dump_file, stmt, 0);
1173 /* We want to make it so the lhs is always the reassociative op,
1174 so swap. */
1175 temp = binlhs;
1176 binlhs = binrhs;
1177 binrhs = temp;
1179 else if (binrhsisreassoc)
1181 linearize_expr (stmt);
1182 gcc_assert (rhs == TREE_OPERAND (stmt, 1));
1183 binlhs = TREE_OPERAND (rhs, 0);
1184 binrhs = TREE_OPERAND (rhs, 1);
1187 gcc_assert (TREE_CODE (binrhs) != SSA_NAME
1188 || !is_reassociable_op (SSA_NAME_DEF_STMT (binrhs), rhscode));
1189 bsinow = bsi_for_stmt (stmt);
1190 bsilhs = bsi_for_stmt (SSA_NAME_DEF_STMT (binlhs));
1191 bsi_move_before (&bsilhs, &bsinow);
1192 linearize_expr_tree (ops, SSA_NAME_DEF_STMT (binlhs));
1193 add_to_ops_vec (ops, binrhs);
1196 /* Repropagate the negates back into subtracts, since no other pass
1197 currently does it. */
1199 static void
1200 repropagate_negates (void)
1202 unsigned int i = 0;
1203 tree negate;
1205 for (i = 0; VEC_iterate (tree, broken_up_subtracts, i, negate); i++)
1207 tree user = get_single_immediate_use (negate);
1209 /* The negate operand can be either operand of a PLUS_EXPR
1210 (it can be the LHS if the RHS is a constant for example).
1212 Force the negate operand to the RHS of the PLUS_EXPR, then
1213 transform the PLUS_EXPR into a MINUS_EXPR. */
1214 if (user
1215 && TREE_CODE (user) == MODIFY_EXPR
1216 && TREE_CODE (TREE_OPERAND (user, 1)) == PLUS_EXPR)
1218 tree rhs = TREE_OPERAND (user, 1);
1220 /* If the negated operand appears on the LHS of the
1221 PLUS_EXPR, exchange the operands of the PLUS_EXPR
1222 to force the negated operand to the RHS of the PLUS_EXPR. */
1223 if (TREE_OPERAND (TREE_OPERAND (user, 1), 0) == negate)
1225 tree temp = TREE_OPERAND (rhs, 0);
1226 TREE_OPERAND (rhs, 0) = TREE_OPERAND (rhs, 1);
1227 TREE_OPERAND (rhs, 1) = temp;
1230 /* Now transform the PLUS_EXPR into a MINUS_EXPR and replace
1231 the RHS of the PLUS_EXPR with the operand of the NEGATE_EXPR. */
1232 if (TREE_OPERAND (TREE_OPERAND (user, 1), 1) == negate)
1234 TREE_SET_CODE (rhs, MINUS_EXPR);
1235 TREE_OPERAND (rhs, 1) = get_unary_op (negate, NEGATE_EXPR);
1236 update_stmt (user);
1242 /* Break up subtract operations in block BB.
1244 We do this top down because we don't know whether the subtract is
1245 part of a possible chain of reassociation except at the top.
1247 IE given
1248 d = f + g
1249 c = a + e
1250 b = c - d
1251 q = b - r
1252 k = t - q
1254 we want to break up k = t - q, but we won't until we've transformed q
1255 = b - r, which won't be broken up until we transform b = c - d. */
1257 static void
1258 break_up_subtract_bb (basic_block bb)
1260 block_stmt_iterator bsi;
1261 basic_block son;
1263 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1265 tree stmt = bsi_stmt (bsi);
1267 if (TREE_CODE (stmt) == MODIFY_EXPR)
1269 tree lhs = TREE_OPERAND (stmt, 0);
1270 tree rhs = TREE_OPERAND (stmt, 1);
1272 TREE_VISITED (stmt) = 0;
1273 /* If unsafe math optimizations we can do reassociation for
1274 non-integral types. */
1275 if ((!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1276 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs)))
1277 && (!SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs))
1278 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE(lhs))
1279 || !flag_unsafe_math_optimizations))
1280 continue;
1282 /* Check for a subtract used only in an addition. If this
1283 is the case, transform it into add of a negate for better
1284 reassociation. IE transform C = A-B into C = A + -B if C
1285 is only used in an addition. */
1286 if (TREE_CODE (rhs) == MINUS_EXPR)
1287 if (should_break_up_subtract (stmt))
1288 break_up_subtract (stmt, &bsi);
1291 for (son = first_dom_son (CDI_DOMINATORS, bb);
1292 son;
1293 son = next_dom_son (CDI_DOMINATORS, son))
1294 break_up_subtract_bb (son);
1297 /* Reassociate expressions in basic block BB and its post-dominator as
1298 children. */
1300 static void
1301 reassociate_bb (basic_block bb)
1303 block_stmt_iterator bsi;
1304 basic_block son;
1306 for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
1308 tree stmt = bsi_stmt (bsi);
1310 if (TREE_CODE (stmt) == MODIFY_EXPR)
1312 tree lhs = TREE_OPERAND (stmt, 0);
1313 tree rhs = TREE_OPERAND (stmt, 1);
1315 /* If this was part of an already processed tree, we don't
1316 need to touch it again. */
1317 if (TREE_VISITED (stmt))
1318 continue;
1320 /* If unsafe math optimizations we can do reassociation for
1321 non-integral types. */
1322 if ((!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1323 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs)))
1324 && (!SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs))
1325 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE(lhs))
1326 || !flag_unsafe_math_optimizations))
1327 continue;
1329 if (associative_tree_code (TREE_CODE (rhs)))
1331 VEC(operand_entry_t, heap) *ops = NULL;
1333 /* There may be no immediate uses left by the time we
1334 get here because we may have eliminated them all. */
1335 if (TREE_CODE (lhs) == SSA_NAME && has_zero_uses (lhs))
1336 continue;
1338 TREE_VISITED (stmt) = 1;
1339 linearize_expr_tree (&ops, stmt);
1340 qsort (VEC_address (operand_entry_t, ops),
1341 VEC_length (operand_entry_t, ops),
1342 sizeof (operand_entry_t),
1343 sort_by_operand_rank);
1344 optimize_ops_list (TREE_CODE (rhs), &ops);
1346 if (VEC_length (operand_entry_t, ops) == 1)
1348 if (dump_file && (dump_flags & TDF_DETAILS))
1350 fprintf (dump_file, "Transforming ");
1351 print_generic_expr (dump_file, rhs, 0);
1353 TREE_OPERAND (stmt, 1) = VEC_last (operand_entry_t, ops)->op;
1354 update_stmt (stmt);
1356 if (dump_file && (dump_flags & TDF_DETAILS))
1358 fprintf (dump_file, " into ");
1359 print_generic_stmt (dump_file,
1360 TREE_OPERAND (stmt, 1), 0);
1363 else
1365 rewrite_expr_tree (stmt, 0, ops);
1368 VEC_free (operand_entry_t, heap, ops);
1372 for (son = first_dom_son (CDI_POST_DOMINATORS, bb);
1373 son;
1374 son = next_dom_son (CDI_POST_DOMINATORS, son))
1375 reassociate_bb (son);
1378 void dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops);
1379 void debug_ops_vector (VEC (operand_entry_t, heap) *ops);
1381 /* Dump the operand entry vector OPS to FILE. */
1383 void
1384 dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops)
1386 operand_entry_t oe;
1387 unsigned int i;
1389 for (i = 0; VEC_iterate (operand_entry_t, ops, i, oe); i++)
1391 fprintf (file, "Op %d -> rank: %d, tree: ", i, oe->rank);
1392 print_generic_stmt (file, oe->op, 0);
1396 /* Dump the operand entry vector OPS to STDERR. */
1398 void
1399 debug_ops_vector (VEC (operand_entry_t, heap) *ops)
1401 dump_ops_vector (stderr, ops);
1404 static void
1405 do_reassoc (void)
1407 break_up_subtract_bb (ENTRY_BLOCK_PTR);
1408 reassociate_bb (EXIT_BLOCK_PTR);
1411 /* Initialize the reassociation pass. */
1413 static void
1414 init_reassoc (void)
1416 int i;
1417 unsigned int rank = 2;
1418 tree param;
1419 int *bbs = XNEWVEC (int, last_basic_block + 1);
1421 memset (&reassociate_stats, 0, sizeof (reassociate_stats));
1423 operand_entry_pool = create_alloc_pool ("operand entry pool",
1424 sizeof (struct operand_entry), 30);
1426 /* Reverse RPO (Reverse Post Order) will give us something where
1427 deeper loops come later. */
1428 pre_and_rev_post_order_compute (NULL, bbs, false);
1429 bb_rank = XCNEWVEC (unsigned int, last_basic_block + 1);
1431 operand_rank = htab_create (511, operand_entry_hash,
1432 operand_entry_eq, 0);
1434 /* Give each argument a distinct rank. */
1435 for (param = DECL_ARGUMENTS (current_function_decl);
1436 param;
1437 param = TREE_CHAIN (param))
1439 if (default_def (param) != NULL)
1441 tree def = default_def (param);
1442 insert_operand_rank (def, ++rank);
1446 /* Give the chain decl a distinct rank. */
1447 if (cfun->static_chain_decl != NULL)
1449 tree def = default_def (cfun->static_chain_decl);
1450 if (def != NULL)
1451 insert_operand_rank (def, ++rank);
1454 /* Set up rank for each BB */
1455 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
1456 bb_rank[bbs[i]] = ++rank << 16;
1458 free (bbs);
1459 calculate_dominance_info (CDI_DOMINATORS);
1460 calculate_dominance_info (CDI_POST_DOMINATORS);
1461 broken_up_subtracts = NULL;
1464 /* Cleanup after the reassociation pass, and print stats if
1465 requested. */
1467 static void
1468 fini_reassoc (void)
1471 if (dump_file && (dump_flags & TDF_STATS))
1473 fprintf (dump_file, "Reassociation stats:\n");
1474 fprintf (dump_file, "Linearized: %d\n",
1475 reassociate_stats.linearized);
1476 fprintf (dump_file, "Constants eliminated: %d\n",
1477 reassociate_stats.constants_eliminated);
1478 fprintf (dump_file, "Ops eliminated: %d\n",
1479 reassociate_stats.ops_eliminated);
1480 fprintf (dump_file, "Statements rewritten: %d\n",
1481 reassociate_stats.rewritten);
1483 htab_delete (operand_rank);
1485 free_alloc_pool (operand_entry_pool);
1486 free (bb_rank);
1487 VEC_free (tree, heap, broken_up_subtracts);
1488 free_dominance_info (CDI_POST_DOMINATORS);
/* Main entry point for the reassociation pass: set up, run both
   phases, repropagate negates, and tear down.  Always returns 0
   (no extra TODO flags).  */

static unsigned int
execute_reassoc (void)
{
  init_reassoc ();

  do_reassoc ();
  repropagate_negates ();

  fini_reassoc ();
  return 0;
}
1505 struct tree_opt_pass pass_reassoc =
1507 "reassoc", /* name */
1508 NULL, /* gate */
1509 execute_reassoc, /* execute */
1510 NULL, /* sub */
1511 NULL, /* next */
1512 0, /* static_pass_number */
1513 TV_TREE_REASSOC, /* tv_id */
1514 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
1515 0, /* properties_provided */
1516 0, /* properties_destroyed */
1517 0, /* todo_flags_start */
1518 TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
1519 0 /* letter */