* varasm.c (elf_record_gcc_switches): Cast second argument of
[official-gcc.git] / gcc / tree-ssa-reassoc.c
blob17c4c6f4a5292883128ccb38316560a93a558302
1 /* Reassociation for trees.
2 Copyright (C) 2005 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "errors.h"
27 #include "ggc.h"
28 #include "tree.h"
29 #include "basic-block.h"
30 #include "diagnostic.h"
31 #include "tree-inline.h"
32 #include "tree-flow.h"
33 #include "tree-gimple.h"
34 #include "tree-dump.h"
35 #include "timevar.h"
36 #include "tree-iterator.h"
37 #include "tree-pass.h"
38 #include "alloc-pool.h"
39 #include "vec.h"
40 #include "langhooks.h"
42 /* This is a simple global reassociation pass. It is, in part, based
43 on the LLVM pass of the same name (They do some things more/less
44 than we do, in different orders, etc).
46 It consists of five steps:
48 1. Breaking up subtract operations into addition + negate, where
49 it would promote the reassociation of adds.
51 2. Left linearization of the expression trees, so that (A+B)+(C+D)
52 becomes (((A+B)+C)+D), which is easier for us to rewrite later.
53 During linearization, we place the operands of the binary
54 expressions into a vector of operand_entry_t
56 3. Optimization of the operand lists, eliminating things like a +
57 -a, a & a, etc.
59 4. Rewrite the expression trees we linearized and optimized so
60 they are in proper rank order.
62 5. Repropagate negates, as nothing else will clean it up ATM.
64 A bit of theory on #4, since nobody seems to write anything down
65 about why it makes sense to do it the way they do it:
67 We could do this much nicer theoretically, but don't (for reasons
68 explained after how to do it theoretically nice :P).
70 In order to promote the most redundancy elimination, you want
71 binary expressions whose operands are the same rank (or
72 preferably, the same value) exposed to the redundancy eliminator,
73 for possible elimination.
75 So the way to do this if we really cared, is to build the new op
76 tree from the leaves to the roots, merging as you go, and putting the
77 new op on the end of the worklist, until you are left with one
78 thing on the worklist.
80 IE if you have to rewrite the following set of operands (listed with
81 rank in parentheses), with opcode PLUS_EXPR:
83 a (1), b (1), c (1), d (2), e (2)
86 We start with our merge worklist empty, and the ops list with all of
87 those on it.
89 You want to first merge all leaves of the same rank, as much as
90 possible.
92 So first build a binary op of
94 mergetmp = a + b, and put "mergetmp" on the merge worklist.
96 Because there is no three operand form of PLUS_EXPR, c is not going to
97 be exposed to redundancy elimination as a rank 1 operand.
99 So you might as well throw it on the merge worklist (you could also
100 consider it to now be a rank two operand, and merge it with d and e,
101 but in this case, you then have evicted e from a binary op. So at
102 least in this situation, you can't win.)
104 Then build a binary op of d + e
105 mergetmp2 = d + e
107 and put mergetmp2 on the merge worklist.
109 so merge worklist = {mergetmp, c, mergetmp2}
111 Continue building binary ops of these operations until you have only
112 one operation left on the worklist.
114 So we have
116 build binary op
117 mergetmp3 = mergetmp + c
119 worklist = {mergetmp2, mergetmp3}
121 mergetmp4 = mergetmp2 + mergetmp3
123 worklist = {mergetmp4}
125 because we have one operation left, we can now just set the original
126 statement equal to the result of that operation.
128 This will at least expose a + b and d + e to redundancy elimination
129 as binary operations.
131 For extra points, you can reuse the old statements to build the
132 mergetmps, since you shouldn't run out.
134 So why don't we do this?
136 Because it's expensive, and rarely will help. Most trees we are
137 reassociating have 3 or less ops. If they have 2 ops, they already
138 will be written into a nice single binary op. If you have 3 ops, a
139 single simple check suffices to tell you whether the first two are of the
140 same rank. If so, you know to order it
142 mergetmp = op1 + op2
143 newstmt = mergetmp + op3
145 instead of
146 mergetmp = op2 + op3
147 newstmt = mergetmp + op1
149 If all three are of the same rank, you can't expose them all in a
150 single binary operator anyway, so the above is *still* the best you
151 can do.
153 Thus, this is what we do. When we have three ops left, we check to see
154 what order to put them in, and call it a day. As a nod to vector sum
155 reduction, we check if any of the ops are really a phi node that is a
156 destructive update for the associating op, and keep the destructive
157 update together for vector sum reduction recognition. */
/* Statistics gathered by the pass, dumped via the dump file at the
   end of a run: counts of linearized expressions, constants merged,
   operands eliminated, and expression trees rewritten.  */
static struct
{
  int linearized;
  int constants_eliminated;
  int ops_eliminated;
  int rewritten;
} reassociate_stats;
169 /* Operator, rank pair. */
170 typedef struct operand_entry
172 unsigned int rank;
173 tree op;
174 } *operand_entry_t;
176 static alloc_pool operand_entry_pool;
179 /* Starting rank number for a given basic block, so that we can rank
180 operations using unmovable instructions in that BB based on the bb
181 depth. */
182 static unsigned int *bb_rank;
184 /* Operand->rank hashtable. */
185 static htab_t operand_rank;
188 /* Look up the operand rank structure for expression E. */
190 static operand_entry_t
191 find_operand_rank (tree e)
193 void **slot;
194 struct operand_entry vrd;
196 vrd.op = e;
197 slot = htab_find_slot (operand_rank, &vrd, NO_INSERT);
198 if (!slot)
199 return NULL;
200 return ((operand_entry_t) *slot);
203 /* Insert {E,RANK} into the operand rank hashtable. */
205 static void
206 insert_operand_rank (tree e, unsigned int rank)
208 void **slot;
209 operand_entry_t new_pair = pool_alloc (operand_entry_pool);
211 new_pair->op = e;
212 new_pair->rank = rank;
213 slot = htab_find_slot (operand_rank, new_pair, INSERT);
214 gcc_assert (*slot == NULL);
215 *slot = new_pair;
218 /* Return the hash value for a operand rank structure */
220 static hashval_t
221 operand_entry_hash (const void *p)
223 const operand_entry_t vr = (operand_entry_t) p;
224 return iterative_hash_expr (vr->op, 0);
227 /* Return true if two operand rank structures are equal. */
229 static int
230 operand_entry_eq (const void *p1, const void *p2)
232 const operand_entry_t vr1 = (operand_entry_t) p1;
233 const operand_entry_t vr2 = (operand_entry_t) p2;
234 return vr1->op == vr2->op;
237 /* Given an expression E, return the rank of the expression. */
239 static unsigned int
240 get_rank (tree e)
242 operand_entry_t vr;
244 /* Constants have rank 0. */
245 if (is_gimple_min_invariant (e))
246 return 0;
248 /* SSA_NAME's have the rank of the expression they are the result
250 For globals and uninitialized values, the rank is 0.
251 For function arguments, use the pre-setup rank.
252 For PHI nodes, stores, asm statements, etc, we use the rank of
253 the BB.
254 For simple operations, the rank is the maximum rank of any of
255 its operands, or the bb_rank, whichever is less.
256 I make no claims that this is optimal, however, it gives good
257 results. */
259 if (TREE_CODE (e) == SSA_NAME)
261 tree stmt;
262 tree rhs;
263 unsigned int rank, maxrank;
264 int i;
266 if (TREE_CODE (SSA_NAME_VAR (e)) == PARM_DECL
267 && e == gimple_default_def (cfun, SSA_NAME_VAR (e)))
268 return find_operand_rank (e)->rank;
270 stmt = SSA_NAME_DEF_STMT (e);
271 if (bb_for_stmt (stmt) == NULL)
272 return 0;
274 if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
275 || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
276 return bb_rank[bb_for_stmt (stmt)->index];
278 /* If we already have a rank for this expression, use that. */
279 vr = find_operand_rank (e);
280 if (vr)
281 return vr->rank;
283 /* Otherwise, find the maximum rank for the operands, or the bb
284 rank, whichever is less. */
285 rank = 0;
286 maxrank = bb_rank[bb_for_stmt(stmt)->index];
287 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
288 if (TREE_CODE_LENGTH (TREE_CODE (rhs)) == 0)
289 rank = MAX (rank, get_rank (rhs));
290 else
292 for (i = 0;
293 i < TREE_CODE_LENGTH (TREE_CODE (rhs))
294 && TREE_OPERAND (rhs, i)
295 && rank != maxrank;
296 i++)
297 rank = MAX(rank, get_rank (TREE_OPERAND (rhs, i)));
300 if (dump_file && (dump_flags & TDF_DETAILS))
302 fprintf (dump_file, "Rank for ");
303 print_generic_expr (dump_file, e, 0);
304 fprintf (dump_file, " is %d\n", (rank + 1));
307 /* Note the rank in the hashtable so we don't recompute it. */
308 insert_operand_rank (e, (rank + 1));
309 return (rank + 1);
312 /* Globals, etc, are rank 0 */
313 return 0;
DEF_VEC_P(operand_entry_t);
DEF_VEC_ALLOC_P(operand_entry_t, heap);

/* We want integer ones to end up last no matter what, since they are
   the ones we can do the most with.  Parenthesized so the expansions
   are safe inside larger expressions (e.g. subtraction in the qsort
   comparator).  */
#define INTEGER_CONST_TYPE (1 << 3)
#define FLOAT_CONST_TYPE (1 << 2)
#define OTHER_CONST_TYPE (1 << 1)
325 /* Classify an invariant tree into integer, float, or other, so that
326 we can sort them to be near other constants of the same type. */
327 static inline int
328 constant_type (tree t)
330 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
331 return INTEGER_CONST_TYPE;
332 else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (t)))
333 return FLOAT_CONST_TYPE;
334 else
335 return OTHER_CONST_TYPE;
338 /* qsort comparison function to sort operand entries PA and PB by rank
339 so that the sorted array is ordered by rank in decreasing order. */
340 static int
341 sort_by_operand_rank (const void *pa, const void *pb)
343 const operand_entry_t oea = *(const operand_entry_t *)pa;
344 const operand_entry_t oeb = *(const operand_entry_t *)pb;
346 /* It's nicer for optimize_expression if constants that are likely
347 to fold when added/multiplied//whatever are put next to each
348 other. Since all constants have rank 0, order them by type. */
349 if (oeb->rank == 0 && oea->rank == 0)
350 return constant_type (oeb->op) - constant_type (oea->op);
352 /* Lastly, make sure the versions that are the same go next to each
353 other. We use SSA_NAME_VERSION because it's stable. */
354 if ((oeb->rank - oea->rank == 0)
355 && TREE_CODE (oea->op) == SSA_NAME
356 && TREE_CODE (oeb->op) == SSA_NAME)
357 return SSA_NAME_VERSION (oeb->op) - SSA_NAME_VERSION (oea->op);
359 return oeb->rank - oea->rank;
362 /* Add an operand entry to *OPS for the tree operand OP. */
364 static void
365 add_to_ops_vec (VEC(operand_entry_t, heap) **ops, tree op)
367 operand_entry_t oe = pool_alloc (operand_entry_pool);
369 oe->op = op;
370 oe->rank = get_rank (op);
371 VEC_safe_push (operand_entry_t, heap, *ops, oe);
374 /* Return true if STMT is reassociable operation containing a binary
375 operation with tree code CODE. */
377 static bool
378 is_reassociable_op (tree stmt, enum tree_code code)
380 if (!IS_EMPTY_STMT (stmt)
381 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
382 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == code
383 && has_single_use (GIMPLE_STMT_OPERAND (stmt, 0)))
384 return true;
385 return false;
389 /* Given NAME, if NAME is defined by a unary operation OPCODE, return the
390 operand of the negate operation. Otherwise, return NULL. */
392 static tree
393 get_unary_op (tree name, enum tree_code opcode)
395 tree stmt = SSA_NAME_DEF_STMT (name);
396 tree rhs;
398 if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
399 return NULL_TREE;
401 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
402 if (TREE_CODE (rhs) == opcode)
403 return TREE_OPERAND (rhs, 0);
404 return NULL_TREE;
407 /* If CURR and LAST are a pair of ops that OPCODE allows us to
408 eliminate through equivalences, do so, remove them from OPS, and
409 return true. Otherwise, return false. */
411 static bool
412 eliminate_duplicate_pair (enum tree_code opcode,
413 VEC (operand_entry_t, heap) **ops,
414 bool *all_done,
415 unsigned int i,
416 operand_entry_t curr,
417 operand_entry_t last)
420 /* If we have two of the same op, and the opcode is & |, min, or max,
421 we can eliminate one of them.
422 If we have two of the same op, and the opcode is ^, we can
423 eliminate both of them. */
425 if (last && last->op == curr->op)
427 switch (opcode)
429 case MAX_EXPR:
430 case MIN_EXPR:
431 case BIT_IOR_EXPR:
432 case BIT_AND_EXPR:
433 if (dump_file && (dump_flags & TDF_DETAILS))
435 fprintf (dump_file, "Equivalence: ");
436 print_generic_expr (dump_file, curr->op, 0);
437 fprintf (dump_file, " [&|minmax] ");
438 print_generic_expr (dump_file, last->op, 0);
439 fprintf (dump_file, " -> ");
440 print_generic_stmt (dump_file, last->op, 0);
443 VEC_ordered_remove (operand_entry_t, *ops, i);
444 reassociate_stats.ops_eliminated ++;
446 return true;
448 case BIT_XOR_EXPR:
449 if (dump_file && (dump_flags & TDF_DETAILS))
451 fprintf (dump_file, "Equivalence: ");
452 print_generic_expr (dump_file, curr->op, 0);
453 fprintf (dump_file, " ^ ");
454 print_generic_expr (dump_file, last->op, 0);
455 fprintf (dump_file, " -> nothing\n");
458 reassociate_stats.ops_eliminated += 2;
460 if (VEC_length (operand_entry_t, *ops) == 2)
462 VEC_free (operand_entry_t, heap, *ops);
463 *ops = NULL;
464 add_to_ops_vec (ops, fold_convert (TREE_TYPE (last->op),
465 integer_zero_node));
466 *all_done = true;
468 else
470 VEC_ordered_remove (operand_entry_t, *ops, i-1);
471 VEC_ordered_remove (operand_entry_t, *ops, i-1);
474 return true;
476 default:
477 break;
480 return false;
483 /* If OPCODE is PLUS_EXPR, CURR->OP is really a negate expression,
484 look in OPS for a corresponding positive operation to cancel it
485 out. If we find one, remove the other from OPS, replace
486 OPS[CURRINDEX] with 0, and return true. Otherwise, return
487 false. */
489 static bool
490 eliminate_plus_minus_pair (enum tree_code opcode,
491 VEC (operand_entry_t, heap) **ops,
492 unsigned int currindex,
493 operand_entry_t curr)
495 tree negateop;
496 unsigned int i;
497 operand_entry_t oe;
499 if (opcode != PLUS_EXPR || TREE_CODE (curr->op) != SSA_NAME)
500 return false;
502 negateop = get_unary_op (curr->op, NEGATE_EXPR);
503 if (negateop == NULL_TREE)
504 return false;
506 /* Any non-negated version will have a rank that is one less than
507 the current rank. So once we hit those ranks, if we don't find
508 one, we can stop. */
510 for (i = currindex + 1;
511 VEC_iterate (operand_entry_t, *ops, i, oe)
512 && oe->rank >= curr->rank - 1 ;
513 i++)
515 if (oe->op == negateop)
518 if (dump_file && (dump_flags & TDF_DETAILS))
520 fprintf (dump_file, "Equivalence: ");
521 print_generic_expr (dump_file, negateop, 0);
522 fprintf (dump_file, " + -");
523 print_generic_expr (dump_file, oe->op, 0);
524 fprintf (dump_file, " -> 0\n");
527 VEC_ordered_remove (operand_entry_t, *ops, i);
528 add_to_ops_vec (ops, fold_convert(TREE_TYPE (oe->op),
529 integer_zero_node));
530 VEC_ordered_remove (operand_entry_t, *ops, currindex);
531 reassociate_stats.ops_eliminated ++;
533 return true;
537 return false;
540 /* If OPCODE is BIT_IOR_EXPR, BIT_AND_EXPR, and, CURR->OP is really a
541 bitwise not expression, look in OPS for a corresponding operand to
542 cancel it out. If we find one, remove the other from OPS, replace
543 OPS[CURRINDEX] with 0, and return true. Otherwise, return
544 false. */
546 static bool
547 eliminate_not_pairs (enum tree_code opcode,
548 VEC (operand_entry_t, heap) **ops,
549 unsigned int currindex,
550 operand_entry_t curr)
552 tree notop;
553 unsigned int i;
554 operand_entry_t oe;
556 if ((opcode != BIT_IOR_EXPR && opcode != BIT_AND_EXPR)
557 || TREE_CODE (curr->op) != SSA_NAME)
558 return false;
560 notop = get_unary_op (curr->op, BIT_NOT_EXPR);
561 if (notop == NULL_TREE)
562 return false;
564 /* Any non-not version will have a rank that is one less than
565 the current rank. So once we hit those ranks, if we don't find
566 one, we can stop. */
568 for (i = currindex + 1;
569 VEC_iterate (operand_entry_t, *ops, i, oe)
570 && oe->rank >= curr->rank - 1;
571 i++)
573 if (oe->op == notop)
575 if (dump_file && (dump_flags & TDF_DETAILS))
577 fprintf (dump_file, "Equivalence: ");
578 print_generic_expr (dump_file, notop, 0);
579 if (opcode == BIT_AND_EXPR)
580 fprintf (dump_file, " & ~");
581 else if (opcode == BIT_IOR_EXPR)
582 fprintf (dump_file, " | ~");
583 print_generic_expr (dump_file, oe->op, 0);
584 if (opcode == BIT_AND_EXPR)
585 fprintf (dump_file, " -> 0\n");
586 else if (opcode == BIT_IOR_EXPR)
587 fprintf (dump_file, " -> -1\n");
590 if (opcode == BIT_AND_EXPR)
591 oe->op = fold_convert (TREE_TYPE (oe->op), integer_zero_node);
592 else if (opcode == BIT_IOR_EXPR)
593 oe->op = build_low_bits_mask (TREE_TYPE (oe->op),
594 TYPE_PRECISION (TREE_TYPE (oe->op)));
596 reassociate_stats.ops_eliminated
597 += VEC_length (operand_entry_t, *ops) - 1;
598 VEC_free (operand_entry_t, heap, *ops);
599 *ops = NULL;
600 VEC_safe_push (operand_entry_t, heap, *ops, oe);
601 return true;
605 return false;
608 /* Use constant value that may be present in OPS to try to eliminate
609 operands. Note that this function is only really used when we've
610 eliminated ops for other reasons, or merged constants. Across
611 single statements, fold already does all of this, plus more. There
612 is little point in duplicating logic, so I've only included the
613 identities that I could ever construct testcases to trigger. */
615 static void
616 eliminate_using_constants (enum tree_code opcode,
617 VEC(operand_entry_t, heap) **ops)
619 operand_entry_t oelast = VEC_last (operand_entry_t, *ops);
621 if (oelast->rank == 0 && INTEGRAL_TYPE_P (TREE_TYPE (oelast->op)))
623 switch (opcode)
625 case BIT_AND_EXPR:
626 if (integer_zerop (oelast->op))
628 if (VEC_length (operand_entry_t, *ops) != 1)
630 if (dump_file && (dump_flags & TDF_DETAILS))
631 fprintf (dump_file, "Found & 0, removing all other ops\n");
633 reassociate_stats.ops_eliminated
634 += VEC_length (operand_entry_t, *ops) - 1;
636 VEC_free (operand_entry_t, heap, *ops);
637 *ops = NULL;
638 VEC_safe_push (operand_entry_t, heap, *ops, oelast);
639 return;
642 else if (integer_all_onesp (oelast->op))
644 if (VEC_length (operand_entry_t, *ops) != 1)
646 if (dump_file && (dump_flags & TDF_DETAILS))
647 fprintf (dump_file, "Found & -1, removing\n");
648 VEC_pop (operand_entry_t, *ops);
649 reassociate_stats.ops_eliminated++;
652 break;
653 case BIT_IOR_EXPR:
654 if (integer_all_onesp (oelast->op))
656 if (VEC_length (operand_entry_t, *ops) != 1)
658 if (dump_file && (dump_flags & TDF_DETAILS))
659 fprintf (dump_file, "Found | -1, removing all other ops\n");
661 reassociate_stats.ops_eliminated
662 += VEC_length (operand_entry_t, *ops) - 1;
664 VEC_free (operand_entry_t, heap, *ops);
665 *ops = NULL;
666 VEC_safe_push (operand_entry_t, heap, *ops, oelast);
667 return;
670 else if (integer_zerop (oelast->op))
672 if (VEC_length (operand_entry_t, *ops) != 1)
674 if (dump_file && (dump_flags & TDF_DETAILS))
675 fprintf (dump_file, "Found | 0, removing\n");
676 VEC_pop (operand_entry_t, *ops);
677 reassociate_stats.ops_eliminated++;
680 break;
681 case MULT_EXPR:
682 if (integer_zerop (oelast->op))
684 if (VEC_length (operand_entry_t, *ops) != 1)
686 if (dump_file && (dump_flags & TDF_DETAILS))
687 fprintf (dump_file, "Found * 0, removing all other ops\n");
689 reassociate_stats.ops_eliminated
690 += VEC_length (operand_entry_t, *ops) - 1;
691 VEC_free (operand_entry_t, heap, *ops);
692 *ops = NULL;
693 VEC_safe_push (operand_entry_t, heap, *ops, oelast);
694 return;
697 else if (integer_onep (oelast->op))
699 if (VEC_length (operand_entry_t, *ops) != 1)
701 if (dump_file && (dump_flags & TDF_DETAILS))
702 fprintf (dump_file, "Found * 1, removing\n");
703 VEC_pop (operand_entry_t, *ops);
704 reassociate_stats.ops_eliminated++;
705 return;
708 break;
709 case BIT_XOR_EXPR:
710 case PLUS_EXPR:
711 case MINUS_EXPR:
712 if (integer_zerop (oelast->op))
714 if (VEC_length (operand_entry_t, *ops) != 1)
716 if (dump_file && (dump_flags & TDF_DETAILS))
717 fprintf (dump_file, "Found [|^+] 0, removing\n");
718 VEC_pop (operand_entry_t, *ops);
719 reassociate_stats.ops_eliminated++;
720 return;
723 break;
724 default:
725 break;
730 /* Perform various identities and other optimizations on the list of
731 operand entries, stored in OPS. The tree code for the binary
732 operation between all the operands is OPCODE. */
734 static void
735 optimize_ops_list (enum tree_code opcode,
736 VEC (operand_entry_t, heap) **ops)
738 unsigned int length = VEC_length (operand_entry_t, *ops);
739 unsigned int i;
740 operand_entry_t oe;
741 operand_entry_t oelast = NULL;
742 bool iterate = false;
744 if (length == 1)
745 return;
747 oelast = VEC_last (operand_entry_t, *ops);
749 /* If the last two are constants, pop the constants off, merge them
750 and try the next two. */
751 if (oelast->rank == 0 && is_gimple_min_invariant (oelast->op))
753 operand_entry_t oelm1 = VEC_index (operand_entry_t, *ops, length - 2);
755 if (oelm1->rank == 0
756 && is_gimple_min_invariant (oelm1->op)
757 && lang_hooks.types_compatible_p (TREE_TYPE (oelm1->op),
758 TREE_TYPE (oelast->op)))
760 tree folded = fold_binary (opcode, TREE_TYPE (oelm1->op),
761 oelm1->op, oelast->op);
763 if (folded && is_gimple_min_invariant (folded))
765 if (dump_file && (dump_flags & TDF_DETAILS))
766 fprintf (dump_file, "Merging constants\n");
768 VEC_pop (operand_entry_t, *ops);
769 VEC_pop (operand_entry_t, *ops);
771 add_to_ops_vec (ops, folded);
772 reassociate_stats.constants_eliminated++;
774 optimize_ops_list (opcode, ops);
775 return;
780 eliminate_using_constants (opcode, ops);
781 oelast = NULL;
783 for (i = 0; VEC_iterate (operand_entry_t, *ops, i, oe);)
785 bool done = false;
787 if (eliminate_not_pairs (opcode, ops, i, oe))
788 return;
789 if (eliminate_duplicate_pair (opcode, ops, &done, i, oe, oelast)
790 || (!done && eliminate_plus_minus_pair (opcode, ops, i, oe)))
792 if (done)
793 return;
794 iterate = true;
795 oelast = NULL;
796 continue;
798 oelast = oe;
799 i++;
802 length = VEC_length (operand_entry_t, *ops);
803 oelast = VEC_last (operand_entry_t, *ops);
805 if (iterate)
806 optimize_ops_list (opcode, ops);
809 /* Return true if OPERAND is defined by a PHI node which uses the LHS
810 of STMT in it's operands. This is also known as a "destructive
811 update" operation. */
813 static bool
814 is_phi_for_stmt (tree stmt, tree operand)
816 tree def_stmt;
817 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
818 use_operand_p arg_p;
819 ssa_op_iter i;
821 if (TREE_CODE (operand) != SSA_NAME)
822 return false;
824 def_stmt = SSA_NAME_DEF_STMT (operand);
825 if (TREE_CODE (def_stmt) != PHI_NODE)
826 return false;
828 FOR_EACH_PHI_ARG (arg_p, def_stmt, i, SSA_OP_USE)
829 if (lhs == USE_FROM_PTR (arg_p))
830 return true;
831 return false;
834 /* Recursively rewrite our linearized statements so that the operators
835 match those in OPS[OPINDEX], putting the computation in rank
836 order. */
838 static void
839 rewrite_expr_tree (tree stmt, unsigned int opindex,
840 VEC(operand_entry_t, heap) * ops)
842 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
843 operand_entry_t oe;
845 /* If we have three operands left, then we want to make sure the one
846 that gets the double binary op are the ones with the same rank.
848 The alternative we try is to see if this is a destructive
849 update style statement, which is like:
850 b = phi (a, ...)
851 a = c + b;
852 In that case, we want to use the destructive update form to
853 expose the possible vectorizer sum reduction opportunity.
854 In that case, the third operand will be the phi node.
856 We could, of course, try to be better as noted above, and do a
857 lot of work to try to find these opportunities in >3 operand
858 cases, but it is unlikely to be worth it. */
859 if (opindex + 3 == VEC_length (operand_entry_t, ops))
861 operand_entry_t oe1, oe2, oe3;
863 oe1 = VEC_index (operand_entry_t, ops, opindex);
864 oe2 = VEC_index (operand_entry_t, ops, opindex + 1);
865 oe3 = VEC_index (operand_entry_t, ops, opindex + 2);
867 if ((oe1->rank == oe2->rank
868 && oe2->rank != oe3->rank)
869 || (is_phi_for_stmt (stmt, oe3->op)
870 && !is_phi_for_stmt (stmt, oe1->op)
871 && !is_phi_for_stmt (stmt, oe2->op)))
873 struct operand_entry temp = *oe3;
874 oe3->op = oe1->op;
875 oe3->rank = oe1->rank;
876 oe1->op = temp.op;
877 oe1->rank= temp.rank;
881 /* The final recursion case for this function is that you have
882 exactly two operations left.
883 If we had one exactly one op in the entire list to start with, we
884 would have never called this function, and the tail recursion
885 rewrites them one at a time. */
886 if (opindex + 2 == VEC_length (operand_entry_t, ops))
888 operand_entry_t oe1, oe2;
890 oe1 = VEC_index (operand_entry_t, ops, opindex);
891 oe2 = VEC_index (operand_entry_t, ops, opindex + 1);
893 if (TREE_OPERAND (rhs, 0) != oe1->op
894 || TREE_OPERAND (rhs, 1) != oe2->op)
897 if (dump_file && (dump_flags & TDF_DETAILS))
899 fprintf (dump_file, "Transforming ");
900 print_generic_expr (dump_file, rhs, 0);
903 TREE_OPERAND (rhs, 0) = oe1->op;
904 TREE_OPERAND (rhs, 1) = oe2->op;
905 update_stmt (stmt);
907 if (dump_file && (dump_flags & TDF_DETAILS))
909 fprintf (dump_file, " into ");
910 print_generic_stmt (dump_file, rhs, 0);
914 return;
917 /* If we hit here, we should have 3 or more ops left. */
918 gcc_assert (opindex + 2 < VEC_length (operand_entry_t, ops));
920 /* Rewrite the next operator. */
921 oe = VEC_index (operand_entry_t, ops, opindex);
923 if (oe->op != TREE_OPERAND (rhs, 1))
926 if (dump_file && (dump_flags & TDF_DETAILS))
928 fprintf (dump_file, "Transforming ");
929 print_generic_expr (dump_file, rhs, 0);
932 TREE_OPERAND (rhs, 1) = oe->op;
933 update_stmt (stmt);
935 if (dump_file && (dump_flags & TDF_DETAILS))
937 fprintf (dump_file, " into ");
938 print_generic_stmt (dump_file, rhs, 0);
941 /* Recurse on the LHS of the binary operator, which is guaranteed to
942 be the non-leaf side. */
943 rewrite_expr_tree (SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0)),
944 opindex + 1, ops);
947 /* Transform STMT, which is really (A +B) + (C + D) into the left
948 linear form, ((A+B)+C)+D.
949 Recurse on D if necessary. */
951 static void
952 linearize_expr (tree stmt)
954 block_stmt_iterator bsinow, bsirhs;
955 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
956 enum tree_code rhscode = TREE_CODE (rhs);
957 tree binrhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));
958 tree binlhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
959 tree newbinrhs = NULL_TREE;
961 gcc_assert (is_reassociable_op (binlhs, TREE_CODE (rhs))
962 && is_reassociable_op (binrhs, TREE_CODE (rhs)));
964 bsinow = bsi_for_stmt (stmt);
965 bsirhs = bsi_for_stmt (binrhs);
966 bsi_move_before (&bsirhs, &bsinow);
968 TREE_OPERAND (rhs, 1) = TREE_OPERAND (GIMPLE_STMT_OPERAND (binrhs, 1), 0);
969 if (TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME)
970 newbinrhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));
971 TREE_OPERAND (GIMPLE_STMT_OPERAND (binrhs, 1), 0)
972 = GIMPLE_STMT_OPERAND (binlhs, 0);
973 TREE_OPERAND (rhs, 0) = GIMPLE_STMT_OPERAND (binrhs, 0);
975 if (dump_file && (dump_flags & TDF_DETAILS))
977 fprintf (dump_file, "Linearized: ");
978 print_generic_stmt (dump_file, rhs, 0);
981 reassociate_stats.linearized++;
982 update_stmt (binrhs);
983 update_stmt (binlhs);
984 update_stmt (stmt);
985 TREE_VISITED (binrhs) = 1;
986 TREE_VISITED (binlhs) = 1;
987 TREE_VISITED (stmt) = 1;
989 /* Tail recurse on the new rhs if it still needs reassociation. */
990 if (newbinrhs && is_reassociable_op (newbinrhs, rhscode))
991 linearize_expr (stmt);
995 /* If LHS has a single immediate use that is a GIMPLE_MODIFY_STMT, return
996 it. Otherwise, return NULL. */
998 static tree
999 get_single_immediate_use (tree lhs)
1001 use_operand_p immuse;
1002 tree immusestmt;
1004 if (TREE_CODE (lhs) == SSA_NAME
1005 && single_imm_use (lhs, &immuse, &immusestmt))
1007 if (TREE_CODE (immusestmt) == RETURN_EXPR)
1008 immusestmt = TREE_OPERAND (immusestmt, 0);
1009 if (TREE_CODE (immusestmt) == GIMPLE_MODIFY_STMT)
1010 return immusestmt;
1012 return NULL_TREE;
1014 static VEC(tree, heap) *broken_up_subtracts;
1017 /* Recursively negate the value of TONEGATE, and return the SSA_NAME
1018 representing the negated value. Insertions of any necessary
1019 instructions go before BSI.
1020 This function is recursive in that, if you hand it "a_5" as the
1021 value to negate, and a_5 is defined by "a_5 = b_3 + b_4", it will
1022 transform b_3 + b_4 into a_5 = -b_3 + -b_4. */
1024 static tree
1025 negate_value (tree tonegate, block_stmt_iterator *bsi)
1027 tree negatedef = tonegate;
1028 tree resultofnegate;
1030 if (TREE_CODE (tonegate) == SSA_NAME)
1031 negatedef = SSA_NAME_DEF_STMT (tonegate);
1033 /* If we are trying to negate a name, defined by an add, negate the
1034 add operands instead. */
1035 if (TREE_CODE (tonegate) == SSA_NAME
1036 && TREE_CODE (negatedef) == GIMPLE_MODIFY_STMT
1037 && TREE_CODE (GIMPLE_STMT_OPERAND (negatedef, 0)) == SSA_NAME
1038 && has_single_use (GIMPLE_STMT_OPERAND (negatedef, 0))
1039 && TREE_CODE (GIMPLE_STMT_OPERAND (negatedef, 1)) == PLUS_EXPR)
1041 block_stmt_iterator bsi;
1042 tree binop = GIMPLE_STMT_OPERAND (negatedef, 1);
1044 bsi = bsi_for_stmt (negatedef);
1045 TREE_OPERAND (binop, 0) = negate_value (TREE_OPERAND (binop, 0),
1046 &bsi);
1047 bsi = bsi_for_stmt (negatedef);
1048 TREE_OPERAND (binop, 1) = negate_value (TREE_OPERAND (binop, 1),
1049 &bsi);
1050 update_stmt (negatedef);
1051 return GIMPLE_STMT_OPERAND (negatedef, 0);
1054 tonegate = fold_build1 (NEGATE_EXPR, TREE_TYPE (tonegate), tonegate);
1055 resultofnegate = force_gimple_operand_bsi (bsi, tonegate, true,
1056 NULL_TREE);
1057 VEC_safe_push (tree, heap, broken_up_subtracts, resultofnegate);
1058 return resultofnegate;
1062 /* Return true if we should break up the subtract in STMT into an add
1063 with negate. This is true when we the subtract operands are really
1064 adds, or the subtract itself is used in an add expression. In
1065 either case, breaking up the subtract into an add with negate
1066 exposes the adds to reassociation. */
1068 static bool
1069 should_break_up_subtract (tree stmt)
1072 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1073 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1074 tree binlhs = TREE_OPERAND (rhs, 0);
1075 tree binrhs = TREE_OPERAND (rhs, 1);
1076 tree immusestmt;
1078 if (TREE_CODE (binlhs) == SSA_NAME
1079 && is_reassociable_op (SSA_NAME_DEF_STMT (binlhs), PLUS_EXPR))
1080 return true;
1082 if (TREE_CODE (binrhs) == SSA_NAME
1083 && is_reassociable_op (SSA_NAME_DEF_STMT (binrhs), PLUS_EXPR))
1084 return true;
1086 if (TREE_CODE (lhs) == SSA_NAME
1087 && (immusestmt = get_single_immediate_use (lhs))
1088 && TREE_CODE (GIMPLE_STMT_OPERAND (immusestmt, 1)) == PLUS_EXPR)
1089 return true;
1090 return false;
1094 /* Transform STMT from A - B into A + -B. */
1096 static void
1097 break_up_subtract (tree stmt, block_stmt_iterator *bsi)
1099 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1101 if (dump_file && (dump_flags & TDF_DETAILS))
1103 fprintf (dump_file, "Breaking up subtract ");
1104 print_generic_stmt (dump_file, stmt, 0);
1107 TREE_SET_CODE (GIMPLE_STMT_OPERAND (stmt, 1), PLUS_EXPR);
1108 TREE_OPERAND (rhs, 1) = negate_value (TREE_OPERAND (rhs, 1), bsi);
1110 update_stmt (stmt);
1113 /* Recursively linearize a binary expression that is the RHS of STMT.
1114 Place the operands of the expression tree in the vector named OPS. */
1116 static void
1117 linearize_expr_tree (VEC(operand_entry_t, heap) **ops, tree stmt)
1119 block_stmt_iterator bsinow, bsilhs;
1120 tree rhs = GENERIC_TREE_OPERAND (stmt, 1);
1121 tree binrhs = TREE_OPERAND (rhs, 1);
1122 tree binlhs = TREE_OPERAND (rhs, 0);
1123 tree binlhsdef, binrhsdef;
1124 bool binlhsisreassoc = false;
1125 bool binrhsisreassoc = false;
1126 enum tree_code rhscode = TREE_CODE (rhs);
/* Mark STMT visited so reassociate_bb skips it when its reverse walk
   reaches it later.  */
1128 TREE_VISITED (stmt) = 1;
/* Determine which operands are themselves defined by a statement that
   is reassociable with the same operation code as RHS.  */
1130 if (TREE_CODE (binlhs) == SSA_NAME)
1132 binlhsdef = SSA_NAME_DEF_STMT (binlhs);
1133 binlhsisreassoc = is_reassociable_op (binlhsdef, rhscode);
1136 if (TREE_CODE (binrhs) == SSA_NAME)
1138 binrhsdef = SSA_NAME_DEF_STMT (binrhs);
1139 binrhsisreassoc = is_reassociable_op (binrhsdef, rhscode);
1142 /* If the LHS is not reassociable, but the RHS is, we need to swap
1143 them. If neither is reassociable, there is nothing we can do, so
1144 just put them in the ops vector. If the LHS is reassociable,
1145 linearize it. If both are reassociable, then linearize the RHS
1146 and the LHS. */
1148 if (!binlhsisreassoc)
1150 tree temp;
/* Neither operand continues the chain: both are leaves, record them
   and stop recursing.  */
1152 if (!binrhsisreassoc)
1154 add_to_ops_vec (ops, binrhs);
1155 add_to_ops_vec (ops, binlhs);
1156 return;
1159 if (dump_file && (dump_flags & TDF_DETAILS))
1161 fprintf (dump_file, "swapping operands of ");
1162 print_generic_expr (dump_file, stmt, 0);
/* Only the RHS is reassociable: swap the operands in the IR so the
   reassociable side sits on the left...  */
1165 swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
1166 &TREE_OPERAND (rhs, 1));
1167 update_stmt (stmt);
1169 if (dump_file && (dump_flags & TDF_DETAILS))
1171 fprintf (dump_file, " is now ");
1172 print_generic_stmt (dump_file, stmt, 0);
1175 /* We want to make it so the lhs is always the reassociative op,
1176 so swap. */
1177 temp = binlhs;
1178 binlhs = binrhs;
1179 binrhs = temp;
/* Both operands are reassociable: linearize_expr rewrites STMT so the
   chain continues through the LHS (NOTE(review): exact rotation done by
   linearize_expr is defined elsewhere in this file — confirm there),
   then reload the operands from the updated RHS.  */
1181 else if (binrhsisreassoc)
1183 linearize_expr (stmt);
1184 gcc_assert (rhs == GIMPLE_STMT_OPERAND (stmt, 1));
1185 binlhs = TREE_OPERAND (rhs, 0);
1186 binrhs = TREE_OPERAND (rhs, 1);
/* At this point the chain must continue only through BINLHS.  */
1189 gcc_assert (TREE_CODE (binrhs) != SSA_NAME
1190 || !is_reassociable_op (SSA_NAME_DEF_STMT (binrhs), rhscode));
/* Move the definition of BINLHS directly before STMT so statement
   order stays valid while the chain is rewritten, then recurse down
   the left spine and record BINRHS as a leaf.  */
1191 bsinow = bsi_for_stmt (stmt);
1192 bsilhs = bsi_for_stmt (SSA_NAME_DEF_STMT (binlhs));
1193 bsi_move_before (&bsilhs, &bsinow);
1194 linearize_expr_tree (ops, SSA_NAME_DEF_STMT (binlhs));
1195 add_to_ops_vec (ops, binrhs);
1198 /* Repropagate the negates back into subtracts, since no other pass
1199 currently does it.  Walks every negate recorded in
1200 broken_up_subtracts and, where its single use is a PLUS_EXPR,
1201 turns that addition back into a subtraction.  */
1201 static void
1202 repropagate_negates (void)
1204 unsigned int i = 0;
1205 tree negate;
1207 for (i = 0; VEC_iterate (tree, broken_up_subtracts, i, negate); i++)
1209 tree user = get_single_immediate_use (negate);
1211 /* The negate operand can be either operand of a PLUS_EXPR
1212 (it can be the LHS if the RHS is a constant for example).
1214 Force the negate operand to the RHS of the PLUS_EXPR, then
1215 transform the PLUS_EXPR into a MINUS_EXPR. */
1216 if (user
1217 && TREE_CODE (user) == GIMPLE_MODIFY_STMT
1218 && TREE_CODE (GIMPLE_STMT_OPERAND (user, 1)) == PLUS_EXPR)
1220 tree rhs = GIMPLE_STMT_OPERAND (user, 1);
1222 /* If the negated operand appears on the LHS of the
1223 PLUS_EXPR, exchange the operands of the PLUS_EXPR
1224 to force the negated operand to the RHS of the PLUS_EXPR. */
1225 if (TREE_OPERAND (GIMPLE_STMT_OPERAND (user, 1), 0) == negate)
1227 tree temp = TREE_OPERAND (rhs, 0);
1228 TREE_OPERAND (rhs, 0) = TREE_OPERAND (rhs, 1);
1229 TREE_OPERAND (rhs, 1) = temp;
1232 /* Now transform the PLUS_EXPR into a MINUS_EXPR and replace
1233 the RHS of the PLUS_EXPR with the operand of the NEGATE_EXPR. */
1234 if (TREE_OPERAND (GIMPLE_STMT_OPERAND (user, 1), 1) == negate)
1236 TREE_SET_CODE (rhs, MINUS_EXPR);
1237 TREE_OPERAND (rhs, 1) = get_unary_op (negate, NEGATE_EXPR);
1238 update_stmt (user);
1244 /* Break up subtract operations in block BB.
1246 We do this top down because we don't know whether the subtract is
1247 part of a possible chain of reassociation except at the top.
1249 IE given
1250 d = f + g
1251 c = a + e
1252 b = c - d
1253 q = b - r
1254 k = t - q
1256 we want to break up k = t - q, but we won't until we've transformed q
1257 = b - r, which won't be broken up until we transform b = c - d.
1258 Recurses over BB's children in the dominator tree.  */
1259 static void
1260 break_up_subtract_bb (basic_block bb)
1262 block_stmt_iterator bsi;
1263 basic_block son;
/* Forward walk: earlier subtracts must be broken up before later ones
   that consume them (see the example above).  */
1265 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1267 tree stmt = bsi_stmt (bsi);
1269 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
1271 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1272 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
/* Clear the visited flag here; reassociate_bb reads it later.  */
1274 TREE_VISITED (stmt) = 0;
1275 /* If unsafe math optimizations we can do reassociation for
1276 non-integral types. */
1277 if ((!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1278 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs)))
1279 && (!SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs))
1280 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE(lhs))
1281 || !flag_unsafe_math_optimizations))
1282 continue;
1284 /* Check for a subtract used only in an addition. If this
1285 is the case, transform it into add of a negate for better
1286 reassociation. IE transform C = A-B into C = A + -B if C
1287 is only used in an addition. */
1288 if (TREE_CODE (rhs) == MINUS_EXPR)
1289 if (should_break_up_subtract (stmt))
1290 break_up_subtract (stmt, &bsi)
/* Recurse into dominated blocks so the whole function is covered
   top-down.  */
1293 for (son = first_dom_son (CDI_DOMINATORS, bb);
1294 son;
1295 son = next_dom_son (CDI_DOMINATORS, son))
1296 break_up_subtract_bb (son);
1299 /* Reassociate expressions in basic block BB and its post-dominator as
1300 children.  For each associative statement: linearize its operand
1301 tree into a vector, sort by rank, optimize the operand list, and
1302 rewrite the tree (or collapse it to a single operand).  */
1302 static void
1303 reassociate_bb (basic_block bb)
1305 block_stmt_iterator bsi;
1306 basic_block son;
/* Walk statements in reverse so a whole expression tree is seen from
   its root before its feeding statements.  */
1308 for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
1310 tree stmt = bsi_stmt (bsi);
1312 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
1314 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1315 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1317 /* If this was part of an already processed tree, we don't
1318 need to touch it again.  (linearize_expr_tree sets
1319 TREE_VISITED on every statement it consumes.)  */
1319 if (TREE_VISITED (stmt))
1320 continue;
1322 /* If unsafe math optimizations we can do reassociation for
1323 non-integral types. */
1324 if ((!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1325 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs)))
1326 && (!SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs))
1327 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE(lhs))
1328 || !flag_unsafe_math_optimizations))
1329 continue;
1331 if (associative_tree_code (TREE_CODE (rhs)))
1333 VEC(operand_entry_t, heap) *ops = NULL;
1335 /* There may be no immediate uses left by the time we
1336 get here because we may have eliminated them all. */
1337 if (TREE_CODE (lhs) == SSA_NAME && has_zero_uses (lhs))
1338 continue;
1340 TREE_VISITED (stmt) = 1;
1341 linearize_expr_tree (&ops, stmt);
/* Canonical order: sort the flattened operands by rank so that
   equal/combinable operands become adjacent for optimize_ops_list.  */
1342 qsort (VEC_address (operand_entry_t, ops),
1343 VEC_length (operand_entry_t, ops),
1344 sizeof (operand_entry_t),
1345 sort_by_operand_rank);
1346 optimize_ops_list (TREE_CODE (rhs), &ops);
/* If optimization collapsed everything to one operand, the whole
   RHS is replaced by that operand directly.  */
1348 if (VEC_length (operand_entry_t, ops) == 1)
1350 if (dump_file && (dump_flags & TDF_DETAILS))
1352 fprintf (dump_file, "Transforming ");
1353 print_generic_expr (dump_file, rhs, 0);
1355 GIMPLE_STMT_OPERAND (stmt, 1)
1356 = VEC_last (operand_entry_t, ops)->op;
1357 update_stmt (stmt);
1359 if (dump_file && (dump_flags & TDF_DETAILS))
1361 fprintf (dump_file, " into ");
1362 print_generic_stmt (dump_file,
1363 GIMPLE_STMT_OPERAND (stmt, 1), 0);
1366 else
1368 rewrite_expr_tree (stmt, 0, ops);
1371 VEC_free (operand_entry_t, heap, ops);
/* Recurse over the post-dominator children of BB.  */
1375 for (son = first_dom_son (CDI_POST_DOMINATORS, bb);
1376 son;
1377 son = next_dom_son (CDI_POST_DOMINATORS, son))
1378 reassociate_bb (son);
/* Forward declarations so the debug helpers below have prototypes
   (they are extern so they can be called from a debugger).  */
1381 void dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops);
1382 void debug_ops_vector (VEC (operand_entry_t, heap) *ops);
1384 /* Dump the operand entry vector OPS to FILE, one line per entry
1385    showing its index, rank and tree operand.  */
1386 void
1387 dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops)
1389 operand_entry_t oe;
1390 unsigned int i;
1392 for (i = 0; VEC_iterate (operand_entry_t, ops, i, oe); i++)
1394 fprintf (file, "Op %d -> rank: %d, tree: ", i, oe->rank);
1395 print_generic_stmt (file, oe->op, 0);
1399 /* Dump the operand entry vector OPS to STDERR.  Convenience wrapper
1400    around dump_ops_vector for use from a debugger.  */
1401 void
1402 debug_ops_vector (VEC (operand_entry_t, heap) *ops)
1404 dump_ops_vector (stderr, ops);
1406 /* Run the two reassociation phases: break up subtracts walking the
1407    dominator tree from the entry block, then reassociate walking the
1408    post-dominator tree from the exit block.  */
1407 static void
1408 do_reassoc (void)
1410 break_up_subtract_bb (ENTRY_BLOCK_PTR);
1411 reassociate_bb (EXIT_BLOCK_PTR);
1414 /* Initialize the reassociation pass: allocate the operand-entry pool,
1415    the per-BB and per-operand rank tables, seed ranks for function
1416    arguments and the static chain, and compute dominance info.  */
1416 static void
1417 init_reassoc (void)
1419 int i;
/* Rank 0/1 are implicitly reserved; real ranks start at 3 because of
   the pre-increments below.  */
1420 unsigned int rank = 2;
1421 tree param;
1422 int *bbs = XNEWVEC (int, last_basic_block + 1);
1424 memset (&reassociate_stats, 0, sizeof (reassociate_stats));
1426 operand_entry_pool = create_alloc_pool ("operand entry pool",
1427 sizeof (struct operand_entry), 30);
1429 /* Reverse RPO (Reverse Post Order) will give us something where
1430 deeper loops come later. */
1431 pre_and_rev_post_order_compute (NULL, bbs, false);
1432 bb_rank = XCNEWVEC (unsigned int, last_basic_block + 1);
1434 operand_rank = htab_create (511, operand_entry_hash,
1435 operand_entry_eq, 0);
1437 /* Give each argument a distinct rank. */
1438 for (param = DECL_ARGUMENTS (current_function_decl);
1439 param;
1440 param = TREE_CHAIN (param))
1442 if (gimple_default_def (cfun, param) != NULL)
1444 tree def = gimple_default_def (cfun, param);
1445 insert_operand_rank (def, ++rank);
1449 /* Give the chain decl a distinct rank. */
1450 if (cfun->static_chain_decl != NULL)
1452 tree def = gimple_default_def (cfun, cfun->static_chain_decl);
1453 if (def != NULL)
1454 insert_operand_rank (def, ++rank);
1457 /* Set up rank for each BB.  Shifted left by 16 — presumably so
1458 per-statement ranks within a block can occupy the low bits;
1459 TODO(review): confirm against the rank-assignment code.  */
1458 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
1459 bb_rank[bbs[i]] = ++rank << 16;
1461 free (bbs);
/* Both walks in do_reassoc need dominance info.  */
1462 calculate_dominance_info (CDI_DOMINATORS);
1463 calculate_dominance_info (CDI_POST_DOMINATORS);
1464 broken_up_subtracts = NULL;
1467 /* Cleanup after the reassociation pass, and print stats if
1468 requested.  Releases every resource allocated by init_reassoc.  */
1470 static void
1471 fini_reassoc (void)
1474 if (dump_file && (dump_flags & TDF_STATS))
1476 fprintf (dump_file, "Reassociation stats:\n");
1477 fprintf (dump_file, "Linearized: %d\n",
1478 reassociate_stats.linearized);
1479 fprintf (dump_file, "Constants eliminated: %d\n",
1480 reassociate_stats.constants_eliminated);
1481 fprintf (dump_file, "Ops eliminated: %d\n",
1482 reassociate_stats.ops_eliminated);
1483 fprintf (dump_file, "Statements rewritten: %d\n",
1484 reassociate_stats.rewritten);
/* Tear down the structures built in init_reassoc.  NOTE(review):
   CDI_DOMINATORS info is not freed here — presumably kept for later
   passes; confirm with pass manager conventions.  */
1486 htab_delete (operand_rank);
1488 free_alloc_pool (operand_entry_pool);
1489 free (bb_rank);
1490 VEC_free (tree, heap, broken_up_subtracts);
1491 free_dominance_info (CDI_POST_DOMINATORS);
1494 /* Gate and execute functions for Reassociation.  Entry point invoked
1495    by the pass manager: init, reassociate, fold negates back into
1496    subtracts, clean up.  Always returns 0 (no extra TODO flags).  */
1496 static unsigned int
1497 execute_reassoc (void)
1499 init_reassoc ();
1501 do_reassoc ();
1502 repropagate_negates ();
1504 fini_reassoc ();
1505 return 0;
1508 struct tree_opt_pass pass_reassoc =
1510 "reassoc", /* name */
1511 NULL, /* gate */
1512 execute_reassoc, /* execute */
1513 NULL, /* sub */
1514 NULL, /* next */
1515 0, /* static_pass_number */
1516 TV_TREE_REASSOC, /* tv_id */
1517 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
1518 0, /* properties_provided */
1519 0, /* properties_destroyed */
1520 0, /* todo_flags_start */
1521 TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
1522 0 /* letter */