/* Reassociation for trees.
   Copyright (C) 2005, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "vec.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "cfgloop.h"
#include "flags.h"
/* This is a simple global reassociation pass.  It is, in part, based
   on the LLVM pass of the same name (They do some things more/less
   than we do, in different orders, etc).

   It consists of five steps:

   1. Breaking up subtract operations into addition + negate, where
   it would promote the reassociation of adds.

   2. Left linearization of the expression trees, so that (A+B)+(C+D)
   becomes (((A+B)+C)+D), which is easier for us to rewrite later.
   During linearization, we place the operands of the binary
   expressions into a vector of operand_entry_t.

   3. Optimization of the operand lists, eliminating things like a +
   -a, a & a, etc.

   4. Rewrite the expression trees we linearized and optimized so
   they are in proper rank order.

   5. Repropagate negates, as nothing else will clean it up ATM.

   A bit of theory on #4, since nobody seems to write anything down
   about why it makes sense to do it the way they do it:

   We could do this much nicer theoretically, but don't (for reasons
   explained after how to do it theoretically nice :P).

   In order to promote the most redundancy elimination, you want
   binary expressions whose operands are the same rank (or
   preferably, the same value) exposed to the redundancy eliminator,
   for possible elimination.

   So the way to do this if we really cared, is to build the new op
   tree from the leaves to the roots, merging as you go, and putting the
   new op on the end of the worklist, until you are left with one
   thing on the worklist.

   IE if you have to rewrite the following set of operands (listed with
   rank in parentheses), with opcode PLUS_EXPR:

   a (1), b (1), c (1), d (2), e (2)

   We start with our merge worklist empty, and the ops list with all of
   those on it.

   You want to first merge all leaves of the same rank, as much as
   possible.

   So first build a binary op of

   mergetmp = a + b, and put "mergetmp" on the merge worklist.

   Because there is no three operand form of PLUS_EXPR, c is not going to
   be exposed to redundancy elimination as a rank 1 operand.

   So you might as well throw it on the merge worklist (you could also
   consider it to now be a rank two operand, and merge it with d and e,
   but in this case, you then have evicted e from a binary op.  So at
   least in this situation, you can't win.)

   Then build a binary op of d + e
   mergetmp2 = d + e

   and put mergetmp2 on the merge worklist.

   so merge worklist = {mergetmp, c, mergetmp2}

   Continue building binary ops of these operations until you have only
   one operation left on the worklist.

   So we have

   build binary op
   mergetmp3 = mergetmp + c

   worklist = {mergetmp2, mergetmp3}

   mergetmp4 = mergetmp2 + mergetmp3

   worklist = {mergetmp4}

   because we have one operation left, we can now just set the original
   statement equal to the result of that operation.

   This will at least expose a + b and d + e to redundancy elimination
   as binary operations.

   For extra points, you can reuse the old statements to build the
   mergetmps, since you shouldn't run out.

   So why don't we do this?

   Because it's expensive, and rarely will help.  Most trees we are
   reassociating have 3 or less ops.  If they have 2 ops, they already
   will be written into a nice single binary op.  If you have 3 ops, a
   single simple check suffices to tell you whether the first two are of the
   same rank.  If so, you know to order it

   mergetmp = op1 + op2
   newstmt = mergetmp + op3

   instead of
   mergetmp = op2 + op3
   newstmt = mergetmp + op1

   If all three are of the same rank, you can't expose them all in a
   single binary operator anyway, so the above is *still* the best you
   can do.

   Thus, this is what we do.  When we have three ops left, we check to see
   what order to put them in, and call it a day.  As a nod to vector sum
   reduction, we check if any of the ops are really a phi node that is a
   destructive update for the associating op, and keep the destructive
   update together for vector sum reduction recognition.  */
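
/* A hedged end-to-end illustration (added here; not part of the
   original comments): for source like

     x = a - b + c - d;

   step 1 rewrites each subtract feeding the chain as an add of a
   negate, step 2 linearizes the adds into the operand list
   {a, -b, c, -d}, step 3 would cancel pairs such as a + -a were any
   present, and step 4 re-emits the chain as binary adds in rank
   order, exposing equal-rank pairs to redundancy elimination.  */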
/* Statistics */
static struct
{
  int linearized;
  int constants_eliminated;
  int ops_eliminated;
  int rewritten;
} reassociate_stats;

/* Operator, rank pair.  */
typedef struct operand_entry
{
  unsigned int rank;
  int id;
  tree op;
} *operand_entry_t;

static alloc_pool operand_entry_pool;

/* This is used to assign a unique ID to each struct operand_entry
   so that qsort results are identical on different hosts.  */
static int next_operand_entry_id;

/* Starting rank number for a given basic block, so that we can rank
   operations using unmovable instructions in that BB based on the bb
   depth.  */
static long *bb_rank;

/* Operand->rank hashtable.  */
static struct pointer_map_t *operand_rank;

/* Look up the operand rank structure for expression E.  */

static inline long
find_operand_rank (tree e)
{
  void **slot = pointer_map_contains (operand_rank, e);
  return slot ? (long) (intptr_t) *slot : -1;
}

/* Insert {E,RANK} into the operand rank hashtable.  */

static inline void
insert_operand_rank (tree e, long rank)
{
  void **slot;
  gcc_assert (rank > 0);
  slot = pointer_map_insert (operand_rank, e);
  gcc_assert (!*slot);
  *slot = (void *) (intptr_t) rank;
}

/* Given an expression E, return the rank of the expression.  */

static long
get_rank (tree e)
{
  /* Constants have rank 0.  */
  if (is_gimple_min_invariant (e))
    return 0;

  /* SSA_NAME's have the rank of the expression they are the result
     of.
     For globals and uninitialized values, the rank is 0.
     For function arguments, use the pre-setup rank.
     For PHI nodes, stores, asm statements, etc, we use the rank of
     the BB.
     For simple operations, the rank is the maximum rank of any of
     its operands, or the bb_rank, whichever is less.
     I make no claims that this is optimal, however, it gives good
     results.  */

  if (TREE_CODE (e) == SSA_NAME)
    {
      gimple stmt;
      long rank, maxrank;
      int i, n;

      if (TREE_CODE (SSA_NAME_VAR (e)) == PARM_DECL
          && SSA_NAME_IS_DEFAULT_DEF (e))
        return find_operand_rank (e);

      stmt = SSA_NAME_DEF_STMT (e);
      if (gimple_bb (stmt) == NULL)
        return 0;

      if (!is_gimple_assign (stmt)
          || gimple_vdef (stmt))
        return bb_rank[gimple_bb (stmt)->index];

      /* If we already have a rank for this expression, use that.  */
      rank = find_operand_rank (e);
      if (rank != -1)
        return rank;

      /* Otherwise, find the maximum rank for the operands, or the bb
         rank, whichever is less.  */
      rank = 0;
      maxrank = bb_rank[gimple_bb (stmt)->index];
      if (gimple_assign_single_p (stmt))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          n = TREE_OPERAND_LENGTH (rhs);
          if (n == 0)
            rank = MAX (rank, get_rank (rhs));
          else
            {
              for (i = 0;
                   i < n && TREE_OPERAND (rhs, i) && rank != maxrank; i++)
                rank = MAX (rank, get_rank (TREE_OPERAND (rhs, i)));
            }
        }
      else
        {
          n = gimple_num_ops (stmt);
          for (i = 1; i < n && rank != maxrank; i++)
            {
              gcc_assert (gimple_op (stmt, i));
              rank = MAX (rank, get_rank (gimple_op (stmt, i)));
            }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Rank for ");
          print_generic_expr (dump_file, e, 0);
          fprintf (dump_file, " is %ld\n", (rank + 1));
        }

      /* Note the rank in the hashtable so we don't recompute it.  */
      insert_operand_rank (e, (rank + 1));
      return (rank + 1);
    }

  /* Globals, etc, are rank 0 */
  return 0;
}
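
/* A small hedged example of ranks (added for illustration): if
   parameters x_1 and y_2 carry their pre-setup ranks and we have

     a_3 = x_1 + y_2;
     b_4 = a_3 + y_2;

   then rank (a_3) = MAX (rank (x_1), rank (y_2)) + 1 and rank (b_4)
   is one more again, so sorting by decreasing rank places the
   "deepest" temporaries first and constants (rank 0) last.  */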

DEF_VEC_P(operand_entry_t);
DEF_VEC_ALLOC_P(operand_entry_t, heap);

/* We want integer ones to end up last no matter what, since they are
   the ones we can do the most with.  */
#define INTEGER_CONST_TYPE 1 << 3
#define FLOAT_CONST_TYPE 1 << 2
#define OTHER_CONST_TYPE 1 << 1

/* Classify an invariant tree into integer, float, or other, so that
   we can sort them to be near other constants of the same type.  */
static inline int
constant_type (tree t)
{
  if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
    return INTEGER_CONST_TYPE;
  else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (t)))
    return FLOAT_CONST_TYPE;
  else
    return OTHER_CONST_TYPE;
}

/* qsort comparison function to sort operand entries PA and PB by rank
   so that the sorted array is ordered by rank in decreasing order.  */
static int
sort_by_operand_rank (const void *pa, const void *pb)
{
  const operand_entry_t oea = *(const operand_entry_t *)pa;
  const operand_entry_t oeb = *(const operand_entry_t *)pb;

  /* It's nicer for optimize_expression if constants that are likely
     to fold when added/multiplied/whatever are put next to each
     other.  Since all constants have rank 0, order them by type.  */
  if (oeb->rank == 0 && oea->rank == 0)
    {
      if (constant_type (oeb->op) != constant_type (oea->op))
        return constant_type (oeb->op) - constant_type (oea->op);
      else
        /* To make sorting result stable, we use unique IDs to determine
           order.  */
        return oeb->id - oea->id;
    }

  /* Lastly, make sure the versions that are the same go next to each
     other.  We use SSA_NAME_VERSION because it's stable.  */
  if ((oeb->rank - oea->rank == 0)
      && TREE_CODE (oea->op) == SSA_NAME
      && TREE_CODE (oeb->op) == SSA_NAME)
    {
      if (SSA_NAME_VERSION (oeb->op) != SSA_NAME_VERSION (oea->op))
        return SSA_NAME_VERSION (oeb->op) - SSA_NAME_VERSION (oea->op);
      else
        return oeb->id - oea->id;
    }

  if (oeb->rank != oea->rank)
    return oeb->rank - oea->rank;
  else
    return oeb->id - oea->id;
}

/* Add an operand entry to *OPS for the tree operand OP.  */

static void
add_to_ops_vec (VEC(operand_entry_t, heap) **ops, tree op)
{
  operand_entry_t oe = (operand_entry_t) pool_alloc (operand_entry_pool);

  oe->op = op;
  oe->rank = get_rank (op);
  oe->id = next_operand_entry_id++;
  VEC_safe_push (operand_entry_t, heap, *ops, oe);
}

/* Return true if STMT is a reassociable operation containing a binary
   operation with tree code CODE, and is inside LOOP.  */

static bool
is_reassociable_op (gimple stmt, enum tree_code code, struct loop *loop)
{
  basic_block bb = gimple_bb (stmt);

  if (gimple_bb (stmt) == NULL)
    return false;

  if (!flow_bb_inside_loop_p (loop, bb))
    return false;

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == code
      && has_single_use (gimple_assign_lhs (stmt)))
    return true;

  return false;
}

/* Given NAME, if NAME is defined by a unary operation OPCODE, return the
   operand of that operation.  Otherwise, return NULL.  */

static tree
get_unary_op (tree name, enum tree_code opcode)
{
  gimple stmt = SSA_NAME_DEF_STMT (name);

  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  if (gimple_assign_rhs_code (stmt) == opcode)
    return gimple_assign_rhs1 (stmt);
  return NULL_TREE;
}

/* If CURR and LAST are a pair of ops that OPCODE allows us to
   eliminate through equivalences, do so, remove them from OPS, and
   return true.  Otherwise, return false.  */

static bool
eliminate_duplicate_pair (enum tree_code opcode,
                          VEC (operand_entry_t, heap) **ops,
                          bool *all_done,
                          unsigned int i,
                          operand_entry_t curr,
                          operand_entry_t last)
{

  /* If we have two of the same op, and the opcode is & |, min, or max,
     we can eliminate one of them.
     If we have two of the same op, and the opcode is ^, we can
     eliminate both of them.  */

  if (last && last->op == curr->op)
    {
      switch (opcode)
        {
        case MAX_EXPR:
        case MIN_EXPR:
        case BIT_IOR_EXPR:
        case BIT_AND_EXPR:
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Equivalence: ");
              print_generic_expr (dump_file, curr->op, 0);
              fprintf (dump_file, " [&|minmax] ");
              print_generic_expr (dump_file, last->op, 0);
              fprintf (dump_file, " -> ");
              print_generic_stmt (dump_file, last->op, 0);
            }

          VEC_ordered_remove (operand_entry_t, *ops, i);
          reassociate_stats.ops_eliminated ++;

          return true;

        case BIT_XOR_EXPR:
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Equivalence: ");
              print_generic_expr (dump_file, curr->op, 0);
              fprintf (dump_file, " ^ ");
              print_generic_expr (dump_file, last->op, 0);
              fprintf (dump_file, " -> nothing\n");
            }

          reassociate_stats.ops_eliminated += 2;

          if (VEC_length (operand_entry_t, *ops) == 2)
            {
              VEC_free (operand_entry_t, heap, *ops);
              *ops = NULL;
              add_to_ops_vec (ops, build_zero_cst (TREE_TYPE (last->op)));
              *all_done = true;
            }
          else
            {
              VEC_ordered_remove (operand_entry_t, *ops, i-1);
              VEC_ordered_remove (operand_entry_t, *ops, i-1);
            }

          return true;

        default:
          break;
        }
    }
  return false;
}
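
/* Hedged illustration (added; not part of the pass itself): on the
   operand list {a, a, b} these equivalences rewrite

     a & a & b  ->  a & b      (one duplicate dropped)
     a ^ a ^ b  ->  b          (both duplicates cancel; a lone a ^ a
                                collapses to the constant 0)

   MIN_EXPR and MAX_EXPR behave like & and |: MIN (a, a) is just a.  */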

static VEC(tree, heap) *plus_negates;

/* If OPCODE is PLUS_EXPR and CURR->OP is a negate expression or a bitwise
   not expression, look in OPS for a corresponding positive operation to
   cancel it out.  If we find one, remove the other from OPS, replace
   OPS[CURRINDEX] with 0 or -1, respectively, and return true.  Otherwise,
   return false.  */

static bool
eliminate_plus_minus_pair (enum tree_code opcode,
                           VEC (operand_entry_t, heap) **ops,
                           unsigned int currindex,
                           operand_entry_t curr)
{
  tree negateop;
  tree notop;
  unsigned int i;
  operand_entry_t oe;

  if (opcode != PLUS_EXPR || TREE_CODE (curr->op) != SSA_NAME)
    return false;

  negateop = get_unary_op (curr->op, NEGATE_EXPR);
  notop = get_unary_op (curr->op, BIT_NOT_EXPR);
  if (negateop == NULL_TREE && notop == NULL_TREE)
    return false;

  /* Any non-negated version will have a rank that is one less than
     the current rank.  So once we hit those ranks, if we don't find
     one, we can stop.  */

  for (i = currindex + 1;
       VEC_iterate (operand_entry_t, *ops, i, oe)
       && oe->rank >= curr->rank - 1;
       i++)
    {
      if (oe->op == negateop)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Equivalence: ");
              print_generic_expr (dump_file, negateop, 0);
              fprintf (dump_file, " + -");
              print_generic_expr (dump_file, oe->op, 0);
              fprintf (dump_file, " -> 0\n");
            }

          VEC_ordered_remove (operand_entry_t, *ops, i);
          add_to_ops_vec (ops, build_zero_cst (TREE_TYPE (oe->op)));
          VEC_ordered_remove (operand_entry_t, *ops, currindex);
          reassociate_stats.ops_eliminated ++;

          return true;
        }
      else if (oe->op == notop)
        {
          tree op_type = TREE_TYPE (oe->op);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Equivalence: ");
              print_generic_expr (dump_file, notop, 0);
              fprintf (dump_file, " + ~");
              print_generic_expr (dump_file, oe->op, 0);
              fprintf (dump_file, " -> -1\n");
            }

          VEC_ordered_remove (operand_entry_t, *ops, i);
          add_to_ops_vec (ops, build_int_cst_type (op_type, -1));
          VEC_ordered_remove (operand_entry_t, *ops, currindex);
          reassociate_stats.ops_eliminated ++;

          return true;
        }
    }

  /* CURR->OP is a negate expr in a plus expr: save it for later
     inspection in repropagate_negates().  */
  if (negateop != NULL_TREE)
    VEC_safe_push (tree, heap, plus_negates, curr->op);

  return false;
}
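
/* Hedged illustration (added): in a PLUS_EXPR operand list the pairs
   below cancel without emitting any new statement:

     t = -a;   {a, t, b}  ->  {b, 0}       since a + -a == 0
     t = ~a;   {a, t, b}  ->  {b, -1}      since a + ~a == -1

   The 0 / -1 constants are pushed onto the list so that constant
   merging in optimize_ops_list can fold them further.  */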

/* If OPCODE is BIT_IOR_EXPR or BIT_AND_EXPR, and CURR->OP is really a
   bitwise not expression, look in OPS for a corresponding operand to
   cancel it out.  If we find one, the whole list collapses: replace it
   with a single 0 (for &) or -1 (for |) and return true.  Otherwise,
   return false.  */

static bool
eliminate_not_pairs (enum tree_code opcode,
                     VEC (operand_entry_t, heap) **ops,
                     unsigned int currindex,
                     operand_entry_t curr)
{
  tree notop;
  unsigned int i;
  operand_entry_t oe;

  if ((opcode != BIT_IOR_EXPR && opcode != BIT_AND_EXPR)
      || TREE_CODE (curr->op) != SSA_NAME)
    return false;

  notop = get_unary_op (curr->op, BIT_NOT_EXPR);
  if (notop == NULL_TREE)
    return false;

  /* Any non-not version will have a rank that is one less than
     the current rank.  So once we hit those ranks, if we don't find
     one, we can stop.  */

  for (i = currindex + 1;
       VEC_iterate (operand_entry_t, *ops, i, oe)
       && oe->rank >= curr->rank - 1;
       i++)
    {
      if (oe->op == notop)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Equivalence: ");
              print_generic_expr (dump_file, notop, 0);
              if (opcode == BIT_AND_EXPR)
                fprintf (dump_file, " & ~");
              else if (opcode == BIT_IOR_EXPR)
                fprintf (dump_file, " | ~");
              print_generic_expr (dump_file, oe->op, 0);
              if (opcode == BIT_AND_EXPR)
                fprintf (dump_file, " -> 0\n");
              else if (opcode == BIT_IOR_EXPR)
                fprintf (dump_file, " -> -1\n");
            }

          if (opcode == BIT_AND_EXPR)
            oe->op = build_zero_cst (TREE_TYPE (oe->op));
          else if (opcode == BIT_IOR_EXPR)
            oe->op = build_low_bits_mask (TREE_TYPE (oe->op),
                                          TYPE_PRECISION (TREE_TYPE (oe->op)));

          reassociate_stats.ops_eliminated
            += VEC_length (operand_entry_t, *ops) - 1;
          VEC_free (operand_entry_t, heap, *ops);
          *ops = NULL;
          VEC_safe_push (operand_entry_t, heap, *ops, oe);
          return true;
        }
    }

  return false;
}

/* Use constant value that may be present in OPS to try to eliminate
   operands.  Note that this function is only really used when we've
   eliminated ops for other reasons, or merged constants.  Across
   single statements, fold already does all of this, plus more.  There
   is little point in duplicating logic, so I've only included the
   identities that I could ever construct testcases to trigger.  */

static void
eliminate_using_constants (enum tree_code opcode,
                           VEC(operand_entry_t, heap) **ops)
{
  operand_entry_t oelast = VEC_last (operand_entry_t, *ops);
  tree type = TREE_TYPE (oelast->op);

  if (oelast->rank == 0
      && (INTEGRAL_TYPE_P (type) || FLOAT_TYPE_P (type)))
    {
      switch (opcode)
        {
        case BIT_AND_EXPR:
          if (integer_zerop (oelast->op))
            {
              if (VEC_length (operand_entry_t, *ops) != 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Found & 0, removing all other ops\n");

                  reassociate_stats.ops_eliminated
                    += VEC_length (operand_entry_t, *ops) - 1;

                  VEC_free (operand_entry_t, heap, *ops);
                  *ops = NULL;
                  VEC_safe_push (operand_entry_t, heap, *ops, oelast);
                  return;
                }
            }
          else if (integer_all_onesp (oelast->op))
            {
              if (VEC_length (operand_entry_t, *ops) != 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Found & -1, removing\n");
                  VEC_pop (operand_entry_t, *ops);
                  reassociate_stats.ops_eliminated++;
                }
            }
          break;
        case BIT_IOR_EXPR:
          if (integer_all_onesp (oelast->op))
            {
              if (VEC_length (operand_entry_t, *ops) != 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Found | -1, removing all other ops\n");

                  reassociate_stats.ops_eliminated
                    += VEC_length (operand_entry_t, *ops) - 1;

                  VEC_free (operand_entry_t, heap, *ops);
                  *ops = NULL;
                  VEC_safe_push (operand_entry_t, heap, *ops, oelast);
                  return;
                }
            }
          else if (integer_zerop (oelast->op))
            {
              if (VEC_length (operand_entry_t, *ops) != 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Found | 0, removing\n");
                  VEC_pop (operand_entry_t, *ops);
                  reassociate_stats.ops_eliminated++;
                }
            }
          break;
        case MULT_EXPR:
          if (integer_zerop (oelast->op)
              || (FLOAT_TYPE_P (type)
                  && !HONOR_NANS (TYPE_MODE (type))
                  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
                  && real_zerop (oelast->op)))
            {
              if (VEC_length (operand_entry_t, *ops) != 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Found * 0, removing all other ops\n");

                  reassociate_stats.ops_eliminated
                    += VEC_length (operand_entry_t, *ops) - 1;
                  VEC_free (operand_entry_t, heap, *ops);
                  *ops = NULL;
                  VEC_safe_push (operand_entry_t, heap, *ops, oelast);
                  return;
                }
            }
          else if (integer_onep (oelast->op)
                   || (FLOAT_TYPE_P (type)
                       && !HONOR_SNANS (TYPE_MODE (type))
                       && real_onep (oelast->op)))
            {
              if (VEC_length (operand_entry_t, *ops) != 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Found * 1, removing\n");
                  VEC_pop (operand_entry_t, *ops);
                  reassociate_stats.ops_eliminated++;
                  return;
                }
            }
          break;
        case BIT_XOR_EXPR:
        case PLUS_EXPR:
        case MINUS_EXPR:
          if (integer_zerop (oelast->op)
              || (FLOAT_TYPE_P (type)
                  && (opcode == PLUS_EXPR || opcode == MINUS_EXPR)
                  && fold_real_zero_addition_p (type, oelast->op,
                                                opcode == MINUS_EXPR)))
            {
              if (VEC_length (operand_entry_t, *ops) != 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Found [|^+] 0, removing\n");
                  VEC_pop (operand_entry_t, *ops);
                  reassociate_stats.ops_eliminated++;
                  return;
                }
            }
          break;
        default:
          break;
        }
    }
}

static void linearize_expr_tree (VEC(operand_entry_t, heap) **, gimple,
                                 bool, bool);

/* Structure for tracking and counting operands.  */
typedef struct oecount_s {
  int cnt;
  int id;
  enum tree_code oecode;
  tree op;
} oecount;

DEF_VEC_O(oecount);
DEF_VEC_ALLOC_O(oecount,heap);

/* The heap for the oecount hashtable and the sorted list of operands.  */
static VEC (oecount, heap) *cvec;
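
/* Note on the encoding used below (an observation added here, not in
   the original comments): the hashtable does not store pointers into
   cvec, which would be invalidated when the vector reallocates.
   Instead each slot holds an element's cvec index biased by 42, so a
   live entry can never collide with the table's reserved empty (0)
   and deleted (1) markers; oecount_hash and oecount_eq decode the
   slot value with "- 42".  */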

/* Hash function for oecount.  */

static hashval_t
oecount_hash (const void *p)
{
  const oecount *c = VEC_index (oecount, cvec, (size_t)p - 42);
  return htab_hash_pointer (c->op) ^ (hashval_t)c->oecode;
}

/* Comparison function for oecount.  */

static int
oecount_eq (const void *p1, const void *p2)
{
  const oecount *c1 = VEC_index (oecount, cvec, (size_t)p1 - 42);
  const oecount *c2 = VEC_index (oecount, cvec, (size_t)p2 - 42);
  return (c1->oecode == c2->oecode
          && c1->op == c2->op);
}

/* Comparison function for qsort sorting oecount elements by count.  */

static int
oecount_cmp (const void *p1, const void *p2)
{
  const oecount *c1 = (const oecount *)p1;
  const oecount *c2 = (const oecount *)p2;
  if (c1->cnt != c2->cnt)
    return c1->cnt - c2->cnt;
  else
    /* If counts are identical, use unique IDs to stabilize qsort.  */
    return c1->id - c2->id;
}

/* Walks the linear chain with result *DEF searching for an operation
   with operand OP and code OPCODE removing that from the chain.  *DEF
   is updated if there is only one operand but no operation left.  */

static void
zero_one_operation (tree *def, enum tree_code opcode, tree op)
{
  gimple stmt = SSA_NAME_DEF_STMT (*def);

  do
    {
      tree name = gimple_assign_rhs1 (stmt);

      /* If this is the operation we look for and one of the operands
         is ours simply propagate the other operand into the stmt's
         single use.  */
      if (gimple_assign_rhs_code (stmt) == opcode
          && (name == op
              || gimple_assign_rhs2 (stmt) == op))
        {
          gimple use_stmt;
          use_operand_p use;
          gimple_stmt_iterator gsi;
          if (name == op)
            name = gimple_assign_rhs2 (stmt);
          gcc_assert (has_single_use (gimple_assign_lhs (stmt)));
          single_imm_use (gimple_assign_lhs (stmt), &use, &use_stmt);
          if (gimple_assign_lhs (stmt) == *def)
            *def = name;
          SET_USE (use, name);
          if (TREE_CODE (name) != SSA_NAME)
            update_stmt (use_stmt);
          gsi = gsi_for_stmt (stmt);
          gsi_remove (&gsi, true);
          release_defs (stmt);
          return;
        }

      /* Continue walking the chain.  */
      gcc_assert (name != op
                  && TREE_CODE (name) == SSA_NAME);
      stmt = SSA_NAME_DEF_STMT (name);
    }
  while (1);
}

/* Builds one statement performing OP1 OPCODE OP2 using TMPVAR for
   the result.  Places the statement after the definition of either
   OP1 or OP2.  Returns the new statement.  */

static gimple
build_and_add_sum (tree tmpvar, tree op1, tree op2, enum tree_code opcode)
{
  gimple op1def = NULL, op2def = NULL;
  gimple_stmt_iterator gsi;
  tree op;
  gimple sum;

  /* Create the addition statement.  */
  sum = gimple_build_assign_with_ops (opcode, tmpvar, op1, op2);
  op = make_ssa_name (tmpvar, sum);
  gimple_assign_set_lhs (sum, op);

  /* Find an insertion place and insert.  */
  if (TREE_CODE (op1) == SSA_NAME)
    op1def = SSA_NAME_DEF_STMT (op1);
  if (TREE_CODE (op2) == SSA_NAME)
    op2def = SSA_NAME_DEF_STMT (op2);
  if ((!op1def || gimple_nop_p (op1def))
      && (!op2def || gimple_nop_p (op2def)))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
      gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
    }
  else if ((!op1def || gimple_nop_p (op1def))
           || (op2def && !gimple_nop_p (op2def)
               && stmt_dominates_stmt_p (op1def, op2def)))
    {
      if (gimple_code (op2def) == GIMPLE_PHI)
        {
          gsi = gsi_after_labels (gimple_bb (op2def));
          gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
        }
      else
        {
          if (!stmt_ends_bb_p (op2def))
            {
              gsi = gsi_for_stmt (op2def);
              gsi_insert_after (&gsi, sum, GSI_NEW_STMT);
            }
          else
            {
              edge e;
              edge_iterator ei;

              FOR_EACH_EDGE (e, ei, gimple_bb (op2def)->succs)
                if (e->flags & EDGE_FALLTHRU)
                  gsi_insert_on_edge_immediate (e, sum);
            }
        }
    }
  else
    {
      if (gimple_code (op1def) == GIMPLE_PHI)
        {
          gsi = gsi_after_labels (gimple_bb (op1def));
          gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
        }
      else
        {
          if (!stmt_ends_bb_p (op1def))
            {
              gsi = gsi_for_stmt (op1def);
              gsi_insert_after (&gsi, sum, GSI_NEW_STMT);
            }
          else
            {
              edge e;
              edge_iterator ei;

              FOR_EACH_EDGE (e, ei, gimple_bb (op1def)->succs)
                if (e->flags & EDGE_FALLTHRU)
                  gsi_insert_on_edge_immediate (e, sum);
            }
        }
    }
  update_stmt (sum);

  return sum;
}

/* Perform un-distribution of divisions and multiplications.
   A * X + B * X is transformed into (A + B) * X and A / X + B / X
   to (A + B) / X for real X.

   The algorithm is organized as follows.

   - First we walk the addition chain *OPS looking for summands that
     are defined by a multiplication or a real division.  This results
     in the candidates bitmap with relevant indices into *OPS.

   - Second we build the chains of multiplications or divisions for
     these candidates, counting the number of occurrences of (operand, code)
     pairs in all of the candidates chains.

   - Third we sort the (operand, code) pairs by number of occurrence and
     process them starting with the pair with the most uses.

     * For each such pair we walk the candidates again to build a
       second candidate bitmap noting all multiplication/division chains
       that have at least one occurrence of (operand, code).

     * We build an alternate addition chain only covering these
       candidates with one (operand, code) operation removed from their
       multiplication/division chain.

     * The first candidate gets replaced by the alternate addition chain
       multiplied/divided by the operand.

     * All candidate chains get disabled for further processing and
       processing of (operand, code) pairs continues.

   The alternate addition chains built are re-processed by the main
   reassociation algorithm which allows optimizing a * x * y + b * y * x
   to (a + b) * x * y in one invocation of the reassociation pass.  */
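
/* Hedged worked example (added for illustration): for the chain
   a * x + b * x + c, both a * x and b * x are candidates and the pair
   (x, MULT_EXPR) occurs twice, so the function below builds

     t = a + b;
     p = t * x;

   and rewrites the operand list as {p, 0, c}, with the 0 placeholder
   and the final chain cleaned up by the main algorithm.  */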

static bool
undistribute_ops_list (enum tree_code opcode,
                       VEC (operand_entry_t, heap) **ops, struct loop *loop)
{
  unsigned int length = VEC_length (operand_entry_t, *ops);
  operand_entry_t oe1;
  unsigned i, j;
  sbitmap candidates, candidates2;
  unsigned nr_candidates, nr_candidates2;
  sbitmap_iterator sbi0;
  VEC (operand_entry_t, heap) **subops;
  htab_t ctable;
  bool changed = false;
  int next_oecount_id = 0;

  if (length <= 1
      || opcode != PLUS_EXPR)
    return false;

  /* Build a list of candidates to process.  */
  candidates = sbitmap_alloc (length);
  sbitmap_zero (candidates);
  nr_candidates = 0;
  FOR_EACH_VEC_ELT (operand_entry_t, *ops, i, oe1)
    {
      enum tree_code dcode;
      gimple oe1def;

      if (TREE_CODE (oe1->op) != SSA_NAME)
        continue;
      oe1def = SSA_NAME_DEF_STMT (oe1->op);
      if (!is_gimple_assign (oe1def))
        continue;
      dcode = gimple_assign_rhs_code (oe1def);
      if ((dcode != MULT_EXPR
           && dcode != RDIV_EXPR)
          || !is_reassociable_op (oe1def, dcode, loop))
        continue;

      SET_BIT (candidates, i);
      nr_candidates++;
    }

  if (nr_candidates < 2)
    {
      sbitmap_free (candidates);
      return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "searching for un-distribute opportunities ");
      print_generic_expr (dump_file,
        VEC_index (operand_entry_t, *ops,
                   sbitmap_first_set_bit (candidates))->op, 0);
      fprintf (dump_file, " %d\n", nr_candidates);
    }

  /* Build linearized sub-operand lists and the counting table.  */
  cvec = NULL;
  ctable = htab_create (15, oecount_hash, oecount_eq, NULL);
  subops = XCNEWVEC (VEC (operand_entry_t, heap) *,
                     VEC_length (operand_entry_t, *ops));
  EXECUTE_IF_SET_IN_SBITMAP (candidates, 0, i, sbi0)
    {
      gimple oedef;
      enum tree_code oecode;
      unsigned j;

      oedef = SSA_NAME_DEF_STMT (VEC_index (operand_entry_t, *ops, i)->op);
      oecode = gimple_assign_rhs_code (oedef);
      linearize_expr_tree (&subops[i], oedef,
                           associative_tree_code (oecode), false);

      FOR_EACH_VEC_ELT (operand_entry_t, subops[i], j, oe1)
        {
          oecount c;
          void **slot;
          size_t idx;
          c.oecode = oecode;
          c.cnt = 1;
          c.id = next_oecount_id++;
          c.op = oe1->op;
          VEC_safe_push (oecount, heap, cvec, &c);
          idx = VEC_length (oecount, cvec) + 41;
          slot = htab_find_slot (ctable, (void *)idx, INSERT);
          if (!*slot)
            {
              *slot = (void *)idx;
            }
          else
            {
              VEC_pop (oecount, cvec);
              VEC_index (oecount, cvec, (size_t)*slot - 42)->cnt++;
            }
        }
    }
  htab_delete (ctable);

  /* Sort the counting table.  */
  VEC_qsort (oecount, cvec, oecount_cmp);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      oecount *c;
      fprintf (dump_file, "Candidates:\n");
      FOR_EACH_VEC_ELT (oecount, cvec, j, c)
        {
          fprintf (dump_file, "  %u %s: ", c->cnt,
                   c->oecode == MULT_EXPR
                   ? "*" : c->oecode == RDIV_EXPR ? "/" : "?");
          print_generic_expr (dump_file, c->op, 0);
          fprintf (dump_file, "\n");
        }
    }

  /* Process the (operand, code) pairs in order of most occurrence.  */
  candidates2 = sbitmap_alloc (length);
  while (!VEC_empty (oecount, cvec))
    {
      oecount *c = VEC_last (oecount, cvec);
      if (c->cnt < 2)
        break;

      /* Now collect the operands in the outer chain that contain
         the common operand in their inner chain.  */
      sbitmap_zero (candidates2);
      nr_candidates2 = 0;
      EXECUTE_IF_SET_IN_SBITMAP (candidates, 0, i, sbi0)
        {
          gimple oedef;
          enum tree_code oecode;
          unsigned j;
          tree op = VEC_index (operand_entry_t, *ops, i)->op;

          /* If we undistributed in this chain already this may be
             a constant.  */
          if (TREE_CODE (op) != SSA_NAME)
            continue;

          oedef = SSA_NAME_DEF_STMT (op);
          oecode = gimple_assign_rhs_code (oedef);
          if (oecode != c->oecode)
            continue;

          FOR_EACH_VEC_ELT (operand_entry_t, subops[i], j, oe1)
            {
              if (oe1->op == c->op)
                {
                  SET_BIT (candidates2, i);
                  ++nr_candidates2;
                  break;
                }
            }
        }

      if (nr_candidates2 >= 2)
        {
          operand_entry_t oe1, oe2;
          tree tmpvar;
          gimple prod;
          int first = sbitmap_first_set_bit (candidates2);

          /* Build the new addition chain.  */
          oe1 = VEC_index (operand_entry_t, *ops, first);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Building (");
              print_generic_expr (dump_file, oe1->op, 0);
            }
          tmpvar = create_tmp_reg (TREE_TYPE (oe1->op), NULL);
          add_referenced_var (tmpvar);
          zero_one_operation (&oe1->op, c->oecode, c->op);
          EXECUTE_IF_SET_IN_SBITMAP (candidates2, first+1, i, sbi0)
            {
              gimple sum;
              oe2 = VEC_index (operand_entry_t, *ops, i);
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, " + ");
                  print_generic_expr (dump_file, oe2->op, 0);
                }
              zero_one_operation (&oe2->op, c->oecode, c->op);
              sum = build_and_add_sum (tmpvar, oe1->op, oe2->op, opcode);
              oe2->op = build_zero_cst (TREE_TYPE (oe2->op));
              oe2->rank = 0;
              oe1->op = gimple_get_lhs (sum);
            }

          /* Apply the multiplication/division.  */
          prod = build_and_add_sum (tmpvar, oe1->op, c->op, c->oecode);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, ") %s ", c->oecode == MULT_EXPR ? "*" : "/");
              print_generic_expr (dump_file, c->op, 0);
              fprintf (dump_file, "\n");
            }

          /* Record it in the addition chain and disable further
             undistribution with this op.  */
          oe1->op = gimple_assign_lhs (prod);
          oe1->rank = get_rank (oe1->op);
          VEC_free (operand_entry_t, heap, subops[first]);

          changed = true;
        }

      VEC_pop (oecount, cvec);
    }

  for (i = 0; i < VEC_length (operand_entry_t, *ops); ++i)
    VEC_free (operand_entry_t, heap, subops[i]);
  free (subops);
  VEC_free (oecount, heap, cvec);
  sbitmap_free (candidates);
  sbitmap_free (candidates2);

  return changed;
}

/* If OPCODE is BIT_IOR_EXPR or BIT_AND_EXPR and CURR is a comparison
   expression, examine the other OPS to see if any of them are comparisons
   of the same values, which we may be able to combine or eliminate.
   For example, we can rewrite (a < b) | (a == b) as (a <= b).  */

static bool
eliminate_redundant_comparison (enum tree_code opcode,
                                VEC (operand_entry_t, heap) **ops,
                                unsigned int currindex,
                                operand_entry_t curr)
{
  tree op1, op2;
  enum tree_code lcode, rcode;
  gimple def1, def2;
  int i;
  operand_entry_t oe;

  if (opcode != BIT_IOR_EXPR && opcode != BIT_AND_EXPR)
    return false;

  /* Check that CURR is a comparison.  */
  if (TREE_CODE (curr->op) != SSA_NAME)
    return false;
  def1 = SSA_NAME_DEF_STMT (curr->op);
  if (!is_gimple_assign (def1))
    return false;
  lcode = gimple_assign_rhs_code (def1);
  if (TREE_CODE_CLASS (lcode) != tcc_comparison)
    return false;
  op1 = gimple_assign_rhs1 (def1);
  op2 = gimple_assign_rhs2 (def1);

  /* Now look for a similar comparison in the remaining OPS.  */
  for (i = currindex + 1;
       VEC_iterate (operand_entry_t, *ops, i, oe);
       i++)
    {
      tree t;

      if (TREE_CODE (oe->op) != SSA_NAME)
        continue;
      def2 = SSA_NAME_DEF_STMT (oe->op);
      if (!is_gimple_assign (def2))
        continue;
      rcode = gimple_assign_rhs_code (def2);
      if (TREE_CODE_CLASS (rcode) != tcc_comparison)
        continue;

      /* If we got here, we have a match.  See if we can combine the
         two comparisons.  */
      if (opcode == BIT_IOR_EXPR)
        t = maybe_fold_or_comparisons (lcode, op1, op2,
                                       rcode, gimple_assign_rhs1 (def2),
                                       gimple_assign_rhs2 (def2));
      else
        t = maybe_fold_and_comparisons (lcode, op1, op2,
                                        rcode, gimple_assign_rhs1 (def2),
                                        gimple_assign_rhs2 (def2));
      if (!t)
        continue;

      /* maybe_fold_and_comparisons and maybe_fold_or_comparisons
         always give us a boolean_type_node value back.  If the original
         BIT_AND_EXPR or BIT_IOR_EXPR was of a wider integer type,
         we need to convert.  */
      if (!useless_type_conversion_p (TREE_TYPE (curr->op), TREE_TYPE (t)))
        t = fold_convert (TREE_TYPE (curr->op), t);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Equivalence: ");
          print_generic_expr (dump_file, curr->op, 0);
          fprintf (dump_file, " %s ", op_symbol_code (opcode));
          print_generic_expr (dump_file, oe->op, 0);
          fprintf (dump_file, " -> ");
          print_generic_expr (dump_file, t, 0);
          fprintf (dump_file, "\n");
        }

      /* Now we can delete oe, as it has been subsumed by the new combined
         expression t.  */
      VEC_ordered_remove (operand_entry_t, *ops, i);
      reassociate_stats.ops_eliminated ++;

      /* If t is the same as curr->op, we're done.  Otherwise we must
         replace curr->op with t.  Special case is if we got a constant
         back, in which case we add it to the end instead of in place of
         the current entry.  */
      if (TREE_CODE (t) == INTEGER_CST)
        {
          VEC_ordered_remove (operand_entry_t, *ops, currindex);
          add_to_ops_vec (ops, t);
        }
      else if (!operand_equal_p (t, curr->op, 0))
        {
          tree tmpvar;
          gimple sum;
          enum tree_code subcode;
          tree newop1;
          tree newop2;
          gcc_assert (COMPARISON_CLASS_P (t));
          tmpvar = create_tmp_var (TREE_TYPE (t), NULL);
          add_referenced_var (tmpvar);
          extract_ops_from_tree (t, &subcode, &newop1, &newop2);
          STRIP_USELESS_TYPE_CONVERSION (newop1);
          STRIP_USELESS_TYPE_CONVERSION (newop2);
          gcc_checking_assert (is_gimple_val (newop1)
                               && is_gimple_val (newop2));
          sum = build_and_add_sum (tmpvar, newop1, newop2, subcode);
          curr->op = gimple_get_lhs (sum);
        }
      return true;
    }

  return false;
}

/* Perform various identities and other optimizations on the list of
   operand entries, stored in OPS.  The tree code for the binary
   operation between all the operands is OPCODE.  */

static void
optimize_ops_list (enum tree_code opcode,
                   VEC (operand_entry_t, heap) **ops)
{
  unsigned int length = VEC_length (operand_entry_t, *ops);
  unsigned int i;
  operand_entry_t oe;
  operand_entry_t oelast = NULL;
  bool iterate = false;

  if (length == 1)
    return;

  oelast = VEC_last (operand_entry_t, *ops);

  /* If the last two are constants, pop the constants off, merge them
     and try the next two.  */
  if (oelast->rank == 0 && is_gimple_min_invariant (oelast->op))
    {
      operand_entry_t oelm1 = VEC_index (operand_entry_t, *ops, length - 2);

      if (oelm1->rank == 0
          && is_gimple_min_invariant (oelm1->op)
          && useless_type_conversion_p (TREE_TYPE (oelm1->op),
                                        TREE_TYPE (oelast->op)))
        {
          tree folded = fold_binary (opcode, TREE_TYPE (oelm1->op),
                                     oelm1->op, oelast->op);

          if (folded && is_gimple_min_invariant (folded))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Merging constants\n");

              VEC_pop (operand_entry_t, *ops);
              VEC_pop (operand_entry_t, *ops);

              add_to_ops_vec (ops, folded);
              reassociate_stats.constants_eliminated++;

              optimize_ops_list (opcode, ops);
              return;
            }
        }
    }

  eliminate_using_constants (opcode, ops);
  oelast = NULL;

  for (i = 0; VEC_iterate (operand_entry_t, *ops, i, oe);)
    {
      bool done = false;

      if (eliminate_not_pairs (opcode, ops, i, oe))
        return;
      if (eliminate_duplicate_pair (opcode, ops, &done, i, oe, oelast)
          || (!done && eliminate_plus_minus_pair (opcode, ops, i, oe))
          || (!done && eliminate_redundant_comparison (opcode, ops, i, oe)))
        {
          if (done)
            return;
          iterate = true;
          oelast = NULL;
          continue;
        }
      oelast = oe;
      i++;
    }

  length = VEC_length (operand_entry_t, *ops);
  oelast = VEC_last (operand_entry_t, *ops);

  if (iterate)
    optimize_ops_list (opcode, ops);
}

/* Return true if OPERAND is defined by a PHI node which uses the LHS
   of STMT in its operands.  This is also known as a "destructive
   update" operation.  */

static bool
is_phi_for_stmt (gimple stmt, tree operand)
{
  gimple def_stmt;
  tree lhs;
  use_operand_p arg_p;
  ssa_op_iter i;

  if (TREE_CODE (operand) != SSA_NAME)
    return false;

  lhs = gimple_assign_lhs (stmt);

  def_stmt = SSA_NAME_DEF_STMT (operand);
  if (gimple_code (def_stmt) != GIMPLE_PHI)
    return false;

  FOR_EACH_PHI_ARG (arg_p, def_stmt, i, SSA_OP_USE)
    if (lhs == USE_FROM_PTR (arg_p))
      return true;
  return false;
}

/* Remove def stmt of VAR if VAR has zero uses and recurse
   on rhs1 operand if so.  */

static void
remove_visited_stmt_chain (tree var)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  while (1)
    {
      if (TREE_CODE (var) != SSA_NAME || !has_zero_uses (var))
        return;
      stmt = SSA_NAME_DEF_STMT (var);
      if (!is_gimple_assign (stmt)
          || !gimple_visited_p (stmt))
        return;
      var = gimple_assign_rhs1 (stmt);
      gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      release_defs (stmt);
    }
}

/* Recursively rewrite our linearized statements so that the operators
   match those in OPS[OPINDEX], putting the computation in rank
   order.  */

static void
rewrite_expr_tree (gimple stmt, unsigned int opindex,
                   VEC(operand_entry_t, heap) * ops, bool moved)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  operand_entry_t oe;

  /* If we have three operands left, then we want to make sure the ones
     that get the double binary op are the ones with the same rank.

     The alternative we try is to see if this is a destructive
     update style statement, which is like:
     b = phi (a, ...)
     a = c + b;
     In that case, we want to use the destructive update form to
     expose the possible vectorizer sum reduction opportunity.
     In that case, the third operand will be the phi node.

     We could, of course, try to be better as noted above, and do a
     lot of work to try to find these opportunities in >3 operand
     cases, but it is unlikely to be worth it.  */
  if (opindex + 3 == VEC_length (operand_entry_t, ops))
    {
      operand_entry_t oe1, oe2, oe3;

      oe1 = VEC_index (operand_entry_t, ops, opindex);
      oe2 = VEC_index (operand_entry_t, ops, opindex + 1);
      oe3 = VEC_index (operand_entry_t, ops, opindex + 2);

      if ((oe1->rank == oe2->rank
           && oe2->rank != oe3->rank)
          || (is_phi_for_stmt (stmt, oe3->op)
              && !is_phi_for_stmt (stmt, oe1->op)
              && !is_phi_for_stmt (stmt, oe2->op)))
        {
          struct operand_entry temp = *oe3;
          oe3->op = oe1->op;
          oe3->rank = oe1->rank;
          oe1->op = temp.op;
          oe1->rank = temp.rank;
        }
      else if ((oe1->rank == oe3->rank
                && oe2->rank != oe3->rank)
               || (is_phi_for_stmt (stmt, oe2->op)
                   && !is_phi_for_stmt (stmt, oe1->op)
                   && !is_phi_for_stmt (stmt, oe3->op)))
        {
          struct operand_entry temp = *oe2;
          oe2->op = oe1->op;
          oe2->rank = oe1->rank;
          oe1->op = temp.op;
          oe1->rank = temp.rank;
        }
    }

  /* The final recursion case for this function is that you have
     exactly two operations left.
     If we had exactly one op in the entire list to start with, we
     would have never called this function, and the tail recursion
     rewrites them one at a time.  */
  if (opindex + 2 == VEC_length (operand_entry_t, ops))
    {
      operand_entry_t oe1, oe2;

      oe1 = VEC_index (operand_entry_t, ops, opindex);
      oe2 = VEC_index (operand_entry_t, ops, opindex + 1);

      if (rhs1 != oe1->op || rhs2 != oe2->op)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Transforming ");
              print_gimple_stmt (dump_file, stmt, 0, 0);
            }

          gimple_assign_set_rhs1 (stmt, oe1->op);
          gimple_assign_set_rhs2 (stmt, oe2->op);
          update_stmt (stmt);
          if (rhs1 != oe1->op && rhs1 != oe2->op)
            remove_visited_stmt_chain (rhs1);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " into ");
              print_gimple_stmt (dump_file, stmt, 0, 0);
            }
        }
      return;
    }

  /* If we hit here, we should have 3 or more ops left.  */
  gcc_assert (opindex + 2 < VEC_length (operand_entry_t, ops));

  /* Rewrite the next operator.  */
  oe = VEC_index (operand_entry_t, ops, opindex);

  if (oe->op != rhs2)
    {
      if (!moved)
        {
          gimple_stmt_iterator gsinow, gsirhs1;
          gimple stmt1 = stmt, stmt2;
          unsigned int count;

          gsinow = gsi_for_stmt (stmt);
          count = VEC_length (operand_entry_t, ops) - opindex - 2;
          while (count-- != 0)
            {
              stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt1));
              gsirhs1 = gsi_for_stmt (stmt2);
              gsi_move_before (&gsirhs1, &gsinow);
              gsi_prev (&gsinow);
              stmt1 = stmt2;
            }
          moved = true;
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Transforming ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }

      gimple_assign_set_rhs2 (stmt, oe->op);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, " into ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }
    }
  /* Recurse on the LHS of the binary operator, which is guaranteed to
     be the non-leaf side.  */
  rewrite_expr_tree (SSA_NAME_DEF_STMT (rhs1), opindex + 1, ops, moved);
}

/* Transform STMT, which is really (A + B) + (C + D) into the left
   linear form, ((A+B)+C)+D.
   Recurse on D if necessary.  */

static void
linearize_expr (gimple stmt)
{
  gimple_stmt_iterator gsinow, gsirhs;
  gimple binlhs = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
  gimple binrhs = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
  enum tree_code rhscode = gimple_assign_rhs_code (stmt);
  gimple newbinrhs = NULL;
  struct loop *loop = loop_containing_stmt (stmt);

  gcc_assert (is_reassociable_op (binlhs, rhscode, loop)
              && is_reassociable_op (binrhs, rhscode, loop));

  gsinow = gsi_for_stmt (stmt);
  gsirhs = gsi_for_stmt (binrhs);
  gsi_move_before (&gsirhs, &gsinow);

  gimple_assign_set_rhs2 (stmt, gimple_assign_rhs1 (binrhs));
  gimple_assign_set_rhs1 (binrhs, gimple_assign_lhs (binlhs));
  gimple_assign_set_rhs1 (stmt, gimple_assign_lhs (binrhs));

  if (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    newbinrhs = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Linearized: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  reassociate_stats.linearized++;
  update_stmt (binrhs);
  update_stmt (binlhs);
  update_stmt (stmt);

  gimple_set_visited (stmt, true);
  gimple_set_visited (binlhs, true);
  gimple_set_visited (binrhs, true);

  /* Tail recurse on the new rhs if it still needs reassociation.  */
  if (newbinrhs && is_reassociable_op (newbinrhs, rhscode, loop))
    /* ??? This should probably be linearize_expr (newbinrhs) but I don't
       want to change the algorithm while converting to tuples.  */
    linearize_expr (stmt);
}

/* If LHS has a single immediate use that is a GIMPLE_ASSIGN statement, return
   it.  Otherwise, return NULL.  */

static gimple
get_single_immediate_use (tree lhs)
{
  use_operand_p immuse;
  gimple immusestmt;

  if (TREE_CODE (lhs) == SSA_NAME
      && single_imm_use (lhs, &immuse, &immusestmt)
      && is_gimple_assign (immusestmt))
    return immusestmt;

  return NULL;
}

/* Recursively negate the value of TONEGATE, and return the SSA_NAME
   representing the negated value.  Insertions of any necessary
   instructions go before GSI.
   This function is recursive in that, if you hand it "a_5" as the
   value to negate, and a_5 is defined by "a_5 = b_3 + b_4", it will
   transform b_3 + b_4 into a_5 = -b_3 + -b_4.  */

static tree
negate_value (tree tonegate, gimple_stmt_iterator *gsi)
{
  gimple negatedefstmt = NULL;
  tree resultofnegate;

  /* If we are trying to negate a name, defined by an add, negate the
     add operands instead.  */
  if (TREE_CODE (tonegate) == SSA_NAME)
    negatedefstmt = SSA_NAME_DEF_STMT (tonegate);
  if (TREE_CODE (tonegate) == SSA_NAME
      && is_gimple_assign (negatedefstmt)
      && TREE_CODE (gimple_assign_lhs (negatedefstmt)) == SSA_NAME
      && has_single_use (gimple_assign_lhs (negatedefstmt))
      && gimple_assign_rhs_code (negatedefstmt) == PLUS_EXPR)
    {
      gimple_stmt_iterator gsi;
      tree rhs1 = gimple_assign_rhs1 (negatedefstmt);
      tree rhs2 = gimple_assign_rhs2 (negatedefstmt);

      gsi = gsi_for_stmt (negatedefstmt);
      rhs1 = negate_value (rhs1, &gsi);
      gimple_assign_set_rhs1 (negatedefstmt, rhs1);

      gsi = gsi_for_stmt (negatedefstmt);
      rhs2 = negate_value (rhs2, &gsi);
      gimple_assign_set_rhs2 (negatedefstmt, rhs2);

      update_stmt (negatedefstmt);
      return gimple_assign_lhs (negatedefstmt);
    }

  tonegate = fold_build1 (NEGATE_EXPR, TREE_TYPE (tonegate), tonegate);
  resultofnegate = force_gimple_operand_gsi (gsi, tonegate, true,
                                             NULL_TREE, true, GSI_SAME_STMT);
  return resultofnegate;
}

/* Return true if we should break up the subtract in STMT into an add
   with negate.  This is true when the subtract operands are really
   adds, or the subtract itself is used in an add expression.  In
   either case, breaking up the subtract into an add with negate
   exposes the adds to reassociation.  */

static bool
should_break_up_subtract (gimple stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree binlhs = gimple_assign_rhs1 (stmt);
  tree binrhs = gimple_assign_rhs2 (stmt);
  gimple immusestmt;
  struct loop *loop = loop_containing_stmt (stmt);

  if (TREE_CODE (binlhs) == SSA_NAME
      && is_reassociable_op (SSA_NAME_DEF_STMT (binlhs), PLUS_EXPR, loop))
    return true;

  if (TREE_CODE (binrhs) == SSA_NAME
      && is_reassociable_op (SSA_NAME_DEF_STMT (binrhs), PLUS_EXPR, loop))
    return true;

  if (TREE_CODE (lhs) == SSA_NAME
      && (immusestmt = get_single_immediate_use (lhs))
      && is_gimple_assign (immusestmt)
      && (gimple_assign_rhs_code (immusestmt) == PLUS_EXPR
          || gimple_assign_rhs_code (immusestmt) == MULT_EXPR))
    return true;
  return false;
}
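
/* Hedged illustration (added): for

     t = a + b;
     u = t - c;
     v = u + d;

   should_break_up_subtract accepts u = t - c (its first operand is an
   add and its result feeds an add), so break_up_subtract below turns
   it into u = t + (-c), letting the whole chain linearize into one
   PLUS_EXPR operand list {a, b, -c, d}.  */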

/* Transform STMT from A - B into A + -B.  */

static void
break_up_subtract (gimple stmt, gimple_stmt_iterator *gsip)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Breaking up subtract ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  rhs2 = negate_value (rhs2, gsip);
  gimple_assign_set_rhs_with_ops (gsip, PLUS_EXPR, rhs1, rhs2);
  update_stmt (stmt);
}

/* Recursively linearize a binary expression that is the RHS of STMT.
   Place the operands of the expression tree in the vector named OPS.  */

static void
linearize_expr_tree (VEC(operand_entry_t, heap) **ops, gimple stmt,
                     bool is_associative, bool set_visited)
{
  tree binlhs = gimple_assign_rhs1 (stmt);
  tree binrhs = gimple_assign_rhs2 (stmt);
  gimple binlhsdef, binrhsdef;
  bool binlhsisreassoc = false;
  bool binrhsisreassoc = false;
  enum tree_code rhscode = gimple_assign_rhs_code (stmt);
  struct loop *loop = loop_containing_stmt (stmt);

  if (set_visited)
    gimple_set_visited (stmt, true);

  if (TREE_CODE (binlhs) == SSA_NAME)
    {
      binlhsdef = SSA_NAME_DEF_STMT (binlhs);
      binlhsisreassoc = (is_reassociable_op (binlhsdef, rhscode, loop)
                         && !stmt_could_throw_p (binlhsdef));
    }

  if (TREE_CODE (binrhs) == SSA_NAME)
    {
      binrhsdef = SSA_NAME_DEF_STMT (binrhs);
      binrhsisreassoc = (is_reassociable_op (binrhsdef, rhscode, loop)
                         && !stmt_could_throw_p (binrhsdef));
    }

  /* If the LHS is not reassociable, but the RHS is, we need to swap
     them.  If neither is reassociable, there is nothing we can do, so
     just put them in the ops vector.  If the LHS is reassociable,
     linearize it.  If both are reassociable, then linearize the RHS
     and the LHS.  */

  if (!binlhsisreassoc)
    {
      tree temp;

      /* If this is not an associative operation like division, give up.  */
      if (!is_associative)
        {
          add_to_ops_vec (ops, binrhs);
          return;
        }

      if (!binrhsisreassoc)
        {
          add_to_ops_vec (ops, binrhs);
          add_to_ops_vec (ops, binlhs);
          return;
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "swapping operands of ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }

      swap_tree_operands (stmt,
                          gimple_assign_rhs1_ptr (stmt),
                          gimple_assign_rhs2_ptr (stmt));
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, " is now ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }

      /* We want to make it so the lhs is always the reassociative op,
         so swap.  */
      temp = binlhs;
      binlhs = binrhs;
      binrhs = temp;
    }
  else if (binrhsisreassoc)
    {
      linearize_expr (stmt);
      binlhs = gimple_assign_rhs1 (stmt);
      binrhs = gimple_assign_rhs2 (stmt);
    }

  gcc_assert (TREE_CODE (binrhs) != SSA_NAME
              || !is_reassociable_op (SSA_NAME_DEF_STMT (binrhs),
                                      rhscode, loop));
  linearize_expr_tree (ops, SSA_NAME_DEF_STMT (binlhs),
                       is_associative, set_visited);
  add_to_ops_vec (ops, binrhs);
}

/* Repropagate the negates back into subtracts, since no other pass
   currently does it.  */

static void
repropagate_negates (void)
{
  unsigned int i = 0;
  tree negate;

  FOR_EACH_VEC_ELT (tree, plus_negates, i, negate)
    {
      gimple user = get_single_immediate_use (negate);

      if (!user || !is_gimple_assign (user))
        continue;

      /* The negate operand can be either operand of a PLUS_EXPR
         (it can be the LHS if the RHS is a constant for example).

         Force the negate operand to the RHS of the PLUS_EXPR, then
         transform the PLUS_EXPR into a MINUS_EXPR.  */
      if (gimple_assign_rhs_code (user) == PLUS_EXPR)
        {
          /* If the negated operand appears on the LHS of the
             PLUS_EXPR, exchange the operands of the PLUS_EXPR
             to force the negated operand to the RHS of the PLUS_EXPR.  */
          if (gimple_assign_rhs1 (user) == negate)
            {
              swap_tree_operands (user,
                                  gimple_assign_rhs1_ptr (user),
                                  gimple_assign_rhs2_ptr (user));
            }

          /* Now transform the PLUS_EXPR into a MINUS_EXPR and replace
             the RHS of the PLUS_EXPR with the operand of the NEGATE_EXPR.  */
          if (gimple_assign_rhs2 (user) == negate)
            {
              tree rhs1 = gimple_assign_rhs1 (user);
              tree rhs2 = get_unary_op (negate, NEGATE_EXPR);
              gimple_stmt_iterator gsi = gsi_for_stmt (user);
              gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, rhs1, rhs2);
              update_stmt (user);
            }
        }
      else if (gimple_assign_rhs_code (user) == MINUS_EXPR)
        {
          if (gimple_assign_rhs1 (user) == negate)
            {
              /* We have
                 x = -a
                 y = x - b
                 which we transform into
                 x = a + b
                 y = -x .
                 This pushes down the negate which we possibly can merge
                 into some other operation, hence insert it into the
                 plus_negates vector.  */
              gimple feed = SSA_NAME_DEF_STMT (negate);
              tree a = gimple_assign_rhs1 (feed);
              tree rhs2 = gimple_assign_rhs2 (user);
              gimple_stmt_iterator gsi = gsi_for_stmt (feed), gsi2;
              gimple_replace_lhs (feed, negate);
              gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, a, rhs2);
              update_stmt (gsi_stmt (gsi));
              gsi2 = gsi_for_stmt (user);
              gimple_assign_set_rhs_with_ops (&gsi2, NEGATE_EXPR, negate, NULL);
              update_stmt (gsi_stmt (gsi2));
              gsi_move_before (&gsi, &gsi2);
              VEC_safe_push (tree, heap, plus_negates,
                             gimple_assign_lhs (gsi_stmt (gsi2)));
            }
          else
            {
              /* Transform "x = -a; y = b - x" into "y = b + a", getting
                 rid of one operation.  */
              gimple feed = SSA_NAME_DEF_STMT (negate);
              tree a = gimple_assign_rhs1 (feed);
              tree rhs1 = gimple_assign_rhs1 (user);
              gimple_stmt_iterator gsi = gsi_for_stmt (user);
              gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, rhs1, a);
              update_stmt (gsi_stmt (gsi));
            }
        }
    }
}
/* Returns true if OP is of a type for which we can do reassociation.
   That is for integral types with wrapping overflow, for
   non-saturating fixed-point types, and for floating-point types
   when -fassociative-math is enabled.  */

static bool
can_reassociate_p (tree op)
{
  tree type = TREE_TYPE (op);
  if ((INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
      || NON_SAT_FIXED_POINT_TYPE_P (type)
      || (flag_associative_math && FLOAT_TYPE_P (type)))
    return true;
  return false;
}
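
/* For illustration only (guarded out): why the predicate above
   insists on wrapping overflow for integral types.  Unsigned
   arithmetic wraps modulo 2^N, so regrouping can never change the
   result even when an intermediate sum wraps; for signed types
   without -fwrapv, a regrouped intermediate could overflow, which is
   undefined behavior, so reassociation is not safe there.  Floating
   point likewise needs -fassociative-math because rounding makes
   (a + b) + c differ from a + (b + c) in general.  */
#if 0
#include <assert.h>
#include <limits.h>

int
main (void)
{
  unsigned int a = UINT_MAX, b = 1, c = 5;
  /* The intermediate a + b wraps to 0, yet both groupings agree.  */
  assert ((a + b) + c == a + (b + c));
  return 0;
}
#endif
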
/* Break up subtract operations in block BB.

   We do this top down because we don't know whether the subtract is
   part of a possible chain of reassociation except at the top.

   I.e., given
   d = f + g
   c = a + e
   b = c - d
   q = b - r
   k = t - q

   we want to break up k = t - q, but we won't until we've transformed
   q = b - r, which won't be broken up until we transform b = c - d.

   En passant, clear the GIMPLE visited flag on every statement.  */

static void
break_up_subtract_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;
  basic_block son;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple_set_visited (stmt, false);

      if (!is_gimple_assign (stmt)
          || !can_reassociate_p (gimple_assign_lhs (stmt)))
        continue;

      /* Look for simple gimple subtract operations.  */
      if (gimple_assign_rhs_code (stmt) == MINUS_EXPR)
        {
          if (!can_reassociate_p (gimple_assign_rhs1 (stmt))
              || !can_reassociate_p (gimple_assign_rhs2 (stmt)))
            continue;

          /* Check for a subtract used only in an addition.  If this
             is the case, transform it into add of a negate for better
             reassociation.  I.e., transform C = A - B into C = A + -B
             if C is only used in an addition.  */
          if (should_break_up_subtract (stmt))
            break_up_subtract (stmt, &gsi);
        }
      else if (gimple_assign_rhs_code (stmt) == NEGATE_EXPR
               && can_reassociate_p (gimple_assign_rhs1 (stmt)))
        VEC_safe_push (tree, heap, plus_negates, gimple_assign_lhs (stmt));
    }
  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    break_up_subtract_bb (son);
}
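
/* For illustration only (guarded out): the effect of breaking up a
   subtract.  Rewriting c = a - b as c = a + -b lets a chain such as
   (a - b) + b be linearized into the operands a, -b, b, where the
   optimizer can cancel the -b/b pair; for wrapping types the values
   agree even when the negate or the sum wraps.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned int a = 42, b = 0x80000000u;
  unsigned int c = a - b;
  unsigned int c2 = a + -b;     /* The broken-up form a + -b.  */
  assert (c == c2);
  assert ((a - b) + b == a);    /* The -b/b pair cancels away.  */
  return 0;
}
#endif
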
/* Reassociate expressions in basic block BB and its post-dominator
   children.  */

static void
reassociate_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;
  basic_block son;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_assign (stmt)
          && !stmt_could_throw_p (stmt))
        {
          tree lhs, rhs1, rhs2;
          enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

          /* If this is not a gimple binary expression, there is
             nothing for us to do with it.  */
          if (get_gimple_rhs_class (rhs_code) != GIMPLE_BINARY_RHS)
            continue;

          /* If this was part of an already processed statement,
             we don't need to touch it again.  */
          if (gimple_visited_p (stmt))
            {
              /* This statement might have become dead because of previous
                 reassociations.  */
              if (has_zero_uses (gimple_get_lhs (stmt)))
                {
                  gsi_remove (&gsi, true);
                  release_defs (stmt);
                  /* We might end up removing the last stmt above which
                     places the iterator to the end of the sequence.
                     Reset it to the last stmt in this case which might
                     be the end of the sequence as well if we removed
                     the last statement of the sequence.  In which case
                     we need to bail out.  */
                  if (gsi_end_p (gsi))
                    {
                      gsi = gsi_last_bb (bb);
                      if (gsi_end_p (gsi))
                        break;
                    }
                }
              continue;
            }

          lhs = gimple_assign_lhs (stmt);
          rhs1 = gimple_assign_rhs1 (stmt);
          rhs2 = gimple_assign_rhs2 (stmt);

          /* For non-bit or min/max operations we can't associate
             all types.  Verify that here.  */
          if (rhs_code != BIT_IOR_EXPR
              && rhs_code != BIT_AND_EXPR
              && rhs_code != BIT_XOR_EXPR
              && rhs_code != MIN_EXPR
              && rhs_code != MAX_EXPR
              && (!can_reassociate_p (lhs)
                  || !can_reassociate_p (rhs1)
                  || !can_reassociate_p (rhs2)))
            continue;

          if (associative_tree_code (rhs_code))
            {
              VEC(operand_entry_t, heap) *ops = NULL;

              /* There may be no immediate uses left by the time we
                 get here because we may have eliminated them all.  */
              if (TREE_CODE (lhs) == SSA_NAME && has_zero_uses (lhs))
                continue;

              gimple_set_visited (stmt, true);
              linearize_expr_tree (&ops, stmt, true, true);
              VEC_qsort (operand_entry_t, ops, sort_by_operand_rank);
              optimize_ops_list (rhs_code, &ops);
              if (undistribute_ops_list (rhs_code, &ops,
                                         loop_containing_stmt (stmt)))
                {
                  VEC_qsort (operand_entry_t, ops, sort_by_operand_rank);
                  optimize_ops_list (rhs_code, &ops);
                }

              if (VEC_length (operand_entry_t, ops) == 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Transforming ");
                      print_gimple_stmt (dump_file, stmt, 0, 0);
                    }

                  rhs1 = gimple_assign_rhs1 (stmt);
                  gimple_assign_set_rhs_from_tree (&gsi,
                                                   VEC_last (operand_entry_t,
                                                             ops)->op);
                  update_stmt (stmt);
                  remove_visited_stmt_chain (rhs1);

                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, " into ");
                      print_gimple_stmt (dump_file, stmt, 0, 0);
                    }
                }
              else
                rewrite_expr_tree (stmt, 0, ops, false);

              VEC_free (operand_entry_t, heap, ops);
            }
        }
    }
  for (son = first_dom_son (CDI_POST_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_POST_DOMINATORS, son))
    reassociate_bb (son);
}
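
/* For illustration only (guarded out, not part of GCC): the shape of
   the pipeline driven above, on a toy operand vector instead of
   VEC(operand_entry_t).  Operands are sorted by rank so that equal
   operands become adjacent, after which a single pass can drop a
   cancelling x/-x pair, similar in spirit to what optimize_ops_list
   does for a PLUS_EXPR chain.  All names below are hypothetical.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct toy_op { int rank; int value; int negated; };

static int
cmp_rank (const void *a, const void *b)
{
  return ((const struct toy_op *) a)->rank
         - ((const struct toy_op *) b)->rank;
}

int
main (void)
{
  /* Linearized PLUS_EXPR chain b + -a + a + c, with made-up ranks.  */
  struct toy_op ops[] = { { 2, 'b', 0 }, { 1, 'a', 1 },
                          { 1, 'a', 0 }, { 3, 'c', 0 } };
  size_t n = sizeof ops / sizeof ops[0], i = 0, j;

  qsort (ops, n, sizeof ops[0], cmp_rank);

  /* Adjacent equal operands with opposite signs cancel to zero,
     shrinking the vector.  */
  while (i + 1 < n)
    {
      if (ops[i].value == ops[i + 1].value
          && ops[i].negated != ops[i + 1].negated)
        {
          for (j = i; j + 2 < n; j++)
            ops[j] = ops[j + 2];
          n -= 2;
        }
      else
        i++;
    }

  for (i = 0; i < n; i++)
    printf ("%c ", ops[i].value);   /* Prints "b c".  */
  printf ("\n");
  return 0;
}
#endif
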
void dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops);
void debug_ops_vector (VEC (operand_entry_t, heap) *ops);

/* Dump the operand entry vector OPS to FILE.  */

void
dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops)
{
  operand_entry_t oe;
  unsigned int i;

  FOR_EACH_VEC_ELT (operand_entry_t, ops, i, oe)
    {
      fprintf (file, "Op %d -> rank: %d, tree: ", i, oe->rank);
      print_generic_expr (file, oe->op, 0);
    }
}

/* Dump the operand entry vector OPS to STDERR.  */

DEBUG_FUNCTION void
debug_ops_vector (VEC (operand_entry_t, heap) *ops)
{
  dump_ops_vector (stderr, ops);
}
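
/* Perform one round of reassociation: break up subtracts walking the
   dominator tree down from the entry block, then reassociate walking
   the post-dominator tree from the exit block.  */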
static void
do_reassoc (void)
{
  break_up_subtract_bb (ENTRY_BLOCK_PTR);
  reassociate_bb (EXIT_BLOCK_PTR);
}
/* Initialize the reassociation pass.  */

static void
init_reassoc (void)
{
  int i;
  long rank = 2;
  tree param;
  int *bbs = XNEWVEC (int, last_basic_block + 1);

  /* Find the loops, so that we can prevent moving calculations in
     them.  */
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  memset (&reassociate_stats, 0, sizeof (reassociate_stats));

  operand_entry_pool = create_alloc_pool ("operand entry pool",
                                          sizeof (struct operand_entry), 30);
  next_operand_entry_id = 0;

  /* Reverse post order (RPO) will give us something where deeper
     loops come later.  */
  pre_and_rev_post_order_compute (NULL, bbs, false);
  bb_rank = XCNEWVEC (long, last_basic_block + 1);
  operand_rank = pointer_map_create ();

  /* Give each argument a distinct rank.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
        {
          tree def = gimple_default_def (cfun, param);
          insert_operand_rank (def, ++rank);
        }
    }

  /* Give the chain decl a distinct rank.  */
  if (cfun->static_chain_decl != NULL)
    {
      tree def = gimple_default_def (cfun, cfun->static_chain_decl);
      if (def != NULL)
        insert_operand_rank (def, ++rank);
    }

  /* Set up the rank for each BB.  */
  for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
    bb_rank[bbs[i]] = ++rank << 16;

  free (bbs);
  calculate_dominance_info (CDI_POST_DOMINATORS);
  plus_negates = NULL;
}
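
/* For illustration only (guarded out): the rank encoding set up
   above.  Each basic block, in reverse post order, gets
   ++rank << 16 as its base rank, so a block's base rank is strictly
   greater than any rank derived from an earlier block, while the low
   16 bits leave room for rank increments within the block.  */
#if 0
#include <assert.h>

int
main (void)
{
  long rank = 2;
  long bb1 = ++rank << 16;      /* First block in RPO.  */
  long bb2 = ++rank << 16;      /* A later block.  */
  /* Any in-block offset below 1 << 16 keeps the ordering intact.  */
  assert (bb1 + 0xffff < bb2);
  return 0;
}
#endif
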
/* Clean up after the reassociation pass, and print stats if
   requested.  */

static void
fini_reassoc (void)
{
  statistics_counter_event (cfun, "Linearized",
                            reassociate_stats.linearized);
  statistics_counter_event (cfun, "Constants eliminated",
                            reassociate_stats.constants_eliminated);
  statistics_counter_event (cfun, "Ops eliminated",
                            reassociate_stats.ops_eliminated);
  statistics_counter_event (cfun, "Statements rewritten",
                            reassociate_stats.rewritten);

  pointer_map_destroy (operand_rank);
  free_alloc_pool (operand_entry_pool);
  free (bb_rank);
  VEC_free (tree, heap, plus_negates);
  free_dominance_info (CDI_POST_DOMINATORS);
  loop_optimizer_finalize ();
}
/* Gate and execute functions for Reassociation.  */

static unsigned int
execute_reassoc (void)
{
  init_reassoc ();

  do_reassoc ();
  repropagate_negates ();

  fini_reassoc ();
  return 0;
}

static bool
gate_tree_ssa_reassoc (void)
{
  return flag_tree_reassoc != 0;
}

struct gimple_opt_pass pass_reassoc =
{
 {
  GIMPLE_PASS,
  "reassoc",				/* name */
  gate_tree_ssa_reassoc,		/* gate */
  execute_reassoc,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_REASSOC,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
 }
};