/* Reassociation for trees.
   Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "vec.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "cfgloop.h"
#include "flags.h"

/* This is a simple global reassociation pass.  It is, in part, based
   on the LLVM pass of the same name (They do some things more/less
   than we do, in different orders, etc).

   It consists of five steps:

   1. Breaking up subtract operations into addition + negate, where
   it would promote the reassociation of adds.

   2. Left linearization of the expression trees, so that (A+B)+(C+D)
   becomes (((A+B)+C)+D), which is easier for us to rewrite later.
   During linearization, we place the operands of the binary
   expressions into a vector of operand_entry_t.

   3. Optimization of the operand lists, eliminating things like a +
   -a, a & a, etc.

   4. Rewrite the expression trees we linearized and optimized so
   they are in proper rank order.

   5. Repropagate negates, as nothing else will clean it up ATM.

   A bit of theory on #4, since nobody seems to write anything down
   about why it makes sense to do it the way they do it:

   We could do this much nicer theoretically, but don't (for reasons
   explained after how to do it theoretically nice :P).

   In order to promote the most redundancy elimination, you want
   binary expressions whose operands are the same rank (or
   preferably, the same value) exposed to the redundancy eliminator,
   for possible elimination.

   So the way to do this if we really cared, is to build the new op
   tree from the leaves to the roots, merging as you go, and putting the
   new op on the end of the worklist, until you are left with one
   thing on the worklist.

   IE if you have to rewrite the following set of operands (listed with
   rank in parentheses), with opcode PLUS_EXPR:

   a (1),  b (1),  c (1),  d (2), e (2)

   We start with our merge worklist empty, and the ops list with all of
   those on it.

   You want to first merge all leaves of the same rank, as much as
   possible.

   So first build a binary op of

   mergetmp = a + b, and put "mergetmp" on the merge worklist.

   Because there is no three operand form of PLUS_EXPR, c is not going to
   be exposed to redundancy elimination as a rank 1 operand.

   So you might as well throw it on the merge worklist (you could also
   consider it to now be a rank two operand, and merge it with d and e,
   but in this case, you then have evicted e from a binary op.  So at
   least in this situation, you can't win.)

   Then build a binary op of d + e
   mergetmp2 = d + e

   and put mergetmp2 on the merge worklist.

   so merge worklist = {mergetmp, c, mergetmp2}

   Continue building binary ops of these operations until you have only
   one operation left on the worklist.

   So we have

   build binary op
   mergetmp3 = mergetmp + c

   worklist = {mergetmp2, mergetmp3}

   mergetmp4 = mergetmp2 + mergetmp3

   worklist = {mergetmp4}

   because we have one operation left, we can now just set the original
   statement equal to the result of that operation.

   This will at least expose a + b and d + e to redundancy elimination
   as binary operations.

   For extra points, you can reuse the old statements to build the
   mergetmps, since you shouldn't run out.

   So why don't we do this?

   Because it's expensive, and rarely will help.  Most trees we are
   reassociating have 3 or fewer ops.  If they have 2 ops, they already
   will be written into a nice single binary op.  If you have 3 ops, a
   single simple check suffices to tell you whether the first two are of the
   same rank.  If so, you know to order it

   mergetmp = op1 + op2
   newstmt = mergetmp + op3

   instead of
   mergetmp = op2 + op3
   newstmt = mergetmp + op1

   If all three are of the same rank, you can't expose them all in a
   single binary operator anyway, so the above is *still* the best you
   can do.

   Thus, this is what we do.  When we have three ops left, we check to see
   what order to put them in, and call it a day.  As a nod to vector sum
   reduction, we check if any of the ops are really a phi node that is a
   destructive update for the associating op, and keep the destructive
   update together for vector sum reduction recognition.  */
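
/* As a small end-to-end illustration (a hypothetical example, not
   taken from any testcase): given

     e = a - b;
     f = e + c;
     g = f - a;

   step 1 rewrites the subtracts as adds of negates,

     e = a + -b;
     f = e + c;
     g = f + -a;

   linearization collects the operand list {a, -b, c, -a} for g, the
   optimization step cancels a against -a, and the rewrite emits the
   equivalent of g = c + -b.  */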

/* Statistics */
static struct
{
  int linearized;
  int constants_eliminated;
  int ops_eliminated;
  int rewritten;
} reassociate_stats;

/* Operator, rank pair.  */
typedef struct operand_entry
{
  unsigned int rank;
  int id;
  tree op;
} *operand_entry_t;

static alloc_pool operand_entry_pool;

/* This is used to assign a unique ID to each struct operand_entry
   so that qsort results are identical on different hosts.  */
static int next_operand_entry_id;

/* Starting rank number for a given basic block, so that we can rank
   operations using unmovable instructions in that BB based on the bb
   depth.  */
static long *bb_rank;

/* Operand->rank hashtable.  */
static struct pointer_map_t *operand_rank;

/* Look up the operand rank structure for expression E.  */

static inline long
find_operand_rank (tree e)
{
  void **slot = pointer_map_contains (operand_rank, e);
  return slot ? (long) (intptr_t) *slot : -1;
}

/* Insert {E,RANK} into the operand rank hashtable.  */

static inline void
insert_operand_rank (tree e, long rank)
{
  void **slot;
  gcc_assert (rank > 0);
  slot = pointer_map_insert (operand_rank, e);
  gcc_assert (!*slot);
  *slot = (void *) (intptr_t) rank;
}

/* Given an expression E, return the rank of the expression.  */

static long
get_rank (tree e)
{
  /* Constants have rank 0.  */
  if (is_gimple_min_invariant (e))
    return 0;

  /* SSA_NAME's have the rank of the expression they are the result
     of.
     For globals and uninitialized values, the rank is 0.
     For function arguments, use the pre-setup rank.
     For PHI nodes, stores, asm statements, etc, we use the rank of
     the BB.
     For simple operations, the rank is the maximum rank of any of
     its operands, or the bb_rank, whichever is less.
     I make no claims that this is optimal, however, it gives good
     results.  */

  if (TREE_CODE (e) == SSA_NAME)
    {
      gimple stmt;
      long rank, maxrank;
      int i, n;

      if (TREE_CODE (SSA_NAME_VAR (e)) == PARM_DECL
	  && SSA_NAME_IS_DEFAULT_DEF (e))
	return find_operand_rank (e);

      stmt = SSA_NAME_DEF_STMT (e);
      if (gimple_bb (stmt) == NULL)
	return 0;

      if (!is_gimple_assign (stmt)
	  || gimple_vdef (stmt))
	return bb_rank[gimple_bb (stmt)->index];

      /* If we already have a rank for this expression, use that.  */
      rank = find_operand_rank (e);
      if (rank != -1)
	return rank;

      /* Otherwise, find the maximum rank for the operands, or the bb
	 rank, whichever is less.  */
      rank = 0;
      maxrank = bb_rank[gimple_bb (stmt)->index];
      if (gimple_assign_single_p (stmt))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  n = TREE_OPERAND_LENGTH (rhs);
	  if (n == 0)
	    rank = MAX (rank, get_rank (rhs));
	  else
	    {
	      for (i = 0;
		   i < n && TREE_OPERAND (rhs, i) && rank != maxrank; i++)
		rank = MAX (rank, get_rank (TREE_OPERAND (rhs, i)));
	    }
	}
      else
	{
	  n = gimple_num_ops (stmt);
	  for (i = 1; i < n && rank != maxrank; i++)
	    {
	      gcc_assert (gimple_op (stmt, i));
	      rank = MAX (rank, get_rank (gimple_op (stmt, i)));
	    }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Rank for ");
	  print_generic_expr (dump_file, e, 0);
	  fprintf (dump_file, " is %ld\n", (rank + 1));
	}

      /* Note the rank in the hashtable so we don't recompute it.  */
      insert_operand_rank (e, (rank + 1));
      return (rank + 1);
    }

  /* Globals, etc, are rank 0.  */
  return 0;
}
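
/* A quick illustration (hypothetical values; the real numbers are
   offset by bb_rank): in straight-line code

     x_1 = a_2 + b_3;
     y_4 = x_1 + c_5;

   y_4's rank is one more than x_1's, and in general each definition
   in a dependence chain ranks strictly higher than the names it
   depends on, which is what the sorting and rewriting below rely
   on.  */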

DEF_VEC_P(operand_entry_t);
DEF_VEC_ALLOC_P(operand_entry_t, heap);

/* We want integer ones to end up last no matter what, since they are
   the ones we can do the most with.  */
#define INTEGER_CONST_TYPE 1 << 3
#define FLOAT_CONST_TYPE 1 << 2
#define OTHER_CONST_TYPE 1 << 1

/* Classify an invariant tree into integer, float, or other, so that
   we can sort them to be near other constants of the same type.  */
static inline int
constant_type (tree t)
{
  if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
    return INTEGER_CONST_TYPE;
  else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (t)))
    return FLOAT_CONST_TYPE;
  else
    return OTHER_CONST_TYPE;
}

/* qsort comparison function to sort operand entries PA and PB by rank
   so that the sorted array is ordered by rank in decreasing order.  */
static int
sort_by_operand_rank (const void *pa, const void *pb)
{
  const operand_entry_t oea = *(const operand_entry_t *)pa;
  const operand_entry_t oeb = *(const operand_entry_t *)pb;

  /* It's nicer for optimize_expression if constants that are likely
     to fold when added/multiplied/whatever are put next to each
     other.  Since all constants have rank 0, order them by type.  */
  if (oeb->rank == 0 && oea->rank == 0)
    {
      if (constant_type (oeb->op) != constant_type (oea->op))
	return constant_type (oeb->op) - constant_type (oea->op);
      else
	/* To make sorting result stable, we use unique IDs to determine
	   order.  */
	return oeb->id - oea->id;
    }

  /* Lastly, make sure the versions that are the same go next to each
     other.  We use SSA_NAME_VERSION because it's stable.  */
  if ((oeb->rank - oea->rank == 0)
      && TREE_CODE (oea->op) == SSA_NAME
      && TREE_CODE (oeb->op) == SSA_NAME)
    {
      if (SSA_NAME_VERSION (oeb->op) != SSA_NAME_VERSION (oea->op))
	return SSA_NAME_VERSION (oeb->op) - SSA_NAME_VERSION (oea->op);
      else
	return oeb->id - oea->id;
    }

  if (oeb->rank != oea->rank)
    return oeb->rank - oea->rank;
  else
    return oeb->id - oea->id;
}

/* Add an operand entry to *OPS for the tree operand OP.  */

static void
add_to_ops_vec (VEC(operand_entry_t, heap) **ops, tree op)
{
  operand_entry_t oe = (operand_entry_t) pool_alloc (operand_entry_pool);

  oe->op = op;
  oe->rank = get_rank (op);
  oe->id = next_operand_entry_id++;
  VEC_safe_push (operand_entry_t, heap, *ops, oe);
}

/* Return true if STMT is a reassociable operation containing a binary
   operation with tree code CODE, and is inside LOOP.  */

static bool
is_reassociable_op (gimple stmt, enum tree_code code, struct loop *loop)
{
  basic_block bb = gimple_bb (stmt);

  if (gimple_bb (stmt) == NULL)
    return false;

  if (!flow_bb_inside_loop_p (loop, bb))
    return false;

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == code
      && has_single_use (gimple_assign_lhs (stmt)))
    return true;

  return false;
}

/* Given NAME, if NAME is defined by a unary operation OPCODE, return the
   operand of that operation.  Otherwise, return NULL.  */

static tree
get_unary_op (tree name, enum tree_code opcode)
{
  gimple stmt = SSA_NAME_DEF_STMT (name);

  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  if (gimple_assign_rhs_code (stmt) == opcode)
    return gimple_assign_rhs1 (stmt);
  return NULL_TREE;
}

/* If CURR and LAST are a pair of ops that OPCODE allows us to
   eliminate through equivalences, do so, remove them from OPS, and
   return true.  Otherwise, return false.  */

static bool
eliminate_duplicate_pair (enum tree_code opcode,
			  VEC (operand_entry_t, heap) **ops,
			  bool *all_done,
			  unsigned int i,
			  operand_entry_t curr,
			  operand_entry_t last)
{
  /* If we have two of the same op, and the opcode is &, |, min, or max,
     we can eliminate one of them.
     If we have two of the same op, and the opcode is ^, we can
     eliminate both of them.  */

  if (last && last->op == curr->op)
    {
      switch (opcode)
	{
	case MAX_EXPR:
	case MIN_EXPR:
	case BIT_IOR_EXPR:
	case BIT_AND_EXPR:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Equivalence: ");
	      print_generic_expr (dump_file, curr->op, 0);
	      fprintf (dump_file, " [&|minmax] ");
	      print_generic_expr (dump_file, last->op, 0);
	      fprintf (dump_file, " -> ");
	      print_generic_stmt (dump_file, last->op, 0);
	    }

	  VEC_ordered_remove (operand_entry_t, *ops, i);
	  reassociate_stats.ops_eliminated ++;

	  return true;

	case BIT_XOR_EXPR:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Equivalence: ");
	      print_generic_expr (dump_file, curr->op, 0);
	      fprintf (dump_file, " ^ ");
	      print_generic_expr (dump_file, last->op, 0);
	      fprintf (dump_file, " -> nothing\n");
	    }

	  reassociate_stats.ops_eliminated += 2;

	  if (VEC_length (operand_entry_t, *ops) == 2)
	    {
	      VEC_free (operand_entry_t, heap, *ops);
	      *ops = NULL;
	      add_to_ops_vec (ops, build_zero_cst (TREE_TYPE (last->op)));
	      *all_done = true;
	    }
	  else
	    {
	      VEC_ordered_remove (operand_entry_t, *ops, i-1);
	      VEC_ordered_remove (operand_entry_t, *ops, i-1);
	    }

	  return true;

	default:
	  break;
	}
    }
  return false;
}
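
/* For illustration (hypothetical operand lists): with OPS = {a, a, b}
   and OPCODE BIT_AND_EXPR one copy is dropped, so a & a & b becomes
   a & b; with BIT_XOR_EXPR both copies are dropped, so a ^ a ^ b
   becomes just b, and a ^ a alone becomes 0.  */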

static VEC(tree, heap) *plus_negates;

/* If OPCODE is PLUS_EXPR and CURR->OP is a negate expression or a bitwise
   not expression, look in OPS for a corresponding positive operation to
   cancel it out.  If we find one, remove the other from OPS, replace
   OPS[CURRINDEX] with 0 or -1, respectively, and return true.  Otherwise,
   return false.  */

static bool
eliminate_plus_minus_pair (enum tree_code opcode,
			   VEC (operand_entry_t, heap) **ops,
			   unsigned int currindex,
			   operand_entry_t curr)
{
  tree negateop;
  tree notop;
  unsigned int i;
  operand_entry_t oe;

  if (opcode != PLUS_EXPR || TREE_CODE (curr->op) != SSA_NAME)
    return false;

  negateop = get_unary_op (curr->op, NEGATE_EXPR);
  notop = get_unary_op (curr->op, BIT_NOT_EXPR);
  if (negateop == NULL_TREE && notop == NULL_TREE)
    return false;

  /* Any non-negated version will have a rank that is one less than
     the current rank.  So once we hit those ranks, if we don't find
     one, we can stop.  */

  for (i = currindex + 1;
       VEC_iterate (operand_entry_t, *ops, i, oe)
       && oe->rank >= curr->rank - 1;
       i++)
    {
      if (oe->op == negateop)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Equivalence: ");
	      print_generic_expr (dump_file, negateop, 0);
	      fprintf (dump_file, " + -");
	      print_generic_expr (dump_file, oe->op, 0);
	      fprintf (dump_file, " -> 0\n");
	    }

	  VEC_ordered_remove (operand_entry_t, *ops, i);
	  add_to_ops_vec (ops, build_zero_cst (TREE_TYPE (oe->op)));
	  VEC_ordered_remove (operand_entry_t, *ops, currindex);
	  reassociate_stats.ops_eliminated ++;

	  return true;
	}
      else if (oe->op == notop)
	{
	  tree op_type = TREE_TYPE (oe->op);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Equivalence: ");
	      print_generic_expr (dump_file, notop, 0);
	      fprintf (dump_file, " + ~");
	      print_generic_expr (dump_file, oe->op, 0);
	      fprintf (dump_file, " -> -1\n");
	    }

	  VEC_ordered_remove (operand_entry_t, *ops, i);
	  add_to_ops_vec (ops, build_int_cst_type (op_type, -1));
	  VEC_ordered_remove (operand_entry_t, *ops, currindex);
	  reassociate_stats.ops_eliminated ++;

	  return true;
	}
    }

  /* CURR->OP is a negate expr in a plus expr: save it for later
     inspection in repropagate_negates().  */
  if (negateop != NULL_TREE)
    VEC_safe_push (tree, heap, plus_negates, curr->op);

  return false;
}
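
/* For illustration: in the operand list for t = x + y + -x this
   cancels x against -x and pushes a 0 entry (cleaned up later by
   eliminate_using_constants), so t reduces to y; likewise x + ~x
   becomes -1, by the identity ~x = -x - 1.  */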

/* If OPCODE is BIT_IOR_EXPR or BIT_AND_EXPR, and CURR->OP is really a
   bitwise not expression, look in OPS for a corresponding operand to
   cancel it out.  If we find one, the whole list collapses: replace
   OPS with a single 0 (for BIT_AND_EXPR) or all-ones value (for
   BIT_IOR_EXPR) and return true.  Otherwise, return false.  */

static bool
eliminate_not_pairs (enum tree_code opcode,
		     VEC (operand_entry_t, heap) **ops,
		     unsigned int currindex,
		     operand_entry_t curr)
{
  tree notop;
  unsigned int i;
  operand_entry_t oe;

  if ((opcode != BIT_IOR_EXPR && opcode != BIT_AND_EXPR)
      || TREE_CODE (curr->op) != SSA_NAME)
    return false;

  notop = get_unary_op (curr->op, BIT_NOT_EXPR);
  if (notop == NULL_TREE)
    return false;

  /* Any non-not version will have a rank that is one less than
     the current rank.  So once we hit those ranks, if we don't find
     one, we can stop.  */

  for (i = currindex + 1;
       VEC_iterate (operand_entry_t, *ops, i, oe)
       && oe->rank >= curr->rank - 1;
       i++)
    {
      if (oe->op == notop)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Equivalence: ");
	      print_generic_expr (dump_file, notop, 0);
	      if (opcode == BIT_AND_EXPR)
		fprintf (dump_file, " & ~");
	      else if (opcode == BIT_IOR_EXPR)
		fprintf (dump_file, " | ~");
	      print_generic_expr (dump_file, oe->op, 0);
	      if (opcode == BIT_AND_EXPR)
		fprintf (dump_file, " -> 0\n");
	      else if (opcode == BIT_IOR_EXPR)
		fprintf (dump_file, " -> -1\n");
	    }

	  if (opcode == BIT_AND_EXPR)
	    oe->op = build_zero_cst (TREE_TYPE (oe->op));
	  else if (opcode == BIT_IOR_EXPR)
	    oe->op = build_low_bits_mask (TREE_TYPE (oe->op),
					  TYPE_PRECISION (TREE_TYPE (oe->op)));

	  reassociate_stats.ops_eliminated
	    += VEC_length (operand_entry_t, *ops) - 1;
	  VEC_free (operand_entry_t, heap, *ops);
	  *ops = NULL;
	  VEC_safe_push (operand_entry_t, heap, *ops, oe);
	  return true;
	}
    }

  return false;
}
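
/* For illustration: a & b & ~a collapses the whole operand list to 0,
   and a | b | ~a collapses it to the all-ones value, since no other
   operand can affect the result.  */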

/* Use constant value that may be present in OPS to try to eliminate
   operands.  Note that this function is only really used when we've
   eliminated ops for other reasons, or merged constants.  Across
   single statements, fold already does all of this, plus more.  There
   is little point in duplicating logic, so I've only included the
   identities that I could ever construct testcases to trigger.  */

static void
eliminate_using_constants (enum tree_code opcode,
			   VEC(operand_entry_t, heap) **ops)
{
  operand_entry_t oelast = VEC_last (operand_entry_t, *ops);
  tree type = TREE_TYPE (oelast->op);

  if (oelast->rank == 0
      && (INTEGRAL_TYPE_P (type) || FLOAT_TYPE_P (type)))
    {
      switch (opcode)
	{
	case BIT_AND_EXPR:
	  if (integer_zerop (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found & 0, removing all other ops\n");

		  reassociate_stats.ops_eliminated
		    += VEC_length (operand_entry_t, *ops) - 1;

		  VEC_free (operand_entry_t, heap, *ops);
		  *ops = NULL;
		  VEC_safe_push (operand_entry_t, heap, *ops, oelast);
		  return;
		}
	    }
	  else if (integer_all_onesp (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found & -1, removing\n");
		  VEC_pop (operand_entry_t, *ops);
		  reassociate_stats.ops_eliminated++;
		}
	    }
	  break;
	case BIT_IOR_EXPR:
	  if (integer_all_onesp (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found | -1, removing all other ops\n");

		  reassociate_stats.ops_eliminated
		    += VEC_length (operand_entry_t, *ops) - 1;

		  VEC_free (operand_entry_t, heap, *ops);
		  *ops = NULL;
		  VEC_safe_push (operand_entry_t, heap, *ops, oelast);
		  return;
		}
	    }
	  else if (integer_zerop (oelast->op))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found | 0, removing\n");
		  VEC_pop (operand_entry_t, *ops);
		  reassociate_stats.ops_eliminated++;
		}
	    }
	  break;
	case MULT_EXPR:
	  if (integer_zerop (oelast->op)
	      || (FLOAT_TYPE_P (type)
		  && !HONOR_NANS (TYPE_MODE (type))
		  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
		  && real_zerop (oelast->op)))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found * 0, removing all other ops\n");

		  reassociate_stats.ops_eliminated
		    += VEC_length (operand_entry_t, *ops) - 1;
		  VEC_free (operand_entry_t, heap, *ops);
		  *ops = NULL;
		  VEC_safe_push (operand_entry_t, heap, *ops, oelast);
		  return;
		}
	    }
	  else if (integer_onep (oelast->op)
		   || (FLOAT_TYPE_P (type)
		       && !HONOR_SNANS (TYPE_MODE (type))
		       && real_onep (oelast->op)))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found * 1, removing\n");
		  VEC_pop (operand_entry_t, *ops);
		  reassociate_stats.ops_eliminated++;
		  return;
		}
	    }
	  break;
	case BIT_XOR_EXPR:
	case PLUS_EXPR:
	case MINUS_EXPR:
	  if (integer_zerop (oelast->op)
	      || (FLOAT_TYPE_P (type)
		  && (opcode == PLUS_EXPR || opcode == MINUS_EXPR)
		  && fold_real_zero_addition_p (type, oelast->op,
						opcode == MINUS_EXPR)))
	    {
	      if (VEC_length (operand_entry_t, *ops) != 1)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Found [|^+] 0, removing\n");
		  VEC_pop (operand_entry_t, *ops);
		  reassociate_stats.ops_eliminated++;
		  return;
		}
	    }
	  break;
	default:
	  break;
	}
    }
}
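
/* For illustration (hypothetical operand lists): x & 0 discards every
   other operand and leaves just 0, x & -1 merely drops the constant,
   x * 0 collapses to 0, x * 1 drops the 1, and x + 0 drops the 0 (for
   floating point only under the flags checked above).  */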

static void linearize_expr_tree (VEC(operand_entry_t, heap) **, gimple,
				 bool, bool);

/* Structure for tracking and counting operands.  */
typedef struct oecount_s {
  int cnt;
  int id;
  enum tree_code oecode;
  tree op;
} oecount;

DEF_VEC_O(oecount);
DEF_VEC_ALLOC_O(oecount,heap);

/* The heap for the oecount hashtable and the sorted list of operands.  */
static VEC (oecount, heap) *cvec;

/* Hash function for oecount.  */

static hashval_t
oecount_hash (const void *p)
{
  const oecount *c = VEC_index (oecount, cvec, (size_t)p - 42);
  return htab_hash_pointer (c->op) ^ (hashval_t)c->oecode;
}

/* Comparison function for oecount.  */

static int
oecount_eq (const void *p1, const void *p2)
{
  const oecount *c1 = VEC_index (oecount, cvec, (size_t)p1 - 42);
  const oecount *c2 = VEC_index (oecount, cvec, (size_t)p2 - 42);
  return (c1->oecode == c2->oecode
	  && c1->op == c2->op);
}
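
/* Note on the magic constants above: the hashtable stores 0-based
   indices into CVEC biased by 42 and cast to pointers, so a stored
   value can never be 0 or 1, which htab reserves for empty and
   deleted slots; the "- 42" in the hash and equality callbacks
   recovers the vector index.  */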

/* Comparison function for qsort sorting oecount elements by count.  */

static int
oecount_cmp (const void *p1, const void *p2)
{
  const oecount *c1 = (const oecount *)p1;
  const oecount *c2 = (const oecount *)p2;
  if (c1->cnt != c2->cnt)
    return c1->cnt - c2->cnt;
  else
    /* If counts are identical, use unique IDs to stabilize qsort.  */
    return c1->id - c2->id;
}

/* Walks the linear chain with result *DEF searching for an operation
   with operand OP and code OPCODE removing that from the chain.  *DEF
   is updated if there is only one operand but no operation left.  */

static void
zero_one_operation (tree *def, enum tree_code opcode, tree op)
{
  gimple stmt = SSA_NAME_DEF_STMT (*def);

  do
    {
      tree name = gimple_assign_rhs1 (stmt);

      /* If this is the operation we look for and one of the operands
	 is ours simply propagate the other operand into the stmts
	 single use.  */
      if (gimple_assign_rhs_code (stmt) == opcode
	  && (name == op
	      || gimple_assign_rhs2 (stmt) == op))
	{
	  gimple use_stmt;
	  use_operand_p use;
	  gimple_stmt_iterator gsi;
	  if (name == op)
	    name = gimple_assign_rhs2 (stmt);
	  gcc_assert (has_single_use (gimple_assign_lhs (stmt)));
	  single_imm_use (gimple_assign_lhs (stmt), &use, &use_stmt);
	  if (gimple_assign_lhs (stmt) == *def)
	    *def = name;
	  SET_USE (use, name);
	  if (TREE_CODE (name) != SSA_NAME)
	    update_stmt (use_stmt);
	  gsi = gsi_for_stmt (stmt);
	  gsi_remove (&gsi, true);
	  release_defs (stmt);
	  return;
	}

      /* Continue walking the chain.  */
      gcc_assert (name != op
		  && TREE_CODE (name) == SSA_NAME);
      stmt = SSA_NAME_DEF_STMT (name);
    }
  while (1);
}
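
/* For illustration (a hypothetical chain): with *DEF = x_4 defined by

     x_3 = a * b;
     x_4 = x_3 * c;

   calling this with OPCODE = MULT_EXPR and OP = b propagates a into
   the single use of x_3, leaving x_4 = a * c.  */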

/* Builds one statement performing OP1 OPCODE OP2 using TMPVAR for
   the result.  Places the statement after the definition of either
   OP1 or OP2.  Returns the new statement.  */

static gimple
build_and_add_sum (tree tmpvar, tree op1, tree op2, enum tree_code opcode)
{
  gimple op1def = NULL, op2def = NULL;
  gimple_stmt_iterator gsi;
  tree op;
  gimple sum;

  /* Create the addition statement.  */
  sum = gimple_build_assign_with_ops (opcode, tmpvar, op1, op2);
  op = make_ssa_name (tmpvar, sum);
  gimple_assign_set_lhs (sum, op);

  /* Find an insertion place and insert.  */
  if (TREE_CODE (op1) == SSA_NAME)
    op1def = SSA_NAME_DEF_STMT (op1);
  if (TREE_CODE (op2) == SSA_NAME)
    op2def = SSA_NAME_DEF_STMT (op2);
  if ((!op1def || gimple_nop_p (op1def))
      && (!op2def || gimple_nop_p (op2def)))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
      gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
    }
  else if ((!op1def || gimple_nop_p (op1def))
	   || (op2def && !gimple_nop_p (op2def)
	       && stmt_dominates_stmt_p (op1def, op2def)))
    {
      if (gimple_code (op2def) == GIMPLE_PHI)
	{
	  gsi = gsi_after_labels (gimple_bb (op2def));
	  gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
	}
      else
	{
	  if (!stmt_ends_bb_p (op2def))
	    {
	      gsi = gsi_for_stmt (op2def);
	      gsi_insert_after (&gsi, sum, GSI_NEW_STMT);
	    }
	  else
	    {
	      edge e;
	      edge_iterator ei;

	      FOR_EACH_EDGE (e, ei, gimple_bb (op2def)->succs)
		if (e->flags & EDGE_FALLTHRU)
		  gsi_insert_on_edge_immediate (e, sum);
	    }
	}
    }
  else
    {
      if (gimple_code (op1def) == GIMPLE_PHI)
	{
	  gsi = gsi_after_labels (gimple_bb (op1def));
	  gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
	}
      else
	{
	  if (!stmt_ends_bb_p (op1def))
	    {
	      gsi = gsi_for_stmt (op1def);
	      gsi_insert_after (&gsi, sum, GSI_NEW_STMT);
	    }
	  else
	    {
	      edge e;
	      edge_iterator ei;

	      FOR_EACH_EDGE (e, ei, gimple_bb (op1def)->succs)
		if (e->flags & EDGE_FALLTHRU)
		  gsi_insert_on_edge_immediate (e, sum);
	    }
	}
    }
  update_stmt (sum);

  return sum;
}

/* Perform un-distribution of divisions and multiplications.
   A * X + B * X is transformed into (A + B) * X and A / X + B / X
   to (A + B) / X for real X.

   The algorithm is organized as follows.

    - First we walk the addition chain *OPS looking for summands that
      are defined by a multiplication or a real division.  This results
      in the candidates bitmap with relevant indices into *OPS.

    - Second we build the chains of multiplications or divisions for
      these candidates, counting the number of occurrences of (operand, code)
      pairs in all of the candidates chains.

    - Third we sort the (operand, code) pairs by number of occurrence and
      process them starting with the pair with the most uses.

      * For each such pair we walk the candidates again to build a
	second candidate bitmap noting all multiplication/division chains
	that have at least one occurrence of (operand, code).

      * We build an alternate addition chain only covering these
	candidates with one (operand, code) operation removed from their
	multiplication/division chain.

      * The first candidate gets replaced by the alternate addition chain
	multiplied/divided by the operand.

      * All candidate chains get disabled for further processing and
	processing of (operand, code) pairs continues.

   The alternate addition chains built are re-processed by the main
   reassociation algorithm which allows optimizing a * x * y + b * y * x
   to (a + b) * x * y in one invocation of the reassociation pass.  */

static bool
undistribute_ops_list (enum tree_code opcode,
		       VEC (operand_entry_t, heap) **ops, struct loop *loop)
{
  unsigned int length = VEC_length (operand_entry_t, *ops);
  operand_entry_t oe1;
  unsigned i, j;
  sbitmap candidates, candidates2;
  unsigned nr_candidates, nr_candidates2;
  sbitmap_iterator sbi0;
  VEC (operand_entry_t, heap) **subops;
  htab_t ctable;
  bool changed = false;
  int next_oecount_id = 0;

  if (length <= 1
      || opcode != PLUS_EXPR)
    return false;

  /* Build a list of candidates to process.  */
  candidates = sbitmap_alloc (length);
  sbitmap_zero (candidates);
  nr_candidates = 0;
  FOR_EACH_VEC_ELT (operand_entry_t, *ops, i, oe1)
    {
      enum tree_code dcode;
      gimple oe1def;

      if (TREE_CODE (oe1->op) != SSA_NAME)
	continue;
      oe1def = SSA_NAME_DEF_STMT (oe1->op);
      if (!is_gimple_assign (oe1def))
	continue;
      dcode = gimple_assign_rhs_code (oe1def);
      if ((dcode != MULT_EXPR
	   && dcode != RDIV_EXPR)
	  || !is_reassociable_op (oe1def, dcode, loop))
	continue;

      SET_BIT (candidates, i);
      nr_candidates++;
    }

  if (nr_candidates < 2)
    {
      sbitmap_free (candidates);
      return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "searching for un-distribute opportunities ");
      print_generic_expr (dump_file,
	VEC_index (operand_entry_t, *ops,
		   sbitmap_first_set_bit (candidates))->op, 0);
      fprintf (dump_file, " %d\n", nr_candidates);
    }

  /* Build linearized sub-operand lists and the counting table.  */
  cvec = NULL;
  ctable = htab_create (15, oecount_hash, oecount_eq, NULL);
  subops = XCNEWVEC (VEC (operand_entry_t, heap) *,
		     VEC_length (operand_entry_t, *ops));
  EXECUTE_IF_SET_IN_SBITMAP (candidates, 0, i, sbi0)
    {
      gimple oedef;
      enum tree_code oecode;
      unsigned j;

      oedef = SSA_NAME_DEF_STMT (VEC_index (operand_entry_t, *ops, i)->op);
      oecode = gimple_assign_rhs_code (oedef);
      linearize_expr_tree (&subops[i], oedef,
			   associative_tree_code (oecode), false);

      FOR_EACH_VEC_ELT (operand_entry_t, subops[i], j, oe1)
	{
	  oecount c;
	  void **slot;
	  size_t idx;
	  c.oecode = oecode;
	  c.cnt = 1;
	  c.id = next_oecount_id++;
	  c.op = oe1->op;
	  VEC_safe_push (oecount, heap, cvec, &c);
	  idx = VEC_length (oecount, cvec) + 41;
	  slot = htab_find_slot (ctable, (void *)idx, INSERT);
	  if (!*slot)
	    {
	      *slot = (void *)idx;
	    }
	  else
	    {
	      VEC_pop (oecount, cvec);
	      VEC_index (oecount, cvec, (size_t)*slot - 42)->cnt++;
	    }
	}
    }
  htab_delete (ctable);

  /* Sort the counting table.  */
  VEC_qsort (oecount, cvec, oecount_cmp);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      oecount *c;
      fprintf (dump_file, "Candidates:\n");
      FOR_EACH_VEC_ELT (oecount, cvec, j, c)
	{
	  fprintf (dump_file, "  %u %s: ", c->cnt,
		   c->oecode == MULT_EXPR
		   ? "*" : c->oecode == RDIV_EXPR ? "/" : "?");
	  print_generic_expr (dump_file, c->op, 0);
	  fprintf (dump_file, "\n");
	}
    }

  /* Process the (operand, code) pairs in order of most occurrence.  */
  candidates2 = sbitmap_alloc (length);
  while (!VEC_empty (oecount, cvec))
    {
      oecount *c = VEC_last (oecount, cvec);
      if (c->cnt < 2)
	break;

      /* Now collect the operands in the outer chain that contain
	 the common operand in their inner chain.  */
      sbitmap_zero (candidates2);
      nr_candidates2 = 0;
      EXECUTE_IF_SET_IN_SBITMAP (candidates, 0, i, sbi0)
	{
	  gimple oedef;
	  enum tree_code oecode;
	  unsigned j;
	  tree op = VEC_index (operand_entry_t, *ops, i)->op;

	  /* If we undistributed in this chain already this may be
	     a constant.  */
	  if (TREE_CODE (op) != SSA_NAME)
	    continue;

	  oedef = SSA_NAME_DEF_STMT (op);
	  oecode = gimple_assign_rhs_code (oedef);
	  if (oecode != c->oecode)
	    continue;

	  FOR_EACH_VEC_ELT (operand_entry_t, subops[i], j, oe1)
	    {
	      if (oe1->op == c->op)
		{
		  SET_BIT (candidates2, i);
		  ++nr_candidates2;
		  break;
		}
	    }
	}

      if (nr_candidates2 >= 2)
	{
	  operand_entry_t oe1, oe2;
	  tree tmpvar;
	  gimple prod;
	  int first = sbitmap_first_set_bit (candidates2);

	  /* Build the new addition chain.  */
	  oe1 = VEC_index (operand_entry_t, *ops, first);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Building (");
	      print_generic_expr (dump_file, oe1->op, 0);
	    }
	  tmpvar = create_tmp_reg (TREE_TYPE (oe1->op), NULL);
	  add_referenced_var (tmpvar);
	  zero_one_operation (&oe1->op, c->oecode, c->op);
	  EXECUTE_IF_SET_IN_SBITMAP (candidates2, first+1, i, sbi0)
	    {
	      gimple sum;
	      oe2 = VEC_index (operand_entry_t, *ops, i);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, " + ");
		  print_generic_expr (dump_file, oe2->op, 0);
		}
	      zero_one_operation (&oe2->op, c->oecode, c->op);
	      sum = build_and_add_sum (tmpvar, oe1->op, oe2->op, opcode);
	      oe2->op = build_zero_cst (TREE_TYPE (oe2->op));
	      oe2->rank = 0;
	      oe1->op = gimple_get_lhs (sum);
	    }

	  /* Apply the multiplication/division.  */
	  prod = build_and_add_sum (tmpvar, oe1->op, c->op, c->oecode);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, ") %s ", c->oecode == MULT_EXPR ? "*" : "/");
	      print_generic_expr (dump_file, c->op, 0);
	      fprintf (dump_file, "\n");
	    }

	  /* Record it in the addition chain and disable further
	     undistribution with this op.  */
	  oe1->op = gimple_assign_lhs (prod);
	  oe1->rank = get_rank (oe1->op);
	  VEC_free (operand_entry_t, heap, subops[first]);

	  changed = true;
	}

      VEC_pop (oecount, cvec);
    }

  for (i = 0; i < VEC_length (operand_entry_t, *ops); ++i)
    VEC_free (operand_entry_t, heap, subops[i]);
  free (subops);
  VEC_free (oecount, heap, cvec);
  sbitmap_free (candidates);
  sbitmap_free (candidates2);

  return changed;
}
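
/* A worked illustration (hypothetical chain): for the operand list
   {a*x, b*x, c} the pair (x, MULT_EXPR) occurs twice, so x is removed
   from both chains by zero_one_operation, a sum a + b is built, and
   the list becomes {(a + b)*x, 0, c}, which later constant
   elimination cleans up.  */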

/* If OPCODE is BIT_IOR_EXPR or BIT_AND_EXPR and CURR is a comparison
   expression, examine the other OPS to see if any of them are comparisons
   of the same values, which we may be able to combine or eliminate.
   For example, we can rewrite (a < b) | (a == b) as (a <= b).  */

static bool
eliminate_redundant_comparison (enum tree_code opcode,
				VEC (operand_entry_t, heap) **ops,
				unsigned int currindex,
				operand_entry_t curr)
{
  tree op1, op2;
  enum tree_code lcode, rcode;
  gimple def1, def2;
  int i;
  operand_entry_t oe;

  if (opcode != BIT_IOR_EXPR && opcode != BIT_AND_EXPR)
    return false;

  /* Check that CURR is a comparison.  */
  if (TREE_CODE (curr->op) != SSA_NAME)
    return false;
  def1 = SSA_NAME_DEF_STMT (curr->op);
  if (!is_gimple_assign (def1))
    return false;
  lcode = gimple_assign_rhs_code (def1);
  if (TREE_CODE_CLASS (lcode) != tcc_comparison)
    return false;
  op1 = gimple_assign_rhs1 (def1);
  op2 = gimple_assign_rhs2 (def1);

  /* Now look for a similar comparison in the remaining OPS.  */
  for (i = currindex + 1;
       VEC_iterate (operand_entry_t, *ops, i, oe);
       i++)
    {
      tree t;

      if (TREE_CODE (oe->op) != SSA_NAME)
	continue;
      def2 = SSA_NAME_DEF_STMT (oe->op);
      if (!is_gimple_assign (def2))
	continue;
      rcode = gimple_assign_rhs_code (def2);
      if (TREE_CODE_CLASS (rcode) != tcc_comparison)
	continue;

      /* If we got here, we have a match.  See if we can combine the
	 two comparisons.  */
      if (opcode == BIT_IOR_EXPR)
	t = maybe_fold_or_comparisons (lcode, op1, op2,
				       rcode, gimple_assign_rhs1 (def2),
				       gimple_assign_rhs2 (def2));
      else
	t = maybe_fold_and_comparisons (lcode, op1, op2,
					rcode, gimple_assign_rhs1 (def2),
					gimple_assign_rhs2 (def2));
      if (!t)
	continue;

      /* maybe_fold_and_comparisons and maybe_fold_or_comparisons
	 always give us a boolean_type_node value back.  If the original
	 BIT_AND_EXPR or BIT_IOR_EXPR was of a wider integer type,
	 we need to convert.  */
      if (!useless_type_conversion_p (TREE_TYPE (curr->op), TREE_TYPE (t)))
	t = fold_convert (TREE_TYPE (curr->op), t);

      if (TREE_CODE (t) != INTEGER_CST
	  && !operand_equal_p (t, curr->op, 0))
	{
	  enum tree_code subcode;
	  tree newop1, newop2;
	  if (!COMPARISON_CLASS_P (t))
	    continue;
	  extract_ops_from_tree (t, &subcode, &newop1, &newop2);
	  STRIP_USELESS_TYPE_CONVERSION (newop1);
	  STRIP_USELESS_TYPE_CONVERSION (newop2);
	  if (!is_gimple_val (newop1) || !is_gimple_val (newop2))
	    continue;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Equivalence: ");
	  print_generic_expr (dump_file, curr->op, 0);
	  fprintf (dump_file, " %s ", op_symbol_code (opcode));
	  print_generic_expr (dump_file, oe->op, 0);
	  fprintf (dump_file, " -> ");
	  print_generic_expr (dump_file, t, 0);
	  fprintf (dump_file, "\n");
	}

      /* Now we can delete oe, as it has been subsumed by the new combined
	 expression t.  */
      VEC_ordered_remove (operand_entry_t, *ops, i);
      reassociate_stats.ops_eliminated ++;

      /* If t is the same as curr->op, we're done.  Otherwise we must
	 replace curr->op with t.  Special case is if we got a constant
	 back, in which case we add it to the end instead of in place of
	 the current entry.  */
      if (TREE_CODE (t) == INTEGER_CST)
	{
	  VEC_ordered_remove (operand_entry_t, *ops, currindex);
	  add_to_ops_vec (ops, t);
	}
      else if (!operand_equal_p (t, curr->op, 0))
	{
	  tree tmpvar;
	  gimple sum;
	  enum tree_code subcode;
	  tree newop1;
	  tree newop2;
	  gcc_assert (COMPARISON_CLASS_P (t));
	  tmpvar = create_tmp_var (TREE_TYPE (t), NULL);
	  add_referenced_var (tmpvar);
	  extract_ops_from_tree (t, &subcode, &newop1, &newop2);
	  STRIP_USELESS_TYPE_CONVERSION (newop1);
	  STRIP_USELESS_TYPE_CONVERSION (newop2);
	  gcc_checking_assert (is_gimple_val (newop1)
			       && is_gimple_val (newop2));
	  sum = build_and_add_sum (tmpvar, newop1, newop2, subcode);
	  curr->op = gimple_get_lhs (sum);
	}
      return true;
    }

  return false;
}
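
/* For illustration: (a < b) | (a >= b) folds to the constant 1 and
   takes the INTEGER_CST path above, while (a < b) & (a <= b) folds
   back to a < b itself, so only the redundant operand is removed.  */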

/* Perform various identities and other optimizations on the list of
   operand entries, stored in OPS.  The tree code for the binary
   operation between all the operands is OPCODE.  */

static void
optimize_ops_list (enum tree_code opcode,
		   VEC (operand_entry_t, heap) **ops)
{
  unsigned int length = VEC_length (operand_entry_t, *ops);
  unsigned int i;
  operand_entry_t oe;
  operand_entry_t oelast = NULL;
  bool iterate = false;

  if (length == 1)
    return;

  oelast = VEC_last (operand_entry_t, *ops);

  /* If the last two are constants, pop the constants off, merge them
     and try the next two.  */
  if (oelast->rank == 0 && is_gimple_min_invariant (oelast->op))
    {
      operand_entry_t oelm1 = VEC_index (operand_entry_t, *ops, length - 2);

      if (oelm1->rank == 0
	  && is_gimple_min_invariant (oelm1->op)
	  && useless_type_conversion_p (TREE_TYPE (oelm1->op),
					TREE_TYPE (oelast->op)))
	{
	  tree folded = fold_binary (opcode, TREE_TYPE (oelm1->op),
				     oelm1->op, oelast->op);

	  if (folded && is_gimple_min_invariant (folded))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "Merging constants\n");

	      VEC_pop (operand_entry_t, *ops);
	      VEC_pop (operand_entry_t, *ops);

	      add_to_ops_vec (ops, folded);
	      reassociate_stats.constants_eliminated++;

	      optimize_ops_list (opcode, ops);
	      return;
	    }
	}
    }

  eliminate_using_constants (opcode, ops);
  oelast = NULL;

  for (i = 0; VEC_iterate (operand_entry_t, *ops, i, oe);)
    {
      bool done = false;

      if (eliminate_not_pairs (opcode, ops, i, oe))
	return;
      if (eliminate_duplicate_pair (opcode, ops, &done, i, oe, oelast)
	  || (!done && eliminate_plus_minus_pair (opcode, ops, i, oe))
	  || (!done && eliminate_redundant_comparison (opcode, ops, i, oe)))
	{
	  if (done)
	    return;
	  iterate = true;
	  oelast = NULL;
	  continue;
	}
      oelast = oe;
      i++;
    }

  length = VEC_length (operand_entry_t, *ops);
  oelast = VEC_last (operand_entry_t, *ops);

  if (iterate)
    optimize_ops_list (opcode, ops);
}

/* Return true if OPERAND is defined by a PHI node which uses the LHS
   of STMT in its operands.  This is also known as a "destructive
   update" operation.  */

static bool
is_phi_for_stmt (gimple stmt, tree operand)
{
  gimple def_stmt;
  tree lhs;
  use_operand_p arg_p;
  ssa_op_iter i;

  if (TREE_CODE (operand) != SSA_NAME)
    return false;

  lhs = gimple_assign_lhs (stmt);

  def_stmt = SSA_NAME_DEF_STMT (operand);
  if (gimple_code (def_stmt) != GIMPLE_PHI)
    return false;

  FOR_EACH_PHI_ARG (arg_p, def_stmt, i, SSA_OP_USE)
    if (lhs == USE_FROM_PTR (arg_p))
      return true;
  return false;
}

/* Remove def stmt of VAR if VAR has zero uses and recurse
   on rhs1 operand if so.  */

static void
remove_visited_stmt_chain (tree var)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  while (1)
    {
      if (TREE_CODE (var) != SSA_NAME || !has_zero_uses (var))
	return;
      stmt = SSA_NAME_DEF_STMT (var);
      if (!is_gimple_assign (stmt)
	  || !gimple_visited_p (stmt))
	return;
      var = gimple_assign_rhs1 (stmt);
      gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      release_defs (stmt);
    }
}

/* Recursively rewrite our linearized statements so that the operators
   match those in OPS[OPINDEX], putting the computation in rank
   order.  */

static void
rewrite_expr_tree (gimple stmt, unsigned int opindex,
		   VEC(operand_entry_t, heap) * ops, bool moved)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  operand_entry_t oe;

  /* If we have three operands left, then we want to make sure the ones
     that get the double binary op are the ones with the same rank.

     The alternative we try is to see if this is a destructive
     update style statement, which is like:
     b = phi (a, ...)
     a = c + b;
     In that case, we want to use the destructive update form to
     expose the possible vectorizer sum reduction opportunity.
     In that case, the third operand will be the phi node.

     We could, of course, try to be better as noted above, and do a
     lot of work to try to find these opportunities in >3 operand
     cases, but it is unlikely to be worth it.  */

  if (opindex + 3 == VEC_length (operand_entry_t, ops))
    {
      operand_entry_t oe1, oe2, oe3;

      oe1 = VEC_index (operand_entry_t, ops, opindex);
      oe2 = VEC_index (operand_entry_t, ops, opindex + 1);
      oe3 = VEC_index (operand_entry_t, ops, opindex + 2);

      if ((oe1->rank == oe2->rank
	   && oe2->rank != oe3->rank)
	  || (is_phi_for_stmt (stmt, oe3->op)
	      && !is_phi_for_stmt (stmt, oe1->op)
	      && !is_phi_for_stmt (stmt, oe2->op)))
	{
	  struct operand_entry temp = *oe3;
	  oe3->op = oe1->op;
	  oe3->rank = oe1->rank;
	  oe1->op = temp.op;
	  oe1->rank = temp.rank;
	}
      else if ((oe1->rank == oe3->rank
		&& oe2->rank != oe3->rank)
	       || (is_phi_for_stmt (stmt, oe2->op)
		   && !is_phi_for_stmt (stmt, oe1->op)
		   && !is_phi_for_stmt (stmt, oe3->op)))
	{
	  struct operand_entry temp = *oe2;
	  oe2->op = oe1->op;
	  oe2->rank = oe1->rank;
	  oe1->op = temp.op;
	  oe1->rank = temp.rank;
	}
    }

  /* The final recursion case for this function is that you have
     exactly two operations left.
     If we had exactly one op in the entire list to start with, we
     would have never called this function, and the tail recursion
     rewrites them one at a time.  */
  if (opindex + 2 == VEC_length (operand_entry_t, ops))
    {
      operand_entry_t oe1, oe2;

      oe1 = VEC_index (operand_entry_t, ops, opindex);
      oe2 = VEC_index (operand_entry_t, ops, opindex + 1);

      if (rhs1 != oe1->op || rhs2 != oe2->op)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Transforming ");
	      print_gimple_stmt (dump_file, stmt, 0, 0);
	    }

	  gimple_assign_set_rhs1 (stmt, oe1->op);
	  gimple_assign_set_rhs2 (stmt, oe2->op);
	  update_stmt (stmt);
	  if (rhs1 != oe1->op && rhs1 != oe2->op)
	    remove_visited_stmt_chain (rhs1);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " into ");
	      print_gimple_stmt (dump_file, stmt, 0, 0);
	    }
	}
      return;
    }

  /* If we hit here, we should have 3 or more ops left.  */
  gcc_assert (opindex + 2 < VEC_length (operand_entry_t, ops));

  /* Rewrite the next operator.  */
  oe = VEC_index (operand_entry_t, ops, opindex);

  if (oe->op != rhs2)
    {
      if (!moved)
	{
	  gimple_stmt_iterator gsinow, gsirhs1;
	  gimple stmt1 = stmt, stmt2;
	  unsigned int count;

	  gsinow = gsi_for_stmt (stmt);
	  count = VEC_length (operand_entry_t, ops) - opindex - 2;
	  while (count-- != 0)
	    {
	      stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt1));
	      gsirhs1 = gsi_for_stmt (stmt2);
	      gsi_move_before (&gsirhs1, &gsinow);
	      gsi_prev (&gsinow);
	      stmt1 = stmt2;
	    }
	  moved = true;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Transforming ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}

      gimple_assign_set_rhs2 (stmt, oe->op);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, " into ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
    }
  /* Recurse on the LHS of the binary operator, which is guaranteed to
     be the non-leaf side.  */
  rewrite_expr_tree (SSA_NAME_DEF_STMT (rhs1), opindex + 1, ops, moved);
}
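
/* For illustration (hypothetical ranks): with OPS sorted to
   {d (4), c (3), b (2), a (1)} and a linearized chain

     t1 = _ + _;  t2 = t1 + _;  t3 = t2 + _;

   the rewrite fills in t1 = b + a, t2 = t1 + c, t3 = t2 + d, so the
   lowest-ranked operands end up combined deepest in the chain.  */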

/* Transform STMT, which is really (A + B) + (C + D) into the left
   linear form, ((A + B) + C) + D.
   Recurse on D if necessary.  */

static void
linearize_expr (gimple stmt)
{
  gimple_stmt_iterator gsinow, gsirhs;
  gimple binlhs = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
  gimple binrhs = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
  enum tree_code rhscode = gimple_assign_rhs_code (stmt);
  gimple newbinrhs = NULL;
  struct loop *loop = loop_containing_stmt (stmt);

  gcc_assert (is_reassociable_op (binlhs, rhscode, loop)
	      && is_reassociable_op (binrhs, rhscode, loop));

  gsinow = gsi_for_stmt (stmt);
  gsirhs = gsi_for_stmt (binrhs);
  gsi_move_before (&gsirhs, &gsinow);

  gimple_assign_set_rhs2 (stmt, gimple_assign_rhs1 (binrhs));
  gimple_assign_set_rhs1 (binrhs, gimple_assign_lhs (binlhs));
  gimple_assign_set_rhs1 (stmt, gimple_assign_lhs (binrhs));

  if (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    newbinrhs = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Linearized: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  reassociate_stats.linearized++;
  update_stmt (binrhs);
  update_stmt (binlhs);
  update_stmt (stmt);

  gimple_set_visited (stmt, true);
  gimple_set_visited (binlhs, true);
  gimple_set_visited (binrhs, true);

  /* Tail recurse on the new rhs if it still needs reassociation.  */
  if (newbinrhs && is_reassociable_op (newbinrhs, rhscode, loop))
    /* ??? This should probably be linearize_expr (newbinrhs) but I don't
       want to change the algorithm while converting to tuples.  */
    linearize_expr (stmt);
}

/* If LHS has a single immediate use that is a GIMPLE_ASSIGN statement, return
   it.  Otherwise, return NULL.  */

static gimple
get_single_immediate_use (tree lhs)
{
  use_operand_p immuse;
  gimple immusestmt;

  if (TREE_CODE (lhs) == SSA_NAME
      && single_imm_use (lhs, &immuse, &immusestmt)
      && is_gimple_assign (immusestmt))
    return immusestmt;

  return NULL;
}

/* Recursively negate the value of TONEGATE, and return the SSA_NAME
   representing the negated value.  Insertions of any necessary
   instructions go before GSI.
   This function is recursive in that, if you hand it "a_5" as the
   value to negate, and a_5 is defined by "a_5 = b_3 + b_4", it will
   transform b_3 + b_4 into a_5 = -b_3 + -b_4.  */

static tree
negate_value (tree tonegate, gimple_stmt_iterator *gsi)
{
  gimple negatedefstmt = NULL;
  tree resultofnegate;

  /* If we are trying to negate a name, defined by an add, negate the
     add operands instead.  */
  if (TREE_CODE (tonegate) == SSA_NAME)
    negatedefstmt = SSA_NAME_DEF_STMT (tonegate);
  if (TREE_CODE (tonegate) == SSA_NAME
      && is_gimple_assign (negatedefstmt)
      && TREE_CODE (gimple_assign_lhs (negatedefstmt)) == SSA_NAME
      && has_single_use (gimple_assign_lhs (negatedefstmt))
      && gimple_assign_rhs_code (negatedefstmt) == PLUS_EXPR)
    {
      gimple_stmt_iterator gsi;
      tree rhs1 = gimple_assign_rhs1 (negatedefstmt);
      tree rhs2 = gimple_assign_rhs2 (negatedefstmt);

      gsi = gsi_for_stmt (negatedefstmt);
      rhs1 = negate_value (rhs1, &gsi);
      gimple_assign_set_rhs1 (negatedefstmt, rhs1);

      gsi = gsi_for_stmt (negatedefstmt);
      rhs2 = negate_value (rhs2, &gsi);
      gimple_assign_set_rhs2 (negatedefstmt, rhs2);

      update_stmt (negatedefstmt);
      return gimple_assign_lhs (negatedefstmt);
    }

  tonegate = fold_build1 (NEGATE_EXPR, TREE_TYPE (tonegate), tonegate);
  resultofnegate = force_gimple_operand_gsi (gsi, tonegate, true,
					     NULL_TREE, true, GSI_SAME_STMT);
  return resultofnegate;
}

/* Return true if we should break up the subtract in STMT into an add
   with negate.  This is true when the subtract operands are really
   adds, or the subtract itself is used in an add expression.  In
   either case, breaking up the subtract into an add with negate
   exposes the adds to reassociation.  */

static bool
should_break_up_subtract (gimple stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree binlhs = gimple_assign_rhs1 (stmt);
  tree binrhs = gimple_assign_rhs2 (stmt);
  gimple immusestmt;
  struct loop *loop = loop_containing_stmt (stmt);

  if (TREE_CODE (binlhs) == SSA_NAME
      && is_reassociable_op (SSA_NAME_DEF_STMT (binlhs), PLUS_EXPR, loop))
    return true;

  if (TREE_CODE (binrhs) == SSA_NAME
      && is_reassociable_op (SSA_NAME_DEF_STMT (binrhs), PLUS_EXPR, loop))
    return true;

  if (TREE_CODE (lhs) == SSA_NAME
      && (immusestmt = get_single_immediate_use (lhs))
      && is_gimple_assign (immusestmt)
      && (gimple_assign_rhs_code (immusestmt) == PLUS_EXPR
	  || gimple_assign_rhs_code (immusestmt) == MULT_EXPR))
    return true;
  return false;
}

/* Transform STMT from A - B into A + -B.  */

static void
break_up_subtract (gimple stmt, gimple_stmt_iterator *gsip)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Breaking up subtract ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  rhs2 = negate_value (rhs2, gsip);
  gimple_assign_set_rhs_with_ops (gsip, PLUS_EXPR, rhs1, rhs2);
  update_stmt (stmt);
}

/* Recursively linearize a binary expression that is the RHS of STMT.
   Place the operands of the expression tree in the vector named OPS.  */

static void
linearize_expr_tree (VEC(operand_entry_t, heap) **ops, gimple stmt,
		     bool is_associative, bool set_visited)
{
  tree binlhs = gimple_assign_rhs1 (stmt);
  tree binrhs = gimple_assign_rhs2 (stmt);
  gimple binlhsdef, binrhsdef;
  bool binlhsisreassoc = false;
  bool binrhsisreassoc = false;
  enum tree_code rhscode = gimple_assign_rhs_code (stmt);
  struct loop *loop = loop_containing_stmt (stmt);

  if (set_visited)
    gimple_set_visited (stmt, true);

  if (TREE_CODE (binlhs) == SSA_NAME)
    {
      binlhsdef = SSA_NAME_DEF_STMT (binlhs);
      binlhsisreassoc = (is_reassociable_op (binlhsdef, rhscode, loop)
			 && !stmt_could_throw_p (binlhsdef));
    }

  if (TREE_CODE (binrhs) == SSA_NAME)
    {
      binrhsdef = SSA_NAME_DEF_STMT (binrhs);
      binrhsisreassoc = (is_reassociable_op (binrhsdef, rhscode, loop)
			 && !stmt_could_throw_p (binrhsdef));
    }

  /* If the LHS is not reassociable, but the RHS is, we need to swap
     them.  If neither is reassociable, there is nothing we can do, so
     just put them in the ops vector.  If the LHS is reassociable,
     linearize it.  If both are reassociable, then linearize the RHS
     and the LHS.  */

  if (!binlhsisreassoc)
    {
      tree temp;

      /* If this is not an associative operation, like division, give up.  */
      if (!is_associative)
	{
	  add_to_ops_vec (ops, binrhs);
	  return;
	}

      if (!binrhsisreassoc)
	{
	  add_to_ops_vec (ops, binrhs);
	  add_to_ops_vec (ops, binlhs);
	  return;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "swapping operands of ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}

      swap_tree_operands (stmt,
			  gimple_assign_rhs1_ptr (stmt),
			  gimple_assign_rhs2_ptr (stmt));
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, " is now ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}

      /* We want to make it so the lhs is always the reassociative op,
	 so swap.  */
      temp = binlhs;
      binlhs = binrhs;
      binrhs = temp;
    }
  else if (binrhsisreassoc)
    {
      linearize_expr (stmt);
      binlhs = gimple_assign_rhs1 (stmt);
      binrhs = gimple_assign_rhs2 (stmt);
    }

  gcc_assert (TREE_CODE (binrhs) != SSA_NAME
	      || !is_reassociable_op (SSA_NAME_DEF_STMT (binrhs),
				      rhscode, loop));
  linearize_expr_tree (ops, SSA_NAME_DEF_STMT (binlhs),
		       is_associative, set_visited);
  add_to_ops_vec (ops, binrhs);
}

/* Repropagate the negates back into subtracts, since no other pass
   currently does it.  */

static void
repropagate_negates (void)
{
  unsigned int i = 0;
  tree negate;

  FOR_EACH_VEC_ELT (tree, plus_negates, i, negate)
    {
      gimple user = get_single_immediate_use (negate);

      if (!user || !is_gimple_assign (user))
	continue;

      /* The negate operand can be either operand of a PLUS_EXPR
	 (it can be the LHS if the RHS is a constant for example).

	 Force the negate operand to the RHS of the PLUS_EXPR, then
	 transform the PLUS_EXPR into a MINUS_EXPR.  */
      if (gimple_assign_rhs_code (user) == PLUS_EXPR)
	{
	  /* If the negated operand appears on the LHS of the
	     PLUS_EXPR, exchange the operands of the PLUS_EXPR
	     to force the negated operand to the RHS of the PLUS_EXPR.  */
	  if (gimple_assign_rhs1 (user) == negate)
	    {
	      swap_tree_operands (user,
				  gimple_assign_rhs1_ptr (user),
				  gimple_assign_rhs2_ptr (user));
	    }

	  /* Now transform the PLUS_EXPR into a MINUS_EXPR and replace
	     the RHS of the PLUS_EXPR with the operand of the NEGATE_EXPR.  */
	  if (gimple_assign_rhs2 (user) == negate)
	    {
	      tree rhs1 = gimple_assign_rhs1 (user);
	      tree rhs2 = get_unary_op (negate, NEGATE_EXPR);
	      gimple_stmt_iterator gsi = gsi_for_stmt (user);
	      gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, rhs1, rhs2);
	      update_stmt (user);
	    }
	}
      else if (gimple_assign_rhs_code (user) == MINUS_EXPR)
	{
	  if (gimple_assign_rhs1 (user) == negate)
	    {
	      /* We have
		   x = -a
		   y = x - b
		 which we transform into
		   x = a + b
		   y = -x .
		 This pushes down the negate which we possibly can merge
		 into some other operation, hence insert it into the
		 plus_negates vector.  */
	      gimple feed = SSA_NAME_DEF_STMT (negate);
	      tree a = gimple_assign_rhs1 (feed);
	      tree rhs2 = gimple_assign_rhs2 (user);
	      gimple_stmt_iterator gsi = gsi_for_stmt (feed), gsi2;
	      gimple_replace_lhs (feed, negate);
	      gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, a, rhs2);
	      update_stmt (gsi_stmt (gsi));
	      gsi2 = gsi_for_stmt (user);
	      gimple_assign_set_rhs_with_ops (&gsi2, NEGATE_EXPR, negate, NULL);
	      update_stmt (gsi_stmt (gsi2));
	      gsi_move_before (&gsi, &gsi2);
	      VEC_safe_push (tree, heap, plus_negates,
			     gimple_assign_lhs (gsi_stmt (gsi2)));
	    }
	  else
	    {
	      /* Transform "x = -a; y = b - x" into "y = b + a", getting
		 rid of one operation.  */
	      gimple feed = SSA_NAME_DEF_STMT (negate);
	      tree a = gimple_assign_rhs1 (feed);
	      tree rhs1 = gimple_assign_rhs1 (user);
	      gimple_stmt_iterator gsi = gsi_for_stmt (user);
	      gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, rhs1, a);
	      update_stmt (gsi_stmt (gsi));
	    }
	}
    }
}
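
/* For illustration of the PLUS_EXPR case: t = -b; r = t + a is first
   canonicalized by the swap so the negate sits on the RHS, then
   rewritten to r = a - b, leaving the explicit negate dead.  */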
1963 /* Returns true if OP is of a type for which we can do reassociation.
1964 That is, for integral or non-saturating fixed-point types, and for
1965 floating-point types when associative-math is enabled. */
1967 static bool
1968 can_reassociate_p (tree op)
1970 tree type = TREE_TYPE (op);
1971 if ((INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
1972 || NON_SAT_FIXED_POINT_TYPE_P (type)
1973 || (flag_associative_math && FLOAT_TYPE_P (type)))
1974 return true;
1975 return false;
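/* For instance, operands of unsigned integer type qualify because
   unsigned arithmetic wraps, plain signed integers do not (unless
   -fwrapv makes TYPE_OVERFLOW_WRAPS hold for them), and floats only
   qualify when -fassociative-math is in effect. */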
1978 /* Break up subtract operations in block BB.
1980 We do this top down because we don't know whether the subtract is
1981 part of a possible chain of reassociation except at the top.
1983 IE given
1984 d = f + g
1985 c = a + e
1986 b = c - d
1987 q = b - r
1988 k = t - q
1990 we want to break up k = t - q, but we won't until we've transformed q
1991 = b - r, which won't be broken up until we transform b = c - d.
1993 En passant, clear the GIMPLE visited flag on every statement. */
1995 static void
1996 break_up_subtract_bb (basic_block bb)
1998 gimple_stmt_iterator gsi;
1999 basic_block son;
2001 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2003 gimple stmt = gsi_stmt (gsi);
2004 gimple_set_visited (stmt, false);
2006 if (!is_gimple_assign (stmt)
2007 || !can_reassociate_p (gimple_assign_lhs (stmt)))
2008 continue;
2010 /* Look for simple gimple subtract operations. */
2011 if (gimple_assign_rhs_code (stmt) == MINUS_EXPR)
2013 if (!can_reassociate_p (gimple_assign_rhs1 (stmt))
2014 || !can_reassociate_p (gimple_assign_rhs2 (stmt)))
2015 continue;
2017 /* Check for a subtract used only in an addition. If this
2018 is the case, transform it into an add of a negate for better
2019 reassociation. IE transform C = A - B into C = A + -B if C
2020 is only used in an addition. */
2021 if (should_break_up_subtract (stmt))
2022 break_up_subtract (stmt, &gsi);
2024 else if (gimple_assign_rhs_code (stmt) == NEGATE_EXPR
2025 && can_reassociate_p (gimple_assign_rhs1 (stmt)))
2026 VEC_safe_push (tree, heap, plus_negates, gimple_assign_lhs (stmt));
2028 for (son = first_dom_son (CDI_DOMINATORS, bb);
2029 son;
2030 son = next_dom_son (CDI_DOMINATORS, son))
2031 break_up_subtract_bb (son);
2034 /* Reassociate expressions in basic block BB and its children in
2035 the post-dominator tree. */
2037 static void
2038 reassociate_bb (basic_block bb)
2040 gimple_stmt_iterator gsi;
2041 basic_block son;
2043 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
2045 gimple stmt = gsi_stmt (gsi);
2047 if (is_gimple_assign (stmt)
2048 && !stmt_could_throw_p (stmt))
2050 tree lhs, rhs1, rhs2;
2051 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
2053 /* If this is not a gimple binary expression, there is
2054 nothing for us to do with it. */
2055 if (get_gimple_rhs_class (rhs_code) != GIMPLE_BINARY_RHS)
2056 continue;
2058 /* If this was part of an already processed statement,
2059 we don't need to touch it again. */
2060 if (gimple_visited_p (stmt))
2062 /* This statement might have become dead because of previous
2063 reassociations. */
2064 if (has_zero_uses (gimple_get_lhs (stmt)))
2066 gsi_remove (&gsi, true);
2067 release_defs (stmt);
2068 /* Removing the stmt above may have left the iterator
2069 at the end of the sequence. In that case reset it
2070 to the last stmt of the block. If the block is now
2071 empty, the iterator is still at the end of the
2072 sequence, so there is nothing left to visit and we
2073 need to bail out. */
2074 if (gsi_end_p (gsi))
2076 gsi = gsi_last_bb (bb);
2077 if (gsi_end_p (gsi))
2078 break;
2081 continue;
2084 lhs = gimple_assign_lhs (stmt);
2085 rhs1 = gimple_assign_rhs1 (stmt);
2086 rhs2 = gimple_assign_rhs2 (stmt);
2088 /* For operations other than bitwise and min/max ones we
2089 can't associate all types. Verify that here. */
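/* For instance, a chain of signed BIT_AND_EXPRs may always be
   reordered, since bitwise operations cannot overflow, while a
   chain of signed PLUS_EXPRs may not, as can_reassociate_p would
   report. */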
2090 if (rhs_code != BIT_IOR_EXPR
2091 && rhs_code != BIT_AND_EXPR
2092 && rhs_code != BIT_XOR_EXPR
2093 && rhs_code != MIN_EXPR
2094 && rhs_code != MAX_EXPR
2095 && (!can_reassociate_p (lhs)
2096 || !can_reassociate_p (rhs1)
2097 || !can_reassociate_p (rhs2)))
2098 continue;
2100 if (associative_tree_code (rhs_code))
2102 VEC(operand_entry_t, heap) *ops = NULL;
2104 /* There may be no immediate uses left by the time we
2105 get here because we may have eliminated them all. */
2106 if (TREE_CODE (lhs) == SSA_NAME && has_zero_uses (lhs))
2107 continue;
2109 gimple_set_visited (stmt, true);
2110 linearize_expr_tree (&ops, stmt, true, true);
2111 VEC_qsort (operand_entry_t, ops, sort_by_operand_rank);
2112 optimize_ops_list (rhs_code, &ops);
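/* Undistribution can, loosely speaking, rewrite a*c + b*c into
   (a + b)*c. If it changed anything, the operand list has new
   entries, so sort and optimize it once more. */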
2113 if (undistribute_ops_list (rhs_code, &ops,
2114 loop_containing_stmt (stmt)))
2116 VEC_qsort (operand_entry_t, ops, sort_by_operand_rank);
2117 optimize_ops_list (rhs_code, &ops);
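/* If the optimizations above reduced the list to a single operand
   (for instance when a + b + -b collapses to just a), the statement
   becomes a plain copy of that operand; otherwise the linearized
   tree is rewritten in proper rank order. */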
2120 if (VEC_length (operand_entry_t, ops) == 1)
2122 if (dump_file && (dump_flags & TDF_DETAILS))
2124 fprintf (dump_file, "Transforming ");
2125 print_gimple_stmt (dump_file, stmt, 0, 0);
2128 rhs1 = gimple_assign_rhs1 (stmt);
2129 gimple_assign_set_rhs_from_tree (&gsi,
2130 VEC_last (operand_entry_t,
2131 ops)->op);
2132 update_stmt (stmt);
2133 remove_visited_stmt_chain (rhs1);
2135 if (dump_file && (dump_flags & TDF_DETAILS))
2137 fprintf (dump_file, " into ");
2138 print_gimple_stmt (dump_file, stmt, 0, 0);
2141 else
2142 rewrite_expr_tree (stmt, 0, ops, false);
2144 VEC_free (operand_entry_t, heap, ops);
2148 for (son = first_dom_son (CDI_POST_DOMINATORS, bb);
2149 son;
2150 son = next_dom_son (CDI_POST_DOMINATORS, son))
2151 reassociate_bb (son);
2154 void dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops);
2155 void debug_ops_vector (VEC (operand_entry_t, heap) *ops);
2157 /* Dump the operand entry vector OPS to FILE. */
2159 void
2160 dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops)
2162 operand_entry_t oe;
2163 unsigned int i;
2165 FOR_EACH_VEC_ELT (operand_entry_t, ops, i, oe)
2167 fprintf (file, "Op %d -> rank: %d, tree: ", i, oe->rank);
2168 print_generic_expr (file, oe->op, 0);
2172 /* Dump the operand entry vector OPS to STDERR. */
2174 DEBUG_FUNCTION void
2175 debug_ops_vector (VEC (operand_entry_t, heap) *ops)
2177 dump_ops_vector (stderr, ops);
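/* Perform the actual reassociation: break up subtracts walking the
   dominator tree down from the entry block, then reassociate
   expressions walking the post-dominator tree up from the exit
   block. */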
2180 static void
2181 do_reassoc (void)
2183 break_up_subtract_bb (ENTRY_BLOCK_PTR);
2184 reassociate_bb (EXIT_BLOCK_PTR);
2187 /* Initialize the reassociation pass. */
2189 static void
2190 init_reassoc (void)
2192 int i;
2193 long rank = 2;
2194 tree param;
2195 int *bbs = XNEWVEC (int, last_basic_block + 1);
2197 /* Find the loops, so that we can prevent moving calculations
2198 in and out of them. */
2199 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
2201 memset (&reassociate_stats, 0, sizeof (reassociate_stats));
2203 operand_entry_pool = create_alloc_pool ("operand entry pool",
2204 sizeof (struct operand_entry), 30);
2205 next_operand_entry_id = 0;
2207 /* RPO (Reverse Post Order) will give us an order in which
2208 deeper loops come later. */
2209 pre_and_rev_post_order_compute (NULL, bbs, false);
2210 bb_rank = XCNEWVEC (long, last_basic_block + 1);
2211 operand_rank = pointer_map_create ();
2213 /* Give each argument a distinct rank. */
2214 for (param = DECL_ARGUMENTS (current_function_decl);
2215 param;
2216 param = DECL_CHAIN (param))
2218 if (gimple_default_def (cfun, param) != NULL)
2220 tree def = gimple_default_def (cfun, param);
2221 insert_operand_rank (def, ++rank);
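/* So for a hypothetical function foo (a, b), the default
   definitions of a and b would get ranks 3 and 4, RANK having
   started out at 2. */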
2225 /* Give the chain decl a distinct rank. */
2226 if (cfun->static_chain_decl != NULL)
2228 tree def = gimple_default_def (cfun, cfun->static_chain_decl);
2229 if (def != NULL)
2230 insert_operand_rank (def, ++rank);
2233 /* Set up rank for each BB. */
2234 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
2235 bb_rank[bbs[i]] = ++rank << 16;
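/* The block rank is kept in the high bits; presumably the low 16
   bits are left free so that the ranks handed out to values within
   a block (see get_rank) stay below the next block's base rank. */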
2237 free (bbs);
2238 calculate_dominance_info (CDI_POST_DOMINATORS);
2239 plus_negates = NULL;
2242 /* Clean up after the reassociation pass, and print stats if
2243 requested. */
2245 static void
2246 fini_reassoc (void)
2248 statistics_counter_event (cfun, "Linearized",
2249 reassociate_stats.linearized);
2250 statistics_counter_event (cfun, "Constants eliminated",
2251 reassociate_stats.constants_eliminated);
2252 statistics_counter_event (cfun, "Ops eliminated",
2253 reassociate_stats.ops_eliminated);
2254 statistics_counter_event (cfun, "Statements rewritten",
2255 reassociate_stats.rewritten);
2257 pointer_map_destroy (operand_rank);
2258 free_alloc_pool (operand_entry_pool);
2259 free (bb_rank);
2260 VEC_free (tree, heap, plus_negates);
2261 free_dominance_info (CDI_POST_DOMINATORS);
2262 loop_optimizer_finalize ();
2265 /* Gate and execute functions for Reassociation. */
2267 static unsigned int
2268 execute_reassoc (void)
2270 init_reassoc ();
2272 do_reassoc ();
2273 repropagate_negates ();
2275 fini_reassoc ();
2276 return 0;
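/* The pass is controlled by -ftree-reassoc, which is enabled by
   default at -O and higher; -fno-tree-reassoc disables it. */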
2279 static bool
2280 gate_tree_ssa_reassoc (void)
2282 return flag_tree_reassoc != 0;
2285 struct gimple_opt_pass pass_reassoc =
2288 GIMPLE_PASS,
2289 "reassoc", /* name */
2290 gate_tree_ssa_reassoc, /* gate */
2291 execute_reassoc, /* execute */
2292 NULL, /* sub */
2293 NULL, /* next */
2294 0, /* static_pass_number */
2295 TV_TREE_REASSOC, /* tv_id */
2296 PROP_cfg | PROP_ssa, /* properties_required */
2297 0, /* properties_provided */
2298 0, /* properties_destroyed */
2299 0, /* todo_flags_start */
2300 TODO_verify_ssa
2301 | TODO_verify_flow
2302 | TODO_dump_func
2303 | TODO_ggc_collect /* todo_flags_finish */