/* Reassociation for trees.
   Copyright (C) 2005, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "vec.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "cfgloop.h"
#include "flags.h"
/*  This is a simple global reassociation pass.  It is, in part, based
    on the LLVM pass of the same name (They do some things more/less
    than we do, in different orders, etc).

    It consists of five steps:

    1. Breaking up subtract operations into addition + negate, where
    it would promote the reassociation of adds.

    2. Left linearization of the expression trees, so that (A+B)+(C+D)
    becomes (((A+B)+C)+D), which is easier for us to rewrite later.
    During linearization, we place the operands of the binary
    expressions into a vector of operand_entry_t.

    3. Optimization of the operand lists, eliminating things like a +
    -a, a & a, etc.

    4. Rewrite the expression trees we linearized and optimized so
    they are in proper rank order.

    5. Repropagate negates, as nothing else will clean it up ATM.

    A bit of theory on #4, since nobody seems to write anything down
    about why it makes sense to do it the way they do it:

    We could do this much nicer theoretically, but don't (for reasons
    explained after how to do it theoretically nice :P).

    In order to promote the most redundancy elimination, you want
    binary expressions whose operands are the same rank (or
    preferably, the same value) exposed to the redundancy eliminator,
    for possible elimination.

    So the way to do this if we really cared, is to build the new op
    tree from the leaves to the roots, merging as you go, and putting the
    new op on the end of the worklist, until you are left with one
    thing on the worklist.

    IE if you have to rewrite the following set of operands (listed with
    rank in parentheses), with opcode PLUS_EXPR:

    a (1),  b (1),  c (1),  d (2), e (2)

    We start with our merge worklist empty, and the ops list with all of
    those on it.

    You want to first merge all leaves of the same rank, as much as
    possible.

    So first build a binary op of

    mergetmp = a + b, and put "mergetmp" on the merge worklist.

    Because there is no three operand form of PLUS_EXPR, c is not going to
    be exposed to redundancy elimination as a rank 1 operand.

    So you might as well throw it on the merge worklist (you could also
    consider it to now be a rank two operand, and merge it with d and e,
    but in this case, you then have evicted e from a binary op.  So at
    least in this situation, you can't win.)

    Then build a binary op of d + e
    mergetmp2 = d + e

    and put mergetmp2 on the merge worklist.

    so merge worklist = {mergetmp, c, mergetmp2}

    Continue building binary ops of these operations until you have only
    one operation left on the worklist.

    So we have

    build binary op
    mergetmp3 = mergetmp + c

    worklist = {mergetmp2, mergetmp3}

    mergetmp4 = mergetmp2 + mergetmp3

    worklist = {mergetmp4}

    because we have one operation left, we can now just set the original
    statement equal to the result of that operation.

    This will at least expose a + b and d + e to redundancy elimination
    as binary operations.

    For extra points, you can reuse the old statements to build the
    mergetmps, since you shouldn't run out.

    So why don't we do this?

    Because it's expensive, and rarely will help.  Most trees we are
    reassociating have 3 or fewer ops.  If they have 2 ops, they already
    will be written into a nice single binary op.  If you have 3 ops, a
    single simple check suffices to tell you whether the first two are of the
    same rank.  If so, you know to order it

    mergetmp = op1 + op2
    newstmt = mergetmp + op3

    instead of
    mergetmp = op2 + op3
    newstmt = mergetmp + op1

    If all three are of the same rank, you can't expose them all in a
    single binary operator anyway, so the above is *still* the best you
    can do.

    Thus, this is what we do.  When we have three ops left, we check to see
    what order to put them in, and call it a day.  As a nod to vector sum
    reduction, we check if any of the ops are really a phi node that is a
    destructive update for the associating op, and keep the destructive
    update together for vector sum reduction recognition.  */
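/* Editorial illustration (not part of the original pass): the
   leaves-to-roots merging scheme described above, modelled on plain
   integers and guarded out of the build.  All names are invented for
   the example; a merged entry simply gets a rank one higher than its
   inputs, standing in for the creation of a new "mergetmp".  */
#if 0
#include <stdio.h>

static int
merge_by_rank (int *rank, int n)
{
  /* Repeatedly merge the two lowest-ranked entries, as in
     "mergetmp = a + b" above, until one entry remains.  */
  while (n > 1)
    {
      int lo1 = 0, lo2 = 1, i;
      if (rank[lo2] < rank[lo1])
        {
          lo1 = 1;
          lo2 = 0;
        }
      for (i = 2; i < n; i++)
        if (rank[i] < rank[lo1])
          {
            lo2 = lo1;
            lo1 = i;
          }
        else if (rank[i] < rank[lo2])
          lo2 = i;
      /* The merged entry replaces one slot; the last entry fills the
         other, shrinking the worklist by one.  */
      rank[lo1] = (rank[lo1] > rank[lo2] ? rank[lo1] : rank[lo2]) + 1;
      rank[lo2] = rank[--n];
    }
  return rank[0];
}

int
main (void)
{
  int rank[] = { 1, 1, 1, 2, 2 };  /* a, b, c, d, e from the comment.  */
  printf ("final rank: %d\n", merge_by_rank (rank, 5));
  return 0;
}
#endif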
/* Statistics */
static struct
{
  int linearized;
  int constants_eliminated;
  int ops_eliminated;
  int rewritten;
} reassociate_stats;
/* Operator, rank pair.  */
typedef struct operand_entry
{
  unsigned int rank;
  int id;
  tree op;
} *operand_entry_t;

static alloc_pool operand_entry_pool;
/* This is used to assign a unique ID to each struct operand_entry
   so that qsort results are identical on different hosts.  */
static int next_operand_entry_id;
/* Starting rank number for a given basic block, so that we can rank
   operations using unmovable instructions in that BB based on the bb
   depth.  */
static long *bb_rank;
/* Operand->rank hashtable.  */
static struct pointer_map_t *operand_rank;
/* Look up the operand rank structure for expression E.  */

static long
find_operand_rank (tree e)
{
  void **slot = pointer_map_contains (operand_rank, e);
  return slot ? (long) (intptr_t) *slot : -1;
}
/* Insert {E,RANK} into the operand rank hashtable.  */

static void
insert_operand_rank (tree e, long rank)
{
  void **slot;
  gcc_assert (rank > 0);
  slot = pointer_map_insert (operand_rank, e);
  gcc_assert (!*slot);
  *slot = (void *) (intptr_t) rank;
}
/* Given an expression E, return the rank of the expression.  */

static long
get_rank (tree e)
{
  /* Constants have rank 0.  */
  if (is_gimple_min_invariant (e))
    return 0;

  /* SSA_NAME's have the rank of the expression they are the result
     of.
     For globals and uninitialized values, the rank is 0.
     For function arguments, use the pre-setup rank.
     For PHI nodes, stores, asm statements, etc, we use the rank of
     the BB.
     For simple operations, the rank is the maximum rank of any of
     its operands, or the bb_rank, whichever is less.
     I make no claims that this is optimal, however, it gives good
     results.  */
  if (TREE_CODE (e) == SSA_NAME)
    {
      gimple stmt;
      long rank, maxrank;
      int i, n;

      if (TREE_CODE (SSA_NAME_VAR (e)) == PARM_DECL
          && SSA_NAME_IS_DEFAULT_DEF (e))
        return find_operand_rank (e);

      stmt = SSA_NAME_DEF_STMT (e);
      if (gimple_bb (stmt) == NULL)
        return 0;

      if (!is_gimple_assign (stmt)
          || gimple_vdef (stmt))
        return bb_rank[gimple_bb (stmt)->index];

      /* If we already have a rank for this expression, use that.  */
      rank = find_operand_rank (e);
      if (rank != -1)
        return rank;

      /* Otherwise, find the maximum rank for the operands, or the bb
         rank, whichever is less.  */
      rank = 0;
      maxrank = bb_rank[gimple_bb (stmt)->index];
      if (gimple_assign_single_p (stmt))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          n = TREE_OPERAND_LENGTH (rhs);
          if (n == 0)
            rank = MAX (rank, get_rank (rhs));
          else
            {
              for (i = 0;
                   i < n && TREE_OPERAND (rhs, i) && rank != maxrank; i++)
                rank = MAX (rank, get_rank (TREE_OPERAND (rhs, i)));
            }
        }
      else
        {
          n = gimple_num_ops (stmt);
          for (i = 1; i < n && rank != maxrank; i++)
            {
              gcc_assert (gimple_op (stmt, i));
              rank = MAX (rank, get_rank (gimple_op (stmt, i)));
            }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Rank for ");
          print_generic_expr (dump_file, e, 0);
          fprintf (dump_file, " is %ld\n", (rank + 1));
        }

      /* Note the rank in the hashtable so we don't recompute it.  */
      insert_operand_rank (e, (rank + 1));
      return (rank + 1);
    }

  /* Globals, etc, are rank 0.  */
  return 0;
}
DEF_VEC_P(operand_entry_t);
DEF_VEC_ALLOC_P(operand_entry_t, heap);
/* We want integer ones to end up last no matter what, since they are
   the ones we can do the most with.  */
#define INTEGER_CONST_TYPE 1 << 3
#define FLOAT_CONST_TYPE 1 << 2
#define OTHER_CONST_TYPE 1 << 1
/* Classify an invariant tree into integer, float, or other, so that
   we can sort them to be near other constants of the same type.  */

static inline int
constant_type (tree t)
{
  if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
    return INTEGER_CONST_TYPE;
  else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (t)))
    return FLOAT_CONST_TYPE;
  else
    return OTHER_CONST_TYPE;
}
/* qsort comparison function to sort operand entries PA and PB by rank
   so that the sorted array is ordered by rank in decreasing order.  */

static int
sort_by_operand_rank (const void *pa, const void *pb)
{
  const operand_entry_t oea = *(const operand_entry_t *)pa;
  const operand_entry_t oeb = *(const operand_entry_t *)pb;

  /* It's nicer for optimize_expression if constants that are likely
     to fold when added/multiplied/whatever are put next to each
     other.  Since all constants have rank 0, order them by type.  */
  if (oeb->rank == 0 && oea->rank == 0)
    {
      if (constant_type (oeb->op) != constant_type (oea->op))
        return constant_type (oeb->op) - constant_type (oea->op);
      else
        /* To make sorting result stable, we use unique IDs to determine
           order.  */
        return oeb->id - oea->id;
    }

  /* Lastly, make sure the versions that are the same go next to each
     other.  We use SSA_NAME_VERSION because it's stable.  */
  if ((oeb->rank - oea->rank == 0)
      && TREE_CODE (oea->op) == SSA_NAME
      && TREE_CODE (oeb->op) == SSA_NAME)
    {
      if (SSA_NAME_VERSION (oeb->op) != SSA_NAME_VERSION (oea->op))
        return SSA_NAME_VERSION (oeb->op) - SSA_NAME_VERSION (oea->op);
      else
        return oeb->id - oea->id;
    }

  if (oeb->rank != oea->rank)
    return oeb->rank - oea->rank;
  else
    return oeb->id - oea->id;
}
/* Add an operand entry to *OPS for the tree operand OP.  */

static void
add_to_ops_vec (VEC(operand_entry_t, heap) **ops, tree op)
{
  operand_entry_t oe = (operand_entry_t) pool_alloc (operand_entry_pool);

  oe->op = op;
  oe->rank = get_rank (op);
  oe->id = next_operand_entry_id++;
  VEC_safe_push (operand_entry_t, heap, *ops, oe);
}
/* Return true if STMT is a reassociable operation containing a binary
   operation with tree code CODE, and is inside LOOP.  */

static bool
is_reassociable_op (gimple stmt, enum tree_code code, struct loop *loop)
{
  basic_block bb = gimple_bb (stmt);

  if (gimple_bb (stmt) == NULL)
    return false;

  if (!flow_bb_inside_loop_p (loop, bb))
    return false;

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == code
      && has_single_use (gimple_assign_lhs (stmt)))
    return true;

  return false;
}
/* Given NAME, if NAME is defined by a unary operation OPCODE, return the
   operand of the negate operation.  Otherwise, return NULL.  */

static tree
get_unary_op (tree name, enum tree_code opcode)
{
  gimple stmt = SSA_NAME_DEF_STMT (name);

  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  if (gimple_assign_rhs_code (stmt) == opcode)
    return gimple_assign_rhs1 (stmt);
  return NULL_TREE;
}
/* If CURR and LAST are a pair of ops that OPCODE allows us to
   eliminate through equivalences, do so, remove them from OPS, and
   return true.  Otherwise, return false.  */

static bool
eliminate_duplicate_pair (enum tree_code opcode,
                          VEC (operand_entry_t, heap) **ops,
                          bool *all_done,
                          unsigned int i,
                          operand_entry_t curr,
                          operand_entry_t last)
{

  /* If we have two of the same op, and the opcode is & |, min, or max,
     we can eliminate one of them.
     If we have two of the same op, and the opcode is ^, we can
     eliminate both of them.  */

  if (last && last->op == curr->op)
    {
      switch (opcode)
        {
        case MAX_EXPR:
        case MIN_EXPR:
        case BIT_IOR_EXPR:
        case BIT_AND_EXPR:
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Equivalence: ");
              print_generic_expr (dump_file, curr->op, 0);
              fprintf (dump_file, " [&|minmax] ");
              print_generic_expr (dump_file, last->op, 0);
              fprintf (dump_file, " -> ");
              print_generic_stmt (dump_file, last->op, 0);
            }

          VEC_ordered_remove (operand_entry_t, *ops, i);
          reassociate_stats.ops_eliminated ++;

          return true;

        case BIT_XOR_EXPR:
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Equivalence: ");
              print_generic_expr (dump_file, curr->op, 0);
              fprintf (dump_file, " ^ ");
              print_generic_expr (dump_file, last->op, 0);
              fprintf (dump_file, " -> nothing\n");
            }

          reassociate_stats.ops_eliminated += 2;

          if (VEC_length (operand_entry_t, *ops) == 2)
            {
              VEC_free (operand_entry_t, heap, *ops);
              *ops = NULL;
              add_to_ops_vec (ops, fold_convert (TREE_TYPE (last->op),
                                                 integer_zero_node));
              *all_done = true;
            }
          else
            {
              VEC_ordered_remove (operand_entry_t, *ops, i-1);
              VEC_ordered_remove (operand_entry_t, *ops, i-1);
            }

          return true;

        default:
          break;
        }
    }
  return false;
}
static VEC(tree, heap) *plus_negates;
/* If OPCODE is PLUS_EXPR and CURR->OP is a negate expression or a bitwise
   not expression, look in OPS for a corresponding positive operation to
   cancel it out.  If we find one, remove the other from OPS, replace
   OPS[CURRINDEX] with 0 or -1, respectively, and return true.  Otherwise,
   return false.  */

static bool
eliminate_plus_minus_pair (enum tree_code opcode,
                           VEC (operand_entry_t, heap) **ops,
                           unsigned int currindex,
                           operand_entry_t curr)
{
  tree negateop;
  tree notop;
  unsigned int i;
  operand_entry_t oe;

  if (opcode != PLUS_EXPR || TREE_CODE (curr->op) != SSA_NAME)
    return false;

  negateop = get_unary_op (curr->op, NEGATE_EXPR);
  notop = get_unary_op (curr->op, BIT_NOT_EXPR);
  if (negateop == NULL_TREE && notop == NULL_TREE)
    return false;

  /* Any non-negated version will have a rank that is one less than
     the current rank.  So once we hit those ranks, if we don't find
     one, we can stop.  */

  for (i = currindex + 1;
       VEC_iterate (operand_entry_t, *ops, i, oe)
       && oe->rank >= curr->rank - 1 ;
       i++)
    {
      if (oe->op == negateop)
        {

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Equivalence: ");
              print_generic_expr (dump_file, negateop, 0);
              fprintf (dump_file, " + -");
              print_generic_expr (dump_file, oe->op, 0);
              fprintf (dump_file, " -> 0\n");
            }

          VEC_ordered_remove (operand_entry_t, *ops, i);
          add_to_ops_vec (ops, fold_convert (TREE_TYPE (oe->op),
                                             integer_zero_node));
          VEC_ordered_remove (operand_entry_t, *ops, currindex);
          reassociate_stats.ops_eliminated ++;

          return true;
        }
      else if (oe->op == notop)
        {
          tree op_type = TREE_TYPE (oe->op);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Equivalence: ");
              print_generic_expr (dump_file, notop, 0);
              fprintf (dump_file, " + ~");
              print_generic_expr (dump_file, oe->op, 0);
              fprintf (dump_file, " -> -1\n");
            }

          VEC_ordered_remove (operand_entry_t, *ops, i);
          add_to_ops_vec (ops, build_int_cst_type (op_type, -1));
          VEC_ordered_remove (operand_entry_t, *ops, currindex);
          reassociate_stats.ops_eliminated ++;

          return true;
        }
    }

  /* CURR->OP is a negate expr in a plus expr: save it for later
     inspection in repropagate_negates().  */
  if (negateop != NULL_TREE)
    VEC_safe_push (tree, heap, plus_negates, curr->op);

  return false;
}
/* If OPCODE is BIT_IOR_EXPR or BIT_AND_EXPR, and CURR->OP is really a
   bitwise not expression, look in OPS for a corresponding operand to
   cancel it out.  If we find one, remove the other from OPS, replace
   OPS[CURRINDEX] with 0, and return true.  Otherwise, return
   false.  */

static bool
eliminate_not_pairs (enum tree_code opcode,
                     VEC (operand_entry_t, heap) **ops,
                     unsigned int currindex,
                     operand_entry_t curr)
{
  tree notop;
  unsigned int i;
  operand_entry_t oe;

  if ((opcode != BIT_IOR_EXPR && opcode != BIT_AND_EXPR)
      || TREE_CODE (curr->op) != SSA_NAME)
    return false;

  notop = get_unary_op (curr->op, BIT_NOT_EXPR);
  if (notop == NULL_TREE)
    return false;

  /* Any non-not version will have a rank that is one less than
     the current rank.  So once we hit those ranks, if we don't find
     one, we can stop.  */

  for (i = currindex + 1;
       VEC_iterate (operand_entry_t, *ops, i, oe)
       && oe->rank >= curr->rank - 1;
       i++)
    {
      if (oe->op == notop)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Equivalence: ");
              print_generic_expr (dump_file, notop, 0);
              if (opcode == BIT_AND_EXPR)
                fprintf (dump_file, " & ~");
              else if (opcode == BIT_IOR_EXPR)
                fprintf (dump_file, " | ~");
              print_generic_expr (dump_file, oe->op, 0);
              if (opcode == BIT_AND_EXPR)
                fprintf (dump_file, " -> 0\n");
              else if (opcode == BIT_IOR_EXPR)
                fprintf (dump_file, " -> -1\n");
            }

          if (opcode == BIT_AND_EXPR)
            oe->op = fold_convert (TREE_TYPE (oe->op), integer_zero_node);
          else if (opcode == BIT_IOR_EXPR)
            oe->op = build_low_bits_mask (TREE_TYPE (oe->op),
                                          TYPE_PRECISION (TREE_TYPE (oe->op)));

          reassociate_stats.ops_eliminated
            += VEC_length (operand_entry_t, *ops) - 1;
          VEC_free (operand_entry_t, heap, *ops);
          *ops = NULL;
          VEC_safe_push (operand_entry_t, heap, *ops, oe);
          return true;
        }
    }

  return false;
}
/* Use constant value that may be present in OPS to try to eliminate
   operands.  Note that this function is only really used when we've
   eliminated ops for other reasons, or merged constants.  Across
   single statements, fold already does all of this, plus more.  There
   is little point in duplicating logic, so I've only included the
   identities that I could ever construct testcases to trigger.  */

static void
eliminate_using_constants (enum tree_code opcode,
                           VEC(operand_entry_t, heap) **ops)
{
  operand_entry_t oelast = VEC_last (operand_entry_t, *ops);
  tree type = TREE_TYPE (oelast->op);

  if (oelast->rank == 0
      && (INTEGRAL_TYPE_P (type) || FLOAT_TYPE_P (type)))
    {
      switch (opcode)
        {
        case BIT_AND_EXPR:
          if (integer_zerop (oelast->op))
            {
              if (VEC_length (operand_entry_t, *ops) != 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Found & 0, removing all other ops\n");

                  reassociate_stats.ops_eliminated
                    += VEC_length (operand_entry_t, *ops) - 1;

                  VEC_free (operand_entry_t, heap, *ops);
                  *ops = NULL;
                  VEC_safe_push (operand_entry_t, heap, *ops, oelast);
                  return;
                }
            }
          else if (integer_all_onesp (oelast->op))
            {
              if (VEC_length (operand_entry_t, *ops) != 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Found & -1, removing\n");
                  VEC_pop (operand_entry_t, *ops);
                  reassociate_stats.ops_eliminated++;
                }
            }
          break;
        case BIT_IOR_EXPR:
          if (integer_all_onesp (oelast->op))
            {
              if (VEC_length (operand_entry_t, *ops) != 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Found | -1, removing all other ops\n");

                  reassociate_stats.ops_eliminated
                    += VEC_length (operand_entry_t, *ops) - 1;

                  VEC_free (operand_entry_t, heap, *ops);
                  *ops = NULL;
                  VEC_safe_push (operand_entry_t, heap, *ops, oelast);
                  return;
                }
            }
          else if (integer_zerop (oelast->op))
            {
              if (VEC_length (operand_entry_t, *ops) != 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Found | 0, removing\n");
                  VEC_pop (operand_entry_t, *ops);
                  reassociate_stats.ops_eliminated++;
                }
            }
          break;
        case MULT_EXPR:
          if (integer_zerop (oelast->op)
              || (FLOAT_TYPE_P (type)
                  && !HONOR_NANS (TYPE_MODE (type))
                  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
                  && real_zerop (oelast->op)))
            {
              if (VEC_length (operand_entry_t, *ops) != 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Found * 0, removing all other ops\n");

                  reassociate_stats.ops_eliminated
                    += VEC_length (operand_entry_t, *ops) - 1;
                  VEC_free (operand_entry_t, heap, *ops);
                  *ops = NULL;
                  VEC_safe_push (operand_entry_t, heap, *ops, oelast);
                  return;
                }
            }
          else if (integer_onep (oelast->op)
                   || (FLOAT_TYPE_P (type)
                       && !HONOR_SNANS (TYPE_MODE (type))
                       && real_onep (oelast->op)))
            {
              if (VEC_length (operand_entry_t, *ops) != 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Found * 1, removing\n");
                  VEC_pop (operand_entry_t, *ops);
                  reassociate_stats.ops_eliminated++;
                  return;
                }
            }
          break;
        case BIT_XOR_EXPR:
        case PLUS_EXPR:
        case MINUS_EXPR:
          if (integer_zerop (oelast->op)
              || (FLOAT_TYPE_P (type)
                  && (opcode == PLUS_EXPR || opcode == MINUS_EXPR)
                  && fold_real_zero_addition_p (type, oelast->op,
                                                opcode == MINUS_EXPR)))
            {
              if (VEC_length (operand_entry_t, *ops) != 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Found [|^+] 0, removing\n");
                  VEC_pop (operand_entry_t, *ops);
                  reassociate_stats.ops_eliminated++;
                  return;
                }
            }
          break;
        default:
          break;
        }
    }
}
static void linearize_expr_tree (VEC(operand_entry_t, heap) **, gimple,
                                 bool, bool);
/* Structure for tracking and counting operands.  */
typedef struct oecount_s {
  int cnt;
  int id;
  enum tree_code oecode;
  tree op;
} oecount;

DEF_VEC_O(oecount);
DEF_VEC_ALLOC_O(oecount,heap);

/* The heap for the oecount hashtable and the sorted list of operands.  */
static VEC (oecount, heap) *cvec;
/* Hash function for oecount.  */

static hashval_t
oecount_hash (const void *p)
{
  const oecount *c = VEC_index (oecount, cvec, (size_t)p - 42);
  return htab_hash_pointer (c->op) ^ (hashval_t)c->oecode;
}
/* Comparison function for oecount.  */

static int
oecount_eq (const void *p1, const void *p2)
{
  const oecount *c1 = VEC_index (oecount, cvec, (size_t)p1 - 42);
  const oecount *c2 = VEC_index (oecount, cvec, (size_t)p2 - 42);
  return (c1->oecode == c2->oecode
          && c1->op == c2->op);
}
/* Comparison function for qsort sorting oecount elements by count.  */

static int
oecount_cmp (const void *p1, const void *p2)
{
  const oecount *c1 = (const oecount *)p1;
  const oecount *c2 = (const oecount *)p2;
  if (c1->cnt != c2->cnt)
    return c1->cnt - c2->cnt;
  else
    /* If counts are identical, use unique IDs to stabilize qsort.  */
    return c1->id - c2->id;
}
/* Walks the linear chain with result *DEF searching for an operation
   with operand OP and code OPCODE removing that from the chain.  *DEF
   is updated if there is only one operand but no operation left.  */

static void
zero_one_operation (tree *def, enum tree_code opcode, tree op)
{
  gimple stmt = SSA_NAME_DEF_STMT (*def);

  do
    {
      tree name = gimple_assign_rhs1 (stmt);

      /* If this is the operation we look for and one of the operands
         is ours simply propagate the other operand into the stmts
         single use.  */
      if (gimple_assign_rhs_code (stmt) == opcode
          && (name == op
              || gimple_assign_rhs2 (stmt) == op))
        {
          gimple use_stmt;
          use_operand_p use;
          gimple_stmt_iterator gsi;
          if (name == op)
            name = gimple_assign_rhs2 (stmt);
          gcc_assert (has_single_use (gimple_assign_lhs (stmt)));
          single_imm_use (gimple_assign_lhs (stmt), &use, &use_stmt);
          if (gimple_assign_lhs (stmt) == *def)
            *def = name;
          SET_USE (use, name);
          if (TREE_CODE (name) != SSA_NAME)
            update_stmt (use_stmt);
          gsi = gsi_for_stmt (stmt);
          gsi_remove (&gsi, true);
          release_defs (stmt);
          return;
        }

      /* Continue walking the chain.  */
      gcc_assert (name != op
                  && TREE_CODE (name) == SSA_NAME);
      stmt = SSA_NAME_DEF_STMT (name);
    }
  while (1);
}
/* Builds one statement performing OP1 OPCODE OP2 using TMPVAR for
   the result.  Places the statement after the definition of either
   OP1 or OP2.  Returns the new statement.  */

static gimple
build_and_add_sum (tree tmpvar, tree op1, tree op2, enum tree_code opcode)
{
  gimple op1def = NULL, op2def = NULL;
  gimple_stmt_iterator gsi;
  tree op;
  gimple sum;

  /* Create the addition statement.  */
  sum = gimple_build_assign_with_ops (opcode, tmpvar, op1, op2);
  op = make_ssa_name (tmpvar, sum);
  gimple_assign_set_lhs (sum, op);

  /* Find an insertion place and insert.  */
  if (TREE_CODE (op1) == SSA_NAME)
    op1def = SSA_NAME_DEF_STMT (op1);
  if (TREE_CODE (op2) == SSA_NAME)
    op2def = SSA_NAME_DEF_STMT (op2);
  if ((!op1def || gimple_nop_p (op1def))
      && (!op2def || gimple_nop_p (op2def)))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
      gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
    }
  else if ((!op1def || gimple_nop_p (op1def))
           || (op2def && !gimple_nop_p (op2def)
               && stmt_dominates_stmt_p (op1def, op2def)))
    {
      if (gimple_code (op2def) == GIMPLE_PHI)
        {
          gsi = gsi_after_labels (gimple_bb (op2def));
          gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
        }
      else
        {
          if (!stmt_ends_bb_p (op2def))
            {
              gsi = gsi_for_stmt (op2def);
              gsi_insert_after (&gsi, sum, GSI_NEW_STMT);
            }
          else
            {
              edge e;
              edge_iterator ei;

              FOR_EACH_EDGE (e, ei, gimple_bb (op2def)->succs)
                if (e->flags & EDGE_FALLTHRU)
                  gsi_insert_on_edge_immediate (e, sum);
            }
        }
    }
  else
    {
      if (gimple_code (op1def) == GIMPLE_PHI)
        {
          gsi = gsi_after_labels (gimple_bb (op1def));
          gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
        }
      else
        {
          if (!stmt_ends_bb_p (op1def))
            {
              gsi = gsi_for_stmt (op1def);
              gsi_insert_after (&gsi, sum, GSI_NEW_STMT);
            }
          else
            {
              edge e;
              edge_iterator ei;

              FOR_EACH_EDGE (e, ei, gimple_bb (op1def)->succs)
                if (e->flags & EDGE_FALLTHRU)
                  gsi_insert_on_edge_immediate (e, sum);
            }
        }
    }
  update_stmt (sum);

  return sum;
}
/* Perform un-distribution of divisions and multiplications.
   A * X + B * X is transformed into (A + B) * X and A / X + B / X
   to (A + B) / X for real X.

   The algorithm is organized as follows.

    - First we walk the addition chain *OPS looking for summands that
      are defined by a multiplication or a real division.  This results
      in the candidates bitmap with relevant indices into *OPS.

    - Second we build the chains of multiplications or divisions for
      these candidates, counting the number of occurrences of (operand, code)
      pairs in all of the candidates chains.

    - Third we sort the (operand, code) pairs by number of occurrence and
      process them starting with the pair with the most uses.

      * For each such pair we walk the candidates again to build a
        second candidate bitmap noting all multiplication/division chains
        that have at least one occurrence of (operand, code).

      * We build an alternate addition chain only covering these
        candidates with one (operand, code) operation removed from their
        multiplication/division chain.

      * The first candidate gets replaced by the alternate addition chain
        multiplied/divided by the operand.

      * All candidate chains get disabled for further processing and
        processing of (operand, code) pairs continues.

  The alternate addition chains built are re-processed by the main
  reassociation algorithm which allows optimizing a * x * y + b * y * x
  to (a + b) * x * y in one invocation of the reassociation pass.  */
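/* Editorial illustration (not part of the original pass): the payoff
   of un-distribution on plain integers, guarded out of the build.
   Rewriting a*x + b*x as (a + b)*x keeps the value but drops one
   multiplication; the names are invented for the example.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int a = 3, b = 5, x = 7;
  int distributed = a * x + b * x;    /* Two multiplications.  */
  int undistributed = (a + b) * x;    /* One multiplication.  */
  printf ("%d == %d\n", distributed, undistributed);
  return 0;
}
#endif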
static bool
undistribute_ops_list (enum tree_code opcode,
                       VEC (operand_entry_t, heap) **ops, struct loop *loop)
{
  unsigned int length = VEC_length (operand_entry_t, *ops);
  operand_entry_t oe1;
  unsigned i, j;
  sbitmap candidates, candidates2;
  unsigned nr_candidates, nr_candidates2;
  sbitmap_iterator sbi0;
  VEC (operand_entry_t, heap) **subops;
  htab_t ctable;
  bool changed = false;
  int next_oecount_id = 0;

  if (length <= 1
      || opcode != PLUS_EXPR)
    return false;

  /* Build a list of candidates to process.  */
  candidates = sbitmap_alloc (length);
  sbitmap_zero (candidates);
  nr_candidates = 0;
  FOR_EACH_VEC_ELT (operand_entry_t, *ops, i, oe1)
    {
      enum tree_code dcode;
      gimple oe1def;

      if (TREE_CODE (oe1->op) != SSA_NAME)
        continue;
      oe1def = SSA_NAME_DEF_STMT (oe1->op);
      if (!is_gimple_assign (oe1def))
        continue;
      dcode = gimple_assign_rhs_code (oe1def);
      if ((dcode != MULT_EXPR
           && dcode != RDIV_EXPR)
          || !is_reassociable_op (oe1def, dcode, loop))
        continue;

      SET_BIT (candidates, i);
      nr_candidates++;
    }

  if (nr_candidates < 2)
    {
      sbitmap_free (candidates);
      return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "searching for un-distribute opportunities ");
      print_generic_expr (dump_file,
        VEC_index (operand_entry_t, *ops,
                   sbitmap_first_set_bit (candidates))->op, 0);
      fprintf (dump_file, " %d\n", nr_candidates);
    }

  /* Build linearized sub-operand lists and the counting table.  */
  cvec = NULL;
  ctable = htab_create (15, oecount_hash, oecount_eq, NULL);
  subops = XCNEWVEC (VEC (operand_entry_t, heap) *,
                     VEC_length (operand_entry_t, *ops));
  EXECUTE_IF_SET_IN_SBITMAP (candidates, 0, i, sbi0)
    {
      gimple oedef;
      enum tree_code oecode;
      unsigned j;

      oedef = SSA_NAME_DEF_STMT (VEC_index (operand_entry_t, *ops, i)->op);
      oecode = gimple_assign_rhs_code (oedef);
      linearize_expr_tree (&subops[i], oedef,
                           associative_tree_code (oecode), false);

      FOR_EACH_VEC_ELT (operand_entry_t, subops[i], j, oe1)
        {
          oecount c;
          void **slot;
          size_t idx;
          c.oecode = oecode;
          c.cnt = 1;
          c.id = next_oecount_id++;
          c.op = oe1->op;
          VEC_safe_push (oecount, heap, cvec, &c);
          idx = VEC_length (oecount, cvec) + 41;
          slot = htab_find_slot (ctable, (void *)idx, INSERT);
          if (!*slot)
            {
              *slot = (void *)idx;
            }
          else
            {
              VEC_pop (oecount, cvec);
              VEC_index (oecount, cvec, (size_t)*slot - 42)->cnt++;
            }
        }
    }
  htab_delete (ctable);

  /* Sort the counting table.  */
  VEC_qsort (oecount, cvec, oecount_cmp);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      oecount *c;
      fprintf (dump_file, "Candidates:\n");
      FOR_EACH_VEC_ELT (oecount, cvec, j, c)
        {
          fprintf (dump_file, "  %u %s: ", c->cnt,
                   c->oecode == MULT_EXPR
                   ? "*" : c->oecode == RDIV_EXPR ? "/" : "?");
          print_generic_expr (dump_file, c->op, 0);
          fprintf (dump_file, "\n");
        }
    }

  /* Process the (operand, code) pairs in order of most occurrence.  */
  candidates2 = sbitmap_alloc (length);
  while (!VEC_empty (oecount, cvec))
    {
      oecount *c = VEC_last (oecount, cvec);
      if (c->cnt < 2)
        break;

      /* Now collect the operands in the outer chain that contain
         the common operand in their inner chain.  */
      sbitmap_zero (candidates2);
      nr_candidates2 = 0;
      EXECUTE_IF_SET_IN_SBITMAP (candidates, 0, i, sbi0)
        {
          gimple oedef;
          enum tree_code oecode;
          unsigned j;
          tree op = VEC_index (operand_entry_t, *ops, i)->op;

          /* If we undistributed in this chain already this may be
             a constant.  */
          if (TREE_CODE (op) != SSA_NAME)
            continue;

          oedef = SSA_NAME_DEF_STMT (op);
          oecode = gimple_assign_rhs_code (oedef);
          if (oecode != c->oecode)
            continue;

          FOR_EACH_VEC_ELT (operand_entry_t, subops[i], j, oe1)
            {
              if (oe1->op == c->op)
                {
                  SET_BIT (candidates2, i);
                  ++nr_candidates2;
                  break;
                }
            }
        }

      if (nr_candidates2 >= 2)
        {
          operand_entry_t oe1, oe2;
          tree tmpvar;
          gimple prod;
          int first = sbitmap_first_set_bit (candidates2);

          /* Build the new addition chain.  */
          oe1 = VEC_index (operand_entry_t, *ops, first);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Building (");
              print_generic_expr (dump_file, oe1->op, 0);
            }
          tmpvar = create_tmp_reg (TREE_TYPE (oe1->op), NULL);
          add_referenced_var (tmpvar);
          zero_one_operation (&oe1->op, c->oecode, c->op);
          EXECUTE_IF_SET_IN_SBITMAP (candidates2, first+1, i, sbi0)
            {
              gimple sum;
              oe2 = VEC_index (operand_entry_t, *ops, i);
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, " + ");
                  print_generic_expr (dump_file, oe2->op, 0);
                }
              zero_one_operation (&oe2->op, c->oecode, c->op);
              sum = build_and_add_sum (tmpvar, oe1->op, oe2->op, opcode);
              oe2->op = fold_convert (TREE_TYPE (oe2->op), integer_zero_node);
              oe2->rank = 0;
              oe1->op = gimple_get_lhs (sum);
            }

          /* Apply the multiplication/division.  */
          prod = build_and_add_sum (tmpvar, oe1->op, c->op, c->oecode);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, ") %s ", c->oecode == MULT_EXPR ? "*" : "/");
              print_generic_expr (dump_file, c->op, 0);
              fprintf (dump_file, "\n");
            }

          /* Record it in the addition chain and disable further
             undistribution with this op.  */
          oe1->op = gimple_assign_lhs (prod);
          oe1->rank = get_rank (oe1->op);
          VEC_free (operand_entry_t, heap, subops[first]);

          changed = true;
        }

      VEC_pop (oecount, cvec);
    }

  for (i = 0; i < VEC_length (operand_entry_t, *ops); ++i)
    VEC_free (operand_entry_t, heap, subops[i]);
  free (subops);
  VEC_free (oecount, heap, cvec);
  sbitmap_free (candidates);
  sbitmap_free (candidates2);

  return changed;
}
/* If OPCODE is BIT_IOR_EXPR or BIT_AND_EXPR and CURR is a comparison
   expression, examine the other OPS to see if any of them are comparisons
   of the same values, which we may be able to combine or eliminate.
   For example, we can rewrite (a < b) | (a == b) as (a <= b).  */

static bool
eliminate_redundant_comparison (enum tree_code opcode,
                                VEC (operand_entry_t, heap) **ops,
                                unsigned int currindex,
                                operand_entry_t curr)
{
  tree op1, op2;
  enum tree_code lcode, rcode;
  gimple def1, def2;
  int i;
  operand_entry_t oe;

  if (opcode != BIT_IOR_EXPR && opcode != BIT_AND_EXPR)
    return false;

  /* Check that CURR is a comparison.  */
  if (TREE_CODE (curr->op) != SSA_NAME)
    return false;
  def1 = SSA_NAME_DEF_STMT (curr->op);
  if (!is_gimple_assign (def1))
    return false;
  lcode = gimple_assign_rhs_code (def1);
  if (TREE_CODE_CLASS (lcode) != tcc_comparison)
    return false;
  op1 = gimple_assign_rhs1 (def1);
  op2 = gimple_assign_rhs2 (def1);

  /* Now look for a similar comparison in the remaining OPS.  */
  for (i = currindex + 1;
       VEC_iterate (operand_entry_t, *ops, i, oe);
       i++)
    {
      tree t;

      if (TREE_CODE (oe->op) != SSA_NAME)
        continue;
      def2 = SSA_NAME_DEF_STMT (oe->op);
      if (!is_gimple_assign (def2))
        continue;
      rcode = gimple_assign_rhs_code (def2);
      if (TREE_CODE_CLASS (rcode) != tcc_comparison)
        continue;

      /* If we got here, we have a match.  See if we can combine the
         two comparisons.  */
      if (opcode == BIT_IOR_EXPR)
        t = maybe_fold_or_comparisons (lcode, op1, op2,
                                       rcode, gimple_assign_rhs1 (def2),
                                       gimple_assign_rhs2 (def2));
      else
        t = maybe_fold_and_comparisons (lcode, op1, op2,
                                        rcode, gimple_assign_rhs1 (def2),
                                        gimple_assign_rhs2 (def2));
      if (!t)
        continue;

      /* maybe_fold_and_comparisons and maybe_fold_or_comparisons
         always give us a boolean_type_node value back.  If the original
         BIT_AND_EXPR or BIT_IOR_EXPR was of a wider integer type,
         we need to convert.  */
      if (!useless_type_conversion_p (TREE_TYPE (curr->op), TREE_TYPE (t)))
        t = fold_convert (TREE_TYPE (curr->op), t);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Equivalence: ");
          print_generic_expr (dump_file, curr->op, 0);
          fprintf (dump_file, " %s ", op_symbol_code (opcode));
          print_generic_expr (dump_file, oe->op, 0);
          fprintf (dump_file, " -> ");
          print_generic_expr (dump_file, t, 0);
          fprintf (dump_file, "\n");
        }

      /* Now we can delete oe, as it has been subsumed by the new combined
         expression t.  */
      VEC_ordered_remove (operand_entry_t, *ops, i);
      reassociate_stats.ops_eliminated ++;

      /* If t is the same as curr->op, we're done.  Otherwise we must
         replace curr->op with t.  Special case is if we got a constant
         back, in which case we add it to the end instead of in place of
         the current entry.  */
      if (TREE_CODE (t) == INTEGER_CST)
        {
          VEC_ordered_remove (operand_entry_t, *ops, currindex);
          add_to_ops_vec (ops, t);
        }
      else if (!operand_equal_p (t, curr->op, 0))
        {
          tree tmpvar;
          gimple sum;
          enum tree_code subcode;
          tree newop1;
          tree newop2;
          gcc_assert (COMPARISON_CLASS_P (t));
          tmpvar = create_tmp_var (TREE_TYPE (t), NULL);
          add_referenced_var (tmpvar);
          extract_ops_from_tree (t, &subcode, &newop1, &newop2);
          STRIP_USELESS_TYPE_CONVERSION (newop1);
          STRIP_USELESS_TYPE_CONVERSION (newop2);
          gcc_checking_assert (is_gimple_val (newop1)
                               && is_gimple_val (newop2));
          sum = build_and_add_sum (tmpvar, newop1, newop2, subcode);
          curr->op = gimple_get_lhs (sum);
        }
      return true;
    }

  return false;
}
/* Perform various identities and other optimizations on the list of
   operand entries, stored in OPS.  The tree code for the binary
   operation between all the operands is OPCODE.  */

static void
optimize_ops_list (enum tree_code opcode,
                   VEC (operand_entry_t, heap) **ops)
{
  unsigned int length = VEC_length (operand_entry_t, *ops);
  unsigned int i;
  operand_entry_t oe;
  operand_entry_t oelast = NULL;
  bool iterate = false;

  if (length == 1)
    return;

  oelast = VEC_last (operand_entry_t, *ops);

  /* If the last two are constants, pop the constants off, merge them
     and try the next two.  */
  if (oelast->rank == 0 && is_gimple_min_invariant (oelast->op))
    {
      operand_entry_t oelm1 = VEC_index (operand_entry_t, *ops, length - 2);

      if (oelm1->rank == 0
          && is_gimple_min_invariant (oelm1->op)
          && useless_type_conversion_p (TREE_TYPE (oelm1->op),
                                        TREE_TYPE (oelast->op)))
        {
          tree folded = fold_binary (opcode, TREE_TYPE (oelm1->op),
                                     oelm1->op, oelast->op);

          if (folded && is_gimple_min_invariant (folded))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Merging constants\n");

              VEC_pop (operand_entry_t, *ops);
              VEC_pop (operand_entry_t, *ops);

              add_to_ops_vec (ops, folded);
              reassociate_stats.constants_eliminated++;

              optimize_ops_list (opcode, ops);
              return;
            }
        }
    }

  eliminate_using_constants (opcode, ops);
  oelast = NULL;

  for (i = 0; VEC_iterate (operand_entry_t, *ops, i, oe);)
    {
      bool done = false;

      if (eliminate_not_pairs (opcode, ops, i, oe))
        return;
      if (eliminate_duplicate_pair (opcode, ops, &done, i, oe, oelast)
          || (!done && eliminate_plus_minus_pair (opcode, ops, i, oe))
          || (!done && eliminate_redundant_comparison (opcode, ops, i, oe)))
        {
          if (done)
            return;
          iterate = true;
          oelast = NULL;
          continue;
        }
      oelast = oe;
      i++;
    }

  length = VEC_length (operand_entry_t, *ops);
  oelast = VEC_last (operand_entry_t, *ops);

  if (iterate)
    optimize_ops_list (opcode, ops);
}
/* Return true if OPERAND is defined by a PHI node which uses the LHS
   of STMT in its operands.  This is also known as a "destructive
   update" operation.  */

static bool
is_phi_for_stmt (gimple stmt, tree operand)
{
  gimple def_stmt;
  tree lhs;
  use_operand_p arg_p;
  ssa_op_iter i;

  if (TREE_CODE (operand) != SSA_NAME)
    return false;

  lhs = gimple_assign_lhs (stmt);

  def_stmt = SSA_NAME_DEF_STMT (operand);
  if (gimple_code (def_stmt) != GIMPLE_PHI)
    return false;

  FOR_EACH_PHI_ARG (arg_p, def_stmt, i, SSA_OP_USE)
    if (lhs == USE_FROM_PTR (arg_p))
      return true;
  return false;
}
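/* Editorial illustration (not part of the original pass): the kind of
   source that produces a destructive update, guarded out of the
   build.  The add feeding the loop PHI is what is_phi_for_stmt
   detects, so the rewrite keeps the accumulator in place for vector
   sum reduction recognition.  The function name is invented for the
   example.  */
#if 0
static double
sum_array (const double *a, int n)
{
  double sum = 0.0;  /* In SSA form 'sum' becomes a PHI merging 0.0
                        and the previous iteration's value.  */
  int i;
  for (i = 0; i < n; i++)
    sum = sum + a[i];
  return sum;
}
#endif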
/* Remove def stmt of VAR if VAR has zero uses and recurse
   on rhs1 operand if so.  */

static void
remove_visited_stmt_chain (tree var)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  while (1)
    {
      if (TREE_CODE (var) != SSA_NAME || !has_zero_uses (var))
        return;
      stmt = SSA_NAME_DEF_STMT (var);
      if (!is_gimple_assign (stmt)
          || !gimple_visited_p (stmt))
        return;
      var = gimple_assign_rhs1 (stmt);
      gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      release_defs (stmt);
    }
}
/* Recursively rewrite our linearized statements so that the operators
   match those in OPS[OPINDEX], putting the computation in rank
   order.  */

static void
rewrite_expr_tree (gimple stmt, unsigned int opindex,
                   VEC(operand_entry_t, heap) * ops, bool moved)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  operand_entry_t oe;

  /* If we have three operands left, then we want to make sure the ones
     that get the double binary op are the ones with the same rank.

     The alternative we try is to see if this is a destructive
     update style statement, which is like:
     b = phi (a, ...)
     a = c + b;
     In that case, we want to use the destructive update form to
     expose the possible vectorizer sum reduction opportunity.
     In that case, the third operand will be the phi node.

     We could, of course, try to be better as noted above, and do a
     lot of work to try to find these opportunities in >3 operand
     cases, but it is unlikely to be worth it.  */
  if (opindex + 3 == VEC_length (operand_entry_t, ops))
    {
      operand_entry_t oe1, oe2, oe3;

      oe1 = VEC_index (operand_entry_t, ops, opindex);
      oe2 = VEC_index (operand_entry_t, ops, opindex + 1);
      oe3 = VEC_index (operand_entry_t, ops, opindex + 2);

      if ((oe1->rank == oe2->rank
           && oe2->rank != oe3->rank)
          || (is_phi_for_stmt (stmt, oe3->op)
              && !is_phi_for_stmt (stmt, oe1->op)
              && !is_phi_for_stmt (stmt, oe2->op)))
        {
          struct operand_entry temp = *oe3;
          oe3->op = oe1->op;
          oe3->rank = oe1->rank;
          oe1->op = temp.op;
          oe1->rank = temp.rank;
        }
      else if ((oe1->rank == oe3->rank
                && oe2->rank != oe3->rank)
               || (is_phi_for_stmt (stmt, oe2->op)
                   && !is_phi_for_stmt (stmt, oe1->op)
                   && !is_phi_for_stmt (stmt, oe3->op)))
        {
          struct operand_entry temp = *oe2;
          oe2->op = oe1->op;
          oe2->rank = oe1->rank;
          oe1->op = temp.op;
          oe1->rank = temp.rank;
        }
    }

  /* The final recursion case for this function is that you have
     exactly two operations left.
     If we had exactly one op in the entire list to start with, we
     would have never called this function, and the tail recursion
     rewrites them one at a time.  */
  if (opindex + 2 == VEC_length (operand_entry_t, ops))
    {
      operand_entry_t oe1, oe2;

      oe1 = VEC_index (operand_entry_t, ops, opindex);
      oe2 = VEC_index (operand_entry_t, ops, opindex + 1);

      if (rhs1 != oe1->op || rhs2 != oe2->op)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Transforming ");
              print_gimple_stmt (dump_file, stmt, 0, 0);
            }

          gimple_assign_set_rhs1 (stmt, oe1->op);
          gimple_assign_set_rhs2 (stmt, oe2->op);
          update_stmt (stmt);
          if (rhs1 != oe1->op && rhs1 != oe2->op)
            remove_visited_stmt_chain (rhs1);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " into ");
              print_gimple_stmt (dump_file, stmt, 0, 0);
            }
        }
      return;
    }

  /* If we hit here, we should have 3 or more ops left.  */
  gcc_assert (opindex + 2 < VEC_length (operand_entry_t, ops));

  /* Rewrite the next operator.  */
  oe = VEC_index (operand_entry_t, ops, opindex);

  if (oe->op != rhs2)
    {
      if (!moved)
        {
          gimple_stmt_iterator gsinow, gsirhs1;
          gimple stmt1 = stmt, stmt2;
          unsigned int count;

          gsinow = gsi_for_stmt (stmt);
          count = VEC_length (operand_entry_t, ops) - opindex - 2;
          while (count-- != 0)
            {
              stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt1));
              gsirhs1 = gsi_for_stmt (stmt2);
              gsi_move_before (&gsirhs1, &gsinow);
              gsi_prev (&gsinow);
              stmt1 = stmt2;
            }
          moved = true;
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Transforming ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }

      gimple_assign_set_rhs2 (stmt, oe->op);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, " into ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }
    }
  /* Recurse on the LHS of the binary operator, which is guaranteed to
     be the non-leaf side.  */
  rewrite_expr_tree (SSA_NAME_DEF_STMT (rhs1), opindex + 1, ops, moved);
}
/* Transform STMT, which is really (A +B) + (C + D) into the left
   linear form, ((A+B)+C)+D.
   Recurse on D if necessary.  */

static void
linearize_expr (gimple stmt)
{
  gimple_stmt_iterator gsinow, gsirhs;
  gimple binlhs = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
  gimple binrhs = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
  enum tree_code rhscode = gimple_assign_rhs_code (stmt);
  gimple newbinrhs = NULL;
  struct loop *loop = loop_containing_stmt (stmt);

  gcc_assert (is_reassociable_op (binlhs, rhscode, loop)
              && is_reassociable_op (binrhs, rhscode, loop));

  gsinow = gsi_for_stmt (stmt);
  gsirhs = gsi_for_stmt (binrhs);
  gsi_move_before (&gsirhs, &gsinow);

  gimple_assign_set_rhs2 (stmt, gimple_assign_rhs1 (binrhs));
  gimple_assign_set_rhs1 (binrhs, gimple_assign_lhs (binlhs));
  gimple_assign_set_rhs1 (stmt, gimple_assign_lhs (binrhs));

  if (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    newbinrhs = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Linearized: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  reassociate_stats.linearized++;
  update_stmt (binrhs);
  update_stmt (binlhs);
  update_stmt (stmt);

  gimple_set_visited (stmt, true);
  gimple_set_visited (binlhs, true);
  gimple_set_visited (binrhs, true);

  /* Tail recurse on the new rhs if it still needs reassociation.  */
  if (newbinrhs && is_reassociable_op (newbinrhs, rhscode, loop))
    /* ??? This should probably be linearize_expr (newbinrhs) but I don't
       want to change the algorithm while converting to tuples.  */
    linearize_expr (stmt);
}
/* If LHS has a single immediate use that is a GIMPLE_ASSIGN statement, return
   it.  Otherwise, return NULL.  */

static gimple
get_single_immediate_use (tree lhs)
{
  use_operand_p immuse;
  gimple immusestmt;

  if (TREE_CODE (lhs) == SSA_NAME
      && single_imm_use (lhs, &immuse, &immusestmt)
      && is_gimple_assign (immusestmt))
    return immusestmt;

  return NULL;
}
/* Recursively negate the value of TONEGATE, and return the SSA_NAME
   representing the negated value.  Insertions of any necessary
   instructions go before GSI.
   This function is recursive in that, if you hand it "a_5" as the
   value to negate, and a_5 is defined by "a_5 = b_3 + b_4", it will
   transform b_3 + b_4 into a_5 = -b_3 + -b_4.  */

static tree
negate_value (tree tonegate, gimple_stmt_iterator *gsi)
{
  gimple negatedefstmt = NULL;
  tree resultofnegate;

  /* If we are trying to negate a name, defined by an add, negate the
     add operands instead.  */
  if (TREE_CODE (tonegate) == SSA_NAME)
    negatedefstmt = SSA_NAME_DEF_STMT (tonegate);
  if (TREE_CODE (tonegate) == SSA_NAME
      && is_gimple_assign (negatedefstmt)
      && TREE_CODE (gimple_assign_lhs (negatedefstmt)) == SSA_NAME
      && has_single_use (gimple_assign_lhs (negatedefstmt))
      && gimple_assign_rhs_code (negatedefstmt) == PLUS_EXPR)
    {
      gimple_stmt_iterator gsi;
      tree rhs1 = gimple_assign_rhs1 (negatedefstmt);
      tree rhs2 = gimple_assign_rhs2 (negatedefstmt);

      gsi = gsi_for_stmt (negatedefstmt);
      rhs1 = negate_value (rhs1, &gsi);
      gimple_assign_set_rhs1 (negatedefstmt, rhs1);

      gsi = gsi_for_stmt (negatedefstmt);
      rhs2 = negate_value (rhs2, &gsi);
      gimple_assign_set_rhs2 (negatedefstmt, rhs2);

      update_stmt (negatedefstmt);
      return gimple_assign_lhs (negatedefstmt);
    }

  tonegate = fold_build1 (NEGATE_EXPR, TREE_TYPE (tonegate), tonegate);
  resultofnegate = force_gimple_operand_gsi (gsi, tonegate, true,
                                             NULL_TREE, true, GSI_SAME_STMT);
  return resultofnegate;
}
/* Return true if we should break up the subtract in STMT into an add
   with negate.  This is true when the subtract operands are really
   adds, or the subtract itself is used in an add expression.  In
   either case, breaking up the subtract into an add with negate
   exposes the adds to reassociation.  */

static bool
should_break_up_subtract (gimple stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree binlhs = gimple_assign_rhs1 (stmt);
  tree binrhs = gimple_assign_rhs2 (stmt);
  gimple immusestmt;
  struct loop *loop = loop_containing_stmt (stmt);

  if (TREE_CODE (binlhs) == SSA_NAME
      && is_reassociable_op (SSA_NAME_DEF_STMT (binlhs), PLUS_EXPR, loop))
    return true;

  if (TREE_CODE (binrhs) == SSA_NAME
      && is_reassociable_op (SSA_NAME_DEF_STMT (binrhs), PLUS_EXPR, loop))
    return true;

  if (TREE_CODE (lhs) == SSA_NAME
      && (immusestmt = get_single_immediate_use (lhs))
      && is_gimple_assign (immusestmt)
      && (gimple_assign_rhs_code (immusestmt) == PLUS_EXPR
          || gimple_assign_rhs_code (immusestmt) == MULT_EXPR))
    return true;
  return false;
}
1754 break_up_subtract (gimple stmt
, gimple_stmt_iterator
*gsip
)
1756 tree rhs1
= gimple_assign_rhs1 (stmt
);
1757 tree rhs2
= gimple_assign_rhs2 (stmt
);
1759 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1761 fprintf (dump_file
, "Breaking up subtract ");
1762 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1765 rhs2
= negate_value (rhs2
, gsip
);
1766 gimple_assign_set_rhs_with_ops (gsip
, PLUS_EXPR
, rhs1
, rhs2
);
/* Recursively linearize a binary expression that is the RHS of STMT.
   Place the operands of the expression tree in the vector named OPS.  */

static void
linearize_expr_tree (VEC(operand_entry_t, heap) **ops, gimple stmt,
                     bool is_associative, bool set_visited)
{
  tree binlhs = gimple_assign_rhs1 (stmt);
  tree binrhs = gimple_assign_rhs2 (stmt);
  gimple binlhsdef, binrhsdef;
  bool binlhsisreassoc = false;
  bool binrhsisreassoc = false;
  enum tree_code rhscode = gimple_assign_rhs_code (stmt);
  struct loop *loop = loop_containing_stmt (stmt);

  if (set_visited)
    gimple_set_visited (stmt, true);

  if (TREE_CODE (binlhs) == SSA_NAME)
    {
      binlhsdef = SSA_NAME_DEF_STMT (binlhs);
      binlhsisreassoc = is_reassociable_op (binlhsdef, rhscode, loop);
    }

  if (TREE_CODE (binrhs) == SSA_NAME)
    {
      binrhsdef = SSA_NAME_DEF_STMT (binrhs);
      binrhsisreassoc = is_reassociable_op (binrhsdef, rhscode, loop);
    }

  /* If the LHS is not reassociable, but the RHS is, we need to swap
     them.  If neither is reassociable, there is nothing we can do, so
     just put them in the ops vector.  If the LHS is reassociable,
     linearize it.  If both are reassociable, then linearize the RHS
     and the LHS.  */

  if (!binlhsisreassoc)
    {
      tree temp;

      /* If this is not an associative operation like division, give up.  */
      if (!is_associative)
        {
          add_to_ops_vec (ops, binrhs);
          return;
        }

      if (!binrhsisreassoc)
        {
          add_to_ops_vec (ops, binrhs);
          add_to_ops_vec (ops, binlhs);
          return;
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "swapping operands of ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }

      swap_tree_operands (stmt,
                          gimple_assign_rhs1_ptr (stmt),
                          gimple_assign_rhs2_ptr (stmt));
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, " is now ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }

      /* We want to make it so the lhs is always the reassociative op,
         so swap.  */
      temp = binlhs;
      binlhs = binrhs;
      binrhs = temp;
    }
  else if (binrhsisreassoc)
    {
      linearize_expr (stmt);
      binlhs = gimple_assign_rhs1 (stmt);
      binrhs = gimple_assign_rhs2 (stmt);
    }

  gcc_assert (TREE_CODE (binrhs) != SSA_NAME
              || !is_reassociable_op (SSA_NAME_DEF_STMT (binrhs),
                                      rhscode, loop));
  linearize_expr_tree (ops, SSA_NAME_DEF_STMT (binlhs),
                       is_associative, set_visited);
  add_to_ops_vec (ops, binrhs);
}
/* Repropagate the negates back into subtracts, since no other pass
   currently does it.  */

static void
repropagate_negates (void)
{
  unsigned int i = 0;
  tree negate;

  FOR_EACH_VEC_ELT (tree, plus_negates, i, negate)
    {
      gimple user = get_single_immediate_use (negate);

      if (!user || !is_gimple_assign (user))
        continue;

      /* The negate operand can be either operand of a PLUS_EXPR
         (it can be the LHS if the RHS is a constant for example).

         Force the negate operand to the RHS of the PLUS_EXPR, then
         transform the PLUS_EXPR into a MINUS_EXPR.  */
      if (gimple_assign_rhs_code (user) == PLUS_EXPR)
        {
          /* If the negated operand appears on the LHS of the
             PLUS_EXPR, exchange the operands of the PLUS_EXPR
             to force the negated operand to the RHS of the PLUS_EXPR.  */
          if (gimple_assign_rhs1 (user) == negate)
            {
              swap_tree_operands (user,
                                  gimple_assign_rhs1_ptr (user),
                                  gimple_assign_rhs2_ptr (user));
            }

          /* Now transform the PLUS_EXPR into a MINUS_EXPR and replace
             the RHS of the PLUS_EXPR with the operand of the NEGATE_EXPR.  */
          if (gimple_assign_rhs2 (user) == negate)
            {
              tree rhs1 = gimple_assign_rhs1 (user);
              tree rhs2 = get_unary_op (negate, NEGATE_EXPR);
              gimple_stmt_iterator gsi = gsi_for_stmt (user);
              gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, rhs1, rhs2);
              update_stmt (user);
            }
        }
      else if (gimple_assign_rhs_code (user) == MINUS_EXPR)
        {
          if (gimple_assign_rhs1 (user) == negate)
            {
              /* We have
                   x = -a
                   y = x - b
                 which we transform into
                   x = a + b
                   y = -x .
                 This pushes down the negate which we possibly can merge
                 into some other operation, hence insert it into the
                 plus_negates vector.  */
              gimple feed = SSA_NAME_DEF_STMT (negate);
              tree a = gimple_assign_rhs1 (feed);
              tree rhs2 = gimple_assign_rhs2 (user);
              gimple_stmt_iterator gsi = gsi_for_stmt (feed), gsi2;
              gimple_replace_lhs (feed, negate);
              gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, a, rhs2);
              update_stmt (gsi_stmt (gsi));
              gsi2 = gsi_for_stmt (user);
              gimple_assign_set_rhs_with_ops (&gsi2, NEGATE_EXPR, negate, NULL);
              update_stmt (gsi_stmt (gsi2));
              gsi_move_before (&gsi, &gsi2);
              VEC_safe_push (tree, heap, plus_negates,
                             gimple_assign_lhs (gsi_stmt (gsi2)));
            }
          else
            {
              /* Transform "x = -a; y = b - x" into "y = b + a", getting
                 rid of one operation.  */
              gimple feed = SSA_NAME_DEF_STMT (negate);
              tree a = gimple_assign_rhs1 (feed);
              tree rhs1 = gimple_assign_rhs1 (user);
              gimple_stmt_iterator gsi = gsi_for_stmt (user);
              gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, rhs1, a);
              update_stmt (gsi_stmt (gsi));
            }
        }
    }
}
/* Returns true if OP is of a type for which we can do reassociation.
   That is for integral or non-saturating fixed-point types, and for
   floating point type when associative-math is enabled.  */

static bool
can_reassociate_p (tree op)
{
  tree type = TREE_TYPE (op);
  if ((INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
      || NON_SAT_FIXED_POINT_TYPE_P (type)
      || (flag_associative_math && FLOAT_TYPE_P (type)))
    return true;
  return false;
}
/* Break up subtract operations in block BB.

   We do this top down because we don't know whether the subtract is
   part of a possible chain of reassociation except at the top.

   IE given
   d = f + g
   c = a + e
   b = c - d
   q = b - r
   k = t - q

   we want to break up k = t - q, but we won't until we've transformed q
   = b - r, which won't be broken up until we transform b = c - d.

   En passant, clear the GIMPLE visited flag on every statement.  */

static void
break_up_subtract_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;
  basic_block son;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple_set_visited (stmt, false);

      if (!is_gimple_assign (stmt)
          || !can_reassociate_p (gimple_assign_lhs (stmt)))
        continue;

      /* Look for simple gimple subtract operations.  */
      if (gimple_assign_rhs_code (stmt) == MINUS_EXPR)
        {
          if (!can_reassociate_p (gimple_assign_rhs1 (stmt))
              || !can_reassociate_p (gimple_assign_rhs2 (stmt)))
            continue;

          /* Check for a subtract used only in an addition.  If this
             is the case, transform it into add of a negate for better
             reassociation.  IE transform C = A-B into C = A + -B if C
             is only used in an addition.  */
          if (should_break_up_subtract (stmt))
            break_up_subtract (stmt, &gsi);
        }
      else if (gimple_assign_rhs_code (stmt) == NEGATE_EXPR
               && can_reassociate_p (gimple_assign_rhs1 (stmt)))
        VEC_safe_push (tree, heap, plus_negates, gimple_assign_lhs (stmt));
    }
  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    break_up_subtract_bb (son);
}
/* Reassociate expressions in basic block BB and its post-dominator as
   children.  */

static void
reassociate_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;
  basic_block son;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_assign (stmt))
        {
          tree lhs, rhs1, rhs2;
          enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

          /* If this is not a gimple binary expression, there is
             nothing for us to do with it.  */
          if (get_gimple_rhs_class (rhs_code) != GIMPLE_BINARY_RHS)
            continue;

          /* If this was part of an already processed statement,
             we don't need to touch it again.  */
          if (gimple_visited_p (stmt))
            {
              /* This statement might have become dead because of previous
                 reassociations.  */
              if (has_zero_uses (gimple_get_lhs (stmt)))
                {
                  gsi_remove (&gsi, true);
                  release_defs (stmt);
                  /* We might end up removing the last stmt above which
                     places the iterator to the end of the sequence.
                     Reset it to the last stmt in this case which might
                     be the end of the sequence as well if we removed
                     the last statement of the sequence.  In which case
                     we need to bail out.  */
                  if (gsi_end_p (gsi))
                    {
                      gsi = gsi_last_bb (bb);
                      if (gsi_end_p (gsi))
                        break;
                    }
                }
              continue;
            }

          lhs = gimple_assign_lhs (stmt);
          rhs1 = gimple_assign_rhs1 (stmt);
          rhs2 = gimple_assign_rhs2 (stmt);

          /* For non-bit or min/max operations we can't associate
             all types.  Verify that here.  */
          if (rhs_code != BIT_IOR_EXPR
              && rhs_code != BIT_AND_EXPR
              && rhs_code != BIT_XOR_EXPR
              && rhs_code != MIN_EXPR
              && rhs_code != MAX_EXPR
              && (!can_reassociate_p (lhs)
                  || !can_reassociate_p (rhs1)
                  || !can_reassociate_p (rhs2)))
            continue;

          if (associative_tree_code (rhs_code))
            {
              VEC(operand_entry_t, heap) *ops = NULL;

              /* There may be no immediate uses left by the time we
                 get here because we may have eliminated them all.  */
              if (TREE_CODE (lhs) == SSA_NAME && has_zero_uses (lhs))
                continue;

              gimple_set_visited (stmt, true);
              linearize_expr_tree (&ops, stmt, true, true);
              VEC_qsort (operand_entry_t, ops, sort_by_operand_rank);
              optimize_ops_list (rhs_code, &ops);
              if (undistribute_ops_list (rhs_code, &ops,
                                         loop_containing_stmt (stmt)))
                {
                  VEC_qsort (operand_entry_t, ops, sort_by_operand_rank);
                  optimize_ops_list (rhs_code, &ops);
                }

              if (VEC_length (operand_entry_t, ops) == 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Transforming ");
                      print_gimple_stmt (dump_file, stmt, 0, 0);
                    }

                  rhs1 = gimple_assign_rhs1 (stmt);
                  gimple_assign_set_rhs_from_tree (&gsi,
                                                   VEC_last (operand_entry_t,
                                                             ops)->op);
                  update_stmt (stmt);
                  remove_visited_stmt_chain (rhs1);

                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, " into ");
                      print_gimple_stmt (dump_file, stmt, 0, 0);
                    }
                }
              else
                rewrite_expr_tree (stmt, 0, ops, false);

              VEC_free (operand_entry_t, heap, ops);
            }
        }
    }
  for (son = first_dom_son (CDI_POST_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_POST_DOMINATORS, son))
    reassociate_bb (son);
}
void dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops);
void debug_ops_vector (VEC (operand_entry_t, heap) *ops);
/* Dump the operand entry vector OPS to FILE.  */

void
dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops)
{
  operand_entry_t oe;
  unsigned int i;

  FOR_EACH_VEC_ELT (operand_entry_t, ops, i, oe)
    {
      fprintf (file, "Op %d -> rank: %d, tree: ", i, oe->rank);
      print_generic_expr (file, oe->op, 0);
    }
}
/* Dump the operand entry vector OPS to STDERR.  */

void
debug_ops_vector (VEC (operand_entry_t, heap) *ops)
{
  dump_ops_vector (stderr, ops);
}
static void
do_reassoc (void)
{
  break_up_subtract_bb (ENTRY_BLOCK_PTR);
  reassociate_bb (EXIT_BLOCK_PTR);
}
/* Initialize the reassociation pass.  */

static void
init_reassoc (void)
{
  int i;
  long rank = 2;
  tree param;
  int *bbs = XNEWVEC (int, last_basic_block + 1);

  /* Find the loops, so that we can prevent moving calculations in
     them.  */
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  memset (&reassociate_stats, 0, sizeof (reassociate_stats));

  operand_entry_pool = create_alloc_pool ("operand entry pool",
                                          sizeof (struct operand_entry), 30);
  next_operand_entry_id = 0;

  /* Reverse RPO (Reverse Post Order) will give us something where
     deeper loops come later.  */
  pre_and_rev_post_order_compute (NULL, bbs, false);
  bb_rank = XCNEWVEC (long, last_basic_block + 1);
  operand_rank = pointer_map_create ();

  /* Give each argument a distinct rank.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
        {
          tree def = gimple_default_def (cfun, param);
          insert_operand_rank (def, ++rank);
        }
    }

  /* Give the chain decl a distinct rank.  */
  if (cfun->static_chain_decl != NULL)
    {
      tree def = gimple_default_def (cfun, cfun->static_chain_decl);
      if (def != NULL)
        insert_operand_rank (def, ++rank);
    }

  /* Set up rank for each BB  */
  for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
    bb_rank[bbs[i]] = ++rank << 16;

  free (bbs);
  calculate_dominance_info (CDI_POST_DOMINATORS);
  plus_negates = NULL;
}
/* Cleanup after the reassociation pass, and print stats if
   requested.  */

static void
fini_reassoc (void)
{
  statistics_counter_event (cfun, "Linearized",
                            reassociate_stats.linearized);
  statistics_counter_event (cfun, "Constants eliminated",
                            reassociate_stats.constants_eliminated);
  statistics_counter_event (cfun, "Ops eliminated",
                            reassociate_stats.ops_eliminated);
  statistics_counter_event (cfun, "Statements rewritten",
                            reassociate_stats.rewritten);

  pointer_map_destroy (operand_rank);
  free_alloc_pool (operand_entry_pool);
  free (bb_rank);
  VEC_free (tree, heap, plus_negates);
  free_dominance_info (CDI_POST_DOMINATORS);
  loop_optimizer_finalize ();
}
2252 execute_reassoc (void)
2257 repropagate_negates ();
2264 gate_tree_ssa_reassoc (void)
2266 return flag_tree_reassoc
!= 0;
struct gimple_opt_pass pass_reassoc =
{
 {
  GIMPLE_PASS,
  "reassoc",                            /* name */
  gate_tree_ssa_reassoc,                /* gate */
  execute_reassoc,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_REASSOC,                      /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
 }
};