1 /* Predictive commoning.
2 Copyright (C) 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 2, or (at your option) any
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 /* This file implements the predictive commoning optimization. Predictive
22 commoning can be viewed as CSE around a loop, and with some improvements,
23 as generalized strength reduction-- i.e., reusing values computed in
24 earlier iterations of a loop in the later ones. So far, the pass only
25 handles the most useful case, that is, reusing values of memory references.
26 If you think this is all just a special case of PRE, you are sort of right;
27 however, concentrating on loops is simpler, and makes it possible to
28 incorporate data dependence analysis to detect the opportunities, perform
29 loop unrolling to avoid copies together with renaming immediately,
30 and if needed, we could also take register pressure into account.
32 Let us demonstrate what is done on an example:
34 for (i = 0; i < 100; i++)
36 a[i+2] = a[i] + a[i+1];
42 1) We find data references in the loop, and split them to mutually
43 independent groups (i.e., we find components of a data dependence
44 graph). We ignore read-read dependences whose distance is not constant.
45 (TODO -- we could also ignore antidependences). In this example, we
46 find the following groups:
48 a[i]{read}, a[i+1]{read}, a[i+2]{write}
49 b[10]{read}, b[10]{write}
50 c[99 - i]{read}, c[i]{write}
51 d[i + 1]{read}, d[i]{write}
53 2) Inside each of the group, we verify several conditions:
54 a) all the references must differ in indices only, and the indices
55 must all have the same step
56 b) the references must dominate loop latch (and thus, they must be
57 ordered by dominance relation).
58 c) the distance of the indices must be a small multiple of the step
59 We are then able to compute the difference of the references (# of
60 iterations before they point to the same place as the first of them).
61 Also, in case there are writes in the loop, we split the groups into
62 chains whose head is the write whose values are used by the reads in
63 the same chain. The chains are then processed independently,
64 making the further transformations simpler. Also, the shorter chains
65 need the same number of registers, but may require lower unrolling
66 factor in order to get rid of the copies on the loop latch.
68 In our example, we get the following chains (the chain for c is invalid).
70 a[i]{read,+0}, a[i+1]{read,-1}, a[i+2]{write,-2}
71 b[10]{read,+0}, b[10]{write,+0}
72 d[i + 1]{read,+0}, d[i]{write,+1}
74 3) For each read, we determine the read or write whose value it reuses,
75 together with the distance of this reuse. I.e. we take the last
76 reference before it with distance 0, or the last of the references
77 with the smallest positive distance to the read. Then, we remove
78 the references that are not used in any of these chains, discard the
79 empty groups, and propagate all the links so that they point to the
80 single root reference of the chain (adjusting their distance
81 appropriately). Some extra care needs to be taken for references with
82 step 0. In our example (the numbers indicate the distance of the
85 a[i] --> (*) 2, a[i+1] --> (*) 1, a[i+2] (*)
86 b[10] --> (*) 1, b[10] (*)
88 4) The chains are combined together if possible. If the corresponding
89 elements of two chains are always combined together with the same
90 operator, we remember just the result of this combination, instead
91 of remembering the values separately. We may need to perform
92 reassociation to enable combining, for example
94 e[i] + f[i+1] + e[i+1] + f[i]
96 can be reassociated as
98 (e[i] + f[i]) + (e[i+1] + f[i+1])
100 and we can combine the chains for e and f into one chain.
102 5) For each root reference (end of the chain) R, let N be maximum distance
103 of a reference reusing its value. Variables R0 upto RN are created,
104 together with phi nodes that transfer values from R1 .. RN to
106 Initial values are loaded to R0..R(N-1) (in case not all references
107 must necessarily be accessed and they may trap, we may fail here;
108 TODO sometimes, the loads could be guarded by a check for the number
109 of iterations). Values loaded/stored in roots are also copied to
110 RN. Other reads are replaced with the appropriate variable Ri.
111 Everything is put to SSA form.
113 As a small improvement, if R0 is dead after the root (i.e., all uses of
114 the value with the maximum distance dominate the root), we can avoid
115 creating RN and use R0 instead of it.
117 In our example, we get (only the parts concerning a and b are shown):
118 for (i = 0; i < 100; i++)
130 6) Factor F for unrolling is determined as the smallest common multiple of
131 (N + 1) for each root reference (N for references for that we avoided
132 creating RN). If F and the loop is small enough, loop is unrolled F
133 times. The stores to RN (R0) in the copies of the loop body are
134 periodically replaced with R0, R1, ... (R1, R2, ...), so that they can
135 be coalesced and the copies can be eliminated.
137 TODO -- copy propagation and other optimizations may change the live
138 ranges of the temporary registers and prevent them from being coalesced;
139 this may increase the register pressure.
141 In our case, F = 2 and the (main loop of the) result is
143 for (i = 0; i < ...; i += 2)
160 TODO -- stores killing other stores can be taken into account, e.g.,
161 for (i = 0; i < n; i++)
171 for (i = 0; i < n; i++)
181 The interesting part is that this would generalize store motion; still, since
182 sm is performed elsewhere, it does not seem that important.
184 Predictive commoning can be generalized for arbitrary computations (not
185 just memory loads), and also nontrivial transfer functions (e.g., replacing
186 i * i with ii_last + 2 * i + 1), to generalize strength reduction. */
190 #include "coretypes.h"
195 #include "tree-flow.h"
197 #include "tree-data-ref.h"
198 #include "tree-scalar-evolution.h"
199 #include "tree-chrec.h"
201 #include "diagnostic.h"
202 #include "tree-pass.h"
203 #include "tree-affine.h"
204 #include "tree-inline.h"
206 /* The maximum number of iterations between the considered memory
209 #define MAX_DISTANCE (target_avail_regs < 16 ? 4 : 8)
211 /* Data references. */
215 /* The reference itself. */
216 struct data_reference
*ref
;
218 /* The statement in that the reference appears. */
221 /* Distance of the reference from the root of the chain (in number of
222 iterations of the loop). */
225 /* Number of iterations offset from the first reference in the component. */
228 /* Number of the reference in a component, in dominance ordering. */
231 /* True if the memory reference is always accessed when the loop is
233 unsigned always_accessed
: 1;
237 DEF_VEC_ALLOC_P (dref
, heap
);
239 /* Type of the chain of the references. */
243 /* The addresses of the references in the chain are constant. */
246 /* There are only loads in the chain. */
249 /* Root of the chain is store, the rest are loads. */
252 /* A combination of two chains. */
256 /* Chains of data references. */
260 /* Type of the chain. */
261 enum chain_type type
;
263 /* For combination chains, the operator and the two chains that are
264 combined, and the type of the result. */
265 enum tree_code
operator;
267 struct chain
*ch1
, *ch2
;
269 /* The references in the chain. */
270 VEC(dref
,heap
) *refs
;
272 /* The maximum distance of the reference in the chain from the root. */
275 /* The variables used to copy the value throughout iterations. */
276 VEC(tree
,heap
) *vars
;
278 /* Initializers for the variables. */
279 VEC(tree
,heap
) *inits
;
281 /* True if there is a use of a variable with the maximal distance
282 that comes after the root in the loop. */
283 unsigned has_max_use_after
: 1;
285 /* True if all the memory references in the chain are always accessed. */
286 unsigned all_always_accessed
: 1;
288 /* True if this chain was combined together with some other chain. */
289 unsigned combined
: 1;
293 DEF_VEC_ALLOC_P (chain_p
, heap
);
295 /* Describes the knowledge about the step of the memory references in
300 /* The step is zero. */
303 /* The step is nonzero. */
306 /* The step may or may not be nonzero. */
310 /* Components of the data dependence graph. */
314 /* The references in the component. */
315 VEC(dref
,heap
) *refs
;
317 /* What we know about the step of the references in the component. */
318 enum ref_step_type comp_step
;
320 /* Next component in the list. */
321 struct component
*next
;
324 /* Bitmap of ssa names defined by looparound phi nodes covered by chains. */
326 static bitmap looparound_phis
;
328 /* Cache used by tree_to_aff_combination_expand. */
330 static struct pointer_map_t
*name_expansions
;
332 /* Dumps data reference REF to FILE. */
334 extern void dump_dref (FILE *, dref
);
336 dump_dref (FILE *file
, dref ref
)
341 print_generic_expr (file
, DR_REF (ref
->ref
), TDF_SLIM
);
342 fprintf (file
, " (id %u%s)\n", ref
->pos
,
343 DR_IS_READ (ref
->ref
) ? "" : ", write");
345 fprintf (file
, " offset ");
346 dump_double_int (file
, ref
->offset
, false);
347 fprintf (file
, "\n");
349 fprintf (file
, " distance %u\n", ref
->distance
);
353 if (TREE_CODE (ref
->stmt
) == PHI_NODE
)
354 fprintf (file
, " looparound ref\n");
356 fprintf (file
, " combination ref\n");
357 fprintf (file
, " in statement ");
358 print_generic_expr (file
, ref
->stmt
, TDF_SLIM
);
359 fprintf (file
, "\n");
360 fprintf (file
, " distance %u\n", ref
->distance
);
365 /* Dumps CHAIN to FILE. */
367 extern void dump_chain (FILE *, chain_p
);
369 dump_chain (FILE *file
, chain_p chain
)
372 const char *chain_type
;
379 chain_type
= "Load motion";
383 chain_type
= "Loads-only";
387 chain_type
= "Store-loads";
391 chain_type
= "Combination";
398 fprintf (file
, "%s chain %p%s\n", chain_type
, (void *) chain
,
399 chain
->combined
? " (combined)" : "");
400 if (chain
->type
!= CT_INVARIANT
)
401 fprintf (file
, " max distance %u%s\n", chain
->length
,
402 chain
->has_max_use_after
? "" : ", may reuse first");
404 if (chain
->type
== CT_COMBINATION
)
406 fprintf (file
, " equal to %p %s %p in type ",
407 (void *) chain
->ch1
, op_symbol_code (chain
->operator),
408 (void *) chain
->ch2
);
409 print_generic_expr (file
, chain
->rslt_type
, TDF_SLIM
);
410 fprintf (file
, "\n");
415 fprintf (file
, " vars");
416 for (i
= 0; VEC_iterate (tree
, chain
->vars
, i
, var
); i
++)
419 print_generic_expr (file
, var
, TDF_SLIM
);
421 fprintf (file
, "\n");
426 fprintf (file
, " inits");
427 for (i
= 0; VEC_iterate (tree
, chain
->inits
, i
, var
); i
++)
430 print_generic_expr (file
, var
, TDF_SLIM
);
432 fprintf (file
, "\n");
435 fprintf (file
, " references:\n");
436 for (i
= 0; VEC_iterate (dref
, chain
->refs
, i
, a
); i
++)
439 fprintf (file
, "\n");
442 /* Dumps CHAINS to FILE. */
444 extern void dump_chains (FILE *, VEC (chain_p
, heap
) *);
446 dump_chains (FILE *file
, VEC (chain_p
, heap
) *chains
)
451 for (i
= 0; VEC_iterate (chain_p
, chains
, i
, chain
); i
++)
452 dump_chain (file
, chain
);
455 /* Dumps COMP to FILE. */
457 extern void dump_component (FILE *, struct component
*);
459 dump_component (FILE *file
, struct component
*comp
)
464 fprintf (file
, "Component%s:\n",
465 comp
->comp_step
== RS_INVARIANT
? " (invariant)" : "");
466 for (i
= 0; VEC_iterate (dref
, comp
->refs
, i
, a
); i
++)
468 fprintf (file
, "\n");
471 /* Dumps COMPS to FILE. */
473 extern void dump_components (FILE *, struct component
*);
475 dump_components (FILE *file
, struct component
*comps
)
477 struct component
*comp
;
479 for (comp
= comps
; comp
; comp
= comp
->next
)
480 dump_component (file
, comp
);
483 /* Frees a chain CHAIN. */
486 release_chain (chain_p chain
)
494 for (i
= 0; VEC_iterate (dref
, chain
->refs
, i
, ref
); i
++)
497 VEC_free (dref
, heap
, chain
->refs
);
498 VEC_free (tree
, heap
, chain
->vars
);
499 VEC_free (tree
, heap
, chain
->inits
);
507 release_chains (VEC (chain_p
, heap
) *chains
)
512 for (i
= 0; VEC_iterate (chain_p
, chains
, i
, chain
); i
++)
513 release_chain (chain
);
514 VEC_free (chain_p
, heap
, chains
);
517 /* Frees a component COMP. */
520 release_component (struct component
*comp
)
522 VEC_free (dref
, heap
, comp
->refs
);
526 /* Frees list of components COMPS. */
529 release_components (struct component
*comps
)
531 struct component
*act
, *next
;
533 for (act
= comps
; act
; act
= next
)
536 release_component (act
);
/* Finds a root of tree given by FATHERS containing A, and performs path
   shortening.  Returns the index of the root; as a side effect, every
   node on the path from A to the root is re-parented directly to the
   root, speeding up future queries.  */

static unsigned
component_of (unsigned fathers[], unsigned a)
{
  unsigned root, n;

  /* Walk the parent links up to the root (a node that is its own
     father).  */
  for (root = a; root != fathers[root]; root = fathers[root])
    continue;

  /* Path shortening: point everything on the traversed path at ROOT.  */
  for (; a != root; a = n)
    {
      n = fathers[a];
      fathers[a] = root;
    }

  return root;
}
/* Join operation for DFU.  FATHERS gives the tree, SIZES are sizes of the
   components, A and B are components to merge.  Uses union-by-size: the
   smaller component is attached under the root of the larger one, which
   keeps the trees shallow.  */

static void
merge_comps (unsigned fathers[], unsigned sizes[], unsigned a, unsigned b)
{
  unsigned ca = component_of (fathers, a);
  unsigned cb = component_of (fathers, b);

  /* Already in the same component -- nothing to do.  */
  if (ca == cb)
    return;

  if (sizes[ca] < sizes[cb])
    {
      sizes[cb] += sizes[ca];
      fathers[ca] = cb;
    }
  else
    {
      sizes[ca] += sizes[cb];
      fathers[cb] = ca;
    }
}
584 /* Returns true if A is a reference that is suitable for predictive commoning
585 in the innermost loop that contains it. REF_STEP is set according to the
586 step of the reference A. */
589 suitable_reference_p (struct data_reference
*a
, enum ref_step_type
*ref_step
)
591 tree ref
= DR_REF (a
), step
= DR_STEP (a
);
594 || !is_gimple_reg_type (TREE_TYPE (ref
)))
597 if (integer_zerop (step
))
598 *ref_step
= RS_INVARIANT
;
599 else if (integer_nonzerop (step
))
600 *ref_step
= RS_NONZERO
;
607 /* Stores DR_OFFSET (DR) + DR_INIT (DR) to OFFSET. */
610 aff_combination_dr_offset (struct data_reference
*dr
, aff_tree
*offset
)
614 tree_to_aff_combination_expand (DR_OFFSET (dr
), sizetype
, offset
,
616 aff_combination_const (&delta
, sizetype
, tree_to_double_int (DR_INIT (dr
)));
617 aff_combination_add (offset
, &delta
);
620 /* Determines number of iterations of the innermost enclosing loop before B
621 refers to exactly the same location as A and stores it to OFF. If A and
622 B do not have the same step, they never meet, or anything else fails,
623 returns false, otherwise returns true. Both A and B are assumed to
624 satisfy suitable_reference_p. */
627 determine_offset (struct data_reference
*a
, struct data_reference
*b
,
630 aff_tree diff
, baseb
, step
;
632 /* Check whether the base address and the step of both references is the
634 if (!operand_equal_p (DR_STEP (a
), DR_STEP (b
), 0)
635 || !operand_equal_p (DR_BASE_ADDRESS (a
), DR_BASE_ADDRESS (b
), 0))
638 if (integer_zerop (DR_STEP (a
)))
640 /* If the references have loop invariant address, check that they access
641 exactly the same location. */
642 *off
= double_int_zero
;
643 return (operand_equal_p (DR_OFFSET (a
), DR_OFFSET (b
), 0)
644 && operand_equal_p (DR_INIT (a
), DR_INIT (b
), 0));
647 /* Compare the offsets of the addresses, and check whether the difference
648 is a multiple of step. */
649 aff_combination_dr_offset (a
, &diff
);
650 aff_combination_dr_offset (b
, &baseb
);
651 aff_combination_scale (&baseb
, double_int_minus_one
);
652 aff_combination_add (&diff
, &baseb
);
654 tree_to_aff_combination_expand (DR_STEP (a
), sizetype
,
655 &step
, &name_expansions
);
656 return aff_combination_constant_multiple_p (&diff
, &step
, off
);
659 /* Returns the last basic block in LOOP for that we are sure that
660 it is executed whenever the loop is entered. */
663 last_always_executed_block (struct loop
*loop
)
666 VEC (edge
, heap
) *exits
= get_loop_exit_edges (loop
);
668 basic_block last
= loop
->latch
;
670 for (i
= 0; VEC_iterate (edge
, exits
, i
, ex
); i
++)
671 last
= nearest_common_dominator (CDI_DOMINATORS
, last
, ex
->src
);
672 VEC_free (edge
, heap
, exits
);
677 /* Splits dependence graph on DATAREFS described by DEPENDS to components. */
679 static struct component
*
680 split_data_refs_to_components (struct loop
*loop
,
681 VEC (data_reference_p
, heap
) *datarefs
,
682 VEC (ddr_p
, heap
) *depends
)
684 unsigned i
, n
= VEC_length (data_reference_p
, datarefs
);
685 unsigned ca
, ia
, ib
, bad
;
686 unsigned *comp_father
= XNEWVEC (unsigned, n
+ 1);
687 unsigned *comp_size
= XNEWVEC (unsigned, n
+ 1);
688 struct component
**comps
;
689 struct data_reference
*dr
, *dra
, *drb
;
690 struct data_dependence_relation
*ddr
;
691 struct component
*comp_list
= NULL
, *comp
;
693 basic_block last_always_executed
= last_always_executed_block (loop
);
695 for (i
= 0; VEC_iterate (data_reference_p
, datarefs
, i
, dr
); i
++)
699 /* A fake reference for call or asm_expr that may clobber memory;
703 dr
->aux
= (void *) (size_t) i
;
708 /* A component reserved for the "bad" data references. */
712 for (i
= 0; VEC_iterate (data_reference_p
, datarefs
, i
, dr
); i
++)
714 enum ref_step_type dummy
;
716 if (!suitable_reference_p (dr
, &dummy
))
718 ia
= (unsigned) (size_t) dr
->aux
;
719 merge_comps (comp_father
, comp_size
, n
, ia
);
723 for (i
= 0; VEC_iterate (ddr_p
, depends
, i
, ddr
); i
++)
725 double_int dummy_off
;
727 if (DDR_ARE_DEPENDENT (ddr
) == chrec_known
)
732 ia
= component_of (comp_father
, (unsigned) (size_t) dra
->aux
);
733 ib
= component_of (comp_father
, (unsigned) (size_t) drb
->aux
);
737 bad
= component_of (comp_father
, n
);
739 /* If both A and B are reads, we may ignore unsuitable dependences. */
740 if (DR_IS_READ (dra
) && DR_IS_READ (drb
)
741 && (ia
== bad
|| ib
== bad
742 || !determine_offset (dra
, drb
, &dummy_off
)))
745 merge_comps (comp_father
, comp_size
, ia
, ib
);
748 comps
= XCNEWVEC (struct component
*, n
);
749 bad
= component_of (comp_father
, n
);
750 for (i
= 0; VEC_iterate (data_reference_p
, datarefs
, i
, dr
); i
++)
752 ia
= (unsigned) (size_t) dr
->aux
;
753 ca
= component_of (comp_father
, ia
);
760 comp
= XCNEW (struct component
);
761 comp
->refs
= VEC_alloc (dref
, heap
, comp_size
[ca
]);
765 dataref
= XCNEW (struct dref
);
767 dataref
->stmt
= DR_STMT (dr
);
768 dataref
->offset
= double_int_zero
;
769 dataref
->distance
= 0;
771 dataref
->always_accessed
772 = dominated_by_p (CDI_DOMINATORS
, last_always_executed
,
773 bb_for_stmt (dataref
->stmt
));
774 dataref
->pos
= VEC_length (dref
, comp
->refs
);
775 VEC_quick_push (dref
, comp
->refs
, dataref
);
778 for (i
= 0; i
< n
; i
++)
783 comp
->next
= comp_list
;
795 /* Returns true if the component COMP satisfies the conditions
796 described in 2) at the beginning of this file. LOOP is the current
800 suitable_component_p (struct loop
*loop
, struct component
*comp
)
804 basic_block ba
, bp
= loop
->header
;
805 bool ok
, has_write
= false;
807 for (i
= 0; VEC_iterate (dref
, comp
->refs
, i
, a
); i
++)
809 ba
= bb_for_stmt (a
->stmt
);
811 if (!just_once_each_iteration_p (loop
, ba
))
814 gcc_assert (dominated_by_p (CDI_DOMINATORS
, ba
, bp
));
817 if (!DR_IS_READ (a
->ref
))
821 first
= VEC_index (dref
, comp
->refs
, 0);
822 ok
= suitable_reference_p (first
->ref
, &comp
->comp_step
);
824 first
->offset
= double_int_zero
;
826 for (i
= 1; VEC_iterate (dref
, comp
->refs
, i
, a
); i
++)
828 if (!determine_offset (first
->ref
, a
->ref
, &a
->offset
))
831 #ifdef ENABLE_CHECKING
833 enum ref_step_type a_step
;
834 ok
= suitable_reference_p (a
->ref
, &a_step
);
835 gcc_assert (ok
&& a_step
== comp
->comp_step
);
840 /* If there is a write inside the component, we must know whether the
841 step is nonzero or not -- we would not otherwise be able to recognize
842 whether the value accessed by reads comes from the OFFSET-th iteration
843 or the previous one. */
844 if (has_write
&& comp
->comp_step
== RS_ANY
)
850 /* Check the conditions on references inside each of components COMPS,
851 and remove the unsuitable components from the list. The new list
852 of components is returned. The conditions are described in 2) at
853 the beginning of this file. LOOP is the current loop. */
855 static struct component
*
856 filter_suitable_components (struct loop
*loop
, struct component
*comps
)
858 struct component
**comp
, *act
;
860 for (comp
= &comps
; *comp
; )
863 if (suitable_component_p (loop
, act
))
868 release_component (act
);
875 /* Compares two drefs A and B by their offset and position. Callback for
879 order_drefs (const void *a
, const void *b
)
883 int offcmp
= double_int_scmp ((*da
)->offset
, (*db
)->offset
);
888 return (*da
)->pos
- (*db
)->pos
;
891 /* Returns root of the CHAIN. */
894 get_chain_root (chain_p chain
)
896 return VEC_index (dref
, chain
->refs
, 0);
899 /* Adds REF to the chain CHAIN. */
902 add_ref_to_chain (chain_p chain
, dref ref
)
904 dref root
= get_chain_root (chain
);
907 gcc_assert (double_int_scmp (root
->offset
, ref
->offset
) <= 0);
908 dist
= double_int_add (ref
->offset
, double_int_neg (root
->offset
));
909 if (double_int_ucmp (uhwi_to_double_int (MAX_DISTANCE
), dist
) <= 0)
911 gcc_assert (double_int_fits_in_uhwi_p (dist
));
913 VEC_safe_push (dref
, heap
, chain
->refs
, ref
);
915 ref
->distance
= double_int_to_uhwi (dist
);
917 if (ref
->distance
>= chain
->length
)
919 chain
->length
= ref
->distance
;
920 chain
->has_max_use_after
= false;
923 if (ref
->distance
== chain
->length
924 && ref
->pos
> root
->pos
)
925 chain
->has_max_use_after
= true;
927 chain
->all_always_accessed
&= ref
->always_accessed
;
930 /* Returns the chain for invariant component COMP. */
933 make_invariant_chain (struct component
*comp
)
935 chain_p chain
= XCNEW (struct chain
);
939 chain
->type
= CT_INVARIANT
;
941 chain
->all_always_accessed
= true;
943 for (i
= 0; VEC_iterate (dref
, comp
->refs
, i
, ref
); i
++)
945 VEC_safe_push (dref
, heap
, chain
->refs
, ref
);
946 chain
->all_always_accessed
&= ref
->always_accessed
;
952 /* Make a new chain rooted at REF. */
955 make_rooted_chain (dref ref
)
957 chain_p chain
= XCNEW (struct chain
);
959 chain
->type
= DR_IS_READ (ref
->ref
) ? CT_LOAD
: CT_STORE_LOAD
;
961 VEC_safe_push (dref
, heap
, chain
->refs
, ref
);
962 chain
->all_always_accessed
= ref
->always_accessed
;
969 /* Returns true if CHAIN is not trivial. */
972 nontrivial_chain_p (chain_p chain
)
974 return chain
!= NULL
&& VEC_length (dref
, chain
->refs
) > 1;
977 /* Returns the ssa name that contains the value of REF, or NULL_TREE if there
981 name_for_ref (dref ref
)
985 if (TREE_CODE (ref
->stmt
) == GIMPLE_MODIFY_STMT
)
987 if (!ref
->ref
|| DR_IS_READ (ref
->ref
))
988 name
= GIMPLE_STMT_OPERAND (ref
->stmt
, 0);
990 name
= GIMPLE_STMT_OPERAND (ref
->stmt
, 1);
993 name
= PHI_RESULT (ref
->stmt
);
995 return (TREE_CODE (name
) == SSA_NAME
? name
: NULL_TREE
);
998 /* Returns true if REF is a valid initializer for ROOT with given DISTANCE (in
999 iterations of the innermost enclosing loop). */
1002 valid_initializer_p (struct data_reference
*ref
,
1003 unsigned distance
, struct data_reference
*root
)
1005 aff_tree diff
, base
, step
;
1008 if (!DR_BASE_ADDRESS (ref
))
1011 /* Both REF and ROOT must be accessing the same object. */
1012 if (!operand_equal_p (DR_BASE_ADDRESS (ref
), DR_BASE_ADDRESS (root
), 0))
1015 /* The initializer is defined outside of loop, hence its address must be
1016 invariant inside the loop. */
1017 gcc_assert (integer_zerop (DR_STEP (ref
)));
1019 /* If the address of the reference is invariant, initializer must access
1020 exactly the same location. */
1021 if (integer_zerop (DR_STEP (root
)))
1022 return (operand_equal_p (DR_OFFSET (ref
), DR_OFFSET (root
), 0)
1023 && operand_equal_p (DR_INIT (ref
), DR_INIT (root
), 0));
1025 /* Verify that this index of REF is equal to the root's index at
1026 -DISTANCE-th iteration. */
1027 aff_combination_dr_offset (root
, &diff
);
1028 aff_combination_dr_offset (ref
, &base
);
1029 aff_combination_scale (&base
, double_int_minus_one
);
1030 aff_combination_add (&diff
, &base
);
1032 tree_to_aff_combination_expand (DR_STEP (root
), sizetype
, &step
,
1034 if (!aff_combination_constant_multiple_p (&diff
, &step
, &off
))
1037 if (!double_int_equal_p (off
, uhwi_to_double_int (distance
)))
1043 /* Finds looparound phi node of LOOP that copies the value of REF, and if its
1044 initial value is correct (equal to initial value of REF shifted by one
1045 iteration), returns the phi node. Otherwise, NULL_TREE is returned. ROOT
1046 is the root of the current chain. */
1049 find_looparound_phi (struct loop
*loop
, dref ref
, dref root
)
1051 tree name
, phi
, init
, init_stmt
, init_ref
;
1052 edge latch
= loop_latch_edge (loop
);
1053 struct data_reference init_dr
;
1055 if (TREE_CODE (ref
->stmt
) == GIMPLE_MODIFY_STMT
)
1057 if (DR_IS_READ (ref
->ref
))
1058 name
= GIMPLE_STMT_OPERAND (ref
->stmt
, 0);
1060 name
= GIMPLE_STMT_OPERAND (ref
->stmt
, 1);
1063 name
= PHI_RESULT (ref
->stmt
);
1067 for (phi
= phi_nodes (loop
->header
); phi
; phi
= PHI_CHAIN (phi
))
1068 if (PHI_ARG_DEF_FROM_EDGE (phi
, latch
) == name
)
1074 init
= PHI_ARG_DEF_FROM_EDGE (phi
, loop_preheader_edge (loop
));
1075 if (TREE_CODE (init
) != SSA_NAME
)
1077 init_stmt
= SSA_NAME_DEF_STMT (init
);
1078 if (TREE_CODE (init_stmt
) != GIMPLE_MODIFY_STMT
)
1080 gcc_assert (GIMPLE_STMT_OPERAND (init_stmt
, 0) == init
);
1082 init_ref
= GIMPLE_STMT_OPERAND (init_stmt
, 1);
1083 if (!REFERENCE_CLASS_P (init_ref
)
1084 && !DECL_P (init_ref
))
1087 /* Analyze the behavior of INIT_REF with respect to LOOP (innermost
1088 loop enclosing PHI). */
1089 memset (&init_dr
, 0, sizeof (struct data_reference
));
1090 DR_REF (&init_dr
) = init_ref
;
1091 DR_STMT (&init_dr
) = phi
;
1092 dr_analyze_innermost (&init_dr
);
1094 if (!valid_initializer_p (&init_dr
, ref
->distance
+ 1, root
->ref
))
1100 /* Adds a reference for the looparound copy of REF in PHI to CHAIN. */
1103 insert_looparound_copy (chain_p chain
, dref ref
, tree phi
)
1105 dref nw
= XCNEW (struct dref
), aref
;
1109 nw
->distance
= ref
->distance
+ 1;
1110 nw
->always_accessed
= 1;
1112 for (i
= 0; VEC_iterate (dref
, chain
->refs
, i
, aref
); i
++)
1113 if (aref
->distance
>= nw
->distance
)
1115 VEC_safe_insert (dref
, heap
, chain
->refs
, i
, nw
);
1117 if (nw
->distance
> chain
->length
)
1119 chain
->length
= nw
->distance
;
1120 chain
->has_max_use_after
= false;
1124 /* For references in CHAIN that are copied around the LOOP (created previously
1125 by PRE, or by user), add the results of such copies to the chain. This
1126 enables us to remove the copies by unrolling, and may need less registers
1127 (also, it may allow us to combine chains together). */
1130 add_looparound_copies (struct loop
*loop
, chain_p chain
)
1133 dref ref
, root
= get_chain_root (chain
);
1136 for (i
= 0; VEC_iterate (dref
, chain
->refs
, i
, ref
); i
++)
1138 phi
= find_looparound_phi (loop
, ref
, root
);
1142 bitmap_set_bit (looparound_phis
, SSA_NAME_VERSION (PHI_RESULT (phi
)));
1143 insert_looparound_copy (chain
, ref
, phi
);
1147 /* Find roots of the values and determine distances in the component COMP.
1148 The references are redistributed into CHAINS. LOOP is the current
1152 determine_roots_comp (struct loop
*loop
,
1153 struct component
*comp
,
1154 VEC (chain_p
, heap
) **chains
)
1158 chain_p chain
= NULL
;
1160 /* Invariants are handled specially. */
1161 if (comp
->comp_step
== RS_INVARIANT
)
1163 chain
= make_invariant_chain (comp
);
1164 VEC_safe_push (chain_p
, heap
, *chains
, chain
);
1168 qsort (VEC_address (dref
, comp
->refs
), VEC_length (dref
, comp
->refs
),
1169 sizeof (dref
), order_drefs
);
1171 for (i
= 0; VEC_iterate (dref
, comp
->refs
, i
, a
); i
++)
1173 if (!chain
|| !DR_IS_READ (a
->ref
))
1175 if (nontrivial_chain_p (chain
))
1176 VEC_safe_push (chain_p
, heap
, *chains
, chain
);
1178 release_chain (chain
);
1179 chain
= make_rooted_chain (a
);
1183 add_ref_to_chain (chain
, a
);
1186 if (nontrivial_chain_p (chain
))
1188 add_looparound_copies (loop
, chain
);
1189 VEC_safe_push (chain_p
, heap
, *chains
, chain
);
1192 release_chain (chain
);
1195 /* Find roots of the values and determine distances in components COMPS, and
1196 separates the references to CHAINS. LOOP is the current loop. */
1199 determine_roots (struct loop
*loop
,
1200 struct component
*comps
, VEC (chain_p
, heap
) **chains
)
1202 struct component
*comp
;
1204 for (comp
= comps
; comp
; comp
= comp
->next
)
1205 determine_roots_comp (loop
, comp
, chains
);
1208 /* Replace the reference in statement STMT with temporary variable
1209 NEW. If SET is true, NEW is instead initialized to the value of
1210 the reference in the statement. IN_LHS is true if the reference
1211 is in the lhs of STMT, false if it is in rhs. */
1214 replace_ref_with (tree stmt
, tree
new, bool set
, bool in_lhs
)
1217 block_stmt_iterator bsi
;
1219 if (TREE_CODE (stmt
) == PHI_NODE
)
1221 gcc_assert (!in_lhs
&& !set
);
1223 val
= PHI_RESULT (stmt
);
1224 bsi
= bsi_after_labels (bb_for_stmt (stmt
));
1225 remove_phi_node (stmt
, NULL_TREE
, false);
1227 /* Turn the phi node into GIMPLE_MODIFY_STMT. */
1228 new_stmt
= build_gimple_modify_stmt_stat (val
, new);
1229 SSA_NAME_DEF_STMT (val
) = new_stmt
;
1230 bsi_insert_before (&bsi
, new_stmt
, BSI_NEW_STMT
);
1234 /* Since the reference is of gimple_reg type, it should only
1235 appear as lhs or rhs of modify statement. */
1236 gcc_assert (TREE_CODE (stmt
) == GIMPLE_MODIFY_STMT
);
1238 /* If we do not need to initialize NEW, just replace the use of OLD. */
1241 gcc_assert (!in_lhs
);
1242 GIMPLE_STMT_OPERAND (stmt
, 1) = new;
1247 bsi
= bsi_for_stmt (stmt
);
1250 val
= GIMPLE_STMT_OPERAND (stmt
, 1);
1259 (since the reference is of gimple_reg type, VAL is either gimple
1260 invariant or ssa name). */
1264 val
= GIMPLE_STMT_OPERAND (stmt
, 0);
1274 new_stmt
= build_gimple_modify_stmt_stat (new, unshare_expr (val
));
1275 bsi_insert_after (&bsi
, new_stmt
, BSI_NEW_STMT
);
1276 SSA_NAME_DEF_STMT (new) = new_stmt
;
/* Returns the reference to the address of REF in the ITER-th iteration of
   LOOP, or NULL if we fail to determine it (ITER may be negative).  We
   try to preserve the original shape of the reference (not rewrite it
   as an indirect ref to the address), to make tree_could_trap_p in
   prepare_initializers_chain return false more often.  */

/* NOTE(review): this extraction is missing several original lines (function
   return type, braces, some returns/declarations of OK and IV); gaps below
   follow the surviving statements only.  */
ref_at_iteration (struct loop *loop, tree ref, int iter)
  tree idx, *idx_p, type, val, op0 = NULL_TREE, ret;

  /* Recurse on the innermost object of a component reference chain.  */
  if (handled_component_p (ref))
      op0 = ref_at_iteration (loop, TREE_OPERAND (ref, 0), iter);
  /* A reference that is neither a component nor an indirect ref does not
     change between iterations; just return a copy.  */
  else if (!INDIRECT_REF_P (ref))
    return unshare_expr (ref);

  if (TREE_CODE (ref) == INDIRECT_REF)
      ret = build1 (INDIRECT_REF, TREE_TYPE (ref), NULL_TREE);
      idx = TREE_OPERAND (ref, 0);
      idx_p = &TREE_OPERAND (ret, 0);
  else if (TREE_CODE (ref) == COMPONENT_REF)
      /* Check that the offset is loop invariant.  */
      if (TREE_OPERAND (ref, 2)
          && !expr_invariant_in_loop_p (loop, TREE_OPERAND (ref, 2)))
      return build3 (COMPONENT_REF, TREE_TYPE (ref), op0,
                     unshare_expr (TREE_OPERAND (ref, 1)),
                     unshare_expr (TREE_OPERAND (ref, 2)));
  else if (TREE_CODE (ref) == ARRAY_REF)
      /* Check that the lower bound and the step are loop invariant.  */
      if (TREE_OPERAND (ref, 2)
          && !expr_invariant_in_loop_p (loop, TREE_OPERAND (ref, 2)))
      if (TREE_OPERAND (ref, 3)
          && !expr_invariant_in_loop_p (loop, TREE_OPERAND (ref, 3)))
      ret = build4 (ARRAY_REF, TREE_TYPE (ref), op0, NULL_TREE,
                    unshare_expr (TREE_OPERAND (ref, 2)),
                    unshare_expr (TREE_OPERAND (ref, 3)));
      idx = TREE_OPERAND (ref, 1);
      idx_p = &TREE_OPERAND (ret, 1);

  /* Analyze the evolution of the index and shift it by ITER iterations:
     base + step * iter.  */
  ok = simple_iv (loop, first_stmt (loop->header), idx, &iv, true);
  iv.base = expand_simple_operations (iv.base);
  if (integer_zerop (iv.step))
    *idx_p = unshare_expr (iv.base);
      type = TREE_TYPE (iv.base);
      val = fold_build2 (MULT_EXPR, type, iv.step,
                         build_int_cst_type (type, iter));
      val = fold_build2 (PLUS_EXPR, type, iv.base, val);
      *idx_p = unshare_expr (val);
/* Get the initialization expression for the INDEX-th temporary variable
   of CHAIN.  For a combined chain, the initializer is built by combining
   the initializers of the two sub-chains with the chain's operator.  */

/* NOTE(review): extraction gaps — return type and braces missing.  */
get_init_expr (chain_p chain, unsigned index)
  if (chain->type == CT_COMBINATION)
      tree e1 = get_init_expr (chain->ch1, index);
      tree e2 = get_init_expr (chain->ch2, index);

      return fold_build2 (chain->operator, chain->rslt_type, e1, e2);
    /* Non-combined chains store their initializers directly.  */
    return VEC_index (tree, chain->inits, index);
/* Marks all virtual operands of statement STMT for renaming.  */

/* NOTE(review): extraction gaps — return type, declarations of VAR/ITER,
   and the body of the PHI_NODE branch are missing.  */
mark_virtual_ops_for_renaming (tree stmt)
  if (TREE_CODE (stmt) == PHI_NODE)
  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_VIRTUALS)
      /* Renaming is keyed on the underlying symbol, not the ssa name.  */
      if (TREE_CODE (var) == SSA_NAME)
        var = SSA_NAME_VAR (var);
      mark_sym_for_renaming (var);
/* Calls mark_virtual_ops_for_renaming for all members of LIST.  */

mark_virtual_ops_for_renaming_list (tree list)
  tree_stmt_iterator tsi;

  for (tsi = tsi_start (list); !tsi_end_p (tsi); tsi_next (&tsi))
    mark_virtual_ops_for_renaming (tsi_stmt (tsi));
/* Creates the variables for CHAIN, as well as phi nodes for them and
   initialization on entry to LOOP.  Uids of the newly created
   temporary variables are marked in TMP_VARS.  */

/* NOTE(review): extraction gaps — return type, braces, declarations of I
   and PHI, and the `else` keywords of two visible two-way branches are
   missing.  */
initialize_root_vars (struct loop *loop, chain_p chain, bitmap tmp_vars)
  unsigned n = chain->length;
  dref root = get_chain_root (chain);
  bool reuse_first = !chain->has_max_use_after;
  tree ref, init, var, next, stmts;
  edge entry = loop_preheader_edge (loop), latch = loop_latch_edge (loop);

  /* If N == 0, then all the references are within the single iteration.  And
     since this is an nonempty chain, reuse_first cannot be true.  */
  gcc_assert (n > 0 || !reuse_first);

  chain->vars = VEC_alloc (tree, heap, n + 1);

  if (chain->type == CT_COMBINATION)
    ref = GIMPLE_STMT_OPERAND (root->stmt, 0);
    /* [else branch — `else` elided in extraction] */
    ref = DR_REF (root->ref);

  for (i = 0; i < n + (reuse_first ? 0 : 1); i++)
      var = create_tmp_var (TREE_TYPE (ref), get_lsm_tmp_name (ref, i));
      add_referenced_var (var);
      bitmap_set_bit (tmp_vars, DECL_UID (var));
      VEC_quick_push (tree, chain->vars, var);
    /* When the last use does not dominate, the first variable is reused
       as the last one.  */
    VEC_quick_push (tree, chain->vars, VEC_index (tree, chain->vars, 0));

  /* Turn every variable into an ssa name.  */
  for (i = 0; VEC_iterate (tree, chain->vars, i, var); i++)
    VEC_replace (tree, chain->vars, i, make_ssa_name (var, NULL_TREE));

  for (i = 0; i < n; i++)
      var = VEC_index (tree, chain->vars, i);
      next = VEC_index (tree, chain->vars, i + 1);
      init = get_init_expr (chain, i);

      init = force_gimple_operand (init, &stmts, true, NULL_TREE);
          mark_virtual_ops_for_renaming_list (stmts);
          bsi_insert_on_edge_immediate (entry, stmts);

      /* Phi over the loop header: INIT on entry, NEXT around the latch.  */
      phi = create_phi_node (var, loop->header);
      SSA_NAME_DEF_STMT (var) = phi;
      add_phi_arg (phi, init, entry);
      add_phi_arg (phi, next, latch);
/* Create the variables and initialization statement for root of chain
   CHAIN.  Uids of the newly created temporary variables are marked
   in TMP_VARS.  */

/* NOTE(review): extraction gaps — return type, braces, and the trailing
   arguments of the replace_ref_with call are missing.  */
initialize_root (struct loop *loop, chain_p chain, bitmap tmp_vars)
  dref root = get_chain_root (chain);
  bool in_lhs = (chain->type == CT_STORE_LOAD
                 || chain->type == CT_COMBINATION);

  initialize_root_vars (loop, chain, tmp_vars);
  replace_ref_with (root->stmt,
                    VEC_index (tree, chain->vars, chain->length),
/* Initializes a variable for load motion for ROOT and prepares phi nodes and
   initialization on entry to LOOP if necessary.  The ssa name for the variable
   is stored in VARS.  If WRITTEN is true, also a phi node to copy its value
   around the loop is created.  Uid of the newly created temporary variable
   is marked in TMP_VARS.  INITS is the list containing the (single)
   initializer.  */

/* NOTE(review): extraction gaps — return type, trailing parameter(s),
   braces, declarations of I and PHI, and the conditions guarding the
   phi-creation vs. plain-assignment paths are missing.  */
initialize_root_vars_lm (struct loop *loop, dref root, bool written,
                         VEC(tree, heap) **vars, VEC(tree, heap) *inits,
  tree ref = DR_REF (root->ref), init, var, next, stmts;
  edge entry = loop_preheader_edge (loop), latch = loop_latch_edge (loop);

  /* Find the initializer for the variable, and check that it cannot
     trap.  */
  init = VEC_index (tree, inits, 0);

  *vars = VEC_alloc (tree, heap, written ? 2 : 1);
  var = create_tmp_var (TREE_TYPE (ref), get_lsm_tmp_name (ref, 0));
  add_referenced_var (var);
  bitmap_set_bit (tmp_vars, DECL_UID (var));
  VEC_quick_push (tree, *vars, var);
    /* For a written variable a second copy is kept for the latch phi.  */
    VEC_quick_push (tree, *vars, VEC_index (tree, *vars, 0));

  for (i = 0; VEC_iterate (tree, *vars, i, var); i++)
    VEC_replace (tree, *vars, i, make_ssa_name (var, NULL_TREE));

  var = VEC_index (tree, *vars, 0);

  init = force_gimple_operand (init, &stmts, written, NULL_TREE);
      mark_virtual_ops_for_renaming_list (stmts);
      bsi_insert_on_edge_immediate (entry, stmts);

      /* [written case — guard elided] phi carries the value around the
         loop.  */
      next = VEC_index (tree, *vars, 1);
      phi = create_phi_node (var, loop->header);
      SSA_NAME_DEF_STMT (var) = phi;
      add_phi_arg (phi, init, entry);
      add_phi_arg (phi, next, latch);

      /* [read-only case — guard elided] a plain assignment on the
         preheader edge suffices.  */
      init = build_gimple_modify_stmt_stat (var, init);
      SSA_NAME_DEF_STMT (var) = init;
      mark_virtual_ops_for_renaming (init);
      bsi_insert_on_edge_immediate (entry, init);
/* Execute load motion for references in chain CHAIN.  Uids of the newly
   created temporary variables are marked in TMP_VARS.  */

/* NOTE(review): extraction gaps — return type, braces, declarations of A
   and VAR, the n_writes++ body, the early return, and the RIDX update
   logic are missing.  */
execute_load_motion (struct loop *loop, chain_p chain, bitmap tmp_vars)
  VEC (tree, heap) *vars;
  unsigned n_writes = 0, ridx, i;

  gcc_assert (chain->type == CT_INVARIANT);
  gcc_assert (!chain->combined);
  /* Count the writes in the chain.  */
  for (i = 0; VEC_iterate (dref, chain->refs, i, a); i++)
    if (!DR_IS_READ (a->ref))

  /* If there are no reads in the loop, there is nothing to do.  */
  if (n_writes == VEC_length (dref, chain->refs))

  initialize_root_vars_lm (loop, get_chain_root (chain), n_writes > 0,
                           &vars, chain->inits, tmp_vars);

  for (i = 0; VEC_iterate (dref, chain->refs, i, a); i++)
      bool is_read = DR_IS_READ (a->ref);
      mark_virtual_ops_for_renaming (a->stmt);

      if (!DR_IS_READ (a->ref))
          /* Each write gets a fresh ssa name version of the variable.  */
          var = VEC_index (tree, vars, 0);
          var = make_ssa_name (SSA_NAME_VAR (var), NULL_TREE);
          VEC_replace (tree, vars, 0, var);

      replace_ref_with (a->stmt, VEC_index (tree, vars, ridx),
                        !is_read, !is_read);

  VEC_free (tree, heap, vars);
/* Returns the single statement in that NAME is used, excepting
   the looparound phi nodes contained in one of the chains.  If there is no
   such statement, or more statements, NULL_TREE is returned.  */

/* NOTE(review): extraction gaps — return type, declaration of USE, the
   `continue`/`return NULL_TREE` bodies, and the final `return ret;` are
   missing.  */
single_nonlooparound_use (tree name)
  imm_use_iterator it;
  tree stmt, ret = NULL_TREE;

  FOR_EACH_IMM_USE_FAST (use, it, name)
      stmt = USE_STMT (use);

      if (TREE_CODE (stmt) == PHI_NODE)
          /* Ignore uses in looparound phi nodes.  Uses in other phi nodes
             could not be processed anyway, so just fail for them.  */
          if (bitmap_bit_p (looparound_phis,
                            SSA_NAME_VERSION (PHI_RESULT (stmt))))
      /* A second non-looparound use means the use is not single.  */
      else if (ret != NULL_TREE)
/* Remove statement STMT, as well as the chain of assignments in that it is
   used.  */

/* NOTE(review): extraction gaps — return type, braces, declarations of
   NAME and NEXT, the loop structure, and the termination conditions
   (`if (!next ...` heads) are missing; only the condition tails survive.  */
remove_stmt (tree stmt)
  if (TREE_CODE (stmt) == PHI_NODE)
      name = PHI_RESULT (stmt);
      next = single_nonlooparound_use (name);
      remove_phi_node (stmt, NULL_TREE, true);

      /* Stop following the chain when the single use is not a plain
         copy of NAME.  */
          || TREE_CODE (next) != GIMPLE_MODIFY_STMT
          || GIMPLE_STMT_OPERAND (next, 1) != name)

      block_stmt_iterator bsi;

      bsi = bsi_for_stmt (stmt);

      name = GIMPLE_STMT_OPERAND (stmt, 0);
      gcc_assert (TREE_CODE (name) == SSA_NAME);

      next = single_nonlooparound_use (name);

      mark_virtual_ops_for_renaming (stmt);
      bsi_remove (&bsi, true);

          || TREE_CODE (next) != GIMPLE_MODIFY_STMT
          || GIMPLE_STMT_OPERAND (next, 1) != name)
/* Perform the predictive commoning optimization for a chain CHAIN.
   Uids of the newly created temporary variables are marked in TMP_VARS.  */

/* NOTE(review): extraction gaps — return type, trailing parameter
   (presumably TMP_VARS), braces, declarations of I, A, ROOT and VAR, and
   the `else` of the combined/non-combined split are missing.  */
execute_pred_commoning_chain (struct loop *loop, chain_p chain,
  if (chain->combined)
      /* For combined chains, just remove the statements that are used to
         compute the values of the expression (except for the root one).  */
      for (i = 1; VEC_iterate (dref, chain->refs, i, a); i++)
        remove_stmt (a->stmt);

      /* For non-combined chains, set up the variables that hold its value,
         and replace the uses of the original references by these
         variables.  */
      root = get_chain_root (chain);
      mark_virtual_ops_for_renaming (root->stmt);

      initialize_root (loop, chain, tmp_vars);
      for (i = 1; VEC_iterate (dref, chain->refs, i, a); i++)
          mark_virtual_ops_for_renaming (a->stmt);
          var = VEC_index (tree, chain->vars, chain->length - a->distance);
          replace_ref_with (a->stmt, var, false, false);
/* Determines the unroll factor necessary to remove as many temporary variable
   copies as possible.  CHAINS is the list of chains that will be
   optimized.  */

/* NOTE(review): extraction gaps — return type, declaration of CHAIN, the
   computation of AF, the MAX clamp, the factor update, and the final
   return are missing.  */
determine_unroll_factor (VEC (chain_p, heap) *chains)
  unsigned factor = 1, af, nfactor, i;
  unsigned max = PARAM_VALUE (PARAM_MAX_UNROLL_TIMES);

  for (i = 0; VEC_iterate (chain_p, chains, i, chain); i++)
      /* Invariant and combined chains do not constrain the factor.  */
      if (chain->type == CT_INVARIANT || chain->combined)

      /* The best unroll factor for this chain is equal to the number of
         temporary variables that we create for it.  */
      if (chain->has_max_use_after)

      /* Combine with the factors of the previous chains via lcm.  */
      nfactor = factor * af / gcd (factor, af);
/* Perform the predictive commoning optimization for CHAINS.
   Uids of the newly created temporary variables are marked in TMP_VARS.  */

/* NOTE(review): extraction gaps — return type, trailing parameter
   (presumably TMP_VARS), braces, declarations of I and CHAIN, and the
   `else` between the two dispatch calls are missing.  */
execute_pred_commoning (struct loop *loop, VEC (chain_p, heap) *chains,
  for (i = 0; VEC_iterate (chain_p, chains, i, chain); i++)
      if (chain->type == CT_INVARIANT)
        execute_load_motion (loop, chain, tmp_vars);
        /* [else — elided] */
        execute_pred_commoning_chain (loop, chain, tmp_vars);

  update_ssa (TODO_update_ssa_only_virtuals);
/* For each reference in CHAINS whose recorded statement is a phi node,
   replace it by the ssa name the phi defines.  (The extracted header
   comment described the inverse transformation — that text belongs to
   replace_names_by_phis below; corrected here to match the code.)  */

/* NOTE(review): extraction gaps — return type, braces, and declarations
   of I, J, CHAIN and A are missing.  */
replace_phis_by_defined_names (VEC (chain_p, heap) *chains)
  for (i = 0; VEC_iterate (chain_p, chains, i, chain); i++)
    for (j = 0; VEC_iterate (dref, chain->refs, j, a); j++)
        gcc_assert (TREE_CODE (a->stmt) != SSA_NAME);
        if (TREE_CODE (a->stmt) == PHI_NODE)
          a->stmt = PHI_RESULT (a->stmt);
/* For each reference in CHAINS whose recorded statement is an ssa name,
   replace it by the phi node that defines it.  (The extracted header
   comment described the inverse transformation — that text belongs to
   replace_phis_by_defined_names above; corrected here to match the
   code.)  */

/* NOTE(review): extraction gaps — return type, braces, and declarations
   of I, J, CHAIN and A are missing.  */
replace_names_by_phis (VEC (chain_p, heap) *chains)
  for (i = 0; VEC_iterate (chain_p, chains, i, chain); i++)
    for (j = 0; VEC_iterate (dref, chain->refs, j, a); j++)
      if (TREE_CODE (a->stmt) == SSA_NAME)
          a->stmt = SSA_NAME_DEF_STMT (a->stmt);
          gcc_assert (TREE_CODE (a->stmt) == PHI_NODE);
/* Wrapper over execute_pred_commoning, to pass it as a callback
   to tree_transform_and_unroll_loop.  */

/* NOTE(review): extraction gaps — the `struct epcc_data` head and its
   tmp_vars field, plus the callback's return type, are missing; only the
   chains field and the body statements survive.  */
  VEC (chain_p, heap) *chains;

execute_pred_commoning_cbck (struct loop *loop, void *data)
  struct epcc_data *dta = data;

  /* Restore phi nodes that were replaced by ssa names before
     tree_transform_and_unroll_loop (see detailed description in
     tree_predictive_commoning_loop).  */
  replace_names_by_phis (dta->chains);
  execute_pred_commoning (loop, dta->chains, dta->tmp_vars);
/* Returns true if we can and should unroll LOOP FACTOR times.  Number
   of iterations of the loop is returned in NITER.  */

/* NOTE(review): extraction gaps — return type, declaration of EXIT, the
   `return false` bodies of the guards, and the final `return true` are
   missing.  */
should_unroll_loop_p (struct loop *loop, unsigned factor,
                      struct tree_niter_desc *niter)
  /* Check whether unrolling is possible.  We only want to unroll loops
     for that we are able to determine number of iterations.  We also
     want to split the extra iterations of the loop from its end,
     therefore we require that the loop has precisely one
     exit.  */
  exit = single_dom_exit (loop);

  if (!number_of_iterations_exit (loop, exit, niter, false))

  /* And of course, we must be able to duplicate the loop.  */
  if (!can_duplicate_loop_p (loop))

  /* The final loop should be small enough.  */
  if (tree_num_loop_insns (loop, &eni_size_weights) * factor
      > (unsigned) PARAM_VALUE (PARAM_MAX_UNROLLED_INSNS))
/* Base NAME and all the names in the chain of phi nodes that use it
   on variable VAR.  The phi nodes are recognized by being in the copies of
   the header of the LOOP.  */

/* NOTE(review): extraction gaps — return type, declarations of STMT, PHI
   and E, the outer walk loop, the phi-found handling, and the `else` of
   the header/non-header branch are missing.  */
base_names_in_chain_on (struct loop *loop, tree name, tree var)
  imm_use_iterator iter;

  SSA_NAME_VAR (name) = var;

  /* Look for a phi node inside the loop that uses NAME.  */
  FOR_EACH_IMM_USE_STMT (stmt, iter, name)
      if (TREE_CODE (stmt) == PHI_NODE
          && flow_bb_inside_loop_p (loop, bb_for_stmt (stmt)))
          BREAK_FROM_IMM_USE_STMT (iter);

      if (bb_for_stmt (phi) == loop->header)
        e = loop_latch_edge (loop);
        /* [else — elided] */
        e = single_pred_edge (bb_for_stmt (stmt));

      /* Continue with the name the phi defines.  */
      name = PHI_RESULT (phi);
      SSA_NAME_VAR (name) = var;
/* Given an unrolled LOOP after predictive commoning, remove the
   register copies arising from phi nodes by changing the base
   variables of SSA names.  TMP_VARS is the set of the temporary variables
   for those we want to perform this.  */

/* NOTE(review): extraction gaps — return type, declaration of E, and the
   `continue` body of the tmp_vars filter are missing.  */
eliminate_temp_copies (struct loop *loop, bitmap tmp_vars)
  tree phi, name, use, var, stmt;

  e = loop_latch_edge (loop);
  for (phi = phi_nodes (loop->header); phi; phi = PHI_CHAIN (phi))
      name = PHI_RESULT (phi);
      var = SSA_NAME_VAR (name);
      /* Only handle the temporaries this pass created.  */
      if (!bitmap_bit_p (tmp_vars, DECL_UID (var)))
      use = PHI_ARG_DEF_FROM_EDGE (phi, e);
      gcc_assert (TREE_CODE (use) == SSA_NAME);

      /* Base all the ssa names in the ud and du chain of NAME on VAR.  */
      stmt = SSA_NAME_DEF_STMT (use);
      while (TREE_CODE (stmt) == PHI_NODE)
          gcc_assert (single_pred_p (bb_for_stmt (stmt)));
          use = PHI_ARG_DEF (stmt, 0);
          stmt = SSA_NAME_DEF_STMT (use);

      base_names_in_chain_on (loop, use, var);
/* Returns true if CHAIN is suitable to be combined: it is not already
   combined, and it is either a load chain or itself a combination.  */

chain_can_be_combined_p (chain_p chain)
  return (!chain->combined
          && (chain->type == CT_LOAD || chain->type == CT_COMBINATION));
/* Returns the modify statement that uses NAME.  Skips over assignment
   statements, NAME is replaced with the actual name used in the returned
   statement.  */

/* NOTE(review): extraction gaps — return type, the skip-loop structure,
   the failure returns, the `*name = lhs` update, and the head of the
   final rhs-kind condition are missing.  */
find_use_stmt (tree *name)
  tree stmt, rhs, lhs;

  /* Skip over assignments.  */
      stmt = single_nonlooparound_use (*name);

      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)

      lhs = GIMPLE_STMT_OPERAND (stmt, 0);
      if (TREE_CODE (lhs) != SSA_NAME)

      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
      /* Only a binary (two-operand) non-reference rhs qualifies.  */
      || REFERENCE_CLASS_P (rhs)
      || TREE_CODE_LENGTH (TREE_CODE (rhs)) != 2)
/* Returns true if we may perform reassociation for operation CODE in TYPE.
   Floating-point reassociation requires -funsafe-math-optimizations.  */

/* NOTE(review): extraction gap — the `return false` body of the float
   guard is missing.  */
may_reassociate_p (tree type, enum tree_code code)
  if (FLOAT_TYPE_P (type)
      && !flag_unsafe_math_optimizations)

  return (commutative_tree_code (code)
          && associative_tree_code (code));
/* If the operation used in STMT is associative and commutative, go through the
   tree of the same operations and returns its root.  Distance to the root
   is stored in DISTANCE.  */

/* NOTE(review): extraction gaps — return type, the walk loop, the
   distance bookkeeping, and the returns are missing.  */
find_associative_operation_root (tree stmt, unsigned *distance)
  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1), lhs, next;
  enum tree_code code = TREE_CODE (rhs);

  if (!may_reassociate_p (TREE_TYPE (rhs), code))

  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
  gcc_assert (TREE_CODE (lhs) == SSA_NAME);

  next = find_use_stmt (&lhs);

  /* Stop when the user performs a different operation.  */
  rhs = GIMPLE_STMT_OPERAND (next, 1);
  if (TREE_CODE (rhs) != code)
/* Returns the common statement in that NAME1 and NAME2 have a use.  If there
   is no such statement, returns NULL_TREE.  In case the operation used on
   NAME1 and NAME2 is associative and commutative, returns the root of the
   tree formed by this operation instead of the statement that uses NAME1 or
   NAME2.  */

/* NOTE(review): extraction gaps — return type, declarations of STMT1 and
   STMT2, and the intermediate NULL checks are missing.  */
find_common_use_stmt (tree *name1, tree *name2)
  stmt1 = find_use_stmt (name1);

  stmt2 = find_use_stmt (name2);

  stmt1 = find_associative_operation_root (stmt1, NULL);

  stmt2 = find_associative_operation_root (stmt2, NULL);

  return (stmt1 == stmt2 ? stmt1 : NULL_TREE);
/* Checks whether R1 and R2 are combined together using CODE, with the result
   in RSLT_TYPE, in order R1 CODE R2 if SWAP is false and in order R2 CODE R1
   if it is true.  If CODE is ERROR_MARK, set these values instead.  */

/* NOTE(review): extraction gaps — return type, declarations of ASWAP and
   ATYPE, the NULL check on STMT, the ERROR_MARK assignment body, and
   part of the final comparison (the swap check) are missing.  */
combinable_refs_p (dref r1, dref r2,
                   enum tree_code *code, bool *swap, tree *rslt_type)
  enum tree_code acode;
  tree name1, name2, stmt, rhs;

  name1 = name_for_ref (r1);
  name2 = name_for_ref (r2);
  gcc_assert (name1 != NULL_TREE && name2 != NULL_TREE);

  stmt = find_common_use_stmt (&name1, &name2);

  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
  acode = TREE_CODE (rhs);
  /* For non-commutative codes the operand order matters.  */
  aswap = (!commutative_tree_code (acode)
           && TREE_OPERAND (rhs, 0) != name1);
  atype = TREE_TYPE (rhs);

  if (*code == ERROR_MARK)

  return (*code == acode
          && *rslt_type == atype);
/* Remove OP from the operation on rhs of STMT, and replace STMT with
   an assignment of the remaining operand.  */

/* NOTE(review): extraction gaps — return type, declaration of RHS, the
   gcc_unreachable else, and the statement-update tail are missing.  */
remove_name_from_operation (tree stmt, tree op)
  gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);

  rhs = &GIMPLE_STMT_OPERAND (stmt, 1);
  if (TREE_OPERAND (*rhs, 0) == op)
    *rhs = TREE_OPERAND (*rhs, 1);
  else if (TREE_OPERAND (*rhs, 1) == op)
    *rhs = TREE_OPERAND (*rhs, 0);
/* Reassociates the expression in that NAME1 and NAME2 are used so that they
   are combined in a single statement, and returns this statement.  */

/* NOTE(review): extraction gaps — return type, the initialization of
   R1/R2 from NAME1/NAME2, the dist1/dist2 decrement bodies, the loop that
   climbs both chains until S1 == S2, and the final return are missing.  */
reassociate_to_the_same_stmt (tree name1, tree name2)
  tree stmt1, stmt2, root1, root2, r1, r2, s1, s2;
  tree new_stmt, tmp_stmt, new_name, tmp_name, var;
  unsigned dist1, dist2;
  enum tree_code code;
  tree type = TREE_TYPE (name1);
  block_stmt_iterator bsi;

  stmt1 = find_use_stmt (&name1);
  stmt2 = find_use_stmt (&name2);
  root1 = find_associative_operation_root (stmt1, &dist1);
  root2 = find_associative_operation_root (stmt2, &dist2);
  code = TREE_CODE (GIMPLE_STMT_OPERAND (stmt1, 1));

  gcc_assert (root1 && root2 && root1 == root2
              && code == TREE_CODE (GIMPLE_STMT_OPERAND (stmt2, 1)));

  /* Find the root of the nearest expression in that both NAME1 and NAME2
     are used.  First equalize the distances from the root...  */
  while (dist1 > dist2)
      s1 = find_use_stmt (&r1);
      r1 = GIMPLE_STMT_OPERAND (s1, 0);
  while (dist2 > dist1)
      s2 = find_use_stmt (&r2);
      r2 = GIMPLE_STMT_OPERAND (s2, 0);

      /* ...then climb both chains in lockstep.  */
      s1 = find_use_stmt (&r1);
      r1 = GIMPLE_STMT_OPERAND (s1, 0);
      s2 = find_use_stmt (&r2);
      r2 = GIMPLE_STMT_OPERAND (s2, 0);

  /* Remove NAME1 and NAME2 from the statements in that they are used
     currently.  */
  remove_name_from_operation (stmt1, name1);
  remove_name_from_operation (stmt2, name2);

  /* Insert the new statement combining NAME1 and NAME2 before S1, and
     combine it with the rhs of S1.  */
  var = create_tmp_var (type, "predreastmp");
  add_referenced_var (var);
  new_name = make_ssa_name (var, NULL_TREE);
  new_stmt = build_gimple_modify_stmt_stat (new_name,
                                            fold_build2 (code, type, name1, name2));
  SSA_NAME_DEF_STMT (new_name) = new_stmt;

  var = create_tmp_var (type, "predreastmp");
  add_referenced_var (var);
  tmp_name = make_ssa_name (var, NULL_TREE);
  tmp_stmt = build_gimple_modify_stmt_stat (tmp_name,
                                            GIMPLE_STMT_OPERAND (s1, 1));
  SSA_NAME_DEF_STMT (tmp_name) = tmp_stmt;

  GIMPLE_STMT_OPERAND (s1, 1) = fold_build2 (code, type, new_name, tmp_name);

  bsi = bsi_for_stmt (s1);
  bsi_insert_before (&bsi, new_stmt, BSI_SAME_STMT);
  bsi_insert_before (&bsi, tmp_stmt, BSI_SAME_STMT);
/* Returns the statement that combines references R1 and R2.  In case R1
   and R2 are not used in the same statement, but they are used with an
   associative and commutative operation in the same expression, reassociate
   the expression so that they are used in the same statement.  */

/* NOTE(review): extraction gaps — return type, declarations of STMT1 and
   STMT2, and the early return for the stmt1 == stmt2 case are missing.  */
stmt_combining_refs (dref r1, dref r2)
  tree name1 = name_for_ref (r1);
  tree name2 = name_for_ref (r2);

  stmt1 = find_use_stmt (&name1);
  stmt2 = find_use_stmt (&name2);

  return reassociate_to_the_same_stmt (name1, name2);
/* Tries to combine chains CH1 and CH2 together.  If this succeeds, the
   description of the new chain is returned, otherwise we return NULL.  */

/* NOTE(review): extraction gaps — return type, declarations of I, R1, R2,
   NW, NEW_CHAIN, SWAP and ROOT_STMT, the `return NULL` bodies of the
   compatibility guards, and the final return of NEW_CHAIN are missing.  */
combine_chains (chain_p ch1, chain_p ch2)
  enum tree_code op = ERROR_MARK;
  tree rslt_type = NULL_TREE;

  /* Only chains of the same length and reference count can combine.  */
  if (ch1->length != ch2->length)

  if (VEC_length (dref, ch1->refs) != VEC_length (dref, ch2->refs))

  for (i = 0; (VEC_iterate (dref, ch1->refs, i, r1)
               && VEC_iterate (dref, ch2->refs, i, r2)); i++)
      if (r1->distance != r2->distance)

      if (!combinable_refs_p (r1, r2, &op, &swap, &rslt_type))

  new_chain = XCNEW (struct chain);
  new_chain->type = CT_COMBINATION;
  new_chain->operator = op;
  new_chain->ch1 = ch1;
  new_chain->ch2 = ch2;
  new_chain->rslt_type = rslt_type;
  new_chain->length = ch1->length;

  for (i = 0; (VEC_iterate (dref, ch1->refs, i, r1)
               && VEC_iterate (dref, ch2->refs, i, r2)); i++)
      nw = XCNEW (struct dref);
      nw->stmt = stmt_combining_refs (r1, r2);
      nw->distance = r1->distance;

      VEC_safe_push (dref, heap, new_chain->refs, nw);

  /* Recompute has_max_use_after for the combined chain.  */
  new_chain->has_max_use_after = false;
  root_stmt = get_chain_root (new_chain)->stmt;
  for (i = 1; VEC_iterate (dref, new_chain->refs, i, nw); i++)
      if (nw->distance == new_chain->length
          && !stmt_dominates_stmt_p (nw->stmt, root_stmt))
          new_chain->has_max_use_after = true;

  ch1->combined = true;
  ch2->combined = true;
/* Try to combine the CHAINS, using a worklist of candidate chains.  */

/* NOTE(review): extraction gaps — return type, declarations of I and J,
   the `continue` bodies of the two filters, and the NULL check on CCH
   are missing.  */
try_combine_chains (VEC (chain_p, heap) **chains)
  chain_p ch1, ch2, cch;
  VEC (chain_p, heap) *worklist = NULL;

  for (i = 0; VEC_iterate (chain_p, *chains, i, ch1); i++)
    if (chain_can_be_combined_p (ch1))
      VEC_safe_push (chain_p, heap, worklist, ch1);

  while (!VEC_empty (chain_p, worklist))
      ch1 = VEC_pop (chain_p, worklist);
      if (!chain_can_be_combined_p (ch1))

      for (j = 0; VEC_iterate (chain_p, *chains, j, ch2); j++)
          if (!chain_can_be_combined_p (ch2))

          cch = combine_chains (ch1, ch2);
              /* A new combination is itself a candidate for further
                 combining.  */
              VEC_safe_push (chain_p, heap, worklist, cch);
              VEC_safe_push (chain_p, heap, *chains, cch);
/* Sets alias information based on data reference DR for REF,
   if necessary.  */

/* NOTE(review): extraction gaps — return type, declaration of VAR, the
   early return for non-indirect bases, the `else` between new_type_alias
   and direct tag assignment, and the subvars guard are missing.  */
set_alias_info (tree ref, struct data_reference *dr)
  tree tag = DR_SYMBOL_TAG (dr);

  gcc_assert (tag != NULL_TREE);

  ref = get_base_address (ref);
  if (!ref || !INDIRECT_REF_P (ref))

  var = SSA_NAME_VAR (TREE_OPERAND (ref, 0));
  if (var_ann (var)->symbol_mem_tag)
  new_type_alias (var, tag, ref);
    /* [alternate branch — elided] */
    var_ann (var)->symbol_mem_tag = tag;

  var_ann (var)->subvars = DR_SUBVARS (dr);
/* Prepare initializers for CHAIN in LOOP.  Returns false if this is
   impossible because one of these initializers may trap, true otherwise.  */

/* NOTE(review): extraction gaps — return type, declarations of LAREF,
   INIT and STMTS, the NULL check on INIT, the `return false` bodies, and
   the final `return true` are missing.  */
prepare_initializers_chain (struct loop *loop, chain_p chain)
  unsigned i, n = (chain->type == CT_INVARIANT) ? 1 : chain->length;
  struct data_reference *dr = get_chain_root (chain)->ref;
  edge entry = loop_preheader_edge (loop);

  /* Find the initializers for the variables, and check that they cannot
     trap.  */
  chain->inits = VEC_alloc (tree, heap, n);
  for (i = 0; i < n; i++)
    VEC_quick_push (tree, chain->inits, NULL_TREE);

  /* If we have replaced some looparound phi nodes, use their initializers
     instead of creating our own.  */
  for (i = 0; VEC_iterate (dref, chain->refs, i, laref); i++)
      if (TREE_CODE (laref->stmt) != PHI_NODE)

      gcc_assert (laref->distance > 0);
      VEC_replace (tree, chain->inits, n - laref->distance,
                   PHI_ARG_DEF_FROM_EDGE (laref->stmt, entry));

  for (i = 0; i < n; i++)
      if (VEC_index (tree, chain->inits, i) != NULL_TREE)

      /* Materialize the reference as it appears I - N iterations before
         the loop (a negative iteration, i.e. before entry).  */
      init = ref_at_iteration (loop, DR_REF (dr), (int) i - n);

      if (!chain->all_always_accessed && tree_could_trap_p (init))

      init = force_gimple_operand (init, &stmts, false, NULL_TREE);
          mark_virtual_ops_for_renaming_list (stmts);
          bsi_insert_on_edge_immediate (entry, stmts);
      set_alias_info (init, dr);

      VEC_replace (tree, chain->inits, i, init);
/* Prepare initializers for CHAINS in LOOP, and free chains that cannot
   be used because the initializers might trap.  */

/* NOTE(review): extraction gaps — return type, declaration of CHAIN, the
   `i++` branch for chains that succeed, and the else keyword are
   missing.  Note the loop header deliberately has no increment: removal
   via VEC_unordered_remove reuses index I.  */
prepare_initializers (struct loop *loop, VEC (chain_p, heap) *chains)
  for (i = 0; i < VEC_length (chain_p, chains); )
      chain = VEC_index (chain_p, chains, i);
      if (prepare_initializers_chain (loop, chain))
          /* [failure branch — guard shape elided] */
          release_chain (chain);
          VEC_unordered_remove (chain_p, chains, i);
/* Performs predictive commoning for LOOP.  Returns true if LOOP was
   unrolled.  */

/* NOTE(review): extraction gaps — return type, declarations of TMP_VARS,
   LOOPAROUND_PHIS usage, EXIT, the early-exit paths, the `if (unroll)`
   /`else` split around the transformation, the goto/cleanup labels, and
   the final return are missing.  */
tree_predictive_commoning_loop (struct loop *loop)
  VEC (data_reference_p, heap) *datarefs;
  VEC (ddr_p, heap) *dependences;
  struct component *components;
  VEC (chain_p, heap) *chains = NULL;
  unsigned unroll_factor;
  struct tree_niter_desc desc;
  bool unroll = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing loop %d\n", loop->num);

  /* Find the data references and split them into components according to their
     dependence relations.  */
  datarefs = VEC_alloc (data_reference_p, heap, 10);
  dependences = VEC_alloc (ddr_p, heap, 10);
  compute_data_dependences_for_loop (loop, true, &datarefs, &dependences);
  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_data_dependence_relations (dump_file, dependences);

  components = split_data_refs_to_components (loop, datarefs, dependences);
  free_dependence_relations (dependences);
      /* [early-exit path — guard elided] */
      free_data_refs (datarefs);

  if (dump_file && (dump_flags & TDF_DETAILS))
      fprintf (dump_file, "Initial state:\n\n");
      dump_components (dump_file, components);

  /* Find the suitable components and split them into chains.  */
  components = filter_suitable_components (loop, components);

  tmp_vars = BITMAP_ALLOC (NULL);
  looparound_phis = BITMAP_ALLOC (NULL);
  determine_roots (loop, components, &chains);
  release_components (components);

  if (dump_file && (dump_flags & TDF_DETAILS))
         "Predictive commoning failed: no suitable chains\n");

  prepare_initializers (loop, chains);

  /* Try to combine the chains that are always worked with together.  */
  try_combine_chains (&chains);

  if (dump_file && (dump_flags & TDF_DETAILS))
      fprintf (dump_file, "Before commoning:\n\n");
      dump_chains (dump_file, chains);

  /* Determine the unroll factor, and if the loop should be unrolled, ensure
     that its number of iterations is divisible by the factor.  */
  unroll_factor = determine_unroll_factor (chains);

  unroll = should_unroll_loop_p (loop, unroll_factor, &desc);
  exit = single_dom_exit (loop);

  /* Execute the predictive commoning transformations, and possibly unroll the
     loop.  */
      struct epcc_data dta;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Unrolling %u times.\n", unroll_factor);

      dta.chains = chains;
      dta.tmp_vars = tmp_vars;

      update_ssa (TODO_update_ssa_only_virtuals);

      /* Cfg manipulations performed in tree_transform_and_unroll_loop before
         execute_pred_commoning_cbck is called may cause phi nodes to be
         reallocated, which is a problem since CHAINS may point to these
         statements.  To fix this, we store the ssa names defined by the
         phi nodes here instead of the phi nodes themselves, and restore
         the phi nodes in execute_pred_commoning_cbck.  A bit hacky.  */
      replace_phis_by_defined_names (chains);

      tree_transform_and_unroll_loop (loop, unroll_factor, exit, &desc,
                                      execute_pred_commoning_cbck, &dta);
      eliminate_temp_copies (loop, tmp_vars);

      /* [non-unrolled path — `else` elided] */
      if (dump_file && (dump_flags & TDF_DETAILS))
         "Executing predictive commoning without unrolling.\n");
      execute_pred_commoning (loop, chains, tmp_vars);

  release_chains (chains);
  free_data_refs (datarefs);
  BITMAP_FREE (tmp_vars);
  BITMAP_FREE (looparound_phis);

  free_affine_expand_cache (&name_expansions);
2546 /* Runs predictive commoning. */
2549 tree_predictive_commoning (void)
2551 bool unrolled
= false;
2555 initialize_original_copy_tables ();
2556 FOR_EACH_LOOP (li
, loop
, LI_ONLY_INNERMOST
)
2558 unrolled
|= tree_predictive_commoning_loop (loop
);
2564 cleanup_tree_cfg_loop ();
2566 free_original_copy_tables ();