1 /* Generic partial redundancy elimination with lazy code motion support.
2 Copyright (C) 1998-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* These routines are meant to be used by various optimization
21 passes which can be modeled as lazy code motion problems.
22 Including, but not limited to:
24 * Traditional partial redundancy elimination.
* Placement of caller/callee register save/restores.
32 * Conversion of flat register files to a stacked register
35 * Dead load/store elimination.
37 These routines accept as input:
39 * Basic block information (number of blocks, lists of
40 predecessors and successors). Note the granularity
41 does not need to be basic block, they could be statements
44 * Bitmaps of local properties (computed, transparent and
45 anticipatable expressions).
47 The output of these routines is bitmap of redundant computations
48 and a bitmap of optimal placement points. */
53 #include "coretypes.h"
57 #include "hard-reg-set.h"
59 #include "insn-config.h"
61 #include "basic-block.h"
67 /* Edge based LCM routines. */
68 static void compute_antinout_edge (sbitmap
*, sbitmap
*, sbitmap
*, sbitmap
*);
69 static void compute_earliest (struct edge_list
*, int, sbitmap
*, sbitmap
*,
70 sbitmap
*, sbitmap
*, sbitmap
*);
71 static void compute_laterin (struct edge_list
*, sbitmap
*, sbitmap
*,
72 sbitmap
*, sbitmap
*);
73 static void compute_insert_delete (struct edge_list
*edge_list
, sbitmap
*,
74 sbitmap
*, sbitmap
*, sbitmap
*, sbitmap
*);
76 /* Edge based LCM routines on a reverse flowgraph. */
77 static void compute_farthest (struct edge_list
*, int, sbitmap
*, sbitmap
*,
78 sbitmap
*, sbitmap
*, sbitmap
*);
79 static void compute_nearerout (struct edge_list
*, sbitmap
*, sbitmap
*,
80 sbitmap
*, sbitmap
*);
81 static void compute_rev_insert_delete (struct edge_list
*edge_list
, sbitmap
*,
82 sbitmap
*, sbitmap
*, sbitmap
*,
85 /* Edge based lcm routines. */
87 /* Compute expression anticipatability at entrance and exit of each block.
88 This is done based on the flow graph, and not on the pred-succ lists.
Other than that, it's pretty much identical to compute_antinout.  */
92 compute_antinout_edge (sbitmap
*antloc
, sbitmap
*transp
, sbitmap
*antin
,
97 basic_block
*worklist
, *qin
, *qout
, *qend
;
101 /* Allocate a worklist array/queue. Entries are only added to the
102 list if they were not already on the list. So the size is
103 bounded by the number of basic blocks. */
104 qin
= qout
= worklist
= XNEWVEC (basic_block
, n_basic_blocks_for_fn (cfun
));
106 /* We want a maximal solution, so make an optimistic initialization of
108 bitmap_vector_ones (antin
, last_basic_block_for_fn (cfun
));
110 /* Put every block on the worklist; this is necessary because of the
111 optimistic initialization of ANTIN above. */
112 int *postorder
= XNEWVEC (int, n_basic_blocks_for_fn (cfun
));
113 int postorder_num
= post_order_compute (postorder
, false, false);
114 for (int i
= 0; i
< postorder_num
; ++i
)
116 bb
= BASIC_BLOCK_FOR_FN (cfun
, postorder
[i
]);
123 qend
= &worklist
[n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
];
124 qlen
= n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
;
126 /* Mark blocks which are predecessors of the exit block so that we
127 can easily identify them below. */
128 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
129 e
->src
->aux
= EXIT_BLOCK_PTR_FOR_FN (cfun
);
131 /* Iterate until the worklist is empty. */
134 /* Take the first entry off the worklist. */
141 if (bb
->aux
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
142 /* Do not clear the aux field for blocks which are predecessors of
143 the EXIT block. That way we never add then to the worklist
145 bitmap_clear (antout
[bb
->index
]);
148 /* Clear the aux field of this block so that it can be added to
149 the worklist again if necessary. */
151 bitmap_intersection_of_succs (antout
[bb
->index
], antin
, bb
);
154 if (bitmap_or_and (antin
[bb
->index
], antloc
[bb
->index
],
155 transp
[bb
->index
], antout
[bb
->index
]))
156 /* If the in state of this block changed, then we need
157 to add the predecessors of this block to the worklist
158 if they are not already on the worklist. */
159 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
160 if (!e
->src
->aux
&& e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
170 clear_aux_for_edges ();
171 clear_aux_for_blocks ();
175 /* Compute the earliest vector for edge based lcm. */
178 compute_earliest (struct edge_list
*edge_list
, int n_exprs
, sbitmap
*antin
,
179 sbitmap
*antout
, sbitmap
*avout
, sbitmap
*kill
,
182 sbitmap difference
, temp_bitmap
;
184 basic_block pred
, succ
;
186 num_edges
= NUM_EDGES (edge_list
);
188 difference
= sbitmap_alloc (n_exprs
);
189 temp_bitmap
= sbitmap_alloc (n_exprs
);
191 for (x
= 0; x
< num_edges
; x
++)
193 pred
= INDEX_EDGE_PRED_BB (edge_list
, x
);
194 succ
= INDEX_EDGE_SUCC_BB (edge_list
, x
);
195 if (pred
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
196 bitmap_copy (earliest
[x
], antin
[succ
->index
]);
199 if (succ
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
200 bitmap_clear (earliest
[x
]);
203 bitmap_and_compl (difference
, antin
[succ
->index
],
205 bitmap_not (temp_bitmap
, antout
[pred
->index
]);
206 bitmap_and_or (earliest
[x
], difference
,
207 kill
[pred
->index
], temp_bitmap
);
212 sbitmap_free (temp_bitmap
);
213 sbitmap_free (difference
);
216 /* later(p,s) is dependent on the calculation of laterin(p).
217 laterin(p) is dependent on the calculation of later(p2,p).
219 laterin(ENTRY) is defined as all 0's
220 later(ENTRY, succs(ENTRY)) are defined using laterin(ENTRY)
221 laterin(succs(ENTRY)) is defined by later(ENTRY, succs(ENTRY)).
223 If we progress in this manner, starting with all basic blocks
224 in the work list, anytime we change later(bb), we need to add
225 succs(bb) to the worklist if they are not already on the worklist.
229 We prime the worklist all the normal basic blocks. The ENTRY block can
230 never be added to the worklist since it is never the successor of any
231 block. We explicitly prevent the EXIT block from being added to the
234 We optimistically initialize LATER. That is the only time this routine
235 will compute LATER for an edge out of the entry block since the entry
236 block is never on the worklist. Thus, LATERIN is neither used nor
237 computed for the ENTRY block.
239 Since the EXIT block is never added to the worklist, we will neither
240 use nor compute LATERIN for the exit block. Edges which reach the
241 EXIT block are handled in the normal fashion inside the loop. However,
the insertion/deletion computation needs LATERIN(EXIT), so we have
to compute it.  */
246 compute_laterin (struct edge_list
*edge_list
, sbitmap
*earliest
,
247 sbitmap
*antloc
, sbitmap
*later
, sbitmap
*laterin
)
251 basic_block
*worklist
, *qin
, *qout
, *qend
, bb
;
255 num_edges
= NUM_EDGES (edge_list
);
257 /* Allocate a worklist array/queue. Entries are only added to the
258 list if they were not already on the list. So the size is
259 bounded by the number of basic blocks. */
260 qin
= qout
= worklist
261 = XNEWVEC (basic_block
, n_basic_blocks_for_fn (cfun
));
263 /* Initialize a mapping from each edge to its index. */
264 for (i
= 0; i
< num_edges
; i
++)
265 INDEX_EDGE (edge_list
, i
)->aux
= (void *) (size_t) i
;
267 /* We want a maximal solution, so initially consider LATER true for
268 all edges. This allows propagation through a loop since the incoming
269 loop edge will have LATER set, so if all the other incoming edges
270 to the loop are set, then LATERIN will be set for the head of the
273 If the optimistic setting of LATER on that edge was incorrect (for
274 example the expression is ANTLOC in a block within the loop) then
275 this algorithm will detect it when we process the block at the head
276 of the optimistic edge. That will requeue the affected blocks. */
277 bitmap_vector_ones (later
, num_edges
);
279 /* Note that even though we want an optimistic setting of LATER, we
280 do not want to be overly optimistic. Consider an outgoing edge from
281 the entry block. That edge should always have a LATER value the
282 same as EARLIEST for that edge. */
283 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
)
284 bitmap_copy (later
[(size_t) e
->aux
], earliest
[(size_t) e
->aux
]);
286 /* Add all the blocks to the worklist. This prevents an early exit from
287 the loop given our optimistic initialization of LATER above. */
288 int *postorder
= XNEWVEC (int, n_basic_blocks_for_fn (cfun
));
289 int postorder_num
= inverted_post_order_compute (postorder
);
290 for (int i
= 0; i
< postorder_num
; ++i
)
292 bb
= BASIC_BLOCK_FOR_FN (cfun
, postorder
[i
]);
293 if (bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
294 || bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
301 /* Note that we do not use the last allocated element for our queue,
302 as EXIT_BLOCK is never inserted into it. */
304 qend
= &worklist
[n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
];
305 qlen
= n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
;
307 /* Iterate until the worklist is empty. */
310 /* Take the first entry off the worklist. */
317 /* Compute the intersection of LATERIN for each incoming edge to B. */
318 bitmap_ones (laterin
[bb
->index
]);
319 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
320 bitmap_and (laterin
[bb
->index
], laterin
[bb
->index
],
321 later
[(size_t)e
->aux
]);
323 /* Calculate LATER for all outgoing edges. */
324 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
325 if (bitmap_ior_and_compl (later
[(size_t) e
->aux
],
326 earliest
[(size_t) e
->aux
],
329 /* If LATER for an outgoing edge was changed, then we need
330 to add the target of the outgoing edge to the worklist. */
331 && e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
) && e
->dest
->aux
== 0)
341 /* Computation of insertion and deletion points requires computing LATERIN
342 for the EXIT block. We allocated an extra entry in the LATERIN array
343 for just this purpose. */
344 bitmap_ones (laterin
[last_basic_block_for_fn (cfun
)]);
345 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
346 bitmap_and (laterin
[last_basic_block_for_fn (cfun
)],
347 laterin
[last_basic_block_for_fn (cfun
)],
348 later
[(size_t) e
->aux
]);
350 clear_aux_for_edges ();
354 /* Compute the insertion and deletion points for edge based LCM. */
357 compute_insert_delete (struct edge_list
*edge_list
, sbitmap
*antloc
,
358 sbitmap
*later
, sbitmap
*laterin
, sbitmap
*insert
,
364 FOR_EACH_BB_FN (bb
, cfun
)
365 bitmap_and_compl (del
[bb
->index
], antloc
[bb
->index
],
368 for (x
= 0; x
< NUM_EDGES (edge_list
); x
++)
370 basic_block b
= INDEX_EDGE_SUCC_BB (edge_list
, x
);
372 if (b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
373 bitmap_and_compl (insert
[x
], later
[x
],
374 laterin
[last_basic_block_for_fn (cfun
)]);
376 bitmap_and_compl (insert
[x
], later
[x
], laterin
[b
->index
]);
380 /* Given local properties TRANSP, ANTLOC, AVOUT, KILL return the insert and
381 delete vectors for edge based LCM. Returns an edgelist which is used to
382 map the insert vector to what edge an expression should be inserted on. */
385 pre_edge_lcm (int n_exprs
, sbitmap
*transp
,
386 sbitmap
*avloc
, sbitmap
*antloc
, sbitmap
*kill
,
387 sbitmap
**insert
, sbitmap
**del
)
389 sbitmap
*antin
, *antout
, *earliest
;
390 sbitmap
*avin
, *avout
;
391 sbitmap
*later
, *laterin
;
392 struct edge_list
*edge_list
;
395 edge_list
= create_edge_list ();
396 num_edges
= NUM_EDGES (edge_list
);
398 #ifdef LCM_DEBUG_INFO
401 fprintf (dump_file
, "Edge List:\n");
402 verify_edge_list (dump_file
, edge_list
);
403 print_edge_list (dump_file
, edge_list
);
404 dump_bitmap_vector (dump_file
, "transp", "", transp
,
405 last_basic_block_for_fn (cfun
));
406 dump_bitmap_vector (dump_file
, "antloc", "", antloc
,
407 last_basic_block_for_fn (cfun
));
408 dump_bitmap_vector (dump_file
, "avloc", "", avloc
,
409 last_basic_block_for_fn (cfun
));
410 dump_bitmap_vector (dump_file
, "kill", "", kill
,
411 last_basic_block_for_fn (cfun
));
415 /* Compute global availability. */
416 avin
= sbitmap_vector_alloc (last_basic_block_for_fn (cfun
), n_exprs
);
417 avout
= sbitmap_vector_alloc (last_basic_block_for_fn (cfun
), n_exprs
);
418 compute_available (avloc
, kill
, avout
, avin
);
419 sbitmap_vector_free (avin
);
421 /* Compute global anticipatability. */
422 antin
= sbitmap_vector_alloc (last_basic_block_for_fn (cfun
), n_exprs
);
423 antout
= sbitmap_vector_alloc (last_basic_block_for_fn (cfun
), n_exprs
);
424 compute_antinout_edge (antloc
, transp
, antin
, antout
);
426 #ifdef LCM_DEBUG_INFO
429 dump_bitmap_vector (dump_file
, "antin", "", antin
,
430 last_basic_block_for_fn (cfun
));
431 dump_bitmap_vector (dump_file
, "antout", "", antout
,
432 last_basic_block_for_fn (cfun
));
436 /* Compute earliestness. */
437 earliest
= sbitmap_vector_alloc (num_edges
, n_exprs
);
438 compute_earliest (edge_list
, n_exprs
, antin
, antout
, avout
, kill
, earliest
);
440 #ifdef LCM_DEBUG_INFO
442 dump_bitmap_vector (dump_file
, "earliest", "", earliest
, num_edges
);
445 sbitmap_vector_free (antout
);
446 sbitmap_vector_free (antin
);
447 sbitmap_vector_free (avout
);
449 later
= sbitmap_vector_alloc (num_edges
, n_exprs
);
451 /* Allocate an extra element for the exit block in the laterin vector. */
452 laterin
= sbitmap_vector_alloc (last_basic_block_for_fn (cfun
) + 1,
454 compute_laterin (edge_list
, earliest
, antloc
, later
, laterin
);
456 #ifdef LCM_DEBUG_INFO
459 dump_bitmap_vector (dump_file
, "laterin", "", laterin
,
460 last_basic_block_for_fn (cfun
) + 1);
461 dump_bitmap_vector (dump_file
, "later", "", later
, num_edges
);
465 sbitmap_vector_free (earliest
);
467 *insert
= sbitmap_vector_alloc (num_edges
, n_exprs
);
468 *del
= sbitmap_vector_alloc (last_basic_block_for_fn (cfun
), n_exprs
);
469 bitmap_vector_clear (*insert
, num_edges
);
470 bitmap_vector_clear (*del
, last_basic_block_for_fn (cfun
));
471 compute_insert_delete (edge_list
, antloc
, later
, laterin
, *insert
, *del
);
473 sbitmap_vector_free (laterin
);
474 sbitmap_vector_free (later
);
476 #ifdef LCM_DEBUG_INFO
479 dump_bitmap_vector (dump_file
, "pre_insert_map", "", *insert
, num_edges
);
480 dump_bitmap_vector (dump_file
, "pre_delete_map", "", *del
,
481 last_basic_block_for_fn (cfun
));
488 /* Compute the AVIN and AVOUT vectors from the AVLOC and KILL vectors.
489 Return the number of passes we performed to iterate to a solution. */
492 compute_available (sbitmap
*avloc
, sbitmap
*kill
, sbitmap
*avout
,
496 basic_block
*worklist
, *qin
, *qout
, *qend
, bb
;
500 /* Allocate a worklist array/queue. Entries are only added to the
501 list if they were not already on the list. So the size is
502 bounded by the number of basic blocks. */
503 qin
= qout
= worklist
=
504 XNEWVEC (basic_block
, n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
);
506 /* We want a maximal solution. */
507 bitmap_vector_ones (avout
, last_basic_block_for_fn (cfun
));
509 /* Put every block on the worklist; this is necessary because of the
510 optimistic initialization of AVOUT above. Use inverted postorder
511 to make the dataflow problem require less iterations. */
512 int *postorder
= XNEWVEC (int, n_basic_blocks_for_fn (cfun
));
513 int postorder_num
= inverted_post_order_compute (postorder
);
514 for (int i
= 0; i
< postorder_num
; ++i
)
516 bb
= BASIC_BLOCK_FOR_FN (cfun
, postorder
[i
]);
517 if (bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
518 || bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
526 qend
= &worklist
[n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
];
527 qlen
= n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
;
529 /* Mark blocks which are successors of the entry block so that we
530 can easily identify them below. */
531 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
)
532 e
->dest
->aux
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
534 /* Iterate until the worklist is empty. */
537 /* Take the first entry off the worklist. */
544 /* If one of the predecessor blocks is the ENTRY block, then the
545 intersection of avouts is the null set. We can identify such blocks
546 by the special value in the AUX field in the block structure. */
547 if (bb
->aux
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
548 /* Do not clear the aux field for blocks which are successors of the
549 ENTRY block. That way we never add then to the worklist again. */
550 bitmap_clear (avin
[bb
->index
]);
553 /* Clear the aux field of this block so that it can be added to
554 the worklist again if necessary. */
556 bitmap_intersection_of_preds (avin
[bb
->index
], avout
, bb
);
559 if (bitmap_ior_and_compl (avout
[bb
->index
], avloc
[bb
->index
],
560 avin
[bb
->index
], kill
[bb
->index
]))
561 /* If the out state of this block changed, then we need
562 to add the successors of this block to the worklist
563 if they are not already on the worklist. */
564 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
565 if (!e
->dest
->aux
&& e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
576 clear_aux_for_edges ();
577 clear_aux_for_blocks ();
581 /* Compute the farthest vector for edge based lcm. */
584 compute_farthest (struct edge_list
*edge_list
, int n_exprs
,
585 sbitmap
*st_avout
, sbitmap
*st_avin
, sbitmap
*st_antin
,
586 sbitmap
*kill
, sbitmap
*farthest
)
588 sbitmap difference
, temp_bitmap
;
590 basic_block pred
, succ
;
592 num_edges
= NUM_EDGES (edge_list
);
594 difference
= sbitmap_alloc (n_exprs
);
595 temp_bitmap
= sbitmap_alloc (n_exprs
);
597 for (x
= 0; x
< num_edges
; x
++)
599 pred
= INDEX_EDGE_PRED_BB (edge_list
, x
);
600 succ
= INDEX_EDGE_SUCC_BB (edge_list
, x
);
601 if (succ
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
602 bitmap_copy (farthest
[x
], st_avout
[pred
->index
]);
605 if (pred
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
606 bitmap_clear (farthest
[x
]);
609 bitmap_and_compl (difference
, st_avout
[pred
->index
],
610 st_antin
[succ
->index
]);
611 bitmap_not (temp_bitmap
, st_avin
[succ
->index
]);
612 bitmap_and_or (farthest
[x
], difference
,
613 kill
[succ
->index
], temp_bitmap
);
618 sbitmap_free (temp_bitmap
);
619 sbitmap_free (difference
);
622 /* Compute nearer and nearerout vectors for edge based lcm.
624 This is the mirror of compute_laterin, additional comments on the
625 implementation can be found before compute_laterin. */
628 compute_nearerout (struct edge_list
*edge_list
, sbitmap
*farthest
,
629 sbitmap
*st_avloc
, sbitmap
*nearer
, sbitmap
*nearerout
)
633 basic_block
*worklist
, *tos
, bb
;
636 num_edges
= NUM_EDGES (edge_list
);
638 /* Allocate a worklist array/queue. Entries are only added to the
639 list if they were not already on the list. So the size is
640 bounded by the number of basic blocks. */
641 tos
= worklist
= XNEWVEC (basic_block
, n_basic_blocks_for_fn (cfun
) + 1);
643 /* Initialize NEARER for each edge and build a mapping from an edge to
645 for (i
= 0; i
< num_edges
; i
++)
646 INDEX_EDGE (edge_list
, i
)->aux
= (void *) (size_t) i
;
648 /* We want a maximal solution. */
649 bitmap_vector_ones (nearer
, num_edges
);
651 /* Note that even though we want an optimistic setting of NEARER, we
652 do not want to be overly optimistic. Consider an incoming edge to
653 the exit block. That edge should always have a NEARER value the
654 same as FARTHEST for that edge. */
655 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
656 bitmap_copy (nearer
[(size_t)e
->aux
], farthest
[(size_t)e
->aux
]);
658 /* Add all the blocks to the worklist. This prevents an early exit
659 from the loop given our optimistic initialization of NEARER. */
660 FOR_EACH_BB_FN (bb
, cfun
)
666 /* Iterate until the worklist is empty. */
667 while (tos
!= worklist
)
669 /* Take the first entry off the worklist. */
673 /* Compute the intersection of NEARER for each outgoing edge from B. */
674 bitmap_ones (nearerout
[bb
->index
]);
675 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
676 bitmap_and (nearerout
[bb
->index
], nearerout
[bb
->index
],
677 nearer
[(size_t) e
->aux
]);
679 /* Calculate NEARER for all incoming edges. */
680 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
681 if (bitmap_ior_and_compl (nearer
[(size_t) e
->aux
],
682 farthest
[(size_t) e
->aux
],
683 nearerout
[e
->dest
->index
],
684 st_avloc
[e
->dest
->index
])
685 /* If NEARER for an incoming edge was changed, then we need
686 to add the source of the incoming edge to the worklist. */
687 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
) && e
->src
->aux
== 0)
694 /* Computation of insertion and deletion points requires computing NEAREROUT
695 for the ENTRY block. We allocated an extra entry in the NEAREROUT array
696 for just this purpose. */
697 bitmap_ones (nearerout
[last_basic_block_for_fn (cfun
)]);
698 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
)
699 bitmap_and (nearerout
[last_basic_block_for_fn (cfun
)],
700 nearerout
[last_basic_block_for_fn (cfun
)],
701 nearer
[(size_t) e
->aux
]);
703 clear_aux_for_edges ();
707 /* Compute the insertion and deletion points for edge based LCM. */
710 compute_rev_insert_delete (struct edge_list
*edge_list
, sbitmap
*st_avloc
,
711 sbitmap
*nearer
, sbitmap
*nearerout
,
712 sbitmap
*insert
, sbitmap
*del
)
717 FOR_EACH_BB_FN (bb
, cfun
)
718 bitmap_and_compl (del
[bb
->index
], st_avloc
[bb
->index
],
719 nearerout
[bb
->index
]);
721 for (x
= 0; x
< NUM_EDGES (edge_list
); x
++)
723 basic_block b
= INDEX_EDGE_PRED_BB (edge_list
, x
);
724 if (b
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
725 bitmap_and_compl (insert
[x
], nearer
[x
],
726 nearerout
[last_basic_block_for_fn (cfun
)]);
728 bitmap_and_compl (insert
[x
], nearer
[x
], nearerout
[b
->index
]);
732 /* Given local properties TRANSP, ST_AVLOC, ST_ANTLOC, KILL return the
733 insert and delete vectors for edge based reverse LCM. Returns an
734 edgelist which is used to map the insert vector to what edge
735 an expression should be inserted on. */
738 pre_edge_rev_lcm (int n_exprs
, sbitmap
*transp
,
739 sbitmap
*st_avloc
, sbitmap
*st_antloc
, sbitmap
*kill
,
740 sbitmap
**insert
, sbitmap
**del
)
742 sbitmap
*st_antin
, *st_antout
;
743 sbitmap
*st_avout
, *st_avin
, *farthest
;
744 sbitmap
*nearer
, *nearerout
;
745 struct edge_list
*edge_list
;
748 edge_list
= create_edge_list ();
749 num_edges
= NUM_EDGES (edge_list
);
751 st_antin
= sbitmap_vector_alloc (last_basic_block_for_fn (cfun
), n_exprs
);
752 st_antout
= sbitmap_vector_alloc (last_basic_block_for_fn (cfun
), n_exprs
);
753 bitmap_vector_clear (st_antin
, last_basic_block_for_fn (cfun
));
754 bitmap_vector_clear (st_antout
, last_basic_block_for_fn (cfun
));
755 compute_antinout_edge (st_antloc
, transp
, st_antin
, st_antout
);
757 /* Compute global anticipatability. */
758 st_avout
= sbitmap_vector_alloc (last_basic_block_for_fn (cfun
), n_exprs
);
759 st_avin
= sbitmap_vector_alloc (last_basic_block_for_fn (cfun
), n_exprs
);
760 compute_available (st_avloc
, kill
, st_avout
, st_avin
);
762 #ifdef LCM_DEBUG_INFO
765 fprintf (dump_file
, "Edge List:\n");
766 verify_edge_list (dump_file
, edge_list
);
767 print_edge_list (dump_file
, edge_list
);
768 dump_bitmap_vector (dump_file
, "transp", "", transp
,
769 last_basic_block_for_fn (cfun
));
770 dump_bitmap_vector (dump_file
, "st_avloc", "", st_avloc
,
771 last_basic_block_for_fn (cfun
));
772 dump_bitmap_vector (dump_file
, "st_antloc", "", st_antloc
,
773 last_basic_block_for_fn (cfun
));
774 dump_bitmap_vector (dump_file
, "st_antin", "", st_antin
,
775 last_basic_block_for_fn (cfun
));
776 dump_bitmap_vector (dump_file
, "st_antout", "", st_antout
,
777 last_basic_block_for_fn (cfun
));
778 dump_bitmap_vector (dump_file
, "st_kill", "", kill
,
779 last_basic_block_for_fn (cfun
));
783 #ifdef LCM_DEBUG_INFO
786 dump_bitmap_vector (dump_file
, "st_avout", "", st_avout
, last_basic_block_for_fn (cfun
));
787 dump_bitmap_vector (dump_file
, "st_avin", "", st_avin
, last_basic_block_for_fn (cfun
));
791 /* Compute farthestness. */
792 farthest
= sbitmap_vector_alloc (num_edges
, n_exprs
);
793 compute_farthest (edge_list
, n_exprs
, st_avout
, st_avin
, st_antin
,
796 #ifdef LCM_DEBUG_INFO
798 dump_bitmap_vector (dump_file
, "farthest", "", farthest
, num_edges
);
801 sbitmap_vector_free (st_antin
);
802 sbitmap_vector_free (st_antout
);
804 sbitmap_vector_free (st_avin
);
805 sbitmap_vector_free (st_avout
);
807 nearer
= sbitmap_vector_alloc (num_edges
, n_exprs
);
809 /* Allocate an extra element for the entry block. */
810 nearerout
= sbitmap_vector_alloc (last_basic_block_for_fn (cfun
) + 1,
812 compute_nearerout (edge_list
, farthest
, st_avloc
, nearer
, nearerout
);
814 #ifdef LCM_DEBUG_INFO
817 dump_bitmap_vector (dump_file
, "nearerout", "", nearerout
,
818 last_basic_block_for_fn (cfun
) + 1);
819 dump_bitmap_vector (dump_file
, "nearer", "", nearer
, num_edges
);
823 sbitmap_vector_free (farthest
);
825 *insert
= sbitmap_vector_alloc (num_edges
, n_exprs
);
826 *del
= sbitmap_vector_alloc (last_basic_block_for_fn (cfun
), n_exprs
);
827 compute_rev_insert_delete (edge_list
, st_avloc
, nearer
, nearerout
,
830 sbitmap_vector_free (nearerout
);
831 sbitmap_vector_free (nearer
);
833 #ifdef LCM_DEBUG_INFO
836 dump_bitmap_vector (dump_file
, "pre_insert_map", "", *insert
, num_edges
);
837 dump_bitmap_vector (dump_file
, "pre_delete_map", "", *del
,
838 last_basic_block_for_fn (cfun
));