tree-ssa-threadupdate.c: Include tree-cfg.h and tree-pass.h
[official-gcc.git] / gcc / lcm.c
blobaa63c7272f0443760e7a2d1cbb3656e315bacd80
1 /* Generic partial redundancy elimination with lazy code motion support.
2 Copyright (C) 1998-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* These routines are meant to be used by various optimization
21 passes which can be modeled as lazy code motion problems.
22 Including, but not limited to:
24 * Traditional partial redundancy elimination.
     * Placement of caller/callee register save/restores.
28 * Load/store motion.
30 * Copy motion.
32 * Conversion of flat register files to a stacked register
33 model.
35 * Dead load/store elimination.
37 These routines accept as input:
39 * Basic block information (number of blocks, lists of
40 predecessors and successors). Note the granularity
41 does not need to be basic block, they could be statements
42 or functions.
44 * Bitmaps of local properties (computed, transparent and
45 anticipatable expressions).
   The output of these routines is a bitmap of redundant computations
   and a bitmap of optimal placement points.  */
51 #include "config.h"
52 #include "system.h"
53 #include "coretypes.h"
54 #include "tm.h"
55 #include "rtl.h"
56 #include "regs.h"
57 #include "hard-reg-set.h"
58 #include "flags.h"
59 #include "insn-config.h"
60 #include "recog.h"
61 #include "basic-block.h"
62 #include "tm_p.h"
63 #include "function.h"
64 #include "sbitmap.h"
65 #include "dumpfile.h"
67 /* Edge based LCM routines. */
68 static void compute_antinout_edge (sbitmap *, sbitmap *, sbitmap *, sbitmap *);
69 static void compute_earliest (struct edge_list *, int, sbitmap *, sbitmap *,
70 sbitmap *, sbitmap *, sbitmap *);
71 static void compute_laterin (struct edge_list *, sbitmap *, sbitmap *,
72 sbitmap *, sbitmap *);
73 static void compute_insert_delete (struct edge_list *edge_list, sbitmap *,
74 sbitmap *, sbitmap *, sbitmap *, sbitmap *);
76 /* Edge based LCM routines on a reverse flowgraph. */
77 static void compute_farthest (struct edge_list *, int, sbitmap *, sbitmap *,
78 sbitmap*, sbitmap *, sbitmap *);
79 static void compute_nearerout (struct edge_list *, sbitmap *, sbitmap *,
80 sbitmap *, sbitmap *);
81 static void compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *,
82 sbitmap *, sbitmap *, sbitmap *,
83 sbitmap *);
85 /* Edge based lcm routines. */
/* Compute expression anticipatability at entrance and exit of each block.
   This is done based on the flow graph, and not on the pred-succ lists.
   Other than that, it's pretty much identical to compute_antinout.

   ANTLOC[b] - expressions locally anticipatable in block b.
   TRANSP[b] - expressions transparent through block b.
   ANTIN/ANTOUT - output: global anticipatability at block entry/exit.

   Iterates a backward dataflow problem to a maximal fixed point using a
   circular worklist queue of basic blocks.  The bb->aux field doubles as
   an "on the worklist" marker; EXIT's predecessors get a sentinel value
   so they are recognized (and never requeued) inside the loop.  */

static void
compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
		       sbitmap *antout)
{
  basic_block bb;
  edge e;
  basic_block *worklist, *qin, *qout, *qend;
  unsigned int qlen;
  edge_iterator ei;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* We want a maximal solution, so make an optimistic initialization of
     ANTIN.  */
  bitmap_vector_ones (antin, last_basic_block);

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of ANTIN above.  Reverse order because the
     problem is a backward one, so this tends to converge faster.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  qin = worklist;
  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;

  /* Mark blocks which are predecessors of the exit block so that we
     can easily identify them below.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    e->src->aux = EXIT_BLOCK_PTR_FOR_FN (cfun);

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      /* Circular queue: wrap the read pointer.  */
      if (qout >= qend)
	qout = worklist;

      if (bb->aux == EXIT_BLOCK_PTR_FOR_FN (cfun))
	/* Do not clear the aux field for blocks which are predecessors of
	   the EXIT block.  That way we never add them to the worklist
	   again.  ANTOUT is empty there: nothing is anticipated past EXIT.  */
	bitmap_clear (antout[bb->index]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  bitmap_intersection_of_succs (antout[bb->index], antin, bb);
	}

      /* ANTIN = ANTLOC | (TRANSP & ANTOUT); returns true on change.  */
      if (bitmap_or_and (antin[bb->index], antloc[bb->index],
			 transp[bb->index], antout[bb->index]))
	/* If the in state of this block changed, then we need
	   to add the predecessors of this block to the worklist
	   if they are not already on the worklist.  */
	FOR_EACH_EDGE (e, ei, bb->preds)
	  if (!e->src->aux && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    {
	      *qin++ = e->src;
	      e->src->aux = e;
	      qlen++;
	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}
171 /* Compute the earliest vector for edge based lcm. */
173 static void
174 compute_earliest (struct edge_list *edge_list, int n_exprs, sbitmap *antin,
175 sbitmap *antout, sbitmap *avout, sbitmap *kill,
176 sbitmap *earliest)
178 sbitmap difference, temp_bitmap;
179 int x, num_edges;
180 basic_block pred, succ;
182 num_edges = NUM_EDGES (edge_list);
184 difference = sbitmap_alloc (n_exprs);
185 temp_bitmap = sbitmap_alloc (n_exprs);
187 for (x = 0; x < num_edges; x++)
189 pred = INDEX_EDGE_PRED_BB (edge_list, x);
190 succ = INDEX_EDGE_SUCC_BB (edge_list, x);
191 if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun))
192 bitmap_copy (earliest[x], antin[succ->index]);
193 else
195 if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun))
196 bitmap_clear (earliest[x]);
197 else
199 bitmap_and_compl (difference, antin[succ->index],
200 avout[pred->index]);
201 bitmap_not (temp_bitmap, antout[pred->index]);
202 bitmap_and_or (earliest[x], difference,
203 kill[pred->index], temp_bitmap);
208 sbitmap_free (temp_bitmap);
209 sbitmap_free (difference);
/* later(p,s) is dependent on the calculation of laterin(p).
   laterin(p) is dependent on the calculation of later(p2,p).

     laterin(ENTRY) is defined as all 0's
     later(ENTRY, succs(ENTRY)) are defined using laterin(ENTRY)
     laterin(succs(ENTRY)) is defined by later(ENTRY, succs(ENTRY)).

   If we progress in this manner, starting with all basic blocks
   in the work list, anytime we change later(bb), we need to add
   succs(bb) to the worklist if they are not already on the worklist.

   Boundary conditions:

   We prime the worklist with all the normal basic blocks.  The ENTRY
   block can never be added to the worklist since it is never the
   successor of any block.  We explicitly prevent the EXIT block from
   being added to the worklist.

   We optimistically initialize LATER.  That is the only time this routine
   will compute LATER for an edge out of the entry block since the entry
   block is never on the worklist.  Thus, LATERIN is neither used nor
   computed for the ENTRY block.

   Since the EXIT block is never added to the worklist, we will neither
   use nor compute LATERIN for the exit block.  Edges which reach the
   EXIT block are handled in the normal fashion inside the loop.  However,
   the insertion/deletion computation needs LATERIN(EXIT), so we have
   to compute it.  */

static void
compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
		 sbitmap *antloc, sbitmap *later, sbitmap *laterin)
{
  int num_edges, i;
  edge e;
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;
  edge_iterator ei;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist
    = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* Initialize a mapping from each edge to its index, stashed in the
     edge's aux field so LATER/EARLIEST can be indexed per edge.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution, so initially consider LATER true for
     all edges.  This allows propagation through a loop since the incoming
     loop edge will have LATER set, so if all the other incoming edges
     to the loop are set, then LATERIN will be set for the head of the
     loop.

     If the optimistic setting of LATER on that edge was incorrect (for
     example the expression is ANTLOC in a block within the loop) then
     this algorithm will detect it when we process the block at the head
     of the optimistic edge.  That will requeue the affected blocks.  */
  bitmap_vector_ones (later, num_edges);

  /* Note that even though we want an optimistic setting of LATER, we
     do not want to be overly optimistic.  Consider an outgoing edge from
     the entry block.  That edge should always have a LATER value the
     same as EARLIEST for that edge.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    bitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit from
     the loop given our optimistic initialization of LATER above.  */
  FOR_EACH_BB (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  /* Note that we do not use the last allocated element for our queue,
     as EXIT_BLOCK is never inserted into it.  */
  qin = worklist;
  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      bb->aux = NULL;
      qlen--;
      if (qout >= qend)
	qout = worklist;

      /* Compute the intersection of LATERIN for each incoming edge to B.  */
      bitmap_ones (laterin[bb->index]);
      FOR_EACH_EDGE (e, ei, bb->preds)
	bitmap_and (laterin[bb->index], laterin[bb->index],
		    later[(size_t)e->aux]);

      /* Calculate LATER for all outgoing edges:
	 LATER = EARLIEST | (LATERIN & ~ANTLOC).  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (bitmap_ior_and_compl (later[(size_t) e->aux],
				  earliest[(size_t) e->aux],
				  laterin[e->src->index],
				  antloc[e->src->index])
	    /* If LATER for an outgoing edge was changed, then we need
	       to add the target of the outgoing edge to the worklist.  */
	    && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest->aux == 0)
	  {
	    *qin++ = e->dest;
	    e->dest->aux = e;
	    qlen++;
	    if (qin >= qend)
	      qin = worklist;
	  }
    }

  /* Computation of insertion and deletion points requires computing LATERIN
     for the EXIT block.  We allocated an extra entry in the LATERIN array
     for just this purpose.  */
  bitmap_ones (laterin[last_basic_block]);
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    bitmap_and (laterin[last_basic_block],
		laterin[last_basic_block],
		later[(size_t) e->aux]);

  clear_aux_for_edges ();
  free (worklist);
}
343 /* Compute the insertion and deletion points for edge based LCM. */
345 static void
346 compute_insert_delete (struct edge_list *edge_list, sbitmap *antloc,
347 sbitmap *later, sbitmap *laterin, sbitmap *insert,
348 sbitmap *del)
350 int x;
351 basic_block bb;
353 FOR_EACH_BB (bb)
354 bitmap_and_compl (del[bb->index], antloc[bb->index],
355 laterin[bb->index]);
357 for (x = 0; x < NUM_EDGES (edge_list); x++)
359 basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);
361 if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
362 bitmap_and_compl (insert[x], later[x], laterin[last_basic_block]);
363 else
364 bitmap_and_compl (insert[x], later[x], laterin[b->index]);
/* Given local properties TRANSP, ANTLOC, AVOUT, KILL return the insert and
   delete vectors for edge based LCM.  Returns an edgelist which is used to
   map the insert vector to what edge an expression should be inserted on.

   N_EXPRS is the number of expressions tracked (width of each bitmap).
   *INSERT and *DEL are allocated here (one row per edge resp. per block);
   ownership transfers to the caller, who must free them along with the
   returned edge list.  */

struct edge_list *
pre_edge_lcm (int n_exprs, sbitmap *transp,
	      sbitmap *avloc, sbitmap *antloc, sbitmap *kill,
	      sbitmap **insert, sbitmap **del)
{
  sbitmap *antin, *antout, *earliest;
  sbitmap *avin, *avout;
  sbitmap *later, *laterin;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      fprintf (dump_file, "Edge List:\n");
      verify_edge_list (dump_file, edge_list);
      print_edge_list (dump_file, edge_list);
      dump_bitmap_vector (dump_file, "transp", "", transp, last_basic_block);
      dump_bitmap_vector (dump_file, "antloc", "", antloc, last_basic_block);
      dump_bitmap_vector (dump_file, "avloc", "", avloc, last_basic_block);
      dump_bitmap_vector (dump_file, "kill", "", kill, last_basic_block);
    }
#endif

  /* Compute global availability.  AVIN is only needed internally.  */
  avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_available (avloc, kill, avout, avin);
  sbitmap_vector_free (avin);

  /* Compute global anticipatability.  */
  antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_antinout_edge (antloc, transp, antin, antout);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "antin", "", antin, last_basic_block);
      dump_bitmap_vector (dump_file, "antout", "", antout, last_basic_block);
    }
#endif

  /* Compute earliestness.  */
  earliest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_earliest (edge_list, n_exprs, antin, antout, avout, kill, earliest);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    dump_bitmap_vector (dump_file, "earliest", "", earliest, num_edges);
#endif

  /* ANTIN/ANTOUT/AVOUT are no longer needed past this point.  */
  sbitmap_vector_free (antout);
  sbitmap_vector_free (antin);
  sbitmap_vector_free (avout);

  later = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the exit block in the laterin vector.  */
  laterin = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
  compute_laterin (edge_list, earliest, antloc, later, laterin);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "laterin", "", laterin,
			  last_basic_block + 1);
      dump_bitmap_vector (dump_file, "later", "", later, num_edges);
    }
#endif

  sbitmap_vector_free (earliest);

  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *del = sbitmap_vector_alloc (last_basic_block, n_exprs);
  bitmap_vector_clear (*insert, num_edges);
  bitmap_vector_clear (*del, last_basic_block);
  compute_insert_delete (edge_list, antloc, later, laterin, *insert, *del);

  sbitmap_vector_free (laterin);
  sbitmap_vector_free (later);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
      dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
			  last_basic_block);
    }
#endif

  return edge_list;
}
/* Compute the AVIN and AVOUT vectors from the AVLOC and KILL vectors.
   This is a forward dataflow problem iterated to a maximal fixed point:
   AVOUT is optimistically initialized to all ones and refined via a
   circular worklist queue of blocks.  (An earlier version of this
   comment claimed a pass count was returned; the function is void.)

   The bb->aux field marks blocks already on the worklist; successors of
   ENTRY keep a sentinel value so they are recognized (and never
   requeued) inside the loop.  */

void
compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
		   sbitmap *avin)
{
  edge e;
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;
  edge_iterator ei;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist =
    XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);

  /* We want a maximal solution.  */
  bitmap_vector_ones (avout, last_basic_block);

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of AVOUT above.  */
  FOR_EACH_BB (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  qin = worklist;
  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;

  /* Mark blocks which are successors of the entry block so that we
     can easily identify them below.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->dest->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      /* Circular queue: wrap the read pointer.  */
      if (qout >= qend)
	qout = worklist;

      /* If one of the predecessor blocks is the ENTRY block, then the
	 intersection of avouts is the null set.  We can identify such blocks
	 by the special value in the AUX field in the block structure.  */
      if (bb->aux == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	/* Do not clear the aux field for blocks which are successors of the
	   ENTRY block.  That way we never add them to the worklist again.  */
	bitmap_clear (avin[bb->index]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  bitmap_intersection_of_preds (avin[bb->index], avout, bb);
	}

      /* AVOUT = AVLOC | (AVIN & ~KILL); returns true on change.  */
      if (bitmap_ior_and_compl (avout[bb->index], avloc[bb->index],
				avin[bb->index], kill[bb->index]))
	/* If the out state of this block changed, then we need
	   to add the successors of this block to the worklist
	   if they are not already on the worklist.  */
	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      *qin++ = e->dest;
	      e->dest->aux = e;
	      qlen++;

	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}
553 /* Compute the farthest vector for edge based lcm. */
555 static void
556 compute_farthest (struct edge_list *edge_list, int n_exprs,
557 sbitmap *st_avout, sbitmap *st_avin, sbitmap *st_antin,
558 sbitmap *kill, sbitmap *farthest)
560 sbitmap difference, temp_bitmap;
561 int x, num_edges;
562 basic_block pred, succ;
564 num_edges = NUM_EDGES (edge_list);
566 difference = sbitmap_alloc (n_exprs);
567 temp_bitmap = sbitmap_alloc (n_exprs);
569 for (x = 0; x < num_edges; x++)
571 pred = INDEX_EDGE_PRED_BB (edge_list, x);
572 succ = INDEX_EDGE_SUCC_BB (edge_list, x);
573 if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun))
574 bitmap_copy (farthest[x], st_avout[pred->index]);
575 else
577 if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun))
578 bitmap_clear (farthest[x]);
579 else
581 bitmap_and_compl (difference, st_avout[pred->index],
582 st_antin[succ->index]);
583 bitmap_not (temp_bitmap, st_avin[succ->index]);
584 bitmap_and_or (farthest[x], difference,
585 kill[succ->index], temp_bitmap);
590 sbitmap_free (temp_bitmap);
591 sbitmap_free (difference);
/* Compute nearer and nearerout vectors for edge based lcm.

   This is the mirror of compute_laterin, additional comments on the
   implementation can be found before compute_laterin.

   Unlike compute_laterin's circular queue, this uses a simple LIFO
   stack (TOS is the top-of-stack pointer); the bb->aux field again
   marks membership.  NEAREROUT gets an extra row at LAST_BASIC_BLOCK
   holding NEAREROUT for the ENTRY block.  */

static void
compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
		   sbitmap *st_avloc, sbitmap *nearer, sbitmap *nearerout)
{
  int num_edges, i;
  edge e;
  basic_block *worklist, *tos, bb;
  edge_iterator ei;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);

  /* Initialize NEARER for each edge and build a mapping from an edge to
     its index (stashed in the edge's aux field).  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution.  */
  bitmap_vector_ones (nearer, num_edges);

  /* Note that even though we want an optimistic setting of NEARER, we
     do not want to be overly optimistic.  Consider an incoming edge to
     the exit block.  That edge should always have a NEARER value the
     same as FARTHEST for that edge.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    bitmap_copy (nearer[(size_t)e->aux], farthest[(size_t)e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit
     from the loop given our optimistic initialization of NEARER.  */
  FOR_EACH_BB (bb)
    {
      *tos++ = bb;
      bb->aux = bb;
    }

  /* Iterate until the worklist is empty.  */
  while (tos != worklist)
    {
      /* Take the first entry off the worklist.  */
      bb = *--tos;
      bb->aux = NULL;

      /* Compute the intersection of NEARER for each outgoing edge from B.  */
      bitmap_ones (nearerout[bb->index]);
      FOR_EACH_EDGE (e, ei, bb->succs)
	bitmap_and (nearerout[bb->index], nearerout[bb->index],
		    nearer[(size_t) e->aux]);

      /* Calculate NEARER for all incoming edges:
	 NEARER = FARTHEST | (NEAREROUT & ~ST_AVLOC).  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (bitmap_ior_and_compl (nearer[(size_t) e->aux],
				  farthest[(size_t) e->aux],
				  nearerout[e->dest->index],
				  st_avloc[e->dest->index])
	    /* If NEARER for an incoming edge was changed, then we need
	       to add the source of the incoming edge to the worklist.  */
	    && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) && e->src->aux == 0)
	  {
	    *tos++ = e->src;
	    e->src->aux = e;
	  }
    }

  /* Computation of insertion and deletion points requires computing NEAREROUT
     for the ENTRY block.  We allocated an extra entry in the NEAREROUT array
     for just this purpose.  */
  bitmap_ones (nearerout[last_basic_block]);
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    bitmap_and (nearerout[last_basic_block],
		nearerout[last_basic_block],
		nearer[(size_t) e->aux]);

  clear_aux_for_edges ();
  /* TOS equals WORKLIST here: the loop above only exits once the stack
     is empty, so this frees the vector allocated above.  */
  free (tos);
}
679 /* Compute the insertion and deletion points for edge based LCM. */
681 static void
682 compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *st_avloc,
683 sbitmap *nearer, sbitmap *nearerout,
684 sbitmap *insert, sbitmap *del)
686 int x;
687 basic_block bb;
689 FOR_EACH_BB (bb)
690 bitmap_and_compl (del[bb->index], st_avloc[bb->index],
691 nearerout[bb->index]);
693 for (x = 0; x < NUM_EDGES (edge_list); x++)
695 basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
696 if (b == ENTRY_BLOCK_PTR_FOR_FN (cfun))
697 bitmap_and_compl (insert[x], nearer[x], nearerout[last_basic_block]);
698 else
699 bitmap_and_compl (insert[x], nearer[x], nearerout[b->index]);
/* Given local properties TRANSP, ST_AVLOC, ST_ANTLOC, KILL return the
   insert and delete vectors for edge based reverse LCM.  Returns an
   edgelist which is used to map the insert vector to what edge
   an expression should be inserted on.

   N_EXPRS is the number of expressions tracked (width of each bitmap).
   *INSERT and *DEL are allocated here; ownership transfers to the
   caller, who must free them along with the returned edge list.  */

struct edge_list *
pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
		  sbitmap *st_avloc, sbitmap *st_antloc, sbitmap *kill,
		  sbitmap **insert, sbitmap **del)
{
  sbitmap *st_antin, *st_antout;
  sbitmap *st_avout, *st_avin, *farthest;
  sbitmap *nearer, *nearerout;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

  /* Compute global anticipatability.  */
  st_antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  st_antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  bitmap_vector_clear (st_antin, last_basic_block);
  bitmap_vector_clear (st_antout, last_basic_block);
  compute_antinout_edge (st_antloc, transp, st_antin, st_antout);

  /* Compute global availability.  */
  st_avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  st_avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_available (st_avloc, kill, st_avout, st_avin);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      fprintf (dump_file, "Edge List:\n");
      verify_edge_list (dump_file, edge_list);
      print_edge_list (dump_file, edge_list);
      dump_bitmap_vector (dump_file, "transp", "", transp, last_basic_block);
      dump_bitmap_vector (dump_file, "st_avloc", "", st_avloc,
			  last_basic_block);
      dump_bitmap_vector (dump_file, "st_antloc", "", st_antloc,
			  last_basic_block);
      dump_bitmap_vector (dump_file, "st_antin", "", st_antin,
			  last_basic_block);
      dump_bitmap_vector (dump_file, "st_antout", "", st_antout,
			  last_basic_block);
      dump_bitmap_vector (dump_file, "st_kill", "", kill, last_basic_block);
    }
#endif

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "st_avout", "", st_avout,
			  last_basic_block);
      dump_bitmap_vector (dump_file, "st_avin", "", st_avin,
			  last_basic_block);
    }
#endif

  /* Compute farthestness.  */
  farthest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_farthest (edge_list, n_exprs, st_avout, st_avin, st_antin,
		    kill, farthest);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    dump_bitmap_vector (dump_file, "farthest", "", farthest, num_edges);
#endif

  /* The anticipatability and availability vectors are no longer needed.  */
  sbitmap_vector_free (st_antin);
  sbitmap_vector_free (st_antout);

  sbitmap_vector_free (st_avin);
  sbitmap_vector_free (st_avout);

  nearer = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the entry block.  */
  nearerout = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
  compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "nearerout", "", nearerout,
			  last_basic_block + 1);
      dump_bitmap_vector (dump_file, "nearer", "", nearer, num_edges);
    }
#endif

  sbitmap_vector_free (farthest);

  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *del = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
			     *insert, *del);

  sbitmap_vector_free (nearerout);
  sbitmap_vector_free (nearer);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
      dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
			  last_basic_block);
    }
#endif
  return edge_list;
}