/* gcc/lcm.c  */
/* Generic partial redundancy elimination with lazy code motion support.
   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008,
   2010, 2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* These routines are meant to be used by various optimization
   passes which can be modeled as lazy code motion problems.
   Including, but not limited to:

	* Traditional partial redundancy elimination.

	* Placement of caller/callee register save/restores.

	* Load/store motion.

	* Copy motion.

	* Conversion of flat register files to a stacked register
	model.

	* Dead load/store elimination.

   These routines accept as input:

	* Basic block information (number of blocks, lists of
	predecessors and successors).  Note the granularity
	does not need to be basic block, they could be statements
	or functions.

	* Bitmaps of local properties (computed, transparent and
	anticipatable expressions).

   The output of these routines is a bitmap of redundant computations
   and a bitmap of optimal placement points.  */
52 #include "config.h"
53 #include "system.h"
54 #include "coretypes.h"
55 #include "tm.h"
56 #include "rtl.h"
57 #include "regs.h"
58 #include "hard-reg-set.h"
59 #include "flags.h"
60 #include "insn-config.h"
61 #include "recog.h"
62 #include "basic-block.h"
63 #include "tm_p.h"
64 #include "function.h"
65 #include "sbitmap.h"
66 #include "dumpfile.h"
68 /* Edge based LCM routines. */
69 static void compute_antinout_edge (sbitmap *, sbitmap *, sbitmap *, sbitmap *);
70 static void compute_earliest (struct edge_list *, int, sbitmap *, sbitmap *,
71 sbitmap *, sbitmap *, sbitmap *);
72 static void compute_laterin (struct edge_list *, sbitmap *, sbitmap *,
73 sbitmap *, sbitmap *);
74 static void compute_insert_delete (struct edge_list *edge_list, sbitmap *,
75 sbitmap *, sbitmap *, sbitmap *, sbitmap *);
77 /* Edge based LCM routines on a reverse flowgraph. */
78 static void compute_farthest (struct edge_list *, int, sbitmap *, sbitmap *,
79 sbitmap*, sbitmap *, sbitmap *);
80 static void compute_nearerout (struct edge_list *, sbitmap *, sbitmap *,
81 sbitmap *, sbitmap *);
82 static void compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *,
83 sbitmap *, sbitmap *, sbitmap *,
84 sbitmap *);
86 /* Edge based lcm routines. */
88 /* Compute expression anticipatability at entrance and exit of each block.
89 This is done based on the flow graph, and not on the pred-succ lists.
90 Other than that, its pretty much identical to compute_antinout. */
92 static void
93 compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
94 sbitmap *antout)
96 basic_block bb;
97 edge e;
98 basic_block *worklist, *qin, *qout, *qend;
99 unsigned int qlen;
100 edge_iterator ei;
102 /* Allocate a worklist array/queue. Entries are only added to the
103 list if they were not already on the list. So the size is
104 bounded by the number of basic blocks. */
105 qin = qout = worklist = XNEWVEC (basic_block, n_basic_blocks);
107 /* We want a maximal solution, so make an optimistic initialization of
108 ANTIN. */
109 sbitmap_vector_ones (antin, last_basic_block);
111 /* Put every block on the worklist; this is necessary because of the
112 optimistic initialization of ANTIN above. */
113 FOR_EACH_BB_REVERSE (bb)
115 *qin++ = bb;
116 bb->aux = bb;
119 qin = worklist;
120 qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];
121 qlen = n_basic_blocks - NUM_FIXED_BLOCKS;
123 /* Mark blocks which are predecessors of the exit block so that we
124 can easily identify them below. */
125 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
126 e->src->aux = EXIT_BLOCK_PTR;
128 /* Iterate until the worklist is empty. */
129 while (qlen)
131 /* Take the first entry off the worklist. */
132 bb = *qout++;
133 qlen--;
135 if (qout >= qend)
136 qout = worklist;
138 if (bb->aux == EXIT_BLOCK_PTR)
139 /* Do not clear the aux field for blocks which are predecessors of
140 the EXIT block. That way we never add then to the worklist
141 again. */
142 sbitmap_zero (antout[bb->index]);
143 else
145 /* Clear the aux field of this block so that it can be added to
146 the worklist again if necessary. */
147 bb->aux = NULL;
148 sbitmap_intersection_of_succs (antout[bb->index], antin, bb);
151 if (sbitmap_a_or_b_and_c_cg (antin[bb->index], antloc[bb->index],
152 transp[bb->index], antout[bb->index]))
153 /* If the in state of this block changed, then we need
154 to add the predecessors of this block to the worklist
155 if they are not already on the worklist. */
156 FOR_EACH_EDGE (e, ei, bb->preds)
157 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
159 *qin++ = e->src;
160 e->src->aux = e;
161 qlen++;
162 if (qin >= qend)
163 qin = worklist;
167 clear_aux_for_edges ();
168 clear_aux_for_blocks ();
169 free (worklist);
172 /* Compute the earliest vector for edge based lcm. */
174 static void
175 compute_earliest (struct edge_list *edge_list, int n_exprs, sbitmap *antin,
176 sbitmap *antout, sbitmap *avout, sbitmap *kill,
177 sbitmap *earliest)
179 sbitmap difference, temp_bitmap;
180 int x, num_edges;
181 basic_block pred, succ;
183 num_edges = NUM_EDGES (edge_list);
185 difference = sbitmap_alloc (n_exprs);
186 temp_bitmap = sbitmap_alloc (n_exprs);
188 for (x = 0; x < num_edges; x++)
190 pred = INDEX_EDGE_PRED_BB (edge_list, x);
191 succ = INDEX_EDGE_SUCC_BB (edge_list, x);
192 if (pred == ENTRY_BLOCK_PTR)
193 sbitmap_copy (earliest[x], antin[succ->index]);
194 else
196 if (succ == EXIT_BLOCK_PTR)
197 sbitmap_zero (earliest[x]);
198 else
200 sbitmap_difference (difference, antin[succ->index],
201 avout[pred->index]);
202 sbitmap_not (temp_bitmap, antout[pred->index]);
203 sbitmap_a_and_b_or_c (earliest[x], difference,
204 kill[pred->index], temp_bitmap);
209 sbitmap_free (temp_bitmap);
210 sbitmap_free (difference);
213 /* later(p,s) is dependent on the calculation of laterin(p).
214 laterin(p) is dependent on the calculation of later(p2,p).
216 laterin(ENTRY) is defined as all 0's
217 later(ENTRY, succs(ENTRY)) are defined using laterin(ENTRY)
218 laterin(succs(ENTRY)) is defined by later(ENTRY, succs(ENTRY)).
220 If we progress in this manner, starting with all basic blocks
221 in the work list, anytime we change later(bb), we need to add
222 succs(bb) to the worklist if they are not already on the worklist.
224 Boundary conditions:
226 We prime the worklist all the normal basic blocks. The ENTRY block can
227 never be added to the worklist since it is never the successor of any
228 block. We explicitly prevent the EXIT block from being added to the
229 worklist.
231 We optimistically initialize LATER. That is the only time this routine
232 will compute LATER for an edge out of the entry block since the entry
233 block is never on the worklist. Thus, LATERIN is neither used nor
234 computed for the ENTRY block.
236 Since the EXIT block is never added to the worklist, we will neither
237 use nor compute LATERIN for the exit block. Edges which reach the
238 EXIT block are handled in the normal fashion inside the loop. However,
239 the insertion/deletion computation needs LATERIN(EXIT), so we have
240 to compute it. */
242 static void
243 compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
244 sbitmap *antloc, sbitmap *later, sbitmap *laterin)
246 int num_edges, i;
247 edge e;
248 basic_block *worklist, *qin, *qout, *qend, bb;
249 unsigned int qlen;
250 edge_iterator ei;
252 num_edges = NUM_EDGES (edge_list);
254 /* Allocate a worklist array/queue. Entries are only added to the
255 list if they were not already on the list. So the size is
256 bounded by the number of basic blocks. */
257 qin = qout = worklist
258 = XNEWVEC (basic_block, n_basic_blocks);
260 /* Initialize a mapping from each edge to its index. */
261 for (i = 0; i < num_edges; i++)
262 INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;
264 /* We want a maximal solution, so initially consider LATER true for
265 all edges. This allows propagation through a loop since the incoming
266 loop edge will have LATER set, so if all the other incoming edges
267 to the loop are set, then LATERIN will be set for the head of the
268 loop.
270 If the optimistic setting of LATER on that edge was incorrect (for
271 example the expression is ANTLOC in a block within the loop) then
272 this algorithm will detect it when we process the block at the head
273 of the optimistic edge. That will requeue the affected blocks. */
274 sbitmap_vector_ones (later, num_edges);
276 /* Note that even though we want an optimistic setting of LATER, we
277 do not want to be overly optimistic. Consider an outgoing edge from
278 the entry block. That edge should always have a LATER value the
279 same as EARLIEST for that edge. */
280 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
281 sbitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);
283 /* Add all the blocks to the worklist. This prevents an early exit from
284 the loop given our optimistic initialization of LATER above. */
285 FOR_EACH_BB (bb)
287 *qin++ = bb;
288 bb->aux = bb;
291 /* Note that we do not use the last allocated element for our queue,
292 as EXIT_BLOCK is never inserted into it. */
293 qin = worklist;
294 qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];
295 qlen = n_basic_blocks - NUM_FIXED_BLOCKS;
297 /* Iterate until the worklist is empty. */
298 while (qlen)
300 /* Take the first entry off the worklist. */
301 bb = *qout++;
302 bb->aux = NULL;
303 qlen--;
304 if (qout >= qend)
305 qout = worklist;
307 /* Compute the intersection of LATERIN for each incoming edge to B. */
308 sbitmap_ones (laterin[bb->index]);
309 FOR_EACH_EDGE (e, ei, bb->preds)
310 sbitmap_a_and_b (laterin[bb->index], laterin[bb->index],
311 later[(size_t)e->aux]);
313 /* Calculate LATER for all outgoing edges. */
314 FOR_EACH_EDGE (e, ei, bb->succs)
315 if (sbitmap_union_of_diff_cg (later[(size_t) e->aux],
316 earliest[(size_t) e->aux],
317 laterin[e->src->index],
318 antloc[e->src->index])
319 /* If LATER for an outgoing edge was changed, then we need
320 to add the target of the outgoing edge to the worklist. */
321 && e->dest != EXIT_BLOCK_PTR && e->dest->aux == 0)
323 *qin++ = e->dest;
324 e->dest->aux = e;
325 qlen++;
326 if (qin >= qend)
327 qin = worklist;
331 /* Computation of insertion and deletion points requires computing LATERIN
332 for the EXIT block. We allocated an extra entry in the LATERIN array
333 for just this purpose. */
334 sbitmap_ones (laterin[last_basic_block]);
335 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
336 sbitmap_a_and_b (laterin[last_basic_block],
337 laterin[last_basic_block],
338 later[(size_t) e->aux]);
340 clear_aux_for_edges ();
341 free (worklist);
344 /* Compute the insertion and deletion points for edge based LCM. */
346 static void
347 compute_insert_delete (struct edge_list *edge_list, sbitmap *antloc,
348 sbitmap *later, sbitmap *laterin, sbitmap *insert,
349 sbitmap *del)
351 int x;
352 basic_block bb;
354 FOR_EACH_BB (bb)
355 sbitmap_difference (del[bb->index], antloc[bb->index],
356 laterin[bb->index]);
358 for (x = 0; x < NUM_EDGES (edge_list); x++)
360 basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);
362 if (b == EXIT_BLOCK_PTR)
363 sbitmap_difference (insert[x], later[x], laterin[last_basic_block]);
364 else
365 sbitmap_difference (insert[x], later[x], laterin[b->index]);
369 /* Given local properties TRANSP, ANTLOC, AVOUT, KILL return the insert and
370 delete vectors for edge based LCM. Returns an edgelist which is used to
371 map the insert vector to what edge an expression should be inserted on. */
373 struct edge_list *
374 pre_edge_lcm (int n_exprs, sbitmap *transp,
375 sbitmap *avloc, sbitmap *antloc, sbitmap *kill,
376 sbitmap **insert, sbitmap **del)
378 sbitmap *antin, *antout, *earliest;
379 sbitmap *avin, *avout;
380 sbitmap *later, *laterin;
381 struct edge_list *edge_list;
382 int num_edges;
384 edge_list = create_edge_list ();
385 num_edges = NUM_EDGES (edge_list);
387 #ifdef LCM_DEBUG_INFO
388 if (dump_file)
390 fprintf (dump_file, "Edge List:\n");
391 verify_edge_list (dump_file, edge_list);
392 print_edge_list (dump_file, edge_list);
393 dump_sbitmap_vector (dump_file, "transp", "", transp, last_basic_block);
394 dump_sbitmap_vector (dump_file, "antloc", "", antloc, last_basic_block);
395 dump_sbitmap_vector (dump_file, "avloc", "", avloc, last_basic_block);
396 dump_sbitmap_vector (dump_file, "kill", "", kill, last_basic_block);
398 #endif
400 /* Compute global availability. */
401 avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
402 avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
403 compute_available (avloc, kill, avout, avin);
404 sbitmap_vector_free (avin);
406 /* Compute global anticipatability. */
407 antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
408 antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
409 compute_antinout_edge (antloc, transp, antin, antout);
411 #ifdef LCM_DEBUG_INFO
412 if (dump_file)
414 dump_sbitmap_vector (dump_file, "antin", "", antin, last_basic_block);
415 dump_sbitmap_vector (dump_file, "antout", "", antout, last_basic_block);
417 #endif
419 /* Compute earliestness. */
420 earliest = sbitmap_vector_alloc (num_edges, n_exprs);
421 compute_earliest (edge_list, n_exprs, antin, antout, avout, kill, earliest);
423 #ifdef LCM_DEBUG_INFO
424 if (dump_file)
425 dump_sbitmap_vector (dump_file, "earliest", "", earliest, num_edges);
426 #endif
428 sbitmap_vector_free (antout);
429 sbitmap_vector_free (antin);
430 sbitmap_vector_free (avout);
432 later = sbitmap_vector_alloc (num_edges, n_exprs);
434 /* Allocate an extra element for the exit block in the laterin vector. */
435 laterin = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
436 compute_laterin (edge_list, earliest, antloc, later, laterin);
438 #ifdef LCM_DEBUG_INFO
439 if (dump_file)
441 dump_sbitmap_vector (dump_file, "laterin", "", laterin, last_basic_block + 1);
442 dump_sbitmap_vector (dump_file, "later", "", later, num_edges);
444 #endif
446 sbitmap_vector_free (earliest);
448 *insert = sbitmap_vector_alloc (num_edges, n_exprs);
449 *del = sbitmap_vector_alloc (last_basic_block, n_exprs);
450 sbitmap_vector_zero (*insert, num_edges);
451 sbitmap_vector_zero (*del, last_basic_block);
452 compute_insert_delete (edge_list, antloc, later, laterin, *insert, *del);
454 sbitmap_vector_free (laterin);
455 sbitmap_vector_free (later);
457 #ifdef LCM_DEBUG_INFO
458 if (dump_file)
460 dump_sbitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
461 dump_sbitmap_vector (dump_file, "pre_delete_map", "", *del,
462 last_basic_block);
464 #endif
466 return edge_list;
469 /* Compute the AVIN and AVOUT vectors from the AVLOC and KILL vectors.
470 Return the number of passes we performed to iterate to a solution. */
472 void
473 compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
474 sbitmap *avin)
476 edge e;
477 basic_block *worklist, *qin, *qout, *qend, bb;
478 unsigned int qlen;
479 edge_iterator ei;
481 /* Allocate a worklist array/queue. Entries are only added to the
482 list if they were not already on the list. So the size is
483 bounded by the number of basic blocks. */
484 qin = qout = worklist =
485 XNEWVEC (basic_block, n_basic_blocks - NUM_FIXED_BLOCKS);
487 /* We want a maximal solution. */
488 sbitmap_vector_ones (avout, last_basic_block);
490 /* Put every block on the worklist; this is necessary because of the
491 optimistic initialization of AVOUT above. */
492 FOR_EACH_BB (bb)
494 *qin++ = bb;
495 bb->aux = bb;
498 qin = worklist;
499 qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];
500 qlen = n_basic_blocks - NUM_FIXED_BLOCKS;
502 /* Mark blocks which are successors of the entry block so that we
503 can easily identify them below. */
504 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
505 e->dest->aux = ENTRY_BLOCK_PTR;
507 /* Iterate until the worklist is empty. */
508 while (qlen)
510 /* Take the first entry off the worklist. */
511 bb = *qout++;
512 qlen--;
514 if (qout >= qend)
515 qout = worklist;
517 /* If one of the predecessor blocks is the ENTRY block, then the
518 intersection of avouts is the null set. We can identify such blocks
519 by the special value in the AUX field in the block structure. */
520 if (bb->aux == ENTRY_BLOCK_PTR)
521 /* Do not clear the aux field for blocks which are successors of the
522 ENTRY block. That way we never add then to the worklist again. */
523 sbitmap_zero (avin[bb->index]);
524 else
526 /* Clear the aux field of this block so that it can be added to
527 the worklist again if necessary. */
528 bb->aux = NULL;
529 sbitmap_intersection_of_preds (avin[bb->index], avout, bb);
532 if (sbitmap_union_of_diff_cg (avout[bb->index], avloc[bb->index],
533 avin[bb->index], kill[bb->index]))
534 /* If the out state of this block changed, then we need
535 to add the successors of this block to the worklist
536 if they are not already on the worklist. */
537 FOR_EACH_EDGE (e, ei, bb->succs)
538 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
540 *qin++ = e->dest;
541 e->dest->aux = e;
542 qlen++;
544 if (qin >= qend)
545 qin = worklist;
549 clear_aux_for_edges ();
550 clear_aux_for_blocks ();
551 free (worklist);
554 /* Compute the farthest vector for edge based lcm. */
556 static void
557 compute_farthest (struct edge_list *edge_list, int n_exprs,
558 sbitmap *st_avout, sbitmap *st_avin, sbitmap *st_antin,
559 sbitmap *kill, sbitmap *farthest)
561 sbitmap difference, temp_bitmap;
562 int x, num_edges;
563 basic_block pred, succ;
565 num_edges = NUM_EDGES (edge_list);
567 difference = sbitmap_alloc (n_exprs);
568 temp_bitmap = sbitmap_alloc (n_exprs);
570 for (x = 0; x < num_edges; x++)
572 pred = INDEX_EDGE_PRED_BB (edge_list, x);
573 succ = INDEX_EDGE_SUCC_BB (edge_list, x);
574 if (succ == EXIT_BLOCK_PTR)
575 sbitmap_copy (farthest[x], st_avout[pred->index]);
576 else
578 if (pred == ENTRY_BLOCK_PTR)
579 sbitmap_zero (farthest[x]);
580 else
582 sbitmap_difference (difference, st_avout[pred->index],
583 st_antin[succ->index]);
584 sbitmap_not (temp_bitmap, st_avin[succ->index]);
585 sbitmap_a_and_b_or_c (farthest[x], difference,
586 kill[succ->index], temp_bitmap);
591 sbitmap_free (temp_bitmap);
592 sbitmap_free (difference);
595 /* Compute nearer and nearerout vectors for edge based lcm.
597 This is the mirror of compute_laterin, additional comments on the
598 implementation can be found before compute_laterin. */
600 static void
601 compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
602 sbitmap *st_avloc, sbitmap *nearer, sbitmap *nearerout)
604 int num_edges, i;
605 edge e;
606 basic_block *worklist, *tos, bb;
607 edge_iterator ei;
609 num_edges = NUM_EDGES (edge_list);
611 /* Allocate a worklist array/queue. Entries are only added to the
612 list if they were not already on the list. So the size is
613 bounded by the number of basic blocks. */
614 tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
616 /* Initialize NEARER for each edge and build a mapping from an edge to
617 its index. */
618 for (i = 0; i < num_edges; i++)
619 INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;
621 /* We want a maximal solution. */
622 sbitmap_vector_ones (nearer, num_edges);
624 /* Note that even though we want an optimistic setting of NEARER, we
625 do not want to be overly optimistic. Consider an incoming edge to
626 the exit block. That edge should always have a NEARER value the
627 same as FARTHEST for that edge. */
628 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
629 sbitmap_copy (nearer[(size_t)e->aux], farthest[(size_t)e->aux]);
631 /* Add all the blocks to the worklist. This prevents an early exit
632 from the loop given our optimistic initialization of NEARER. */
633 FOR_EACH_BB (bb)
635 *tos++ = bb;
636 bb->aux = bb;
639 /* Iterate until the worklist is empty. */
640 while (tos != worklist)
642 /* Take the first entry off the worklist. */
643 bb = *--tos;
644 bb->aux = NULL;
646 /* Compute the intersection of NEARER for each outgoing edge from B. */
647 sbitmap_ones (nearerout[bb->index]);
648 FOR_EACH_EDGE (e, ei, bb->succs)
649 sbitmap_a_and_b (nearerout[bb->index], nearerout[bb->index],
650 nearer[(size_t) e->aux]);
652 /* Calculate NEARER for all incoming edges. */
653 FOR_EACH_EDGE (e, ei, bb->preds)
654 if (sbitmap_union_of_diff_cg (nearer[(size_t) e->aux],
655 farthest[(size_t) e->aux],
656 nearerout[e->dest->index],
657 st_avloc[e->dest->index])
658 /* If NEARER for an incoming edge was changed, then we need
659 to add the source of the incoming edge to the worklist. */
660 && e->src != ENTRY_BLOCK_PTR && e->src->aux == 0)
662 *tos++ = e->src;
663 e->src->aux = e;
667 /* Computation of insertion and deletion points requires computing NEAREROUT
668 for the ENTRY block. We allocated an extra entry in the NEAREROUT array
669 for just this purpose. */
670 sbitmap_ones (nearerout[last_basic_block]);
671 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
672 sbitmap_a_and_b (nearerout[last_basic_block],
673 nearerout[last_basic_block],
674 nearer[(size_t) e->aux]);
676 clear_aux_for_edges ();
677 free (tos);
680 /* Compute the insertion and deletion points for edge based LCM. */
682 static void
683 compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *st_avloc,
684 sbitmap *nearer, sbitmap *nearerout,
685 sbitmap *insert, sbitmap *del)
687 int x;
688 basic_block bb;
690 FOR_EACH_BB (bb)
691 sbitmap_difference (del[bb->index], st_avloc[bb->index],
692 nearerout[bb->index]);
694 for (x = 0; x < NUM_EDGES (edge_list); x++)
696 basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
697 if (b == ENTRY_BLOCK_PTR)
698 sbitmap_difference (insert[x], nearer[x], nearerout[last_basic_block]);
699 else
700 sbitmap_difference (insert[x], nearer[x], nearerout[b->index]);
704 /* Given local properties TRANSP, ST_AVLOC, ST_ANTLOC, KILL return the
705 insert and delete vectors for edge based reverse LCM. Returns an
706 edgelist which is used to map the insert vector to what edge
707 an expression should be inserted on. */
709 struct edge_list *
710 pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
711 sbitmap *st_avloc, sbitmap *st_antloc, sbitmap *kill,
712 sbitmap **insert, sbitmap **del)
714 sbitmap *st_antin, *st_antout;
715 sbitmap *st_avout, *st_avin, *farthest;
716 sbitmap *nearer, *nearerout;
717 struct edge_list *edge_list;
718 int num_edges;
720 edge_list = create_edge_list ();
721 num_edges = NUM_EDGES (edge_list);
723 st_antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
724 st_antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
725 sbitmap_vector_zero (st_antin, last_basic_block);
726 sbitmap_vector_zero (st_antout, last_basic_block);
727 compute_antinout_edge (st_antloc, transp, st_antin, st_antout);
729 /* Compute global anticipatability. */
730 st_avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
731 st_avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
732 compute_available (st_avloc, kill, st_avout, st_avin);
734 #ifdef LCM_DEBUG_INFO
735 if (dump_file)
737 fprintf (dump_file, "Edge List:\n");
738 verify_edge_list (dump_file, edge_list);
739 print_edge_list (dump_file, edge_list);
740 dump_sbitmap_vector (dump_file, "transp", "", transp, last_basic_block);
741 dump_sbitmap_vector (dump_file, "st_avloc", "", st_avloc, last_basic_block);
742 dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
743 dump_sbitmap_vector (dump_file, "st_antin", "", st_antin, last_basic_block);
744 dump_sbitmap_vector (dump_file, "st_antout", "", st_antout, last_basic_block);
745 dump_sbitmap_vector (dump_file, "st_kill", "", kill, last_basic_block);
747 #endif
749 #ifdef LCM_DEBUG_INFO
750 if (dump_file)
752 dump_sbitmap_vector (dump_file, "st_avout", "", st_avout, last_basic_block);
753 dump_sbitmap_vector (dump_file, "st_avin", "", st_avin, last_basic_block);
755 #endif
757 /* Compute farthestness. */
758 farthest = sbitmap_vector_alloc (num_edges, n_exprs);
759 compute_farthest (edge_list, n_exprs, st_avout, st_avin, st_antin,
760 kill, farthest);
762 #ifdef LCM_DEBUG_INFO
763 if (dump_file)
764 dump_sbitmap_vector (dump_file, "farthest", "", farthest, num_edges);
765 #endif
767 sbitmap_vector_free (st_antin);
768 sbitmap_vector_free (st_antout);
770 sbitmap_vector_free (st_avin);
771 sbitmap_vector_free (st_avout);
773 nearer = sbitmap_vector_alloc (num_edges, n_exprs);
775 /* Allocate an extra element for the entry block. */
776 nearerout = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
777 compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);
779 #ifdef LCM_DEBUG_INFO
780 if (dump_file)
782 dump_sbitmap_vector (dump_file, "nearerout", "", nearerout,
783 last_basic_block + 1);
784 dump_sbitmap_vector (dump_file, "nearer", "", nearer, num_edges);
786 #endif
788 sbitmap_vector_free (farthest);
790 *insert = sbitmap_vector_alloc (num_edges, n_exprs);
791 *del = sbitmap_vector_alloc (last_basic_block, n_exprs);
792 compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
793 *insert, *del);
795 sbitmap_vector_free (nearerout);
796 sbitmap_vector_free (nearer);
798 #ifdef LCM_DEBUG_INFO
799 if (dump_file)
801 dump_sbitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
802 dump_sbitmap_vector (dump_file, "pre_delete_map", "", *del,
803 last_basic_block);
805 #endif
806 return edge_list;