2015-09-21 Steven G. Kargl <kargl@gcc.gnu.org>
[official-gcc.git] / gcc / lcm.c
blobf6d1ed542f42427de875c1d106b3ba9d3e925735
1 /* Generic partial redundancy elimination with lazy code motion support.
2 Copyright (C) 1998-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* These routines are meant to be used by various optimization
21 passes which can be modeled as lazy code motion problems.
22 Including, but not limited to:
24 * Traditional partial redundancy elimination.
26    * Placement of caller/callee register save/restores.
28 * Load/store motion.
30 * Copy motion.
32 * Conversion of flat register files to a stacked register
33 model.
35 * Dead load/store elimination.
37 These routines accept as input:
39 * Basic block information (number of blocks, lists of
40 predecessors and successors). Note the granularity
41 does not need to be basic block, they could be statements
42 or functions.
44 * Bitmaps of local properties (computed, transparent and
45 anticipatable expressions).
47 The output of these routines is bitmap of redundant computations
48 and a bitmap of optimal placement points. */
51 #include "config.h"
52 #include "system.h"
53 #include "coretypes.h"
54 #include "backend.h"
55 #include "rtl.h"
56 #include "regs.h"
57 #include "flags.h"
58 #include "insn-config.h"
59 #include "recog.h"
60 #include "cfganal.h"
61 #include "lcm.h"
62 #include "tm_p.h"
63 #include "dumpfile.h"
65 /* Edge based LCM routines. */
66 static void compute_antinout_edge (sbitmap *, sbitmap *, sbitmap *, sbitmap *);
67 static void compute_earliest (struct edge_list *, int, sbitmap *, sbitmap *,
68 sbitmap *, sbitmap *, sbitmap *);
69 static void compute_laterin (struct edge_list *, sbitmap *, sbitmap *,
70 sbitmap *, sbitmap *);
71 static void compute_insert_delete (struct edge_list *edge_list, sbitmap *,
72 sbitmap *, sbitmap *, sbitmap *, sbitmap *);
74 /* Edge based LCM routines on a reverse flowgraph. */
75 static void compute_farthest (struct edge_list *, int, sbitmap *, sbitmap *,
76 sbitmap*, sbitmap *, sbitmap *);
77 static void compute_nearerout (struct edge_list *, sbitmap *, sbitmap *,
78 sbitmap *, sbitmap *);
79 static void compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *,
80 sbitmap *, sbitmap *, sbitmap *,
81 sbitmap *);
83 /* Edge based lcm routines. */
85 /* Compute expression anticipatability at entrance and exit of each block.
86 This is done based on the flow graph, and not on the pred-succ lists.
87 Other than that, its pretty much identical to compute_antinout. */
89 static void
90 compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
91 sbitmap *antout)
93 basic_block bb;
94 edge e;
95 basic_block *worklist, *qin, *qout, *qend;
96 unsigned int qlen;
97 edge_iterator ei;
99 /* Allocate a worklist array/queue. Entries are only added to the
100 list if they were not already on the list. So the size is
101 bounded by the number of basic blocks. */
102 qin = qout = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
104 /* We want a maximal solution, so make an optimistic initialization of
105 ANTIN. */
106 bitmap_vector_ones (antin, last_basic_block_for_fn (cfun));
108 /* Put every block on the worklist; this is necessary because of the
109 optimistic initialization of ANTIN above. */
110 int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
111 int postorder_num = post_order_compute (postorder, false, false);
112 for (int i = 0; i < postorder_num; ++i)
114 bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
115 *qin++ = bb;
116 bb->aux = bb;
118 free (postorder);
120 qin = worklist;
121 qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
122 qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
124 /* Mark blocks which are predecessors of the exit block so that we
125 can easily identify them below. */
126 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
127 e->src->aux = EXIT_BLOCK_PTR_FOR_FN (cfun);
129 /* Iterate until the worklist is empty. */
130 while (qlen)
132 /* Take the first entry off the worklist. */
133 bb = *qout++;
134 qlen--;
136 if (qout >= qend)
137 qout = worklist;
139 if (bb->aux == EXIT_BLOCK_PTR_FOR_FN (cfun))
140 /* Do not clear the aux field for blocks which are predecessors of
141 the EXIT block. That way we never add then to the worklist
142 again. */
143 bitmap_clear (antout[bb->index]);
144 else
146 /* Clear the aux field of this block so that it can be added to
147 the worklist again if necessary. */
148 bb->aux = NULL;
149 bitmap_intersection_of_succs (antout[bb->index], antin, bb);
152 if (bitmap_or_and (antin[bb->index], antloc[bb->index],
153 transp[bb->index], antout[bb->index]))
154 /* If the in state of this block changed, then we need
155 to add the predecessors of this block to the worklist
156 if they are not already on the worklist. */
157 FOR_EACH_EDGE (e, ei, bb->preds)
158 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
160 *qin++ = e->src;
161 e->src->aux = e;
162 qlen++;
163 if (qin >= qend)
164 qin = worklist;
168 clear_aux_for_edges ();
169 clear_aux_for_blocks ();
170 free (worklist);
173 /* Compute the earliest vector for edge based lcm. */
175 static void
176 compute_earliest (struct edge_list *edge_list, int n_exprs, sbitmap *antin,
177 sbitmap *antout, sbitmap *avout, sbitmap *kill,
178 sbitmap *earliest)
180 sbitmap difference, temp_bitmap;
181 int x, num_edges;
182 basic_block pred, succ;
184 num_edges = NUM_EDGES (edge_list);
186 difference = sbitmap_alloc (n_exprs);
187 temp_bitmap = sbitmap_alloc (n_exprs);
189 for (x = 0; x < num_edges; x++)
191 pred = INDEX_EDGE_PRED_BB (edge_list, x);
192 succ = INDEX_EDGE_SUCC_BB (edge_list, x);
193 if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun))
194 bitmap_copy (earliest[x], antin[succ->index]);
195 else
197 if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun))
198 bitmap_clear (earliest[x]);
199 else
201 bitmap_and_compl (difference, antin[succ->index],
202 avout[pred->index]);
203 bitmap_not (temp_bitmap, antout[pred->index]);
204 bitmap_and_or (earliest[x], difference,
205 kill[pred->index], temp_bitmap);
210 sbitmap_free (temp_bitmap);
211 sbitmap_free (difference);
214 /* later(p,s) is dependent on the calculation of laterin(p).
215 laterin(p) is dependent on the calculation of later(p2,p).
217 laterin(ENTRY) is defined as all 0's
218 later(ENTRY, succs(ENTRY)) are defined using laterin(ENTRY)
219 laterin(succs(ENTRY)) is defined by later(ENTRY, succs(ENTRY)).
221 If we progress in this manner, starting with all basic blocks
222 in the work list, anytime we change later(bb), we need to add
223 succs(bb) to the worklist if they are not already on the worklist.
225 Boundary conditions:
227 We prime the worklist all the normal basic blocks. The ENTRY block can
228 never be added to the worklist since it is never the successor of any
229 block. We explicitly prevent the EXIT block from being added to the
230 worklist.
232 We optimistically initialize LATER. That is the only time this routine
233 will compute LATER for an edge out of the entry block since the entry
234 block is never on the worklist. Thus, LATERIN is neither used nor
235 computed for the ENTRY block.
237 Since the EXIT block is never added to the worklist, we will neither
238 use nor compute LATERIN for the exit block. Edges which reach the
239 EXIT block are handled in the normal fashion inside the loop. However,
240 the insertion/deletion computation needs LATERIN(EXIT), so we have
241 to compute it. */
243 static void
244 compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
245 sbitmap *antloc, sbitmap *later, sbitmap *laterin)
247 int num_edges, i;
248 edge e;
249 basic_block *worklist, *qin, *qout, *qend, bb;
250 unsigned int qlen;
251 edge_iterator ei;
253 num_edges = NUM_EDGES (edge_list);
255 /* Allocate a worklist array/queue. Entries are only added to the
256 list if they were not already on the list. So the size is
257 bounded by the number of basic blocks. */
258 qin = qout = worklist
259 = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
261 /* Initialize a mapping from each edge to its index. */
262 for (i = 0; i < num_edges; i++)
263 INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;
265 /* We want a maximal solution, so initially consider LATER true for
266 all edges. This allows propagation through a loop since the incoming
267 loop edge will have LATER set, so if all the other incoming edges
268 to the loop are set, then LATERIN will be set for the head of the
269 loop.
271 If the optimistic setting of LATER on that edge was incorrect (for
272 example the expression is ANTLOC in a block within the loop) then
273 this algorithm will detect it when we process the block at the head
274 of the optimistic edge. That will requeue the affected blocks. */
275 bitmap_vector_ones (later, num_edges);
277 /* Note that even though we want an optimistic setting of LATER, we
278 do not want to be overly optimistic. Consider an outgoing edge from
279 the entry block. That edge should always have a LATER value the
280 same as EARLIEST for that edge. */
281 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
282 bitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);
284 /* Add all the blocks to the worklist. This prevents an early exit from
285 the loop given our optimistic initialization of LATER above. */
286 int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
287 int postorder_num = inverted_post_order_compute (postorder);
288 for (int i = 0; i < postorder_num; ++i)
290 bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
291 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
292 || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
293 continue;
294 *qin++ = bb;
295 bb->aux = bb;
297 free (postorder);
299 /* Note that we do not use the last allocated element for our queue,
300 as EXIT_BLOCK is never inserted into it. */
301 qin = worklist;
302 qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
303 qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
305 /* Iterate until the worklist is empty. */
306 while (qlen)
308 /* Take the first entry off the worklist. */
309 bb = *qout++;
310 bb->aux = NULL;
311 qlen--;
312 if (qout >= qend)
313 qout = worklist;
315 /* Compute the intersection of LATERIN for each incoming edge to B. */
316 bitmap_ones (laterin[bb->index]);
317 FOR_EACH_EDGE (e, ei, bb->preds)
318 bitmap_and (laterin[bb->index], laterin[bb->index],
319 later[(size_t)e->aux]);
321 /* Calculate LATER for all outgoing edges. */
322 FOR_EACH_EDGE (e, ei, bb->succs)
323 if (bitmap_ior_and_compl (later[(size_t) e->aux],
324 earliest[(size_t) e->aux],
325 laterin[bb->index],
326 antloc[bb->index])
327 /* If LATER for an outgoing edge was changed, then we need
328 to add the target of the outgoing edge to the worklist. */
329 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest->aux == 0)
331 *qin++ = e->dest;
332 e->dest->aux = e;
333 qlen++;
334 if (qin >= qend)
335 qin = worklist;
339 /* Computation of insertion and deletion points requires computing LATERIN
340 for the EXIT block. We allocated an extra entry in the LATERIN array
341 for just this purpose. */
342 bitmap_ones (laterin[last_basic_block_for_fn (cfun)]);
343 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
344 bitmap_and (laterin[last_basic_block_for_fn (cfun)],
345 laterin[last_basic_block_for_fn (cfun)],
346 later[(size_t) e->aux]);
348 clear_aux_for_edges ();
349 free (worklist);
352 /* Compute the insertion and deletion points for edge based LCM. */
354 static void
355 compute_insert_delete (struct edge_list *edge_list, sbitmap *antloc,
356 sbitmap *later, sbitmap *laterin, sbitmap *insert,
357 sbitmap *del)
359 int x;
360 basic_block bb;
362 FOR_EACH_BB_FN (bb, cfun)
363 bitmap_and_compl (del[bb->index], antloc[bb->index],
364 laterin[bb->index]);
366 for (x = 0; x < NUM_EDGES (edge_list); x++)
368 basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);
370 if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
371 bitmap_and_compl (insert[x], later[x],
372 laterin[last_basic_block_for_fn (cfun)]);
373 else
374 bitmap_and_compl (insert[x], later[x], laterin[b->index]);
378 /* Given local properties TRANSP, ANTLOC, AVLOC, KILL return the insert and
379 delete vectors for edge based LCM and return the AVIN, AVOUT bitmap.
380 map the insert vector to what edge an expression should be inserted on. */
382 struct edge_list *
383 pre_edge_lcm_avs (int n_exprs, sbitmap *transp,
384 sbitmap *avloc, sbitmap *antloc, sbitmap *kill,
385 sbitmap *avin, sbitmap *avout,
386 sbitmap **insert, sbitmap **del)
388 sbitmap *antin, *antout, *earliest;
389 sbitmap *later, *laterin;
390 struct edge_list *edge_list;
391 int num_edges;
393 edge_list = create_edge_list ();
394 num_edges = NUM_EDGES (edge_list);
396 #ifdef LCM_DEBUG_INFO
397 if (dump_file)
399 fprintf (dump_file, "Edge List:\n");
400 verify_edge_list (dump_file, edge_list);
401 print_edge_list (dump_file, edge_list);
402 dump_bitmap_vector (dump_file, "transp", "", transp,
403 last_basic_block_for_fn (cfun));
404 dump_bitmap_vector (dump_file, "antloc", "", antloc,
405 last_basic_block_for_fn (cfun));
406 dump_bitmap_vector (dump_file, "avloc", "", avloc,
407 last_basic_block_for_fn (cfun));
408 dump_bitmap_vector (dump_file, "kill", "", kill,
409 last_basic_block_for_fn (cfun));
411 #endif
413 /* Compute global availability. */
414 compute_available (avloc, kill, avout, avin);
416 /* Compute global anticipatability. */
417 antin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
418 antout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
419 compute_antinout_edge (antloc, transp, antin, antout);
421 #ifdef LCM_DEBUG_INFO
422 if (dump_file)
424 dump_bitmap_vector (dump_file, "antin", "", antin,
425 last_basic_block_for_fn (cfun));
426 dump_bitmap_vector (dump_file, "antout", "", antout,
427 last_basic_block_for_fn (cfun));
429 #endif
431 /* Compute earliestness. */
432 earliest = sbitmap_vector_alloc (num_edges, n_exprs);
433 compute_earliest (edge_list, n_exprs, antin, antout, avout, kill, earliest);
435 #ifdef LCM_DEBUG_INFO
436 if (dump_file)
437 dump_bitmap_vector (dump_file, "earliest", "", earliest, num_edges);
438 #endif
440 sbitmap_vector_free (antout);
441 sbitmap_vector_free (antin);
443 later = sbitmap_vector_alloc (num_edges, n_exprs);
445 /* Allocate an extra element for the exit block in the laterin vector. */
446 laterin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun) + 1,
447 n_exprs);
448 compute_laterin (edge_list, earliest, antloc, later, laterin);
450 #ifdef LCM_DEBUG_INFO
451 if (dump_file)
453 dump_bitmap_vector (dump_file, "laterin", "", laterin,
454 last_basic_block_for_fn (cfun) + 1);
455 dump_bitmap_vector (dump_file, "later", "", later, num_edges);
457 #endif
459 sbitmap_vector_free (earliest);
461 *insert = sbitmap_vector_alloc (num_edges, n_exprs);
462 *del = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
463 bitmap_vector_clear (*insert, num_edges);
464 bitmap_vector_clear (*del, last_basic_block_for_fn (cfun));
465 compute_insert_delete (edge_list, antloc, later, laterin, *insert, *del);
467 sbitmap_vector_free (laterin);
468 sbitmap_vector_free (later);
470 #ifdef LCM_DEBUG_INFO
471 if (dump_file)
473 dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
474 dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
475 last_basic_block_for_fn (cfun));
477 #endif
479 return edge_list;
482 /* Wrapper to allocate avin/avout and call pre_edge_lcm_avs. */
484 struct edge_list *
485 pre_edge_lcm (int n_exprs, sbitmap *transp,
486 sbitmap *avloc, sbitmap *antloc, sbitmap *kill,
487 sbitmap **insert, sbitmap **del)
489 struct edge_list *edge_list;
490 sbitmap *avin, *avout;
492 avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
493 avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
495 edge_list = pre_edge_lcm_avs (n_exprs, transp, avloc, antloc, kill,
496 avin, avout, insert, del);
498 sbitmap_vector_free (avout);
499 sbitmap_vector_free (avin);
501 return edge_list;
504 /* Compute the AVIN and AVOUT vectors from the AVLOC and KILL vectors.
505 Return the number of passes we performed to iterate to a solution. */
507 void
508 compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
509 sbitmap *avin)
511 edge e;
512 basic_block *worklist, *qin, *qout, *qend, bb;
513 unsigned int qlen;
514 edge_iterator ei;
516 /* Allocate a worklist array/queue. Entries are only added to the
517 list if they were not already on the list. So the size is
518 bounded by the number of basic blocks. */
519 qin = qout = worklist =
520 XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
522 /* We want a maximal solution. */
523 bitmap_vector_ones (avout, last_basic_block_for_fn (cfun));
525 /* Put every block on the worklist; this is necessary because of the
526 optimistic initialization of AVOUT above. Use inverted postorder
527 to make the dataflow problem require less iterations. */
528 int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
529 int postorder_num = inverted_post_order_compute (postorder);
530 for (int i = 0; i < postorder_num; ++i)
532 bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
533 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
534 || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
535 continue;
536 *qin++ = bb;
537 bb->aux = bb;
539 free (postorder);
541 qin = worklist;
542 qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
543 qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
545 /* Mark blocks which are successors of the entry block so that we
546 can easily identify them below. */
547 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
548 e->dest->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun);
550 /* Iterate until the worklist is empty. */
551 while (qlen)
553 /* Take the first entry off the worklist. */
554 bb = *qout++;
555 qlen--;
557 if (qout >= qend)
558 qout = worklist;
560 /* If one of the predecessor blocks is the ENTRY block, then the
561 intersection of avouts is the null set. We can identify such blocks
562 by the special value in the AUX field in the block structure. */
563 if (bb->aux == ENTRY_BLOCK_PTR_FOR_FN (cfun))
564 /* Do not clear the aux field for blocks which are successors of the
565 ENTRY block. That way we never add then to the worklist again. */
566 bitmap_clear (avin[bb->index]);
567 else
569 /* Clear the aux field of this block so that it can be added to
570 the worklist again if necessary. */
571 bb->aux = NULL;
572 bitmap_intersection_of_preds (avin[bb->index], avout, bb);
575 if (bitmap_ior_and_compl (avout[bb->index], avloc[bb->index],
576 avin[bb->index], kill[bb->index]))
577 /* If the out state of this block changed, then we need
578 to add the successors of this block to the worklist
579 if they are not already on the worklist. */
580 FOR_EACH_EDGE (e, ei, bb->succs)
581 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
583 *qin++ = e->dest;
584 e->dest->aux = e;
585 qlen++;
587 if (qin >= qend)
588 qin = worklist;
592 clear_aux_for_edges ();
593 clear_aux_for_blocks ();
594 free (worklist);
597 /* Compute the farthest vector for edge based lcm. */
599 static void
600 compute_farthest (struct edge_list *edge_list, int n_exprs,
601 sbitmap *st_avout, sbitmap *st_avin, sbitmap *st_antin,
602 sbitmap *kill, sbitmap *farthest)
604 sbitmap difference, temp_bitmap;
605 int x, num_edges;
606 basic_block pred, succ;
608 num_edges = NUM_EDGES (edge_list);
610 difference = sbitmap_alloc (n_exprs);
611 temp_bitmap = sbitmap_alloc (n_exprs);
613 for (x = 0; x < num_edges; x++)
615 pred = INDEX_EDGE_PRED_BB (edge_list, x);
616 succ = INDEX_EDGE_SUCC_BB (edge_list, x);
617 if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun))
618 bitmap_copy (farthest[x], st_avout[pred->index]);
619 else
621 if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun))
622 bitmap_clear (farthest[x]);
623 else
625 bitmap_and_compl (difference, st_avout[pred->index],
626 st_antin[succ->index]);
627 bitmap_not (temp_bitmap, st_avin[succ->index]);
628 bitmap_and_or (farthest[x], difference,
629 kill[succ->index], temp_bitmap);
634 sbitmap_free (temp_bitmap);
635 sbitmap_free (difference);
638 /* Compute nearer and nearerout vectors for edge based lcm.
640 This is the mirror of compute_laterin, additional comments on the
641 implementation can be found before compute_laterin. */
643 static void
644 compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
645 sbitmap *st_avloc, sbitmap *nearer, sbitmap *nearerout)
647 int num_edges, i;
648 edge e;
649 basic_block *worklist, *tos, bb;
650 edge_iterator ei;
652 num_edges = NUM_EDGES (edge_list);
654 /* Allocate a worklist array/queue. Entries are only added to the
655 list if they were not already on the list. So the size is
656 bounded by the number of basic blocks. */
657 tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
659 /* Initialize NEARER for each edge and build a mapping from an edge to
660 its index. */
661 for (i = 0; i < num_edges; i++)
662 INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;
664 /* We want a maximal solution. */
665 bitmap_vector_ones (nearer, num_edges);
667 /* Note that even though we want an optimistic setting of NEARER, we
668 do not want to be overly optimistic. Consider an incoming edge to
669 the exit block. That edge should always have a NEARER value the
670 same as FARTHEST for that edge. */
671 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
672 bitmap_copy (nearer[(size_t)e->aux], farthest[(size_t)e->aux]);
674 /* Add all the blocks to the worklist. This prevents an early exit
675 from the loop given our optimistic initialization of NEARER. */
676 FOR_EACH_BB_FN (bb, cfun)
678 *tos++ = bb;
679 bb->aux = bb;
682 /* Iterate until the worklist is empty. */
683 while (tos != worklist)
685 /* Take the first entry off the worklist. */
686 bb = *--tos;
687 bb->aux = NULL;
689 /* Compute the intersection of NEARER for each outgoing edge from B. */
690 bitmap_ones (nearerout[bb->index]);
691 FOR_EACH_EDGE (e, ei, bb->succs)
692 bitmap_and (nearerout[bb->index], nearerout[bb->index],
693 nearer[(size_t) e->aux]);
695 /* Calculate NEARER for all incoming edges. */
696 FOR_EACH_EDGE (e, ei, bb->preds)
697 if (bitmap_ior_and_compl (nearer[(size_t) e->aux],
698 farthest[(size_t) e->aux],
699 nearerout[e->dest->index],
700 st_avloc[e->dest->index])
701 /* If NEARER for an incoming edge was changed, then we need
702 to add the source of the incoming edge to the worklist. */
703 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) && e->src->aux == 0)
705 *tos++ = e->src;
706 e->src->aux = e;
710 /* Computation of insertion and deletion points requires computing NEAREROUT
711 for the ENTRY block. We allocated an extra entry in the NEAREROUT array
712 for just this purpose. */
713 bitmap_ones (nearerout[last_basic_block_for_fn (cfun)]);
714 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
715 bitmap_and (nearerout[last_basic_block_for_fn (cfun)],
716 nearerout[last_basic_block_for_fn (cfun)],
717 nearer[(size_t) e->aux]);
719 clear_aux_for_edges ();
720 free (tos);
723 /* Compute the insertion and deletion points for edge based LCM. */
725 static void
726 compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *st_avloc,
727 sbitmap *nearer, sbitmap *nearerout,
728 sbitmap *insert, sbitmap *del)
730 int x;
731 basic_block bb;
733 FOR_EACH_BB_FN (bb, cfun)
734 bitmap_and_compl (del[bb->index], st_avloc[bb->index],
735 nearerout[bb->index]);
737 for (x = 0; x < NUM_EDGES (edge_list); x++)
739 basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
740 if (b == ENTRY_BLOCK_PTR_FOR_FN (cfun))
741 bitmap_and_compl (insert[x], nearer[x],
742 nearerout[last_basic_block_for_fn (cfun)]);
743 else
744 bitmap_and_compl (insert[x], nearer[x], nearerout[b->index]);
748 /* Given local properties TRANSP, ST_AVLOC, ST_ANTLOC, KILL return the
749 insert and delete vectors for edge based reverse LCM. Returns an
750 edgelist which is used to map the insert vector to what edge
751 an expression should be inserted on. */
753 struct edge_list *
754 pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
755 sbitmap *st_avloc, sbitmap *st_antloc, sbitmap *kill,
756 sbitmap **insert, sbitmap **del)
758 sbitmap *st_antin, *st_antout;
759 sbitmap *st_avout, *st_avin, *farthest;
760 sbitmap *nearer, *nearerout;
761 struct edge_list *edge_list;
762 int num_edges;
764 edge_list = create_edge_list ();
765 num_edges = NUM_EDGES (edge_list);
767 st_antin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
768 st_antout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
769 bitmap_vector_clear (st_antin, last_basic_block_for_fn (cfun));
770 bitmap_vector_clear (st_antout, last_basic_block_for_fn (cfun));
771 compute_antinout_edge (st_antloc, transp, st_antin, st_antout);
773 /* Compute global anticipatability. */
774 st_avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
775 st_avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
776 compute_available (st_avloc, kill, st_avout, st_avin);
778 #ifdef LCM_DEBUG_INFO
779 if (dump_file)
781 fprintf (dump_file, "Edge List:\n");
782 verify_edge_list (dump_file, edge_list);
783 print_edge_list (dump_file, edge_list);
784 dump_bitmap_vector (dump_file, "transp", "", transp,
785 last_basic_block_for_fn (cfun));
786 dump_bitmap_vector (dump_file, "st_avloc", "", st_avloc,
787 last_basic_block_for_fn (cfun));
788 dump_bitmap_vector (dump_file, "st_antloc", "", st_antloc,
789 last_basic_block_for_fn (cfun));
790 dump_bitmap_vector (dump_file, "st_antin", "", st_antin,
791 last_basic_block_for_fn (cfun));
792 dump_bitmap_vector (dump_file, "st_antout", "", st_antout,
793 last_basic_block_for_fn (cfun));
794 dump_bitmap_vector (dump_file, "st_kill", "", kill,
795 last_basic_block_for_fn (cfun));
797 #endif
799 #ifdef LCM_DEBUG_INFO
800 if (dump_file)
802 dump_bitmap_vector (dump_file, "st_avout", "", st_avout, last_basic_block_for_fn (cfun));
803 dump_bitmap_vector (dump_file, "st_avin", "", st_avin, last_basic_block_for_fn (cfun));
805 #endif
807 /* Compute farthestness. */
808 farthest = sbitmap_vector_alloc (num_edges, n_exprs);
809 compute_farthest (edge_list, n_exprs, st_avout, st_avin, st_antin,
810 kill, farthest);
812 #ifdef LCM_DEBUG_INFO
813 if (dump_file)
814 dump_bitmap_vector (dump_file, "farthest", "", farthest, num_edges);
815 #endif
817 sbitmap_vector_free (st_antin);
818 sbitmap_vector_free (st_antout);
820 sbitmap_vector_free (st_avin);
821 sbitmap_vector_free (st_avout);
823 nearer = sbitmap_vector_alloc (num_edges, n_exprs);
825 /* Allocate an extra element for the entry block. */
826 nearerout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun) + 1,
827 n_exprs);
828 compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);
830 #ifdef LCM_DEBUG_INFO
831 if (dump_file)
833 dump_bitmap_vector (dump_file, "nearerout", "", nearerout,
834 last_basic_block_for_fn (cfun) + 1);
835 dump_bitmap_vector (dump_file, "nearer", "", nearer, num_edges);
837 #endif
839 sbitmap_vector_free (farthest);
841 *insert = sbitmap_vector_alloc (num_edges, n_exprs);
842 *del = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
843 compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
844 *insert, *del);
846 sbitmap_vector_free (nearerout);
847 sbitmap_vector_free (nearer);
849 #ifdef LCM_DEBUG_INFO
850 if (dump_file)
852 dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
853 dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
854 last_basic_block_for_fn (cfun));
856 #endif
857 return edge_list;