/* Generic partial redundancy elimination with lazy code motion support.
   Copyright (C) 1998-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* These routines are meant to be used by various optimization
   passes which can be modeled as lazy code motion problems.
   Including, but not limited to:

	* Traditional partial redundancy elimination.

	* Placement of caller/callee register save/restores.

	* Load/store motion.

	* Copy motion.

	* Conversion of flat register files to a stacked register
	model.

	* Dead load/store elimination.

   These routines accept as input:

	* Basic block information (number of blocks, lists of
	predecessors and successors).  Note the granularity
	need not be basic blocks; it could be statements
	or functions.

	* Bitmaps of local properties (computed, transparent and
	anticipatable expressions).

   The output of these routines is a bitmap of redundant computations
   and a bitmap of optimal placement points.  */
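
/* As an orientation note for readers: each property below is represented
   as a vector of simple bitmaps, one per basic block (or one per edge),
   with one bit per expression.  A minimal allocation sketch, mirroring
   the calls made elsewhere in this file (illustrative only; see the
   entry points below for the exact interfaces):

     sbitmap *antloc
       = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);

   where bit I of antloc[bb->index] is set when expression I is locally
   anticipatable in block BB.  */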
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "cfganal.h"
#include "lcm.h"
/* Edge based LCM routines.  */
static void compute_antinout_edge (sbitmap *, sbitmap *, sbitmap *, sbitmap *);
static void compute_earliest (struct edge_list *, int, sbitmap *, sbitmap *,
			      sbitmap *, sbitmap *, sbitmap *);
static void compute_laterin (struct edge_list *, sbitmap *, sbitmap *,
			     sbitmap *, sbitmap *);
static void compute_insert_delete (struct edge_list *edge_list, sbitmap *,
				   sbitmap *, sbitmap *, sbitmap *, sbitmap *);

/* Edge based LCM routines on a reverse flowgraph.  */
static void compute_farthest (struct edge_list *, int, sbitmap *, sbitmap *,
			      sbitmap *, sbitmap *, sbitmap *);
static void compute_nearerout (struct edge_list *, sbitmap *, sbitmap *,
			       sbitmap *, sbitmap *);
static void compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *,
				       sbitmap *, sbitmap *, sbitmap *,
				       sbitmap *);
/* Edge based LCM routines.  */

/* Compute expression anticipatability at entrance and exit of each block.
   This is done based on the flow graph, and not on the pred-succ lists.
   Other than that, it's pretty much identical to compute_antinout.  */
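
/* Reconstructed from the bitmap operations below, in informal set
   notation (| union, & intersection, ~ complement), the fixed point
   computed here is

     ANTOUT[b] = empty, if b is a predecessor of the EXIT block
	       = intersection of ANTIN[s] over successors s of b, otherwise
     ANTIN[b]  = ANTLOC[b] | (TRANSP[b] & ANTOUT[b])

   starting from the optimistic all-ones initialization of ANTIN.  */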
static void
compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
		       sbitmap *antout)
{
  basic_block bb;
  edge e;
  basic_block *worklist, *qin, *qout, *qend;
  unsigned int qlen;
  edge_iterator ei;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* We want a maximal solution, so make an optimistic initialization of
     ANTIN.  */
  bitmap_vector_ones (antin, last_basic_block_for_fn (cfun));

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of ANTIN above.  */
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int postorder_num = post_order_compute (postorder, false, false);
  for (int i = 0; i < postorder_num; ++i)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
      *qin++ = bb;
      bb->aux = bb;
    }
  free (postorder);

  qin = worklist;
  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;

  /* Mark blocks which are predecessors of the exit block so that we
     can easily identify them below.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    e->src->aux = EXIT_BLOCK_PTR_FOR_FN (cfun);

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      if (qout >= qend)
	qout = worklist;

      if (bb->aux == EXIT_BLOCK_PTR_FOR_FN (cfun))
	/* Do not clear the aux field for blocks which are predecessors of
	   the EXIT block.  That way we never add them to the worklist
	   again.  */
	bitmap_clear (antout[bb->index]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  bitmap_intersection_of_succs (antout[bb->index], antin, bb);
	}

      if (bitmap_or_and (antin[bb->index], antloc[bb->index],
			 transp[bb->index], antout[bb->index]))
	/* If the in state of this block changed, then we need
	   to add the predecessors of this block to the worklist
	   if they are not already on the worklist.  */
	FOR_EACH_EDGE (e, ei, bb->preds)
	  if (!e->src->aux && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    {
	      *qin++ = e->src;
	      e->src->aux = e;
	      qlen++;
	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}
/* Compute the earliest vector for edge based LCM.  */
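
/* In the same informal notation, for an edge (p,s) the loop below
   computes

     EARLIEST (p,s) = ANTIN[s], if p is the ENTRY block
		    = empty,    if s is the EXIT block
		    = ANTIN[s] & ~AVOUT[p] & (KILL[p] | ~ANTOUT[p]),
		      otherwise

   i.e. the expression is anticipatable entering s, is not already
   available leaving p, and could not have been placed any earlier
   because p kills it or it is not anticipatable at p's exit.  */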
static void
compute_earliest (struct edge_list *edge_list, int n_exprs, sbitmap *antin,
		  sbitmap *antout, sbitmap *avout, sbitmap *kill,
		  sbitmap *earliest)
{
  int x, num_edges;
  basic_block pred, succ;

  num_edges = NUM_EDGES (edge_list);

  auto_sbitmap difference (n_exprs), temp_bitmap (n_exprs);
  for (x = 0; x < num_edges; x++)
    {
      pred = INDEX_EDGE_PRED_BB (edge_list, x);
      succ = INDEX_EDGE_SUCC_BB (edge_list, x);
      if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	bitmap_copy (earliest[x], antin[succ->index]);
      else
	{
	  if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun))
	    bitmap_clear (earliest[x]);
	  else
	    {
	      bitmap_and_compl (difference, antin[succ->index],
				avout[pred->index]);
	      bitmap_not (temp_bitmap, antout[pred->index]);
	      bitmap_and_or (earliest[x], difference,
			     kill[pred->index], temp_bitmap);
	    }
	}
    }
}
/* later(p,s) is dependent on the calculation of laterin(p).
   laterin(p) is dependent on the calculation of later(p2,p).

     laterin(ENTRY) is defined as all 0's
     later(ENTRY, succs(ENTRY)) are defined using laterin(ENTRY)
     laterin(succs(ENTRY)) is defined by later(ENTRY, succs(ENTRY)).

   If we progress in this manner, starting with all basic blocks
   in the work list, anytime we change later(bb), we need to add
   succs(bb) to the worklist if they are not already on the worklist.

   Boundary conditions:

     We prime the worklist with all the normal basic blocks.  The ENTRY
     block can never be added to the worklist since it is never the
     successor of any block.  We explicitly prevent the EXIT block from
     being added to the worklist.

     We optimistically initialize LATER.  That is the only time this
     routine will compute LATER for an edge out of the entry block since
     the entry block is never on the worklist.  Thus, LATERIN is neither
     used nor computed for the ENTRY block.

     Since the EXIT block is never added to the worklist, we will neither
     use nor compute LATERIN for the exit block.  Edges which reach the
     EXIT block are handled in the normal fashion inside the loop.
     However, the insertion/deletion computation needs LATERIN(EXIT), so
     we have to compute it.  */
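
/* In the same informal notation, the fixed point computed below is

     LATER (p,s) = EARLIEST (p,s) | (LATERIN[p] & ~ANTLOC[p])
     LATERIN[b]  = intersection of LATER (p,b) over predecessors p of b

   with LATER (ENTRY,s) pinned to EARLIEST (ENTRY,s) and LATERIN (EXIT)
   accumulated into the extra slot of the LATERIN array, as described
   above.  */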
static void
compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
		 sbitmap *antloc, sbitmap *later, sbitmap *laterin)
{
  int num_edges, i;
  edge e;
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;
  edge_iterator ei;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist
    = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* Initialize a mapping from each edge to its index.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution, so initially consider LATER true for
     all edges.  This allows propagation through a loop since the incoming
     loop edge will have LATER set, so if all the other incoming edges
     to the loop are set, then LATERIN will be set for the head of the
     loop.

     If the optimistic setting of LATER on that edge was incorrect (for
     example the expression is ANTLOC in a block within the loop) then
     this algorithm will detect it when we process the block at the head
     of the optimistic edge.  That will requeue the affected blocks.  */
  bitmap_vector_ones (later, num_edges);

  /* Note that even though we want an optimistic setting of LATER, we
     do not want to be overly optimistic.  Consider an outgoing edge from
     the entry block.  That edge should always have a LATER value the
     same as EARLIEST for that edge.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    bitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit from
     the loop given our optimistic initialization of LATER above.  */
  auto_vec<int, 20> postorder;
  inverted_post_order_compute (&postorder);
  for (unsigned int i = 0; i < postorder.length (); ++i)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;
      *qin++ = bb;
      bb->aux = bb;
    }

  /* Note that we do not use the last allocated element for our queue,
     as EXIT_BLOCK is never inserted into it.  */
  qin = worklist;
  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      bb->aux = NULL;
      qlen--;
      if (qout >= qend)
	qout = worklist;

      /* Compute the intersection of LATERIN for each incoming edge to B.  */
      bitmap_ones (laterin[bb->index]);
      FOR_EACH_EDGE (e, ei, bb->preds)
	bitmap_and (laterin[bb->index], laterin[bb->index],
		    later[(size_t) e->aux]);

      /* Calculate LATER for all outgoing edges.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (bitmap_ior_and_compl (later[(size_t) e->aux],
				  earliest[(size_t) e->aux],
				  laterin[bb->index],
				  antloc[bb->index])
	    /* If LATER for an outgoing edge was changed, then we need
	       to add the target of the outgoing edge to the worklist.  */
	    && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest->aux == 0)
	  {
	    *qin++ = e->dest;
	    e->dest->aux = e;
	    qlen++;
	    if (qin >= qend)
	      qin = worklist;
	  }
    }

  /* Computation of insertion and deletion points requires computing LATERIN
     for the EXIT block.  We allocated an extra entry in the LATERIN array
     for just this purpose.  */
  bitmap_ones (laterin[last_basic_block_for_fn (cfun)]);
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    bitmap_and (laterin[last_basic_block_for_fn (cfun)],
		laterin[last_basic_block_for_fn (cfun)],
		later[(size_t) e->aux]);

  clear_aux_for_edges ();
  free (worklist);
}
/* Compute the insertion and deletion points for edge based LCM.  */
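
/* In the same informal notation:

     DELETE[b]    = ANTLOC[b] & ~LATERIN[b]
     INSERT (p,s) = LATER (p,s) & ~LATERIN[s]

   where LATERIN (EXIT) lives in the extra slot of the LATERIN array.  */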
static void
compute_insert_delete (struct edge_list *edge_list, sbitmap *antloc,
		       sbitmap *later, sbitmap *laterin, sbitmap *insert,
		       sbitmap *del)
{
  int x;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    bitmap_and_compl (del[bb->index], antloc[bb->index],
		      laterin[bb->index]);

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    {
      basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);

      if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
	bitmap_and_compl (insert[x], later[x],
			  laterin[last_basic_block_for_fn (cfun)]);
      else
	bitmap_and_compl (insert[x], later[x], laterin[b->index]);
    }
}
/* Given local properties TRANSP, AVLOC, ANTLOC, KILL, compute the insert
   and delete vectors for edge based LCM and fill in the AVIN, AVOUT
   bitmap vectors.  Returns an edge list which is used to map the insert
   vector to what edge an expression should be inserted on.  */
struct edge_list *
pre_edge_lcm_avs (int n_exprs, sbitmap *transp,
		  sbitmap *avloc, sbitmap *antloc, sbitmap *kill,
		  sbitmap *avin, sbitmap *avout,
		  sbitmap **insert, sbitmap **del)
{
  sbitmap *antin, *antout, *earliest;
  sbitmap *later, *laterin;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      fprintf (dump_file, "Edge List:\n");
      verify_edge_list (dump_file, edge_list);
      print_edge_list (dump_file, edge_list);
      dump_bitmap_vector (dump_file, "transp", "", transp,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "antloc", "", antloc,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "avloc", "", avloc,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "kill", "", kill,
			  last_basic_block_for_fn (cfun));
    }
#endif

  /* Compute global availability.  */
  compute_available (avloc, kill, avout, avin);

  /* Compute global anticipatability.  */
  antin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  antout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  compute_antinout_edge (antloc, transp, antin, antout);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "antin", "", antin,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "antout", "", antout,
			  last_basic_block_for_fn (cfun));
    }
#endif

  /* Compute earliestness.  */
  earliest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_earliest (edge_list, n_exprs, antin, antout, avout, kill, earliest);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    dump_bitmap_vector (dump_file, "earliest", "", earliest, num_edges);
#endif

  sbitmap_vector_free (antout);
  sbitmap_vector_free (antin);

  later = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the exit block in the laterin vector.  */
  laterin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun) + 1,
				  n_exprs);
  compute_laterin (edge_list, earliest, antloc, later, laterin);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "laterin", "", laterin,
			  last_basic_block_for_fn (cfun) + 1);
      dump_bitmap_vector (dump_file, "later", "", later, num_edges);
    }
#endif

  sbitmap_vector_free (earliest);

  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *del = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  bitmap_vector_clear (*insert, num_edges);
  bitmap_vector_clear (*del, last_basic_block_for_fn (cfun));
  compute_insert_delete (edge_list, antloc, later, laterin, *insert, *del);

  sbitmap_vector_free (laterin);
  sbitmap_vector_free (later);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
      dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
			  last_basic_block_for_fn (cfun));
    }
#endif

  return edge_list;
}
/* Wrapper to allocate avin/avout and call pre_edge_lcm_avs.  */
struct edge_list *
pre_edge_lcm (int n_exprs, sbitmap *transp,
	      sbitmap *avloc, sbitmap *antloc, sbitmap *kill,
	      sbitmap **insert, sbitmap **del)
{
  struct edge_list *edge_list;
  sbitmap *avin, *avout;

  avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);

  edge_list = pre_edge_lcm_avs (n_exprs, transp, avloc, antloc, kill,
				avin, avout, insert, del);

  sbitmap_vector_free (avout);
  sbitmap_vector_free (avin);

  return edge_list;
}
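
/* A minimal usage sketch for the forward problem (illustrative only;
   the local property vectors and N_EXPRS are assumed to have been
   computed by the caller):

     sbitmap *insert, *del;
     struct edge_list *edge_list
       = pre_edge_lcm (n_exprs, transp, avloc, antloc, kill,
		       &insert, &del);

   Expression I should then be inserted on the edge with index X in
   EDGE_LIST when bitmap_bit_p (insert[X], I), and its computation in
   block BB is redundant when bitmap_bit_p (del[BB->index], I).  When
   done:

     sbitmap_vector_free (insert);
     sbitmap_vector_free (del);
     free_edge_list (edge_list);  */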
/* Compute the AVIN and AVOUT vectors from the AVLOC and KILL vectors.
   Iterate with a worklist until a maximal fixed point is reached.  */
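
/* In the same informal notation as for the anticipatability problem,
   the fixed point computed here is

     AVIN[b]  = empty, if b is a successor of the ENTRY block
	      = intersection of AVOUT[p] over predecessors p of b, otherwise
     AVOUT[b] = AVLOC[b] | (AVIN[b] & ~KILL[b])

   starting from the optimistic all-ones initialization of AVOUT.  */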
void
compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
		   sbitmap *avin)
{
  edge e;
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;
  edge_iterator ei;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist
    = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);

  /* We want a maximal solution.  */
  bitmap_vector_ones (avout, last_basic_block_for_fn (cfun));

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of AVOUT above.  Use inverted postorder
     to make the dataflow problem require fewer iterations.  */
  auto_vec<int, 20> postorder;
  inverted_post_order_compute (&postorder);
  for (unsigned int i = 0; i < postorder.length (); ++i)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;
      *qin++ = bb;
      bb->aux = bb;
    }

  qin = worklist;
  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;

  /* Mark blocks which are successors of the entry block so that we
     can easily identify them below.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->dest->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      if (qout >= qend)
	qout = worklist;

      /* If one of the predecessor blocks is the ENTRY block, then the
	 intersection of avouts is the null set.  We can identify such blocks
	 by the special value in the AUX field in the block structure.  */
      if (bb->aux == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	/* Do not clear the aux field for blocks which are successors of the
	   ENTRY block.  That way we never add them to the worklist again.  */
	bitmap_clear (avin[bb->index]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  bitmap_intersection_of_preds (avin[bb->index], avout, bb);
	}

      if (bitmap_ior_and_compl (avout[bb->index], avloc[bb->index],
				avin[bb->index], kill[bb->index]))
	/* If the out state of this block changed, then we need
	   to add the successors of this block to the worklist
	   if they are not already on the worklist.  */
	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      *qin++ = e->dest;
	      e->dest->aux = e;
	      qlen++;

	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}
/* Compute the farthest vector for edge based LCM.  */
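
/* In the same informal notation, for an edge (p,s) the loop below
   computes

     FARTHEST (p,s) = ST_AVOUT[p], if s is the EXIT block
		    = empty,       if p is the ENTRY block
		    = ST_AVOUT[p] & ~ST_ANTIN[s] & (KILL[s] | ~ST_AVIN[s]),
		      otherwise

   the mirror image of EARLIEST on the reverse flowgraph.  */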
static void
compute_farthest (struct edge_list *edge_list, int n_exprs,
		  sbitmap *st_avout, sbitmap *st_avin, sbitmap *st_antin,
		  sbitmap *kill, sbitmap *farthest)
{
  int x, num_edges;
  basic_block pred, succ;

  num_edges = NUM_EDGES (edge_list);

  auto_sbitmap difference (n_exprs), temp_bitmap (n_exprs);
  for (x = 0; x < num_edges; x++)
    {
      pred = INDEX_EDGE_PRED_BB (edge_list, x);
      succ = INDEX_EDGE_SUCC_BB (edge_list, x);
      if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun))
	bitmap_copy (farthest[x], st_avout[pred->index]);
      else
	{
	  if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    bitmap_clear (farthest[x]);
	  else
	    {
	      bitmap_and_compl (difference, st_avout[pred->index],
				st_antin[succ->index]);
	      bitmap_not (temp_bitmap, st_avin[succ->index]);
	      bitmap_and_or (farthest[x], difference,
			     kill[succ->index], temp_bitmap);
	    }
	}
    }
}
/* Compute the NEARER and NEAREROUT vectors for edge based LCM.

   This is the mirror of compute_laterin; additional comments on the
   implementation can be found before compute_laterin.  */
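
/* In the same informal notation, the fixed point computed below is

     NEARER (p,s) = FARTHEST (p,s) | (NEAREROUT[s] & ~ST_AVLOC[s])
     NEAREROUT[b] = intersection of NEARER (b,s) over successors s of b

   with NEARER (p,EXIT) pinned to FARTHEST (p,EXIT) and NEAREROUT (ENTRY)
   accumulated into the extra slot of the NEAREROUT array.  */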
static void
compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
		   sbitmap *st_avloc, sbitmap *nearer, sbitmap *nearerout)
{
  int num_edges, i;
  edge e;
  basic_block *worklist, *tos, bb;
  edge_iterator ei;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);

  /* Initialize NEARER for each edge and build a mapping from an edge to
     its index.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution.  */
  bitmap_vector_ones (nearer, num_edges);

  /* Note that even though we want an optimistic setting of NEARER, we
     do not want to be overly optimistic.  Consider an incoming edge to
     the exit block.  That edge should always have a NEARER value the
     same as FARTHEST for that edge.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    bitmap_copy (nearer[(size_t) e->aux], farthest[(size_t) e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit
     from the loop given our optimistic initialization of NEARER.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      *tos++ = bb;
      bb->aux = bb;
    }

  /* Iterate until the worklist is empty.  */
  while (tos != worklist)
    {
      /* Take the first entry off the worklist.  */
      bb = *--tos;
      bb->aux = NULL;

      /* Compute the intersection of NEARER for each outgoing edge from B.  */
      bitmap_ones (nearerout[bb->index]);
      FOR_EACH_EDGE (e, ei, bb->succs)
	bitmap_and (nearerout[bb->index], nearerout[bb->index],
		    nearer[(size_t) e->aux]);

      /* Calculate NEARER for all incoming edges.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (bitmap_ior_and_compl (nearer[(size_t) e->aux],
				  farthest[(size_t) e->aux],
				  nearerout[e->dest->index],
				  st_avloc[e->dest->index])
	    /* If NEARER for an incoming edge was changed, then we need
	       to add the source of the incoming edge to the worklist.  */
	    && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) && e->src->aux == 0)
	  {
	    *tos++ = e->src;
	    e->src->aux = e;
	  }
    }

  /* Computation of insertion and deletion points requires computing NEAREROUT
     for the ENTRY block.  We allocated an extra entry in the NEAREROUT array
     for just this purpose.  */
  bitmap_ones (nearerout[last_basic_block_for_fn (cfun)]);
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    bitmap_and (nearerout[last_basic_block_for_fn (cfun)],
		nearerout[last_basic_block_for_fn (cfun)],
		nearer[(size_t) e->aux]);

  clear_aux_for_edges ();
  free (tos);
}
/* Compute the insertion and deletion points for edge based reverse LCM.  */
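
/* In the same informal notation:

     DELETE[b]    = ST_AVLOC[b] & ~NEAREROUT[b]
     INSERT (p,s) = NEARER (p,s) & ~NEAREROUT[p]

   where NEAREROUT (ENTRY) lives in the extra slot of the NEAREROUT
   array.  */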
static void
compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *st_avloc,
			   sbitmap *nearer, sbitmap *nearerout,
			   sbitmap *insert, sbitmap *del)
{
  int x;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    bitmap_and_compl (del[bb->index], st_avloc[bb->index],
		      nearerout[bb->index]);

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    {
      basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
      if (b == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	bitmap_and_compl (insert[x], nearer[x],
			  nearerout[last_basic_block_for_fn (cfun)]);
      else
	bitmap_and_compl (insert[x], nearer[x], nearerout[b->index]);
    }
}
/* Given local properties TRANSP, ST_AVLOC, ST_ANTLOC, KILL return the
   insert and delete vectors for edge based reverse LCM.  Returns an
   edge list which is used to map the insert vector to what edge
   an expression should be inserted on.  */
struct edge_list *
pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
		  sbitmap *st_avloc, sbitmap *st_antloc, sbitmap *kill,
		  sbitmap **insert, sbitmap **del)
{
  sbitmap *st_antin, *st_antout;
  sbitmap *st_avout, *st_avin, *farthest;
  sbitmap *nearer, *nearerout;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

  /* Compute global anticipatability.  */
  st_antin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  st_antout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  bitmap_vector_clear (st_antin, last_basic_block_for_fn (cfun));
  bitmap_vector_clear (st_antout, last_basic_block_for_fn (cfun));
  compute_antinout_edge (st_antloc, transp, st_antin, st_antout);

  /* Compute global availability.  */
  st_avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  st_avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  compute_available (st_avloc, kill, st_avout, st_avin);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      fprintf (dump_file, "Edge List:\n");
      verify_edge_list (dump_file, edge_list);
      print_edge_list (dump_file, edge_list);
      dump_bitmap_vector (dump_file, "transp", "", transp,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_avloc", "", st_avloc,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_antloc", "", st_antloc,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_antin", "", st_antin,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_antout", "", st_antout,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_kill", "", kill,
			  last_basic_block_for_fn (cfun));
    }
#endif

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "st_avout", "", st_avout,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_avin", "", st_avin,
			  last_basic_block_for_fn (cfun));
    }
#endif

  /* Compute farthestness.  */
  farthest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_farthest (edge_list, n_exprs, st_avout, st_avin, st_antin,
		    kill, farthest);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    dump_bitmap_vector (dump_file, "farthest", "", farthest, num_edges);
#endif

  sbitmap_vector_free (st_antin);
  sbitmap_vector_free (st_antout);

  sbitmap_vector_free (st_avin);
  sbitmap_vector_free (st_avout);

  nearer = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the entry block.  */
  nearerout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun) + 1,
				    n_exprs);
  compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "nearerout", "", nearerout,
			  last_basic_block_for_fn (cfun) + 1);
      dump_bitmap_vector (dump_file, "nearer", "", nearer, num_edges);
    }
#endif

  sbitmap_vector_free (farthest);

  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *del = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
			     *insert, *del);

  sbitmap_vector_free (nearerout);
  sbitmap_vector_free (nearer);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
      dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
			  last_basic_block_for_fn (cfun));
    }
#endif

  return edge_list;
}