/* Generic partial redundancy elimination with lazy code motion support.
   Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* These routines are meant to be used by various optimization
   passes which can be modeled as lazy code motion problems.
   Including, but not limited to:

	* Traditional partial redundancy elimination.

	* Placement of caller/callee register save/restores.

	* Load/store motion.

	* Copy motion.

	* Conversion of flat register files to a stacked register
	model.

	* Dead load/store elimination.

   These routines accept as input:

	* Basic block information (number of blocks, lists of
	predecessors and successors).  Note the granularity
	does not need to be basic blocks; it could be statements
	or functions.

	* Bitmaps of local properties (computed, transparent and
	anticipatable expressions).

   The output of these routines is a bitmap of redundant computations
   and a bitmap of optimal placement points.  */
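/* A sketch only, not code in this file: a client pass that has computed
   its local property bitmaps for n_exprs candidate expressions might
   drive the forward problem along these lines (the rewriting of the
   insns themselves is elided):

     sbitmap *transp, *avloc, *antloc, *kill;   (local properties)
     sbitmap *insert, *delete;
     struct edge_list *edge_list;

     edge_list = pre_edge_lcm (file, n_exprs, transp, avloc, antloc,
                               kill, &insert, &delete);
     ... act on the DELETE bitmap per block and the INSERT bitmap per
     edge, committing any edge insertions ...
     sbitmap_vector_free (insert);
     sbitmap_vector_free (delete);
     free_edge_list (edge_list);  */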
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "tm_p.h"

/* We want target macros for the mode switching code to be able to refer
   to instruction attribute values.  */
#include "insn-attr.h"
/* Edge based LCM routines.  */
static void compute_antinout_edge PARAMS ((sbitmap *, sbitmap *,
					   sbitmap *, sbitmap *));
static void compute_earliest PARAMS ((struct edge_list *, int,
				      sbitmap *, sbitmap *,
				      sbitmap *, sbitmap *,
				      sbitmap *));
static void compute_laterin PARAMS ((struct edge_list *, sbitmap *,
				     sbitmap *, sbitmap *,
				     sbitmap *));
static void compute_insert_delete PARAMS ((struct edge_list *edge_list,
					   sbitmap *, sbitmap *,
					   sbitmap *, sbitmap *,
					   sbitmap *));

/* Edge based LCM routines on a reverse flowgraph.  */
static void compute_farthest PARAMS ((struct edge_list *, int,
				      sbitmap *, sbitmap *,
				      sbitmap *, sbitmap *,
				      sbitmap *));
static void compute_nearerout PARAMS ((struct edge_list *, sbitmap *,
				       sbitmap *, sbitmap *,
				       sbitmap *));
static void compute_rev_insert_delete PARAMS ((struct edge_list *edge_list,
					       sbitmap *, sbitmap *,
					       sbitmap *, sbitmap *,
					       sbitmap *));
/* Edge based LCM routines.  */

/* Compute expression anticipatability at entrance and exit of each block.
   This is done based on the flow graph, and not on the pred-succ lists.
   Other than that, it's pretty much identical to compute_antinout.  */
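/* In set notation, the fixpoint computed below is

     ANTOUT(bb) = intersection of ANTIN over the successors of bb
		  (empty for predecessors of the exit block)
     ANTIN(bb)  = ANTLOC(bb) union (TRANSP(bb) intersect ANTOUT(bb))

   which simply restates the two sbitmap operations in the loop.  */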
static void
compute_antinout_edge (antloc, transp, antin, antout)
     sbitmap *antloc;
     sbitmap *transp;
     sbitmap *antin;
     sbitmap *antout;
{
  basic_block bb;
  edge e;
  basic_block *worklist, *qin, *qout, *qend;
  unsigned int qlen;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist
    = (basic_block *) xmalloc (sizeof (basic_block) * num_basic_blocks);

  /* We want a maximal solution, so make an optimistic initialization of
     ANTIN.  */
  sbitmap_vector_ones (antin, last_basic_block);

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of ANTIN above.  */
  FOR_ALL_BB_REVERSE (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  qin = worklist;
  qend = &worklist[num_basic_blocks];
  qlen = num_basic_blocks;

  /* Mark blocks which are predecessors of the exit block so that we
     can easily identify them below.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    e->src->aux = EXIT_BLOCK_PTR;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      basic_block bb = *qout++;
      qlen--;

      if (qout >= qend)
	qout = worklist;

      if (bb->aux == EXIT_BLOCK_PTR)
	/* Do not clear the aux field for blocks which are predecessors of
	   the EXIT block.  That way we never add them to the worklist
	   again.  */
	sbitmap_zero (antout[bb->sindex]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  sbitmap_intersection_of_succs (antout[bb->sindex], antin,
					 bb->sindex);
	}

      if (sbitmap_a_or_b_and_c_cg (antin[bb->sindex], antloc[bb->sindex],
				   transp[bb->sindex], antout[bb->sindex]))
	/* If the in state of this block changed, then we need
	   to add the predecessors of this block to the worklist
	   if they are not already on the worklist.  */
	for (e = bb->pred; e; e = e->pred_next)
	  if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
	    {
	      *qin++ = e->src;
	      e->src->aux = e;
	      qlen++;
	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}
/* Compute the earliest vector for edge based LCM.  */
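/* Restated from the code below, for an edge (pred, succ):

     EARLIEST(pred,succ) = (ANTIN(succ) minus AVOUT(pred))
			   intersect (KILL(pred) union not ANTOUT(pred))

   with EARLIEST = ANTIN(succ) on edges out of the entry block and the
   empty set on edges into the exit block.  */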
static void
compute_earliest (edge_list, n_exprs, antin, antout, avout, kill, earliest)
     struct edge_list *edge_list;
     int n_exprs;
     sbitmap *antin, *antout, *avout, *kill, *earliest;
{
  sbitmap difference, temp_bitmap;
  int x, num_edges;
  basic_block pred, succ;

  num_edges = NUM_EDGES (edge_list);

  difference = sbitmap_alloc (n_exprs);
  temp_bitmap = sbitmap_alloc (n_exprs);

  for (x = 0; x < num_edges; x++)
    {
      pred = INDEX_EDGE_PRED_BB (edge_list, x);
      succ = INDEX_EDGE_SUCC_BB (edge_list, x);
      if (pred == ENTRY_BLOCK_PTR)
	sbitmap_copy (earliest[x], antin[succ->sindex]);
      else
	{
	  /* We refer to the EXIT_BLOCK index, instead of testing for
	     EXIT_BLOCK_PTR, so that EXIT_BLOCK_PTR's index can be
	     changed so as to pretend it's a regular block, so that
	     its antin can be taken into account.  */
	  if (succ->sindex == EXIT_BLOCK)
	    sbitmap_zero (earliest[x]);
	  else
	    {
	      sbitmap_difference (difference, antin[succ->sindex],
				  avout[pred->sindex]);
	      sbitmap_not (temp_bitmap, antout[pred->sindex]);
	      sbitmap_a_and_b_or_c (earliest[x], difference,
				    kill[pred->sindex], temp_bitmap);
	    }
	}
    }

  sbitmap_free (temp_bitmap);
  sbitmap_free (difference);
}
/* later(p,s) is dependent on the calculation of laterin(p).
   laterin(p) is dependent on the calculation of later(p2,p).

   laterin(ENTRY) is defined as all 0's
   later(ENTRY, succs(ENTRY)) are defined using laterin(ENTRY)
   laterin(succs(ENTRY)) is defined by later(ENTRY, succs(ENTRY)).

   If we progress in this manner, starting with all basic blocks
   in the work list, anytime we change later(bb), we need to add
   succs(bb) to the worklist if they are not already on the worklist.

   Boundary conditions:

   We prime the worklist with all the normal basic blocks.  The ENTRY block
   can never be added to the worklist since it is never the successor of any
   block.  We explicitly prevent the EXIT block from being added to the
   worklist.

   We optimistically initialize LATER.  That is the only time this routine
   will compute LATER for an edge out of the entry block since the entry
   block is never on the worklist.  Thus, LATERIN is neither used nor
   computed for the ENTRY block.

   Since the EXIT block is never added to the worklist, we will neither
   use nor compute LATERIN for the exit block.  Edges which reach the
   EXIT block are handled in the normal fashion inside the loop.  However,
   the insertion/deletion computation needs LATERIN(EXIT), so we have
   to compute it.  */
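/* In set notation, the update step below is

     LATERIN(bb)   = intersection of LATER over the edges into bb
     LATER(pred,s) = EARLIEST(pred,s)
		     union (LATERIN(pred) minus ANTLOC(pred))

   which restates the sbitmap_a_and_b and sbitmap_union_of_diff_cg calls
   in the loop.  */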
static void
compute_laterin (edge_list, earliest, antloc, later, laterin)
     struct edge_list *edge_list;
     sbitmap *earliest, *antloc, *later, *laterin;
{
  int num_edges, i;
  edge e;
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist
    = (basic_block *) xmalloc (sizeof (basic_block) * (num_basic_blocks + 1));

  /* Initialize a mapping from each edge to its index.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution, so initially consider LATER true for
     all edges.  This allows propagation through a loop since the incoming
     loop edge will have LATER set, so if all the other incoming edges
     to the loop are set, then LATERIN will be set for the head of the
     loop.

     If the optimistic setting of LATER on that edge was incorrect (for
     example the expression is ANTLOC in a block within the loop) then
     this algorithm will detect it when we process the block at the head
     of the optimistic edge.  That will requeue the affected blocks.  */
  sbitmap_vector_ones (later, num_edges);

  /* Note that even though we want an optimistic setting of LATER, we
     do not want to be overly optimistic.  Consider an outgoing edge from
     the entry block.  That edge should always have a LATER value the
     same as EARLIEST for that edge.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    sbitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit from
     the loop given our optimistic initialization of LATER above.  */
  FOR_ALL_BB (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }
  qin = worklist;
  /* Note that we do not use the last allocated element for our queue,
     as EXIT_BLOCK is never inserted into it.  In fact the above allocation
     of num_basic_blocks + 1 elements is not necessary.  */
  qend = &worklist[num_basic_blocks];
  qlen = num_basic_blocks;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      bb->aux = NULL;
      qlen--;
      if (qout >= qend)
	qout = worklist;

      /* Compute the intersection of LATERIN for each incoming edge to B.  */
      sbitmap_ones (laterin[bb->sindex]);
      for (e = bb->pred; e != NULL; e = e->pred_next)
	sbitmap_a_and_b (laterin[bb->sindex], laterin[bb->sindex],
			 later[(size_t) e->aux]);

      /* Calculate LATER for all outgoing edges.  */
      for (e = bb->succ; e != NULL; e = e->succ_next)
	if (sbitmap_union_of_diff_cg (later[(size_t) e->aux],
				      earliest[(size_t) e->aux],
				      laterin[e->src->sindex],
				      antloc[e->src->sindex])
	    /* If LATER for an outgoing edge was changed, then we need
	       to add the target of the outgoing edge to the worklist.  */
	    && e->dest != EXIT_BLOCK_PTR && e->dest->aux == 0)
	  {
	    *qin++ = e->dest;
	    e->dest->aux = e;
	    qlen++;
	    if (qin >= qend)
	      qin = worklist;
	  }
    }

  /* Computation of insertion and deletion points requires computing LATERIN
     for the EXIT block.  We allocated an extra entry in the LATERIN array
     for just this purpose.  */
  sbitmap_ones (laterin[last_basic_block]);
  for (e = EXIT_BLOCK_PTR->pred; e != NULL; e = e->pred_next)
    sbitmap_a_and_b (laterin[last_basic_block],
		     laterin[last_basic_block],
		     later[(size_t) e->aux]);

  clear_aux_for_edges ();
  free (worklist);
}
/* Compute the insertion and deletion points for edge based LCM.  */
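/* Restating the code below in set notation:

     DELETE(bb) = ANTLOC(bb) minus LATERIN(bb)
     INSERT(e)  = LATER(e) minus LATERIN(succ(e))

   where LATERIN of the exit block lives in the extra array element.  */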
static void
compute_insert_delete (edge_list, antloc, later, laterin,
		       insert, delete)
     struct edge_list *edge_list;
     sbitmap *antloc, *later, *laterin, *insert, *delete;
{
  int x;
  basic_block bb;

  FOR_ALL_BB (bb)
    sbitmap_difference (delete[bb->sindex], antloc[bb->sindex],
			laterin[bb->sindex]);

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    {
      basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);

      if (b == EXIT_BLOCK_PTR)
	sbitmap_difference (insert[x], later[x], laterin[last_basic_block]);
      else
	sbitmap_difference (insert[x], later[x], laterin[b->sindex]);
    }
}
/* Given local properties TRANSP, AVLOC, ANTLOC, KILL return the insert and
   delete vectors for edge based LCM.  Returns an edge list which is used to
   map the insert vector to what edge an expression should be inserted on.  */
struct edge_list *
pre_edge_lcm (file, n_exprs, transp, avloc, antloc, kill, insert, delete)
     FILE *file ATTRIBUTE_UNUSED;
     int n_exprs;
     sbitmap *transp;
     sbitmap *avloc;
     sbitmap *antloc;
     sbitmap *kill;
     sbitmap **insert;
     sbitmap **delete;
{
  sbitmap *antin, *antout, *earliest;
  sbitmap *avin, *avout;
  sbitmap *later, *laterin;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      fprintf (file, "Edge List:\n");
      verify_edge_list (file, edge_list);
      print_edge_list (file, edge_list);
      dump_sbitmap_vector (file, "transp", "", transp, last_basic_block);
      dump_sbitmap_vector (file, "antloc", "", antloc, last_basic_block);
      dump_sbitmap_vector (file, "avloc", "", avloc, last_basic_block);
      dump_sbitmap_vector (file, "kill", "", kill, last_basic_block);
    }
#endif

  /* Compute global availability.  */
  avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_available (avloc, kill, avout, avin);
  sbitmap_vector_free (avin);

  /* Compute global anticipatability.  */
  antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_antinout_edge (antloc, transp, antin, antout);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "antin", "", antin, last_basic_block);
      dump_sbitmap_vector (file, "antout", "", antout, last_basic_block);
    }
#endif

  /* Compute earliestness.  */
  earliest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_earliest (edge_list, n_exprs, antin, antout, avout, kill, earliest);

#ifdef LCM_DEBUG_INFO
  if (file)
    dump_sbitmap_vector (file, "earliest", "", earliest, num_edges);
#endif

  sbitmap_vector_free (antout);
  sbitmap_vector_free (antin);
  sbitmap_vector_free (avout);

  later = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the exit block in the laterin vector.  */
  laterin = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
  compute_laterin (edge_list, earliest, antloc, later, laterin);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "laterin", "", laterin,
			   last_basic_block + 1);
      dump_sbitmap_vector (file, "later", "", later, num_edges);
    }
#endif

  sbitmap_vector_free (earliest);

  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *delete = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_insert_delete (edge_list, antloc, later, laterin, *insert, *delete);

  sbitmap_vector_free (laterin);
  sbitmap_vector_free (later);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "pre_insert_map", "", *insert, num_edges);
      dump_sbitmap_vector (file, "pre_delete_map", "", *delete,
			   last_basic_block);
    }
#endif

  return edge_list;
}
/* Compute the AVIN and AVOUT vectors from the AVLOC and KILL vectors.  */
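/* In set notation, the fixpoint computed below is

     AVIN(bb)  = intersection of AVOUT over the predecessors of bb
		 (empty for successors of the entry block)
     AVOUT(bb) = AVLOC(bb) union (AVIN(bb) minus KILL(bb))

   the forward mirror of compute_antinout_edge above.  */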
void
compute_available (avloc, kill, avout, avin)
     sbitmap *avloc, *kill, *avout, *avin;
{
  edge e;
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist
    = (basic_block *) xmalloc (sizeof (basic_block) * num_basic_blocks);

  /* We want a maximal solution.  */
  sbitmap_vector_ones (avout, last_basic_block);

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of AVOUT above.  */
  FOR_ALL_BB (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  qin = worklist;
  qend = &worklist[num_basic_blocks];
  qlen = num_basic_blocks;

  /* Mark blocks which are successors of the entry block so that we
     can easily identify them below.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    e->dest->aux = ENTRY_BLOCK_PTR;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      basic_block bb = *qout++;
      qlen--;

      if (qout >= qend)
	qout = worklist;

      /* If one of the predecessor blocks is the ENTRY block, then the
	 intersection of avouts is the null set.  We can identify such blocks
	 by the special value in the AUX field in the block structure.  */
      if (bb->aux == ENTRY_BLOCK_PTR)
	/* Do not clear the aux field for blocks which are successors of the
	   ENTRY block.  That way we never add them to the worklist again.  */
	sbitmap_zero (avin[bb->sindex]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  sbitmap_intersection_of_preds (avin[bb->sindex], avout, bb->sindex);
	}

      if (sbitmap_union_of_diff_cg (avout[bb->sindex], avloc[bb->sindex],
				    avin[bb->sindex], kill[bb->sindex]))
	/* If the out state of this block changed, then we need
	   to add the successors of this block to the worklist
	   if they are not already on the worklist.  */
	for (e = bb->succ; e; e = e->succ_next)
	  if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
	    {
	      *qin++ = e->dest;
	      e->dest->aux = e;
	      qlen++;

	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}
/* Compute the farthest vector for edge based LCM.  */
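/* Restated from the code below, for an edge (pred, succ):

     FARTHEST(pred,succ) = (ST_AVOUT(pred) minus ST_ANTIN(succ))
			   intersect (KILL(succ) union not ST_AVIN(succ))

   with FARTHEST = ST_AVOUT(pred) on edges into the exit block and the
   empty set on edges out of the entry block; this is compute_earliest
   run on the reverse flowgraph.  */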
static void
compute_farthest (edge_list, n_exprs, st_avout, st_avin, st_antin,
		  kill, farthest)
     struct edge_list *edge_list;
     int n_exprs;
     sbitmap *st_avout, *st_avin, *st_antin, *kill, *farthest;
{
  sbitmap difference, temp_bitmap;
  int x, num_edges;
  basic_block pred, succ;

  num_edges = NUM_EDGES (edge_list);

  difference = sbitmap_alloc (n_exprs);
  temp_bitmap = sbitmap_alloc (n_exprs);

  for (x = 0; x < num_edges; x++)
    {
      pred = INDEX_EDGE_PRED_BB (edge_list, x);
      succ = INDEX_EDGE_SUCC_BB (edge_list, x);
      if (succ == EXIT_BLOCK_PTR)
	sbitmap_copy (farthest[x], st_avout[pred->sindex]);
      else
	{
	  if (pred == ENTRY_BLOCK_PTR)
	    sbitmap_zero (farthest[x]);
	  else
	    {
	      sbitmap_difference (difference, st_avout[pred->sindex],
				  st_antin[succ->sindex]);
	      sbitmap_not (temp_bitmap, st_avin[succ->sindex]);
	      sbitmap_a_and_b_or_c (farthest[x], difference,
				    kill[succ->sindex], temp_bitmap);
	    }
	}
    }

  sbitmap_free (temp_bitmap);
  sbitmap_free (difference);
}
/* Compute nearer and nearerout vectors for edge based LCM.

   This is the mirror of compute_laterin; additional comments on the
   implementation can be found before compute_laterin.  */
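/* In set notation, the update step below is

     NEAREROUT(bb)  = intersection of NEARER over the edges out of bb
     NEARER(p,succ) = FARTHEST(p,succ)
		      union (NEAREROUT(succ) minus ST_AVLOC(succ))

   the reverse-graph mirror of the equations in compute_laterin.  */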
static void
compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout)
     struct edge_list *edge_list;
     sbitmap *farthest, *st_avloc, *nearer, *nearerout;
{
  int num_edges, i;
  edge e;
  basic_block *worklist, *tos, bb;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/stack.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  tos = worklist
    = (basic_block *) xmalloc (sizeof (basic_block) * (num_basic_blocks + 1));

  /* Initialize NEARER for each edge and build a mapping from an edge to
     its index.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution.  */
  sbitmap_vector_ones (nearer, num_edges);

  /* Note that even though we want an optimistic setting of NEARER, we
     do not want to be overly optimistic.  Consider an incoming edge to
     the exit block.  That edge should always have a NEARER value the
     same as FARTHEST for that edge.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    sbitmap_copy (nearer[(size_t) e->aux], farthest[(size_t) e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit
     from the loop given our optimistic initialization of NEARER.  */
  FOR_ALL_BB (bb)
    {
      *tos++ = bb;
      bb->aux = bb;
    }

  /* Iterate until the worklist is empty.  */
  while (tos != worklist)
    {
      /* Take the next entry off the worklist.  */
      bb = *--tos;
      bb->aux = NULL;

      /* Compute the intersection of NEARER for each outgoing edge from B.  */
      sbitmap_ones (nearerout[bb->sindex]);
      for (e = bb->succ; e != NULL; e = e->succ_next)
	sbitmap_a_and_b (nearerout[bb->sindex], nearerout[bb->sindex],
			 nearer[(size_t) e->aux]);

      /* Calculate NEARER for all incoming edges.  */
      for (e = bb->pred; e != NULL; e = e->pred_next)
	if (sbitmap_union_of_diff_cg (nearer[(size_t) e->aux],
				      farthest[(size_t) e->aux],
				      nearerout[e->dest->sindex],
				      st_avloc[e->dest->sindex])
	    /* If NEARER for an incoming edge was changed, then we need
	       to add the source of the incoming edge to the worklist.  */
	    && e->src != ENTRY_BLOCK_PTR && e->src->aux == 0)
	  {
	    *tos++ = e->src;
	    e->src->aux = e;
	  }
    }

  /* Computation of insertion and deletion points requires computing NEAREROUT
     for the ENTRY block.  We allocated an extra entry in the NEAREROUT array
     for just this purpose.  */
  sbitmap_ones (nearerout[last_basic_block]);
  for (e = ENTRY_BLOCK_PTR->succ; e != NULL; e = e->succ_next)
    sbitmap_a_and_b (nearerout[last_basic_block],
		     nearerout[last_basic_block],
		     nearer[(size_t) e->aux]);

  clear_aux_for_edges ();
  free (tos);
}
/* Compute the insertion and deletion points for edge based reverse LCM.  */
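/* Restating the code below in set notation:

     DELETE(bb) = ST_AVLOC(bb) minus NEAREROUT(bb)
     INSERT(e)  = NEARER(e) minus NEAREROUT(pred(e))

   where NEAREROUT of the entry block lives in the extra array element.  */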
static void
compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
			   insert, delete)
     struct edge_list *edge_list;
     sbitmap *st_avloc, *nearer, *nearerout, *insert, *delete;
{
  int x;
  basic_block bb;

  FOR_ALL_BB (bb)
    sbitmap_difference (delete[bb->sindex], st_avloc[bb->sindex],
			nearerout[bb->sindex]);

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    {
      basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
      if (b == ENTRY_BLOCK_PTR)
	sbitmap_difference (insert[x], nearer[x], nearerout[last_basic_block]);
      else
	sbitmap_difference (insert[x], nearer[x], nearerout[b->sindex]);
    }
}
/* Given local properties TRANSP, ST_AVLOC, ST_ANTLOC, KILL return the
   insert and delete vectors for edge based reverse LCM.  Returns an
   edge list which is used to map the insert vector to what edge
   an expression should be inserted on.  */
struct edge_list *
pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
		  insert, delete)
     FILE *file ATTRIBUTE_UNUSED;
     int n_exprs;
     sbitmap *transp;
     sbitmap *st_avloc;
     sbitmap *st_antloc;
     sbitmap *kill;
     sbitmap **insert;
     sbitmap **delete;
{
  sbitmap *st_antin, *st_antout;
  sbitmap *st_avout, *st_avin, *farthest;
  sbitmap *nearer, *nearerout;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

  st_antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  st_antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  sbitmap_vector_zero (st_antin, last_basic_block);
  sbitmap_vector_zero (st_antout, last_basic_block);
  compute_antinout_edge (st_antloc, transp, st_antin, st_antout);

  /* Compute global availability.  */
  st_avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  st_avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_available (st_avloc, kill, st_avout, st_avin);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      fprintf (file, "Edge List:\n");
      verify_edge_list (file, edge_list);
      print_edge_list (file, edge_list);
      dump_sbitmap_vector (file, "transp", "", transp, last_basic_block);
      dump_sbitmap_vector (file, "st_avloc", "", st_avloc, last_basic_block);
      dump_sbitmap_vector (file, "st_antloc", "", st_antloc, last_basic_block);
      dump_sbitmap_vector (file, "st_antin", "", st_antin, last_basic_block);
      dump_sbitmap_vector (file, "st_antout", "", st_antout, last_basic_block);
      dump_sbitmap_vector (file, "st_kill", "", kill, last_basic_block);
      dump_sbitmap_vector (file, "st_avout", "", st_avout, last_basic_block);
      dump_sbitmap_vector (file, "st_avin", "", st_avin, last_basic_block);
    }
#endif

  /* Compute farthestness.  */
  farthest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_farthest (edge_list, n_exprs, st_avout, st_avin, st_antin,
		    kill, farthest);

#ifdef LCM_DEBUG_INFO
  if (file)
    dump_sbitmap_vector (file, "farthest", "", farthest, num_edges);
#endif

  sbitmap_vector_free (st_antin);
  sbitmap_vector_free (st_antout);

  sbitmap_vector_free (st_avin);
  sbitmap_vector_free (st_avout);

  nearer = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the entry block.  */
  nearerout = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
  compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "nearerout", "", nearerout,
			   last_basic_block + 1);
      dump_sbitmap_vector (file, "nearer", "", nearer, num_edges);
    }
#endif

  sbitmap_vector_free (farthest);

  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *delete = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
			     *insert, *delete);

  sbitmap_vector_free (nearerout);
  sbitmap_vector_free (nearer);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "pre_insert_map", "", *insert, num_edges);
      dump_sbitmap_vector (file, "pre_delete_map", "", *delete,
			   last_basic_block);
    }
#endif
  return edge_list;
}
/* Mode switching:

   The algorithm for setting the modes consists of scanning the insn list
   and finding all the insns which require a specific mode.  Each insn gets
   a unique struct seginfo element.  These structures are inserted into a
   list for each basic block.  For each entity, there is an array of bb_info
   over the flow graph basic blocks (local var 'bb_info'); each element
   contains a list of all insns within that basic block, in the order they
   are encountered.

   For each entity, any basic block WITHOUT any insns requiring a specific
   mode is given a single entry, without a mode.  (Each basic block
   in the flow graph must have at least one entry in the segment table.)

   The LCM algorithm is then run over the flow graph to determine where to
   place the sets to the highest-priority mode with respect to the first
   insn in any one block.  Any adjustments required to the transparency
   vectors are made, then the next iteration starts for the next-lower
   priority mode, until all modes for each entity are exhausted.

   More details are located in the code for optimize_mode_switching().  */
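/* A hedged illustration, not taken from any real backend: a target with a
   single mode-switched entity that has two modes (0 and 1, with 2 acting
   as the "no mode needed" value, since num_modes[e] itself is used as
   no_mode below) might define the target macros along these lines; the
   pattern gen_set_fp_mode is hypothetical:

     #define OPTIMIZE_MODE_SWITCHING(ENTITY)  (1)
     #define NUM_MODES_FOR_MODE_SWITCHING  { 2 }
     #define MODE_NEEDED(ENTITY, INSN)  (... 0, 1, or 2 from INSN ...)
     #define MODE_PRIORITY_TO_MODE(ENTITY, N)  (N)
     #define NORMAL_MODE(ENTITY)  (0)
     #define EMIT_MODE_SET(ENTITY, MODE, HARD_REGS_LIVE) \
       emit_insn (gen_set_fp_mode (GEN_INT (MODE)))

   These are exactly the macros consulted by optimize_mode_switching
   below.  */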
/* This structure contains the information for each insn which requires
   either single or double mode to be set.
   MODE is the mode this insn must be executed in.
   INSN_PTR is the insn to be executed (may be the note that marks the
   beginning of a basic block).
   BBNUM is the flow graph basic block this insn occurs in.
   NEXT is the next insn in the same basic block.
   REGS_LIVE is the set of hard registers live at that point.  */
struct seginfo
{
  int mode;
  rtx insn_ptr;
  int bbnum;
  struct seginfo *next;
  HARD_REG_SET regs_live;
};

struct bb_info
{
  struct seginfo *seginfo;
  int computing;
};
/* These bitmaps are used for the LCM algorithm.  */

#ifdef OPTIMIZE_MODE_SWITCHING
static sbitmap *antic;
static sbitmap *transp;
static sbitmap *comp;
static sbitmap *delete;
static sbitmap *insert;

static struct seginfo * new_seginfo PARAMS ((int, rtx, int, HARD_REG_SET));
static void add_seginfo PARAMS ((struct bb_info *, struct seginfo *));
static void reg_dies PARAMS ((rtx, HARD_REG_SET));
static void reg_becomes_live PARAMS ((rtx, rtx, void *));
static void make_preds_opaque PARAMS ((basic_block, int));
#endif
#ifdef OPTIMIZE_MODE_SWITCHING

/* This function will allocate a new SEGINFO structure, initialized
   with the MODE, INSN, and basic block BB parameters.  */

static struct seginfo *
new_seginfo (mode, insn, bb, regs_live)
     int mode;
     rtx insn;
     int bb;
     HARD_REG_SET regs_live;
{
  struct seginfo *ptr;
  ptr = xmalloc (sizeof (struct seginfo));
  ptr->mode = mode;
  ptr->insn_ptr = insn;
  ptr->bbnum = bb;
  ptr->next = NULL;
  COPY_HARD_REG_SET (ptr->regs_live, regs_live);
  return ptr;
}
/* Add a seginfo element to the end of a list.
   HEAD is a pointer to the list beginning.
   INFO is the structure to be linked in.  */

static void
add_seginfo (head, info)
     struct bb_info *head;
     struct seginfo *info;
{
  struct seginfo *ptr;

  if (head->seginfo == NULL)
    head->seginfo = info;
  else
    {
      ptr = head->seginfo;
      while (ptr->next != NULL)
	ptr = ptr->next;
      ptr->next = info;
    }
}
/* Make all predecessors of basic block B opaque, recursively, till we hit
   some that are already non-transparent, or an edge where aux is set; that
   denotes that a mode set is to be done on that edge.
   J is the bit number in the bitmaps that corresponds to the entity that
   we are currently handling mode-switching for.  */

static void
make_preds_opaque (b, j)
     basic_block b;
     int j;
{
  edge e;

  for (e = b->pred; e; e = e->pred_next)
    {
      basic_block pb = e->src;

      if (e->aux || ! TEST_BIT (transp[pb->sindex], j))
	continue;

      RESET_BIT (transp[pb->sindex], j);
      make_preds_opaque (pb, j);
    }
}
/* Record in LIVE that register REG died.  */

static void
reg_dies (reg, live)
     rtx reg;
     HARD_REG_SET live;
{
  int regno, nregs;

  if (GET_CODE (reg) != REG)
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    for (nregs = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1; nregs >= 0;
	 nregs--)
      CLEAR_HARD_REG_BIT (live, regno + nregs);
}
/* Record in LIVE that register REG became live.
   This is called via note_stores.  */

static void
reg_becomes_live (reg, setter, live)
     rtx reg;
     rtx setter ATTRIBUTE_UNUSED;
     void *live;
{
  int regno, nregs;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (GET_CODE (reg) != REG)
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    for (nregs = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1; nregs >= 0;
	 nregs--)
      SET_HARD_REG_BIT (* (HARD_REG_SET *) live, regno + nregs);
}
/* Find all insns that need a particular mode setting, and insert the
   necessary mode switches.  Return true if we did work.  */

int
optimize_mode_switching (file)
     FILE *file;
{
  rtx insn;
  int e;
  basic_block bb;
  int need_commit = 0;
  sbitmap *kill;
  struct edge_list *edge_list;
  static const int num_modes[] = NUM_MODES_FOR_MODE_SWITCHING;
#define N_ENTITIES ARRAY_SIZE (num_modes)
  int entity_map[N_ENTITIES];
  struct bb_info *bb_info[N_ENTITIES];
  int i, j;
  int n_entities;
  int max_num_modes = 0;
  bool emited = false;
  clear_bb_flags ();
#ifdef NORMAL_MODE
  /* Increment last_basic_block before allocating bb_info.  */
  last_basic_block++;
#endif

  for (e = N_ENTITIES - 1, n_entities = 0; e >= 0; e--)
    if (OPTIMIZE_MODE_SWITCHING (e))
      {
	/* Create the list of segments within each basic block.  */
	bb_info[n_entities]
	  = (struct bb_info *) xcalloc (last_basic_block, sizeof **bb_info);
	entity_map[n_entities++] = e;
	if (num_modes[e] > max_num_modes)
	  max_num_modes = num_modes[e];
      }

#ifdef NORMAL_MODE
  /* Decrement it back in case we return below.  */
  last_basic_block--;
#endif

  if (! n_entities)
    return 0;

#ifdef NORMAL_MODE
  /* We're going to pretend the EXIT_BLOCK is a regular basic block,
     so that switching back to normal mode when entering the
     EXIT_BLOCK isn't optimized away.  We do this by incrementing the
     basic block count, growing the VARRAY of basic_block_info and
     appending the EXIT_BLOCK_PTR to it.  */
  last_basic_block++;
  if (VARRAY_SIZE (basic_block_info) < last_basic_block)
    VARRAY_GROW (basic_block_info, last_basic_block);
  BASIC_BLOCK (last_basic_block - 1) = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->sindex = last_basic_block;
#endif
  /* Create the bitmap vectors.  */

  antic = sbitmap_vector_alloc (last_basic_block, n_entities);
  transp = sbitmap_vector_alloc (last_basic_block, n_entities);
  comp = sbitmap_vector_alloc (last_basic_block, n_entities);

  sbitmap_vector_ones (transp, last_basic_block);

  for (j = n_entities - 1; j >= 0; j--)
    {
      int e = entity_map[j];
      int no_mode = num_modes[e];
      struct bb_info *info = bb_info[j];

      /* Determine the mode needed by the first use (if any) of entity E
	 in each block.  This will be the mode that is anticipatable for
	 this block.  Also compute the initial transparency settings.  */
      FOR_ALL_BB (bb)
	{
	  struct seginfo *ptr;
	  int last_mode = no_mode;
	  HARD_REG_SET live_now;

	  REG_SET_TO_HARD_REG_SET (live_now, bb->global_live_at_start);
	  for (insn = bb->head;
	       insn != NULL && insn != NEXT_INSN (bb->end);
	       insn = NEXT_INSN (insn))
	    if (INSN_P (insn))
	      {
		int mode = MODE_NEEDED (e, insn);
		rtx link;

		if (mode != no_mode && mode != last_mode)
		  {
		    last_mode = mode;
		    ptr = new_seginfo (mode, insn, bb->sindex, live_now);
		    add_seginfo (info + bb->sindex, ptr);
		    RESET_BIT (transp[bb->sindex], j);
		  }

		/* Update LIVE_NOW.  */
		for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		  if (REG_NOTE_KIND (link) == REG_DEAD)
		    reg_dies (XEXP (link, 0), live_now);

		note_stores (PATTERN (insn), reg_becomes_live, &live_now);
		for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		  if (REG_NOTE_KIND (link) == REG_UNUSED)
		    reg_dies (XEXP (link, 0), live_now);
	      }

	  info[bb->sindex].computing = last_mode;
	  /* Check for blocks without ANY mode requirements.  */
	  if (last_mode == no_mode)
	    {
	      ptr = new_seginfo (no_mode, insn, bb->sindex, live_now);
	      add_seginfo (info + bb->sindex, ptr);
	    }
	}
#ifdef NORMAL_MODE
      {
	int mode = NORMAL_MODE (e);

	if (mode != no_mode)
	  {
	    edge eg;

	    for (eg = ENTRY_BLOCK_PTR->succ; eg; eg = eg->succ_next)
	      {
		bb = eg->dest;

		/* By always making this nontransparent, we save
		   an extra check in make_preds_opaque.  We also
		   need this to avoid confusing pre_edge_lcm when
		   antic is cleared but transp and comp are set.  */
		RESET_BIT (transp[bb->sindex], j);

		/* If the block already has MODE, pretend it
		   has none (because we don't need to set it),
		   but retain whatever mode it computes.  */
		if (info[bb->sindex].seginfo->mode == mode)
		  info[bb->sindex].seginfo->mode = no_mode;

		/* Insert a fake computing definition of MODE into entry
		   blocks which compute no mode.  This represents the mode on
		   entry.  */
		else if (info[bb->sindex].computing == no_mode)
		  {
		    info[bb->sindex].computing = mode;
		    info[bb->sindex].seginfo->mode = no_mode;
		  }
	      }

	    bb = EXIT_BLOCK_PTR;
	    info[bb->sindex].seginfo->mode = mode;
	  }
      }
#endif /* NORMAL_MODE */
    }
  kill = sbitmap_vector_alloc (last_basic_block, n_entities);
  for (i = 0; i < max_num_modes; i++)
    {
      int current_mode[N_ENTITIES];

      /* Set the anticipatable and computing arrays.  */
      sbitmap_vector_zero (antic, last_basic_block);
      sbitmap_vector_zero (comp, last_basic_block);
      for (j = n_entities - 1; j >= 0; j--)
	{
	  int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
	  struct bb_info *info = bb_info[j];

	  FOR_ALL_BB (bb)
	    {
	      if (info[bb->sindex].seginfo->mode == m)
		SET_BIT (antic[bb->sindex], j);

	      if (info[bb->sindex].computing == m)
		SET_BIT (comp[bb->sindex], j);
	    }
	}

      /* Calculate the optimal locations for placement of mode switches to
	 modes with priority I.  */

      FOR_ALL_BB_REVERSE (bb)
	sbitmap_not (kill[bb->sindex], transp[bb->sindex]);
      edge_list = pre_edge_lcm (file, 1, transp, comp, antic,
				kill, &insert, &delete);

      for (j = n_entities - 1; j >= 0; j--)
	{
	  /* Insert all mode sets that have been inserted by lcm.  */
	  int no_mode = num_modes[entity_map[j]];

	  /* Wherever we have moved a mode setting upwards in the flow graph,
	     the blocks between the new setting site and the now redundant
	     computation cease to be transparent for any lower-priority
	     mode of the same entity.  First set the aux field of each
	     insertion site edge non-transparent, then propagate the new
	     non-transparency from the redundant computation upwards till
	     we hit an insertion site or an already non-transparent block.  */
	  for (e = NUM_EDGES (edge_list) - 1; e >= 0; e--)
	    {
	      edge eg = INDEX_EDGE (edge_list, e);
	      int mode;
	      basic_block src_bb;
	      HARD_REG_SET live_at_edge;
	      rtx mode_set;

	      eg->aux = 0;

	      if (! TEST_BIT (insert[e], j))
		continue;

	      eg->aux = (void *) 1;

	      mode = current_mode[j];
	      src_bb = eg->src;

	      REG_SET_TO_HARD_REG_SET (live_at_edge,
				       src_bb->global_live_at_end);

	      start_sequence ();
	      EMIT_MODE_SET (entity_map[j], mode, live_at_edge);
	      mode_set = gen_sequence ();
	      end_sequence ();

	      /* Do not bother to insert an empty sequence.  */
	      if (GET_CODE (mode_set) == SEQUENCE
		  && !XVECLEN (mode_set, 0))
		continue;

	      /* If this is an abnormal edge, we'll insert at the end
		 of the previous block.  */
	      if (eg->flags & EDGE_ABNORMAL)
		{
		  emited = true;
		  if (GET_CODE (src_bb->end) == JUMP_INSN)
		    emit_insn_before (mode_set, src_bb->end);
		  /* It doesn't make sense to switch to normal mode
		     after a CALL_INSN, so we're going to abort if we
		     find one.  The cases in which a CALL_INSN may
		     have an abnormal edge are sibcalls and EH edges.
		     In the case of sibcalls, the dest basic-block is
		     the EXIT_BLOCK, which runs in normal mode; it is
		     assumed that a sibcall insn requires normal mode
		     itself, so no mode switch would be required after
		     the call (it wouldn't make sense, anyway).  In
		     the case of EH edges, EH entry points also start
		     in normal mode, so a similar reasoning applies.  */
		  else if (GET_CODE (src_bb->end) == INSN)
		    emit_insn_after (mode_set, src_bb->end);
		  else
		    abort ();
		  bb_info[j][src_bb->sindex].computing = mode;
		  RESET_BIT (transp[src_bb->sindex], j);
		}
	      else
		{
		  need_commit = 1;
		  insert_insn_on_edge (mode_set, eg);
		}
	    }

	  FOR_ALL_BB_REVERSE (bb)
	    if (TEST_BIT (delete[bb->sindex], j))
	      {
		make_preds_opaque (bb, j);
		/* Cancel the 'deleted' mode set.  */
		bb_info[j][bb->sindex].seginfo->mode = no_mode;
	      }
	}

      clear_aux_for_edges ();
      free_edge_list (edge_list);
    }
#ifdef NORMAL_MODE
  /* Restore the special status of EXIT_BLOCK.  */
  last_basic_block--;
  VARRAY_POP (basic_block_info);
  EXIT_BLOCK_PTR->sindex = EXIT_BLOCK;
#endif
  /* Now output the remaining mode sets in all the segments.  */
  for (j = n_entities - 1; j >= 0; j--)
    {
      int no_mode = num_modes[entity_map[j]];

#ifdef NORMAL_MODE
      if (bb_info[j][last_basic_block].seginfo->mode != no_mode)
	{
	  edge eg;
	  struct seginfo *ptr = bb_info[j][last_basic_block].seginfo;

	  for (eg = EXIT_BLOCK_PTR->pred; eg; eg = eg->pred_next)
	    {
	      rtx mode_set;

	      if (bb_info[j][eg->src->sindex].computing == ptr->mode)
		continue;

	      start_sequence ();
	      EMIT_MODE_SET (entity_map[j], ptr->mode, ptr->regs_live);
	      mode_set = gen_sequence ();
	      end_sequence ();

	      /* Do not bother to insert an empty sequence.  */
	      if (GET_CODE (mode_set) == SEQUENCE
		  && !XVECLEN (mode_set, 0))
		continue;

	      /* If this is an abnormal edge, we'll insert at the end of the
		 previous block.  */
	      if (eg->flags & EDGE_ABNORMAL)
		{
		  emited = true;
		  if (GET_CODE (eg->src->end) == JUMP_INSN)
		    emit_insn_before (mode_set, eg->src->end);
		  else if (GET_CODE (eg->src->end) == INSN)
		    emit_insn_after (mode_set, eg->src->end);
		  else
		    abort ();
		}
	      else
		{
		  need_commit = 1;
		  insert_insn_on_edge (mode_set, eg);
		}
	    }
	}
#endif

      FOR_ALL_BB_REVERSE (bb)
	{
	  struct seginfo *ptr, *next;
	  for (ptr = bb_info[j][bb->sindex].seginfo; ptr; ptr = next)
	    {
	      next = ptr->next;
	      if (ptr->mode != no_mode)
		{
		  rtx mode_set;

		  start_sequence ();
		  EMIT_MODE_SET (entity_map[j], ptr->mode, ptr->regs_live);
		  mode_set = gen_sequence ();
		  end_sequence ();

		  /* Do not bother to insert an empty sequence; but do
		     free this segment first, so that skipping it does
		     not leak the seginfo element.  */
		  if (GET_CODE (mode_set) == SEQUENCE
		      && !XVECLEN (mode_set, 0))
		    {
		      free (ptr);
		      continue;
		    }

		  emited = true;
		  if (GET_CODE (ptr->insn_ptr) == NOTE
		      && (NOTE_LINE_NUMBER (ptr->insn_ptr)
			  == NOTE_INSN_BASIC_BLOCK))
		    emit_insn_after (mode_set, ptr->insn_ptr);
		  else
		    emit_insn_before (mode_set, ptr->insn_ptr);
		}

	      free (ptr);
	    }
	}

      free (bb_info[j]);
    }
  /* Finished.  Free up all the things we've allocated.  */
  sbitmap_vector_free (kill);
  sbitmap_vector_free (antic);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);
  sbitmap_vector_free (delete);
  sbitmap_vector_free (insert);

  if (need_commit)
    commit_edge_insertions ();

  if (!need_commit && !emited)
    return 0;

  max_regno = max_reg_num ();
  allocate_reg_info (max_regno, FALSE, FALSE);
  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
				    (PROP_DEATH_NOTES | PROP_KILL_DEAD_CODE
				     | PROP_SCAN_DEAD_CODE));

  return 1;
}
#endif /* OPTIMIZE_MODE_SWITCHING */