/* CPU mode switching
   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "tm_p.h"
#include "function.h"

/* We want target macros for the mode switching code to be able to refer
   to instruction attribute values.  */
#include "insn-attr.h"

#ifdef OPTIMIZE_MODE_SWITCHING
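
/* For illustration only (a hypothetical sketch, not copied from any real
   port): a target with a single mode-switched entity, say a two-state FPU
   precision flag, would provide definitions of roughly this shape in its
   target headers:

       #define OPTIMIZE_MODE_SWITCHING(ENTITY) 1
       #define NUM_MODES_FOR_MODE_SWITCHING { 2 }
       #define MODE_NEEDED(ENTITY, INSN) ...the mode INSN requires, or 2...
       #define MODE_PRIORITY_TO_MODE(ENTITY, N) (N)
       #define EMIT_MODE_SET(ENTITY, MODE, HARD_REGS_LIVE) \
         ...emit the insns that switch to MODE...

   The per-entity value in NUM_MODES_FOR_MODE_SWITCHING doubles as the
   "no mode needed" answer from MODE_NEEDED; see the uses of no_mode
   below.  */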

/* The algorithm for setting the modes consists of scanning the insn list
   and finding all the insns which require a specific mode.  Each insn gets
   a unique struct seginfo element.  These structures are inserted into a
   list for each basic block.  For each entity, there is an array of
   bb_info over the flow graph basic blocks (local var 'bb_info'); each
   element contains a list of all insns within that basic block, in the
   order they are encountered.

   For each entity, any basic block WITHOUT any insns requiring a specific
   mode is given a single entry without a mode.  (Each basic block in the
   flow graph must have at least one entry in the segment table.)

   The LCM algorithm is then run over the flow graph to determine where to
   place the sets for the highest-priority mode with respect to the first
   insn in any one block.  Any adjustments required to the transparency
   vectors are made, then the next iteration starts for the next-lower
   priority mode, until all modes for each entity are exhausted.

   More details are located in the code for optimize_mode_switching ().  */
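
/* A small worked example, assuming a single entity with modes 0 and 1
   and "no mode" value 2: for a basic block containing

       insn A   (MODE_NEEDED == 0)
       insn B   (MODE_NEEDED == 2, i.e. no requirement)
       insn C   (MODE_NEEDED == 1)

   the scan in optimize_mode_switching builds the segment list
   {mode 0 at A} -> {mode 1 at C}, records mode 1 as what the block
   computes, and clears the block's transparency bit for the entity.  */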

/* This structure contains the information for each insn which requires
   a specific mode to be set.
   MODE is the mode this insn must be executed in.
   INSN_PTR is the insn to be executed (may be the note that marks the
   beginning of a basic block).
   BBNUM is the flow graph basic block this insn occurs in.
   NEXT is the next insn in the same basic block.  */
struct seginfo
{
  int mode;
  rtx insn_ptr;
  int bbnum;
  struct seginfo *next;
  HARD_REG_SET regs_live;
};
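
/* Describes one basic block for one entity: the head of the block's
   seginfo list, and the mode the block computes, i.e. the last mode
   needed anywhere in it (or the "no mode" value).  */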
struct bb_info
{
  struct seginfo *seginfo;
  int computing;
};

/* These bitmaps are used for the LCM algorithm.  */

static sbitmap *antic;
static sbitmap *transp;
static sbitmap *comp;
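
/* One bit per entity, for every basic block:
   transp - no insn in the block requires a specific mode, so the block
            does not disturb any mode setting;
   antic  - the mode currently being placed is what the block's first
            mode-requiring insn needs;
   comp   - the mode currently being placed is what the block computes,
            i.e. what is in effect at its end.  */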

static struct seginfo * new_seginfo (int, rtx, int, HARD_REG_SET);
static void add_seginfo (struct bb_info *, struct seginfo *);
static void reg_dies (rtx, HARD_REG_SET);
static void reg_becomes_live (rtx, rtx, void *);
static void make_preds_opaque (basic_block, int);

/* Allocate a new SEGINFO structure, initialized with the MODE, INSN,
   and basic block BB parameters.  */

static struct seginfo *
new_seginfo (int mode, rtx insn, int bb, HARD_REG_SET regs_live)
{
  struct seginfo *ptr;
  ptr = xmalloc (sizeof (struct seginfo));
  ptr->mode = mode;
  ptr->insn_ptr = insn;
  ptr->bbnum = bb;
  ptr->next = NULL;
  COPY_HARD_REG_SET (ptr->regs_live, regs_live);
  return ptr;
}

/* Add a seginfo element to the end of a list.
   HEAD is a pointer to the list beginning.
   INFO is the structure to be linked in.  */

static void
add_seginfo (struct bb_info *head, struct seginfo *info)
{
  struct seginfo *ptr;

  if (head->seginfo == NULL)
    head->seginfo = info;
  else
    {
      ptr = head->seginfo;
      while (ptr->next != NULL)
        ptr = ptr->next;
      ptr->next = info;
    }
}

/* Recursively make all predecessors of basic block B opaque, stopping
   when we hit a block that is already non-transparent, or an edge whose
   aux field is set; a set aux field denotes that a mode set is to be
   done on that edge.
   J is the bit number in the bitmaps that corresponds to the entity
   that we are currently handling mode-switching for.  */

static void
make_preds_opaque (basic_block b, int j)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, b->preds)
    {
      basic_block pb = e->src;

      if (e->aux || ! TEST_BIT (transp[pb->index], j))
        continue;

      RESET_BIT (transp[pb->index], j);
      make_preds_opaque (pb, j);
    }
}
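
/* For example: when lcm replaces the (now redundant) mode set in block B
   with one inserted on an edge above it, make_preds_opaque (B, j) clears
   the transparency of B's transitive predecessors, stopping at blocks
   that are already opaque and at edges whose aux field marks them as
   insertion sites.  */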

/* Record in LIVE that register REG died.  */

static void
reg_dies (rtx reg, HARD_REG_SET live)
{
  int regno, nregs;

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    for (nregs = hard_regno_nregs[regno][GET_MODE (reg)] - 1; nregs >= 0;
         nregs--)
      CLEAR_HARD_REG_BIT (live, regno + nregs);
}
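
/* Note that hard_regno_nregs[REGNO][MODE] is the number of consecutive
   hard registers a value of mode MODE occupies starting at REGNO, so the
   loops here and in reg_becomes_live cover every register the value
   spans.  */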

/* Record in LIVE that register REG became live.
   This is called via note_stores.  */

static void
reg_becomes_live (rtx reg, rtx setter ATTRIBUTE_UNUSED, void *live)
{
  int regno, nregs;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    for (nregs = hard_regno_nregs[regno][GET_MODE (reg)] - 1; nregs >= 0;
         nregs--)
      SET_HARD_REG_BIT (* (HARD_REG_SET *) live, regno + nregs);
}
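
/* reg_becomes_live matches the callback type that note_stores expects:
   it is called once per SET or CLOBBER destination in an insn pattern,
   with LIVE threaded through as the opaque data pointer, as in the call

       note_stores (PATTERN (insn), reg_becomes_live, &live_now);

   in optimize_mode_switching below.  */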

/* Make sure that if MODE_ENTRY is defined, then MODE_EXIT is defined too,
   and vice versa.  */
#if defined (MODE_ENTRY) != defined (MODE_EXIT)
#error "Both MODE_ENTRY and MODE_EXIT must be defined"
#endif

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
/* Split the fallthrough edge to the exit block, so that we can note
   that NORMAL_MODE is required there.  Return the new block if it's
   inserted before the exit block.  Otherwise return null.  */

static basic_block
create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
{
  edge eg;
  edge_iterator ei;
  basic_block pre_exit;

  /* The only non-call predecessor at this stage is a block with a
     fallthrough edge; there can be at most one, but there could be
     none at all, e.g. when exit is called.  */
  pre_exit = 0;
  FOR_EACH_EDGE (eg, ei, EXIT_BLOCK_PTR->preds)
    if (eg->flags & EDGE_FALLTHRU)
      {
        basic_block src_bb = eg->src;
        regset live_at_end = src_bb->il.rtl->global_live_at_end;
        rtx last_insn, ret_reg;

        gcc_assert (!pre_exit);
        /* If this function returns a value at the end, we have to
           insert the final mode switch before the return value copy
           to its hard register.  */
        if (EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 1
            && NONJUMP_INSN_P ((last_insn = BB_END (src_bb)))
            && GET_CODE (PATTERN (last_insn)) == USE
            && GET_CODE ((ret_reg = XEXP (PATTERN (last_insn), 0))) == REG)
          {
            int ret_start = REGNO (ret_reg);
            int nregs = hard_regno_nregs[ret_start][GET_MODE (ret_reg)];
            int ret_end = ret_start + nregs;
            int short_block = 0;
            int maybe_builtin_apply = 0;
            int forced_late_switch = 0;
            rtx before_return_copy;
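
            /* Walk backwards from the final USE of the return register;
               NREGS counts the parts of the return register that are not
               yet covered by a return value copy.  */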
            do
              {
                rtx return_copy = PREV_INSN (last_insn);
                rtx return_copy_pat, copy_reg;
                int copy_start, copy_num;
                int j;

                if (INSN_P (return_copy))
                  {
                    if (GET_CODE (PATTERN (return_copy)) == USE
                        && GET_CODE (XEXP (PATTERN (return_copy), 0)) == REG
                        && (FUNCTION_VALUE_REGNO_P
                            (REGNO (XEXP (PATTERN (return_copy), 0)))))
                      {
                        maybe_builtin_apply = 1;
                        last_insn = return_copy;
                        continue;
                      }
                    /* If the return register is not (in its entirety)
                       likely spilled, the return copy might be
                       partially or completely optimized away.  */
                    return_copy_pat = single_set (return_copy);
                    if (!return_copy_pat)
                      {
                        return_copy_pat = PATTERN (return_copy);
                        if (GET_CODE (return_copy_pat) != CLOBBER)
                          break;
                      }
                    copy_reg = SET_DEST (return_copy_pat);
                    if (GET_CODE (copy_reg) == REG)
                      copy_start = REGNO (copy_reg);
                    else if (GET_CODE (copy_reg) == SUBREG
                             && GET_CODE (SUBREG_REG (copy_reg)) == REG)
                      copy_start = REGNO (SUBREG_REG (copy_reg));
                    else
                      break;
                    if (copy_start >= FIRST_PSEUDO_REGISTER)
                      break;
                    copy_num
                      = hard_regno_nregs[copy_start][GET_MODE (copy_reg)];

                    /* If the return register is not likely spilled (as is
                       the case for floating point on SH4), then it might
                       be set by an arithmetic operation that needs a
                       different mode than the exit block.  */
                    for (j = n_entities - 1; j >= 0; j--)
                      {
                        int e = entity_map[j];
                        int mode = MODE_NEEDED (e, return_copy);

                        if (mode != num_modes[e] && mode != MODE_EXIT (e))
                          break;
                      }
                    if (j >= 0)
                      {
                        /* For the SH4, floating point loads depend on fpscr,
                           thus we might need to put the final mode switch
                           after the return value copy.  That is still OK,
                           because a floating point return value does not
                           conflict with address reloads.  */
                        if (copy_start >= ret_start
                            && copy_start + copy_num <= ret_end
                            && OBJECT_P (SET_SRC (return_copy_pat)))
                          forced_late_switch = 1;
                        break;
                      }

                    if (copy_start >= ret_start
                        && copy_start + copy_num <= ret_end)
                      nregs -= copy_num;
                    else if (!maybe_builtin_apply
                             || !FUNCTION_VALUE_REGNO_P (copy_start))
                      break;
                    last_insn = return_copy;
                  }
                /* ??? Exception handling can lead to the return value
                   copy being already separated from the return value use,
                   as in unwind-dw2.c.
                   Similarly, conditionally returning without a value,
                   and conditionally using builtin_return, can lead to an
                   isolated use.  */
                if (return_copy == BB_HEAD (src_bb))
                  {
                    short_block = 1;
                    break;
                  }
                last_insn = return_copy;
              }
            while (nregs);

            /* If we didn't see a full return value copy, verify that there
               is a plausible reason for this.  If some, but not all of the
               return register is likely spilled, we can expect that there
               is a copy for the likely spilled part.  */
            gcc_assert (!nregs
                        || forced_late_switch
                        || short_block
                        || !(CLASS_LIKELY_SPILLED_P
                             (REGNO_REG_CLASS (ret_start)))
                        || (nregs
                            != hard_regno_nregs[ret_start][GET_MODE (ret_reg)])
                        /* For multi-hard-register floating point
                           values, sometimes the likely-spilled part
                           is ordinarily copied first, then the other
                           part is set with an arithmetic operation.
                           This doesn't actually cause reload
                           failures, so let it pass.  */
                        || (GET_MODE_CLASS (GET_MODE (ret_reg)) != MODE_INT
                            && nregs != 1));

            if (INSN_P (last_insn))
              {
                before_return_copy
                  = emit_note_before (NOTE_INSN_DELETED, last_insn);
                /* Instructions preceding LAST_INSN in the same block might
                   require a different mode than MODE_EXIT, so if we might
                   have such instructions, keep them in a separate block
                   from pre_exit.  */
                if (last_insn != BB_HEAD (src_bb))
                  src_bb = split_block (src_bb,
                                        PREV_INSN (before_return_copy))->dest;
              }
            else
              before_return_copy = last_insn;
            pre_exit = split_block (src_bb, before_return_copy)->src;
          }
        else
          {
            pre_exit = split_edge (eg);
            COPY_REG_SET (pre_exit->il.rtl->global_live_at_start, live_at_end);
            COPY_REG_SET (pre_exit->il.rtl->global_live_at_end, live_at_end);
          }
      }

  return pre_exit;
}
#endif
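
/* A sketch (for orientation, not normative) of the CFG near the exit
   after create_pre_exit has run and a return value copy was found:

       ... --> PRE_EXIT --> (block with return value copy and USE) --> EXIT

   optimize_mode_switching below records MODE_EXIT as the mode of
   PRE_EXIT's segment, so the final mode switch is emitted before the
   return value is moved into its hard register.  */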

/* Find all insns that need a particular mode setting, and insert the
   necessary mode switches.  Return nonzero if we did work.  */

int
optimize_mode_switching (FILE *file)
{
  rtx insn;
  int e;
  basic_block bb;
  int need_commit = 0;
  sbitmap *kill;
  struct edge_list *edge_list;
  static const int num_modes[] = NUM_MODES_FOR_MODE_SWITCHING;
#define N_ENTITIES ARRAY_SIZE (num_modes)
  int entity_map[N_ENTITIES];
  struct bb_info *bb_info[N_ENTITIES];
  int i, j;
  int n_entities;
  int max_num_modes = 0;
  bool emitted = false;
  basic_block post_entry ATTRIBUTE_UNUSED, pre_exit ATTRIBUTE_UNUSED;

  clear_bb_flags ();

  for (e = N_ENTITIES - 1, n_entities = 0; e >= 0; e--)
    if (OPTIMIZE_MODE_SWITCHING (e))
      {
        int entry_exit_extra = 0;

        /* Create the list of segments within each basic block.
           If MODE_ENTRY / MODE_EXIT are defined, allow for three extra
           blocks split from the entry and exit blocks.  */
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
        entry_exit_extra = 3;
#endif
        bb_info[n_entities]
          = xcalloc (last_basic_block + entry_exit_extra, sizeof **bb_info);
        entity_map[n_entities++] = e;
        if (num_modes[e] > max_num_modes)
          max_num_modes = num_modes[e];
      }

  if (! n_entities)
    return 0;

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  /* Split the edge from the entry block, so that we can note that
     NORMAL_MODE is supplied there.  */
  post_entry = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
  pre_exit = create_pre_exit (n_entities, entity_map, num_modes);
#endif

  /* Create the bitmap vectors.  */

  antic = sbitmap_vector_alloc (last_basic_block, n_entities);
  transp = sbitmap_vector_alloc (last_basic_block, n_entities);
  comp = sbitmap_vector_alloc (last_basic_block, n_entities);

  sbitmap_vector_ones (transp, last_basic_block);

  for (j = n_entities - 1; j >= 0; j--)
    {
      int e = entity_map[j];
      int no_mode = num_modes[e];
      struct bb_info *info = bb_info[j];

      /* Determine what mode (if any) the first use of entity E in each
         basic block needs.  This is the mode that is anticipatable for
         the block.  Also compute the initial transparency settings.  */
      FOR_EACH_BB (bb)
        {
          struct seginfo *ptr;
          int last_mode = no_mode;
          HARD_REG_SET live_now;

          REG_SET_TO_HARD_REG_SET (live_now,
                                   bb->il.rtl->global_live_at_start);
          for (insn = BB_HEAD (bb);
               insn != NULL && insn != NEXT_INSN (BB_END (bb));
               insn = NEXT_INSN (insn))
            {
              if (INSN_P (insn))
                {
                  int mode = MODE_NEEDED (e, insn);
                  rtx link;

                  if (mode != no_mode && mode != last_mode)
                    {
                      last_mode = mode;
                      ptr = new_seginfo (mode, insn, bb->index, live_now);
                      add_seginfo (info + bb->index, ptr);
                      RESET_BIT (transp[bb->index], j);
                    }
#ifdef MODE_AFTER
                  last_mode = MODE_AFTER (last_mode, insn);
#endif
                  /* Update LIVE_NOW.  */
                  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
                    if (REG_NOTE_KIND (link) == REG_DEAD)
                      reg_dies (XEXP (link, 0), live_now);

                  note_stores (PATTERN (insn), reg_becomes_live, &live_now);
                  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
                    if (REG_NOTE_KIND (link) == REG_UNUSED)
                      reg_dies (XEXP (link, 0), live_now);
                }
            }

          info[bb->index].computing = last_mode;
          /* Check for blocks without ANY mode requirements.  */
          if (last_mode == no_mode)
            {
              ptr = new_seginfo (no_mode, BB_END (bb), bb->index, live_now);
              add_seginfo (info + bb->index, ptr);
            }
        }

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
      {
        int mode = MODE_ENTRY (e);

        if (mode != no_mode)
          {
            bb = post_entry;

            /* By always making this nontransparent, we save
               an extra check in make_preds_opaque.  We also
               need this to avoid confusing pre_edge_lcm when
               antic is cleared but transp and comp are set.  */
            RESET_BIT (transp[bb->index], j);

            /* Insert a fake computing definition of MODE into entry
               blocks which compute no mode.  This represents the mode on
               entry.  */
            info[bb->index].computing = mode;

            if (pre_exit)
              info[pre_exit->index].seginfo->mode = MODE_EXIT (e);
          }
      }
#endif /* MODE_ENTRY && MODE_EXIT */
    }

  kill = sbitmap_vector_alloc (last_basic_block, n_entities);
  for (i = 0; i < max_num_modes; i++)
    {
      int current_mode[N_ENTITIES];
      sbitmap *delete;
      sbitmap *insert;

      /* Set the anticipatable and computing arrays.  */
      sbitmap_vector_zero (antic, last_basic_block);
      sbitmap_vector_zero (comp, last_basic_block);
      for (j = n_entities - 1; j >= 0; j--)
        {
          int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
          struct bb_info *info = bb_info[j];

          FOR_EACH_BB (bb)
            {
              if (info[bb->index].seginfo->mode == m)
                SET_BIT (antic[bb->index], j);

              if (info[bb->index].computing == m)
                SET_BIT (comp[bb->index], j);
            }
        }

      /* Calculate the optimal locations for placing
         mode switches to modes with priority I.  */

      FOR_EACH_BB (bb)
        sbitmap_not (kill[bb->index], transp[bb->index]);
      edge_list = pre_edge_lcm (file, n_entities, transp, comp, antic,
                                kill, &insert, &delete);
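
      /* On return from pre_edge_lcm, INSERT[E] has a bit set for each
         entity whose mode set should be added on edge E, and DELETE[BB]
         has a bit set for each entity whose computation in block BB has
         become redundant.  */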

      for (j = n_entities - 1; j >= 0; j--)
        {
          /* Insert all mode sets whose placement lcm has determined.  */
          int no_mode = num_modes[entity_map[j]];

          /* Wherever we have moved a mode setting upwards in the flow graph,
             the blocks between the new setting site and the now redundant
             computation cease to be transparent for any lower-priority
             mode of the same entity.  First mark each insertion site edge
             by setting its aux field, then propagate the new
             non-transparency from the redundant computation upwards till
             we hit an insertion site or an already non-transparent block.  */
          for (e = NUM_EDGES (edge_list) - 1; e >= 0; e--)
            {
              edge eg = INDEX_EDGE (edge_list, e);
              int mode;
              basic_block src_bb;
              HARD_REG_SET live_at_edge;
              rtx mode_set;

              eg->aux = 0;

              if (! TEST_BIT (insert[e], j))
                continue;

              eg->aux = (void *)1;

              mode = current_mode[j];
              src_bb = eg->src;

              REG_SET_TO_HARD_REG_SET (live_at_edge,
                                       src_bb->il.rtl->global_live_at_end);

              start_sequence ();
              EMIT_MODE_SET (entity_map[j], mode, live_at_edge);
              mode_set = get_insns ();
              end_sequence ();

              /* Do not bother to insert an empty sequence.  */
              if (mode_set == NULL_RTX)
                continue;

              /* If this is an abnormal edge, we'll insert at the end
                 of the previous block.  */
              if (eg->flags & EDGE_ABNORMAL)
                {
                  emitted = true;
                  if (JUMP_P (BB_END (src_bb)))
                    emit_insn_before (mode_set, BB_END (src_bb));
                  else
                    {
                      /* It doesn't make sense to switch to normal
                         mode after a CALL_INSN.  The cases in which a
                         CALL_INSN may have an abnormal edge are
                         sibcalls and EH edges.  In the case of
                         sibcalls, the dest basic-block is the
                         EXIT_BLOCK, which runs in normal mode; it is
                         assumed that a sibcall insn requires normal
                         mode itself, so no mode switch would be
                         required after the call (it wouldn't make
                         sense, anyway).  In the case of EH edges, EH
                         entry points also start in normal mode, so a
                         similar reasoning applies.  */
                      gcc_assert (NONJUMP_INSN_P (BB_END (src_bb)));
                      emit_insn_after (mode_set, BB_END (src_bb));
                    }
                  bb_info[j][src_bb->index].computing = mode;
                  RESET_BIT (transp[src_bb->index], j);
                }
              else
                {
                  need_commit = 1;
                  insert_insn_on_edge (mode_set, eg);
                }
            }

          FOR_EACH_BB_REVERSE (bb)
            if (TEST_BIT (delete[bb->index], j))
              {
                make_preds_opaque (bb, j);
                /* Cancel the 'deleted' mode set.  */
                bb_info[j][bb->index].seginfo->mode = no_mode;
              }
        }

      sbitmap_vector_free (delete);
      sbitmap_vector_free (insert);
      clear_aux_for_edges ();
      free_edge_list (edge_list);
    }

  /* Now output the remaining mode sets in all the segments.  */
  for (j = n_entities - 1; j >= 0; j--)
    {
      int no_mode = num_modes[entity_map[j]];

      FOR_EACH_BB_REVERSE (bb)
        {
          struct seginfo *ptr, *next;
          for (ptr = bb_info[j][bb->index].seginfo; ptr; ptr = next)
            {
              next = ptr->next;
              if (ptr->mode != no_mode)
                {
                  rtx mode_set;

                  start_sequence ();
                  EMIT_MODE_SET (entity_map[j], ptr->mode, ptr->regs_live);
                  mode_set = get_insns ();
                  end_sequence ();

                  /* Insert MODE_SET only if it is nonempty.  */
                  if (mode_set != NULL_RTX)
                    {
                      emitted = true;
                      if (NOTE_P (ptr->insn_ptr)
                          && (NOTE_LINE_NUMBER (ptr->insn_ptr)
                              == NOTE_INSN_BASIC_BLOCK))
                        emit_insn_after (mode_set, ptr->insn_ptr);
                      else
                        emit_insn_before (mode_set, ptr->insn_ptr);
                    }
                }

              free (ptr);
            }
        }

      free (bb_info[j]);
    }

  /* Finished.  Free up all the things we've allocated.  */
  sbitmap_vector_free (kill);
  sbitmap_vector_free (antic);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  if (need_commit)
    commit_edge_insertions ();

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
#else
  if (!need_commit && !emitted)
    return 0;
#endif

  max_regno = max_reg_num ();
  allocate_reg_info (max_regno, FALSE, FALSE);
  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
                                    (PROP_DEATH_NOTES | PROP_KILL_DEAD_CODE
                                     | PROP_SCAN_DEAD_CODE));

  return 1;
}

#endif /* OPTIMIZE_MODE_SWITCHING */