/* CPU mode switching
   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008,
   2009, 2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
32 #include "basic-block.h"
35 #include "tree-pass.h"
39 /* We want target macros for the mode switching code to be able to refer
40 to instruction attribute values. */
41 #include "insn-attr.h"
#ifdef OPTIMIZE_MODE_SWITCHING
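
/* A port opts into this pass by defining OPTIMIZE_MODE_SWITCHING together
   with a small family of companion macros in its target headers; the SH
   port (fpscr mode switching) is the classic user.  The sketch below is
   illustrative only -- a hypothetical single-entity target, not taken from
   any real port; get_attr_fp_mode and gen_toggle_mode are made-up names:

     #define OPTIMIZE_MODE_SWITCHING(ENTITY)   1
     #define NUM_MODES_FOR_MODE_SWITCHING      { 2 }
     #define MODE_NEEDED(ENTITY, INSN)         get_attr_fp_mode (INSN)
     #define MODE_PRIORITY_TO_MODE(ENTITY, N)  (N)
     #define EMIT_MODE_SET(ENTITY, MODE, HARD_REGS_LIVE) \
       emit_insn (gen_toggle_mode ())

   A mode returned by MODE_NEEDED that equals the entity's element of
   NUM_MODES_FOR_MODE_SWITCHING (here 2) means "no particular mode needed".
   MODE_ENTRY, MODE_EXIT and MODE_AFTER are optional refinements; their
   uses appear below.  */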
/* The algorithm for setting the modes consists of scanning the insn list
   and finding all the insns which require a specific mode.  Each insn gets
   a unique struct seginfo element.  These structures are inserted into a list
   for each basic block.  For each entity, there is an array of bb_info over
   the flow graph basic blocks (local var `bb_info'), each element of which
   contains a list of all insns within that basic block, in the order they
   are encountered.

   For each entity, any basic block WITHOUT any insns requiring a specific
   mode is given a single entry without a mode.  (Each basic block
   in the flow graph must have at least one entry in the segment table.)

   The LCM algorithm is then run over the flow graph to determine where to
   place the sets to the highest-priority mode with respect to the first
   insn in any one block.  Any adjustments required to the transparency
   vectors are made, then the next iteration starts for the next-lower
   priority mode, till for each entity all modes are exhausted.

   More details can be found in the code of optimize_mode_switching.  */
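
/* As a worked example (illustrative, not from any real port): suppose the
   insns of one block need modes <none, A, none, B> for some entity.  The
   scan below creates a seginfo entry for the A-insn and one for the B-insn;
   the block's first seginfo mode (A) becomes its anticipatable mode, its
   COMPUTING field ends up as the last mode set in it (B), and the block is
   marked non-transparent for that entity.  A block with no requirement at
   all gets a single modeless entry and stays transparent.  */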
/* This structure contains the information for each insn which requires
   either single or double mode to be set.
   MODE is the mode this insn must be executed in.
   INSN_PTR is the insn to be executed (may be the note that marks the
   beginning of a basic block).
   BBNUM is the flow graph basic block this insn occurs in.
   NEXT is the next insn in the same basic block.  */
struct seginfo
{
  int mode;
  rtx insn_ptr;
  int bbnum;
  struct seginfo *next;
  HARD_REG_SET regs_live;
};

struct bb_info
{
  struct seginfo *seginfo;
  int computing;
};
/* These bitmaps are used for the LCM algorithm.  */

static sbitmap *antic;
static sbitmap *transp;
static sbitmap *comp;
static struct seginfo * new_seginfo (int, rtx, int, HARD_REG_SET);
static void add_seginfo (struct bb_info *, struct seginfo *);
static void reg_dies (rtx, HARD_REG_SET *);
static void reg_becomes_live (rtx, const_rtx, void *);
static void make_preds_opaque (basic_block, int);
/* This function will allocate a new SEGINFO structure, initialized
   with the MODE, INSN, and basic block BB parameters.  */

static struct seginfo *
new_seginfo (int mode, rtx insn, int bb, HARD_REG_SET regs_live)
{
  struct seginfo *ptr;
  ptr = XNEW (struct seginfo);
  ptr->mode = mode;
  ptr->insn_ptr = insn;
  ptr->bbnum = bb;
  ptr->next = NULL;
  COPY_HARD_REG_SET (ptr->regs_live, regs_live);
  return ptr;
}
/* Add a seginfo element to the end of a list.
   HEAD is a pointer to the list beginning.
   INFO is the structure to be linked in.  */

static void
add_seginfo (struct bb_info *head, struct seginfo *info)
{
  struct seginfo *ptr;

  if (head->seginfo == NULL)
    head->seginfo = info;
  else
    {
      ptr = head->seginfo;
      while (ptr->next != NULL)
        ptr = ptr->next;
      ptr->next = info;
    }
}
/* Make all predecessors of basic block B opaque, recursively, till we hit
   some that are already non-transparent, or an edge where aux is set; that
   denotes that a mode set is to be done on that edge.
   J is the bit number in the bitmaps that corresponds to the entity that
   we are currently handling mode-switching for.  */

static void
make_preds_opaque (basic_block b, int j)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, b->preds)
    {
      basic_block pb = e->src;

      if (e->aux || ! TEST_BIT (transp[pb->index], j))
        continue;

      RESET_BIT (transp[pb->index], j);
      make_preds_opaque (pb, j);
    }
}
/* Record in LIVE that register REG died.  */

static void
reg_dies (rtx reg, HARD_REG_SET *live)
{
  int regno;

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    remove_from_hard_reg_set (live, GET_MODE (reg), regno);
}
/* Record in LIVE that register REG became live.
   This is called via note_stores.  */

static void
reg_becomes_live (rtx reg, const_rtx setter ATTRIBUTE_UNUSED, void *live)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set ((HARD_REG_SET *) live, GET_MODE (reg), regno);
}
/* Make sure that if MODE_ENTRY is defined then MODE_EXIT is defined,
   and vice versa.  */
#if defined (MODE_ENTRY) != defined (MODE_EXIT)
 #error "Both MODE_ENTRY and MODE_EXIT must be defined"
#endif
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
/* Split the fallthrough edge to the exit block, so that we can note
   that NORMAL_MODE is required there.  Return the new block if it's
   inserted before the exit block.  Otherwise return null.  */
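
/* Illustrative sketch (not from any particular port) of the epilogue tail
   this function scans backwards from:

       (insn ... (set (reg:SF fr0) (reg:SF 163)))  -- return value copy
       (insn ... (use (reg/i:SF fr0)))             -- LAST_INSN

   The final mode switch normally has to go before the copy, unless the
   copy itself needs the exit mode; the forced_late_switch case below
   handles that.  */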
static basic_block
create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
{
  edge eg;
  edge_iterator ei;
  basic_block pre_exit;

  /* The only non-call predecessor at this stage is a block with a
     fallthrough edge; there can be at most one, but there could be
     none at all, e.g. when exit is called.  */
  pre_exit = 0;
  FOR_EACH_EDGE (eg, ei, EXIT_BLOCK_PTR->preds)
    if (eg->flags & EDGE_FALLTHRU)
      {
        basic_block src_bb = eg->src;
        rtx last_insn, ret_reg;

        gcc_assert (!pre_exit);
        /* If this function returns a value at the end, we have to
           insert the final mode switch before the return value copy
           to its hard register.  */
        if (EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 1
            && NONJUMP_INSN_P ((last_insn = BB_END (src_bb)))
            && GET_CODE (PATTERN (last_insn)) == USE
            && GET_CODE ((ret_reg = XEXP (PATTERN (last_insn), 0))) == REG)
          {
            int ret_start = REGNO (ret_reg);
            int nregs = hard_regno_nregs[ret_start][GET_MODE (ret_reg)];
            int ret_end = ret_start + nregs;
            int short_block = 0;
            int maybe_builtin_apply = 0;
            int forced_late_switch = 0;
            rtx before_return_copy;

            do
              {
                rtx return_copy = PREV_INSN (last_insn);
                rtx return_copy_pat, copy_reg;
                int copy_start, copy_num;
                int j;

                if (INSN_P (return_copy))
                  {
                    /* When using SJLJ exceptions, the call to the
                       unregister function is inserted between the
                       clobber of the return value and the copy.
                       We do not want to split the block before this
                       or any other call; if we have not found the
                       copy yet, the copy must have been deleted.  */
                    if (CALL_P (return_copy))
                      {
                        short_block = 1;
                        break;
                      }
                    return_copy_pat = PATTERN (return_copy);
                    switch (GET_CODE (return_copy_pat))
                      {
                      case USE:
                        /* Skip __builtin_apply pattern.  */
                        if (GET_CODE (XEXP (return_copy_pat, 0)) == REG
                            && (targetm.calls.function_value_regno_p
                                (REGNO (XEXP (return_copy_pat, 0)))))
                          {
                            maybe_builtin_apply = 1;
                            last_insn = return_copy;
                            continue;
                          }
                        break;

                      case ASM_OPERANDS:
                        /* Skip barrier insns.  */
                        if (!MEM_VOLATILE_P (return_copy_pat))
                          break;
                        /* Fall through.  */

                      case ASM_INPUT:
                      case UNSPEC_VOLATILE:
                        last_insn = return_copy;
                        continue;

                      default:
                        break;
                      }

                    /* If the return register is not (in its entirety)
                       likely spilled, the return copy might be
                       partially or completely optimized away.  */
                    return_copy_pat = single_set (return_copy);
                    if (!return_copy_pat)
                      {
                        return_copy_pat = PATTERN (return_copy);
                        if (GET_CODE (return_copy_pat) != CLOBBER)
                          break;
                        else if (!optimize)
                          {
                            /* This might be (clobber (reg [<result>]))
                               when not optimizing.  Then check if
                               the previous insn is the clobber for
                               the return register.  */
                            copy_reg = SET_DEST (return_copy_pat);
                            if (GET_CODE (copy_reg) == REG
                                && !HARD_REGISTER_NUM_P (REGNO (copy_reg)))
                              {
                                if (INSN_P (PREV_INSN (return_copy)))
                                  {
                                    return_copy = PREV_INSN (return_copy);
                                    return_copy_pat = PATTERN (return_copy);
                                    if (GET_CODE (return_copy_pat) != CLOBBER)
                                      break;
                                  }
                              }
                          }
                      }
                    copy_reg = SET_DEST (return_copy_pat);
                    if (GET_CODE (copy_reg) == REG)
                      copy_start = REGNO (copy_reg);
                    else if (GET_CODE (copy_reg) == SUBREG
                             && GET_CODE (SUBREG_REG (copy_reg)) == REG)
                      copy_start = REGNO (SUBREG_REG (copy_reg));
                    else
                      break;
                    if (copy_start >= FIRST_PSEUDO_REGISTER)
                      break;
                    copy_num
                      = hard_regno_nregs[copy_start][GET_MODE (copy_reg)];

                    /* If the return register is not likely spilled, - as is
                       the case for floating point on SH4 - then it might
                       be set by an arithmetic operation that needs a
                       different mode than the exit block.  */
                    for (j = n_entities - 1; j >= 0; j--)
                      {
                        int e = entity_map[j];
                        int mode = MODE_NEEDED (e, return_copy);

                        if (mode != num_modes[e] && mode != MODE_EXIT (e))
                          break;
                      }
                    if (j >= 0)
                      {
                        /* For the SH4, floating point loads depend on fpscr,
                           thus we might need to put the final mode switch
                           after the return value copy.  That is still OK,
                           because a floating point return value does not
                           conflict with address reloads.  */
                        if (copy_start >= ret_start
                            && copy_start + copy_num <= ret_end
                            && OBJECT_P (SET_SRC (return_copy_pat)))
                          forced_late_switch = 1;
                        break;
                      }

                    if (copy_start >= ret_start
                        && copy_start + copy_num <= ret_end)
                      nregs -= copy_num;
                    else if (!maybe_builtin_apply
                             || !targetm.calls.function_value_regno_p
                                  (copy_start))
                      break;
                    last_insn = return_copy;
                  }
                /* ??? Exception handling can lead to the return value
                   copy being already separated from the return value use,
                   as in unwind-dw2.c.
                   Similarly, conditionally returning without a value,
                   and conditionally using builtin_return can lead to an
                   isolated use.  */
                if (return_copy == BB_HEAD (src_bb))
                  {
                    short_block = 1;
                    break;
                  }
                last_insn = return_copy;
              }
            while (nregs);

            /* If we didn't see a full return value copy, verify that there
               is a plausible reason for this.  If some, but not all of the
               return register is likely spilled, we can expect that there
               is a copy for the likely spilled part.  */
            gcc_assert (!nregs
                        || forced_late_switch
                        || short_block
                        || !(targetm.class_likely_spilled_p
                             (REGNO_REG_CLASS (ret_start)))
                        || (nregs
                            != hard_regno_nregs[ret_start][GET_MODE (ret_reg)])
                        /* For multi-hard-register floating point
                           values, sometimes the likely-spilled part
                           is ordinarily copied first, then the other
                           part is set with an arithmetic operation.
                           This doesn't actually cause reload
                           failures, so let it pass.  */
                        || (GET_MODE_CLASS (GET_MODE (ret_reg)) != MODE_INT
                            && nregs != 1));

            if (INSN_P (last_insn))
              {
                before_return_copy
                  = emit_note_before (NOTE_INSN_DELETED, last_insn);
                /* Instructions preceding LAST_INSN in the same block might
                   require a different mode than MODE_EXIT, so if we might
                   have such instructions, keep them in a separate block
                   from pre_exit.  */
                if (last_insn != BB_HEAD (src_bb))
                  src_bb = split_block (src_bb,
                                        PREV_INSN (before_return_copy))->dest;
              }
            else
              before_return_copy = last_insn;
            pre_exit = split_block (src_bb, before_return_copy)->src;
          }
        else
          {
            pre_exit = split_edge (eg);
          }
      }

  return pre_exit;
}
/* Find all insns that need a particular mode setting, and insert the
   necessary mode switches.  Return true if we did work.  */

static int
optimize_mode_switching (void)
{
  rtx insn;
  int e;
  basic_block bb;
  int need_commit = 0;
  sbitmap *kill;
  struct edge_list *edge_list;
  static const int num_modes[] = NUM_MODES_FOR_MODE_SWITCHING;
#define N_ENTITIES ARRAY_SIZE (num_modes)
  int entity_map[N_ENTITIES];
  struct bb_info *bb_info[N_ENTITIES];
  int i, j;
  int n_entities;
  int max_num_modes = 0;
  bool emitted ATTRIBUTE_UNUSED = false;
  basic_block post_entry ATTRIBUTE_UNUSED, pre_exit ATTRIBUTE_UNUSED;
  for (e = N_ENTITIES - 1, n_entities = 0; e >= 0; e--)
    if (OPTIMIZE_MODE_SWITCHING (e))
      {
        int entry_exit_extra = 0;

        /* Create the list of segments within each basic block.
           If NORMAL_MODE is defined, allow for extra blocks split
           from the entry and exit blocks.  */
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
        entry_exit_extra = 3;
#endif
        bb_info[n_entities]
          = XCNEWVEC (struct bb_info, last_basic_block + entry_exit_extra);
        entity_map[n_entities++] = e;
        if (num_modes[e] > max_num_modes)
          max_num_modes = num_modes[e];
      }

  if (!n_entities)
    return 0;
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  /* Split the edge from the entry block, so that we can note that
     NORMAL_MODE is supplied there.  */
  post_entry = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
  pre_exit = create_pre_exit (n_entities, entity_map, num_modes);
#endif

  df_analyze ();
  /* Create the bitmap vectors.  */

  antic = sbitmap_vector_alloc (last_basic_block, n_entities);
  transp = sbitmap_vector_alloc (last_basic_block, n_entities);
  comp = sbitmap_vector_alloc (last_basic_block, n_entities);

  sbitmap_vector_ones (transp, last_basic_block);
  for (j = n_entities - 1; j >= 0; j--)
    {
      int e = entity_map[j];
      int no_mode = num_modes[e];
      struct bb_info *info = bb_info[j];

      /* Determine what mode (if any) the first use of entity E in each
         basic block needs; this will be the mode that is anticipatable
         for that block.  Also compute the initial transparency settings.  */
      FOR_EACH_BB (bb)
        {
          struct seginfo *ptr;
          int last_mode = no_mode;
          bool any_set_required = false;
          HARD_REG_SET live_now;

          REG_SET_TO_HARD_REG_SET (live_now, df_get_live_in (bb));

          /* Pretend the mode is clobbered across abnormal edges.  */
          {
            edge_iterator ei;
            edge e;
            FOR_EACH_EDGE (e, ei, bb->preds)
              if (e->flags & EDGE_COMPLEX)
                break;
            if (e)
              {
                ptr = new_seginfo (no_mode, BB_HEAD (bb), bb->index, live_now);
                add_seginfo (info + bb->index, ptr);
                RESET_BIT (transp[bb->index], j);
              }
          }

          FOR_BB_INSNS (bb, insn)
            {
              if (INSN_P (insn))
                {
                  int mode = MODE_NEEDED (e, insn);
                  rtx link;

                  if (mode != no_mode && mode != last_mode)
                    {
                      any_set_required = true;
                      last_mode = mode;
                      ptr = new_seginfo (mode, insn, bb->index, live_now);
                      add_seginfo (info + bb->index, ptr);
                      RESET_BIT (transp[bb->index], j);
                    }
#ifdef MODE_AFTER
                  last_mode = MODE_AFTER (e, last_mode, insn);
#endif
                  /* Update LIVE_NOW.  */
                  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
                    if (REG_NOTE_KIND (link) == REG_DEAD)
                      reg_dies (XEXP (link, 0), &live_now);

                  note_stores (PATTERN (insn), reg_becomes_live, &live_now);
                  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
                    if (REG_NOTE_KIND (link) == REG_UNUSED)
                      reg_dies (XEXP (link, 0), &live_now);
                }
            }

          info[bb->index].computing = last_mode;
          /* Check for blocks without ANY mode requirements.
             N.B. because of MODE_AFTER, last_mode might still
             be different from no_mode, in which case we need to
             mark the block as nontransparent.  */
          if (!any_set_required)
            {
              ptr = new_seginfo (no_mode, BB_END (bb), bb->index, live_now);
              add_seginfo (info + bb->index, ptr);
              if (last_mode != no_mode)
                RESET_BIT (transp[bb->index], j);
            }
        }
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
      {
        int mode = MODE_ENTRY (e);

        if (mode != no_mode)
          {
            bb = post_entry;

            /* By always making this nontransparent, we save
               an extra check in make_preds_opaque.  We also
               need this to avoid confusing pre_edge_lcm when
               antic is cleared but transp and comp are set.  */
            RESET_BIT (transp[bb->index], j);

            /* Insert a fake computing definition of MODE into entry
               blocks which compute no mode.  This represents the mode on
               entry.  */
            info[bb->index].computing = mode;

            if (pre_exit)
              info[pre_exit->index].seginfo->mode = MODE_EXIT (e);
          }
      }
#endif /* NORMAL_MODE */
    }
  kill = sbitmap_vector_alloc (last_basic_block, n_entities);
  for (i = 0; i < max_num_modes; i++)
    {
      int current_mode[N_ENTITIES];
      sbitmap *del;
      sbitmap *insert;
      /* Set the anticipatable and computing arrays.  */
      sbitmap_vector_zero (antic, last_basic_block);
      sbitmap_vector_zero (comp, last_basic_block);
      for (j = n_entities - 1; j >= 0; j--)
        {
          int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
          struct bb_info *info = bb_info[j];

          FOR_EACH_BB (bb)
            {
              if (info[bb->index].seginfo->mode == m)
                SET_BIT (antic[bb->index], j);

              if (info[bb->index].computing == m)
                SET_BIT (comp[bb->index], j);
            }
        }
      /* Calculate the optimal locations for the
         placement of the mode switches to modes with priority I.  */

      FOR_EACH_BB (bb)
        sbitmap_not (kill[bb->index], transp[bb->index]);
      edge_list = pre_edge_lcm (n_entities, transp, comp, antic,
                                kill, &insert, &del);
      for (j = n_entities - 1; j >= 0; j--)
        {
          /* Insert all mode sets that have been inserted by lcm.  */
          int no_mode = num_modes[entity_map[j]];

          /* Wherever we have moved a mode setting upwards in the flow graph,
             the blocks between the new setting site and the now redundant
             computation cease to be transparent for any lower-priority
             mode of the same entity.  First set the aux field of each
             insertion site edge non-transparent, then propagate the new
             non-transparency from the redundant computation upwards till
             we hit an insertion site or an already non-transparent block.  */
          for (e = NUM_EDGES (edge_list) - 1; e >= 0; e--)
            {
              edge eg = INDEX_EDGE (edge_list, e);
              int mode;
              basic_block src_bb;
              HARD_REG_SET live_at_edge;
              rtx mode_set;

              eg->aux = 0;

              if (! TEST_BIT (insert[e], j))
                continue;

              eg->aux = (void *)1;

              mode = current_mode[j];
              src_bb = eg->src;

              REG_SET_TO_HARD_REG_SET (live_at_edge, df_get_live_out (src_bb));

              start_sequence ();
              EMIT_MODE_SET (entity_map[j], mode, live_at_edge);
              mode_set = get_insns ();
              end_sequence ();

              /* Do not bother to insert empty sequence.  */
              if (mode_set == NULL_RTX)
                continue;

              /* We should not get an abnormal edge here.  */
              gcc_assert (! (eg->flags & EDGE_ABNORMAL));

              need_commit = 1;
              insert_insn_on_edge (mode_set, eg);
            }
          FOR_EACH_BB_REVERSE (bb)
            if (TEST_BIT (del[bb->index], j))
              {
                make_preds_opaque (bb, j);
                /* Cancel the 'deleted' mode set.  */
                bb_info[j][bb->index].seginfo->mode = no_mode;
              }
        }

      sbitmap_vector_free (del);
      sbitmap_vector_free (insert);
      clear_aux_for_edges ();
      free_edge_list (edge_list);
    }
  /* Now output the remaining mode sets in all the segments.  */
  for (j = n_entities - 1; j >= 0; j--)
    {
      int no_mode = num_modes[entity_map[j]];

      FOR_EACH_BB_REVERSE (bb)
        {
          struct seginfo *ptr, *next;
          for (ptr = bb_info[j][bb->index].seginfo; ptr; ptr = next)
            {
              next = ptr->next;
              if (ptr->mode != no_mode)
                {
                  rtx mode_set;

                  start_sequence ();
                  EMIT_MODE_SET (entity_map[j], ptr->mode, ptr->regs_live);
                  mode_set = get_insns ();
                  end_sequence ();

                  /* Insert MODE_SET only if it is nonempty.  */
                  if (mode_set != NULL_RTX)
                    {
                      emitted = true;
                      if (NOTE_INSN_BASIC_BLOCK_P (ptr->insn_ptr))
                        emit_insn_after (mode_set, ptr->insn_ptr);
                      else
                        emit_insn_before (mode_set, ptr->insn_ptr);
                    }
                }

              free (ptr);
            }
        }

      free (bb_info[j]);
    }
  /* Finished.  Free up all the things we've allocated.  */
  sbitmap_vector_free (kill);
  sbitmap_vector_free (antic);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  if (need_commit)
    commit_edge_insertions ();

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
#else
  if (!need_commit && !emitted)
    return 0;
#endif

  return 1;
}

#endif /* OPTIMIZE_MODE_SWITCHING */
static bool
gate_mode_switching (void)
{
#ifdef OPTIMIZE_MODE_SWITCHING
  return true;
#else
  return false;
#endif
}

static unsigned int
rest_of_handle_mode_switching (void)
{
#ifdef OPTIMIZE_MODE_SWITCHING
  optimize_mode_switching ();
#endif /* OPTIMIZE_MODE_SWITCHING */
  return 0;
}
struct rtl_opt_pass pass_mode_switching =
{
 {
  RTL_PASS,
  "mode_sw",                            /* name */
  gate_mode_switching,                  /* gate */
  rest_of_handle_mode_switching,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_MODE_SWITCH,                       /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  0                                     /* todo_flags_finish */
 }
};