gcc/mode-switching.c
/* CPU mode switching
   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008,
   2009, 2010, 2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "tm_p.h"
#include "function.h"
#include "tree-pass.h"
#include "df.h"
#include "emit-rtl.h"

/* We want target macros for the mode switching code to be able to refer
   to instruction attribute values.  */
#include "insn-attr.h"
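
/* For orientation only: a minimal, hypothetical sketch of the target
   macros this pass consumes.  The macro names are the real hooks used
   below, but the single-entity example values and the sketch_* helpers
   are invented for illustration and are not taken from any real port.  */
#if 0
/* One entity (index 0) with two real modes; num_modes[0] == 2 doubles
   as the "no mode needed" value.  */
#define NUM_MODES_FOR_MODE_SWITCHING { 2 }
#define OPTIMIZE_MODE_SWITCHING(ENTITY) (optimize)

/* Mode required by INSN, or 2 if it imposes no requirement.  */
#define MODE_NEEDED(ENTITY, INSN) sketch_mode_needed (INSN)

/* Optional: the mode in effect after INSN executes.  */
#define MODE_AFTER(ENTITY, MODE, INSN) (MODE)

/* Map priority N (0 = highest) to a mode number.  */
#define MODE_PRIORITY_TO_MODE(ENTITY, N) (N)

/* Modes required on function entry and exit; defining both enables the
   entry/exit splitting done by create_pre_exit below.  */
#define MODE_ENTRY(ENTITY) 0
#define MODE_EXIT(ENTITY) 0

/* Emit the insns that switch to MODE; HARD_REGS_LIVE is available to
   targets that need to find a scratch register.  */
#define EMIT_MODE_SET(ENTITY, MODE, HARD_REGS_LIVE) \
  sketch_emit_mode_set (MODE)
#endif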
#ifdef OPTIMIZE_MODE_SWITCHING

/* The algorithm for setting the modes consists of scanning the insn list
   and finding all the insns which require a specific mode.  Each insn gets
   a unique struct seginfo element.  These structures are inserted into a
   list for each basic block.  For each entity, there is an array of bb_info
   over the flow graph basic blocks (local var 'bb_info'); each element
   contains a list of all insns within that basic block, in the order they
   are encountered.

   For each entity, any basic block WITHOUT any insns requiring a specific
   mode is given a single entry without a mode.  (Each basic block in the
   flow graph must have at least one entry in the segment table.)

   The LCM algorithm is then run over the flow graph to determine where to
   place the sets to the highest-priority mode with respect to the first
   insn in any one block.  Any adjustments required to the transparency
   vectors are made, then the next iteration starts for the next-lower
   priority mode, till for each entity all modes are exhausted.

   More details can be found in optimize_mode_switching ().  */
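
/* A worked example (with invented numbers, purely for illustration):
   assume one entity whose real modes are 0 and 1 and whose "no mode"
   value is 2.  A block containing insn A (needs mode 0), insn B (no
   requirement) and insn C (needs mode 1) gets the segment list
   { mode 0, A } -> { mode 1, C }, has computing == 1, and has its
   transparency bit cleared.  A block with no mode requirements at all
   gets the single segment { mode 2, BB_END } and stays transparent.  */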
/* This structure contains the information for each insn which requires
   either single or double mode to be set.
   MODE is the mode this insn must be executed in.
   INSN_PTR is the insn to be executed (may be the note that marks the
   beginning of a basic block).
   BBNUM is the flow graph basic block this insn occurs in.
   NEXT is the next insn in the same basic block.
   REGS_LIVE is the set of hard registers live at this point, for use
   by EMIT_MODE_SET.  */
struct seginfo
{
  int mode;
  rtx insn_ptr;
  int bbnum;
  struct seginfo *next;
  HARD_REG_SET regs_live;
};
/* Per-basic-block, per-entity information: the list of segments for the
   block and the mode (if any) the block ends up computing.  */
struct bb_info
{
  struct seginfo *seginfo;
  int computing;
};
/* These bitmaps are used for the LCM algorithm.  Bit J of a block's
   element refers to entity number J: TRANSP means the block neither
   needs nor sets a mode of that entity; ANTIC and COMP are recomputed
   for each mode priority and mark blocks whose first segment needs,
   respectively whose end computes, the mode currently being placed.  */
static sbitmap *antic;
static sbitmap *transp;
static sbitmap *comp;

static struct seginfo * new_seginfo (int, rtx, int, HARD_REG_SET);
static void add_seginfo (struct bb_info *, struct seginfo *);
static void reg_dies (rtx, HARD_REG_SET *);
static void reg_becomes_live (rtx, const_rtx, void *);
static void make_preds_opaque (basic_block, int);
/* Allocate a new SEGINFO structure, initialized with the MODE, INSN,
   and basic block BB parameters.  */

static struct seginfo *
new_seginfo (int mode, rtx insn, int bb, HARD_REG_SET regs_live)
{
  struct seginfo *ptr;
  ptr = XNEW (struct seginfo);
  ptr->mode = mode;
  ptr->insn_ptr = insn;
  ptr->bbnum = bb;
  ptr->next = NULL;
  COPY_HARD_REG_SET (ptr->regs_live, regs_live);
  return ptr;
}
/* Add a seginfo element to the end of a list.
   HEAD is a pointer to the list beginning.
   INFO is the structure to be linked in.  */

static void
add_seginfo (struct bb_info *head, struct seginfo *info)
{
  struct seginfo *ptr;

  if (head->seginfo == NULL)
    head->seginfo = info;
  else
    {
      ptr = head->seginfo;
      while (ptr->next != NULL)
        ptr = ptr->next;
      ptr->next = info;
    }
}
/* Make all predecessors of basic block B opaque, recursively, till we hit
   some that are already non-transparent, or an edge where aux is set; that
   denotes that a mode set is to be done on that edge.
   J is the bit number in the bitmaps that corresponds to the entity that
   we are currently handling mode-switching for.  */

static void
make_preds_opaque (basic_block b, int j)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, b->preds)
    {
      basic_block pb = e->src;

      if (e->aux || ! bitmap_bit_p (transp[pb->index], j))
        continue;

      bitmap_clear_bit (transp[pb->index], j);
      make_preds_opaque (pb, j);
    }
}
/* Record in LIVE that register REG died.  */

static void
reg_dies (rtx reg, HARD_REG_SET *live)
{
  int regno;

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    remove_from_hard_reg_set (live, GET_MODE (reg), regno);
}
/* Record in LIVE that register REG became live.
   This is called via note_stores.  */

static void
reg_becomes_live (rtx reg, const_rtx setter ATTRIBUTE_UNUSED, void *live)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set ((HARD_REG_SET *) live, GET_MODE (reg), regno);
}
/* Make sure that if MODE_ENTRY is defined then MODE_EXIT is defined,
   and vice versa.  */
#if defined (MODE_ENTRY) != defined (MODE_EXIT)
 #error "Both MODE_ENTRY and MODE_EXIT must be defined"
#endif
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
/* Split the fallthrough edge to the exit block, so that we can note
   that there NORMAL_MODE is required.  Return the new block if it's
   inserted before the exit block.  Otherwise return null.  */

static basic_block
create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
{
  edge eg;
  edge_iterator ei;
  basic_block pre_exit;

  /* The only non-call predecessor at this stage is a block with a
     fallthrough edge; there can be at most one, but there could be
     none at all, e.g. when exit is called.  */
  pre_exit = 0;
  FOR_EACH_EDGE (eg, ei, EXIT_BLOCK_PTR->preds)
    if (eg->flags & EDGE_FALLTHRU)
      {
        basic_block src_bb = eg->src;
        rtx last_insn, ret_reg;

        gcc_assert (!pre_exit);
        /* If this function returns a value at the end, we have to
           insert the final mode switch before the return value copy
           to its hard register.  */
        if (EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 1
            && NONJUMP_INSN_P ((last_insn = BB_END (src_bb)))
            && GET_CODE (PATTERN (last_insn)) == USE
            && GET_CODE ((ret_reg = XEXP (PATTERN (last_insn), 0))) == REG)
          {
            int ret_start = REGNO (ret_reg);
            int nregs = hard_regno_nregs[ret_start][GET_MODE (ret_reg)];
            int ret_end = ret_start + nregs;
            int short_block = 0;
            int maybe_builtin_apply = 0;
            int forced_late_switch = 0;
            rtx before_return_copy;

            do
              {
                rtx return_copy = PREV_INSN (last_insn);
                rtx return_copy_pat, copy_reg;
                int copy_start, copy_num;
                int j;

                if (INSN_P (return_copy))
                  {
                    /* When using SJLJ exceptions, the call to the
                       unregister function is inserted between the
                       clobber of the return value and the copy.
                       We do not want to split the block before this
                       or any other call; if we have not found the
                       copy yet, the copy must have been deleted.  */
                    if (CALL_P (return_copy))
                      {
                        short_block = 1;
                        break;
                      }
                    return_copy_pat = PATTERN (return_copy);
                    switch (GET_CODE (return_copy_pat))
                      {
                      case USE:
                        /* Skip __builtin_apply pattern.  */
                        if (GET_CODE (XEXP (return_copy_pat, 0)) == REG
                            && (targetm.calls.function_value_regno_p
                                (REGNO (XEXP (return_copy_pat, 0)))))
                          {
                            maybe_builtin_apply = 1;
                            last_insn = return_copy;
                            continue;
                          }
                        break;

                      case ASM_OPERANDS:
                        /* Skip barrier insns.  */
                        if (!MEM_VOLATILE_P (return_copy_pat))
                          break;

                        /* Fall through.  */

                      case ASM_INPUT:
                      case UNSPEC_VOLATILE:
                        last_insn = return_copy;
                        continue;

                      default:
                        break;
                      }

                    /* If the return register is not (in its entirety)
                       likely spilled, the return copy might be
                       partially or completely optimized away.  */
                    return_copy_pat = single_set (return_copy);
                    if (!return_copy_pat)
                      {
                        return_copy_pat = PATTERN (return_copy);
                        if (GET_CODE (return_copy_pat) != CLOBBER)
                          break;
                        else if (!optimize)
                          {
                            /* This might be (clobber (reg [<result>]))
                               when not optimizing.  Then check if
                               the previous insn is the clobber for
                               the return register.  */
                            copy_reg = SET_DEST (return_copy_pat);
                            if (GET_CODE (copy_reg) == REG
                                && !HARD_REGISTER_NUM_P (REGNO (copy_reg)))
                              {
                                if (INSN_P (PREV_INSN (return_copy)))
                                  {
                                    return_copy = PREV_INSN (return_copy);
                                    return_copy_pat = PATTERN (return_copy);
                                    if (GET_CODE (return_copy_pat) != CLOBBER)
                                      break;
                                  }
                              }
                          }
                      }
                    copy_reg = SET_DEST (return_copy_pat);
                    if (GET_CODE (copy_reg) == REG)
                      copy_start = REGNO (copy_reg);
                    else if (GET_CODE (copy_reg) == SUBREG
                             && GET_CODE (SUBREG_REG (copy_reg)) == REG)
                      copy_start = REGNO (SUBREG_REG (copy_reg));
                    else
                      break;
                    if (copy_start >= FIRST_PSEUDO_REGISTER)
                      {
                        last_insn = return_copy;
                        continue;
                      }
                    copy_num
                      = hard_regno_nregs[copy_start][GET_MODE (copy_reg)];

                    /* If the return register is not likely spilled (as is
                       the case for floating point on SH4), then it might
                       be set by an arithmetic operation that needs a
                       different mode than the exit block.  */
                    for (j = n_entities - 1; j >= 0; j--)
                      {
                        int e = entity_map[j];
                        int mode = MODE_NEEDED (e, return_copy);

                        if (mode != num_modes[e] && mode != MODE_EXIT (e))
                          break;
                      }
                    if (j >= 0)
                      {
                        /* __builtin_return emits a sequence of loads to all
                           return registers.  One of them might require
                           another mode than MODE_EXIT, even if it is
                           unrelated to the return value, so we want to put
                           the final mode switch after it.  */
                        if (maybe_builtin_apply
                            && targetm.calls.function_value_regno_p
                               (copy_start))
                          forced_late_switch = 1;

                        /* For the SH4, floating point loads depend on fpscr,
                           thus we might need to put the final mode switch
                           after the return value copy.  That is still OK,
                           because a floating point return value does not
                           conflict with address reloads.  */
                        if (copy_start >= ret_start
                            && copy_start + copy_num <= ret_end
                            && OBJECT_P (SET_SRC (return_copy_pat)))
                          forced_late_switch = 1;
                        break;
                      }

                    if (copy_start >= ret_start
                        && copy_start + copy_num <= ret_end)
                      nregs -= copy_num;
                    else if (!maybe_builtin_apply
                             || !targetm.calls.function_value_regno_p
                                 (copy_start))
                      break;
                    last_insn = return_copy;
                  }
                /* ??? Exception handling can lead to the return value
                   copy being already separated from the return value use,
                   as in unwind-dw2.c.
                   Similarly, conditionally returning without a value,
                   and conditionally using builtin_return can lead to an
                   isolated use.  */
                if (return_copy == BB_HEAD (src_bb))
                  {
                    short_block = 1;
                    break;
                  }
                last_insn = return_copy;
              }
            while (nregs);

            /* If we didn't see a full return value copy, verify that there
               is a plausible reason for this.  If some, but not all of the
               return register is likely spilled, we can expect that there
               is a copy for the likely spilled part.  */
            gcc_assert (!nregs
                        || forced_late_switch
                        || short_block
                        || !(targetm.class_likely_spilled_p
                             (REGNO_REG_CLASS (ret_start)))
                        || (nregs
                            != hard_regno_nregs[ret_start][GET_MODE (ret_reg)])
                        /* For multi-hard-register floating point
                           values, sometimes the likely-spilled part
                           is ordinarily copied first, then the other
                           part is set with an arithmetic operation.
                           This doesn't actually cause reload
                           failures, so let it pass.  */
                        || (GET_MODE_CLASS (GET_MODE (ret_reg)) != MODE_INT
                            && nregs != 1));

            if (INSN_P (last_insn))
              {
                before_return_copy
                  = emit_note_before (NOTE_INSN_DELETED, last_insn);
                /* Instructions preceding LAST_INSN in the same block might
                   require a different mode than MODE_EXIT, so if we might
                   have such instructions, keep them in a separate block
                   from pre_exit.  */
                if (last_insn != BB_HEAD (src_bb))
                  src_bb = split_block (src_bb,
                                        PREV_INSN (before_return_copy))->dest;
              }
            else
              before_return_copy = last_insn;
            pre_exit = split_block (src_bb, before_return_copy)->src;
          }
        else
          {
            pre_exit = split_edge (eg);
          }
      }

  return pre_exit;
}
#endif
/* Find all insns that need a particular mode setting, and insert the
   necessary mode switches.  Return true if we did work.  */

static int
optimize_mode_switching (void)
{
  rtx insn;
  int e;
  basic_block bb;
  int need_commit = 0;
  sbitmap *kill;
  struct edge_list *edge_list;
  static const int num_modes[] = NUM_MODES_FOR_MODE_SWITCHING;
#define N_ENTITIES ARRAY_SIZE (num_modes)
  int entity_map[N_ENTITIES];
  struct bb_info *bb_info[N_ENTITIES];
  int i, j;
  int n_entities;
  int max_num_modes = 0;
  bool emitted ATTRIBUTE_UNUSED = false;
  basic_block post_entry ATTRIBUTE_UNUSED, pre_exit ATTRIBUTE_UNUSED;

  for (e = N_ENTITIES - 1, n_entities = 0; e >= 0; e--)
    if (OPTIMIZE_MODE_SWITCHING (e))
      {
        int entry_exit_extra = 0;

        /* Create the list of segments within each basic block.
           If NORMAL_MODE is defined, allow for two extra
           blocks split from the entry and exit block.  */
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
        entry_exit_extra = 3;
#endif
        bb_info[n_entities]
          = XCNEWVEC (struct bb_info, last_basic_block + entry_exit_extra);
        entity_map[n_entities++] = e;
        if (num_modes[e] > max_num_modes)
          max_num_modes = num_modes[e];
      }

  if (! n_entities)
    return 0;

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  /* Split the edge from the entry block, so that we can note that
     there NORMAL_MODE is supplied.  */
  post_entry = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
  pre_exit = create_pre_exit (n_entities, entity_map, num_modes);
#endif

  df_analyze ();

  /* Create the bitmap vectors.  */

  antic = sbitmap_vector_alloc (last_basic_block, n_entities);
  transp = sbitmap_vector_alloc (last_basic_block, n_entities);
  comp = sbitmap_vector_alloc (last_basic_block, n_entities);

  bitmap_vector_ones (transp, last_basic_block);

  for (j = n_entities - 1; j >= 0; j--)
    {
      int e = entity_map[j];
      int no_mode = num_modes[e];
      struct bb_info *info = bb_info[j];

      /* Determine what mode (if any) the first use of entity E in a block
         needs.  This will be the mode that is anticipatable for that block.
         Also compute the initial transparency settings.  */
      FOR_EACH_BB (bb)
        {
          struct seginfo *ptr;
          int last_mode = no_mode;
          bool any_set_required = false;
          HARD_REG_SET live_now;

          REG_SET_TO_HARD_REG_SET (live_now, df_get_live_in (bb));

          /* Pretend the mode is clobbered across abnormal edges.  */
          {
            edge_iterator ei;
            edge e;
            FOR_EACH_EDGE (e, ei, bb->preds)
              if (e->flags & EDGE_COMPLEX)
                break;
            if (e)
              {
                ptr = new_seginfo (no_mode, BB_HEAD (bb), bb->index, live_now);
                add_seginfo (info + bb->index, ptr);
                bitmap_clear_bit (transp[bb->index], j);
              }
          }

          FOR_BB_INSNS (bb, insn)
            {
              if (INSN_P (insn))
                {
                  int mode = MODE_NEEDED (e, insn);
                  rtx link;

                  if (mode != no_mode && mode != last_mode)
                    {
                      any_set_required = true;
                      last_mode = mode;
                      ptr = new_seginfo (mode, insn, bb->index, live_now);
                      add_seginfo (info + bb->index, ptr);
                      bitmap_clear_bit (transp[bb->index], j);
                    }
#ifdef MODE_AFTER
                  last_mode = MODE_AFTER (e, last_mode, insn);
#endif
                  /* Update LIVE_NOW.  */
                  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
                    if (REG_NOTE_KIND (link) == REG_DEAD)
                      reg_dies (XEXP (link, 0), &live_now);

                  note_stores (PATTERN (insn), reg_becomes_live, &live_now);
                  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
                    if (REG_NOTE_KIND (link) == REG_UNUSED)
                      reg_dies (XEXP (link, 0), &live_now);
                }
            }

          info[bb->index].computing = last_mode;
          /* Check for blocks without ANY mode requirements.
             N.B. because of MODE_AFTER, last_mode might still be different
             from no_mode.  */
          if (!any_set_required)
            {
              ptr = new_seginfo (no_mode, BB_END (bb), bb->index, live_now);
              add_seginfo (info + bb->index, ptr);
            }
        }
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
      {
        int mode = MODE_ENTRY (e);

        if (mode != no_mode)
          {
            bb = post_entry;

            /* By always making this nontransparent, we save
               an extra check in make_preds_opaque.  We also
               need this to avoid confusing pre_edge_lcm when
               antic is cleared but transp and comp are set.  */
            bitmap_clear_bit (transp[bb->index], j);

            /* Insert a fake computing definition of MODE into entry
               blocks which compute no mode.  This represents the mode on
               entry.  */
            info[bb->index].computing = mode;

            if (pre_exit)
              info[pre_exit->index].seginfo->mode = MODE_EXIT (e);
          }
      }
#endif /* NORMAL_MODE */
    }

  kill = sbitmap_vector_alloc (last_basic_block, n_entities);
  for (i = 0; i < max_num_modes; i++)
    {
      int current_mode[N_ENTITIES];
      sbitmap *del;
      sbitmap *insert;

      /* Set the anticipatable and computing arrays.  */
      bitmap_vector_clear (antic, last_basic_block);
      bitmap_vector_clear (comp, last_basic_block);
      for (j = n_entities - 1; j >= 0; j--)
        {
          int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
          struct bb_info *info = bb_info[j];

          FOR_EACH_BB (bb)
            {
              if (info[bb->index].seginfo->mode == m)
                bitmap_set_bit (antic[bb->index], j);

              if (info[bb->index].computing == m)
                bitmap_set_bit (comp[bb->index], j);
            }
        }

      /* Calculate the optimal locations for placing mode switches to
         the modes with priority I.  */

      FOR_EACH_BB (bb)
        bitmap_not (kill[bb->index], transp[bb->index]);
      edge_list = pre_edge_lcm (n_entities, transp, comp, antic,
                                kill, &insert, &del);

      for (j = n_entities - 1; j >= 0; j--)
        {
          /* Insert all mode sets that have been inserted by lcm.  */
          int no_mode = num_modes[entity_map[j]];

          /* Wherever we have moved a mode setting upwards in the flow graph,
             the blocks between the new setting site and the now redundant
             computation cease to be transparent for any lower-priority
             mode of the same entity.  First set the aux field of each
             insertion site edge non-transparent, then propagate the new
             non-transparency from the redundant computation upwards till
             we hit an insertion site or an already non-transparent block.  */
          for (e = NUM_EDGES (edge_list) - 1; e >= 0; e--)
            {
              edge eg = INDEX_EDGE (edge_list, e);
              int mode;
              basic_block src_bb;
              HARD_REG_SET live_at_edge;
              rtx mode_set;

              eg->aux = 0;

              if (! bitmap_bit_p (insert[e], j))
                continue;

              eg->aux = (void *)1;

              mode = current_mode[j];
              src_bb = eg->src;

              REG_SET_TO_HARD_REG_SET (live_at_edge, df_get_live_out (src_bb));

              start_sequence ();
              EMIT_MODE_SET (entity_map[j], mode, live_at_edge);
              mode_set = get_insns ();
              end_sequence ();

              /* Do not bother to insert an empty sequence.  */
              if (mode_set == NULL_RTX)
                continue;

              /* We should not get an abnormal edge here.  */
              gcc_assert (! (eg->flags & EDGE_ABNORMAL));

              need_commit = 1;
              insert_insn_on_edge (mode_set, eg);
            }

          FOR_EACH_BB_REVERSE (bb)
            if (bitmap_bit_p (del[bb->index], j))
              {
                make_preds_opaque (bb, j);
                /* Cancel the 'deleted' mode set.  */
                bb_info[j][bb->index].seginfo->mode = no_mode;
              }
        }

      sbitmap_vector_free (del);
      sbitmap_vector_free (insert);
      clear_aux_for_edges ();
      free_edge_list (edge_list);
    }

  /* Now output the remaining mode sets in all the segments.  */
  for (j = n_entities - 1; j >= 0; j--)
    {
      int no_mode = num_modes[entity_map[j]];

      FOR_EACH_BB_REVERSE (bb)
        {
          struct seginfo *ptr, *next;
          for (ptr = bb_info[j][bb->index].seginfo; ptr; ptr = next)
            {
              next = ptr->next;
              if (ptr->mode != no_mode)
                {
                  rtx mode_set;

                  start_sequence ();
                  EMIT_MODE_SET (entity_map[j], ptr->mode, ptr->regs_live);
                  mode_set = get_insns ();
                  end_sequence ();

                  /* Insert MODE_SET only if it is nonempty.  */
                  if (mode_set != NULL_RTX)
                    {
                      emitted = true;
                      if (NOTE_INSN_BASIC_BLOCK_P (ptr->insn_ptr))
                        emit_insn_after (mode_set, ptr->insn_ptr);
                      else
                        emit_insn_before (mode_set, ptr->insn_ptr);
                    }
                }

              free (ptr);
            }
        }

      free (bb_info[j]);
    }

  /* Finished.  Free up all the things we've allocated.  */
  sbitmap_vector_free (kill);
  sbitmap_vector_free (antic);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  if (need_commit)
    commit_edge_insertions ();

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
#else
  if (!need_commit && !emitted)
    return 0;
#endif

  return 1;
}

#endif /* OPTIMIZE_MODE_SWITCHING */
static bool
gate_mode_switching (void)
{
#ifdef OPTIMIZE_MODE_SWITCHING
  return true;
#else
  return false;
#endif
}

static unsigned int
rest_of_handle_mode_switching (void)
{
#ifdef OPTIMIZE_MODE_SWITCHING
  optimize_mode_switching ();
#endif /* OPTIMIZE_MODE_SWITCHING */
  return 0;
}
struct rtl_opt_pass pass_mode_switching =
{
 {
  RTL_PASS,
  "mode_sw",                            /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_mode_switching,                  /* gate */
  rest_of_handle_mode_switching,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_MODE_SWITCH,                       /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  0                                     /* todo_flags_finish */
 }
};