1 /* Integrated Register Allocator. Changing code and generating moves.
2 Copyright (C) 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
26 #include "coretypes.h"
35 #include "hard-reg-set.h"
36 #include "basic-block.h"
41 #include "tree-pass.h"
50 static struct move
*create_move (pseudo_t
, pseudo_t
);
51 static void free_move (struct move
*);
52 static void free_move_list (struct move
*);
53 static int eq_move_lists_p (struct move
*, struct move
*);
54 static void change_regs (rtx
*);
55 static void add_to_edge_list (edge
, struct move
*, int);
56 static rtx
create_new_reg (rtx
);
57 static int subloop_tree_node_p (struct ira_loop_tree_node
*,
58 struct ira_loop_tree_node
*);
59 static void set_pseudo_reg (pseudo_t
, rtx
);
60 static int not_modified_p (pseudo_t
, pseudo_t
);
61 static void generate_edge_moves (edge
);
62 static void change_loop (struct ira_loop_tree_node
*);
63 static int eq_edge_move_lists_p (VEC(edge
,gc
) *);
64 static int can_move_through_p (rtx
*, struct move
*, int);
65 static void unify_moves (basic_block
, int);
66 static void traverse_moves (struct move
*);
67 static struct move
*modify_move_list (struct move
*);
68 static rtx
emit_move_list (struct move
*, int);
69 static void emit_moves (void);
71 /* The structure represents pseudo shuffling. */
74 /* The shuffled pseudos. */
76 /* The next move in the sequence. */
78 /* Use for finding dependencies. */
80 /* The size of the following array. */
82 /* Moves on which the given move depends. Dependency can be cyclic.
83 It means we need a temporary to generate the moves. */
87 /* Array of moves (indexed by BB index) which should be put at the
88 start/end of the corresponding blocks. */
89 static struct move
**at_bb_start
, **at_bb_end
;
91 /* Max regno before renaming some pseudo-registers. For example, the
92 same pseudo-register can be renamed in loop if its allocation is
93 different outside the loop. */
94 static int max_regno_before_changing
;
96 /* The function returns new move of pseudos TO and FROM. */
98 create_move (pseudo_t to
, pseudo_t from
)
102 move
= ira_allocate (sizeof (struct move
));
108 move
->visited_p
= FALSE
;
112 /* The function frees memory for MOVE and its dependencies. */
114 free_move (struct move
*move
)
116 if (move
->deps
!= NULL
)
117 ira_free (move
->deps
);
121 /* The function frees memory for list of the moves given by its
124 free_move_list (struct move
*head
)
128 for (; head
!= NULL
; head
= next
)
135 /* The function returns nonzero if the move lists LIST1 and LIST2
136 are equal (two moves are equal if they shuffle the same
139 eq_move_lists_p (struct move
*list1
, struct move
*list2
)
141 for (; list1
!= NULL
&& list2
!= NULL
;
142 list1
= list1
->next
, list2
= list2
->next
)
143 if (list1
->from
!= list2
->from
|| list1
->to
!= list2
->to
)
145 return list1
== list2
;
148 /* This recursive function changes pseudo-registers in *LOC if it is
151 change_regs (rtx
*loc
)
157 if (*loc
== NULL_RTX
)
159 code
= GET_CODE (*loc
);
162 regno
= REGNO (*loc
);
163 if (regno
< FIRST_PSEUDO_REGISTER
)
165 if (regno
>= max_regno_before_changing
)
166 /* It is a shared register which was changed already. */
168 /* ??? That is for reg_equal. */
169 if (ira_curr_loop_tree_node
->regno_pseudo_map
[regno
] != NULL
)
170 *loc
= PSEUDO_REG (ira_curr_loop_tree_node
->regno_pseudo_map
[regno
]);
174 fmt
= GET_RTX_FORMAT (code
);
175 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
178 change_regs (&XEXP (*loc
, i
));
179 else if (fmt
[i
] == 'E')
183 for (j
= XVECLEN (*loc
, i
) - 1; j
>= 0; j
--)
184 change_regs (&XVECEXP (*loc
, i
, j
));
189 /* The function attaches MOVE to the edge E. The move is attached to
190 the head of the list if HEAD_P is nonzero. */
192 add_to_edge_list (edge e
, struct move
*move
, int head_p
)
196 if (head_p
|| e
->aux
== NULL
)
203 for (last
= e
->aux
; last
->next
!= NULL
; last
= last
->next
)
210 /* The function creates and returns new pseudo-register with the same
211 attributes as ORIGINAL_REG. */
213 create_new_reg (rtx original_reg
)
217 new_reg
= gen_reg_rtx (GET_MODE (original_reg
));
218 ORIGINAL_REGNO (new_reg
) = ORIGINAL_REGNO (original_reg
);
219 REG_USERVAR_P (new_reg
) = REG_USERVAR_P (original_reg
);
220 REG_POINTER (new_reg
) = REG_POINTER (original_reg
);
221 REG_ATTRS (new_reg
) = REG_ATTRS (original_reg
);
223 fprintf (ira_dump_file
, " Creating newreg=%i from oldreg=%i\n",
224 REGNO (new_reg
), REGNO (original_reg
));
228 /* The function returns non-zero if the loop given by SUBNODE is inside the
229 loop given by NODE. */
231 subloop_tree_node_p (struct ira_loop_tree_node
*subnode
,
232 struct ira_loop_tree_node
*node
)
234 for (; subnode
!= NULL
; subnode
= subnode
->father
)
240 /* The function sets up field `reg' to REG for pseudos which have the
241 same regno as PSEUDO and which are inside the loop corresponding to
244 set_pseudo_reg (pseudo_t pseudo
, rtx reg
)
247 struct ira_loop_tree_node
*node
;
249 node
= PSEUDO_LOOP_TREE_NODE (pseudo
);
250 for (p
= regno_pseudo_map
[PSEUDO_REGNO (pseudo
)];
252 p
= PSEUDO_NEXT_REGNO_PSEUDO (p
))
253 if (subloop_tree_node_p (PSEUDO_LOOP_TREE_NODE (p
), node
))
254 PSEUDO_REG (p
) = reg
;
257 /* The following function returns nonzero if move insn of SRC_PSEUDO
258 to DEST_PSEUDO does not change value of the destination. */
260 not_modified_p (pseudo_t src_pseudo
, pseudo_t dest_pseudo
)
262 int regno
, orig_regno
;
264 struct ira_loop_tree_node
*node
;
266 orig_regno
= PSEUDO_REGNO (src_pseudo
);
267 regno
= REGNO (PSEUDO_REG (dest_pseudo
));
268 for (node
= PSEUDO_LOOP_TREE_NODE (src_pseudo
);
271 if ((p
= node
->regno_pseudo_map
[orig_regno
]) == NULL
)
273 else if (REGNO (PSEUDO_REG (p
)) == (unsigned) regno
)
275 else if (bitmap_bit_p (node
->modified_regnos
, orig_regno
))
280 /* The function generates and attaches moves to the edge E. It looks
281 at the final regnos of pseudos living on the edge with the same
282 original regno to find what moves should be generated. */
284 generate_edge_moves (edge e
)
286 struct ira_loop_tree_node
*src_loop_node
, *dest_loop_node
;
289 pseudo_t src_pseudo
, dest_pseudo
, *src_map
, *dest_map
;
292 src_loop_node
= IRA_BB_NODE (e
->src
)->father
;
293 dest_loop_node
= IRA_BB_NODE (e
->dest
)->father
;
295 if (src_loop_node
== dest_loop_node
)
297 src_map
= src_loop_node
->regno_pseudo_map
;
298 dest_map
= dest_loop_node
->regno_pseudo_map
;
299 EXECUTE_IF_SET_IN_REG_SET (DF_UPWARD_LIVE_IN (build_df
, e
->dest
),
300 FIRST_PSEUDO_REGISTER
, regno
, bi
)
301 if (bitmap_bit_p (DF_UPWARD_LIVE_OUT (build_df
, e
->src
), regno
))
303 src_pseudo
= src_map
[regno
];
304 dest_pseudo
= dest_map
[regno
];
305 if (REGNO (PSEUDO_REG (src_pseudo
))
306 == REGNO (PSEUDO_REG (dest_pseudo
)))
308 /* Actually it is not an optimization; we need this code because
309 the memory (remember about equivalent memory) might be ROM
310 (or placed in read only section). */
311 if (PSEUDO_HARD_REGNO (dest_pseudo
) < 0
312 && PSEUDO_HARD_REGNO (src_pseudo
) >= 0
313 && not_modified_p (src_pseudo
, dest_pseudo
))
315 if (ira_dump_file
!= NULL
)
316 fprintf (ira_dump_file
, "Remove r%d:%d->%d\n", regno
,
317 PSEUDO_NUM (src_pseudo
), PSEUDO_NUM (dest_pseudo
));
320 move
= create_move (dest_pseudo
, src_pseudo
);
321 add_to_edge_list (e
, move
, TRUE
);
325 /* Bitmap of pseudos local for the current loop. */
326 static bitmap local_pseudo_bitmap
;
328 /* This bitmap is used to find that we need to generate and use a new
329 pseudo-register when processing pseudos with the same original
331 static bitmap used_regno_bitmap
;
333 /* The following function changes (if necessary) pseudo-registers
334 inside loop given by loop tree node NODE. */
336 change_loop (struct ira_loop_tree_node
*node
)
341 pseudo_t pseudo
, father_pseudo
, *map
;
342 rtx insn
, original_reg
;
344 if (node
!= ira_loop_tree_root
)
347 if (node
->bb
!= NULL
)
349 FOR_BB_INSNS (node
->bb
, insn
)
355 if (ira_dump_file
!= NULL
)
356 fprintf (ira_dump_file
, "Changing RTL for loop %d (header bb%d)\n",
357 node
->loop
->num
, node
->loop
->header
->index
);
359 map
= ira_curr_loop_tree_node
->father
->regno_pseudo_map
;
360 EXECUTE_IF_SET_IN_REG_SET (ira_curr_loop_tree_node
->border_pseudos
,
363 pseudo
= pseudos
[i
];
364 regno
= PSEUDO_REGNO (pseudo
);
365 father_pseudo
= map
[regno
];
366 /* We generate the same register move because the reload can
367 put a pseudo into memory; in this case we will have live
368 range splitting. If it does not happen, the same
369 hard register moves will be removed. The worst case when
370 both pseudos are put into memory by the reload is
372 if (father_pseudo
!= NULL
373 && (PSEUDO_HARD_REGNO (pseudo
)
374 == PSEUDO_HARD_REGNO (father_pseudo
))
375 && (PSEUDO_HARD_REGNO (pseudo
) < 0
376 /* don't create copies because reload can spill a
377 pseudo set by copy although pseudo will not get
379 || reg_equiv_invariant_p
[regno
]
380 || reg_equiv_const
[regno
] != NULL_RTX
))
382 original_reg
= PSEUDO_REG (pseudo
);
383 if (father_pseudo
== NULL
384 || REGNO (PSEUDO_REG (father_pseudo
)) == REGNO (original_reg
))
387 fprintf (ira_dump_file
, " %i vs father %i:",
388 PSEUDO_HARD_REGNO (pseudo
),
389 PSEUDO_HARD_REGNO (father_pseudo
));
390 set_pseudo_reg (pseudo
, create_new_reg (original_reg
));
394 /* Rename locals: Local pseudos with same regno in different loops
395 might get the different hard register. So we need to change
397 bitmap_and_compl (local_pseudo_bitmap
,
398 ira_curr_loop_tree_node
->mentioned_pseudos
,
399 ira_curr_loop_tree_node
->border_pseudos
);
400 EXECUTE_IF_SET_IN_REG_SET (local_pseudo_bitmap
, 0, i
, bi
)
402 pseudo
= pseudos
[i
];
403 regno
= PSEUDO_REGNO (pseudo
);
404 if (PSEUDO_CAP_MEMBER (pseudo
) != NULL
)
406 used_p
= bitmap_bit_p (used_regno_bitmap
, regno
);
407 bitmap_set_bit (used_regno_bitmap
, regno
);
410 set_pseudo_reg (pseudo
, create_new_reg (PSEUDO_REG (pseudo
)));
414 /* The function returns nonzero if move lists on all edges in vector
417 eq_edge_move_lists_p (VEC(edge
,gc
) *vec
)
422 list
= EDGE_I (vec
, 0)->aux
;
423 for (i
= EDGE_COUNT (vec
) - 1; i
> 0; i
--)
424 if (! eq_move_lists_p (list
, EDGE_I (vec
, i
)->aux
))
429 /* Current regno pseudo map used to check moves in function
430 `can_move_through_p'. */
431 static pseudo_t
*curr_jump_map
;
433 /* This recursive function returns nonzero if list of moves LIST can
434 be moved through setting (if OUTPUT_P is nonzero) or reading
437 can_move_through_p (rtx
*loc
, struct move
*list
, int output_p
)
441 enum rtx_code code
= GET_CODE (*loc
);
443 code
= GET_CODE (*loc
);
446 int hard_regno
, regno
;
447 HARD_REG_SET regs
, move_regs
;
449 regno
= ORIGINAL_REGNO (*loc
);
450 hard_regno
= REGNO (*loc
);
451 if (hard_regno
>= FIRST_PSEUDO_REGISTER
)
452 hard_regno
= PSEUDO_HARD_REGNO (curr_jump_map
[regno
]);
454 CLEAR_HARD_REG_SET (regs
);
457 (regs
, reg_mode_hard_regset
[hard_regno
] [GET_MODE (*loc
)]);
458 for (;list
!= NULL
; list
= list
->next
)
460 && regno
== (int) ORIGINAL_REGNO (PSEUDO_REG (list
->from
)))
464 hard_regno
= PSEUDO_HARD_REGNO (list
->to
);
466 CLEAR_HARD_REG_SET (move_regs
);
470 reg_mode_hard_regset
[hard_regno
] [PSEUDO_MODE (list
->to
)]);
473 hard_regno
= PSEUDO_HARD_REGNO (list
->from
);
475 IOR_HARD_REG_SET (move_regs
,
477 [hard_regno
] [PSEUDO_MODE (list
->from
)]);
479 AND_HARD_REG_SET (move_regs
, regs
);
480 GO_IF_HARD_REG_EQUAL (move_regs
, zero_hard_reg_set
, cont
);
487 else if (code
== SET
)
489 if (! can_move_through_p (&SET_DEST (*loc
), list
, TRUE
))
491 if (! can_move_through_p (&SET_SRC (*loc
), list
, FALSE
))
495 else if (code
== CLOBBER
)
497 if (! can_move_through_p (&XEXP (*loc
, 0), list
, TRUE
))
501 else if (code
== MEM
)
503 if (! can_move_through_p (&XEXP (*loc
, 0), list
, FALSE
))
507 else if (code
== PRE_DEC
|| code
== POST_DEC
|| code
== PRE_INC
||
508 code
== POST_INC
|| code
== POST_MODIFY
|| code
== PRE_MODIFY
)
510 if (! can_move_through_p (&XEXP (*loc
, 0), list
, TRUE
))
512 if (! can_move_through_p (&XEXP (*loc
, 0), list
, FALSE
))
517 fmt
= GET_RTX_FORMAT (code
);
518 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
522 if (! can_move_through_p (&XEXP (*loc
, i
), list
, output_p
))
525 else if (fmt
[i
] == 'E')
529 for (j
= XVECLEN (*loc
, i
) - 1; j
>= 0; j
--)
531 if (! can_move_through_p (&XVECEXP (*loc
, i
, j
), list
, output_p
))
539 /* The function looks at all enter edges (if START_P) or exit edges of
540 basic block BB and puts move lists at the BB start or end if it is
541 possible. In other words, it decreases code duplication of
542 shuffling pseudos. */
544 unify_moves (basic_block bb
, int start_p
)
551 vec
= (start_p
? bb
->preds
: bb
->succs
);
552 if (EDGE_COUNT (vec
) == 0 || ! eq_edge_move_lists_p (vec
))
556 curr_jump_map
= IRA_BB_NODE (bb
)->father
->regno_pseudo_map
;
558 && (control_flow_insn_p (BB_END (bb
))
559 && ! can_move_through_p (&PATTERN (BB_END (bb
)), list
, FALSE
)))
562 for (i
= EDGE_COUNT (vec
) - 1; i
> 0; i
--)
565 free_move_list (e
->aux
);
569 at_bb_start
[bb
->index
] = list
;
571 at_bb_end
[bb
->index
] = list
;
574 /* Last move (in move sequence being processed) setting up the
575 corresponding hard register. */
576 static struct move
*hard_regno_last_set
[FIRST_PSEUDO_REGISTER
];
578 /* If the element value is equal to CURR_TICK then the corresponding
579 element in `hard_regno_last_set' is defined and correct. */
580 static int hard_regno_last_set_check
[FIRST_PSEUDO_REGISTER
];
582 /* Last move (in move sequence being processed) setting up the
583 corresponding pseudo. */
584 static struct move
**pseudo_last_set
;
586 /* If the element value is equal to CURR_TICK then the corresponding
587 element in `pseudo_last_set' is defined and correct. */
588 static int *pseudo_last_set_check
;
590 /* This array contains moves sorted topologically (depth-first) on
591 their dependency graph. */
592 static varray_type move_varray
;
594 /* The variable value is used to check correctness of values of
595 elements of arrays `hard_regno_last_set' and
596 `pseudo_last_set_check'. */
597 static int curr_tick
;
599 /* This recursive function traverses dependencies of MOVE and does
600 topological sorting (in depth-first order). */
602 traverse_moves (struct move
*move
)
608 move
->visited_p
= TRUE
;
609 for (i
= move
->deps_num
- 1; i
>= 0; i
--)
610 traverse_moves (move
->deps
[i
]);
611 VARRAY_PUSH_GENERIC_PTR (move_varray
, move
);
614 /* The function removes unnecessary moves in the LIST, makes
615 topological sorting, and removes cycles on hard reg dependencies by
616 introducing new pseudos assigned to memory and additional moves.
617 It returns the result move list. */
619 modify_move_list (struct move
*list
)
621 int i
, n
, nregs
, hard_regno
;
622 pseudo_t to
, from
, new_pseudo
;
623 struct move
*move
, *new_move
, *set_move
, *first
, *last
;
627 /* Create move deps. */
629 for (move
= list
; move
!= NULL
; move
= move
->next
)
632 if ((hard_regno
= PSEUDO_HARD_REGNO (to
)) < 0)
634 nregs
= hard_regno_nregs
[hard_regno
] [PSEUDO_MODE (to
)];
635 for (i
= 0; i
< nregs
; i
++)
637 hard_regno_last_set
[hard_regno
+ i
] = move
;
638 hard_regno_last_set_check
[hard_regno
+ i
] = curr_tick
;
641 for (move
= list
; move
!= NULL
; move
= move
->next
)
645 if ((hard_regno
= PSEUDO_HARD_REGNO (from
)) >= 0)
647 nregs
= hard_regno_nregs
[hard_regno
] [PSEUDO_MODE (from
)];
648 for (n
= i
= 0; i
< nregs
; i
++)
649 if (hard_regno_last_set_check
[hard_regno
+ i
] == curr_tick
650 && (PSEUDO_REGNO (hard_regno_last_set
[hard_regno
+ i
]->to
)
651 != PSEUDO_REGNO (from
)))
653 move
->deps
= ira_allocate (n
* sizeof (struct move
*));
654 for (n
= i
= 0; i
< nregs
; i
++)
655 if (hard_regno_last_set_check
[hard_regno
+ i
] == curr_tick
656 && (PSEUDO_REGNO (hard_regno_last_set
[hard_regno
+ i
]->to
)
657 != PSEUDO_REGNO (from
)))
658 move
->deps
[n
++] = hard_regno_last_set
[hard_regno
+ i
];
662 /* Topological sorting: */
663 VARRAY_POP_ALL (move_varray
);
664 for (move
= list
; move
!= NULL
; move
= move
->next
)
665 traverse_moves (move
);
667 for (i
= VARRAY_ACTIVE_SIZE (move_varray
) - 1; i
>= 0; i
--)
669 move
= VARRAY_GENERIC_PTR (move_varray
, i
);
675 first
= VARRAY_TOP_GENERIC_PTR (move_varray
);
676 /* Removing cycles: */
678 VARRAY_POP_ALL (move_varray
);
679 for (move
= first
; move
!= NULL
; move
= move
->next
)
683 if ((hard_regno
= PSEUDO_HARD_REGNO (from
)) >= 0)
685 nregs
= hard_regno_nregs
[hard_regno
] [PSEUDO_MODE (from
)];
686 for (i
= 0; i
< nregs
; i
++)
687 if (hard_regno_last_set_check
[hard_regno
+ i
] == curr_tick
688 && PSEUDO_HARD_REGNO (hard_regno_last_set
689 [hard_regno
+ i
]->to
) >= 0)
691 set_move
= hard_regno_last_set
[hard_regno
+ i
];
693 = create_pseudo (PSEUDO_REGNO (set_move
->to
), FALSE
,
694 PSEUDO_LOOP_TREE_NODE (set_move
->to
));
695 /* That is a minimum to emit pseudos correctly and for
696 setting reg_renumber. */
697 PSEUDO_HARD_REGNO (new_pseudo
) = -1;
698 PSEUDO_REG (new_pseudo
)
699 = create_new_reg (PSEUDO_REG (set_move
->to
));
700 new_move
= create_move (set_move
->to
, new_pseudo
);
701 set_move
->to
= new_pseudo
;
702 VARRAY_PUSH_GENERIC_PTR (move_varray
, new_move
);
706 if ((hard_regno
= PSEUDO_HARD_REGNO (to
)) < 0)
708 nregs
= hard_regno_nregs
[hard_regno
] [PSEUDO_MODE (to
)];
709 for (i
= 0; i
< nregs
; i
++)
711 hard_regno_last_set
[hard_regno
+ i
] = move
;
712 hard_regno_last_set_check
[hard_regno
+ i
] = curr_tick
;
715 for (i
= VARRAY_ACTIVE_SIZE (move_varray
) - 1; i
>= 0; i
--)
717 move
= VARRAY_GENERIC_PTR (move_varray
, i
);
725 /* The function generates rtx move insns from the move list LIST. It
726 updates allocation cost using move execution frequency FREQ. */
728 emit_move_list (struct move
*list
, int freq
)
732 enum machine_mode mode
;
733 enum reg_class cover_class
;
735 list
= modify_move_list (list
);
737 for (; list
!= NULL
; list
= list
->next
)
739 emit_move_insn (PSEUDO_REG (list
->to
), PSEUDO_REG (list
->from
));
740 mode
= PSEUDO_MODE (list
->to
);
741 cover_class
= PSEUDO_COVER_CLASS (list
->to
);
743 if (PSEUDO_HARD_REGNO (list
->to
) < 0)
745 if (PSEUDO_HARD_REGNO (list
->from
) >= 0)
747 cost
= memory_move_cost
[mode
] [cover_class
] [0] * freq
;
751 else if (PSEUDO_HARD_REGNO (list
->from
) < 0)
753 if (PSEUDO_HARD_REGNO (list
->to
) >= 0)
755 cost
= memory_move_cost
[mode
] [cover_class
] [0] * freq
;
761 cost
= register_move_cost
[mode
] [cover_class
] [cover_class
] * freq
;
762 shuffle_cost
+= cost
;
764 overall_cost
+= cost
;
766 result
= get_insns ();
771 /* The function generates rtx move insns from move lists attached to
772 basic blocks and edges. */
783 if (at_bb_start
[bb
->index
] != NULL
)
785 insns
= emit_move_list (at_bb_start
[bb
->index
],
786 REG_FREQ_FROM_BB (bb
));
789 tmp
= NEXT_INSN (tmp
);
790 if (NOTE_INSN_BASIC_BLOCK_P (tmp
))
791 tmp
= NEXT_INSN (tmp
);
792 if (tmp
== BB_HEAD (bb
))
793 emit_insn_before (insns
, tmp
);
794 else if (tmp
!= NULL_RTX
)
795 emit_insn_after (insns
, PREV_INSN (tmp
));
797 emit_insn_after (insns
, get_last_insn ());
800 if (at_bb_end
[bb
->index
] != NULL
)
802 insns
= emit_move_list (at_bb_end
[bb
->index
],
803 REG_FREQ_FROM_BB (bb
));
804 if (! control_flow_insn_p (BB_END (bb
)))
805 emit_insn_after (insns
, BB_END (bb
));
807 emit_insn_before (insns
, BB_END (bb
));
810 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
814 ira_assert ((e
->flags
& EDGE_ABNORMAL
) == 0
815 || ! EDGE_CRITICAL_P (e
));
817 (emit_move_list (e
->aux
,
818 REG_FREQ_FROM_EDGE_FREQ (EDGE_FREQUENCY (e
))),
820 if (e
->src
->next_bb
!= e
->dest
)
821 additional_jumps_num
++;
826 /* Entry function changing code and generating pseudo shuffling for
827 the regional register allocation. */
836 at_bb_start
= ira_allocate (sizeof (struct move
*) * last_basic_block
);
837 memset (at_bb_start
, 0, sizeof (struct move
*) * last_basic_block
);
838 at_bb_end
= ira_allocate (sizeof (struct move
*) * last_basic_block
);
839 memset (at_bb_end
, 0, sizeof (struct move
*) * last_basic_block
);
840 for (i
= 0; i
< pseudos_num
; i
++)
841 if (PSEUDO_CAP_MEMBER (pseudos
[i
]) == NULL
)
842 PSEUDO_REG (pseudos
[i
]) = regno_reg_rtx
[PSEUDO_REGNO (pseudos
[i
])];
843 local_pseudo_bitmap
= ira_allocate_bitmap ();
844 used_regno_bitmap
= ira_allocate_bitmap ();
845 max_regno_before_changing
= max_reg_num ();
846 traverse_loop_tree (ira_loop_tree_root
, change_loop
, NULL
);
847 ira_free_bitmap (used_regno_bitmap
);
848 ira_free_bitmap (local_pseudo_bitmap
);
851 at_bb_start
[bb
->index
] = NULL
;
852 at_bb_end
[bb
->index
] = NULL
;
853 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
854 if (e
->dest
!= EXIT_BLOCK_PTR
)
855 generate_edge_moves (e
);
857 pseudo_last_set
= ira_allocate (sizeof (struct move
*) * max_reg_num ());
858 pseudo_last_set_check
= ira_allocate (sizeof (int) * max_reg_num ());
859 memset (pseudo_last_set_check
, 0, sizeof (int) * max_reg_num ());
860 memset (hard_regno_last_set_check
, 0, sizeof (hard_regno_last_set_check
));
863 unify_moves (bb
, TRUE
);
865 unify_moves (bb
, FALSE
);
866 VARRAY_GENERIC_PTR_NOGC_INIT (move_varray
, pseudos_num
, "ordered moves");
871 free_move_list (at_bb_start
[bb
->index
]);
872 free_move_list (at_bb_end
[bb
->index
]);
873 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
875 free_move_list (e
->aux
);
879 VARRAY_FREE (move_varray
);
880 ira_free (pseudo_last_set_check
);
881 ira_free (pseudo_last_set
);
882 commit_edge_insertions ();
883 ira_free (at_bb_end
);
884 ira_free (at_bb_start
);