/* gcc/ira-emit.c (GCC repository); revision associated with
   PR bootstrap/48153.  Blob b90adb71da3747694765ef1b5aa4207b7da22d19.  */
1 /* Integrated Register Allocator. Changing code and generating moves.
2 Copyright (C) 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "regs.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "target.h"
31 #include "flags.h"
32 #include "obstack.h"
33 #include "bitmap.h"
34 #include "hard-reg-set.h"
35 #include "basic-block.h"
36 #include "expr.h"
37 #include "recog.h"
38 #include "params.h"
39 #include "timevar.h"
40 #include "tree-pass.h"
41 #include "output.h"
42 #include "reload.h"
43 #include "df.h"
44 #include "ira-int.h"
47 typedef struct move *move_t;
49 /* The structure represents an allocno move.  Both allocnos have the
50    same original regno but different allocation.  */
51 struct move
53   /* The allocnos involved in the move.  */
54 ira_allocno_t from, to;
55 /* The next move in the move sequence.  */
56 move_t next;
57 /* Used for finding dependencies (set during the depth-first walk in
   traverse_moves).  */
58 bool visited_p;
59 /* The size of the following array.  */
60 int deps_num;
61 /* Moves on which the given move depends.  Dependencies can be cyclic:
62    that means we need a temporary to generate the moves.  Sequence
63    A1->A2, B1->B2 where A1 and B2 are assigned to reg R1 and A2 and
64    B1 are assigned to reg R2 is an example of such cyclic
65    dependencies.  */
66 move_t *deps;
67 /* First insn generated for the move (filled in by emit_move_list).  */
68 rtx insn;
71 /* Array of moves (indexed by BB index) which should be put at the
72    start/end of the corresponding basic blocks.  A NULL entry means no
   moves are needed for that block.  */
73 static move_t *at_bb_start, *at_bb_end;
75 /* Max regno before renaming some pseudo-registers.  For example, the
76    same pseudo-register can be renamed in a loop if its allocation is
77    different outside the loop.  Registers at or above this bound were
   created by the renaming itself and are never changed again (see
   change_regs).  */
78 static int max_regno_before_changing;
80 /* Return new move of allocnos TO and FROM. */
81 static move_t
82 create_move (ira_allocno_t to, ira_allocno_t from)
84 move_t move;
86 move = (move_t) ira_allocate (sizeof (struct move));
87 move->deps = NULL;
88 move->deps_num = 0;
89 move->to = to;
90 move->from = from;
91 move->next = NULL;
92 move->insn = NULL_RTX;
93 move->visited_p = false;
94 return move;
97 /* Free memory for MOVE and its dependencies. */
98 static void
99 free_move (move_t move)
101 if (move->deps != NULL)
102 ira_free (move->deps);
103 ira_free (move);
106 /* Free memory for list of the moves given by its HEAD. */
107 static void
108 free_move_list (move_t head)
110 move_t next;
112 for (; head != NULL; head = next)
114 next = head->next;
115 free_move (head);
119 /* Return TRUE if the the move list LIST1 and LIST2 are equal (two
120 moves are equal if they involve the same allocnos). */
121 static bool
122 eq_move_lists_p (move_t list1, move_t list2)
124 for (; list1 != NULL && list2 != NULL;
125 list1 = list1->next, list2 = list2->next)
126 if (list1->from != list2->from || list1->to != list2->to)
127 return false;
128 return list1 == list2;
131 /* Print move list LIST into file F. */
132 static void
133 print_move_list (FILE *f, move_t list)
135 for (; list != NULL; list = list->next)
136 fprintf (f, " a%dr%d->a%dr%d",
137 ALLOCNO_NUM (list->from), ALLOCNO_REGNO (list->from),
138 ALLOCNO_NUM (list->to), ALLOCNO_REGNO (list->to));
139 fprintf (f, "\n");
142 extern void ira_debug_move_list (move_t list);
144 /* Print move list LIST into stderr. */
145 void
146 ira_debug_move_list (move_t list)
148 print_move_list (stderr, list);
151 /* This recursive function changes pseudo-registers in *LOC if it is
152 necessary. The function returns TRUE if a change was done. */
153 static bool
154 change_regs (rtx *loc)
156 int i, regno, result = false;
157 const char *fmt;
158 enum rtx_code code;
159 rtx reg;
161 if (*loc == NULL_RTX)
162 return false;
163 code = GET_CODE (*loc);
164 if (code == REG)
166 regno = REGNO (*loc);
167 if (regno < FIRST_PSEUDO_REGISTER)
168 return false;
169 if (regno >= max_regno_before_changing)
170 /* It is a shared register which was changed already. */
171 return false;
172 if (ira_curr_regno_allocno_map[regno] == NULL)
173 return false;
174 reg = ALLOCNO_REG (ira_curr_regno_allocno_map[regno]);
175 if (reg == *loc)
176 return false;
177 *loc = reg;
178 return true;
181 fmt = GET_RTX_FORMAT (code);
182 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
184 if (fmt[i] == 'e')
185 result = change_regs (&XEXP (*loc, i)) || result;
186 else if (fmt[i] == 'E')
188 int j;
190 for (j = XVECLEN (*loc, i) - 1; j >= 0; j--)
191 result = change_regs (&XVECEXP (*loc, i, j)) || result;
194 return result;
197 /* Attach MOVE to the edge E. The move is attached to the head of the
198 list if HEAD_P is TRUE. */
199 static void
200 add_to_edge_list (edge e, move_t move, bool head_p)
202 move_t last;
204 if (head_p || e->aux == NULL)
206 move->next = (move_t) e->aux;
207 e->aux = move;
209 else
211 for (last = (move_t) e->aux; last->next != NULL; last = last->next)
213 last->next = move;
214 move->next = NULL;
218 /* Create and return new pseudo-register with the same attributes as
219 ORIGINAL_REG. */
220 static rtx
221 create_new_reg (rtx original_reg)
223 rtx new_reg;
225 new_reg = gen_reg_rtx (GET_MODE (original_reg));
226 ORIGINAL_REGNO (new_reg) = ORIGINAL_REGNO (original_reg);
227 REG_USERVAR_P (new_reg) = REG_USERVAR_P (original_reg);
228 REG_POINTER (new_reg) = REG_POINTER (original_reg);
229 REG_ATTRS (new_reg) = REG_ATTRS (original_reg);
230 if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
231 fprintf (ira_dump_file, " Creating newreg=%i from oldreg=%i\n",
232 REGNO (new_reg), REGNO (original_reg));
233 return new_reg;
236 /* Return TRUE if loop given by SUBNODE inside the loop given by
237 NODE. */
238 static bool
239 subloop_tree_node_p (ira_loop_tree_node_t subnode, ira_loop_tree_node_t node)
241 for (; subnode != NULL; subnode = subnode->parent)
242 if (subnode == node)
243 return true;
244 return false;
247 /* Set up member `reg' to REG for allocnos which have the same regno as
248    ALLOCNO and which are inside the loop corresponding to ALLOCNO.
   Also propagates REG through ALLOCNO's cap chain and marks ancestor
   allocnos of the same regno as having a renamed child.  */
249 static void
250 set_allocno_reg (ira_allocno_t allocno, rtx reg)
252   int regno;
253 ira_allocno_t a;
254 ira_loop_tree_node_t node;
256 node = ALLOCNO_LOOP_TREE_NODE (allocno);
/* Pass 1: assign REG to every allocno with the same original regno
   whose loop tree node lies inside ALLOCNO's loop.  */
257 for (a = ira_regno_allocno_map[ALLOCNO_REGNO (allocno)];
258 a != NULL;
259 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
260 if (subloop_tree_node_p (ALLOCNO_LOOP_TREE_NODE (a), node))
261 ALLOCNO_REG (a) = reg;
/* Pass 2: propagate REG up the chain of caps built on ALLOCNO.  */
262 for (a = ALLOCNO_CAP (allocno); a != NULL; a = ALLOCNO_CAP (a))
263 ALLOCNO_REG (a) = reg;
264 regno = ALLOCNO_REGNO (allocno);
/* Pass 3: walk upward through caps and parent loop nodes, setting
   CHILD_RENAMED_P on each ancestor allocno of the same regno; stop at
   the first ancestor already marked (everything above it was marked by
   an earlier call) or when the loop tree root is passed.  */
265 for (a = allocno;;)
267       if (a == NULL || (a = ALLOCNO_CAP (a)) == NULL)
269           node = node->parent;
270 if (node == NULL)
271 break;
272 a = node->regno_allocno_map[regno];
274       if (a == NULL)
275 continue;
276 if (ALLOCNO_CHILD_RENAMED_P (a))
277 break;
278 ALLOCNO_CHILD_RENAMED_P (a) = true;
282 /* Return true if there is an entry to given loop not from its parent
283    (or grandparent) block.  For example, it is possible for two
284    adjacent loops inside another loop.  */
285 static bool
286 entered_from_non_parent_p (ira_loop_tree_node_t loop_node)
288   ira_loop_tree_node_t bb_node, src_loop_node, parent;
289 edge e;
290 edge_iterator ei;
/* Scan every basic block directly inside LOOP_NODE and every incoming
   edge whose source lies in a different loop tree node.  */
292 for (bb_node = loop_node->children; bb_node != NULL; bb_node = bb_node->next)
293 if (bb_node->bb != NULL)
295         FOR_EACH_EDGE (e, ei, bb_node->bb->preds)
296 if (e->src != ENTRY_BLOCK_PTR
297 && (src_loop_node = IRA_BB_NODE (e->src)->parent) != loop_node)
/* First check: is the edge source inside LOOP_NODE (i.e. in a
   nested loop)?  Then it is an exit from that nested loop, not
   an entry to LOOP_NODE.  */
299               for (parent = src_loop_node->parent;
300 parent != NULL;
301 parent = parent->parent)
302 if (parent == loop_node)
303 break;
304 if (parent != NULL)
305 /* That is an exit from a nested loop -- skip it.  */
306 continue;
/* Second check: is the edge source an ancestor of LOOP_NODE?
   If not, the loop is entered from a non-parent block.  */
307 for (parent = loop_node->parent;
308 parent != NULL;
309 parent = parent->parent)
310 if (src_loop_node == parent)
311 break;
312 if (parent == NULL)
313 return true;
316   return false;
319 /* Set up ENTERED_FROM_NON_PARENT_P for each loop region. */
320 static void
321 setup_entered_from_non_parent_p (void)
323 unsigned int i;
324 loop_p loop;
326 FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
327 if (ira_loop_nodes[i].regno_allocno_map != NULL)
328 ira_loop_nodes[i].entered_from_non_parent_p
329 = entered_from_non_parent_p (&ira_loop_nodes[i]);
332 /* Return TRUE if move of SRC_ALLOCNO (assigned to hard register) to
333    DEST_ALLOCNO (assigned to memory) can be removed because it does
334    not change value of the destination.  One possible reason for this
335    is the situation when SRC_ALLOCNO is not modified in the
336    corresponding loop.  */
337 static bool
338 store_can_be_removed_p (ira_allocno_t src_allocno, ira_allocno_t dest_allocno)
340   int regno, orig_regno;
341 ira_allocno_t a;
342 ira_loop_tree_node_t node;
344 ira_assert (ALLOCNO_CAP_MEMBER (src_allocno) == NULL
345 && ALLOCNO_CAP_MEMBER (dest_allocno) == NULL);
346 orig_regno = ALLOCNO_REGNO (src_allocno);
347 regno = REGNO (ALLOCNO_REG (dest_allocno));
/* Walk up the loop tree from the source allocno's loop.  The store is
   removable only if we reach a level allocated to DEST_ALLOCNO's reg
   without meeting any modification of the pseudo (or any loop with a
   complicated non-parent entry).  */
348 for (node = ALLOCNO_LOOP_TREE_NODE (src_allocno);
349 node != NULL;
350 node = node->parent)
352       a = node->regno_allocno_map[orig_regno];
353 ira_assert (a != NULL);
354 if (REGNO (ALLOCNO_REG (a)) == (unsigned) regno)
355 /* We achieved the destination and everything is ok.  */
356 return true;
357 else if (bitmap_bit_p (node->modified_regnos, orig_regno))
358 return false;
359 else if (node->entered_from_non_parent_p)
360 /* If there is a path from a destination loop block to the
361    source loop header containing basic blocks of non-parents
362    (grandparents) of the source loop, we should have checked
363    modifications of the pseudo on this path too to decide
364    about possibility to remove the store.  It could be done by
365    solving a data-flow problem.  Unfortunately such global
366    solution would complicate IR flattening.  Therefore we just
367    prohibit removal of the store in such complicated case.  */
368 return false;
370   /* It is actually a loop entry -- do not remove the store.  */
371 return false;
374 /* Generate and attach moves to the edge E.  This looks at the final
375    regnos of allocnos living on the edge with the same original regno
376    to figure out when moves should be generated.  */
377 static void
378 generate_edge_moves (edge e)
380   ira_loop_tree_node_t src_loop_node, dest_loop_node;
381 unsigned int regno;
382 bitmap_iterator bi;
383 ira_allocno_t src_allocno, dest_allocno, *src_map, *dest_map;
384 move_t move;
386 src_loop_node = IRA_BB_NODE (e->src)->parent;
387 dest_loop_node = IRA_BB_NODE (e->dest)->parent;
388 e->aux = NULL;
/* Moves are only needed on edges that cross a region border.  */
389 if (src_loop_node == dest_loop_node)
390 return;
391 src_map = src_loop_node->regno_allocno_map;
392 dest_map = dest_loop_node->regno_allocno_map;
/* Consider each pseudo live both out of E->src and into E->dest,
   i.e. live across the edge.  */
393 EXECUTE_IF_SET_IN_REG_SET (DF_LR_IN (e->dest),
394 FIRST_PSEUDO_REGISTER, regno, bi)
395 if (bitmap_bit_p (DF_LR_OUT (e->src), regno))
397         src_allocno = src_map[regno];
398 dest_allocno = dest_map[regno];
/* No move needed when both region allocnos ended up in the same
   place.  */
399 if (REGNO (ALLOCNO_REG (src_allocno))
400 == REGNO (ALLOCNO_REG (dest_allocno)))
401 continue;
402 /* Remove unnecessary stores at the region exit.  We should do
403    this for readonly memory for sure and this is guaranteed by
404    that we never generate moves on region borders (see
405    checking ira_reg_equiv_invariant_p in function
406    change_loop).  */
407 if (ALLOCNO_HARD_REGNO (dest_allocno) < 0
408 && ALLOCNO_HARD_REGNO (src_allocno) >= 0
409 && store_can_be_removed_p (src_allocno, dest_allocno))
411             ALLOCNO_MEM_OPTIMIZED_DEST (src_allocno) = dest_allocno;
412 ALLOCNO_MEM_OPTIMIZED_DEST_P (dest_allocno) = true;
413 if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
414 fprintf (ira_dump_file, "      Remove r%d:a%d->a%d(mem)\n",
415 regno, ALLOCNO_NUM (src_allocno),
416 ALLOCNO_NUM (dest_allocno));
417 continue;
419         move = create_move (dest_allocno, src_allocno);
420 add_to_edge_list (e, move, true);
424 /* Bitmap of allocnos local for the current loop.  Computed in
   change_loop as all_allocnos minus border_allocnos.  */
425 static bitmap local_allocno_bitmap;
427 /* This bitmap is used to find that we need to generate and to use a
428    new pseudo-register when processing allocnos with the same original
429    regno.  */
430 static bitmap used_regno_bitmap;
432 /* This bitmap contains regnos of allocnos which were renamed locally
433    because the allocnos correspond to disjoint live ranges in loops
434    with a common parent.  */
435 static bitmap renamed_regno_bitmap;
437 /* Change (if necessary) pseudo-registers inside loop given by loop
438    tree node NODE.  Called through ira_traverse_loop_tree for every
   node: BB nodes get their insns rewritten via change_regs; loop
   nodes get fresh pseudos created for border and local allocnos whose
   allocation differs from the parent region.  */
439 static void
440 change_loop (ira_loop_tree_node_t node)
442   bitmap_iterator bi;
443 unsigned int i;
444 int regno;
445 bool used_p;
446 ira_allocno_t allocno, parent_allocno, *map;
447 rtx insn, original_reg;
448 enum reg_class cover_class;
449 ira_loop_tree_node_t parent;
451   if (node != ira_loop_tree_root)
/* A basic-block node: substitute the renamed pseudos into each insn
   and tell DF about any changes.  */
454       if (node->bb != NULL)
456           FOR_BB_INSNS (node->bb, insn)
457 if (INSN_P (insn) && change_regs (&insn))
459               df_insn_rescan (insn);
460 df_notes_rescan (insn);
462           return;
465       if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
466 fprintf (ira_dump_file,
467 "      Changing RTL for loop %d (header bb%d)\n",
468 node->loop->num, node->loop->header->index);
470       parent = ira_curr_loop_tree_node->parent;
471 map = parent->regno_allocno_map;
/* Rename border allocnos (live on the region boundary) which got a
   different allocation than in the parent region.  */
472 EXECUTE_IF_SET_IN_REG_SET (ira_curr_loop_tree_node->border_allocnos,
473 0, i, bi)
475           allocno = ira_allocnos[i];
476 regno = ALLOCNO_REGNO (allocno);
477 cover_class = ALLOCNO_COVER_CLASS (allocno);
478 parent_allocno = map[regno];
479 ira_assert (regno < ira_reg_equiv_len);
480 /* We generate the same hard register move because the
481    reload pass can put an allocno into memory in this case
482    we will have live range splitting.  If it does not happen
483    such the same hard register moves will be removed.  The
484    worst case when the both allocnos are put into memory by
485    the reload is very rare.  */
486 if (parent_allocno != NULL
487 && (ALLOCNO_HARD_REGNO (allocno)
488 == ALLOCNO_HARD_REGNO (parent_allocno))
489 && (ALLOCNO_HARD_REGNO (allocno) < 0
490 || (parent->reg_pressure[cover_class] + 1
491 <= ira_available_class_regs[cover_class])
492 || TEST_HARD_REG_BIT (ira_prohibited_mode_move_regs
493 [ALLOCNO_MODE (allocno)],
494 ALLOCNO_HARD_REGNO (allocno))
495 /* don't create copies because reload can spill an
496    allocno set by copy although the allocno will not
497    get memory slot.  */
498 || ira_reg_equiv_invariant_p[regno]
499 || ira_reg_equiv_const[regno] != NULL_RTX))
500 continue;
501 original_reg = ALLOCNO_REG (allocno);
502 if (parent_allocno == NULL
503 || REGNO (ALLOCNO_REG (parent_allocno)) == REGNO (original_reg))
505               if (internal_flag_ira_verbose > 3 && ira_dump_file)
506 fprintf (ira_dump_file, "  %i vs parent %i:",
507 ALLOCNO_HARD_REGNO (allocno),
508 ALLOCNO_HARD_REGNO (parent_allocno));
509 set_allocno_reg (allocno, create_new_reg (original_reg));
513   /* Rename locals: Local allocnos with same regno in different loops
514      might get the different hard register.  So we need to change
515      ALLOCNO_REG.  */
516 bitmap_and_compl (local_allocno_bitmap,
517 ira_curr_loop_tree_node->all_allocnos,
518 ira_curr_loop_tree_node->border_allocnos);
519 EXECUTE_IF_SET_IN_REG_SET (local_allocno_bitmap, 0, i, bi)
521       allocno = ira_allocnos[i];
522 regno = ALLOCNO_REGNO (allocno);
523 if (ALLOCNO_CAP_MEMBER (allocno) != NULL)
524 continue;
/* bitmap_set_bit returns true on the first insertion, so USED_P is
   true only when this regno was already seen in another loop.  */
525 used_p = !bitmap_set_bit (used_regno_bitmap, regno);
526 ALLOCNO_SOMEWHERE_RENAMED_P (allocno) = true;
527 if (! used_p)
528 continue;
529 bitmap_set_bit (renamed_regno_bitmap, regno);
530 set_allocno_reg (allocno, create_new_reg (ALLOCNO_REG (allocno)));
534 /* Process to set up flag somewhere_renamed_p. */
535 static void
536 set_allocno_somewhere_renamed_p (void)
538 unsigned int regno;
539 ira_allocno_t allocno;
540 ira_allocno_iterator ai;
542 FOR_EACH_ALLOCNO (allocno, ai)
544 regno = ALLOCNO_REGNO (allocno);
545 if (bitmap_bit_p (renamed_regno_bitmap, regno)
546 && REGNO (ALLOCNO_REG (allocno)) == regno)
547 ALLOCNO_SOMEWHERE_RENAMED_P (allocno) = true;
551 /* Return TRUE if move lists on all edges given in vector VEC are
552 equal. */
553 static bool
554 eq_edge_move_lists_p (VEC(edge,gc) *vec)
556 move_t list;
557 int i;
559 list = (move_t) EDGE_I (vec, 0)->aux;
560 for (i = EDGE_COUNT (vec) - 1; i > 0; i--)
561 if (! eq_move_lists_p (list, (move_t) EDGE_I (vec, i)->aux))
562 return false;
563 return true;
566 /* Look at all entry edges (if START_P) or exit edges of basic block
567    BB and put move lists at the BB start or end if it is possible.  In
568    other words, this decreases code duplication of allocno moves.  */
569 static void
570 unify_moves (basic_block bb, bool start_p)
572   int i;
573 edge e;
574 move_t list;
575 VEC(edge,gc) *vec;
577   vec = (start_p ? bb->preds : bb->succs);
/* Hoisting is only possible when every edge carries the same list.  */
578 if (EDGE_COUNT (vec) == 0 || ! eq_edge_move_lists_p (vec))
579 return;
580 e = EDGE_I (vec, 0);
581 list = (move_t) e->aux;
/* Moves cannot be placed after a control-flow insn at the block end.  */
582 if (! start_p && control_flow_insn_p (BB_END (bb)))
583 return;
584 e->aux = NULL;
/* Keep edge 0's list; free the (equal) duplicates on all other edges.  */
585 for (i = EDGE_COUNT (vec) - 1; i > 0; i--)
587       e = EDGE_I (vec, i);
588 free_move_list ((move_t) e->aux);
589 e->aux = NULL;
591   if (start_p)
592 at_bb_start[bb->index] = list;
593 else
594 at_bb_end[bb->index] = list;
597 /* Last move (in move sequence being processed) setting up the
598    corresponding hard register.  */
599 static move_t hard_regno_last_set[FIRST_PSEUDO_REGISTER];
601 /* If the element value is equal to CURR_TICK then the corresponding
602    element in `hard_regno_last_set' is defined and correct.  */
603 static int hard_regno_last_set_check[FIRST_PSEUDO_REGISTER];
605 /* Last move (in move sequence being processed) setting up the
606    corresponding allocno.  */
607 static move_t *allocno_last_set;
609 /* If the element value is equal to CURR_TICK then the corresponding
610    element in `allocno_last_set' is defined and correct.  */
611 static int *allocno_last_set_check;
613 /* Definition of vector of moves.  */
614 DEF_VEC_P(move_t);
615 DEF_VEC_ALLOC_P(move_t, heap);
617 /* This vec contains moves sorted topologically (depth-first) on their
618    dependency graph.  */
619 static VEC(move_t,heap) *move_vec;
621 /* The variable value is used to check correctness of values of
622    elements of arrays `hard_regno_last_set_check' and
623    `allocno_last_set_check'; bumping it invalidates all cached
   entries at once without clearing the arrays.  */
624 static int curr_tick;
626 /* This recursive function traverses dependencies of MOVE and produces
627 topological sorting (in depth-first order). */
628 static void
629 traverse_moves (move_t move)
631 int i;
633 if (move->visited_p)
634 return;
635 move->visited_p = true;
636 for (i = move->deps_num - 1; i >= 0; i--)
637 traverse_moves (move->deps[i]);
638 VEC_safe_push (move_t, heap, move_vec, move);
641 /* Remove unnecessary moves in the LIST, makes topological sorting,
642    and removes cycles on hard reg dependencies by introducing new
643    allocnos assigned to memory and additional moves.  It returns the
644    result move list.  */
645 static move_t
646 modify_move_list (move_t list)
648   int i, n, nregs, hard_regno;
649 ira_allocno_t to, from;
650 move_t move, new_move, set_move, first, last;
652   if (list == NULL)
653 return NULL;
654 /* Create move deps.  First record, for every hard register, the last
   move in LIST writing it.  */
655 curr_tick++;
656 for (move = list; move != NULL; move = move->next)
658       to = move->to;
659 if ((hard_regno = ALLOCNO_HARD_REGNO (to)) < 0)
660 continue;
661 nregs = hard_regno_nregs[hard_regno][ALLOCNO_MODE (to)];
662 for (i = 0; i < nregs; i++)
664           hard_regno_last_set[hard_regno + i] = move;
665 hard_regno_last_set_check[hard_regno + i] = curr_tick;
/* A move depends on every move that writes one of the hard registers
   it reads from (excluding writes of its own regno).  Two passes:
   count the deps, then allocate and fill the array.  */
668   for (move = list; move != NULL; move = move->next)
670       from = move->from;
671 to = move->to;
672 if ((hard_regno = ALLOCNO_HARD_REGNO (from)) >= 0)
674           nregs = hard_regno_nregs[hard_regno][ALLOCNO_MODE (from)];
675 for (n = i = 0; i < nregs; i++)
676 if (hard_regno_last_set_check[hard_regno + i] == curr_tick
677 && (ALLOCNO_REGNO (hard_regno_last_set[hard_regno + i]->to)
678 != ALLOCNO_REGNO (from)))
679 n++;
680 move->deps = (move_t *) ira_allocate (n * sizeof (move_t));
681 for (n = i = 0; i < nregs; i++)
682 if (hard_regno_last_set_check[hard_regno + i] == curr_tick
683 && (ALLOCNO_REGNO (hard_regno_last_set[hard_regno + i]->to)
684 != ALLOCNO_REGNO (from)))
685 move->deps[n++] = hard_regno_last_set[hard_regno + i];
686 move->deps_num = n;
689   /* Topological sorting: relink LIST in the order produced by the
690      depth-first traversal.  */
690 VEC_truncate (move_t, move_vec, 0);
691 for (move = list; move != NULL; move = move->next)
692 traverse_moves (move);
693 last = NULL;
694 for (i = (int) VEC_length (move_t, move_vec) - 1; i >= 0; i--)
696       move = VEC_index (move_t, move_vec, i);
697 move->next = NULL;
698 if (last != NULL)
699 last->next = move;
700 last = move;
702   first = VEC_last (move_t, move_vec);
703 /* Removing cycles: when a move reads a hard register that a later
   (still pending) move writes, break the cycle by parking the written
   value into a fresh memory allocno first.  */
704 curr_tick++;
705 VEC_truncate (move_t, move_vec, 0);
706 for (move = first; move != NULL; move = move->next)
708       from = move->from;
709 to = move->to;
710 if ((hard_regno = ALLOCNO_HARD_REGNO (from)) >= 0)
712           nregs = hard_regno_nregs[hard_regno][ALLOCNO_MODE (from)];
713 for (i = 0; i < nregs; i++)
714 if (hard_regno_last_set_check[hard_regno + i] == curr_tick
715 && ALLOCNO_HARD_REGNO
716 (hard_regno_last_set[hard_regno + i]->to) >= 0)
718                 int n, j;
719 ira_allocno_t new_allocno;
721                 set_move = hard_regno_last_set[hard_regno + i];
722 /* It does not matter what loop_tree_node (of TO or
723    FROM) to use for the new allocno because of
724    subsequent IRA internal representation
725    flattening.  */
726 new_allocno
727 = ira_create_allocno (ALLOCNO_REGNO (set_move->to), false,
728 ALLOCNO_LOOP_TREE_NODE (set_move->to));
729 ALLOCNO_MODE (new_allocno) = ALLOCNO_MODE (set_move->to);
730 ira_set_allocno_cover_class
731 (new_allocno, ALLOCNO_COVER_CLASS (set_move->to));
732 ira_create_allocno_objects (new_allocno);
/* The temporary is deliberately left without a hard register so
   it goes to memory.  */
733 ALLOCNO_ASSIGNED_P (new_allocno) = true;
734 ALLOCNO_HARD_REGNO (new_allocno) = -1;
735 ALLOCNO_REG (new_allocno)
736 = create_new_reg (ALLOCNO_REG (set_move->to));
738                 /* Make it possibly conflicting with all earlier
739                    created allocnos.  Cases where temporary allocnos
740                    created to remove the cycles are quite rare.  */
741 n = ALLOCNO_NUM_OBJECTS (new_allocno);
742 gcc_assert (n == ALLOCNO_NUM_OBJECTS (set_move->to));
743 for (j = 0; j < n; j++)
745                     ira_object_t new_obj = ALLOCNO_OBJECT (new_allocno, j);
747                     OBJECT_MIN (new_obj) = 0;
748 OBJECT_MAX (new_obj) = ira_objects_num - 1;
/* Redirect SET_MOVE into the temporary and queue a new move from the
   temporary to SET_MOVE's original destination; the queued moves are
   appended after the sorted list below.  */
751                 new_move = create_move (set_move->to, new_allocno);
752 set_move->to = new_allocno;
753 VEC_safe_push (move_t, heap, move_vec, new_move);
754 ira_move_loops_num++;
755 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
756 fprintf (ira_dump_file,
757 "    Creating temporary allocno a%dr%d\n",
758 ALLOCNO_NUM (new_allocno),
759 REGNO (ALLOCNO_REG (new_allocno)));
762       if ((hard_regno = ALLOCNO_HARD_REGNO (to)) < 0)
763 continue;
764 nregs = hard_regno_nregs[hard_regno][ALLOCNO_MODE (to)];
765 for (i = 0; i < nregs; i++)
767           hard_regno_last_set[hard_regno + i] = move;
768 hard_regno_last_set_check[hard_regno + i] = curr_tick;
/* Append the cycle-breaking moves created above to the end of the
   list.  LAST is non-NULL here because LIST was non-empty.  */
771   for (i = (int) VEC_length (move_t, move_vec) - 1; i >= 0; i--)
773       move = VEC_index (move_t, move_vec, i);
774 move->next = NULL;
775 last->next = move;
776 last = move;
778   return first;
781 /* Generate RTX move insns from the move list LIST.  This updates
782    allocation cost using move execution frequency FREQ.  Returns the
   emitted insn sequence.  */
783 static rtx
784 emit_move_list (move_t list, int freq)
786   int cost;
787 rtx result, insn;
788 enum machine_mode mode;
789 enum reg_class cover_class;
791   start_sequence ();
792 for (; list != NULL; list = list->next)
/* Emit each move as its own sub-sequence so list->insn records the
   insns belonging to just this move.  */
794       start_sequence ();
795 emit_move_insn (ALLOCNO_REG (list->to), ALLOCNO_REG (list->from));
796 list->insn = get_insns ();
797 end_sequence ();
798 /* The reload needs to have set up insn codes.  If the reload
799    sets up insn codes by itself, it may fail because insns will
800    have hard registers instead of pseudos and there may be no
801    machine insn with given hard registers.  */
802 for (insn = list->insn; insn != NULL_RTX; insn = NEXT_INSN (insn))
803 recog_memoized (insn);
804 emit_insn (list->insn);
805 mode = ALLOCNO_MODE (list->to);
806 cover_class = ALLOCNO_COVER_CLASS (list->to);
807 cost = 0;
/* Account the move as a store (reg->mem), load (mem->reg), or
   shuffle (reg->reg) in IRA's global cost statistics.
   NOTE(review): both the store and load branches index
   ira_memory_move_cost[...][0]; confirm whether the load case should
   use index [1] (the load direction) -- verify against ira.c's
   initialization of ira_memory_move_cost.  */
808 if (ALLOCNO_HARD_REGNO (list->to) < 0)
810           if (ALLOCNO_HARD_REGNO (list->from) >= 0)
812               cost = ira_memory_move_cost[mode][cover_class][0] * freq;
813 ira_store_cost += cost;
816       else if (ALLOCNO_HARD_REGNO (list->from) < 0)
818           if (ALLOCNO_HARD_REGNO (list->to) >= 0)
820               cost = ira_memory_move_cost[mode][cover_class][0] * freq;
821 ira_load_cost += cost;
824       else
826           cost = (ira_get_register_move_cost (mode, cover_class, cover_class)
827 * freq);
828 ira_shuffle_cost += cost;
830       ira_overall_cost += cost;
832   result = get_insns ();
833 end_sequence ();
834 return result;
837 /* Generate RTX move insns from move lists attached to basic blocks
838    and edges.  Block-start lists are placed after the block's label
   and basic-block note; block-end lists before the (non-control)
   block end; edge lists are queued for commit_edge_insertions.  */
839 static void
840 emit_moves (void)
842   basic_block bb;
843 edge_iterator ei;
844 edge e;
845 rtx insns, tmp;
847   FOR_EACH_BB (bb)
849       if (at_bb_start[bb->index] != NULL)
851           at_bb_start[bb->index] = modify_move_list (at_bb_start[bb->index]);
852 insns = emit_move_list (at_bb_start[bb->index],
853 REG_FREQ_FROM_BB (bb));
/* Skip the label and the basic-block note so the moves land at the
   first real position inside the block.  */
854 tmp = BB_HEAD (bb);
855 if (LABEL_P (tmp))
856 tmp = NEXT_INSN (tmp);
857 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
858 tmp = NEXT_INSN (tmp);
859 if (tmp == BB_HEAD (bb))
860 emit_insn_before (insns, tmp);
861 else if (tmp != NULL_RTX)
862 emit_insn_after (insns, PREV_INSN (tmp));
863 else
864 emit_insn_after (insns, get_last_insn ());
867       if (at_bb_end[bb->index] != NULL)
869           at_bb_end[bb->index] = modify_move_list (at_bb_end[bb->index]);
870 insns = emit_move_list (at_bb_end[bb->index], REG_FREQ_FROM_BB (bb));
/* unify_moves never hoists a list to the block end when the block
   ends in a control-flow insn.  */
871 ira_assert (! control_flow_insn_p (BB_END (bb)));
872 emit_insn_after (insns, BB_END (bb));
875       FOR_EACH_EDGE (e, ei, bb->succs)
877           if (e->aux == NULL)
878 continue;
879 ira_assert ((e->flags & EDGE_ABNORMAL) == 0
880 || ! EDGE_CRITICAL_P (e));
881 e->aux = modify_move_list ((move_t) e->aux);
882 insert_insn_on_edge
883 (emit_move_list ((move_t) e->aux,
884 REG_FREQ_FROM_EDGE_FREQ (EDGE_FREQUENCY (e))),
/* Inserting on a non-fallthrough edge will require a new jump.  */
886           if (e->src->next_bb != e->dest)
887 ira_additional_jumps_num++;
892 /* Update costs of A and corresponding allocnos on upper levels on the
893    loop tree from reading (if READ_P) or writing A on an execution
894    path with FREQ.  */
895 static void
896 update_costs (ira_allocno_t a, bool read_p, int freq)
898   ira_loop_tree_node_t parent;
900   for (;;)
902       ALLOCNO_NREFS (a)++;
903 ALLOCNO_FREQ (a) += freq;
/* A read would require a load from memory (index 1), a write a store
   (index 0), if the allocno is spilled.  */
904 ALLOCNO_MEMORY_COST (a)
905 += (ira_memory_move_cost[ALLOCNO_MODE (a)][ALLOCNO_COVER_CLASS (a)]
906 [read_p ? 1 : 0] * freq);
/* Climb through caps first, otherwise to the parent loop's allocno of
   the same regno; stop when neither exists.  */
907 if (ALLOCNO_CAP (a) != NULL)
908 a = ALLOCNO_CAP (a);
909 else if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) == NULL
910 || (a = parent->regno_allocno_map[ALLOCNO_REGNO (a)]) == NULL)
911 break;
915 /* Process moves from LIST with execution FREQ to add ranges, copies,
916    and modify costs for allocnos involved in the moves.  All regnos
917    living through the list is in LIVE_THROUGH, and the loop tree node
918    used to find corresponding allocnos is NODE.  */
919 static void
920 add_range_and_copies_from_move_list (move_t list, ira_loop_tree_node_t node,
921 bitmap live_through, int freq)
923   int start, n;
924 unsigned int regno;
925 move_t move;
926 ira_allocno_t a;
927 ira_copy_t cp;
928 live_range_t r;
929 bitmap_iterator bi;
930 HARD_REG_SET hard_regs_live;
932   if (list == NULL)
933 return;
/* N counts the pseudos live through the move sequence; used to size
   newly-allocated conflict bit vectors below.  */
934 n = 0;
935 EXECUTE_IF_SET_IN_BITMAP (live_through, FIRST_PSEUDO_REGISTER, regno, bi)
936 n++;
937 REG_SET_TO_HARD_REG_SET (hard_regs_live, live_through);
938 /* This is a trick to guarantee that new ranges is not merged with
939    the old ones.  */
940 ira_max_point++;
941 start = ira_max_point;
942 for (move = list; move != NULL; move = move->next)
944       ira_allocno_t from = move->from;
945 ira_allocno_t to = move->to;
946 int nr, i;
/* FROM and TO are defined/used by the moves themselves, so they are
   not "live through" the sequence.  */
948       bitmap_clear_bit (live_through, ALLOCNO_REGNO (from));
949 bitmap_clear_bit (live_through, ALLOCNO_REGNO (to));
951       nr = ALLOCNO_NUM_OBJECTS (to);
952 for (i = 0; i < nr; i++)
954           ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
955 if (OBJECT_CONFLICT_ARRAY (to_obj) == NULL)
957               if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
958 fprintf (ira_dump_file, "    Allocate conflicts for a%dr%d\n",
959 ALLOCNO_NUM (to), REGNO (ALLOCNO_REG (to)));
960 ira_allocate_object_conflicts (to_obj, n);
963       ior_hard_reg_conflicts (from, &hard_regs_live);
964 ior_hard_reg_conflicts (to, &hard_regs_live);
/* The move reads FROM and writes TO.  */
966       update_costs (from, true, freq);
967 update_costs (to, false, freq);
968 cp = ira_add_allocno_copy (from, to, freq, false, move->insn, NULL);
969 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
970 fprintf (ira_dump_file, "    Adding cp%d:a%dr%d-a%dr%d\n",
971 cp->num, ALLOCNO_NUM (cp->first),
972 REGNO (ALLOCNO_REG (cp->first)), ALLOCNO_NUM (cp->second),
973 REGNO (ALLOCNO_REG (cp->second)));
/* Extend (or open) FROM's live ranges up to the current point.  */
975       nr = ALLOCNO_NUM_OBJECTS (from);
976 for (i = 0; i < nr; i++)
978           ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
979 r = OBJECT_LIVE_RANGES (from_obj);
980 if (r == NULL || r->finish >= 0)
982               ira_add_live_range_to_object (from_obj, start, ira_max_point);
983 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
984 fprintf (ira_dump_file,
985 "    Adding range [%d..%d] to allocno a%dr%d\n",
986 start, ira_max_point, ALLOCNO_NUM (from),
987 REGNO (ALLOCNO_REG (from)));
989           else
991               r->finish = ira_max_point;
992 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
993 fprintf (ira_dump_file,
994 "    Adding range [%d..%d] to allocno a%dr%d\n",
995 r->start, ira_max_point, ALLOCNO_NUM (from),
996 REGNO (ALLOCNO_REG (from)));
/* Open a range for TO at a new point; its finish (-1) is patched in
   the second loop below.  */
999       ira_max_point++;
1000 nr = ALLOCNO_NUM_OBJECTS (to);
1001 for (i = 0; i < nr; i++)
1003           ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
1004 ira_add_live_range_to_object (to_obj, ira_max_point, -1);
1006       ira_max_point++;
/* Close every destination range that is still open.  */
1008   for (move = list; move != NULL; move = move->next)
1010       int nr, i;
1011 nr = ALLOCNO_NUM_OBJECTS (move->to);
1012 for (i = 0; i < nr; i++)
1014           ira_object_t to_obj = ALLOCNO_OBJECT (move->to, i);
1015 r = OBJECT_LIVE_RANGES (to_obj);
1016 if (r->finish < 0)
1018               r->finish = ira_max_point - 1;
1019 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
1020 fprintf (ira_dump_file,
1021 "    Adding range [%d..%d] to allocno a%dr%d\n",
1022 r->start, r->finish, ALLOCNO_NUM (move->to),
1023 REGNO (ALLOCNO_REG (move->to)));
/* Pseudos live through the whole sequence conflict with everything in
   it: give their allocnos (or their mem-optimized destinations) a
   range covering the sequence.  */
1027   EXECUTE_IF_SET_IN_BITMAP (live_through, FIRST_PSEUDO_REGISTER, regno, bi)
1029       ira_allocno_t to;
1030 int nr, i;
1032       a = node->regno_allocno_map[regno];
1033 if ((to = ALLOCNO_MEM_OPTIMIZED_DEST (a)) != NULL)
1034 a = to;
1035 nr = ALLOCNO_NUM_OBJECTS (a);
1036 for (i = 0; i < nr; i++)
1038           ira_object_t obj = ALLOCNO_OBJECT (a, i);
1039 ira_add_live_range_to_object (obj, start, ira_max_point - 1);
1041       if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
1042 fprintf
1043 (ira_dump_file,
1044 "    Adding range [%d..%d] to live through %s allocno a%dr%d\n",
1045 start, ira_max_point - 1,
1046 to != NULL ? "upper level" : "",
1047 ALLOCNO_NUM (a), REGNO (ALLOCNO_REG (a)));
1051 /* Process all move list to add ranges, conflicts, copies, and modify
1052    costs for allocnos involved in the moves.  */
1053 static void
1054 add_ranges_and_copies (void)
1056   basic_block bb;
1057 edge_iterator ei;
1058 edge e;
1059 ira_loop_tree_node_t node;
1060 bitmap live_through;
1062   live_through = ira_allocate_bitmap ();
1063 FOR_EACH_BB (bb)
1065       /* It does not matter what loop_tree_node (of source or
1066          destination block) to use for searching allocnos by their
1067          regnos because of subsequent IR flattening.  */
1068 node = IRA_BB_NODE (bb)->parent;
/* Block-start lists see what is live into the block, block-end lists
   what is live out of it.  */
1069 bitmap_copy (live_through, DF_LR_IN (bb));
1070 add_range_and_copies_from_move_list
1071 (at_bb_start[bb->index], node, live_through, REG_FREQ_FROM_BB (bb));
1072 bitmap_copy (live_through, DF_LR_OUT (bb));
1073 add_range_and_copies_from_move_list
1074 (at_bb_end[bb->index], node, live_through, REG_FREQ_FROM_BB (bb));
1075 FOR_EACH_EDGE (e, ei, bb->succs)
/* An edge list sees the pseudos live across the edge: out of the
   source and into the destination.  */
1077           bitmap_and (live_through, DF_LR_IN (e->dest), DF_LR_OUT (bb));
1078 add_range_and_copies_from_move_list
1079 ((move_t) e->aux, node, live_through,
1080 REG_FREQ_FROM_EDGE_FREQ (EDGE_FREQUENCY (e)));
1083   ira_free_bitmap (live_through);
1086 /* The entry function changes code and generates shuffling allocnos on
1087    region borders for the regional (LOOPS_P is TRUE in this case)
1088    register allocation.  With LOOPS_P false it only resets each
   allocno's reg to the original pseudo and returns.  */
1089 void
1090 ira_emit (bool loops_p)
1092   basic_block bb;
1093 rtx insn;
1094 edge_iterator ei;
1095 edge e;
1096 ira_allocno_t a;
1097 ira_allocno_iterator ai;
1099   FOR_EACH_ALLOCNO (a, ai)
1100 ALLOCNO_REG (a) = regno_reg_rtx[ALLOCNO_REGNO (a)];
1101 if (! loops_p)
1102 return;
/* Phase 1: allocate per-BB move lists and rename pseudos inside loop
   regions whose allocation differs from the parent region.  */
1103 at_bb_start = (move_t *) ira_allocate (sizeof (move_t) * last_basic_block);
1104 memset (at_bb_start, 0, sizeof (move_t) * last_basic_block);
1105 at_bb_end = (move_t *) ira_allocate (sizeof (move_t) * last_basic_block);
1106 memset (at_bb_end, 0, sizeof (move_t) * last_basic_block);
1107 local_allocno_bitmap = ira_allocate_bitmap ();
1108 used_regno_bitmap = ira_allocate_bitmap ();
1109 renamed_regno_bitmap = ira_allocate_bitmap ();
1110 max_regno_before_changing = max_reg_num ();
1111 ira_traverse_loop_tree (true, ira_loop_tree_root, change_loop, NULL);
1112 set_allocno_somewhere_renamed_p ();
1113 ira_free_bitmap (used_regno_bitmap);
1114 ira_free_bitmap (renamed_regno_bitmap);
1115 ira_free_bitmap (local_allocno_bitmap);
1116 setup_entered_from_non_parent_p ();
/* Phase 2: attach shuffle moves to region-crossing edges.  */
1117 FOR_EACH_BB (bb)
1119       at_bb_start[bb->index] = NULL;
1120 at_bb_end[bb->index] = NULL;
1121 FOR_EACH_EDGE (e, ei, bb->succs)
1122 if (e->dest != EXIT_BLOCK_PTR)
1123 generate_edge_moves (e);
/* Phase 3: hoist identical edge lists into blocks, then emit the
   moves and account ranges/copies/costs.  */
1125   allocno_last_set
1126 = (move_t *) ira_allocate (sizeof (move_t) * max_reg_num ());
1127 allocno_last_set_check
1128 = (int *) ira_allocate (sizeof (int) * max_reg_num ());
1129 memset (allocno_last_set_check, 0, sizeof (int) * max_reg_num ());
1130 memset (hard_regno_last_set_check, 0, sizeof (hard_regno_last_set_check));
1131 curr_tick = 0;
1132 FOR_EACH_BB (bb)
1133 unify_moves (bb, true);
1134 FOR_EACH_BB (bb)
1135 unify_moves (bb, false);
1136 move_vec = VEC_alloc (move_t, heap, ira_allocnos_num);
1137 emit_moves ();
1138 add_ranges_and_copies ();
1139 /* Clean up: */
1140 FOR_EACH_BB (bb)
1142       free_move_list (at_bb_start[bb->index]);
1143 free_move_list (at_bb_end[bb->index]);
1144 FOR_EACH_EDGE (e, ei, bb->succs)
1146           free_move_list ((move_t) e->aux);
1147 e->aux = NULL;
1150   VEC_free (move_t, heap, move_vec);
1151 ira_free (allocno_last_set_check);
1152 ira_free (allocno_last_set);
1153 commit_edge_insertions ();
1154 /* Fix insn codes.  It is necessary to do it before reload because
1155    reload assumes initial insn codes defined.  The insn codes can be
1156    invalidated by CFG infrastructure for example in jump
1157    redirection.  */
1158 FOR_EACH_BB (bb)
1159 FOR_BB_INSNS_REVERSE (bb, insn)
1160 if (INSN_P (insn))
1161 recog_memoized (insn);
1162 ira_free (at_bb_end);
1163 ira_free (at_bb_start);