/* Building allocnos for IRA.
   Copyright (C) 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "target.h"
#include "regs.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"
#include "toplev.h"
#include "params.h"
#include "df.h"
#include "output.h"
#include "reload.h"
#include "ira-int.h"
static void create_loop_tree_nodes (int);
static void finish_loop_tree_nodes (void);
static void add_loop_to_tree (struct loop *);
static void form_loop_tree (void);

static void initiate_calls (void);
static void finish_calls (void);

static void initiate_allocnos (void);
static void check_allocno_conflict_vec (allocno_t, int);
static void add_allocno_conflict (allocno_t, allocno_t);
static allocno_t create_cap_allocno (allocno_t);
static void finish_allocnos (void);

static void initiate_copies (void);
static void finish_copies (void);

static void create_insn_allocnos (rtx, int);
static void create_bb_allocnos (struct ira_loop_tree_node *);
static void create_loop_allocnos (edge);
static void create_loop_tree_node_allocnos (struct ira_loop_tree_node *);
static void create_allocnos (void);
static void create_loop_tree_node_caps (struct ira_loop_tree_node *);
#ifdef ENABLE_IRA_CHECKING
static void check_coalesced_allocnos (void);
#endif
/* All natural loops.  */
struct loops ira_loops;

/* The root of the loop tree corresponding to the whole function.  */
struct ira_loop_tree_node *ira_loop_tree_root;

/* All basic block data are referred through the following array.  We
   can not use member `aux' for this because it is used for insertion
   of insns on edges.  */
struct ira_loop_tree_node *ira_bb_nodes;

/* All loop data are referred through the following array.  */
struct ira_loop_tree_node *ira_loop_nodes;

/* Map regno -> allocno for the current loop tree node.  */
allocno_t *regno_allocno_map;

/* Array of references to all allocnos and its size.  The order
   number of an allocno corresponds to its index in the array.  */
allocno_t *allocnos;
int allocnos_num;

/* Array of references to all copies and its size.  The order number
   of a copy corresponds to its index in the array.  */
copy_t *copies;
int copies_num;

/* LAST_BASIC_BLOCK before generating additional insns because of live
   range splitting.  Emitting insns on a critical edge creates a new
   basic block.  */
static int last_basic_block_before_change;
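
/* The loop tree built by form_loop_tree below links its nodes
   through three members: the children of a node (its subloops and
   basic blocks) form a list headed by `inner' and chained through
   `next', and each child points back through `father'.  A node
   represents a basic block when its `bb' member is non-NULL and a
   loop otherwise.  A sketch of a scan over the immediate children of
   a node NODE (NODE is just a placeholder here):

     for (child = node->inner; child != NULL; child = child->next)
       if (child->bb != NULL)
         ... process a basic block node ...
       else
         ... process a subloop node ...
*/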
/* The following function creates the loop tree nodes.  If LOOPS_P is
   zero, the nodes corresponding to the loops (except the root which
   corresponds to the whole function) will not be created (it will be
   done only for basic blocks).  */
static void
create_loop_tree_nodes (int loops_p)
{
  unsigned int i, j;
  int max_regno, skip_p;
  edge_iterator ei;
  edge e;
  VEC (edge, heap) *edges;
  loop_p loop;

  ira_bb_nodes
    = ira_allocate (sizeof (struct ira_loop_tree_node) * last_basic_block);
  last_basic_block_before_change = last_basic_block;
  for (i = 0; i < (unsigned int) last_basic_block; i++)
    {
      ira_bb_nodes [i].regno_allocno_map = NULL;
      memset (ira_bb_nodes [i].reg_pressure, 0,
	      sizeof (ira_bb_nodes [i].reg_pressure));
      ira_bb_nodes [i].mentioned_allocnos = NULL;
      ira_bb_nodes [i].modified_regnos = NULL;
      ira_bb_nodes [i].border_allocnos = NULL;
      ira_bb_nodes [i].local_copies = NULL;
    }
  ira_loop_nodes = ira_allocate (sizeof (struct ira_loop_tree_node)
				 * VEC_length (loop_p, ira_loops.larray));
  max_regno = max_reg_num ();
  for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
    {
      if (loop != ira_loops.tree_root)
	{
	  ira_loop_nodes [i].regno_allocno_map = NULL;
	  if (! loops_p)
	    continue;
	  skip_p = FALSE;
	  FOR_EACH_EDGE (e, ei, loop->header->preds)
	    if (e->src != loop->latch
		&& (e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
	      {
		skip_p = TRUE;
		break;
	      }
	  if (skip_p)
	    continue;
	  edges = get_loop_exit_edges (loop);
	  for (j = 0; VEC_iterate (edge, edges, j, e); j++)
	    if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
	      {
		skip_p = TRUE;
		break;
	      }
	  VEC_free (edge, heap, edges);
	  if (skip_p)
	    continue;
	}
      ira_loop_nodes [i].regno_allocno_map
	= ira_allocate (sizeof (allocno_t) * max_regno);
      memset (ira_loop_nodes [i].regno_allocno_map, 0,
	      sizeof (allocno_t) * max_regno);
      memset (ira_loop_nodes [i].reg_pressure, 0,
	      sizeof (ira_loop_nodes [i].reg_pressure));
      ira_loop_nodes [i].mentioned_allocnos = ira_allocate_bitmap ();
      ira_loop_nodes [i].modified_regnos = ira_allocate_bitmap ();
      ira_loop_nodes [i].border_allocnos = ira_allocate_bitmap ();
      ira_loop_nodes [i].local_copies = ira_allocate_bitmap ();
    }
}
/* The function frees the loop tree nodes.  */
static void
finish_loop_tree_nodes (void)
{
  unsigned int i;
  loop_p loop;

  for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
    if (ira_loop_nodes [i].regno_allocno_map != NULL)
      {
	ira_free_bitmap (ira_loop_nodes [i].local_copies);
	ira_free_bitmap (ira_loop_nodes [i].border_allocnos);
	ira_free_bitmap (ira_loop_nodes [i].modified_regnos);
	ira_free_bitmap (ira_loop_nodes [i].mentioned_allocnos);
	ira_free (ira_loop_nodes [i].regno_allocno_map);
      }
  ira_free (ira_loop_nodes);
  for (i = 0; i < (unsigned int) last_basic_block_before_change; i++)
    {
      if (ira_bb_nodes [i].local_copies != NULL)
	ira_free_bitmap (ira_bb_nodes [i].local_copies);
      if (ira_bb_nodes [i].border_allocnos != NULL)
	ira_free_bitmap (ira_bb_nodes [i].border_allocnos);
      if (ira_bb_nodes [i].modified_regnos != NULL)
	ira_free_bitmap (ira_bb_nodes [i].modified_regnos);
      if (ira_bb_nodes [i].mentioned_allocnos != NULL)
	ira_free_bitmap (ira_bb_nodes [i].mentioned_allocnos);
      if (ira_bb_nodes [i].regno_allocno_map != NULL)
	ira_free (ira_bb_nodes [i].regno_allocno_map);
    }
  ira_free (ira_bb_nodes);
}
/* The following recursive function adds LOOP to the loop tree
   hierarchy.  The loop is added only once.  */
static void
add_loop_to_tree (struct loop *loop)
{
  struct loop *father;
  struct ira_loop_tree_node *loop_node, *father_node;

  /* Can not use loop node access macros because of potential checking
     and because the nodes are not initialized yet.  */
  if (loop_outer (loop) != NULL)
    add_loop_to_tree (loop_outer (loop));
  if (ira_loop_nodes [loop->num].regno_allocno_map != NULL
      && ira_loop_nodes [loop->num].inner == NULL)
    {
      /* We have not added the loop node to the tree yet.  */
      loop_node = &ira_loop_nodes [loop->num];
      loop_node->loop = loop;
      loop_node->bb = NULL;
      for (father = loop_outer (loop);
	   father != NULL;
	   father = loop_outer (father))
	if (ira_loop_nodes [father->num].regno_allocno_map != NULL)
	  break;
      if (father == NULL)
	{
	  loop_node->next = NULL;
	  loop_node->father = NULL;
	}
      else
	{
	  father_node = &ira_loop_nodes [father->num];
	  loop_node->next = father_node->inner;
	  father_node->inner = loop_node;
	  loop_node->father = father_node;
	}
    }
}
/* The following function creates the loop tree.  The algorithm is
   designed to provide a correct order of loops (by the last loop BB)
   and of basic blocks in the chain formed by member next.  */
static void
form_loop_tree (void)
{
  unsigned int i;
  basic_block bb;
  struct loop *father;
  struct ira_loop_tree_node *bb_node, *loop_node;
  loop_p loop;

  /* Can not use loop/bb node access macros because of potential
     checking and because the nodes are not initialized yet.  */
  for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
    if (ira_loop_nodes [i].regno_allocno_map != NULL)
      ira_loop_nodes [i].inner = NULL;
  FOR_EACH_BB_REVERSE (bb)
    {
      bb_node = &ira_bb_nodes [bb->index];
      bb_node->bb = bb;
      bb_node->loop = NULL;
      bb_node->inner = NULL;
      bb_node->next = NULL;
      for (father = bb->loop_father;
	   father != NULL;
	   father = loop_outer (father))
	if (ira_loop_nodes [father->num].regno_allocno_map != NULL)
	  break;
      add_loop_to_tree (father);
      loop_node = &ira_loop_nodes [father->num];
      bb_node->next = loop_node->inner;
      bb_node->father = loop_node;
      loop_node->inner = bb_node;
    }
  ira_loop_tree_root = IRA_LOOP_NODE_BY_INDEX (ira_loops.tree_root->num);
  ira_assert (ira_loop_tree_root->regno_allocno_map != NULL);
}
/* Array of vectors containing calls intersected by pseudo-registers.  */
VEC(rtx, heap) **regno_calls;

/* The length of the previous array.  */
static int regno_calls_num;

/* The function initializes the array of vectors containing calls
   intersected by pseudo-registers.  */
static void
initiate_calls (void)
{
  regno_calls_num = max_reg_num ();
  regno_calls = ira_allocate (regno_calls_num * sizeof (VEC(rtx, heap) *));
  memset (regno_calls, 0, regno_calls_num * sizeof (VEC(rtx, heap) *));
}
/* The function adds CALL to REGNO's vector of intersected calls.  */
int
add_regno_call (int regno, rtx call)
{
  int result;

  gcc_assert (regno < regno_calls_num);
  if (regno_calls [regno] == NULL)
    regno_calls [regno] = VEC_alloc (rtx, heap, 10);
  result = VEC_length (rtx, regno_calls [regno]);
  VEC_safe_push (rtx, heap, regno_calls [regno], call);
  return result;
}
/* The function frees the array of vectors containing calls
   intersected by pseudo-registers.  */
static void
finish_calls (void)
{
  int i;

  for (i = 0; i < regno_calls_num; i++)
    VEC_free (rtx, heap, regno_calls [i]);
  ira_free (regno_calls);
}
/* Varray containing references to all created allocnos.  It is a
   container of array allocnos.  */
static varray_type allocno_varray;

/* The function initializes data concerning allocnos.  */
static void
initiate_allocnos (void)
{
  VARRAY_GENERIC_PTR_NOGC_INIT
    (allocno_varray, max_reg_num () * 2, "allocnos");
  allocnos = NULL;
  allocnos_num = 0;
  regno_allocno_map = ira_allocate (max_reg_num () * sizeof (allocno_t));
  memset (regno_allocno_map, 0, max_reg_num () * sizeof (allocno_t));
}
/* The function creates and returns an allocno corresponding to REGNO
   in LOOP_TREE_NODE.  Add the allocno to the list of allocnos with
   the same regno if CAP_P is FALSE.  */
allocno_t
create_allocno (int regno, int cap_p, struct ira_loop_tree_node *loop_tree_node)
{
  allocno_t a;

  a = ira_allocate (sizeof (struct allocno));
  ALLOCNO_REGNO (a) = regno;
  ALLOCNO_LOOP_TREE_NODE (a) = loop_tree_node;
  if (! cap_p)
    {
      ALLOCNO_NEXT_REGNO_ALLOCNO (a) = regno_allocno_map [regno];
      regno_allocno_map [regno] = a;
      if (loop_tree_node->regno_allocno_map [regno] == NULL)
	/* Remember that we can create temporary allocnos to break
	   cycles in register shuffle.  */
	loop_tree_node->regno_allocno_map [regno] = a;
    }
  ALLOCNO_CAP (a) = NULL;
  ALLOCNO_CAP_MEMBER (a) = NULL;
  ALLOCNO_NUM (a) = allocnos_num;
  ALLOCNO_CONFLICT_ALLOCNO_VEC (a) = NULL;
  ALLOCNO_CONFLICT_ALLOCNO_VEC_ACTIVE_SIZE (a) = 0;
  CLEAR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a));
  ALLOCNO_FREQ (a) = 1;
  ALLOCNO_HARD_REGNO (a) = -1;
  ALLOCNO_CALL_FREQ (a) = 0;
  ALLOCNO_CALLS_CROSSED_NUM (a) = 0;
  ALLOCNO_CALLS_CROSSED_START (a) = -1;
#ifdef STACK_REGS
  ALLOCNO_NO_STACK_REG_P (a) = FALSE;
#endif
  ALLOCNO_IN_GRAPH_P (a) = FALSE;
  ALLOCNO_ASSIGNED_P (a) = FALSE;
  ALLOCNO_MAY_BE_SPILLED_P (a) = FALSE;
  ALLOCNO_MODE (a) = (regno < 0 ? VOIDmode : PSEUDO_REGNO_MODE (regno));
  ALLOCNO_COPIES (a) = NULL;
  ALLOCNO_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_CURR_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_CURR_CONFLICT_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_LEFT_CONFLICTS_NUM (a) = -1;
  ALLOCNO_COVER_CLASS (a) = NO_REGS;
  ALLOCNO_BEST_CLASS (a) = NO_REGS;
  ALLOCNO_COVER_CLASS_COST (a) = 0;
  ALLOCNO_MEMORY_COST (a) = 0;
  ALLOCNO_NEXT_BUCKET_ALLOCNO (a) = NULL;
  ALLOCNO_PREV_BUCKET_ALLOCNO (a) = NULL;
  ALLOCNO_FIRST_COALESCED_ALLOCNO (a) = a;
  ALLOCNO_NEXT_COALESCED_ALLOCNO (a) = a;
  VARRAY_PUSH_GENERIC_PTR (allocno_varray, a);
  allocnos = (allocno_t *) &VARRAY_GENERIC_PTR (allocno_varray, 0);
  allocnos_num = VARRAY_ACTIVE_SIZE (allocno_varray);
  return a;
}
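
/* All non-cap allocnos created for the same regno are chained
   through ALLOCNO_NEXT_REGNO_ALLOCNO, with the head of the chain
   kept in regno_allocno_map.  A typical scan of the chain, as done
   in create_allocnos below, looks like:

     for (a = regno_allocno_map [regno];
	  a != NULL;
	  a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
       ...
*/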
/* The function allocates the conflict vector of A for NUM allocnos.  */
void
allocate_allocno_conflicts (allocno_t a, int num)
{
  ira_assert (ALLOCNO_CONFLICT_ALLOCNO_VEC (a) == NULL);
  ALLOCNO_CONFLICT_ALLOCNO_VEC (a)
    = ira_allocate (sizeof (allocno_t) * (num + 1));
  ALLOCNO_CONFLICT_ALLOCNO_VEC (a) [0] = NULL;
  ALLOCNO_CONFLICT_ALLOCNO_VEC_ACTIVE_SIZE (a) = 0;
  ALLOCNO_CONFLICT_ALLOCNO_VEC_SIZE (a) = num;
}
/* The function checks that the conflict vector of A has enough space
   to contain NUM allocno references.  If the space is not enough,
   the function expands the conflict vector.  */
static void
check_allocno_conflict_vec (allocno_t a, int num)
{
  int size;
  allocno_t *vec;

  ira_assert (ALLOCNO_CONFLICT_ALLOCNO_VEC (a) != NULL);
  if (ALLOCNO_CONFLICT_ALLOCNO_VEC_SIZE (a) >= num)
    return;
  size = 3 * num / 2 + 1;
  vec = ira_allocate (sizeof (allocno_t) * (size + 1));
  memcpy (vec, ALLOCNO_CONFLICT_ALLOCNO_VEC (a),
	  sizeof (allocno_t)
	  * (ALLOCNO_CONFLICT_ALLOCNO_VEC_ACTIVE_SIZE (a) + 1));
  ira_free (ALLOCNO_CONFLICT_ALLOCNO_VEC (a));
  ALLOCNO_CONFLICT_ALLOCNO_VEC (a) = vec;
  ALLOCNO_CONFLICT_ALLOCNO_VEC_SIZE (a) = size;
}
/* The function adds A1 to the conflict vector of A2 and vice versa.  */
static void
add_allocno_conflict (allocno_t a1, allocno_t a2)
{
  int size1, size2;
  allocno_t *vec1, *vec2;

  size1 = ALLOCNO_CONFLICT_ALLOCNO_VEC_ACTIVE_SIZE (a1);
  size2 = ALLOCNO_CONFLICT_ALLOCNO_VEC_ACTIVE_SIZE (a2);
  check_allocno_conflict_vec (a1, size1 + 1);
  check_allocno_conflict_vec (a2, size2 + 1);
  vec1 = ALLOCNO_CONFLICT_ALLOCNO_VEC (a1);
  vec2 = ALLOCNO_CONFLICT_ALLOCNO_VEC (a2);
  vec1 [size1] = a2;
  vec2 [size2] = a1;
  vec1 [size1 + 1] = NULL;
  vec2 [size2 + 1] = NULL;
  ALLOCNO_CONFLICT_ALLOCNO_VEC_ACTIVE_SIZE (a1)++;
  ALLOCNO_CONFLICT_ALLOCNO_VEC_ACTIVE_SIZE (a2)++;
}
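
/* Conflict vectors are kept NULL-terminated in addition to keeping
   the active size, so the conflicts of an allocno A can be scanned
   without knowing the size in advance, as create_cap_allocno below
   does:

     allocno_t *vec = ALLOCNO_CONFLICT_ALLOCNO_VEC (a);

     for (i = 0; (conflict_allocno = vec [i]) != NULL; i++)
       ...
*/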
/* This recursive function outputs allocno A and, if it is a cap,
   outputs its members.  */
void
print_expanded_allocno (allocno_t a)
{
  basic_block bb;

  fprintf (ira_dump_file, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
    fprintf (ira_dump_file, ",b%d", bb->index);
  else
    fprintf (ira_dump_file, ",l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop->num);
  if (ALLOCNO_CAP_MEMBER (a) != NULL)
    {
      fprintf (ira_dump_file, ":");
      print_expanded_allocno (ALLOCNO_CAP_MEMBER (a));
    }
  fprintf (ira_dump_file, ")");
}
/* The function creates and returns a cap representing allocno A in
   the father loop.  */
static allocno_t
create_cap_allocno (allocno_t a)
{
  int i, regno, hard_regs_num, conflicts_num;
  int *reg_costs, *conflict_reg_costs;
  basic_block bb;
  allocno_t cap, conflict_allocno, conflict_father_allocno;
  allocno_t *allocno_vec;
  struct ira_loop_tree_node *father;

  ira_assert (ALLOCNO_FIRST_COALESCED_ALLOCNO (a) == a
	      && ALLOCNO_NEXT_COALESCED_ALLOCNO (a) == a);
  father = ALLOCNO_LOOP_TREE_NODE (a)->father;
  cap = create_allocno (ALLOCNO_REGNO (a), TRUE, father);
  /* We just need to set a mode giving the same size.  */
  ALLOCNO_MODE (cap) = ALLOCNO_MODE (a);
  ALLOCNO_COVER_CLASS (cap) = ALLOCNO_COVER_CLASS (a);
  ALLOCNO_BEST_CLASS (cap) = ALLOCNO_BEST_CLASS (a);
  ALLOCNO_AVAILABLE_REGS_NUM (cap) = ALLOCNO_AVAILABLE_REGS_NUM (a);
  ALLOCNO_CAP_MEMBER (cap) = a;
  hard_regs_num = class_hard_regs_num [ALLOCNO_COVER_CLASS (a)];
  ALLOCNO_HARD_REG_COSTS (cap) = reg_costs
    = ira_allocate (hard_regs_num * sizeof (int));
  memcpy (reg_costs, ALLOCNO_HARD_REG_COSTS (a), hard_regs_num * sizeof (int));
  ALLOCNO_CONFLICT_HARD_REG_COSTS (cap) = conflict_reg_costs
    = ira_allocate (hard_regs_num * sizeof (int));
  memcpy (conflict_reg_costs, ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
	  hard_regs_num * sizeof (int));
  ALLOCNO_CURR_HARD_REG_COSTS (cap)
    = ira_allocate (hard_regs_num * sizeof (int));
  ALLOCNO_CURR_CONFLICT_HARD_REG_COSTS (cap)
    = ira_allocate (hard_regs_num * sizeof (int));
  /* ??? costs, call_p etc.  */
  bitmap_set_bit (father->mentioned_allocnos, ALLOCNO_NUM (cap));
  ALLOCNO_CAP (a) = cap;
  ALLOCNO_AVAILABLE_REGS_NUM (cap) = ALLOCNO_AVAILABLE_REGS_NUM (a);
  ALLOCNO_COVER_CLASS_COST (cap) = ALLOCNO_COVER_CLASS_COST (a);
  ALLOCNO_MEMORY_COST (cap) = ALLOCNO_MEMORY_COST (a);
  ALLOCNO_FREQ (cap) = ALLOCNO_FREQ (a);
  ALLOCNO_CALL_FREQ (cap) = ALLOCNO_CALL_FREQ (a);
  IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (cap),
		    ALLOCNO_CONFLICT_HARD_REGS (a));
  ALLOCNO_CALLS_CROSSED_NUM (cap) = ALLOCNO_CALLS_CROSSED_NUM (a);
  ALLOCNO_CALLS_CROSSED_START (cap) = ALLOCNO_CALLS_CROSSED_START (a);
#ifdef STACK_REGS
  ALLOCNO_NO_STACK_REG_P (cap) = ALLOCNO_NO_STACK_REG_P (a);
#endif
  allocno_vec = ALLOCNO_CONFLICT_ALLOCNO_VEC (a);
  for (conflicts_num = i = 0;
       (conflict_allocno = allocno_vec [i]) != NULL;
       i++)
    conflicts_num++;
  allocate_allocno_conflicts (cap, conflicts_num);
  for (conflicts_num = i = 0;
       (conflict_allocno = allocno_vec [i]) != NULL;
       i++)
    {
      regno = ALLOCNO_REGNO (conflict_allocno);
      conflict_father_allocno = father->regno_allocno_map [regno];
      if (conflict_father_allocno == NULL)
	conflict_father_allocno = ALLOCNO_CAP (conflict_allocno);
      if (conflict_father_allocno != NULL)
	add_allocno_conflict (cap, conflict_father_allocno);
    }
  if (ira_dump_file != NULL)
    {
      fprintf (ira_dump_file, " Creating cap ");
      print_expanded_allocno (cap);
      fprintf (ira_dump_file, "\n Conflicts:");
      allocno_vec = ALLOCNO_CONFLICT_ALLOCNO_VEC (cap);
      for (i = 0; (conflict_allocno = allocno_vec [i]) != NULL; i++)
	{
	  fprintf (ira_dump_file, " a%d(r%d,", ALLOCNO_NUM (conflict_allocno),
		   ALLOCNO_REGNO (conflict_allocno));
	  if ((bb = ALLOCNO_LOOP_TREE_NODE (conflict_allocno)->bb) != NULL)
	    fprintf (ira_dump_file, "b%d)", bb->index);
	  else
	    fprintf (ira_dump_file, "l%d)",
		     ALLOCNO_LOOP_TREE_NODE (conflict_allocno)->loop->num);
	}
      fprintf (ira_dump_file, "\n");
    }
  return cap;
}
/* The function frees memory allocated for all allocnos.  */
static void
finish_allocnos (void)
{
  int i;
  allocno_t a;

  for (i = 0; i < allocnos_num; i++)
    {
      a = allocnos [i];
      if (ALLOCNO_CONFLICT_ALLOCNO_VEC (a) != NULL)
	ira_free (ALLOCNO_CONFLICT_ALLOCNO_VEC (a));
      if (ALLOCNO_HARD_REG_COSTS (a) != NULL)
	ira_free (ALLOCNO_HARD_REG_COSTS (a));
      if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL)
	ira_free (ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
      if (ALLOCNO_CURR_HARD_REG_COSTS (a) != NULL)
	ira_free (ALLOCNO_CURR_HARD_REG_COSTS (a));
      if (ALLOCNO_CURR_CONFLICT_HARD_REG_COSTS (a) != NULL)
	ira_free (ALLOCNO_CURR_CONFLICT_HARD_REG_COSTS (a));
      ira_free (a);
    }
  ira_free (regno_allocno_map);
  VARRAY_FREE (allocno_varray);
}
/* Varray containing references to all created copies.  It is a
   container of array copies.  */
static varray_type copy_varray;

/* The function initializes data concerning allocno copies.  */
static void
initiate_copies (void)
{
  VARRAY_GENERIC_PTR_NOGC_INIT (copy_varray, get_max_uid (), "copies");
  copies = NULL;
  copies_num = 0;
}

/* The function creates and returns a copy of allocnos FIRST and
   SECOND with frequency FREQ and move insn MOVE_INSN.  */
copy_t
create_copy (allocno_t first, allocno_t second, int freq, rtx move_insn)
{
  copy_t cp;

  cp = ira_allocate (sizeof (struct allocno_copy));
  cp->num = copies_num;
  cp->first = first;
  cp->second = second;
  cp->freq = freq;
  cp->move_insn = move_insn;
  VARRAY_PUSH_GENERIC_PTR (copy_varray, cp);
  copies = (copy_t *) &VARRAY_GENERIC_PTR (copy_varray, 0);
  copies_num = VARRAY_ACTIVE_SIZE (copy_varray);
  return cp;
}

/* The function frees memory allocated for all copies.  */
static void
finish_copies (void)
{
  int i;

  for (i = 0; i < copies_num; i++)
    ira_free (copies [i]);
  VARRAY_FREE (copy_varray);
}
/* The current loop tree node.  */
struct ira_loop_tree_node *ira_curr_loop_tree_node;

/* The recursive function traverses the loop tree with root LOOP_NODE
   calling non-null functions PREORDER_FUNC and POSTORDER_FUNC
   correspondingly in preorder and postorder.  The function sets up
   IRA_CURR_LOOP_TREE_NODE.  */
void
traverse_loop_tree (struct ira_loop_tree_node *loop_node,
		    void (*preorder_func) (struct ira_loop_tree_node *),
		    void (*postorder_func) (struct ira_loop_tree_node *))
{
  struct ira_loop_tree_node *subloop_node;

  if (loop_node->bb == NULL)
    ira_curr_loop_tree_node = loop_node;
  if (preorder_func != NULL)
    (*preorder_func) (loop_node);

  for (subloop_node = loop_node->inner;
       subloop_node != NULL;
       subloop_node = subloop_node->next)
    {
      traverse_loop_tree (subloop_node, preorder_func, postorder_func);
      ira_curr_loop_tree_node = loop_node;
    }

  if (postorder_func != NULL)
    (*postorder_func) (loop_node);
}
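
/* For example, allocnos for the whole function are created with a
   preorder visitor and caps with a postorder visitor (see
   create_allocnos and ira_build below):

     traverse_loop_tree (ira_loop_tree_root,
			 create_loop_tree_node_allocnos, NULL);
     traverse_loop_tree (ira_loop_tree_root, NULL,
			 create_loop_tree_node_caps);
*/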
/* The current basic block.  */
static basic_block curr_bb;

/* This recursive function creates allocnos corresponding to
   pseudo-registers contained in X.  Nonzero OUTPUT_P means that X is
   an lvalue.  */
static void
create_insn_allocnos (rtx x, int output_p)
{
  int i, j;
  const char *fmt;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      int regno;

      if ((regno = REGNO (x)) >= FIRST_PSEUDO_REGISTER)
	{
	  allocno_t a;

	  if ((a = ira_curr_loop_tree_node->regno_allocno_map [regno]) == NULL)
	    a = create_allocno (regno, FALSE, ira_curr_loop_tree_node);

	  ALLOCNO_FREQ (a) += REG_FREQ_FROM_BB (curr_bb);
	  bitmap_set_bit (ira_curr_loop_tree_node->mentioned_allocnos,
			  ALLOCNO_NUM (a));
	  if (output_p)
	    bitmap_set_bit (ira_curr_loop_tree_node->modified_regnos, regno);
	}
      return;
    }
  else if (code == SET)
    {
      create_insn_allocnos (SET_DEST (x), TRUE);
      create_insn_allocnos (SET_SRC (x), FALSE);
      return;
    }
  else if (code == CLOBBER)
    {
      create_insn_allocnos (XEXP (x, 0), TRUE);
      return;
    }
  else if (code == MEM)
    {
      create_insn_allocnos (XEXP (x, 0), FALSE);
      return;
    }
  else if (code == PRE_DEC || code == POST_DEC || code == PRE_INC
	   || code == POST_INC || code == POST_MODIFY || code == PRE_MODIFY)
    {
      create_insn_allocnos (XEXP (x, 0), TRUE);
      create_insn_allocnos (XEXP (x, 0), FALSE);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	create_insn_allocnos (XEXP (x, i), output_p);
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  create_insn_allocnos (XVECEXP (x, i, j), output_p);
    }
}
/* The function creates allocnos corresponding to pseudo-registers
   living in basic blocks represented by the corresponding loop tree
   node BB_NODE.  */
static void
create_bb_allocnos (struct ira_loop_tree_node *bb_node)
{
  basic_block bb;
  rtx insn;
  unsigned int i;
  bitmap_iterator bi;
  allocno_t *map;

  curr_bb = bb = bb_node->bb;
  ira_assert (bb != NULL);
  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      create_insn_allocnos (PATTERN (insn), FALSE);
  /* It might be an allocno living through from one subloop to
     another.  */
  map = ira_curr_loop_tree_node->regno_allocno_map;
  EXECUTE_IF_SET_IN_REG_SET (DF_LR_IN (bb), FIRST_PSEUDO_REGISTER, i, bi)
    if (map [i] == NULL)
      create_allocno (i, FALSE, ira_curr_loop_tree_node);
}
/* The function creates allocnos corresponding to pseudo-registers
   living on edge E (a loop entry or exit).  It also finds allocnos
   living on the loop border.  */
static void
create_loop_allocnos (edge e)
{
  unsigned int i;
  bitmap live_in_regs, border_allocnos;
  bitmap_iterator bi;
  allocno_t *map;

  live_in_regs = DF_LR_IN (e->dest);
  map = ira_curr_loop_tree_node->regno_allocno_map;
  border_allocnos = ira_curr_loop_tree_node->border_allocnos;
  EXECUTE_IF_SET_IN_REG_SET (DF_LR_OUT (e->src),
			     FIRST_PSEUDO_REGISTER, i, bi)
    if (bitmap_bit_p (live_in_regs, i))
      {
	if (map [i] == NULL)
	  create_allocno (i, FALSE, ira_curr_loop_tree_node);
	bitmap_set_bit (border_allocnos, ALLOCNO_NUM (map [i]));
      }
}
/* The function creates allocnos corresponding to pseudo-registers
   living in the loop represented by the corresponding loop tree node
   LOOP_NODE.  */
static void
create_loop_tree_node_allocnos (struct ira_loop_tree_node *loop_node)
{
  if (loop_node->bb != NULL)
    create_bb_allocnos (loop_node);
  else if (loop_node != ira_loop_tree_root)
    {
      int i;
      edge_iterator ei;
      edge e;
      VEC (edge, heap) *edges;

      FOR_EACH_EDGE (e, ei, loop_node->loop->header->preds)
	if (e->src != loop_node->loop->latch)
	  create_loop_allocnos (e);

      edges = get_loop_exit_edges (loop_node->loop);
      for (i = 0; VEC_iterate (edge, edges, i, e); i++)
	create_loop_allocnos (e);
      VEC_free (edge, heap, edges);
    }
}
/* The function creates allocnos corresponding to pseudo-registers in
   the current function.  The function traverses the loop tree for
   this.  */
static void
create_allocnos (void)
{
  int i;
  allocno_t a, father_a;
  struct ira_loop_tree_node *father;

  traverse_loop_tree (ira_loop_tree_root, create_loop_tree_node_allocnos, NULL);
  if (flag_ira_algorithm != IRA_ALGORITHM_REGIONAL
      && flag_ira_algorithm != IRA_ALGORITHM_MIXED)
    return;
  /* Propagate frequencies for the regional register allocator.  */
  for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    for (a = regno_allocno_map [i];
	 a != NULL;
	 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
      if ((father = ALLOCNO_LOOP_TREE_NODE (a)->father) != NULL
	  && (father_a = father->regno_allocno_map [i]) != NULL)
	ALLOCNO_FREQ (father_a) += ALLOCNO_FREQ (a);
}
/* Bitmap of allocnos living only inside the current loop.  */
static bitmap local_allocnos_bitmap;

/* The function creates caps representing allocnos living only inside
   the loop given by LOOP_NODE on a higher loop level.  */
static void
create_loop_tree_node_caps (struct ira_loop_tree_node *loop_node)
{
  unsigned int i;
  bitmap_iterator bi;
  allocno_t a, cap, another_a, father_a;
  copy_t cp, next_cp;
  struct ira_loop_tree_node *father;

  if (loop_node->bb != NULL || loop_node == ira_loop_tree_root)
    return;
  bitmap_and_compl (local_allocnos_bitmap, loop_node->mentioned_allocnos,
		    loop_node->border_allocnos);
  father = loop_node->father;
  EXECUTE_IF_SET_IN_BITMAP (local_allocnos_bitmap, 0, i, bi)
    if (father->regno_allocno_map [ALLOCNO_REGNO (allocnos [i])] == NULL)
      create_cap_allocno (allocnos [i]);
  EXECUTE_IF_SET_IN_BITMAP (local_allocnos_bitmap, 0, i, bi)
    {
      a = allocnos [i];
      cap = ALLOCNO_CAP (a);
      if (cap == NULL)
	continue;
      for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
	{
	  if (cp->first == a)
	    {
	      next_cp = cp->next_first_allocno_copy;
	      another_a = cp->second;
	    }
	  else if (cp->second == a)
	    {
	      next_cp = cp->next_second_allocno_copy;
	      another_a = cp->first;
	    }
	  else
	    gcc_unreachable ();
	  father_a = father->regno_allocno_map [ALLOCNO_REGNO (another_a)];
	  if (father_a != NULL)
	    /* The upper level allocno might not exist because it is
	       not mentioned or does not live on the loop border; it
	       just lives at the start of a BB of the loop.  */
	    add_allocno_copy (cap, father_a, cp->freq, cp->move_insn);
	}
    }
}
#ifdef ENABLE_IRA_CHECKING
/* The function checks that there are no coalesced allocnos.  */
static void
check_coalesced_allocnos (void)
{
  int i;
  allocno_t a;

  for (i = 0; i < allocnos_num; i++)
    {
      a = allocnos [i];
      ira_assert (ALLOCNO_FIRST_COALESCED_ALLOCNO (a) == a
		  && ALLOCNO_NEXT_COALESCED_ALLOCNO (a) == a);
    }
}
#endif
/* This entry function creates the internal representation for IRA
   (allocnos, copies, loop tree nodes).  If LOOPS_P is zero, the
   nodes corresponding to the loops (except the root which
   corresponds to the whole function), and correspondingly the
   allocnos for the loops, will not be created (it will be done only
   for basic blocks).  Such a value is used for Chaitin-Briggs and
   Chow's priority coloring.  The function returns nonzero if we
   generate a loop structure (besides the node representing the whole
   function) for regional allocation.  */
int
ira_build (int loops_p)
{
  unsigned int i;
  loop_p loop;

  df_analyze ();

  CLEAR_HARD_REG_SET (cfun->emit->call_used_regs);
  initiate_calls ();
  initiate_allocnos ();
  initiate_copies ();
  ira_assert (current_loops == NULL);
  flow_loops_find (&ira_loops);
  current_loops = &ira_loops;
  create_loop_tree_nodes (loops_p);
  form_loop_tree ();
  create_allocnos ();
  ira_costs ();
  ira_build_conflicts ();
  tune_allocno_costs_and_cover_classes ();
  if (flag_ira_algorithm == IRA_ALGORITHM_REGIONAL
      || flag_ira_algorithm == IRA_ALGORITHM_MIXED)
    {
      local_allocnos_bitmap = ira_allocate_bitmap ();
      traverse_loop_tree (ira_loop_tree_root, NULL,
			  create_loop_tree_node_caps);
      ira_free_bitmap (local_allocnos_bitmap);
      for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
	if (ira_loop_nodes [i].regno_allocno_map != NULL
	    && ira_loop_tree_root != &ira_loop_nodes [i])
	  return TRUE;
    }
  return FALSE;
}
/* The function releases data created by function ira_build.  */
void
ira_destroy (void)
{
  basic_block bb;

  finish_loop_tree_nodes ();
  ira_assert (current_loops == &ira_loops);
  flow_loops_free (&ira_loops);
  free_dominance_info (CDI_DOMINATORS);
  FOR_ALL_BB (bb)
    bb->loop_father = NULL;
  current_loops = NULL;
  finish_copies ();
  finish_allocnos ();
  finish_calls ();
}