[official-gcc.git] / gcc / ira-build.c
1 /* Building internal representation for IRA.
2 Copyright (C) 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "target.h"
29 #include "regs.h"
30 #include "flags.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "insn-config.h"
34 #include "recog.h"
35 #include "toplev.h"
36 #include "params.h"
37 #include "df.h"
38 #include "output.h"
39 #include "reload.h"
40 #include "sparseset.h"
41 #include "ira-int.h"
43 static ira_copy_t find_allocno_copy (ira_allocno_t, ira_allocno_t, rtx,
44 ira_loop_tree_node_t);
46 /* The root of the loop tree corresponding to the whole function. */
47 ira_loop_tree_node_t ira_loop_tree_root;
49 /* Height of the loop tree. */
50 int ira_loop_tree_height;
52 /* All nodes representing basic blocks are referred to through the
53 following array. We cannot use basic block member `aux' for this
54 because it is used for insertion of insns on edges. */
55 ira_loop_tree_node_t ira_bb_nodes;
57 /* All nodes representing loops are referred to through the following
58 array. */
59 ira_loop_tree_node_t ira_loop_nodes;
61 /* Map regno -> allocnos with given regno (see comments for
62 allocno member `next_regno_allocno'). */
63 ira_allocno_t *ira_regno_allocno_map;
65 /* Array of references to all allocnos. The order number of the
66 allocno corresponds to the index in the array. Removed allocnos
67 have NULL element value. */
68 ira_allocno_t *ira_allocnos;
70 /* Size of the previous array. */
71 int ira_allocnos_num;
73 /* Map conflict id -> allocno with given conflict id (see comments for
74 allocno member `conflict_id'). */
75 ira_allocno_t *ira_conflict_id_allocno_map;
77 /* Array of references to all copies. The order number of the copy
78 corresponds to the index in the array. Removed copies have NULL
79 element value. */
80 ira_copy_t *ira_copies;
82 /* Size of the previous array. */
83 int ira_copies_num;
87 /* LAST_BASIC_BLOCK before generating additional insns because of live
88 range splitting. Emitting insns on a critical edge creates a new
89 basic block. */
90 static int last_basic_block_before_change;
92 /* The following function allocates the loop tree nodes. If LOOPS_P
93 is FALSE, the nodes corresponding to the loops (except the root
94 which corresponds to the whole function) will not be allocated, but
95 nodes will still be allocated for basic blocks. */
96 static void
97 create_loop_tree_nodes (bool loops_p)
99 unsigned int i, j;
100 int max_regno;
101 bool skip_p;
102 edge_iterator ei;
103 edge e;
104 VEC (edge, heap) *edges;
105 loop_p loop;
107 ira_bb_nodes
108 = ((struct ira_loop_tree_node *)
109 ira_allocate (sizeof (struct ira_loop_tree_node) * last_basic_block));
110 last_basic_block_before_change = last_basic_block;
111 for (i = 0; i < (unsigned int) last_basic_block; i++)
113 ira_bb_nodes[i].regno_allocno_map = NULL;
114 memset (ira_bb_nodes[i].reg_pressure, 0,
115 sizeof (ira_bb_nodes[i].reg_pressure));
116 ira_bb_nodes[i].all_allocnos = NULL;
117 ira_bb_nodes[i].modified_regnos = NULL;
118 ira_bb_nodes[i].border_allocnos = NULL;
119 ira_bb_nodes[i].local_copies = NULL;
121 ira_loop_nodes = ((struct ira_loop_tree_node *)
122 ira_allocate (sizeof (struct ira_loop_tree_node)
123 * VEC_length (loop_p, ira_loops.larray)));
124 max_regno = max_reg_num ();
125 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
127 if (loop != ira_loops.tree_root)
129 ira_loop_nodes[i].regno_allocno_map = NULL;
130 if (! loops_p)
131 continue;
132 skip_p = false;
133 FOR_EACH_EDGE (e, ei, loop->header->preds)
134 if (e->src != loop->latch
135 && (e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
137 skip_p = true;
138 break;
140 if (skip_p)
141 continue;
142 edges = get_loop_exit_edges (loop);
143 for (j = 0; VEC_iterate (edge, edges, j, e); j++)
144 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
146 skip_p = true;
147 break;
149 VEC_free (edge, heap, edges);
150 if (skip_p)
151 continue;
153 ira_loop_nodes[i].regno_allocno_map
154 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t) * max_regno);
155 memset (ira_loop_nodes[i].regno_allocno_map, 0,
156 sizeof (ira_allocno_t) * max_regno);
157 memset (ira_loop_nodes[i].reg_pressure, 0,
158 sizeof (ira_loop_nodes[i].reg_pressure));
159 ira_loop_nodes[i].all_allocnos = ira_allocate_bitmap ();
160 ira_loop_nodes[i].modified_regnos = ira_allocate_bitmap ();
161 ira_loop_nodes[i].border_allocnos = ira_allocate_bitmap ();
162 ira_loop_nodes[i].local_copies = ira_allocate_bitmap ();
166 /* The function returns TRUE if there is more than one allocation
167 region. */
168 static bool
169 more_one_region_p (void)
171 unsigned int i;
172 loop_p loop;
174 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
175 if (ira_loop_nodes[i].regno_allocno_map != NULL
176 && ira_loop_tree_root != &ira_loop_nodes[i])
177 return true;
178 return false;
181 /* Free the loop tree node of a loop. */
182 static void
183 finish_loop_tree_node (ira_loop_tree_node_t loop)
185 if (loop->regno_allocno_map != NULL)
187 ira_assert (loop->bb == NULL);
188 ira_free_bitmap (loop->local_copies);
189 ira_free_bitmap (loop->border_allocnos);
190 ira_free_bitmap (loop->modified_regnos);
191 ira_free_bitmap (loop->all_allocnos);
192 ira_free (loop->regno_allocno_map);
193 loop->regno_allocno_map = NULL;
197 /* Free the loop tree nodes. */
198 static void
199 finish_loop_tree_nodes (void)
201 unsigned int i;
202 loop_p loop;
204 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
205 finish_loop_tree_node (&ira_loop_nodes[i]);
206 ira_free (ira_loop_nodes);
207 for (i = 0; i < (unsigned int) last_basic_block_before_change; i++)
209 if (ira_bb_nodes[i].local_copies != NULL)
210 ira_free_bitmap (ira_bb_nodes[i].local_copies);
211 if (ira_bb_nodes[i].border_allocnos != NULL)
212 ira_free_bitmap (ira_bb_nodes[i].border_allocnos);
213 if (ira_bb_nodes[i].modified_regnos != NULL)
214 ira_free_bitmap (ira_bb_nodes[i].modified_regnos);
215 if (ira_bb_nodes[i].all_allocnos != NULL)
216 ira_free_bitmap (ira_bb_nodes[i].all_allocnos);
217 if (ira_bb_nodes[i].regno_allocno_map != NULL)
218 ira_free (ira_bb_nodes[i].regno_allocno_map);
220 ira_free (ira_bb_nodes);
225 /* The following recursive function adds LOOP to the loop tree
226 hierarchy. LOOP is added only once. */
227 static void
228 add_loop_to_tree (struct loop *loop)
230 struct loop *parent;
231 ira_loop_tree_node_t loop_node, parent_node;
233 /* We cannot use loop node access macros here because of potential
234 checking and because the nodes are not initialized enough
235 yet. */
236 if (loop_outer (loop) != NULL)
237 add_loop_to_tree (loop_outer (loop));
238 if (ira_loop_nodes[loop->num].regno_allocno_map != NULL
239 && ira_loop_nodes[loop->num].children == NULL)
241 /* We have not added loop node to the tree yet. */
242 loop_node = &ira_loop_nodes[loop->num];
243 loop_node->loop = loop;
244 loop_node->bb = NULL;
245 for (parent = loop_outer (loop);
246 parent != NULL;
247 parent = loop_outer (parent))
248 if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
249 break;
250 if (parent == NULL)
252 loop_node->next = NULL;
253 loop_node->subloop_next = NULL;
254 loop_node->parent = NULL;
256 else
258 parent_node = &ira_loop_nodes[parent->num];
259 loop_node->next = parent_node->children;
260 parent_node->children = loop_node;
261 loop_node->subloop_next = parent_node->subloops;
262 parent_node->subloops = loop_node;
263 loop_node->parent = parent_node;
268 /* The following recursive function sets up levels of nodes of the
269 tree given its root LOOP_NODE. The enumeration starts with LEVEL.
270 The function returns the maximal level in the tree plus 1. */
271 static int
272 setup_loop_tree_level (ira_loop_tree_node_t loop_node, int level)
274 int height, max_height;
275 ira_loop_tree_node_t subloop_node;
277 ira_assert (loop_node->bb == NULL);
278 loop_node->level = level;
279 max_height = level + 1;
280 for (subloop_node = loop_node->subloops;
281 subloop_node != NULL;
282 subloop_node = subloop_node->subloop_next)
284 ira_assert (subloop_node->bb == NULL);
285 height = setup_loop_tree_level (subloop_node, level + 1);
286 if (height > max_height)
287 max_height = height;
289 return max_height;
292 /* Create the loop tree. The algorithm is designed to provide correct
293 order of loops (they are ordered by their last loop BB) and basic
294 blocks in the chain formed by member next. */
295 static void
296 form_loop_tree (void)
298 unsigned int i;
299 basic_block bb;
300 struct loop *parent;
301 ira_loop_tree_node_t bb_node, loop_node;
302 loop_p loop;
304 /* We cannot use loop/bb node access macros because of potential
305 checking and because the nodes are not initialized enough
306 yet. */
307 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
308 if (ira_loop_nodes[i].regno_allocno_map != NULL)
310 ira_loop_nodes[i].children = NULL;
311 ira_loop_nodes[i].subloops = NULL;
313 FOR_EACH_BB (bb)
315 bb_node = &ira_bb_nodes[bb->index];
316 bb_node->bb = bb;
317 bb_node->loop = NULL;
318 bb_node->subloops = NULL;
319 bb_node->children = NULL;
320 bb_node->subloop_next = NULL;
321 bb_node->next = NULL;
322 for (parent = bb->loop_father;
323 parent != NULL;
324 parent = loop_outer (parent))
325 if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
326 break;
327 add_loop_to_tree (parent);
328 loop_node = &ira_loop_nodes[parent->num];
329 bb_node->next = loop_node->children;
330 bb_node->parent = loop_node;
331 loop_node->children = bb_node;
333 ira_loop_tree_root = IRA_LOOP_NODE_BY_INDEX (ira_loops.tree_root->num);
334 ira_loop_tree_height = setup_loop_tree_level (ira_loop_tree_root, 0);
335 ira_assert (ira_loop_tree_root->regno_allocno_map != NULL);
340 /* Rebuild IRA_REGNO_ALLOCNO_MAP and REGNO_ALLOCNO_MAPs of the loop
341 tree nodes. */
342 static void
343 rebuild_regno_allocno_maps (void)
345 unsigned int l;
346 int max_regno, regno;
347 ira_allocno_t a;
348 ira_loop_tree_node_t loop_tree_node;
349 loop_p loop;
350 ira_allocno_iterator ai;
352 max_regno = max_reg_num ();
353 for (l = 0; VEC_iterate (loop_p, ira_loops.larray, l, loop); l++)
354 if (ira_loop_nodes[l].regno_allocno_map != NULL)
356 ira_free (ira_loop_nodes[l].regno_allocno_map);
357 ira_loop_nodes[l].regno_allocno_map
358 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
359 * max_regno);
360 memset (ira_loop_nodes[l].regno_allocno_map, 0,
361 sizeof (ira_allocno_t) * max_regno);
363 ira_free (ira_regno_allocno_map);
364 ira_regno_allocno_map
365 = (ira_allocno_t *) ira_allocate (max_regno * sizeof (ira_allocno_t));
366 memset (ira_regno_allocno_map, 0, max_regno * sizeof (ira_allocno_t));
367 FOR_EACH_ALLOCNO (a, ai)
369 if (ALLOCNO_CAP_MEMBER (a) != NULL)
370 /* Caps are not in the regno allocno maps. */
371 continue;
372 regno = ALLOCNO_REGNO (a);
373 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
374 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
375 ira_regno_allocno_map[regno] = a;
376 if (loop_tree_node->regno_allocno_map[regno] == NULL)
377 /* Remember that we can create temporary allocnos to break
378 cycles in register shuffle. */
379 loop_tree_node->regno_allocno_map[regno] = a;
385 /* Pools for allocnos and allocno live ranges. */
386 static alloc_pool allocno_pool, allocno_live_range_pool;
388 /* Vec containing references to all created allocnos. It is a
389 container of the array ira_allocnos. */
390 static VEC(ira_allocno_t,heap) *allocno_vec;
392 /* Vec containing references to all created allocnos. It is a
393 container of ira_conflict_id_allocno_map. */
394 static VEC(ira_allocno_t,heap) *ira_conflict_id_allocno_map_vec;
396 /* Initialize data concerning allocnos. */
397 static void
398 initiate_allocnos (void)
400 allocno_live_range_pool
401 = create_alloc_pool ("allocno live ranges",
402 sizeof (struct ira_allocno_live_range), 100);
403 allocno_pool
404 = create_alloc_pool ("allocnos", sizeof (struct ira_allocno), 100);
405 allocno_vec = VEC_alloc (ira_allocno_t, heap, max_reg_num () * 2);
406 ira_allocnos = NULL;
407 ira_allocnos_num = 0;
408 ira_conflict_id_allocno_map_vec
409 = VEC_alloc (ira_allocno_t, heap, max_reg_num () * 2);
410 ira_conflict_id_allocno_map = NULL;
411 ira_regno_allocno_map
412 = (ira_allocno_t *) ira_allocate (max_reg_num () * sizeof (ira_allocno_t));
413 memset (ira_regno_allocno_map, 0, max_reg_num () * sizeof (ira_allocno_t));
416 /* Create and return the allocno corresponding to REGNO in
417 LOOP_TREE_NODE. Add the allocno to the list of allocnos with the
418 same regno if CAP_P is FALSE. */
419 ira_allocno_t
420 ira_create_allocno (int regno, bool cap_p, ira_loop_tree_node_t loop_tree_node)
422 ira_allocno_t a;
424 a = (ira_allocno_t) pool_alloc (allocno_pool);
425 ALLOCNO_REGNO (a) = regno;
426 ALLOCNO_LOOP_TREE_NODE (a) = loop_tree_node;
427 if (! cap_p)
429 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
430 ira_regno_allocno_map[regno] = a;
431 if (loop_tree_node->regno_allocno_map[regno] == NULL)
432 /* Remember that we can create temporary allocnos to break
433 cycles in register shuffle on region borders (see
434 ira-emit.c). */
435 loop_tree_node->regno_allocno_map[regno] = a;
437 ALLOCNO_CAP (a) = NULL;
438 ALLOCNO_CAP_MEMBER (a) = NULL;
439 ALLOCNO_NUM (a) = ira_allocnos_num;
440 bitmap_set_bit (loop_tree_node->all_allocnos, ALLOCNO_NUM (a));
441 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) = NULL;
442 ALLOCNO_CONFLICT_ALLOCNOS_NUM (a) = 0;
443 COPY_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a), ira_no_alloc_regs);
444 COPY_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a), ira_no_alloc_regs);
445 ALLOCNO_NREFS (a) = 0;
446 ALLOCNO_FREQ (a) = 0;
447 ALLOCNO_HARD_REGNO (a) = -1;
448 ALLOCNO_CALL_FREQ (a) = 0;
449 ALLOCNO_CALLS_CROSSED_NUM (a) = 0;
450 #ifdef STACK_REGS
451 ALLOCNO_NO_STACK_REG_P (a) = false;
452 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = false;
453 #endif
454 ALLOCNO_MEM_OPTIMIZED_DEST (a) = NULL;
455 ALLOCNO_MEM_OPTIMIZED_DEST_P (a) = false;
456 ALLOCNO_SOMEWHERE_RENAMED_P (a) = false;
457 ALLOCNO_CHILD_RENAMED_P (a) = false;
458 ALLOCNO_DONT_REASSIGN_P (a) = false;
459 ALLOCNO_IN_GRAPH_P (a) = false;
460 ALLOCNO_ASSIGNED_P (a) = false;
461 ALLOCNO_MAY_BE_SPILLED_P (a) = false;
462 ALLOCNO_SPLAY_REMOVED_P (a) = false;
463 ALLOCNO_CONFLICT_VEC_P (a) = false;
464 ALLOCNO_MODE (a) = (regno < 0 ? VOIDmode : PSEUDO_REGNO_MODE (regno));
465 ALLOCNO_COPIES (a) = NULL;
466 ALLOCNO_HARD_REG_COSTS (a) = NULL;
467 ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
468 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
469 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
470 ALLOCNO_LEFT_CONFLICTS_NUM (a) = -1;
471 ALLOCNO_COVER_CLASS (a) = NO_REGS;
472 ALLOCNO_UPDATED_COVER_CLASS_COST (a) = 0;
473 ALLOCNO_COVER_CLASS_COST (a) = 0;
474 ALLOCNO_MEMORY_COST (a) = 0;
475 ALLOCNO_UPDATED_MEMORY_COST (a) = 0;
476 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) = 0;
477 ALLOCNO_NEXT_BUCKET_ALLOCNO (a) = NULL;
478 ALLOCNO_PREV_BUCKET_ALLOCNO (a) = NULL;
479 ALLOCNO_FIRST_COALESCED_ALLOCNO (a) = a;
480 ALLOCNO_NEXT_COALESCED_ALLOCNO (a) = a;
481 ALLOCNO_LIVE_RANGES (a) = NULL;
482 ALLOCNO_MIN (a) = INT_MAX;
483 ALLOCNO_MAX (a) = -1;
484 ALLOCNO_CONFLICT_ID (a) = ira_allocnos_num;
485 VEC_safe_push (ira_allocno_t, heap, allocno_vec, a);
486 ira_allocnos = VEC_address (ira_allocno_t, allocno_vec);
487 ira_allocnos_num = VEC_length (ira_allocno_t, allocno_vec);
488 VEC_safe_push (ira_allocno_t, heap, ira_conflict_id_allocno_map_vec, a);
489 ira_conflict_id_allocno_map
490 = VEC_address (ira_allocno_t, ira_conflict_id_allocno_map_vec);
491 return a;
494 /* Set up cover class for A and update its conflict hard registers. */
495 void
496 ira_set_allocno_cover_class (ira_allocno_t a, enum reg_class cover_class)
498 ALLOCNO_COVER_CLASS (a) = cover_class;
499 IOR_COMPL_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
500 reg_class_contents[cover_class]);
501 IOR_COMPL_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
502 reg_class_contents[cover_class]);
505 /* Return TRUE if the conflict vector with NUM elements is more
506 profitable than a conflict bit vector for A. */
507 bool
508 ira_conflict_vector_profitable_p (ira_allocno_t a, int num)
510 int nw;
512 if (ALLOCNO_MAX (a) < ALLOCNO_MIN (a))
513 /* We prefer a bit vector in this case because it does not result
514 in any allocation. */
515 return false;
517 nw = (ALLOCNO_MAX (a) - ALLOCNO_MIN (a) + IRA_INT_BITS) / IRA_INT_BITS;
518 return (2 * sizeof (ira_allocno_t) * (num + 1)
519 < 3 * nw * sizeof (IRA_INT_TYPE));
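To make the size trade-off above concrete, here is a small standalone sketch of the same inequality. The 8-byte pointer size, 4-byte IRA_INT_TYPE and 32-bit IRA_INT_BITS are illustrative assumptions, not values fixed by this file.

#include <stdio.h>

/* Standalone model of the profitability test above: a conflict
   vector of NUM + 1 pointers is chosen only when it is smaller than
   1.5 times the bit vector covering conflict ids MIN..MAX.  */
static int
vector_profitable_p (int num, int min, int max)
{
  int nw = (max - min + 32) / 32;           /* 32-bit words in the bit vector */
  return 2 * 8 * (num + 1) < 3 * nw * 4;    /* the same inequality as above */
}

int
main (void)
{
  /* 3 conflicts spread over 1000 ids: the 4-pointer vector wins.  */
  printf ("%d\n", vector_profitable_p (3, 0, 999));    /* prints 1 */
  /* 100 conflicts over the same range: the bit vector wins.  */
  printf ("%d\n", vector_profitable_p (100, 0, 999));  /* prints 0 */
  return 0;
}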
522 /* Allocate and initialize the conflict vector of A for NUM
523 conflicting allocnos. */
524 void
525 ira_allocate_allocno_conflict_vec (ira_allocno_t a, int num)
527 int size;
528 ira_allocno_t *vec;
530 ira_assert (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) == NULL);
531 num++; /* for NULL end marker */
532 size = sizeof (ira_allocno_t) * num;
533 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) = ira_allocate (size);
534 vec = (ira_allocno_t *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a);
535 vec[0] = NULL;
536 ALLOCNO_CONFLICT_ALLOCNOS_NUM (a) = 0;
537 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a) = size;
538 ALLOCNO_CONFLICT_VEC_P (a) = true;
541 /* Allocate and initialize the conflict bit vector of A. */
542 static void
543 allocate_allocno_conflict_bit_vec (ira_allocno_t a)
545 unsigned int size;
547 ira_assert (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) == NULL);
548 size = ((ALLOCNO_MAX (a) - ALLOCNO_MIN (a) + IRA_INT_BITS)
549 / IRA_INT_BITS * sizeof (IRA_INT_TYPE));
550 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) = ira_allocate (size);
551 memset (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a), 0, size);
552 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a) = size;
553 ALLOCNO_CONFLICT_VEC_P (a) = false;
556 /* Allocate and initialize the conflict vector or conflict bit vector
557 of A for NUM conflicting allocnos, whichever is more profitable. */
558 void
559 ira_allocate_allocno_conflicts (ira_allocno_t a, int num)
561 if (ira_conflict_vector_profitable_p (a, num))
562 ira_allocate_allocno_conflict_vec (a, num);
563 else
564 allocate_allocno_conflict_bit_vec (a);
567 /* Add A2 to the conflicts of A1. */
568 static void
569 add_to_allocno_conflicts (ira_allocno_t a1, ira_allocno_t a2)
571 int num;
572 unsigned int size;
574 if (ALLOCNO_CONFLICT_VEC_P (a1))
576 ira_allocno_t *vec;
578 num = ALLOCNO_CONFLICT_ALLOCNOS_NUM (a1) + 2;
579 if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1)
580 >= num * sizeof (ira_allocno_t))
581 vec = (ira_allocno_t *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1);
582 else
584 size = (3 * num / 2 + 1) * sizeof (ira_allocno_t);
585 vec = (ira_allocno_t *) ira_allocate (size);
586 memcpy (vec, ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1),
587 sizeof (ira_allocno_t) * ALLOCNO_CONFLICT_ALLOCNOS_NUM (a1));
588 ira_free (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1));
589 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1) = vec;
590 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) = size;
592 vec[num - 2] = a2;
593 vec[num - 1] = NULL;
594 ALLOCNO_CONFLICT_ALLOCNOS_NUM (a1)++;
596 else
598 int nw, added_head_nw, id;
599 IRA_INT_TYPE *vec;
601 id = ALLOCNO_CONFLICT_ID (a2);
602 vec = (IRA_INT_TYPE *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1);
603 if (ALLOCNO_MIN (a1) > id)
605 /* Expand head of the bit vector. */
606 added_head_nw = (ALLOCNO_MIN (a1) - id - 1) / IRA_INT_BITS + 1;
607 nw = (ALLOCNO_MAX (a1) - ALLOCNO_MIN (a1)) / IRA_INT_BITS + 1;
608 size = (nw + added_head_nw) * sizeof (IRA_INT_TYPE);
609 if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) >= size)
611 memmove ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
612 vec, nw * sizeof (IRA_INT_TYPE));
613 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
615 else
617 size
618 = (3 * (nw + added_head_nw) / 2 + 1) * sizeof (IRA_INT_TYPE);
619 vec = (IRA_INT_TYPE *) ira_allocate (size);
620 memcpy ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
621 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1),
622 nw * sizeof (IRA_INT_TYPE));
623 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
624 memset ((char *) vec
625 + (nw + added_head_nw) * sizeof (IRA_INT_TYPE),
626 0, size - (nw + added_head_nw) * sizeof (IRA_INT_TYPE));
627 ira_free (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1));
628 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1) = vec;
629 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) = size;
631 ALLOCNO_MIN (a1) -= added_head_nw * IRA_INT_BITS;
633 else if (ALLOCNO_MAX (a1) < id)
635 nw = (id - ALLOCNO_MIN (a1)) / IRA_INT_BITS + 1;
636 size = nw * sizeof (IRA_INT_TYPE);
637 if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) < size)
639 /* Expand tail of the bit vector. */
640 size = (3 * nw / 2 + 1) * sizeof (IRA_INT_TYPE);
641 vec = (IRA_INT_TYPE *) ira_allocate (size);
642 memcpy (vec, ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1),
643 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1));
644 memset ((char *) vec + ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1),
645 0, size - ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1));
646 ira_free (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1));
647 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1) = vec;
648 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) = size;
650 ALLOCNO_MAX (a1) = id;
652 SET_ALLOCNO_SET_BIT (vec, id, ALLOCNO_MIN (a1), ALLOCNO_MAX (a1));
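The bit vector indexes conflict ids relative to ALLOCNO_MIN. The real SET_ALLOCNO_SET_BIT macro lives in ira-int.h; the hypothetical helper below only models the offset arithmetic, assuming 32-bit words.

/* Hypothetical helper, not part of IRA: set the bit for conflict id ID
   in a vector whose first bit corresponds to conflict id MIN.  */
static void
set_conflict_bit (unsigned int *vec, int id, int min)
{
  int offset = id - min;                 /* position relative to the vector start */
  vec[offset / 32] |= 1u << (offset % 32);
}

With ALLOCNO_MIN (a1) == 100, for example, conflict id 137 would be recorded as bit 37 of the vector, i.e. bit 5 of word 1.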
656 /* Add A1 to the conflicts of A2 and vice versa. */
657 void
658 ira_add_allocno_conflict (ira_allocno_t a1, ira_allocno_t a2)
660 add_to_allocno_conflicts (a1, a2);
661 add_to_allocno_conflicts (a2, a1);
664 /* Clear all conflicts of allocno A. */
665 static void
666 clear_allocno_conflicts (ira_allocno_t a)
668 if (ALLOCNO_CONFLICT_VEC_P (a))
670 ALLOCNO_CONFLICT_ALLOCNOS_NUM (a) = 0;
671 ((ira_allocno_t *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a))[0] = NULL;
673 else if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a) != 0)
675 int nw;
677 nw = (ALLOCNO_MAX (a) - ALLOCNO_MIN (a)) / IRA_INT_BITS + 1;
678 memset (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a), 0,
679 nw * sizeof (IRA_INT_TYPE));
683 /* The array used to find duplications in conflict vectors of
684 allocnos. */
685 static int *allocno_conflict_check;
687 /* The value used to mark allocno presence in the conflict vector of
688 the current allocno. */
689 static int curr_allocno_conflict_check_tick;
691 /* Remove duplications in conflict vector of A. */
692 static void
693 compress_allocno_conflict_vec (ira_allocno_t a)
695 ira_allocno_t *vec, conflict_a;
696 int i, j;
698 ira_assert (ALLOCNO_CONFLICT_VEC_P (a));
699 vec = (ira_allocno_t *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a);
700 curr_allocno_conflict_check_tick++;
701 for (i = j = 0; (conflict_a = vec[i]) != NULL; i++)
703 if (allocno_conflict_check[ALLOCNO_NUM (conflict_a)]
704 != curr_allocno_conflict_check_tick)
706 allocno_conflict_check[ALLOCNO_NUM (conflict_a)]
707 = curr_allocno_conflict_check_tick;
708 vec[j++] = conflict_a;
711 ALLOCNO_CONFLICT_ALLOCNOS_NUM (a) = j;
712 vec[j] = NULL;
715 /* Remove duplications in conflict vectors of all allocnos. */
716 static void
717 compress_conflict_vecs (void)
719 ira_allocno_t a;
720 ira_allocno_iterator ai;
722 allocno_conflict_check
723 = (int *) ira_allocate (sizeof (int) * ira_allocnos_num);
724 memset (allocno_conflict_check, 0, sizeof (int) * ira_allocnos_num);
725 curr_allocno_conflict_check_tick = 0;
726 FOR_EACH_ALLOCNO (a, ai)
727 if (ALLOCNO_CONFLICT_VEC_P (a))
728 compress_allocno_conflict_vec (a);
729 ira_free (allocno_conflict_check);
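The tick-based duplicate check above is a general technique: instead of clearing the whole check array for every allocno, the current tick value marks which entries have been seen during this pass. A minimal standalone sketch of the same idea on a plain int array follows; the names are made up for illustration.

/* Remove duplicates from the N values in VALS, returning the new
   count.  CHECK must be indexable by any value in VALS and start
   zero-filled; TICK is bumped once per call, so CHECK never needs to
   be reset between calls.  */
static int
compress_values (int *vals, int n, int *check, int *tick)
{
  int i, j;

  (*tick)++;
  for (i = j = 0; i < n; i++)
    if (check[vals[i]] != *tick)
      {
        check[vals[i]] = *tick;
        vals[j++] = vals[i];
      }
  return j;
}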
732 /* This recursive function outputs allocno A and if it is a cap the
733 function outputs its members. */
734 void
735 ira_print_expanded_allocno (ira_allocno_t a)
737 basic_block bb;
739 fprintf (ira_dump_file, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
740 if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
741 fprintf (ira_dump_file, ",b%d", bb->index);
742 else
743 fprintf (ira_dump_file, ",l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop->num);
744 if (ALLOCNO_CAP_MEMBER (a) != NULL)
746 fprintf (ira_dump_file, ":");
747 ira_print_expanded_allocno (ALLOCNO_CAP_MEMBER (a));
749 fprintf (ira_dump_file, ")");
752 /* Create and return the cap representing allocno A in the
753 parent loop. */
754 static ira_allocno_t
755 create_cap_allocno (ira_allocno_t a)
757 ira_allocno_t cap;
758 ira_loop_tree_node_t parent;
759 enum reg_class cover_class;
761 ira_assert (ALLOCNO_FIRST_COALESCED_ALLOCNO (a) == a
762 && ALLOCNO_NEXT_COALESCED_ALLOCNO (a) == a);
763 parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
764 cap = ira_create_allocno (ALLOCNO_REGNO (a), true, parent);
765 ALLOCNO_MODE (cap) = ALLOCNO_MODE (a);
766 cover_class = ALLOCNO_COVER_CLASS (a);
767 ira_set_allocno_cover_class (cap, cover_class);
768 ALLOCNO_AVAILABLE_REGS_NUM (cap) = ALLOCNO_AVAILABLE_REGS_NUM (a);
769 ALLOCNO_CAP_MEMBER (cap) = a;
770 ALLOCNO_CAP (a) = cap;
771 ALLOCNO_COVER_CLASS_COST (cap) = ALLOCNO_COVER_CLASS_COST (a);
772 ALLOCNO_MEMORY_COST (cap) = ALLOCNO_MEMORY_COST (a);
773 ira_allocate_and_copy_costs
774 (&ALLOCNO_HARD_REG_COSTS (cap), cover_class, ALLOCNO_HARD_REG_COSTS (a));
775 ira_allocate_and_copy_costs
776 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (cap), cover_class,
777 ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
778 ALLOCNO_NREFS (cap) = ALLOCNO_NREFS (a);
779 ALLOCNO_FREQ (cap) = ALLOCNO_FREQ (a);
780 ALLOCNO_CALL_FREQ (cap) = ALLOCNO_CALL_FREQ (a);
781 IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (cap),
782 ALLOCNO_CONFLICT_HARD_REGS (a));
783 IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (cap),
784 ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
785 ALLOCNO_CALLS_CROSSED_NUM (cap) = ALLOCNO_CALLS_CROSSED_NUM (a);
786 #ifdef STACK_REGS
787 ALLOCNO_NO_STACK_REG_P (cap) = ALLOCNO_NO_STACK_REG_P (a);
788 ALLOCNO_TOTAL_NO_STACK_REG_P (cap) = ALLOCNO_TOTAL_NO_STACK_REG_P (a);
789 #endif
790 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
792 fprintf (ira_dump_file, " Creating cap ");
793 ira_print_expanded_allocno (cap);
794 fprintf (ira_dump_file, "\n");
796 return cap;
799 /* Create and return an allocno live range with the given attributes. */
800 allocno_live_range_t
801 ira_create_allocno_live_range (ira_allocno_t a, int start, int finish,
802 allocno_live_range_t next)
804 allocno_live_range_t p;
806 p = (allocno_live_range_t) pool_alloc (allocno_live_range_pool);
807 p->allocno = a;
808 p->start = start;
809 p->finish = finish;
810 p->next = next;
811 return p;
814 /* Copy allocno live range R and return the result. */
815 static allocno_live_range_t
816 copy_allocno_live_range (allocno_live_range_t r)
818 allocno_live_range_t p;
820 p = (allocno_live_range_t) pool_alloc (allocno_live_range_pool);
821 *p = *r;
822 return p;
825 /* Copy allocno live range list given by its head R and return the
826 result. */
827 static allocno_live_range_t
828 copy_allocno_live_range_list (allocno_live_range_t r)
830 allocno_live_range_t p, first, last;
832 if (r == NULL)
833 return NULL;
834 for (first = last = NULL; r != NULL; r = r->next)
836 p = copy_allocno_live_range (r);
837 if (first == NULL)
838 first = p;
839 else
840 last->next = p;
841 last = p;
843 return first;
846 /* Free allocno live range R. */
847 void
848 ira_finish_allocno_live_range (allocno_live_range_t r)
850 pool_free (allocno_live_range_pool, r);
853 /* Free updated register costs of allocno A. */
854 void
855 ira_free_allocno_updated_costs (ira_allocno_t a)
857 enum reg_class cover_class;
859 cover_class = ALLOCNO_COVER_CLASS (a);
860 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
861 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), cover_class);
862 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
863 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
864 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
865 cover_class);
866 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
869 /* Free the memory allocated for allocno A. */
870 static void
871 finish_allocno (ira_allocno_t a)
873 allocno_live_range_t r, next_r;
874 enum reg_class cover_class = ALLOCNO_COVER_CLASS (a);
876 ira_allocnos[ALLOCNO_NUM (a)] = NULL;
877 ira_conflict_id_allocno_map[ALLOCNO_CONFLICT_ID (a)] = NULL;
878 if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) != NULL)
879 ira_free (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a));
880 if (ALLOCNO_HARD_REG_COSTS (a) != NULL)
881 ira_free_cost_vector (ALLOCNO_HARD_REG_COSTS (a), cover_class);
882 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL)
883 ira_free_cost_vector (ALLOCNO_CONFLICT_HARD_REG_COSTS (a), cover_class);
884 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
885 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), cover_class);
886 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
887 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
888 cover_class);
889 for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = next_r)
891 next_r = r->next;
892 ira_finish_allocno_live_range (r);
894 pool_free (allocno_pool, a);
897 /* Free the memory allocated for all allocnos. */
898 static void
899 finish_allocnos (void)
901 ira_allocno_t a;
902 ira_allocno_iterator ai;
904 FOR_EACH_ALLOCNO (a, ai)
905 finish_allocno (a);
906 ira_free (ira_regno_allocno_map);
907 VEC_free (ira_allocno_t, heap, ira_conflict_id_allocno_map_vec);
908 VEC_free (ira_allocno_t, heap, allocno_vec);
909 free_alloc_pool (allocno_pool);
910 free_alloc_pool (allocno_live_range_pool);
915 /* Pools for copies. */
916 static alloc_pool copy_pool;
918 /* Vec containing references to all created copies. It is a
919 container of array ira_copies. */
920 static VEC(ira_copy_t,heap) *copy_vec;
922 /* The function initializes data concerning allocno copies. */
923 static void
924 initiate_copies (void)
926 copy_pool
927 = create_alloc_pool ("copies", sizeof (struct ira_allocno_copy), 100);
928 copy_vec = VEC_alloc (ira_copy_t, heap, get_max_uid ());
929 ira_copies = NULL;
930 ira_copies_num = 0;
933 /* Return the copy connecting A1 and A2 and originating from INSN of
934 LOOP_TREE_NODE, if any. */
935 static ira_copy_t
936 find_allocno_copy (ira_allocno_t a1, ira_allocno_t a2, rtx insn,
937 ira_loop_tree_node_t loop_tree_node)
939 ira_copy_t cp, next_cp;
940 ira_allocno_t another_a;
942 for (cp = ALLOCNO_COPIES (a1); cp != NULL; cp = next_cp)
944 if (cp->first == a1)
946 next_cp = cp->next_first_allocno_copy;
947 another_a = cp->second;
949 else if (cp->second == a1)
951 next_cp = cp->next_second_allocno_copy;
952 another_a = cp->first;
954 else
955 gcc_unreachable ();
956 if (another_a == a2 && cp->insn == insn
957 && cp->loop_tree_node == loop_tree_node)
958 return cp;
960 return NULL;
963 /* Create and return copy with given attributes LOOP_TREE_NODE, FIRST,
964 SECOND, FREQ, and INSN. */
965 ira_copy_t
966 ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq, rtx insn,
967 ira_loop_tree_node_t loop_tree_node)
969 ira_copy_t cp;
971 cp = (ira_copy_t) pool_alloc (copy_pool);
972 cp->num = ira_copies_num;
973 cp->first = first;
974 cp->second = second;
975 cp->freq = freq;
976 cp->insn = insn;
977 cp->loop_tree_node = loop_tree_node;
978 VEC_safe_push (ira_copy_t, heap, copy_vec, cp);
979 ira_copies = VEC_address (ira_copy_t, copy_vec);
980 ira_copies_num = VEC_length (ira_copy_t, copy_vec);
981 return cp;
984 /* Attach a copy CP to the allocnos involved in the copy. */
985 void
986 ira_add_allocno_copy_to_list (ira_copy_t cp)
988 ira_allocno_t first = cp->first, second = cp->second;
990 cp->prev_first_allocno_copy = NULL;
991 cp->prev_second_allocno_copy = NULL;
992 cp->next_first_allocno_copy = ALLOCNO_COPIES (first);
993 if (cp->next_first_allocno_copy != NULL)
995 if (cp->next_first_allocno_copy->first == first)
996 cp->next_first_allocno_copy->prev_first_allocno_copy = cp;
997 else
998 cp->next_first_allocno_copy->prev_second_allocno_copy = cp;
1000 cp->next_second_allocno_copy = ALLOCNO_COPIES (second);
1001 if (cp->next_second_allocno_copy != NULL)
1003 if (cp->next_second_allocno_copy->second == second)
1004 cp->next_second_allocno_copy->prev_second_allocno_copy = cp;
1005 else
1006 cp->next_second_allocno_copy->prev_first_allocno_copy = cp;
1008 ALLOCNO_COPIES (first) = cp;
1009 ALLOCNO_COPIES (second) = cp;
1012 /* Detach a copy CP from the allocnos involved in the copy. */
1013 void
1014 ira_remove_allocno_copy_from_list (ira_copy_t cp)
1016 ira_allocno_t first = cp->first, second = cp->second;
1017 ira_copy_t prev, next;
1019 next = cp->next_first_allocno_copy;
1020 prev = cp->prev_first_allocno_copy;
1021 if (prev == NULL)
1022 ALLOCNO_COPIES (first) = next;
1023 else if (prev->first == first)
1024 prev->next_first_allocno_copy = next;
1025 else
1026 prev->next_second_allocno_copy = next;
1027 if (next != NULL)
1029 if (next->first == first)
1030 next->prev_first_allocno_copy = prev;
1031 else
1032 next->prev_second_allocno_copy = prev;
1034 cp->prev_first_allocno_copy = cp->next_first_allocno_copy = NULL;
1036 next = cp->next_second_allocno_copy;
1037 prev = cp->prev_second_allocno_copy;
1038 if (prev == NULL)
1039 ALLOCNO_COPIES (second) = next;
1040 else if (prev->second == second)
1041 prev->next_second_allocno_copy = next;
1042 else
1043 prev->next_first_allocno_copy = next;
1044 if (next != NULL)
1046 if (next->second == second)
1047 next->prev_second_allocno_copy = prev;
1048 else
1049 next->prev_first_allocno_copy = prev;
1051 cp->prev_second_allocno_copy = cp->next_second_allocno_copy = NULL;
1054 /* Make copy CP canonical: the number of the first allocno is less
1055 than the number of the second one. */
1056 void
1057 ira_swap_allocno_copy_ends_if_necessary (ira_copy_t cp)
1059 ira_allocno_t temp;
1060 ira_copy_t temp_cp;
1062 if (ALLOCNO_NUM (cp->first) <= ALLOCNO_NUM (cp->second))
1063 return;
1065 temp = cp->first;
1066 cp->first = cp->second;
1067 cp->second = temp;
1069 temp_cp = cp->prev_first_allocno_copy;
1070 cp->prev_first_allocno_copy = cp->prev_second_allocno_copy;
1071 cp->prev_second_allocno_copy = temp_cp;
1073 temp_cp = cp->next_first_allocno_copy;
1074 cp->next_first_allocno_copy = cp->next_second_allocno_copy;
1075 cp->next_second_allocno_copy = temp_cp;
1078 /* Create (or update the frequency if the copy already exists) and
1079 return the copy of allocnos FIRST and SECOND with frequency FREQ
1080 corresponding to move insn INSN (if any) and originating from
1081 LOOP_TREE_NODE. */
1082 ira_copy_t
1083 ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
1084 rtx insn, ira_loop_tree_node_t loop_tree_node)
1086 ira_copy_t cp;
1088 if ((cp = find_allocno_copy (first, second, insn, loop_tree_node)) != NULL)
1090 cp->freq += freq;
1091 return cp;
1093 cp = ira_create_copy (first, second, freq, insn, loop_tree_node);
1094 ira_assert (first != NULL && second != NULL);
1095 ira_add_allocno_copy_to_list (cp);
1096 ira_swap_allocno_copy_ends_if_necessary (cp);
1097 return cp;
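For example, if ira_add_allocno_copy is called twice for the same pair of allocnos with the same INSN and LOOP_TREE_NODE, first with freq 100 and then with freq 60, the second call finds the existing copy and simply raises its frequency to 160 instead of creating a new one.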
1100 /* Print info about copy CP into file F. */
1101 static void
1102 print_copy (FILE *f, ira_copy_t cp)
1104 fprintf (f, " cp%d:a%d(r%d)<->a%d(r%d)@%d\n", cp->num,
1105 ALLOCNO_NUM (cp->first), ALLOCNO_REGNO (cp->first),
1106 ALLOCNO_NUM (cp->second), ALLOCNO_REGNO (cp->second), cp->freq);
1109 /* Print info about copy CP into stderr. */
1110 void
1111 ira_debug_copy (ira_copy_t cp)
1113 print_copy (stderr, cp);
1116 /* Print info about all copies into file F. */
1117 static void
1118 print_copies (FILE *f)
1120 ira_copy_t cp;
1121 ira_copy_iterator ci;
1123 FOR_EACH_COPY (cp, ci)
1124 print_copy (f, cp);
1127 /* Print info about all copies into stderr. */
1128 void
1129 ira_debug_copies (void)
1131 print_copies (stderr);
1134 /* Print info about copies involving allocno A into file F. */
1135 static void
1136 print_allocno_copies (FILE *f, ira_allocno_t a)
1138 ira_allocno_t another_a;
1139 ira_copy_t cp, next_cp;
1141 fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1142 for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
1144 if (cp->first == a)
1146 next_cp = cp->next_first_allocno_copy;
1147 another_a = cp->second;
1149 else if (cp->second == a)
1151 next_cp = cp->next_second_allocno_copy;
1152 another_a = cp->first;
1154 else
1155 gcc_unreachable ();
1156 fprintf (f, " cp%d:a%d(r%d)@%d", cp->num,
1157 ALLOCNO_NUM (another_a), ALLOCNO_REGNO (another_a), cp->freq);
1159 fprintf (f, "\n");
1162 /* Print info about copies involving allocno A into stderr. */
1163 void
1164 ira_debug_allocno_copies (ira_allocno_t a)
1166 print_allocno_copies (stderr, a);
1169 /* The function frees memory allocated for copy CP. */
1170 static void
1171 finish_copy (ira_copy_t cp)
1173 pool_free (copy_pool, cp);
1177 /* Free memory allocated for all copies. */
1178 static void
1179 finish_copies (void)
1181 ira_copy_t cp;
1182 ira_copy_iterator ci;
1184 FOR_EACH_COPY (cp, ci)
1185 finish_copy (cp);
1186 VEC_free (ira_copy_t, heap, copy_vec);
1187 free_alloc_pool (copy_pool);
1192 /* Pools for cost vectors. They are defined only for cover classes. */
1193 static alloc_pool cost_vector_pool[N_REG_CLASSES];
1195 /* The function initiates work with hard register cost vectors. It
1196 creates an allocation pool for each cover class. */
1197 static void
1198 initiate_cost_vectors (void)
1200 int i;
1201 enum reg_class cover_class;
1203 for (i = 0; i < ira_reg_class_cover_size; i++)
1205 cover_class = ira_reg_class_cover[i];
1206 cost_vector_pool[cover_class]
1207 = create_alloc_pool ("cost vectors",
1208 sizeof (int)
1209 * ira_class_hard_regs_num[cover_class],
1210 100);
1214 /* Allocate and return a cost vector VEC for COVER_CLASS. */
1215 int *
1216 ira_allocate_cost_vector (enum reg_class cover_class)
1218 return (int *) pool_alloc (cost_vector_pool[cover_class]);
1221 /* Free a cost vector VEC for COVER_CLASS. */
1222 void
1223 ira_free_cost_vector (int *vec, enum reg_class cover_class)
1225 ira_assert (vec != NULL);
1226 pool_free (cost_vector_pool[cover_class], vec);
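A hypothetical caller of the two functions above, only to show the intended pairing; it assumes GENERAL_REGS is one of the target's cover classes, and the helper itself is not part of this file.

static void
zero_cost_vector_example (void)
{
  int i;
  int *costs = ira_allocate_cost_vector (GENERAL_REGS);

  /* The vector has one entry per hard register of the cover class.  */
  for (i = 0; i < ira_class_hard_regs_num[GENERAL_REGS]; i++)
    costs[i] = 0;
  ira_free_cost_vector (costs, GENERAL_REGS);
}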
1229 /* Finish work with hard register cost vectors. Release allocation
1230 pool for each cover class. */
1231 static void
1232 finish_cost_vectors (void)
1234 int i;
1235 enum reg_class cover_class;
1237 for (i = 0; i < ira_reg_class_cover_size; i++)
1239 cover_class = ira_reg_class_cover[i];
1240 free_alloc_pool (cost_vector_pool[cover_class]);
1246 /* The current loop tree node and its regno allocno map. */
1247 ira_loop_tree_node_t ira_curr_loop_tree_node;
1248 ira_allocno_t *ira_curr_regno_allocno_map;
1250 /* This recursive function traverses the loop tree with root LOOP_NODE,
1251 calling the non-null functions PREORDER_FUNC and POSTORDER_FUNC
1252 in preorder and postorder respectively. The function sets up
1253 IRA_CURR_LOOP_TREE_NODE and IRA_CURR_REGNO_ALLOCNO_MAP. If BB_P,
1254 basic block nodes of LOOP_NODE are also processed (before its
1255 subloop nodes). */
1256 void
1257 ira_traverse_loop_tree (bool bb_p, ira_loop_tree_node_t loop_node,
1258 void (*preorder_func) (ira_loop_tree_node_t),
1259 void (*postorder_func) (ira_loop_tree_node_t))
1261 ira_loop_tree_node_t subloop_node;
1263 ira_assert (loop_node->bb == NULL);
1264 ira_curr_loop_tree_node = loop_node;
1265 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1267 if (preorder_func != NULL)
1268 (*preorder_func) (loop_node);
1270 if (bb_p)
1271 for (subloop_node = loop_node->children;
1272 subloop_node != NULL;
1273 subloop_node = subloop_node->next)
1274 if (subloop_node->bb != NULL)
1276 if (preorder_func != NULL)
1277 (*preorder_func) (subloop_node);
1279 if (postorder_func != NULL)
1280 (*postorder_func) (subloop_node);
1283 for (subloop_node = loop_node->subloops;
1284 subloop_node != NULL;
1285 subloop_node = subloop_node->subloop_next)
1287 ira_assert (subloop_node->bb == NULL);
1288 ira_traverse_loop_tree (bb_p, subloop_node,
1289 preorder_func, postorder_func);
1292 ira_curr_loop_tree_node = loop_node;
1293 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1295 if (postorder_func != NULL)
1296 (*postorder_func) (loop_node);
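A hypothetical use of the traversal, only to make the visiting order concrete; debug_print_node and debug_print_loop_tree are made-up helpers, not part of this file.

static void
debug_print_node (ira_loop_tree_node_t node)
{
  if (node->bb != NULL)
    fprintf (stderr, "  bb %d\n", node->bb->index);
  else
    fprintf (stderr, "loop %d\n", node->loop->num);
}

static void
debug_print_loop_tree (void)
{
  /* Preorder only; because BB_P is true, the basic blocks of each
     region are printed before its subloops.  */
  ira_traverse_loop_tree (true, ira_loop_tree_root, debug_print_node, NULL);
}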
1301 /* The basic block currently being processed. */
1302 static basic_block curr_bb;
1304 /* This recursive function creates allocnos corresponding to
1305 pseudo-registers contained in X. True OUTPUT_P means that X is
1306 an lvalue. */
1307 static void
1308 create_insn_allocnos (rtx x, bool output_p)
1310 int i, j;
1311 const char *fmt;
1312 enum rtx_code code = GET_CODE (x);
1314 if (code == REG)
1316 int regno;
1318 if ((regno = REGNO (x)) >= FIRST_PSEUDO_REGISTER)
1320 ira_allocno_t a;
1322 if ((a = ira_curr_regno_allocno_map[regno]) == NULL)
1323 a = ira_create_allocno (regno, false, ira_curr_loop_tree_node);
1325 ALLOCNO_NREFS (a)++;
1326 ALLOCNO_FREQ (a) += REG_FREQ_FROM_BB (curr_bb);
1327 if (output_p)
1328 bitmap_set_bit (ira_curr_loop_tree_node->modified_regnos, regno);
1330 return;
1332 else if (code == SET)
1334 create_insn_allocnos (SET_DEST (x), true);
1335 create_insn_allocnos (SET_SRC (x), false);
1336 return;
1338 else if (code == CLOBBER)
1340 create_insn_allocnos (XEXP (x, 0), true);
1341 return;
1343 else if (code == MEM)
1345 create_insn_allocnos (XEXP (x, 0), false);
1346 return;
1348 else if (code == PRE_DEC || code == POST_DEC || code == PRE_INC ||
1349 code == POST_INC || code == POST_MODIFY || code == PRE_MODIFY)
1351 create_insn_allocnos (XEXP (x, 0), true);
1352 create_insn_allocnos (XEXP (x, 0), false);
1353 return;
1356 fmt = GET_RTX_FORMAT (code);
1357 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1359 if (fmt[i] == 'e')
1360 create_insn_allocnos (XEXP (x, i), output_p);
1361 else if (fmt[i] == 'E')
1362 for (j = 0; j < XVECLEN (x, i); j++)
1363 create_insn_allocnos (XVECEXP (x, i, j), output_p);
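For example, for an insn pattern like (set (reg:SI 130) (plus:SI (reg:SI 131) (mem:SI (reg:SI 132)))), and assuming regnos 130-132 are pseudo-registers, the walk creates (or reuses) allocnos for 130, 131 and 132 in the current region, bumps their ALLOCNO_NREFS and ALLOCNO_FREQ, and marks only regno 130 in modified_regnos, since only the SET_DEST is visited with OUTPUT_P true.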
1367 /* Create allocnos corresponding to pseudo-registers living in the
1368 basic block represented by the corresponding loop tree node
1369 BB_NODE. */
1370 static void
1371 create_bb_allocnos (ira_loop_tree_node_t bb_node)
1373 basic_block bb;
1374 rtx insn;
1375 unsigned int i;
1376 bitmap_iterator bi;
1378 curr_bb = bb = bb_node->bb;
1379 ira_assert (bb != NULL);
1380 FOR_BB_INSNS_REVERSE (bb, insn)
1381 if (INSN_P (insn))
1382 create_insn_allocnos (PATTERN (insn), false);
1383 /* It might be an allocno living through from one subloop to
1384 another. */
1385 EXECUTE_IF_SET_IN_REG_SET (DF_LR_IN (bb), FIRST_PSEUDO_REGISTER, i, bi)
1386 if (ira_curr_regno_allocno_map[i] == NULL)
1387 ira_create_allocno (i, false, ira_curr_loop_tree_node);
1390 /* Create allocnos corresponding to pseudo-registers living on edge E
1391 (a loop entry or exit). Also mark the allocnos as living on the
1392 loop border. */
1393 static void
1394 create_loop_allocnos (edge e)
1396 unsigned int i;
1397 bitmap live_in_regs, border_allocnos;
1398 bitmap_iterator bi;
1399 ira_loop_tree_node_t parent;
1401 live_in_regs = DF_LR_IN (e->dest);
1402 border_allocnos = ira_curr_loop_tree_node->border_allocnos;
1403 EXECUTE_IF_SET_IN_REG_SET (DF_LR_OUT (e->src),
1404 FIRST_PSEUDO_REGISTER, i, bi)
1405 if (bitmap_bit_p (live_in_regs, i))
1407 if (ira_curr_regno_allocno_map[i] == NULL)
1409 /* The order of creation is important for a correct
1410 ira_regno_allocno_map. */
1411 if ((parent = ira_curr_loop_tree_node->parent) != NULL
1412 && parent->regno_allocno_map[i] == NULL)
1413 ira_create_allocno (i, false, parent);
1414 ira_create_allocno (i, false, ira_curr_loop_tree_node);
1416 bitmap_set_bit (border_allocnos,
1417 ALLOCNO_NUM (ira_curr_regno_allocno_map[i]));
1421 /* Create allocnos corresponding to pseudo-registers living in loop
1422 represented by the corresponding loop tree node LOOP_NODE. This
1423 function is called by ira_traverse_loop_tree. */
1424 static void
1425 create_loop_tree_node_allocnos (ira_loop_tree_node_t loop_node)
1427 if (loop_node->bb != NULL)
1428 create_bb_allocnos (loop_node);
1429 else if (loop_node != ira_loop_tree_root)
1431 int i;
1432 edge_iterator ei;
1433 edge e;
1434 VEC (edge, heap) *edges;
1436 FOR_EACH_EDGE (e, ei, loop_node->loop->header->preds)
1437 if (e->src != loop_node->loop->latch)
1438 create_loop_allocnos (e);
1440 edges = get_loop_exit_edges (loop_node->loop);
1441 for (i = 0; VEC_iterate (edge, edges, i, e); i++)
1442 create_loop_allocnos (e);
1443 VEC_free (edge, heap, edges);
1447 /* Propagate information about allocnos modified inside the loop given
1448 by its LOOP_TREE_NODE to its parent. */
1449 static void
1450 propagate_modified_regnos (ira_loop_tree_node_t loop_tree_node)
1452 if (loop_tree_node == ira_loop_tree_root)
1453 return;
1454 ira_assert (loop_tree_node->bb == NULL);
1455 bitmap_ior_into (loop_tree_node->parent->modified_regnos,
1456 loop_tree_node->modified_regnos);
1459 /* Propagate new info about allocnos (see comments about accumulated
1460 info in the allocno definition) to the corresponding allocnos on
1461 upper loop tree levels. So allocnos on upper levels accumulate
1462 information about the corresponding allocnos in nested regions.
1463 The new info is the allocno info finally calculated in this
1464 file. */
1465 static void
1466 propagate_allocno_info (void)
1468 int i;
1469 ira_allocno_t a, parent_a;
1470 ira_loop_tree_node_t parent;
1471 enum reg_class cover_class;
1473 if (flag_ira_algorithm != IRA_ALGORITHM_REGIONAL
1474 && flag_ira_algorithm != IRA_ALGORITHM_MIXED)
1475 return;
1476 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
1477 for (a = ira_regno_allocno_map[i];
1478 a != NULL;
1479 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1480 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
1481 && (parent_a = parent->regno_allocno_map[i]) != NULL
1482 /* There are no caps yet at this point. So use
1483 border_allocnos to find allocnos for the propagation. */
1484 && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE (a)->border_allocnos,
1485 ALLOCNO_NUM (a)))
1487 ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
1488 ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
1489 ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
1490 #ifdef STACK_REGS
1491 if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
1492 ALLOCNO_TOTAL_NO_STACK_REG_P (parent_a) = true;
1493 #endif
1494 IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (parent_a),
1495 ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
1496 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
1497 += ALLOCNO_CALLS_CROSSED_NUM (a);
1498 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
1499 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
1500 cover_class = ALLOCNO_COVER_CLASS (a);
1501 ira_assert (cover_class == ALLOCNO_COVER_CLASS (parent_a));
1502 ira_allocate_and_accumulate_costs
1503 (&ALLOCNO_HARD_REG_COSTS (parent_a), cover_class,
1504 ALLOCNO_HARD_REG_COSTS (a));
1505 ira_allocate_and_accumulate_costs
1506 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
1507 cover_class,
1508 ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
1509 ALLOCNO_COVER_CLASS_COST (parent_a)
1510 += ALLOCNO_COVER_CLASS_COST (a);
1511 ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
1515 /* Create allocnos corresponding to pseudo-registers in the current
1516 function. Traverse the loop tree for this. */
1517 static void
1518 create_allocnos (void)
1520 /* We need to process BB first to correctly link allocnos by member
1521 next_regno_allocno. */
1522 ira_traverse_loop_tree (true, ira_loop_tree_root,
1523 create_loop_tree_node_allocnos, NULL);
1524 if (optimize)
1525 ira_traverse_loop_tree (false, ira_loop_tree_root, NULL,
1526 propagate_modified_regnos);
1531 /* The page contains functions to remove some regions from separate
1532 register allocation. We remove regions whose separate allocation
1533 will hardly improve the result. As a result we speed up regional
1534 register allocation. */
1536 /* Merge ranges R1 and R2 and return the result. The function
1537 maintains the order of ranges and tries to minimize the number of
1538 result ranges. */
1539 static allocno_live_range_t
1540 merge_ranges (allocno_live_range_t r1, allocno_live_range_t r2)
1542 allocno_live_range_t first, last, temp;
1544 if (r1 == NULL)
1545 return r2;
1546 if (r2 == NULL)
1547 return r1;
1548 for (first = last = NULL; r1 != NULL && r2 != NULL;)
1550 if (r1->start < r2->start)
1552 temp = r1;
1553 r1 = r2;
1554 r2 = temp;
1556 if (r1->start <= r2->finish + 1)
1558 /* Intersected ranges: merge r1 and r2 into r1. */
1559 r1->start = r2->start;
1560 if (r1->finish < r2->finish)
1561 r1->finish = r2->finish;
1562 temp = r2;
1563 r2 = r2->next;
1564 ira_finish_allocno_live_range (temp);
1565 if (r2 == NULL)
1567 /* To try to merge with subsequent ranges in r1. */
1568 r2 = r1->next;
1569 r1->next = NULL;
1572 else
1574 /* Add r1 to the result. */
1575 if (first == NULL)
1576 first = last = r1;
1577 else
1579 last->next = r1;
1580 last = r1;
1582 r1 = r1->next;
1583 if (r1 == NULL)
1585 /* To try to merge with subsequent ranges in r2. */
1586 r1 = r2->next;
1587 r2->next = NULL;
1591 if (r1 != NULL)
1593 if (first == NULL)
1594 first = r1;
1595 else
1596 last->next = r1;
1597 ira_assert (r1->next == NULL);
1599 else if (r2 != NULL)
1601 if (first == NULL)
1602 first = r2;
1603 else
1604 last->next = r2;
1605 ira_assert (r2->next == NULL);
1607 else
1609 ira_assert (last->next == NULL);
1611 return first;
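As a worked example of the merge above (the live range lists here keep their latest range, i.e. the largest start point, first): merging r1 = [10,12] -> [1,3] with r2 = [13,20] -> [2,6] first coalesces the adjacent [13,20] and [10,12] into [10,20], then coalesces the overlapping [2,6] and [1,3] into [1,6], giving the two-element result [10,20] -> [1,6].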
1614 /* The function changes the allocno in the range list given by R to A. */
1615 static void
1616 change_allocno_in_range_list (allocno_live_range_t r, ira_allocno_t a)
1618 for (; r != NULL; r = r->next)
1619 r->allocno = a;
1622 /* Return TRUE if NODE represents a loop with low register
1623 pressure. */
1624 static bool
1625 low_pressure_loop_node_p (ira_loop_tree_node_t node)
1627 int i;
1628 enum reg_class cover_class;
1630 if (node->bb != NULL)
1631 return false;
1633 for (i = 0; i < ira_reg_class_cover_size; i++)
1635 cover_class = ira_reg_class_cover[i];
1636 if (node->reg_pressure[cover_class]
1637 > ira_available_class_regs[cover_class])
1638 return false;
1640 return true;
1643 /* Return TRUE if NODE represents a loop which should be removed from
1644 regional allocation. We remove a loop with low register pressure
1645 inside another loop with low register pressure. In this case a
1646 separate allocation of the loop hardly helps (for an irregular
1647 register file architecture it could help by choosing a better hard
1648 register in the loop, but we prefer faster allocation even in this
1649 case). */
1650 static bool
1651 loop_node_to_be_removed_p (ira_loop_tree_node_t node)
1653 return (node->parent != NULL && low_pressure_loop_node_p (node->parent)
1654 && low_pressure_loop_node_p (node));
1657 /* Definition of vector of loop tree nodes. */
1658 DEF_VEC_P(ira_loop_tree_node_t);
1659 DEF_VEC_ALLOC_P(ira_loop_tree_node_t, heap);
1661 /* Vec containing references to all removed loop tree nodes. */
1662 static VEC(ira_loop_tree_node_t,heap) *removed_loop_vec;
1664 /* Vec containing references to all children of loop tree nodes. */
1665 static VEC(ira_loop_tree_node_t,heap) *children_vec;
1667 /* Remove subregions of NODE if their separate allocation will not
1668 improve the result. */
1669 static void
1670 remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_node_t node)
1672 unsigned int start;
1673 bool remove_p;
1674 ira_loop_tree_node_t subnode;
1676 remove_p = loop_node_to_be_removed_p (node);
1677 if (! remove_p)
1678 VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, node);
1679 start = VEC_length (ira_loop_tree_node_t, children_vec);
1680 for (subnode = node->children; subnode != NULL; subnode = subnode->next)
1681 if (subnode->bb == NULL)
1682 remove_uneccesary_loop_nodes_from_loop_tree (subnode);
1683 else
1684 VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, subnode);
1685 node->children = node->subloops = NULL;
1686 if (remove_p)
1688 VEC_safe_push (ira_loop_tree_node_t, heap, removed_loop_vec, node);
1689 return;
1691 while (VEC_length (ira_loop_tree_node_t, children_vec) > start)
1693 subnode = VEC_pop (ira_loop_tree_node_t, children_vec);
1694 subnode->parent = node;
1695 subnode->next = node->children;
1696 node->children = subnode;
1697 if (subnode->bb == NULL)
1699 subnode->subloop_next = node->subloops;
1700 node->subloops = subnode;
1705 /* Remove allocnos from loops removed from the allocation
1706 consideration. */
1707 static void
1708 remove_unnecessary_allocnos (void)
1710 int regno;
1711 bool merged_p;
1712 enum reg_class cover_class;
1713 ira_allocno_t a, prev_a, next_a, parent_a;
1714 ira_loop_tree_node_t a_node, parent;
1715 allocno_live_range_t r;
1717 merged_p = false;
1718 for (regno = max_reg_num () - 1; regno >= FIRST_PSEUDO_REGISTER; regno--)
1719 for (prev_a = NULL, a = ira_regno_allocno_map[regno];
1720 a != NULL;
1721 a = next_a)
1723 next_a = ALLOCNO_NEXT_REGNO_ALLOCNO (a);
1724 a_node = ALLOCNO_LOOP_TREE_NODE (a);
1725 if (! loop_node_to_be_removed_p (a_node))
1726 prev_a = a;
1727 else
1729 for (parent = a_node->parent;
1730 (parent_a = parent->regno_allocno_map[regno]) == NULL
1731 && loop_node_to_be_removed_p (parent);
1732 parent = parent->parent)
1734 if (parent_a == NULL)
1736 /* There are no allocnos with the same regno in upper
1737 region -- just move the allocno to the upper
1738 region. */
1739 prev_a = a;
1740 ALLOCNO_LOOP_TREE_NODE (a) = parent;
1741 parent->regno_allocno_map[regno] = a;
1742 bitmap_set_bit (parent->all_allocnos, ALLOCNO_NUM (a));
1744 else
1746 /* Remove the allocno and update info of allocno in
1747 the upper region. */
1748 if (prev_a == NULL)
1749 ira_regno_allocno_map[regno] = next_a;
1750 else
1751 ALLOCNO_NEXT_REGNO_ALLOCNO (prev_a) = next_a;
1752 r = ALLOCNO_LIVE_RANGES (a);
1753 change_allocno_in_range_list (r, parent_a);
1754 ALLOCNO_LIVE_RANGES (parent_a)
1755 = merge_ranges (r, ALLOCNO_LIVE_RANGES (parent_a));
1756 merged_p = true;
1757 ALLOCNO_LIVE_RANGES (a) = NULL;
1758 IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (parent_a),
1759 ALLOCNO_CONFLICT_HARD_REGS (a));
1760 #ifdef STACK_REGS
1761 if (ALLOCNO_NO_STACK_REG_P (a))
1762 ALLOCNO_NO_STACK_REG_P (parent_a) = true;
1763 #endif
1764 ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
1765 ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
1766 ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
1767 IOR_HARD_REG_SET
1768 (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (parent_a),
1769 ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
1770 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
1771 += ALLOCNO_CALLS_CROSSED_NUM (a);
1772 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
1773 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
1774 #ifdef STACK_REGS
1775 if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
1776 ALLOCNO_TOTAL_NO_STACK_REG_P (parent_a) = true;
1777 #endif
1778 cover_class = ALLOCNO_COVER_CLASS (a);
1779 ira_assert (cover_class == ALLOCNO_COVER_CLASS (parent_a));
1780 ira_allocate_and_accumulate_costs
1781 (&ALLOCNO_HARD_REG_COSTS (parent_a), cover_class,
1782 ALLOCNO_HARD_REG_COSTS (a));
1783 ira_allocate_and_accumulate_costs
1784 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
1785 cover_class,
1786 ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
1787 ALLOCNO_COVER_CLASS_COST (parent_a)
1788 += ALLOCNO_COVER_CLASS_COST (a);
1789 ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
1790 finish_allocno (a);
1794 if (merged_p)
1795 ira_rebuild_start_finish_chains ();
1798 /* Remove loops from consideration. We remove loops for which a
1799 separate allocation will not improve the result. We have to do
1800 this after allocno creation and the evaluation of their costs and
1801 cover classes, because only after that is the register pressure
1802 known and calculated. */
1803 static void
1804 remove_unnecessary_regions (void)
1806 children_vec
1807 = VEC_alloc (ira_loop_tree_node_t, heap,
1808 last_basic_block + VEC_length (loop_p, ira_loops.larray));
1809 removed_loop_vec
1810 = VEC_alloc (ira_loop_tree_node_t, heap,
1811 last_basic_block + VEC_length (loop_p, ira_loops.larray));
1812 remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_root) ;
1813 VEC_free (ira_loop_tree_node_t, heap, children_vec);
1814 remove_unnecessary_allocnos ();
1815 while (VEC_length (ira_loop_tree_node_t, removed_loop_vec) > 0)
1816 finish_loop_tree_node (VEC_pop (ira_loop_tree_node_t, removed_loop_vec));
1817 VEC_free (ira_loop_tree_node_t, heap, removed_loop_vec);
1822 /* Set up minimal and maximal live range points for allocnos. */
1823 static void
1824 setup_min_max_allocno_live_range_point (void)
1826 int i;
1827 ira_allocno_t a, parent_a, cap;
1828 ira_allocno_iterator ai;
1829 allocno_live_range_t r;
1830 ira_loop_tree_node_t parent;
1832 FOR_EACH_ALLOCNO (a, ai)
1834 r = ALLOCNO_LIVE_RANGES (a);
1835 if (r == NULL)
1836 continue;
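/* The list of live ranges is ordered from the latest range to the
   earliest one, so the head supplies the allocno's maximal point and
   the last element its minimal point.  */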
1837 ALLOCNO_MAX (a) = r->finish;
1838 for (; r->next != NULL; r = r->next)
1840 ALLOCNO_MIN (a) = r->start;
1842 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
1843 for (a = ira_regno_allocno_map[i];
1844 a != NULL;
1845 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1847 if (ALLOCNO_MAX (a) < 0)
1848 continue;
1849 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
1850 /* Accumulation of range info. */
1851 if (ALLOCNO_CAP (a) != NULL)
1853 for (cap = ALLOCNO_CAP (a); cap != NULL; cap = ALLOCNO_CAP (cap))
1855 if (ALLOCNO_MAX (cap) < ALLOCNO_MAX (a))
1856 ALLOCNO_MAX (cap) = ALLOCNO_MAX (a);
1857 if (ALLOCNO_MIN (cap) > ALLOCNO_MIN (a))
1858 ALLOCNO_MIN (cap) = ALLOCNO_MIN (a);
1860 continue;
1862 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) == NULL)
1863 continue;
1864 parent_a = parent->regno_allocno_map[i];
1865 if (ALLOCNO_MAX (parent_a) < ALLOCNO_MAX (a))
1866 ALLOCNO_MAX (parent_a) = ALLOCNO_MAX (a);
1867 if (ALLOCNO_MIN (parent_a) > ALLOCNO_MIN (a))
1868 ALLOCNO_MIN (parent_a) = ALLOCNO_MIN (a);
1870 #ifdef ENABLE_IRA_CHECKING
1871 FOR_EACH_ALLOCNO (a, ai)
1873 if ((0 <= ALLOCNO_MIN (a) && ALLOCNO_MIN (a) <= ira_max_point)
1874 && (0 <= ALLOCNO_MAX (a) && ALLOCNO_MAX (a) <= ira_max_point))
1875 continue;
1876 gcc_unreachable ();
1878 #endif
1881 /* Sort allocnos according to their live ranges. Allocnos with a
1882 smaller cover class are put first. Allocnos with the same cover
1883 class are ordered according to their start (min). Allocnos with
1884 the same start are ordered according to their finish (max). */
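/* For example (illustrative numbers only): among three allocnos of the
   same cover class with (min, max) equal to (2, 10), (2, 15) and
   (5, 7), the resulting order is (2, 10), (2, 15), (5, 7); an allocno
   whose cover class has a smaller enum value would precede all of
   them.  */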
1885 static int
1886 allocno_range_compare_func (const void *v1p, const void *v2p)
1888 int diff;
1889 ira_allocno_t a1 = *(const ira_allocno_t *) v1p;
1890 ira_allocno_t a2 = *(const ira_allocno_t *) v2p;
1892 if ((diff = ALLOCNO_COVER_CLASS (a1) - ALLOCNO_COVER_CLASS (a2)) != 0)
1893 return diff;
1894 if ((diff = ALLOCNO_MIN (a1) - ALLOCNO_MIN (a2)) != 0)
1895 return diff;
1896 if ((diff = ALLOCNO_MAX (a1) - ALLOCNO_MAX (a2)) != 0)
1897 return diff;
1898 return ALLOCNO_NUM (a1) - ALLOCNO_NUM (a2);
1901 /* Sort ira_conflict_id_allocno_map and set up conflict id of
1902 allocnos. */
1903 static void
1904 sort_conflict_id_allocno_map (void)
1906 int i, num;
1907 ira_allocno_t a;
1908 ira_allocno_iterator ai;
1910 num = 0;
1911 FOR_EACH_ALLOCNO (a, ai)
1912 ira_conflict_id_allocno_map[num++] = a;
1913 qsort (ira_conflict_id_allocno_map, num, sizeof (ira_allocno_t),
1914 allocno_range_compare_func);
1915 for (i = 0; i < num; i++)
1916 if ((a = ira_conflict_id_allocno_map[i]) != NULL)
1917 ALLOCNO_CONFLICT_ID (a) = i;
1918 for (i = num; i < ira_allocnos_num; i++)
1919 ira_conflict_id_allocno_map[i] = NULL;
1922 /* Set up the minimal and maximal conflict ids of allocnos with
1923 which a given allocno can conflict. */
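/* A made-up illustration of the computation below: take four allocnos
   of one cover class whose conflict ids 0..3 were assigned from the
   ranges [1,4], [2,9], [5,6] and [7,8].  The first (forward) pass sets
   ALLOCNO_MIN of allocno 3 to 1: allocno 0 finishes before point 7
   while allocno 1 is still live there, so only ids 1 and above can
   conflict with allocno 3.  The second (backward) pass bounds
   ALLOCNO_MAX in the same spirit from the other end.  */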
1924 static void
1925 setup_min_max_conflict_allocno_ids (void)
1927 enum reg_class cover_class;
1928 int i, j, min, max, start, finish, first_not_finished, filled_area_start;
1929 int *live_range_min, *last_lived;
1930 ira_allocno_t a;
1932 live_range_min = (int *) ira_allocate (sizeof (int) * ira_allocnos_num);
1933 cover_class = -1;
1934 first_not_finished = -1;
1935 for (i = 0; i < ira_allocnos_num; i++)
1937 a = ira_conflict_id_allocno_map[i];
1938 if (a == NULL)
1939 continue;
1940 if (cover_class != ALLOCNO_COVER_CLASS (a))
1942 cover_class = ALLOCNO_COVER_CLASS (a);
1943 min = i;
1944 first_not_finished = i;
1946 else
1948 start = ALLOCNO_MIN (a);
1949 /* If we skip an allocno, allocnos with smaller ids will
1950 also be skipped because of the secondary sorting by
1951 range finish (see function
1952 allocno_range_compare_func). */
1953 while (first_not_finished < i
1954 && start > ALLOCNO_MAX (ira_conflict_id_allocno_map
1955 [first_not_finished]))
1956 first_not_finished++;
1957 min = first_not_finished;
1959 if (min == i)
1960 /* We could increase min further in this case but it is good
1961 enough. */
1962 min++;
1963 live_range_min[i] = ALLOCNO_MIN (a);
1964 ALLOCNO_MIN (a) = min;
1966 last_lived = (int *) ira_allocate (sizeof (int) * ira_max_point);
1967 cover_class = -1;
1968 filled_area_start = -1;
1969 for (i = ira_allocnos_num - 1; i >= 0; i--)
1971 a = ira_conflict_id_allocno_map[i];
1972 if (a == NULL)
1973 continue;
1974 if (cover_class != ALLOCNO_COVER_CLASS (a))
1976 cover_class = ALLOCNO_COVER_CLASS (a);
1977 for (j = 0; j < ira_max_point; j++)
1978 last_lived[j] = -1;
1979 filled_area_start = ira_max_point;
1981 min = live_range_min[i];
1982 finish = ALLOCNO_MAX (a);
1983 max = last_lived[finish];
1984 if (max < 0)
1985 /* We could decrease max further in this case but it is good
1986 enough. */
1987 max = ALLOCNO_CONFLICT_ID (a) - 1;
1988 ALLOCNO_MAX (a) = max;
1989 /* When filling, we can go beyond the finish of A's range to
1990 recognize intersections quickly: if the range of a
1991 subsequently processed allocno (which has a smaller conflict
1992 id) finishes beyond the finish of A's range, the two ranges
1993 definitely intersect (the reason is that allocnos with bigger
1994 conflict ids have range starts that are not smaller than those
1995 of allocnos with smaller ids). */
1996 for (j = min; j < filled_area_start; j++)
1997 last_lived[j] = i;
1998 filled_area_start = min;
2000 ira_free (last_lived);
2001 ira_free (live_range_min);
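/* Create caps for allocnos whose life is local to their loop: an
   allocno that is not live on its loop border gets a cap in the
   enclosing region, so that region still sees its costs and conflicts;
   caps themselves get caps on the next level up in the same way.  */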
2006 static void
2007 create_caps (void)
2009 ira_allocno_t a;
2010 ira_allocno_iterator ai;
2011 ira_loop_tree_node_t loop_tree_node;
2013 FOR_EACH_ALLOCNO (a, ai)
2015 if (ALLOCNO_LOOP_TREE_NODE (a) == ira_loop_tree_root)
2016 continue;
2017 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2018 create_cap_allocno (a);
2019 else if (ALLOCNO_CAP (a) == NULL)
2021 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
2022 if (!bitmap_bit_p (loop_tree_node->border_allocnos, ALLOCNO_NUM (a)))
2023 create_cap_allocno (a);
2030 /* The page contains code transforming the more-than-one-region
2031 internal representation (IR) into a one region IR, which is
2032 necessary for reload. This transformation is called IR
2033 flattening. We might just rebuild the IR for one region, but we
2034 don't do it because it takes a lot of time. */
2036 /* Map: regno -> allocno which will finally represent the regno in
2037 the IR with one region. */
2038 static ira_allocno_t *regno_top_level_allocno_map;
2040 /* Process all allocnos originating from pseudo REGNO and copy live
2041 ranges, hard reg conflicts, and allocno stack reg attributes from
2042 low-level allocnos to the final allocnos which are destinations of
2043 removed stores at a loop exit. Return true if we copied live
2044 ranges. */
2045 static bool
2046 copy_info_to_removed_store_destinations (int regno)
2048 ira_allocno_t a, parent_a;
2049 ira_loop_tree_node_t parent;
2050 allocno_live_range_t r;
2051 bool merged_p;
2053 merged_p = false;
2054 for (a = ira_regno_allocno_map[regno];
2055 a != NULL;
2056 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2058 if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))])
2059 /* This allocno will be removed. */
2060 continue;
2061 /* Caps will be removed. */
2062 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
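/* Climb the loop tree looking for the first ancestor allocno of REGNO
   that both survives flattening and is the destination of a removed
   store; give up on this allocno if some level has no allocno for
   REGNO at all.  */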
2063 for (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
2064 parent != NULL;
2065 parent = parent->parent)
2066 if ((parent_a = parent->regno_allocno_map[regno]) == NULL
2067 || (parent_a == regno_top_level_allocno_map[REGNO (ALLOCNO_REG
2068 (parent_a))]
2069 && ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)))
2070 break;
2071 if (parent == NULL || parent_a == NULL)
2072 continue;
2073 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2075 fprintf
2076 (ira_dump_file,
2077 " Coping ranges of a%dr%d to a%dr%d: ",
2078 ALLOCNO_NUM (a), REGNO (ALLOCNO_REG (a)),
2079 ALLOCNO_NUM (parent_a), REGNO (ALLOCNO_REG (parent_a)));
2080 ira_print_live_range_list (ira_dump_file,
2081 ALLOCNO_LIVE_RANGES (a));
2083 r = copy_allocno_live_range_list (ALLOCNO_LIVE_RANGES (a));
2084 change_allocno_in_range_list (r, parent_a);
2085 ALLOCNO_LIVE_RANGES (parent_a)
2086 = merge_ranges (r, ALLOCNO_LIVE_RANGES (parent_a));
2087 IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (parent_a),
2088 ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
2089 #ifdef STACK_REGS
2090 if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
2091 ALLOCNO_TOTAL_NO_STACK_REG_P (parent_a) = true;
2092 #endif
2093 merged_p = true;
2095 return merged_p;
2098 /* Flatten the IR. In other words, this function transforms IR as if
2099 it were built with one region (without loops). We could make it
2100 much simpler by rebuilding IR with one region, but unfortunately it
2101 takes a lot of time. MAX_REGNO_BEFORE_EMIT and
2102 IRA_MAX_POINT_BEFORE_EMIT are respectively MAX_REG_NUM () and
2103 IRA_MAX_POINT before emitting insns on the loop borders. */
2104 void
2105 ira_flattening (int max_regno_before_emit, int ira_max_point_before_emit)
2107 int i, j, num;
2108 bool stop_p, keep_p;
2109 int hard_regs_num;
2110 bool new_pseudos_p, merged_p, mem_dest_p;
2111 unsigned int n;
2112 enum reg_class cover_class;
2113 ira_allocno_t a, parent_a, first, second, node_first, node_second;
2114 ira_copy_t cp;
2115 ira_loop_tree_node_t parent, node;
2116 allocno_live_range_t r;
2117 ira_allocno_iterator ai;
2118 ira_copy_iterator ci;
2119 sparseset allocnos_live;
2121 regno_top_level_allocno_map
2122 = (ira_allocno_t *) ira_allocate (max_reg_num () * sizeof (ira_allocno_t));
2123 memset (regno_top_level_allocno_map, 0,
2124 max_reg_num () * sizeof (ira_allocno_t));
2125 new_pseudos_p = merged_p = false;
2126 FOR_EACH_ALLOCNO (a, ai)
2128 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2129 /* Caps are not in the regno allocno maps and they will
2130 never be transformed into allocnos existing after IR
2131 flattening. */
2132 continue;
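/* Seed the accumulated ("total") conflict info with the local info;
   the passes below OR in the data of subtree allocnos that get merged
   into their surviving top-level allocno.  */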
2133 COPY_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
2134 ALLOCNO_CONFLICT_HARD_REGS (a));
2135 #ifdef STACK_REGS
2136 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = ALLOCNO_NO_STACK_REG_P (a);
2137 #endif
2139 /* Fix final allocno attributes. */
2140 for (i = max_regno_before_emit - 1; i >= FIRST_PSEUDO_REGISTER; i--)
2142 mem_dest_p = false;
2143 for (a = ira_regno_allocno_map[i];
2144 a != NULL;
2145 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2147 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
2148 if (ALLOCNO_SOMEWHERE_RENAMED_P (a))
2149 new_pseudos_p = true;
2150 if (ALLOCNO_CAP (a) != NULL
2151 || (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) == NULL
2152 || ((parent_a = parent->regno_allocno_map[ALLOCNO_REGNO (a)])
2153 == NULL))
2155 ALLOCNO_COPIES (a) = NULL;
2156 regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))] = a;
2157 continue;
2159 ira_assert (ALLOCNO_CAP_MEMBER (parent_a) == NULL);
2161 if (ALLOCNO_MEM_OPTIMIZED_DEST (a) != NULL)
2162 mem_dest_p = true;
2163 if (REGNO (ALLOCNO_REG (a)) == REGNO (ALLOCNO_REG (parent_a)))
2165 IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (parent_a),
2166 ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
2167 #ifdef STACK_REGS
2168 if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
2169 ALLOCNO_TOTAL_NO_STACK_REG_P (parent_a) = true;
2170 #endif
2171 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2173 fprintf (ira_dump_file,
2174 " Moving ranges of a%dr%d to a%dr%d: ",
2175 ALLOCNO_NUM (a), REGNO (ALLOCNO_REG (a)),
2176 ALLOCNO_NUM (parent_a),
2177 REGNO (ALLOCNO_REG (parent_a)));
2178 ira_print_live_range_list (ira_dump_file,
2179 ALLOCNO_LIVE_RANGES (a));
2181 change_allocno_in_range_list (ALLOCNO_LIVE_RANGES (a), parent_a);
2182 ALLOCNO_LIVE_RANGES (parent_a)
2183 = merge_ranges (ALLOCNO_LIVE_RANGES (a),
2184 ALLOCNO_LIVE_RANGES (parent_a));
2185 merged_p = true;
2186 ALLOCNO_LIVE_RANGES (a) = NULL;
2187 ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)
2188 = (ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)
2189 || ALLOCNO_MEM_OPTIMIZED_DEST_P (a));
2190 continue;
2192 new_pseudos_p = true;
2193 first = ALLOCNO_MEM_OPTIMIZED_DEST (a) == NULL ? NULL : a;
2194 stop_p = false;
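/* This allocno starts a new pseudo: walk up the loop tree and subtract
   its accumulated references, frequencies and costs from every
   ancestor, undoing the earlier upward propagation of this data.
   Call-related data is no longer subtracted once the destination
   allocno of FIRST's removed store has been reached.  */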
2195 for (;;)
2197 if (first == NULL
2198 && ALLOCNO_MEM_OPTIMIZED_DEST (parent_a) != NULL)
2199 first = parent_a;
2200 ALLOCNO_NREFS (parent_a) -= ALLOCNO_NREFS (a);
2201 ALLOCNO_FREQ (parent_a) -= ALLOCNO_FREQ (a);
2202 if (first != NULL
2203 && ALLOCNO_MEM_OPTIMIZED_DEST (first) == parent_a)
2204 stop_p = true;
2205 else if (!stop_p)
2207 ALLOCNO_CALL_FREQ (parent_a) -= ALLOCNO_CALL_FREQ (a);
2208 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
2209 -= ALLOCNO_CALLS_CROSSED_NUM (a);
2210 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
2211 -= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
2213 ira_assert (ALLOCNO_CALLS_CROSSED_NUM (parent_a) >= 0
2214 && ALLOCNO_NREFS (parent_a) >= 0
2215 && ALLOCNO_FREQ (parent_a) >= 0);
2216 cover_class = ALLOCNO_COVER_CLASS (parent_a);
2217 hard_regs_num = ira_class_hard_regs_num[cover_class];
2218 if (ALLOCNO_HARD_REG_COSTS (a) != NULL
2219 && ALLOCNO_HARD_REG_COSTS (parent_a) != NULL)
2220 for (j = 0; j < hard_regs_num; j++)
2221 ALLOCNO_HARD_REG_COSTS (parent_a)[j]
2222 -= ALLOCNO_HARD_REG_COSTS (a)[j];
2223 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL
2224 && ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a) != NULL)
2225 for (j = 0; j < hard_regs_num; j++)
2226 ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a)[j]
2227 -= ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[j];
2228 ALLOCNO_COVER_CLASS_COST (parent_a)
2229 -= ALLOCNO_COVER_CLASS_COST (a);
2230 ALLOCNO_MEMORY_COST (parent_a) -= ALLOCNO_MEMORY_COST (a);
2231 if (ALLOCNO_CAP (parent_a) != NULL
2232 || (parent
2233 = ALLOCNO_LOOP_TREE_NODE (parent_a)->parent) == NULL
2234 || (parent_a = (parent->regno_allocno_map
2235 [ALLOCNO_REGNO (parent_a)])) == NULL)
2236 break;
2238 ALLOCNO_COPIES (a) = NULL;
2239 regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))] = a;
2241 if (mem_dest_p && copy_info_to_removed_store_destinations (i))
2242 merged_p = true;
2244 ira_assert (new_pseudos_p || ira_max_point_before_emit == ira_max_point);
2245 if (merged_p || ira_max_point_before_emit != ira_max_point)
2246 ira_rebuild_start_finish_chains ();
2247 if (new_pseudos_p)
2249 /* Rebuild conflicts. */
2250 FOR_EACH_ALLOCNO (a, ai)
2252 if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
2253 || ALLOCNO_CAP_MEMBER (a) != NULL)
2254 continue;
2255 for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
2256 ira_assert (r->allocno == a);
2257 clear_allocno_conflicts (a);
2259 allocnos_live = sparseset_alloc (ira_allocnos_num);
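/* Sweep all program points: a range starting at a point makes its
   allocno live and conflicting with every live allocno of the same
   cover class, and a range finishing at a point removes its allocno
   from the live set.  */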
2260 for (i = 0; i < ira_max_point; i++)
2262 for (r = ira_start_point_ranges[i]; r != NULL; r = r->start_next)
2264 a = r->allocno;
2265 if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
2266 || ALLOCNO_CAP_MEMBER (a) != NULL)
2267 continue;
2268 num = ALLOCNO_NUM (a);
2269 cover_class = ALLOCNO_COVER_CLASS (a);
2270 sparseset_set_bit (allocnos_live, num);
2271 EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, n)
2273 ira_allocno_t live_a = ira_allocnos[n];
2275 if (cover_class == ALLOCNO_COVER_CLASS (live_a)
2276 /* Don't set up a conflict of the allocno with itself. */
2277 && num != (int) n)
2278 ira_add_allocno_conflict (a, live_a);
2282 for (r = ira_finish_point_ranges[i]; r != NULL; r = r->finish_next)
2283 sparseset_clear_bit (allocnos_live, ALLOCNO_NUM (r->allocno));
2285 sparseset_free (allocnos_live);
2286 compress_conflict_vecs ();
2288 /* Mark some copies for removal and change allocnos in the
2289 remaining copies. */
2290 FOR_EACH_COPY (cp, ci)
2292 if (ALLOCNO_CAP_MEMBER (cp->first) != NULL
2293 || ALLOCNO_CAP_MEMBER (cp->second) != NULL)
2295 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2296 fprintf
2297 (ira_dump_file, " Remove cp%d:%c%dr%d-%c%dr%d\n",
2298 cp->num, ALLOCNO_CAP_MEMBER (cp->first) != NULL ? 'c' : 'a',
2299 ALLOCNO_NUM (cp->first), REGNO (ALLOCNO_REG (cp->first)),
2300 ALLOCNO_CAP_MEMBER (cp->second) != NULL ? 'c' : 'a',
2301 ALLOCNO_NUM (cp->second), REGNO (ALLOCNO_REG (cp->second)));
2302 cp->loop_tree_node = NULL;
2303 continue;
2305 first = regno_top_level_allocno_map[REGNO (ALLOCNO_REG (cp->first))];
2306 second = regno_top_level_allocno_map[REGNO (ALLOCNO_REG (cp->second))];
2307 node = cp->loop_tree_node;
2308 if (node == NULL)
2309 keep_p = true; /* It is a copy generated in ira-emit.c. */
2310 else
2312 /* Check that the copy was not propagated from a level on
2313 which we will have different pseudos. */
2314 node_first = node->regno_allocno_map[ALLOCNO_REGNO (cp->first)];
2315 node_second = node->regno_allocno_map[ALLOCNO_REGNO (cp->second)];
2316 keep_p = ((REGNO (ALLOCNO_REG (first))
2317 == REGNO (ALLOCNO_REG (node_first)))
2318 && (REGNO (ALLOCNO_REG (second))
2319 == REGNO (ALLOCNO_REG (node_second))));
2321 if (keep_p)
2323 cp->loop_tree_node = ira_loop_tree_root;
2324 cp->first = first;
2325 cp->second = second;
2327 else
2329 cp->loop_tree_node = NULL;
2330 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2331 fprintf (ira_dump_file, " Remove cp%d:a%dr%d-a%dr%d\n",
2332 cp->num, ALLOCNO_NUM (cp->first),
2333 REGNO (ALLOCNO_REG (cp->first)), ALLOCNO_NUM (cp->second),
2334 REGNO (ALLOCNO_REG (cp->second)));
2337 /* Remove unnecessary allocnos on lower levels of the loop tree. */
2338 FOR_EACH_ALLOCNO (a, ai)
2340 if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
2341 || ALLOCNO_CAP_MEMBER (a) != NULL)
2343 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2344 fprintf (ira_dump_file, " Remove a%dr%d\n",
2345 ALLOCNO_NUM (a), REGNO (ALLOCNO_REG (a)));
2346 finish_allocno (a);
2347 continue;
2349 ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
2350 ALLOCNO_REGNO (a) = REGNO (ALLOCNO_REG (a));
2351 ALLOCNO_CAP (a) = NULL;
2352 /* Restore updated costs for assignments from reload. */
2353 ALLOCNO_UPDATED_MEMORY_COST (a) = ALLOCNO_MEMORY_COST (a);
2354 ALLOCNO_UPDATED_COVER_CLASS_COST (a) = ALLOCNO_COVER_CLASS_COST (a);
2355 if (! ALLOCNO_ASSIGNED_P (a))
2356 ira_free_allocno_updated_costs (a);
2357 ira_assert (ALLOCNO_UPDATED_HARD_REG_COSTS (a) == NULL);
2358 ira_assert (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) == NULL);
2360 /* Remove unnecessary copies. */
2361 FOR_EACH_COPY (cp, ci)
2363 if (cp->loop_tree_node == NULL)
2365 ira_copies[cp->num] = NULL;
2366 finish_copy (cp);
2367 continue;
2369 ira_assert
2370 (ALLOCNO_LOOP_TREE_NODE (cp->first) == ira_loop_tree_root
2371 && ALLOCNO_LOOP_TREE_NODE (cp->second) == ira_loop_tree_root);
2372 ira_add_allocno_copy_to_list (cp);
2373 ira_swap_allocno_copy_ends_if_necessary (cp);
2375 rebuild_regno_allocno_maps ();
2376 if (ira_max_point != ira_max_point_before_emit)
2377 ira_compress_allocno_live_ranges ();
2378 ira_free (regno_top_level_allocno_map);
2383 #ifdef ENABLE_IRA_CHECKING
2384 /* Check the creation of all allocnos. Allocnos on lower levels
2385 should have corresponding allocnos or caps on all upper levels. */
2386 static void
2387 check_allocno_creation (void)
2389 ira_allocno_t a;
2390 ira_allocno_iterator ai;
2391 ira_loop_tree_node_t loop_tree_node;
2393 FOR_EACH_ALLOCNO (a, ai)
2395 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
2396 ira_assert (bitmap_bit_p (loop_tree_node->all_allocnos,
2397 ALLOCNO_NUM (a)));
2398 if (loop_tree_node == ira_loop_tree_root)
2399 continue;
2400 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2401 ira_assert (ALLOCNO_CAP (a) != NULL);
2402 else if (ALLOCNO_CAP (a) == NULL)
2403 ira_assert (loop_tree_node->parent
2404 ->regno_allocno_map[ALLOCNO_REGNO (a)] != NULL
2405 && bitmap_bit_p (loop_tree_node->border_allocnos,
2406 ALLOCNO_NUM (a)));
2409 #endif
2411 /* Create an internal representation (IR) for IRA (allocnos, copies,
2412 loop tree nodes). If LOOPS_P is FALSE, the nodes corresponding to
2413 the loops (except the root, which corresponds to the whole
2414 function) and, correspondingly, the allocnos for the loops will
2415 not be created. Such a parameter value is used for Chaitin-Briggs
2416 coloring. The function returns TRUE if we generate a loop
2417 structure (besides the nodes representing the whole function and
2418 the basic blocks) for regional allocation. A true return means
2419 that we really need to flatten the IR before reload. */
2420 bool
2421 ira_build (bool loops_p)
2423 df_analyze ();
2425 initiate_cost_vectors ();
2426 initiate_allocnos ();
2427 initiate_copies ();
2428 create_loop_tree_nodes (loops_p);
2429 form_loop_tree ();
2430 create_allocnos ();
2431 ira_costs ();
2432 ira_create_allocno_live_ranges ();
2433 remove_unnecessary_regions ();
2434 ira_compress_allocno_live_ranges ();
2435 loops_p = more_one_region_p ();
2436 if (loops_p)
2438 propagate_allocno_info ();
2439 create_caps ();
2441 ira_tune_allocno_costs_and_cover_classes ();
2442 #ifdef ENABLE_IRA_CHECKING
2443 check_allocno_creation ();
2444 #endif
2445 setup_min_max_allocno_live_range_point ();
2446 sort_conflict_id_allocno_map ();
2447 setup_min_max_conflict_allocno_ids ();
2448 ira_build_conflicts ();
2449 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
2450 print_copies (ira_dump_file);
2451 if (internal_flag_ira_verbose > 0 && ira_dump_file != NULL)
2453 int n, nr;
2454 ira_allocno_t a;
2455 allocno_live_range_t r;
2456 ira_allocno_iterator ai;
2458 n = 0;
2459 FOR_EACH_ALLOCNO (a, ai)
2460 n += ALLOCNO_CONFLICT_ALLOCNOS_NUM (a);
2461 nr = 0;
2462 FOR_EACH_ALLOCNO (a, ai)
2463 for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
2464 nr++;
2465 fprintf (ira_dump_file, " regions=%d, blocks=%d, points=%d\n",
2466 VEC_length (loop_p, ira_loops.larray), n_basic_blocks,
2467 ira_max_point);
2468 fprintf (ira_dump_file,
2469 " allocnos=%d, copies=%d, conflicts=%d, ranges=%d\n",
2470 ira_allocnos_num, ira_copies_num, n, nr);
2472 return loops_p;
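/* The usual sequence, driven from ira.c, is: ira_build, register
   assignment over the region tree, emission of move insns on the loop
   borders by ira-emit.c, ira_flattening to get back a one-region IR
   for reload, and finally ira_destroy.  */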
2475 /* Release the data created by function ira_build. */
2476 void
2477 ira_destroy (void)
2479 finish_loop_tree_nodes ();
2480 finish_copies ();
2481 finish_allocnos ();
2482 finish_cost_vectors ();
2483 ira_finish_allocno_live_ranges ();