aix.h (FP_SAVE_INLINE, [...]): Delete.
[official-gcc.git] / gcc / ira-build.c
blob2edab52b1e5ca7992cf57e25e42b3d7de8b7e137
1 /* Building internal representation for IRA.
2 Copyright (C) 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "target.h"
29 #include "regs.h"
30 #include "flags.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "insn-config.h"
34 #include "recog.h"
35 #include "diagnostic-core.h"
36 #include "params.h"
37 #include "df.h"
38 #include "output.h"
39 #include "reload.h"
40 #include "sparseset.h"
41 #include "ira-int.h"
42 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
44 static ira_copy_t find_allocno_copy (ira_allocno_t, ira_allocno_t, rtx,
45 ira_loop_tree_node_t);
47 /* The root of the loop tree corresponding to the all function. */
48 ira_loop_tree_node_t ira_loop_tree_root;
50 /* Height of the loop tree. */
51 int ira_loop_tree_height;
53 /* All nodes representing basic blocks are referred through the
54 following array. We can not use basic block member `aux' for this
55 because it is used for insertion of insns on edges. */
56 ira_loop_tree_node_t ira_bb_nodes;
58 /* All nodes representing loops are referred through the following
59 array. */
60 ira_loop_tree_node_t ira_loop_nodes;
62 /* Map regno -> allocnos with given regno (see comments for
63 allocno member `next_regno_allocno'). */
64 ira_allocno_t *ira_regno_allocno_map;
66 /* Array of references to all allocnos. The order number of the
67 allocno corresponds to the index in the array. Removed allocnos
68 have NULL element value. */
69 ira_allocno_t *ira_allocnos;
71 /* Sizes of the previous array. */
72 int ira_allocnos_num;
74 /* Count of conflict record structures we've created, used when creating
75 a new conflict id. */
76 int ira_objects_num;
78 /* Map a conflict id to its conflict record. */
79 ira_object_t *ira_object_id_map;
81 /* Array of references to all copies. The order number of the copy
82 corresponds to the index in the array. Removed copies have NULL
83 element value. */
84 ira_copy_t *ira_copies;
86 /* Size of the previous array. */
87 int ira_copies_num;
91 /* LAST_BASIC_BLOCK before generating additional insns because of live
92 range splitting. Emitting insns on a critical edge creates a new
93 basic block. */
94 static int last_basic_block_before_change;
96 /* Initialize some members in loop tree node NODE. Use LOOP_NUM for
97 the member loop_num. */
98 static void
99 init_loop_tree_node (struct ira_loop_tree_node *node, int loop_num)
101 int max_regno = max_reg_num ();
103 node->regno_allocno_map
104 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t) * max_regno);
105 memset (node->regno_allocno_map, 0, sizeof (ira_allocno_t) * max_regno);
106 memset (node->reg_pressure, 0, sizeof (node->reg_pressure));
107 node->all_allocnos = ira_allocate_bitmap ();
108 node->modified_regnos = ira_allocate_bitmap ();
109 node->border_allocnos = ira_allocate_bitmap ();
110 node->local_copies = ira_allocate_bitmap ();
111 node->loop_num = loop_num;
112 node->children = NULL;
113 node->subloops = NULL;
117 /* The following function allocates the loop tree nodes. If
118 CURRENT_LOOPS is NULL, the nodes corresponding to the loops (except
119 the root which corresponds the all function) will be not allocated
120 but nodes will still be allocated for basic blocks. */
121 static void
122 create_loop_tree_nodes (void)
124 unsigned int i, j;
125 bool skip_p;
126 edge_iterator ei;
127 edge e;
128 VEC (edge, heap) *edges;
129 loop_p loop;
131 ira_bb_nodes
132 = ((struct ira_loop_tree_node *)
133 ira_allocate (sizeof (struct ira_loop_tree_node) * last_basic_block));
134 last_basic_block_before_change = last_basic_block;
135 for (i = 0; i < (unsigned int) last_basic_block; i++)
137 ira_bb_nodes[i].regno_allocno_map = NULL;
138 memset (ira_bb_nodes[i].reg_pressure, 0,
139 sizeof (ira_bb_nodes[i].reg_pressure));
140 ira_bb_nodes[i].all_allocnos = NULL;
141 ira_bb_nodes[i].modified_regnos = NULL;
142 ira_bb_nodes[i].border_allocnos = NULL;
143 ira_bb_nodes[i].local_copies = NULL;
145 if (current_loops == NULL)
147 ira_loop_nodes = ((struct ira_loop_tree_node *)
148 ira_allocate (sizeof (struct ira_loop_tree_node)));
149 init_loop_tree_node (ira_loop_nodes, 0);
150 return;
152 ira_loop_nodes = ((struct ira_loop_tree_node *)
153 ira_allocate (sizeof (struct ira_loop_tree_node)
154 * VEC_length (loop_p, ira_loops.larray)));
155 FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
157 if (loop != ira_loops.tree_root)
159 ira_loop_nodes[i].regno_allocno_map = NULL;
160 skip_p = false;
161 FOR_EACH_EDGE (e, ei, loop->header->preds)
162 if (e->src != loop->latch
163 && (e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
165 skip_p = true;
166 break;
168 if (skip_p)
169 continue;
170 edges = get_loop_exit_edges (loop);
171 FOR_EACH_VEC_ELT (edge, edges, j, e)
172 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
174 skip_p = true;
175 break;
177 VEC_free (edge, heap, edges);
178 if (skip_p)
179 continue;
181 init_loop_tree_node (&ira_loop_nodes[i], loop->num);
185 /* The function returns TRUE if there are more one allocation
186 region. */
187 static bool
188 more_one_region_p (void)
190 unsigned int i;
191 loop_p loop;
193 if (current_loops != NULL)
194 FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
195 if (ira_loop_nodes[i].regno_allocno_map != NULL
196 && ira_loop_tree_root != &ira_loop_nodes[i])
197 return true;
198 return false;
201 /* Free the loop tree node of a loop. */
202 static void
203 finish_loop_tree_node (ira_loop_tree_node_t loop)
205 if (loop->regno_allocno_map != NULL)
207 ira_assert (loop->bb == NULL);
208 ira_free_bitmap (loop->local_copies);
209 ira_free_bitmap (loop->border_allocnos);
210 ira_free_bitmap (loop->modified_regnos);
211 ira_free_bitmap (loop->all_allocnos);
212 ira_free (loop->regno_allocno_map);
213 loop->regno_allocno_map = NULL;
217 /* Free the loop tree nodes. */
218 static void
219 finish_loop_tree_nodes (void)
221 unsigned int i;
222 loop_p loop;
224 if (current_loops == NULL)
225 finish_loop_tree_node (&ira_loop_nodes[0]);
226 else
227 FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
228 finish_loop_tree_node (&ira_loop_nodes[i]);
229 ira_free (ira_loop_nodes);
230 for (i = 0; i < (unsigned int) last_basic_block_before_change; i++)
232 if (ira_bb_nodes[i].local_copies != NULL)
233 ira_free_bitmap (ira_bb_nodes[i].local_copies);
234 if (ira_bb_nodes[i].border_allocnos != NULL)
235 ira_free_bitmap (ira_bb_nodes[i].border_allocnos);
236 if (ira_bb_nodes[i].modified_regnos != NULL)
237 ira_free_bitmap (ira_bb_nodes[i].modified_regnos);
238 if (ira_bb_nodes[i].all_allocnos != NULL)
239 ira_free_bitmap (ira_bb_nodes[i].all_allocnos);
240 if (ira_bb_nodes[i].regno_allocno_map != NULL)
241 ira_free (ira_bb_nodes[i].regno_allocno_map);
243 ira_free (ira_bb_nodes);
248 /* The following recursive function adds LOOP to the loop tree
249 hierarchy. LOOP is added only once. If LOOP is NULL we adding
250 loop designating the whole function when CFG loops are not
251 built. */
252 static void
253 add_loop_to_tree (struct loop *loop)
255 int loop_num;
256 struct loop *parent;
257 ira_loop_tree_node_t loop_node, parent_node;
259 /* We can not use loop node access macros here because of potential
260 checking and because the nodes are not initialized enough
261 yet. */
262 if (loop != NULL && loop_outer (loop) != NULL)
263 add_loop_to_tree (loop_outer (loop));
264 loop_num = loop != NULL ? loop->num : 0;
265 if (ira_loop_nodes[loop_num].regno_allocno_map != NULL
266 && ira_loop_nodes[loop_num].children == NULL)
268 /* We have not added loop node to the tree yet. */
269 loop_node = &ira_loop_nodes[loop_num];
270 loop_node->loop = loop;
271 loop_node->bb = NULL;
272 if (loop == NULL)
273 parent = NULL;
274 else
276 for (parent = loop_outer (loop);
277 parent != NULL;
278 parent = loop_outer (parent))
279 if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
280 break;
282 if (parent == NULL)
284 loop_node->next = NULL;
285 loop_node->subloop_next = NULL;
286 loop_node->parent = NULL;
288 else
290 parent_node = &ira_loop_nodes[parent->num];
291 loop_node->next = parent_node->children;
292 parent_node->children = loop_node;
293 loop_node->subloop_next = parent_node->subloops;
294 parent_node->subloops = loop_node;
295 loop_node->parent = parent_node;
300 /* The following recursive function sets up levels of nodes of the
301 tree given its root LOOP_NODE. The enumeration starts with LEVEL.
302 The function returns maximal value of level in the tree + 1. */
303 static int
304 setup_loop_tree_level (ira_loop_tree_node_t loop_node, int level)
306 int height, max_height;
307 ira_loop_tree_node_t subloop_node;
309 ira_assert (loop_node->bb == NULL);
310 loop_node->level = level;
311 max_height = level + 1;
312 for (subloop_node = loop_node->subloops;
313 subloop_node != NULL;
314 subloop_node = subloop_node->subloop_next)
316 ira_assert (subloop_node->bb == NULL);
317 height = setup_loop_tree_level (subloop_node, level + 1);
318 if (height > max_height)
319 max_height = height;
321 return max_height;
324 /* Create the loop tree. The algorithm is designed to provide correct
325 order of loops (they are ordered by their last loop BB) and basic
326 blocks in the chain formed by member next. */
327 static void
328 form_loop_tree (void)
330 basic_block bb;
331 struct loop *parent;
332 ira_loop_tree_node_t bb_node, loop_node;
334 /* We can not use loop/bb node access macros because of potential
335 checking and because the nodes are not initialized enough
336 yet. */
337 FOR_EACH_BB (bb)
339 bb_node = &ira_bb_nodes[bb->index];
340 bb_node->bb = bb;
341 bb_node->loop = NULL;
342 bb_node->subloops = NULL;
343 bb_node->children = NULL;
344 bb_node->subloop_next = NULL;
345 bb_node->next = NULL;
346 if (current_loops == NULL)
347 parent = NULL;
348 else
350 for (parent = bb->loop_father;
351 parent != NULL;
352 parent = loop_outer (parent))
353 if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
354 break;
356 add_loop_to_tree (parent);
357 loop_node = &ira_loop_nodes[parent == NULL ? 0 : parent->num];
358 bb_node->next = loop_node->children;
359 bb_node->parent = loop_node;
360 loop_node->children = bb_node;
362 ira_loop_tree_root = IRA_LOOP_NODE_BY_INDEX (0);
363 ira_loop_tree_height = setup_loop_tree_level (ira_loop_tree_root, 0);
364 ira_assert (ira_loop_tree_root->regno_allocno_map != NULL);
369 /* Rebuild IRA_REGNO_ALLOCNO_MAP and REGNO_ALLOCNO_MAPs of the loop
370 tree nodes. */
371 static void
372 rebuild_regno_allocno_maps (void)
374 unsigned int l;
375 int max_regno, regno;
376 ira_allocno_t a;
377 ira_loop_tree_node_t loop_tree_node;
378 loop_p loop;
379 ira_allocno_iterator ai;
381 ira_assert (current_loops != NULL);
382 max_regno = max_reg_num ();
383 FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, l, loop)
384 if (ira_loop_nodes[l].regno_allocno_map != NULL)
386 ira_free (ira_loop_nodes[l].regno_allocno_map);
387 ira_loop_nodes[l].regno_allocno_map
388 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
389 * max_regno);
390 memset (ira_loop_nodes[l].regno_allocno_map, 0,
391 sizeof (ira_allocno_t) * max_regno);
393 ira_free (ira_regno_allocno_map);
394 ira_regno_allocno_map
395 = (ira_allocno_t *) ira_allocate (max_regno * sizeof (ira_allocno_t));
396 memset (ira_regno_allocno_map, 0, max_regno * sizeof (ira_allocno_t));
397 FOR_EACH_ALLOCNO (a, ai)
399 if (ALLOCNO_CAP_MEMBER (a) != NULL)
400 /* Caps are not in the regno allocno maps. */
401 continue;
402 regno = ALLOCNO_REGNO (a);
403 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
404 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
405 ira_regno_allocno_map[regno] = a;
406 if (loop_tree_node->regno_allocno_map[regno] == NULL)
407 /* Remember that we can create temporary allocnos to break
408 cycles in register shuffle. */
409 loop_tree_node->regno_allocno_map[regno] = a;
414 /* Pools for allocnos, allocno live ranges and objects. */
415 static alloc_pool allocno_pool, live_range_pool, object_pool;
417 /* Vec containing references to all created allocnos. It is a
418 container of array allocnos. */
419 static VEC(ira_allocno_t,heap) *allocno_vec;
421 /* Vec containing references to all created ira_objects. It is a
422 container of ira_object_id_map. */
423 static VEC(ira_object_t,heap) *ira_object_id_map_vec;
425 /* Initialize data concerning allocnos. */
426 static void
427 initiate_allocnos (void)
429 live_range_pool
430 = create_alloc_pool ("live ranges",
431 sizeof (struct live_range), 100);
432 allocno_pool
433 = create_alloc_pool ("allocnos", sizeof (struct ira_allocno), 100);
434 object_pool
435 = create_alloc_pool ("objects", sizeof (struct ira_object), 100);
436 allocno_vec = VEC_alloc (ira_allocno_t, heap, max_reg_num () * 2);
437 ira_allocnos = NULL;
438 ira_allocnos_num = 0;
439 ira_objects_num = 0;
440 ira_object_id_map_vec
441 = VEC_alloc (ira_object_t, heap, max_reg_num () * 2);
442 ira_object_id_map = NULL;
443 ira_regno_allocno_map
444 = (ira_allocno_t *) ira_allocate (max_reg_num ()
445 * sizeof (ira_allocno_t));
446 memset (ira_regno_allocno_map, 0, max_reg_num () * sizeof (ira_allocno_t));
449 /* Create and return an object corresponding to a new allocno A. */
450 static ira_object_t
451 ira_create_object (ira_allocno_t a, int subword)
453 enum reg_class aclass = ALLOCNO_CLASS (a);
454 ira_object_t obj = (ira_object_t) pool_alloc (object_pool);
456 OBJECT_ALLOCNO (obj) = a;
457 OBJECT_SUBWORD (obj) = subword;
458 OBJECT_CONFLICT_ID (obj) = ira_objects_num;
459 OBJECT_CONFLICT_VEC_P (obj) = false;
460 OBJECT_CONFLICT_ARRAY (obj) = NULL;
461 OBJECT_NUM_CONFLICTS (obj) = 0;
462 COPY_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), ira_no_alloc_regs);
463 COPY_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), ira_no_alloc_regs);
464 IOR_COMPL_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
465 reg_class_contents[aclass]);
466 IOR_COMPL_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
467 reg_class_contents[aclass]);
468 OBJECT_MIN (obj) = INT_MAX;
469 OBJECT_MAX (obj) = -1;
470 OBJECT_LIVE_RANGES (obj) = NULL;
472 VEC_safe_push (ira_object_t, heap, ira_object_id_map_vec, obj);
473 ira_object_id_map
474 = VEC_address (ira_object_t, ira_object_id_map_vec);
475 ira_objects_num = VEC_length (ira_object_t, ira_object_id_map_vec);
477 return obj;
480 /* Create and return the allocno corresponding to REGNO in
481 LOOP_TREE_NODE. Add the allocno to the list of allocnos with the
482 same regno if CAP_P is FALSE. */
483 ira_allocno_t
484 ira_create_allocno (int regno, bool cap_p,
485 ira_loop_tree_node_t loop_tree_node)
487 ira_allocno_t a;
489 a = (ira_allocno_t) pool_alloc (allocno_pool);
490 ALLOCNO_REGNO (a) = regno;
491 ALLOCNO_LOOP_TREE_NODE (a) = loop_tree_node;
492 if (! cap_p)
494 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
495 ira_regno_allocno_map[regno] = a;
496 if (loop_tree_node->regno_allocno_map[regno] == NULL)
497 /* Remember that we can create temporary allocnos to break
498 cycles in register shuffle on region borders (see
499 ira-emit.c). */
500 loop_tree_node->regno_allocno_map[regno] = a;
502 ALLOCNO_CAP (a) = NULL;
503 ALLOCNO_CAP_MEMBER (a) = NULL;
504 ALLOCNO_NUM (a) = ira_allocnos_num;
505 bitmap_set_bit (loop_tree_node->all_allocnos, ALLOCNO_NUM (a));
506 ALLOCNO_NREFS (a) = 0;
507 ALLOCNO_FREQ (a) = 0;
508 ALLOCNO_HARD_REGNO (a) = -1;
509 ALLOCNO_CALL_FREQ (a) = 0;
510 ALLOCNO_CALLS_CROSSED_NUM (a) = 0;
511 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a) = 0;
512 #ifdef STACK_REGS
513 ALLOCNO_NO_STACK_REG_P (a) = false;
514 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = false;
515 #endif
516 ALLOCNO_DONT_REASSIGN_P (a) = false;
517 ALLOCNO_BAD_SPILL_P (a) = false;
518 ALLOCNO_ASSIGNED_P (a) = false;
519 ALLOCNO_MODE (a) = (regno < 0 ? VOIDmode : PSEUDO_REGNO_MODE (regno));
520 ALLOCNO_COPIES (a) = NULL;
521 ALLOCNO_HARD_REG_COSTS (a) = NULL;
522 ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
523 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
524 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
525 ALLOCNO_CLASS (a) = NO_REGS;
526 ALLOCNO_UPDATED_CLASS_COST (a) = 0;
527 ALLOCNO_CLASS_COST (a) = 0;
528 ALLOCNO_MEMORY_COST (a) = 0;
529 ALLOCNO_UPDATED_MEMORY_COST (a) = 0;
530 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) = 0;
531 ALLOCNO_NUM_OBJECTS (a) = 0;
533 ALLOCNO_ADD_DATA (a) = NULL;
534 VEC_safe_push (ira_allocno_t, heap, allocno_vec, a);
535 ira_allocnos = VEC_address (ira_allocno_t, allocno_vec);
536 ira_allocnos_num = VEC_length (ira_allocno_t, allocno_vec);
538 return a;
541 /* Set up register class for A and update its conflict hard
542 registers. */
543 void
544 ira_set_allocno_class (ira_allocno_t a, enum reg_class aclass)
546 ira_allocno_object_iterator oi;
547 ira_object_t obj;
549 ALLOCNO_CLASS (a) = aclass;
550 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
552 IOR_COMPL_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
553 reg_class_contents[aclass]);
554 IOR_COMPL_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
555 reg_class_contents[aclass]);
559 /* Determine the number of objects we should associate with allocno A
560 and allocate them. */
561 void
562 ira_create_allocno_objects (ira_allocno_t a)
564 enum machine_mode mode = ALLOCNO_MODE (a);
565 enum reg_class aclass = ALLOCNO_CLASS (a);
566 int n = ira_reg_class_max_nregs[aclass][mode];
567 int i;
569 if (GET_MODE_SIZE (mode) != 2 * UNITS_PER_WORD || n != 2)
570 n = 1;
572 ALLOCNO_NUM_OBJECTS (a) = n;
573 for (i = 0; i < n; i++)
574 ALLOCNO_OBJECT (a, i) = ira_create_object (a, i);
577 /* For each allocno, set ALLOCNO_NUM_OBJECTS and create the
578 ALLOCNO_OBJECT structures. This must be called after the allocno
579 classes are known. */
580 static void
581 create_allocno_objects (void)
583 ira_allocno_t a;
584 ira_allocno_iterator ai;
586 FOR_EACH_ALLOCNO (a, ai)
587 ira_create_allocno_objects (a);
590 /* Merge hard register conflict information for all objects associated with
591 allocno TO into the corresponding objects associated with FROM.
592 If TOTAL_ONLY is true, we only merge OBJECT_TOTAL_CONFLICT_HARD_REGS. */
593 static void
594 merge_hard_reg_conflicts (ira_allocno_t from, ira_allocno_t to,
595 bool total_only)
597 int i;
598 gcc_assert (ALLOCNO_NUM_OBJECTS (to) == ALLOCNO_NUM_OBJECTS (from));
599 for (i = 0; i < ALLOCNO_NUM_OBJECTS (to); i++)
601 ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
602 ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
604 if (!total_only)
605 IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (to_obj),
606 OBJECT_CONFLICT_HARD_REGS (from_obj));
607 IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (to_obj),
608 OBJECT_TOTAL_CONFLICT_HARD_REGS (from_obj));
610 #ifdef STACK_REGS
611 if (!total_only && ALLOCNO_NO_STACK_REG_P (from))
612 ALLOCNO_NO_STACK_REG_P (to) = true;
613 if (ALLOCNO_TOTAL_NO_STACK_REG_P (from))
614 ALLOCNO_TOTAL_NO_STACK_REG_P (to) = true;
615 #endif
618 /* Update hard register conflict information for all objects associated with
619 A to include the regs in SET. */
620 void
621 ior_hard_reg_conflicts (ira_allocno_t a, HARD_REG_SET *set)
623 ira_allocno_object_iterator i;
624 ira_object_t obj;
626 FOR_EACH_ALLOCNO_OBJECT (a, obj, i)
628 IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), *set);
629 IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), *set);
633 /* Return TRUE if a conflict vector with NUM elements is more
634 profitable than a conflict bit vector for OBJ. */
635 bool
636 ira_conflict_vector_profitable_p (ira_object_t obj, int num)
638 int nw;
639 int max = OBJECT_MAX (obj);
640 int min = OBJECT_MIN (obj);
642 if (max < min)
643 /* We prefer a bit vector in such case because it does not result
644 in allocation. */
645 return false;
647 nw = (max - min + IRA_INT_BITS) / IRA_INT_BITS;
648 return (2 * sizeof (ira_object_t) * (num + 1)
649 < 3 * nw * sizeof (IRA_INT_TYPE));
652 /* Allocates and initialize the conflict vector of OBJ for NUM
653 conflicting objects. */
654 void
655 ira_allocate_conflict_vec (ira_object_t obj, int num)
657 int size;
658 ira_object_t *vec;
660 ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
661 num++; /* for NULL end marker */
662 size = sizeof (ira_object_t) * num;
663 OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
664 vec = (ira_object_t *) OBJECT_CONFLICT_ARRAY (obj);
665 vec[0] = NULL;
666 OBJECT_NUM_CONFLICTS (obj) = 0;
667 OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
668 OBJECT_CONFLICT_VEC_P (obj) = true;
671 /* Allocate and initialize the conflict bit vector of OBJ. */
672 static void
673 allocate_conflict_bit_vec (ira_object_t obj)
675 unsigned int size;
677 ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
678 size = ((OBJECT_MAX (obj) - OBJECT_MIN (obj) + IRA_INT_BITS)
679 / IRA_INT_BITS * sizeof (IRA_INT_TYPE));
680 OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
681 memset (OBJECT_CONFLICT_ARRAY (obj), 0, size);
682 OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
683 OBJECT_CONFLICT_VEC_P (obj) = false;
686 /* Allocate and initialize the conflict vector or conflict bit vector
687 of OBJ for NUM conflicting allocnos whatever is more profitable. */
688 void
689 ira_allocate_object_conflicts (ira_object_t obj, int num)
691 if (ira_conflict_vector_profitable_p (obj, num))
692 ira_allocate_conflict_vec (obj, num);
693 else
694 allocate_conflict_bit_vec (obj);
697 /* Add OBJ2 to the conflicts of OBJ1. */
698 static void
699 add_to_conflicts (ira_object_t obj1, ira_object_t obj2)
701 int num;
702 unsigned int size;
704 if (OBJECT_CONFLICT_VEC_P (obj1))
706 ira_object_t *vec = OBJECT_CONFLICT_VEC (obj1);
707 int curr_num = OBJECT_NUM_CONFLICTS (obj1);
708 num = curr_num + 2;
709 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < num * sizeof (ira_object_t))
711 ira_object_t *newvec;
712 size = (3 * num / 2 + 1) * sizeof (ira_allocno_t);
713 newvec = (ira_object_t *) ira_allocate (size);
714 memcpy (newvec, vec, curr_num * sizeof (ira_object_t));
715 ira_free (vec);
716 vec = newvec;
717 OBJECT_CONFLICT_ARRAY (obj1) = vec;
718 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
720 vec[num - 2] = obj2;
721 vec[num - 1] = NULL;
722 OBJECT_NUM_CONFLICTS (obj1)++;
724 else
726 int nw, added_head_nw, id;
727 IRA_INT_TYPE *vec = OBJECT_CONFLICT_BITVEC (obj1);
729 id = OBJECT_CONFLICT_ID (obj2);
730 if (OBJECT_MIN (obj1) > id)
732 /* Expand head of the bit vector. */
733 added_head_nw = (OBJECT_MIN (obj1) - id - 1) / IRA_INT_BITS + 1;
734 nw = (OBJECT_MAX (obj1) - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
735 size = (nw + added_head_nw) * sizeof (IRA_INT_TYPE);
736 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) >= size)
738 memmove ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
739 vec, nw * sizeof (IRA_INT_TYPE));
740 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
742 else
744 size
745 = (3 * (nw + added_head_nw) / 2 + 1) * sizeof (IRA_INT_TYPE);
746 vec = (IRA_INT_TYPE *) ira_allocate (size);
747 memcpy ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
748 OBJECT_CONFLICT_ARRAY (obj1), nw * sizeof (IRA_INT_TYPE));
749 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
750 memset ((char *) vec
751 + (nw + added_head_nw) * sizeof (IRA_INT_TYPE),
752 0, size - (nw + added_head_nw) * sizeof (IRA_INT_TYPE));
753 ira_free (OBJECT_CONFLICT_ARRAY (obj1));
754 OBJECT_CONFLICT_ARRAY (obj1) = vec;
755 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
757 OBJECT_MIN (obj1) -= added_head_nw * IRA_INT_BITS;
759 else if (OBJECT_MAX (obj1) < id)
761 nw = (id - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
762 size = nw * sizeof (IRA_INT_TYPE);
763 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < size)
765 /* Expand tail of the bit vector. */
766 size = (3 * nw / 2 + 1) * sizeof (IRA_INT_TYPE);
767 vec = (IRA_INT_TYPE *) ira_allocate (size);
768 memcpy (vec, OBJECT_CONFLICT_ARRAY (obj1), OBJECT_CONFLICT_ARRAY_SIZE (obj1));
769 memset ((char *) vec + OBJECT_CONFLICT_ARRAY_SIZE (obj1),
770 0, size - OBJECT_CONFLICT_ARRAY_SIZE (obj1));
771 ira_free (OBJECT_CONFLICT_ARRAY (obj1));
772 OBJECT_CONFLICT_ARRAY (obj1) = vec;
773 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
775 OBJECT_MAX (obj1) = id;
777 SET_MINMAX_SET_BIT (vec, id, OBJECT_MIN (obj1), OBJECT_MAX (obj1));
781 /* Add OBJ1 to the conflicts of OBJ2 and vice versa. */
782 static void
783 ira_add_conflict (ira_object_t obj1, ira_object_t obj2)
785 add_to_conflicts (obj1, obj2);
786 add_to_conflicts (obj2, obj1);
789 /* Clear all conflicts of OBJ. */
790 static void
791 clear_conflicts (ira_object_t obj)
793 if (OBJECT_CONFLICT_VEC_P (obj))
795 OBJECT_NUM_CONFLICTS (obj) = 0;
796 OBJECT_CONFLICT_VEC (obj)[0] = NULL;
798 else if (OBJECT_CONFLICT_ARRAY_SIZE (obj) != 0)
800 int nw;
802 nw = (OBJECT_MAX (obj) - OBJECT_MIN (obj)) / IRA_INT_BITS + 1;
803 memset (OBJECT_CONFLICT_BITVEC (obj), 0, nw * sizeof (IRA_INT_TYPE));
/* Scratch array used to detect duplicates in allocno conflict
   vectors.  */
static int *conflict_check;

/* Tick marking entries of conflict_check that belong to the conflict
   vector currently being processed.  */
static int curr_conflict_check_tick;
815 /* Remove duplications in conflict vector of OBJ. */
816 static void
817 compress_conflict_vec (ira_object_t obj)
819 ira_object_t *vec, conflict_obj;
820 int i, j;
822 ira_assert (OBJECT_CONFLICT_VEC_P (obj));
823 vec = OBJECT_CONFLICT_VEC (obj);
824 curr_conflict_check_tick++;
825 for (i = j = 0; (conflict_obj = vec[i]) != NULL; i++)
827 int id = OBJECT_CONFLICT_ID (conflict_obj);
828 if (conflict_check[id] != curr_conflict_check_tick)
830 conflict_check[id] = curr_conflict_check_tick;
831 vec[j++] = conflict_obj;
834 OBJECT_NUM_CONFLICTS (obj) = j;
835 vec[j] = NULL;
838 /* Remove duplications in conflict vectors of all allocnos. */
839 static void
840 compress_conflict_vecs (void)
842 ira_object_t obj;
843 ira_object_iterator oi;
845 conflict_check = (int *) ira_allocate (sizeof (int) * ira_objects_num);
846 memset (conflict_check, 0, sizeof (int) * ira_objects_num);
847 curr_conflict_check_tick = 0;
848 FOR_EACH_OBJECT (obj, oi)
850 if (OBJECT_CONFLICT_VEC_P (obj))
851 compress_conflict_vec (obj);
853 ira_free (conflict_check);
856 /* This recursive function outputs allocno A and if it is a cap the
857 function outputs its members. */
858 void
859 ira_print_expanded_allocno (ira_allocno_t a)
861 basic_block bb;
863 fprintf (ira_dump_file, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
864 if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
865 fprintf (ira_dump_file, ",b%d", bb->index);
866 else
867 fprintf (ira_dump_file, ",l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop_num);
868 if (ALLOCNO_CAP_MEMBER (a) != NULL)
870 fprintf (ira_dump_file, ":");
871 ira_print_expanded_allocno (ALLOCNO_CAP_MEMBER (a));
873 fprintf (ira_dump_file, ")");
876 /* Create and return the cap representing allocno A in the
877 parent loop. */
878 static ira_allocno_t
879 create_cap_allocno (ira_allocno_t a)
881 ira_allocno_t cap;
882 ira_loop_tree_node_t parent;
883 enum reg_class aclass;
885 parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
886 cap = ira_create_allocno (ALLOCNO_REGNO (a), true, parent);
887 ALLOCNO_MODE (cap) = ALLOCNO_MODE (a);
888 aclass = ALLOCNO_CLASS (a);
889 ira_set_allocno_class (cap, aclass);
890 ira_create_allocno_objects (cap);
891 ALLOCNO_CAP_MEMBER (cap) = a;
892 ALLOCNO_CAP (a) = cap;
893 ALLOCNO_CLASS_COST (cap) = ALLOCNO_CLASS_COST (a);
894 ALLOCNO_MEMORY_COST (cap) = ALLOCNO_MEMORY_COST (a);
895 ira_allocate_and_copy_costs
896 (&ALLOCNO_HARD_REG_COSTS (cap), aclass, ALLOCNO_HARD_REG_COSTS (a));
897 ira_allocate_and_copy_costs
898 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (cap), aclass,
899 ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
900 ALLOCNO_BAD_SPILL_P (cap) = ALLOCNO_BAD_SPILL_P (a);
901 ALLOCNO_NREFS (cap) = ALLOCNO_NREFS (a);
902 ALLOCNO_FREQ (cap) = ALLOCNO_FREQ (a);
903 ALLOCNO_CALL_FREQ (cap) = ALLOCNO_CALL_FREQ (a);
905 merge_hard_reg_conflicts (a, cap, false);
907 ALLOCNO_CALLS_CROSSED_NUM (cap) = ALLOCNO_CALLS_CROSSED_NUM (a);
908 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (cap) = ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
909 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
911 fprintf (ira_dump_file, " Creating cap ");
912 ira_print_expanded_allocno (cap);
913 fprintf (ira_dump_file, "\n");
915 return cap;
918 /* Create and return a live range for OBJECT with given attributes. */
919 live_range_t
920 ira_create_live_range (ira_object_t obj, int start, int finish,
921 live_range_t next)
923 live_range_t p;
925 p = (live_range_t) pool_alloc (live_range_pool);
926 p->object = obj;
927 p->start = start;
928 p->finish = finish;
929 p->next = next;
930 return p;
933 /* Create a new live range for OBJECT and queue it at the head of its
934 live range list. */
935 void
936 ira_add_live_range_to_object (ira_object_t object, int start, int finish)
938 live_range_t p;
939 p = ira_create_live_range (object, start, finish,
940 OBJECT_LIVE_RANGES (object));
941 OBJECT_LIVE_RANGES (object) = p;
944 /* Copy allocno live range R and return the result. */
945 static live_range_t
946 copy_live_range (live_range_t r)
948 live_range_t p;
950 p = (live_range_t) pool_alloc (live_range_pool);
951 *p = *r;
952 return p;
955 /* Copy allocno live range list given by its head R and return the
956 result. */
957 live_range_t
958 ira_copy_live_range_list (live_range_t r)
960 live_range_t p, first, last;
962 if (r == NULL)
963 return NULL;
964 for (first = last = NULL; r != NULL; r = r->next)
966 p = copy_live_range (r);
967 if (first == NULL)
968 first = p;
969 else
970 last->next = p;
971 last = p;
973 return first;
976 /* Merge ranges R1 and R2 and returns the result. The function
977 maintains the order of ranges and tries to minimize number of the
978 result ranges. */
979 live_range_t
980 ira_merge_live_ranges (live_range_t r1, live_range_t r2)
982 live_range_t first, last, temp;
984 if (r1 == NULL)
985 return r2;
986 if (r2 == NULL)
987 return r1;
988 for (first = last = NULL; r1 != NULL && r2 != NULL;)
990 if (r1->start < r2->start)
992 temp = r1;
993 r1 = r2;
994 r2 = temp;
996 if (r1->start <= r2->finish + 1)
998 /* Intersected ranges: merge r1 and r2 into r1. */
999 r1->start = r2->start;
1000 if (r1->finish < r2->finish)
1001 r1->finish = r2->finish;
1002 temp = r2;
1003 r2 = r2->next;
1004 ira_finish_live_range (temp);
1005 if (r2 == NULL)
1007 /* To try to merge with subsequent ranges in r1. */
1008 r2 = r1->next;
1009 r1->next = NULL;
1012 else
1014 /* Add r1 to the result. */
1015 if (first == NULL)
1016 first = last = r1;
1017 else
1019 last->next = r1;
1020 last = r1;
1022 r1 = r1->next;
1023 if (r1 == NULL)
1025 /* To try to merge with subsequent ranges in r2. */
1026 r1 = r2->next;
1027 r2->next = NULL;
1031 if (r1 != NULL)
1033 if (first == NULL)
1034 first = r1;
1035 else
1036 last->next = r1;
1037 ira_assert (r1->next == NULL);
1039 else if (r2 != NULL)
1041 if (first == NULL)
1042 first = r2;
1043 else
1044 last->next = r2;
1045 ira_assert (r2->next == NULL);
1047 else
1049 ira_assert (last->next == NULL);
1051 return first;
1054 /* Return TRUE if live ranges R1 and R2 intersect. */
1055 bool
1056 ira_live_ranges_intersect_p (live_range_t r1, live_range_t r2)
1058 /* Remember the live ranges are always kept ordered. */
1059 while (r1 != NULL && r2 != NULL)
1061 if (r1->start > r2->finish)
1062 r1 = r1->next;
1063 else if (r2->start > r1->finish)
1064 r2 = r2->next;
1065 else
1066 return true;
1068 return false;
1071 /* Free allocno live range R. */
/* R is returned to LIVE_RANGE_POOL and must not be used afterwards.  */
1072 void
1073 ira_finish_live_range (live_range_t r)
1075 pool_free (live_range_pool, r);
1078 /* Free list of allocno live ranges starting with R. */
1079 void
1080 ira_finish_live_range_list (live_range_t r)
1082 live_range_t next_r;
/* Save the next pointer before freeing R -- R's memory goes back to
   the pool and cannot be read after ira_finish_live_range.  */
1084 for (; r != NULL; r = next_r)
1086 next_r = r->next;
1087 ira_finish_live_range (r);
1091 /* Free updated register costs of allocno A. */
/* The pointers are nulled after freeing, so calling this twice on the
   same allocno is harmless.  */
1092 void
1093 ira_free_allocno_updated_costs (ira_allocno_t a)
1095 enum reg_class aclass;
1097 aclass = ALLOCNO_CLASS (a);
1098 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
1099 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), aclass);
1100 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
1101 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
1102 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
1103 aclass);
1104 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
1107 /* Free and nullify all cost vectors allocated earlier for allocno
1108 A. */
/* Despite the name, this also tears down A's objects: their live
   ranges, conflict arrays, the entries in ira_object_id_map and
   ira_allocnos, and the object pool storage itself.  */
1109 static void
1110 ira_free_allocno_costs (ira_allocno_t a)
1112 enum reg_class aclass = ALLOCNO_CLASS (a);
1113 ira_object_t obj;
1114 ira_allocno_object_iterator oi;
1116 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
1118 ira_finish_live_range_list (OBJECT_LIVE_RANGES (obj));
/* Unregister the object so stale ids cannot be looked up later.  */
1119 ira_object_id_map[OBJECT_CONFLICT_ID (obj)] = NULL;
1120 if (OBJECT_CONFLICT_ARRAY (obj) != NULL)
1121 ira_free (OBJECT_CONFLICT_ARRAY (obj));
1122 pool_free (object_pool, obj);
1125 ira_allocnos[ALLOCNO_NUM (a)] = NULL;
/* Release all four cost vectors, then null the pointers so that a
   repeated call is safe.  */
1126 if (ALLOCNO_HARD_REG_COSTS (a) != NULL)
1127 ira_free_cost_vector (ALLOCNO_HARD_REG_COSTS (a), aclass);
1128 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL)
1129 ira_free_cost_vector (ALLOCNO_CONFLICT_HARD_REG_COSTS (a), aclass);
1130 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
1131 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), aclass);
1132 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
1133 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
1134 aclass);
1135 ALLOCNO_HARD_REG_COSTS (a) = NULL;
1136 ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
1137 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
1138 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
1141 /* Free the memory allocated for allocno A. */
/* Frees A's cost vectors and objects, then A itself back to the
   allocno pool.  */
1142 static void
1143 finish_allocno (ira_allocno_t a)
1145 ira_free_allocno_costs (a);
1146 pool_free (allocno_pool, a);
1149 /* Free the memory allocated for all allocnos. */
/* Releases every allocno, the regno->allocno map, the id-map vectors,
   and finally the three underlying allocation pools.  */
1150 static void
1151 finish_allocnos (void)
1153 ira_allocno_t a;
1154 ira_allocno_iterator ai;
1156 FOR_EACH_ALLOCNO (a, ai)
1157 finish_allocno (a);
1158 ira_free (ira_regno_allocno_map);
1159 VEC_free (ira_object_t, heap, ira_object_id_map_vec);
1160 VEC_free (ira_allocno_t, heap, allocno_vec);
1161 free_alloc_pool (allocno_pool);
1162 free_alloc_pool (object_pool);
1163 free_alloc_pool (live_range_pool);
1168 /* Pools for copies. */
1169 static alloc_pool copy_pool;
1171 /* Vec containing references to all created copies. It is a
1172 container of array ira_copies. */
1173 static VEC(ira_copy_t,heap) *copy_vec;
1175 /* The function initializes data concerning allocno copies. */
/* The initial vec capacity is sized from get_max_uid () as a rough
   upper-bound heuristic on the number of copy-generating insns.  */
1176 static void
1177 initiate_copies (void)
1179 copy_pool
1180 = create_alloc_pool ("copies", sizeof (struct ira_allocno_copy), 100);
1181 copy_vec = VEC_alloc (ira_copy_t, heap, get_max_uid ());
1182 ira_copies = NULL;
1183 ira_copies_num = 0;
1186 /* Return copy connecting A1 and A2 and originated from INSN of
1187 LOOP_TREE_NODE if any. */
/* A copy is threaded on the copy lists of both of its endpoints, so
   which "next" link to follow depends on whether A1 is the first or
   the second endpoint of the current copy.  */
1188 static ira_copy_t
1189 find_allocno_copy (ira_allocno_t a1, ira_allocno_t a2, rtx insn,
1190 ira_loop_tree_node_t loop_tree_node)
1192 ira_copy_t cp, next_cp;
1193 ira_allocno_t another_a;
1195 for (cp = ALLOCNO_COPIES (a1); cp != NULL; cp = next_cp)
1197 if (cp->first == a1)
1199 next_cp = cp->next_first_allocno_copy;
1200 another_a = cp->second;
1202 else if (cp->second == a1)
1204 next_cp = cp->next_second_allocno_copy;
1205 another_a = cp->first;
1207 else
1208 gcc_unreachable ();
/* Match on the far endpoint, the originating insn, and the region.  */
1209 if (another_a == a2 && cp->insn == insn
1210 && cp->loop_tree_node == loop_tree_node)
1211 return cp;
1213 return NULL;
1216 /* Create and return copy with given attributes LOOP_TREE_NODE, FIRST,
1217 SECOND, FREQ, CONSTRAINT_P, and INSN. */
1218 ira_copy_t
1219 ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq,
1220 bool constraint_p, rtx insn,
1221 ira_loop_tree_node_t loop_tree_node)
1223 ira_copy_t cp;
1225 cp = (ira_copy_t) pool_alloc (copy_pool);
/* The copy number is its index in the global ira_copies array.  */
1226 cp->num = ira_copies_num;
1227 cp->first = first;
1228 cp->second = second;
1229 cp->freq = freq;
1230 cp->constraint_p = constraint_p;
1231 cp->insn = insn;
1232 cp->loop_tree_node = loop_tree_node;
1233 VEC_safe_push (ira_copy_t, heap, copy_vec, cp);
/* Re-read address/length after the push: VEC_safe_push may have
   reallocated the vector's storage.  */
1234 ira_copies = VEC_address (ira_copy_t, copy_vec);
1235 ira_copies_num = VEC_length (ira_copy_t, copy_vec);
1236 return cp;
1239 /* Attach a copy CP to allocnos involved into the copy. */
/* CP is pushed onto the front of both endpoints' copy lists.  Each
   list node participates in two chains (one per endpoint), so the old
   head's back-pointer to update depends on which of ITS endpoints the
   shared allocno is.  */
1240 void
1241 ira_add_allocno_copy_to_list (ira_copy_t cp)
1243 ira_allocno_t first = cp->first, second = cp->second;
1245 cp->prev_first_allocno_copy = NULL;
1246 cp->prev_second_allocno_copy = NULL;
1247 cp->next_first_allocno_copy = ALLOCNO_COPIES (first);
1248 if (cp->next_first_allocno_copy != NULL)
1250 if (cp->next_first_allocno_copy->first == first)
1251 cp->next_first_allocno_copy->prev_first_allocno_copy = cp;
1252 else
1253 cp->next_first_allocno_copy->prev_second_allocno_copy = cp;
1255 cp->next_second_allocno_copy = ALLOCNO_COPIES (second);
1256 if (cp->next_second_allocno_copy != NULL)
1258 if (cp->next_second_allocno_copy->second == second)
1259 cp->next_second_allocno_copy->prev_second_allocno_copy = cp;
1260 else
1261 cp->next_second_allocno_copy->prev_first_allocno_copy = cp;
/* CP becomes the new list head for both allocnos.  */
1263 ALLOCNO_COPIES (first) = cp;
1264 ALLOCNO_COPIES (second) = cp;
1267 /* Make a copy CP a canonical copy where number of the
1268 first allocno is less than the second one. */
1269 void
1270 ira_swap_allocno_copy_ends_if_necessary (ira_copy_t cp)
1272 ira_allocno_t temp;
1273 ira_copy_t temp_cp;
1275 if (ALLOCNO_NUM (cp->first) <= ALLOCNO_NUM (cp->second))
1276 return;
1278 temp = cp->first;
1279 cp->first = cp->second;
1280 cp->second = temp;
1282 temp_cp = cp->prev_first_allocno_copy;
1283 cp->prev_first_allocno_copy = cp->prev_second_allocno_copy;
1284 cp->prev_second_allocno_copy = temp_cp;
1286 temp_cp = cp->next_first_allocno_copy;
1287 cp->next_first_allocno_copy = cp->next_second_allocno_copy;
1288 cp->next_second_allocno_copy = temp_cp;
1291 /* Create (or update frequency if the copy already exists) and return
1292 the copy of allocnos FIRST and SECOND with frequency FREQ
1293 corresponding to move insn INSN (if any) and originated from
1294 LOOP_TREE_NODE. */
1295 ira_copy_t
1296 ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
1297 bool constraint_p, rtx insn,
1298 ira_loop_tree_node_t loop_tree_node)
1300 ira_copy_t cp;
1302 if ((cp = find_allocno_copy (first, second, insn, loop_tree_node)) != NULL)
1304 cp->freq += freq;
1305 return cp;
1307 cp = ira_create_copy (first, second, freq, constraint_p, insn,
1308 loop_tree_node);
1309 ira_assert (first != NULL && second != NULL);
1310 ira_add_allocno_copy_to_list (cp);
1311 ira_swap_allocno_copy_ends_if_necessary (cp);
1312 return cp;
1315 /* Print info about copy CP into file F. */
/* The trailing label reports the copy's origin: a real move insn, a
   matching constraint, or a register shuffle.  */
1316 static void
1317 print_copy (FILE *f, ira_copy_t cp)
1319 fprintf (f, " cp%d:a%d(r%d)<->a%d(r%d)@%d:%s\n", cp->num,
1320 ALLOCNO_NUM (cp->first), ALLOCNO_REGNO (cp->first),
1321 ALLOCNO_NUM (cp->second), ALLOCNO_REGNO (cp->second), cp->freq,
1322 cp->insn != NULL
1323 ? "move" : cp->constraint_p ? "constraint" : "shuffle");
1326 /* Print info about copy CP into stderr. */
/* External entry point for use from a debugger.  */
1327 void
1328 ira_debug_copy (ira_copy_t cp)
1330 print_copy (stderr, cp);
1333 /* Print info about all copies into file F. */
1334 static void
1335 print_copies (FILE *f)
1337 ira_copy_t cp;
1338 ira_copy_iterator ci;
/* One line per copy, in creation order.  */
1340 FOR_EACH_COPY (cp, ci)
1341 print_copy (f, cp);
1344 /* Print info about all copies into stderr. */
/* External entry point for use from a debugger.  */
1345 void
1346 ira_debug_copies (void)
1348 print_copies (stderr);
1351 /* Print info about copies involving allocno A into file F. */
/* Walks A's copy list; as in find_allocno_copy, the next link to
   follow depends on whether A is the first or second endpoint.  */
1352 static void
1353 print_allocno_copies (FILE *f, ira_allocno_t a)
1355 ira_allocno_t another_a;
1356 ira_copy_t cp, next_cp;
1358 fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1359 for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
1361 if (cp->first == a)
1363 next_cp = cp->next_first_allocno_copy;
1364 another_a = cp->second;
1366 else if (cp->second == a)
1368 next_cp = cp->next_second_allocno_copy;
1369 another_a = cp->first;
1371 else
1372 gcc_unreachable ();
1373 fprintf (f, " cp%d:a%d(r%d)@%d", cp->num,
1374 ALLOCNO_NUM (another_a), ALLOCNO_REGNO (another_a), cp->freq);
1376 fprintf (f, "\n");
1379 /* Print info about copies involving allocno A into stderr. */
/* External entry point for use from a debugger.  */
1380 void
1381 ira_debug_allocno_copies (ira_allocno_t a)
1383 print_allocno_copies (stderr, a);
1386 /* The function frees memory allocated for copy CP. */
/* CP goes back to COPY_POOL; it is not unlinked from the endpoint
   copy lists here, so only call this when tearing everything down.  */
1387 static void
1388 finish_copy (ira_copy_t cp)
1390 pool_free (copy_pool, cp);
1394 /* Free memory allocated for all copies. */
/* Frees every copy, then the vec of references and the pool itself.  */
1395 static void
1396 finish_copies (void)
1398 ira_copy_t cp;
1399 ira_copy_iterator ci;
1401 FOR_EACH_COPY (cp, ci)
1402 finish_copy (cp);
1403 VEC_free (ira_copy_t, heap, copy_vec);
1404 free_alloc_pool (copy_pool);
1409 /* Pools for cost vectors. It is defined only for allocno classes. */
1410 static alloc_pool cost_vector_pool[N_REG_CLASSES];
1412 /* The function initiates work with hard register cost vectors. It
1413 creates allocation pool for each allocno class. */
/* Each pool's element size is one int per hard register in the class,
   so vectors allocated from it are indexed by the class's hard
   register ordinal.  */
1414 static void
1415 initiate_cost_vectors (void)
1417 int i;
1418 enum reg_class aclass;
1420 for (i = 0; i < ira_allocno_classes_num; i++)
1422 aclass = ira_allocno_classes[i];
1423 cost_vector_pool[aclass]
1424 = create_alloc_pool ("cost vectors",
1425 sizeof (int) * ira_class_hard_regs_num[aclass],
1426 100);
1430 /* Allocate and return a cost vector VEC for ACLASS. */
/* The vector has ira_class_hard_regs_num[ACLASS] elements (the pool's
   element size, see initiate_cost_vectors); contents are not
   initialized.  */
1431 int *
1432 ira_allocate_cost_vector (reg_class_t aclass)
1434 return (int *) pool_alloc (cost_vector_pool[(int) aclass]);
1437 /* Free a cost vector VEC for ACLASS. */
/* VEC must have been allocated from the same class's pool.  Unlike
   free (), a null VEC is not tolerated here.  */
1438 void
1439 ira_free_cost_vector (int *vec, reg_class_t aclass)
1441 ira_assert (vec != NULL);
1442 pool_free (cost_vector_pool[(int) aclass], vec);
1445 /* Finish work with hard register cost vectors. Release allocation
1446 pool for each allocno class. */
1447 static void
1448 finish_cost_vectors (void)
1450 int i;
1451 enum reg_class aclass;
1453 for (i = 0; i < ira_allocno_classes_num; i++)
1455 aclass = ira_allocno_classes[i];
1456 free_alloc_pool (cost_vector_pool[aclass]);
1462 /* The current loop tree node and its regno allocno map. */
1463 ira_loop_tree_node_t ira_curr_loop_tree_node;
1464 ira_allocno_t *ira_curr_regno_allocno_map;
1466 /* This recursive function traverses loop tree with root LOOP_NODE
1467 calling non-null functions PREORDER_FUNC and POSTORDER_FUNC
1468 correspondingly in preorder and postorder. The function sets up
1469 IRA_CURR_LOOP_TREE_NODE and IRA_CURR_REGNO_ALLOCNO_MAP. If BB_P,
1470 basic block nodes of LOOP_NODE is also processed (before its
1471 subloop nodes). */
1472 void
1473 ira_traverse_loop_tree (bool bb_p, ira_loop_tree_node_t loop_node,
1474 void (*preorder_func) (ira_loop_tree_node_t),
1475 void (*postorder_func) (ira_loop_tree_node_t))
1477 ira_loop_tree_node_t subloop_node;
/* Only loop nodes (not BB nodes) may be traversal roots.  */
1479 ira_assert (loop_node->bb == NULL);
1480 ira_curr_loop_tree_node = loop_node;
1481 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1483 if (preorder_func != NULL)
1484 (*preorder_func) (loop_node);
/* BB children are leaves: both callbacks run on them here, before
   any subloop is visited.  */
1486 if (bb_p)
1487 for (subloop_node = loop_node->children;
1488 subloop_node != NULL;
1489 subloop_node = subloop_node->next)
1490 if (subloop_node->bb != NULL)
1492 if (preorder_func != NULL)
1493 (*preorder_func) (subloop_node);
1495 if (postorder_func != NULL)
1496 (*postorder_func) (subloop_node);
1499 for (subloop_node = loop_node->subloops;
1500 subloop_node != NULL;
1501 subloop_node = subloop_node->subloop_next)
1503 ira_assert (subloop_node->bb == NULL);
1504 ira_traverse_loop_tree (bb_p, subloop_node,
1505 preorder_func, postorder_func);
/* Restore the current-node globals clobbered by the recursion before
   running the postorder callback on this node.  */
1508 ira_curr_loop_tree_node = loop_node;
1509 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1511 if (postorder_func != NULL)
1512 (*postorder_func) (loop_node);
1517 /* The basic block currently being processed. */
1518 static basic_block curr_bb;
1520 /* This recursive function creates allocnos corresponding to
1521 pseudo-registers containing in X. True OUTPUT_P means that X is
1522 a lvalue. */
1523 static void
1524 create_insn_allocnos (rtx x, bool output_p)
1526 int i, j;
1527 const char *fmt;
1528 enum rtx_code code = GET_CODE (x);
1530 if (code == REG)
1532 int regno;
/* Only pseudos get allocnos; hard registers are ignored here.  */
1534 if ((regno = REGNO (x)) >= FIRST_PSEUDO_REGISTER)
1536 ira_allocno_t a;
1538 if ((a = ira_curr_regno_allocno_map[regno]) == NULL)
1539 a = ira_create_allocno (regno, false, ira_curr_loop_tree_node);
/* Accumulate reference count and BB-frequency-weighted use count.  */
1541 ALLOCNO_NREFS (a)++;
1542 ALLOCNO_FREQ (a) += REG_FREQ_FROM_BB (curr_bb);
1543 if (output_p)
1544 bitmap_set_bit (ira_curr_loop_tree_node->modified_regnos, regno);
1546 return;
1548 else if (code == SET)
/* Destination is written, source is read.  */
1550 create_insn_allocnos (SET_DEST (x), true);
1551 create_insn_allocnos (SET_SRC (x), false);
1552 return;
1554 else if (code == CLOBBER)
1556 create_insn_allocnos (XEXP (x, 0), true);
1557 return;
1559 else if (code == MEM)
/* A register inside a memory address is only read, even when the
   MEM itself is a store destination.  */
1561 create_insn_allocnos (XEXP (x, 0), false);
1562 return;
1564 else if (code == PRE_DEC || code == POST_DEC || code == PRE_INC ||
1565 code == POST_INC || code == POST_MODIFY || code == PRE_MODIFY)
/* Auto-modify addressing both reads and writes the base register,
   hence the two recursive calls.  */
1567 create_insn_allocnos (XEXP (x, 0), true);
1568 create_insn_allocnos (XEXP (x, 0), false);
1569 return;
/* Generic fallback: recurse into all rtx operands and vectors,
   propagating the current OUTPUT_P.  */
1572 fmt = GET_RTX_FORMAT (code);
1573 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1575 if (fmt[i] == 'e')
1576 create_insn_allocnos (XEXP (x, i), output_p);
1577 else if (fmt[i] == 'E')
1578 for (j = 0; j < XVECLEN (x, i); j++)
1579 create_insn_allocnos (XVECEXP (x, i, j), output_p);
1583 /* Create allocnos corresponding to pseudo-registers living in the
1584 basic block represented by the corresponding loop tree node
1585 BB_NODE. */
1586 static void
1587 create_bb_allocnos (ira_loop_tree_node_t bb_node)
1589 basic_block bb;
1590 rtx insn;
1591 unsigned int i;
1592 bitmap_iterator bi;
/* Set the global used by create_insn_allocnos for frequency.  */
1594 curr_bb = bb = bb_node->bb;
1595 ira_assert (bb != NULL);
1596 FOR_BB_INSNS_REVERSE (bb, insn)
1597 if (NONDEBUG_INSN_P (insn))
1598 create_insn_allocnos (PATTERN (insn), false);
1599 /* It might be a allocno living through from one subloop to
1600 another. */
/* Ensure pseudos that are live into the block but never referenced
   by its insns also get allocnos in the current region.  */
1601 EXECUTE_IF_SET_IN_REG_SET (DF_LR_IN (bb), FIRST_PSEUDO_REGISTER, i, bi)
1602 if (ira_curr_regno_allocno_map[i] == NULL)
1603 ira_create_allocno (i, false, ira_curr_loop_tree_node)
1606 /* Create allocnos corresponding to pseudo-registers living on edge E
1607 (a loop entry or exit). Also mark the allocnos as living on the
1608 loop border. */
1609 static void
1610 create_loop_allocnos (edge e)
1612 unsigned int i;
1613 bitmap live_in_regs, border_allocnos;
1614 bitmap_iterator bi;
1615 ira_loop_tree_node_t parent;
1617 live_in_regs = DF_LR_IN (e->dest);
1618 border_allocnos = ira_curr_loop_tree_node->border_allocnos;
/* A pseudo crosses the edge only if it is live out of the source
   block AND live into the destination block.  */
1619 EXECUTE_IF_SET_IN_REG_SET (DF_LR_OUT (e->src),
1620 FIRST_PSEUDO_REGISTER, i, bi)
1621 if (bitmap_bit_p (live_in_regs, i))
1623 if (ira_curr_regno_allocno_map[i] == NULL)
1625 /* The order of creations is important for right
1626 ira_regno_allocno_map. */
/* Create the parent's allocno first so the regno-allocno chain
   lists outer regions before inner ones.  */
1627 if ((parent = ira_curr_loop_tree_node->parent) != NULL
1628 && parent->regno_allocno_map[i] == NULL)
1629 ira_create_allocno (i, false, parent);
1630 ira_create_allocno (i, false, ira_curr_loop_tree_node);
1632 bitmap_set_bit (border_allocnos,
1633 ALLOCNO_NUM (ira_curr_regno_allocno_map[i]));
1637 /* Create allocnos corresponding to pseudo-registers living in loop
1638 represented by the corresponding loop tree node LOOP_NODE. This
1639 function is called by ira_traverse_loop_tree. */
1640 static void
1641 create_loop_tree_node_allocnos (ira_loop_tree_node_t loop_node)
1643 if (loop_node->bb != NULL)
1644 create_bb_allocnos (loop_node);
1645 else if (loop_node != ira_loop_tree_root)
1647 int i;
1648 edge_iterator ei;
1649 edge e;
1650 VEC (edge, heap) *edges;
1652 ira_assert (current_loops != NULL);
/* Loop entry edges: every header predecessor except the latch.  */
1653 FOR_EACH_EDGE (e, ei, loop_node->loop->header->preds)
1654 if (e->src != loop_node->loop->latch)
1655 create_loop_allocnos (e);
/* Loop exit edges; the vector returned by get_loop_exit_edges is
   heap-allocated and freed below.  */
1657 edges = get_loop_exit_edges (loop_node->loop);
1658 FOR_EACH_VEC_ELT (edge, edges, i, e)
1659 create_loop_allocnos (e);
1660 VEC_free (edge, heap, edges);
1664 /* Propagate information about allocnos modified inside the loop given
1665 by its LOOP_TREE_NODE to its parent. */
/* Used as a postorder callback of ira_traverse_loop_tree, so each
   parent accumulates the modified-regno sets of all its subloops.  */
1666 static void
1667 propagate_modified_regnos (ira_loop_tree_node_t loop_tree_node)
1669 if (loop_tree_node == ira_loop_tree_root)
1670 return;
1671 ira_assert (loop_tree_node->bb == NULL);
1672 bitmap_ior_into (loop_tree_node->parent->modified_regnos,
1673 loop_tree_node->modified_regnos);
1676 /* Propagate new info about allocno A (see comments about accumulated
1677 info in allocno definition) to the corresponding allocno on upper
1678 loop tree level. So allocnos on upper levels accumulate
1679 information about the corresponding allocnos in nested regions.
1680 The new info means allocno info finally calculated in this
1681 file. */
1682 static void
1683 propagate_allocno_info (void)
1685 int i;
1686 ira_allocno_t a, parent_a;
1687 ira_loop_tree_node_t parent;
1688 enum reg_class aclass;
/* Propagation only makes sense for regional allocation modes.  */
1690 if (flag_ira_region != IRA_REGION_ALL
1691 && flag_ira_region != IRA_REGION_MIXED)
1692 return;
/* Regno-allocno chains list outer regions first, so iterating the
   chain forward visits children after their parents; walking regnos
   downward keeps the update order deterministic.  */
1693 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
1694 for (a = ira_regno_allocno_map[i];
1695 a != NULL;
1696 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1697 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
1698 && (parent_a = parent->regno_allocno_map[i]) != NULL
1699 /* There are no caps yet at this point. So use
1700 border_allocnos to find allocnos for the propagation. */
1701 && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE (a)->border_allocnos,
1702 ALLOCNO_NUM (a)))
/* Accumulate every per-allocno statistic into the parent.  */
1704 if (! ALLOCNO_BAD_SPILL_P (a))
1705 ALLOCNO_BAD_SPILL_P (parent_a) = false;
1706 ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
1707 ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
1708 ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
1709 merge_hard_reg_conflicts (a, parent_a, true);
1710 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
1711 += ALLOCNO_CALLS_CROSSED_NUM (a);
1712 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
1713 += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
1714 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
1715 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
1716 aclass = ALLOCNO_CLASS (a);
1717 ira_assert (aclass == ALLOCNO_CLASS (parent_a));
1718 ira_allocate_and_accumulate_costs
1719 (&ALLOCNO_HARD_REG_COSTS (parent_a), aclass,
1720 ALLOCNO_HARD_REG_COSTS (a));
1721 ira_allocate_and_accumulate_costs
1722 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
1723 aclass,
1724 ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
1725 ALLOCNO_CLASS_COST (parent_a)
1726 += ALLOCNO_CLASS_COST (a);
1727 ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
1731 /* Create allocnos corresponding to pseudo-registers in the current
1732 function. Traverse the loop tree for this. */
1733 static void
1734 create_allocnos (void)
1736 /* We need to process BB first to correctly link allocnos by member
1737 next_regno_allocno. */
1738 ira_traverse_loop_tree (true, ira_loop_tree_root,
1739 create_loop_tree_node_allocnos, NULL);
/* A second, postorder pass pulls modified-regno info up the tree;
   it is skipped at -O0 when regions are not used.  */
1740 if (optimize)
1741 ira_traverse_loop_tree (false, ira_loop_tree_root, NULL,
1742 propagate_modified_regnos);
1747 /* This page contains functions to remove some regions from separate
1748 register allocation. We remove regions whose separate allocation
1749 will hardly improve the result. As a result we speed up regional
1750 register allocation. */
1752 /* The function changes the object in range list given by R to OBJ. */
/* Every range in the list is re-pointed at OBJ; used when ranges are
   moved or copied between objects.  */
1753 static void
1754 change_object_in_range_list (live_range_t r, ira_object_t obj)
1756 for (; r != NULL; r = r->next)
1757 r->object = obj;
1760 /* Move all live ranges associated with allocno FROM to allocno TO. */
/* FROM's objects end up with empty range lists; the ranges are merged
   (destructively, see ira_merge_live_ranges) into TO's objects.  */
1761 static void
1762 move_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
1764 int i;
1765 int n = ALLOCNO_NUM_OBJECTS (from);
1767 gcc_assert (n == ALLOCNO_NUM_OBJECTS (to));
1769 for (i = 0; i < n; i++)
1771 ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
1772 ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
1773 live_range_t lr = OBJECT_LIVE_RANGES (from_obj);
1775 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
1777 fprintf (ira_dump_file,
1778 " Moving ranges of a%dr%d to a%dr%d: ",
1779 ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
1780 ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
1781 ira_print_live_range_list (ira_dump_file, lr);
1783 change_object_in_range_list (lr, to_obj);
1784 OBJECT_LIVE_RANGES (to_obj)
1785 = ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
/* Detach the moved list so FROM's teardown does not free it.  */
1786 OBJECT_LIVE_RANGES (from_obj) = NULL;
/* Copy all live ranges associated with allocno FROM to allocno TO.
   Unlike move_allocno_live_ranges, FROM keeps its own ranges; TO
   receives merged duplicates.  */
1790 static void
1791 copy_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
1793 int i;
1794 int n = ALLOCNO_NUM_OBJECTS (from);
1796 gcc_assert (n == ALLOCNO_NUM_OBJECTS (to));
1798 for (i = 0; i < n; i++)
1800 ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
1801 ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
1802 live_range_t lr = OBJECT_LIVE_RANGES (from_obj);
1804 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
1806 fprintf (ira_dump_file, " Copying ranges of a%dr%d to a%dr%d: ",
1807 ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
1808 ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
1809 ira_print_live_range_list (ira_dump_file, lr);
/* Duplicate first so the destructive merge only consumes copies.  */
1811 lr = ira_copy_live_range_list (lr);
1812 change_object_in_range_list (lr, to_obj);
1813 OBJECT_LIVE_RANGES (to_obj)
1814 = ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
1818 /* Return TRUE if NODE represents a loop with low register
1819 pressure. */
/* BB nodes never qualify.  A loop qualifies when, for every pressure
   class with more than one available register, its recorded pressure
   does not exceed the number of available registers.  */
1820 static bool
1821 low_pressure_loop_node_p (ira_loop_tree_node_t node)
1823 int i;
1824 enum reg_class pclass;
1826 if (node->bb != NULL)
1827 return false;
1829 for (i = 0; i < ira_pressure_classes_num; i++)
1831 pclass = ira_pressure_classes[i];
1832 if (node->reg_pressure[pclass] > ira_available_class_regs[pclass]
1833 && ira_available_class_regs[pclass] > 1)
1834 return false;
1836 return true;
#ifdef STACK_REGS
/* Return TRUE if LOOP has a complex enter or exit edge.  We don't
   form a region from such loop if the target use stack register
   because reg-stack.c can not deal with such edges.  */
static bool
loop_with_complex_edge_p (struct loop *loop)
{
  int i;
  edge_iterator ei;
  edge e;
  VEC (edge, heap) *edges;
  bool res;

  FOR_EACH_EDGE (e, ei, loop->header->preds)
    if (e->flags & EDGE_EH)
      return true;
  /* get_loop_exit_edges returns a heap-allocated vector; the previous
     code returned without freeing it, leaking it on every call.  */
  edges = get_loop_exit_edges (loop);
  res = false;
  FOR_EACH_VEC_ELT (edge, edges, i, e)
    if (e->flags & EDGE_COMPLEX)
      {
	res = true;
	break;
      }
  VEC_free (edge, heap, edges);
  return res;
}
#endif
1862 /* Sort loops for marking them for removal. We put already marked
1863 loops first, then less frequent loops next, and then outer loops
1864 next. */
/* qsort comparator: negative means L1 sorts before L2.  */
1865 static int
1866 loop_compare_func (const void *v1p, const void *v2p)
1868 int diff;
1869 ira_loop_tree_node_t l1 = *(const ira_loop_tree_node_t *) v1p;
1870 ira_loop_tree_node_t l2 = *(const ira_loop_tree_node_t *) v2p;
/* The root (parent == NULL) is never placed in the sorted array.  */
1872 ira_assert (l1->parent != NULL && l2->parent != NULL);
1873 if (l1->to_remove_p && ! l2->to_remove_p)
1874 return -1;
1875 if (! l1->to_remove_p && l2->to_remove_p)
1876 return 1;
1877 if ((diff = l1->loop->header->frequency - l2->loop->header->frequency) != 0)
1878 return diff;
1879 if ((diff = (int) loop_depth (l1->loop) - (int) loop_depth (l2->loop)) != 0)
1880 return diff;
1881 /* Make sorting stable. */
1882 return l1->loop_num - l2->loop_num;
1885 /* Mark loops which should be removed from regional allocation. We
1886 remove a loop with low register pressure inside another loop with
1887 register pressure. In this case a separate allocation of the loop
1888 hardly helps (for irregular register file architecture it could
1889 help by choosing a better hard register in the loop but we prefer
1890 faster allocation even in this case). We also remove cheap loops
1891 if there are more than IRA_MAX_LOOPS_NUM of them. Loop with EH
1892 exit or enter edges are removed too because the allocation might
1893 require put pseudo moves on the EH edges (we could still do this
1894 for pseudos with caller saved hard registers in some cases but it
1895 is impossible to say here or during top-down allocation pass what
1896 hard register the pseudos get finally). */
1897 static void
1898 mark_loops_for_removal (void)
1900 int i, n;
1901 ira_loop_tree_node_t *sorted_loops;
1902 loop_p loop;
1904 ira_assert (current_loops != NULL);
/* Worst-case sizing: one slot per loop in the function.  */
1905 sorted_loops
1906 = (ira_loop_tree_node_t *) ira_allocate (sizeof (ira_loop_tree_node_t)
1907 * VEC_length (loop_p,
1908 ira_loops.larray));
/* First pass: compute the initial to_remove_p of every non-root loop
   node and collect the candidates into SORTED_LOOPS.  */
1909 for (n = i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
1910 if (ira_loop_nodes[i].regno_allocno_map != NULL)
1912 if (ira_loop_nodes[i].parent == NULL)
1914 /* Don't remove the root. */
1915 ira_loop_nodes[i].to_remove_p = false;
1916 continue;
1918 sorted_loops[n++] = &ira_loop_nodes[i];
1919 ira_loop_nodes[i].to_remove_p
1920 = ((low_pressure_loop_node_p (ira_loop_nodes[i].parent)
1921 && low_pressure_loop_node_p (&ira_loop_nodes[i]))
1922 #ifdef STACK_REGS
1923 || loop_with_complex_edge_p (ira_loop_nodes[i].loop)
1924 #endif
/* Second pass: after sorting (marked loops first, then cheap ones),
   force-remove loops from the front until at most IRA_MAX_LOOPS_NUM
   regions remain (the "+ 1" accounts for the root region).  */
1927 qsort (sorted_loops, n, sizeof (ira_loop_tree_node_t), loop_compare_func);
1928 for (i = 0; n - i + 1 > IRA_MAX_LOOPS_NUM; i++)
1930 sorted_loops[i]->to_remove_p = true;
1931 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1932 fprintf
1933 (ira_dump_file,
1934 " Mark loop %d (header %d, freq %d, depth %d) for removal (%s)\n",
1935 sorted_loops[i]->loop_num, sorted_loops[i]->loop->header->index,
1936 sorted_loops[i]->loop->header->frequency,
1937 loop_depth (sorted_loops[i]->loop),
1938 low_pressure_loop_node_p (sorted_loops[i]->parent)
1939 && low_pressure_loop_node_p (sorted_loops[i])
1940 ? "low pressure" : "cheap loop");
1942 ira_free (sorted_loops);
1945 /* Mark all loops but root for removing. */
/* Used when regional allocation is disabled entirely: every loop node
   except the root gets to_remove_p set.  */
1946 static void
1947 mark_all_loops_for_removal (void)
1949 int i;
1950 loop_p loop;
1952 ira_assert (current_loops != NULL);
1953 FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
1954 if (ira_loop_nodes[i].regno_allocno_map != NULL)
1956 if (ira_loop_nodes[i].parent == NULL)
1958 /* Don't remove the root. */
1959 ira_loop_nodes[i].to_remove_p = false;
1960 continue;
1962 ira_loop_nodes[i].to_remove_p = true;
1963 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1964 fprintf
1965 (ira_dump_file,
1966 " Mark loop %d (header %d, freq %d, depth %d) for removal\n",
1967 ira_loop_nodes[i].loop_num,
1968 ira_loop_nodes[i].loop->header->index,
1969 ira_loop_nodes[i].loop->header->frequency,
1970 loop_depth (ira_loop_nodes[i].loop));
1974 /* Definition of vector of loop tree nodes. */
1975 DEF_VEC_P(ira_loop_tree_node_t);
1976 DEF_VEC_ALLOC_P(ira_loop_tree_node_t, heap);
1978 /* Vec containing references to all removed loop tree nodes. */
1979 static VEC(ira_loop_tree_node_t,heap) *removed_loop_vec;
1981 /* Vec containing references to all children of loop tree nodes. */
1982 static VEC(ira_loop_tree_node_t,heap) *children_vec;
1984 /* Remove subregions of NODE if their separate allocation will not
1985 improve the result. */
/* NOTE(review): "uneccesary" is a long-standing misspelling of
   "unnecessary"; the name is kept since callers use it.  CHILDREN_VEC
   is used as an explicit work stack: children surviving below a
   removed node are re-parented to the nearest kept ancestor.  */
1986 static void
1987 remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_node_t node)
1989 unsigned int start;
1990 bool remove_p;
1991 ira_loop_tree_node_t subnode;
1993 remove_p = node->to_remove_p;
1994 if (! remove_p)
1995 VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, node);
/* Remember where this node's surviving children begin on the stack.  */
1996 start = VEC_length (ira_loop_tree_node_t, children_vec);
1997 for (subnode = node->children; subnode != NULL; subnode = subnode->next)
1998 if (subnode->bb == NULL)
1999 remove_uneccesary_loop_nodes_from_loop_tree (subnode)
2000 else
2001 VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, subnode);
2002 node->children = node->subloops = NULL;
2003 if (remove_p)
/* A removed node leaves its survivors on the stack for the caller
   (its nearest kept ancestor) to adopt.  */
2005 VEC_safe_push (ira_loop_tree_node_t, heap, removed_loop_vec, node);
2006 return;
/* Pop the survivors and relink them as NODE's children (and subloop
   chain for loop nodes).  */
2008 while (VEC_length (ira_loop_tree_node_t, children_vec) > start)
2010 subnode = VEC_pop (ira_loop_tree_node_t, children_vec);
2011 subnode->parent = node;
2012 subnode->next = node->children;
2013 node->children = subnode;
2014 if (subnode->bb == NULL)
2016 subnode->subloop_next = node->subloops;
2017 node->subloops = subnode;
2022 /* Return TRUE if NODE is inside PARENT. */
2023 static bool
2024 loop_is_inside_p (ira_loop_tree_node_t node, ira_loop_tree_node_t parent)
2026 for (node = node->parent; node != NULL; node = node->parent)
2027 if (node == parent)
2028 return true;
2029 return false;
2032 /* Sort allocnos according to their order in regno allocno list. */
/* qsort comparator: allocnos in outer regions sort before allocnos in
   regions nested inside them.  */
2033 static int
2034 regno_allocno_order_compare_func (const void *v1p, const void *v2p)
2036 ira_allocno_t a1 = *(const ira_allocno_t *) v1p;
2037 ira_allocno_t a2 = *(const ira_allocno_t *) v2p;
2038 ira_loop_tree_node_t n1 = ALLOCNO_LOOP_TREE_NODE (a1);
2039 ira_loop_tree_node_t n2 = ALLOCNO_LOOP_TREE_NODE (a2);
2041 if (loop_is_inside_p (n1, n2))
2042 return -1;
2043 else if (loop_is_inside_p (n2, n1))
2044 return 1;
2045 /* If allocnos are equally good, sort by allocno numbers, so that
2046 the results of qsort leave nothing to chance. We put allocnos
2047 with higher number first in the list because it is the original
2048 order for allocnos from loops on the same levels. */
2049 return ALLOCNO_NUM (a2) - ALLOCNO_NUM (a1);
2052 /* This array is used to sort allocnos to restore allocno order in
2053 the regno allocno list. */
2054 static ira_allocno_t *regno_allocnos;
2056 /* Restore allocno order for REGNO in the regno allocno list. */
/* Gathers the chain into REGNO_ALLOCNOS (which the caller must have
   allocated large enough), sorts it, and rebuilds the linked list.  */
2057 static void
2058 ira_rebuild_regno_allocno_list (int regno)
2060 int i, n;
2061 ira_allocno_t a;
2063 for (n = 0, a = ira_regno_allocno_map[regno];
2064 a != NULL;
2065 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2066 regno_allocnos[n++] = a;
2067 ira_assert (n > 0);
2068 qsort (regno_allocnos, n, sizeof (ira_allocno_t),
2069 regno_allocno_order_compare_func);
/* Re-thread the next pointers in sorted order.  */
2070 for (i = 1; i < n; i++)
2071 ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[i - 1]) = regno_allocnos[i];
2072 ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[n - 1]) = NULL;
2073 ira_regno_allocno_map[regno] = regno_allocnos[0];
2074 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
2075 fprintf (ira_dump_file, " Rebuilding regno allocno list for %d\n", regno);
2078 /* Propagate info from allocno FROM_A to allocno A. */
/* Accumulates FROM_A's conflicts, reference/frequency counters and
   cost vectors into A; FROM_A itself is not modified here.  */
2079 static void
2080 propagate_some_info_from_allocno (ira_allocno_t a, ira_allocno_t from_a)
2082 enum reg_class aclass;
2084 merge_hard_reg_conflicts (from_a, a, false);
2085 ALLOCNO_NREFS (a) += ALLOCNO_NREFS (from_a);
2086 ALLOCNO_FREQ (a) += ALLOCNO_FREQ (from_a);
2087 ALLOCNO_CALL_FREQ (a) += ALLOCNO_CALL_FREQ (from_a);
2088 ALLOCNO_CALLS_CROSSED_NUM (a) += ALLOCNO_CALLS_CROSSED_NUM (from_a);
2089 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a)
2090 += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (from_a);
2091 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a)
2092 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (from_a);
/* "Bad spill" only survives if both allocnos have it set.  */
2093 if (! ALLOCNO_BAD_SPILL_P (from_a))
2094 ALLOCNO_BAD_SPILL_P (a) = false;
2095 aclass = ALLOCNO_CLASS (from_a);
2096 ira_assert (aclass == ALLOCNO_CLASS (a));
2097 ira_allocate_and_accumulate_costs (&ALLOCNO_HARD_REG_COSTS (a), aclass,
2098 ALLOCNO_HARD_REG_COSTS (from_a));
2099 ira_allocate_and_accumulate_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
2100 aclass,
2101 ALLOCNO_CONFLICT_HARD_REG_COSTS (from_a));
2102 ALLOCNO_CLASS_COST (a) += ALLOCNO_CLASS_COST (from_a);
2103 ALLOCNO_MEMORY_COST (a) += ALLOCNO_MEMORY_COST (from_a);
/* Remove allocnos from loops removed from the allocation
   consideration.  For every pseudo, walk its regno allocno list: an
   allocno living in a to-be-removed region is either moved up to the
   first surviving ancestor region (when no allocno exists there yet)
   or merged into the ancestor's allocno and destroyed.  */
static void
remove_unnecessary_allocnos (void)
{
  int regno;
  bool merged_p, rebuild_p;
  ira_allocno_t a, prev_a, next_a, parent_a;
  ira_loop_tree_node_t a_node, parent;

  merged_p = false;
  regno_allocnos = NULL;	/* Scratch buffer, allocated lazily below.  */
  for (regno = max_reg_num () - 1; regno >= FIRST_PSEUDO_REGISTER; regno--)
    {
      rebuild_p = false;
      for (prev_a = NULL, a = ira_regno_allocno_map[regno];
	   a != NULL;
	   a = next_a)
	{
	  next_a = ALLOCNO_NEXT_REGNO_ALLOCNO (a);
	  a_node = ALLOCNO_LOOP_TREE_NODE (a);
	  if (! a_node->to_remove_p)
	    prev_a = a;
	  else
	    {
	      /* Find the nearest ancestor that either survives or
		 already has an allocno for REGNO.  */
	      for (parent = a_node->parent;
		   (parent_a = parent->regno_allocno_map[regno]) == NULL
		     && parent->to_remove_p;
		   parent = parent->parent)
		;
	      if (parent_a == NULL)
		{
		  /* There are no allocnos with the same regno in
		     upper region -- just move the allocno to the
		     upper region.  */
		  prev_a = a;
		  ALLOCNO_LOOP_TREE_NODE (a) = parent;
		  parent->regno_allocno_map[regno] = a;
		  bitmap_set_bit (parent->all_allocnos, ALLOCNO_NUM (a));
		  rebuild_p = true;	/* List order is now stale.  */
		}
	      else
		{
		  /* Remove the allocno and update info of allocno in
		     the upper region.  */
		  if (prev_a == NULL)
		    ira_regno_allocno_map[regno] = next_a;
		  else
		    ALLOCNO_NEXT_REGNO_ALLOCNO (prev_a) = next_a;
		  move_allocno_live_ranges (a, parent_a);
		  merged_p = true;
		  propagate_some_info_from_allocno (parent_a, a);
		  /* Remove it from the corresponding regno allocno
		     map to avoid info propagation of subsequent
		     allocno into this already removed allocno.  */
		  a_node->regno_allocno_map[regno] = NULL;
		  finish_allocno (a);
		}
	    }
	}
      if (rebuild_p)
	{
	  /* We need to restore the order in regno allocno list.  */
	  if (regno_allocnos == NULL)
	    regno_allocnos
	      = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
						* ira_allocnos_num);
	  ira_rebuild_regno_allocno_list (regno);
	}
    }
  if (merged_p)
    ira_rebuild_start_finish_chains ();
  if (regno_allocnos != NULL)
    ira_free (regno_allocnos);
}
/* Remove allocnos from all loops but the root.  Used when all regions
   but the root are being discarded (e.g. fast allocation): every
   non-root allocno is merged into (or promoted to) the root-level
   allocno for its regno, and the regno allocno maps are rebuilt.  */
static void
remove_low_level_allocnos (void)
{
  int regno;
  bool merged_p, propagate_p;
  ira_allocno_t a, top_a;
  ira_loop_tree_node_t a_node, parent;
  ira_allocno_iterator ai;

  merged_p = false;
  FOR_EACH_ALLOCNO (a, ai)
    {
      a_node = ALLOCNO_LOOP_TREE_NODE (a);
      if (a_node == ira_loop_tree_root || ALLOCNO_CAP_MEMBER (a) != NULL)
	continue;
      regno = ALLOCNO_REGNO (a);
      if ((top_a = ira_loop_tree_root->regno_allocno_map[regno]) == NULL)
	{
	  /* No allocno at the root yet: promote this one.  */
	  ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
	  ira_loop_tree_root->regno_allocno_map[regno] = a;
	  continue;
	}
      /* Propagate info only once per subtree -- when the immediate
	 parent has no allocno for this regno.  */
      propagate_p = a_node->parent->regno_allocno_map[regno] == NULL;
      /* Remove the allocno and update info of allocno in the upper
	 region.  */
      move_allocno_live_ranges (a, top_a);
      merged_p = true;
      if (propagate_p)
	propagate_some_info_from_allocno (top_a, a);
    }
  /* Consistency check: every surviving non-root allocno is either a
     cap member with a cap, or has a parent-level allocno.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      a_node = ALLOCNO_LOOP_TREE_NODE (a);
      if (a_node == ira_loop_tree_root)
	continue;
      parent = a_node->parent;
      regno = ALLOCNO_REGNO (a);
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	ira_assert (ALLOCNO_CAP (a) != NULL);
      else if (ALLOCNO_CAP (a) == NULL)
	ira_assert (parent->regno_allocno_map[regno] != NULL);
    }
  /* Keep only root-level allocnos; finalize their conflict info and
     free all the others.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      regno = ALLOCNO_REGNO (a);
      if (ira_loop_tree_root->regno_allocno_map[regno] == a)
	{
	  ira_object_t obj;
	  ira_allocno_object_iterator oi;

	  ira_regno_allocno_map[regno] = a;
	  ALLOCNO_NEXT_REGNO_ALLOCNO (a) = NULL;
	  ALLOCNO_CAP_MEMBER (a) = NULL;
	  FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
	    COPY_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
			       OBJECT_TOTAL_CONFLICT_HARD_REGS (obj));
#ifdef STACK_REGS
	  if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
	    ALLOCNO_NO_STACK_REG_P (a) = true;
#endif
	}
      else
	finish_allocno (a);
    }
  if (merged_p)
    ira_rebuild_start_finish_chains ();
}
/* Remove loops from consideration.  We remove all loops except for
   root if ALL_P or loops for which a separate allocation will not
   improve the result.  We have to do this after allocno creation and
   their costs and allocno class evaluation because only after that
   the register pressure can be known and is calculated.  */
static void
remove_unnecessary_regions (bool all_p)
{
  if (current_loops == NULL)
    return;
  if (all_p)
    mark_all_loops_for_removal ();
  else
    mark_loops_for_removal ();
  children_vec
    = VEC_alloc (ira_loop_tree_node_t, heap,
		 last_basic_block + VEC_length (loop_p, ira_loops.larray));
  removed_loop_vec
    = VEC_alloc (ira_loop_tree_node_t, heap,
		 last_basic_block + VEC_length (loop_p, ira_loops.larray));
  /* NOTE: the callee's name is misspelled ("uneccesary") at its
     definition elsewhere in this file; keep the spelling in sync.  */
  remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_root);
  VEC_free (ira_loop_tree_node_t, heap, children_vec);
  if (all_p)
    remove_low_level_allocnos ();
  else
    remove_unnecessary_allocnos ();
  /* Loop tree nodes can only be freed after their allocnos are gone.  */
  while (VEC_length (ira_loop_tree_node_t, removed_loop_vec) > 0)
    finish_loop_tree_node (VEC_pop (ira_loop_tree_node_t, removed_loop_vec));
  VEC_free (ira_loop_tree_node_t, heap, removed_loop_vec);
}
/* At this point a true value of the allocno attribute bad_spill_p
   means that there is an insn where the allocno occurs and where the
   allocno can not be used as memory.  The function updates the
   attribute, now it can be true only for allocnos which can not be
   used as memory in an insn and in whose live ranges there are other
   allocno deaths.  Spilling allocnos with true value will not improve
   the code because it will not make other allocnos colorable and
   additional reloads for the corresponding pseudo will be generated
   in reload pass for each insn it occurs.

   This is a trick mentioned in one classic article of Chaitin etc
   which is frequently omitted in other implementations of RA based on
   graph coloring.  */
static void
update_bad_spill_attribute (void)
{
  int i;
  ira_allocno_t a;
  ira_allocno_iterator ai;
  ira_allocno_object_iterator aoi;
  ira_object_t obj;
  live_range_t r;
  enum reg_class aclass;
  bitmap_head dead_points[N_REG_CLASSES];

  /* For each allocno class, collect the program points where some
     object of that class dies (range finishes).  */
  for (i = 0; i < ira_allocno_classes_num; i++)
    {
      aclass = ira_allocno_classes[i];
      bitmap_initialize (&dead_points[aclass], &reg_obstack);
    }
  FOR_EACH_ALLOCNO (a, ai)
    {
      aclass = ALLOCNO_CLASS (a);
      if (aclass == NO_REGS)
	continue;
      FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
	for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
	  bitmap_set_bit (&dead_points[aclass], r->finish);
    }
  /* Clear bad_spill_p unless some other allocno of the same class
     dies strictly inside one of this allocno's live ranges.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      aclass = ALLOCNO_CLASS (a);
      if (aclass == NO_REGS)
	continue;
      if (! ALLOCNO_BAD_SPILL_P (a))
	continue;
      FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
	{
	  for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
	    {
	      for (i = r->start + 1; i < r->finish; i++)
		if (bitmap_bit_p (&dead_points[aclass], i))
		  break;
	      if (i < r->finish)
		break;		/* Found an interior death point.  */
	    }
	  if (r != NULL)
	    {
	      ALLOCNO_BAD_SPILL_P (a) = false;
	      break;
	    }
	}
    }
  for (i = 0; i < ira_allocno_classes_num; i++)
    {
      aclass = ira_allocno_classes[i];
      bitmap_clear (&dead_points[aclass]);
    }
}
/* Set up minimal and maximal live range points for allocnos.  For
   each object, OBJECT_MIN/OBJECT_MAX become the start of its first
   and the finish of its last live range; the values are then
   propagated up through caps and parent allocnos so that enclosing
   allocnos cover their members' ranges.  */
static void
setup_min_max_allocno_live_range_point (void)
{
  int i;
  ira_allocno_t a, parent_a, cap;
  ira_allocno_iterator ai;
#ifdef ENABLE_IRA_CHECKING
  ira_object_iterator oi;
  ira_object_t obj;
#endif
  live_range_t r;
  ira_loop_tree_node_t parent;

  FOR_EACH_ALLOCNO (a, ai)
    {
      int n = ALLOCNO_NUM_OBJECTS (a);

      for (i = 0; i < n; i++)
	{
	  ira_object_t obj = ALLOCNO_OBJECT (a, i);
	  r = OBJECT_LIVE_RANGES (obj);
	  if (r == NULL)
	    continue;
	  /* Ranges are ordered; the head finishes last, the tail
	     starts first.  */
	  OBJECT_MAX (obj) = r->finish;
	  for (; r->next != NULL; r = r->next)
	    ;
	  OBJECT_MIN (obj) = r->start;
	}
    }
  for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    for (a = ira_regno_allocno_map[i];
	 a != NULL;
	 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
      {
	int j;
	int n = ALLOCNO_NUM_OBJECTS (a);

	for (j = 0; j < n; j++)
	  {
	    ira_object_t obj = ALLOCNO_OBJECT (a, j);
	    ira_object_t parent_obj;

	    if (OBJECT_MAX (obj) < 0)
	      continue;
	    ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
	    /* Accumulation of range info.  */
	    if (ALLOCNO_CAP (a) != NULL)
	      {
		/* Widen every cap in the chain to cover this object.  */
		for (cap = ALLOCNO_CAP (a); cap != NULL; cap = ALLOCNO_CAP (cap))
		  {
		    ira_object_t cap_obj = ALLOCNO_OBJECT (cap, j);
		    if (OBJECT_MAX (cap_obj) < OBJECT_MAX (obj))
		      OBJECT_MAX (cap_obj) = OBJECT_MAX (obj);
		    if (OBJECT_MIN (cap_obj) > OBJECT_MIN (obj))
		      OBJECT_MIN (cap_obj) = OBJECT_MIN (obj);
		  }
		continue;
	      }
	    if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) == NULL)
	      continue;
	    parent_a = parent->regno_allocno_map[i];
	    parent_obj = ALLOCNO_OBJECT (parent_a, j);
	    if (OBJECT_MAX (parent_obj) < OBJECT_MAX (obj))
	      OBJECT_MAX (parent_obj) = OBJECT_MAX (obj);
	    if (OBJECT_MIN (parent_obj) > OBJECT_MIN (obj))
	      OBJECT_MIN (parent_obj) = OBJECT_MIN (obj);
	  }
      }
#ifdef ENABLE_IRA_CHECKING
  /* All min/max values must lie within [0, ira_max_point].  */
  FOR_EACH_OBJECT (obj, oi)
    {
      if ((0 <= OBJECT_MIN (obj) && OBJECT_MIN (obj) <= ira_max_point)
	  && (0 <= OBJECT_MAX (obj) && OBJECT_MAX (obj) <= ira_max_point))
	continue;
      gcc_unreachable ();
    }
#endif
}
2436 /* Sort allocnos according to their live ranges. Allocnos with
2437 smaller allocno class are put first unless we use priority
2438 coloring. Allocnos with the same class are ordered according
2439 their start (min). Allocnos with the same start are ordered
2440 according their finish (max). */
2441 static int
2442 object_range_compare_func (const void *v1p, const void *v2p)
2444 int diff;
2445 ira_object_t obj1 = *(const ira_object_t *) v1p;
2446 ira_object_t obj2 = *(const ira_object_t *) v2p;
2447 ira_allocno_t a1 = OBJECT_ALLOCNO (obj1);
2448 ira_allocno_t a2 = OBJECT_ALLOCNO (obj2);
2450 if ((diff = OBJECT_MIN (obj1) - OBJECT_MIN (obj2)) != 0)
2451 return diff;
2452 if ((diff = OBJECT_MAX (obj1) - OBJECT_MAX (obj2)) != 0)
2453 return diff;
2454 return ALLOCNO_NUM (a1) - ALLOCNO_NUM (a2);
/* Sort ira_object_id_map and set up conflict id of allocnos.  After
   this, OBJECT_CONFLICT_ID gives each object's position in the range
   order defined by object_range_compare_func; unused map slots are
   cleared.  */
static void
sort_conflict_id_map (void)
{
  int i, num;
  ira_allocno_t a;
  ira_allocno_iterator ai;

  num = 0;
  /* Compact all objects to the front of the id map.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      ira_allocno_object_iterator oi;
      ira_object_t obj;

      FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
	ira_object_id_map[num++] = obj;
    }
  qsort (ira_object_id_map, num, sizeof (ira_object_t),
	 object_range_compare_func);
  for (i = 0; i < num; i++)
    {
      ira_object_t obj = ira_object_id_map[i];

      gcc_assert (obj != NULL);
      OBJECT_CONFLICT_ID (obj) = i;
    }
  for (i = num; i < ira_objects_num; i++)
    ira_object_id_map[i] = NULL;
}
/* Set up minimal and maximal conflict ids of allocnos with which a
   given allocno can conflict.  Reuses OBJECT_MIN/OBJECT_MAX (which
   held live range endpoints) to store conflict-id bounds instead:
   objects whose conflict ids lie outside [MIN, MAX] can never
   conflict with the object, which speeds up conflict detection.  */
static void
setup_min_max_conflict_allocno_ids (void)
{
  int aclass;
  int i, j, min, max, start, finish, first_not_finished, filled_area_start;
  int *live_range_min, *last_lived;
  int word0_min, word0_max;
  ira_allocno_t a;
  ira_allocno_iterator ai;

  /* Forward pass: compute the minimal conflicting id for each object.
     Objects are visited in conflict-id order (sorted by range start),
     so an object cannot conflict with any earlier object whose range
     already finished before this object's start.  */
  live_range_min = (int *) ira_allocate (sizeof (int) * ira_objects_num);
  aclass = -1;
  first_not_finished = -1;
  for (i = 0; i < ira_objects_num; i++)
    {
      ira_object_t obj = ira_object_id_map[i];

      if (obj == NULL)
	continue;

      a = OBJECT_ALLOCNO (obj);

      if (aclass < 0)
	{
	  aclass = ALLOCNO_CLASS (a);
	  min = i;
	  first_not_finished = i;
	}
      else
	{
	  start = OBJECT_MIN (obj);
	  /* If we skip an allocno, the allocnos with smaller ids will
	     be also skipped because of the secondary sorting by the
	     range finishes (see function
	     object_range_compare_func).  */
	  while (first_not_finished < i
		 && start > OBJECT_MAX (ira_object_id_map
					[first_not_finished]))
	    first_not_finished++;
	  min = first_not_finished;
	}
      if (min == i)
	/* We could increase min further in this case but it is good
	   enough.  */
	min++;
      live_range_min[i] = OBJECT_MIN (obj);
      OBJECT_MIN (obj) = min;
    }
  /* Backward pass: compute the maximal conflicting id.  LAST_LIVED[p]
     records the smallest-id object (seen so far, i.e. with the
     largest id overall) still live at point p.  */
  last_lived = (int *) ira_allocate (sizeof (int) * ira_max_point);
  aclass = -1;
  filled_area_start = -1;
  for (i = ira_objects_num - 1; i >= 0; i--)
    {
      ira_object_t obj = ira_object_id_map[i];

      if (obj == NULL)
	continue;

      a = OBJECT_ALLOCNO (obj);
      if (aclass < 0)
	{
	  aclass = ALLOCNO_CLASS (a);
	  for (j = 0; j < ira_max_point; j++)
	    last_lived[j] = -1;
	  filled_area_start = ira_max_point;
	}
      min = live_range_min[i];
      finish = OBJECT_MAX (obj);
      max = last_lived[finish];
      if (max < 0)
	/* We could decrease max further in this case but it is good
	   enough.  */
	max = OBJECT_CONFLICT_ID (obj) - 1;
      OBJECT_MAX (obj) = max;
      /* In filling, we can go beyond the range finish of object A to
	 recognize intersection quickly: if the range finish of a
	 subsequently processed allocno (which has a smaller conflict
	 id) lies beyond A's range finish, the two ranges definitely
	 intersect, because allocnos with bigger conflict ids have
	 range starts not smaller than allocnos with smaller ids.  */
      for (j = min; j < filled_area_start; j++)
	last_lived[j] = i;
      filled_area_start = min;
    }
  ira_free (last_lived);
  ira_free (live_range_min);

  /* For allocnos with more than one object, we may later record extra
     conflicts in subobject 0 that we cannot really know about here.
     For now, simply widen the min/max range of these subobjects.  */
  word0_min = INT_MAX;
  word0_max = INT_MIN;
  FOR_EACH_ALLOCNO (a, ai)
    {
      int n = ALLOCNO_NUM_OBJECTS (a);
      ira_object_t obj0;

      if (n < 2)
	continue;
      obj0 = ALLOCNO_OBJECT (a, 0);
      if (OBJECT_CONFLICT_ID (obj0) < word0_min)
	word0_min = OBJECT_CONFLICT_ID (obj0);
      if (OBJECT_CONFLICT_ID (obj0) > word0_max)
	word0_max = OBJECT_CONFLICT_ID (obj0);
    }
  FOR_EACH_ALLOCNO (a, ai)
    {
      int n = ALLOCNO_NUM_OBJECTS (a);
      ira_object_t obj0;

      if (n < 2)
	continue;
      obj0 = ALLOCNO_OBJECT (a, 0);
      if (OBJECT_MIN (obj0) > word0_min)
	OBJECT_MIN (obj0) = word0_min;
      if (OBJECT_MAX (obj0) < word0_max)
	OBJECT_MAX (obj0) = word0_max;
    }
}
/* Create caps representing allocnos living only inside the loop given
   by a loop tree node in the node's parent region.  A cap is created
   for every allocno that is not at the root, is not already capped,
   and does not live on the loop border (border allocnos get a regular
   parent allocno instead).  */
static void
create_caps (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;
  ira_loop_tree_node_t loop_tree_node;

  FOR_EACH_ALLOCNO (a, ai)
    {
      if (ALLOCNO_LOOP_TREE_NODE (a) == ira_loop_tree_root)
	continue;
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	/* A is itself a cap: cap it in the next region up.  */
	create_cap_allocno (a);
      else if (ALLOCNO_CAP (a) == NULL)
	{
	  loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
	  if (!bitmap_bit_p (loop_tree_node->border_allocnos, ALLOCNO_NUM (a)))
	    create_cap_allocno (a);
	}
    }
}
/* The page contains code transforming a more-than-one-region internal
   representation (IR) to a one region IR which is necessary for
   reload.  This transformation is called IR flattening.  We might
   just rebuild the IR for one region but we don't do it because it
   takes a lot of time.  */

/* Map: regno -> allocno which will finally represent the regno for
   IR with one region.  */
static ira_allocno_t *regno_top_level_allocno_map;
2648 /* Find the allocno that corresponds to A at a level one higher up in the
2649 loop tree. Returns NULL if A is a cap, or if it has no parent. */
2650 ira_allocno_t
2651 ira_parent_allocno (ira_allocno_t a)
2653 ira_loop_tree_node_t parent;
2655 if (ALLOCNO_CAP (a) != NULL)
2656 return NULL;
2658 parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
2659 if (parent == NULL)
2660 return NULL;
2662 return parent->regno_allocno_map[ALLOCNO_REGNO (a)];
2665 /* Find the allocno that corresponds to A at a level one higher up in the
2666 loop tree. If ALLOCNO_CAP is set for A, return that. */
2667 ira_allocno_t
2668 ira_parent_or_cap_allocno (ira_allocno_t a)
2670 if (ALLOCNO_CAP (a) != NULL)
2671 return ALLOCNO_CAP (a);
2673 return ira_parent_allocno (a);
/* Process all allocnos originated from pseudo REGNO and copy live
   ranges, hard reg conflicts, and allocno stack reg attributes from
   low level allocnos to final allocnos which are destinations of
   removed stores at a loop exit.  Return true if we copied live
   ranges.  */
static bool
copy_info_to_removed_store_destinations (int regno)
{
  ira_allocno_t a;
  ira_allocno_t parent_a = NULL;
  ira_loop_tree_node_t parent;
  bool merged_p;

  merged_p = false;
  for (a = ira_regno_allocno_map[regno];
       a != NULL;
       a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
    {
      if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))])
	/* This allocno will be removed.  */
	continue;

      /* Caps will be removed.  */
      ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
      /* Walk up to the nearest surviving ancestor allocno that is an
	 optimized memory-store destination.  */
      for (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
	   parent != NULL;
	   parent = parent->parent)
	if ((parent_a = parent->regno_allocno_map[regno]) == NULL
	    || (parent_a
		== regno_top_level_allocno_map[REGNO
					       (allocno_emit_reg (parent_a))]
		&& ALLOCNO_EMIT_DATA (parent_a)->mem_optimized_dest_p))
	  break;
      if (parent == NULL || parent_a == NULL)
	continue;

      copy_allocno_live_ranges (a, parent_a);
      merge_hard_reg_conflicts (a, parent_a, true);

      ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
      ALLOCNO_CALLS_CROSSED_NUM (parent_a)
	+= ALLOCNO_CALLS_CROSSED_NUM (a);
      ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
	+= ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
      ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
	+= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
      merged_p = true;
    }
  return merged_p;
}
/* Flatten the IR.  In other words, this function transforms IR as if
   it were built with one region (without loops).  We could make it
   much simpler by rebuilding IR with one region, but unfortunately it
   takes a lot of time.  MAX_REGNO_BEFORE_EMIT and
   IRA_MAX_POINT_BEFORE_EMIT are correspondingly MAX_REG_NUM () and
   IRA_MAX_POINT before emitting insns on the loop borders.  */
void
ira_flattening (int max_regno_before_emit, int ira_max_point_before_emit)
{
  int i, j;
  bool keep_p;
  int hard_regs_num;
  bool new_pseudos_p, merged_p, mem_dest_p;
  unsigned int n;
  enum reg_class aclass;
  ira_allocno_t a, parent_a, first, second, node_first, node_second;
  ira_copy_t cp;
  ira_loop_tree_node_t node;
  live_range_t r;
  ira_allocno_iterator ai;
  ira_copy_iterator ci;

  regno_top_level_allocno_map
    = (ira_allocno_t *) ira_allocate (max_reg_num ()
				      * sizeof (ira_allocno_t));
  memset (regno_top_level_allocno_map, 0,
	  max_reg_num () * sizeof (ira_allocno_t));
  new_pseudos_p = merged_p = false;
  /* Start from the "total" conflict info, which is what matters once
     there is a single region.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      ira_allocno_object_iterator oi;
      ira_object_t obj;

      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	/* Caps are not in the regno allocno maps and they are never
	   will be transformed into allocnos existing after IR
	   flattening.  */
	continue;
      FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
	COPY_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
			   OBJECT_CONFLICT_HARD_REGS (obj));
#ifdef STACK_REGS
      ALLOCNO_TOTAL_NO_STACK_REG_P (a) = ALLOCNO_NO_STACK_REG_P (a);
#endif
    }
  /* Fix final allocno attributes.  */
  for (i = max_regno_before_emit - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    {
      mem_dest_p = false;
      for (a = ira_regno_allocno_map[i];
	   a != NULL;
	   a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
	{
	  ira_emit_data_t parent_data, data = ALLOCNO_EMIT_DATA (a);

	  ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
	  if (data->somewhere_renamed_p)
	    new_pseudos_p = true;
	  parent_a = ira_parent_allocno (a);
	  if (parent_a == NULL)
	    {
	      /* Top-level allocno: it represents its pseudo.  */
	      ALLOCNO_COPIES (a) = NULL;
	      regno_top_level_allocno_map[REGNO (data->reg)] = a;
	      continue;
	    }
	  ira_assert (ALLOCNO_CAP_MEMBER (parent_a) == NULL);

	  if (data->mem_optimized_dest != NULL)
	    mem_dest_p = true;
	  parent_data = ALLOCNO_EMIT_DATA (parent_a);
	  if (REGNO (data->reg) == REGNO (parent_data->reg))
	    {
	      /* Same pseudo as the parent: merge into the parent.  */
	      merge_hard_reg_conflicts (a, parent_a, true);
	      move_allocno_live_ranges (a, parent_a);
	      merged_p = true;
	      parent_data->mem_optimized_dest_p
		= (parent_data->mem_optimized_dest_p
		   || data->mem_optimized_dest_p);
	      continue;
	    }
	  /* A was renamed to a new pseudo: subtract its contribution
	     from all ancestors since it now stands on its own.  */
	  new_pseudos_p = true;
	  for (;;)
	    {
	      ALLOCNO_NREFS (parent_a) -= ALLOCNO_NREFS (a);
	      ALLOCNO_FREQ (parent_a) -= ALLOCNO_FREQ (a);
	      ALLOCNO_CALL_FREQ (parent_a) -= ALLOCNO_CALL_FREQ (a);
	      ALLOCNO_CALLS_CROSSED_NUM (parent_a)
		-= ALLOCNO_CALLS_CROSSED_NUM (a);
	      ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
		-= ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
	      ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
		-= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
	      ira_assert (ALLOCNO_CALLS_CROSSED_NUM (parent_a) >= 0
			  && ALLOCNO_NREFS (parent_a) >= 0
			  && ALLOCNO_FREQ (parent_a) >= 0);
	      aclass = ALLOCNO_CLASS (parent_a);
	      hard_regs_num = ira_class_hard_regs_num[aclass];
	      if (ALLOCNO_HARD_REG_COSTS (a) != NULL
		  && ALLOCNO_HARD_REG_COSTS (parent_a) != NULL)
		for (j = 0; j < hard_regs_num; j++)
		  ALLOCNO_HARD_REG_COSTS (parent_a)[j]
		    -= ALLOCNO_HARD_REG_COSTS (a)[j];
	      if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL
		  && ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a) != NULL)
		for (j = 0; j < hard_regs_num; j++)
		  ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a)[j]
		    -= ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[j];
	      ALLOCNO_CLASS_COST (parent_a)
		-= ALLOCNO_CLASS_COST (a);
	      ALLOCNO_MEMORY_COST (parent_a) -= ALLOCNO_MEMORY_COST (a);
	      parent_a = ira_parent_allocno (parent_a);
	      if (parent_a == NULL)
		break;
	    }
	  ALLOCNO_COPIES (a) = NULL;
	  regno_top_level_allocno_map[REGNO (data->reg)] = a;
	}
      if (mem_dest_p && copy_info_to_removed_store_destinations (i))
	merged_p = true;
    }
  ira_assert (new_pseudos_p || ira_max_point_before_emit == ira_max_point);
  if (merged_p || ira_max_point_before_emit != ira_max_point)
    ira_rebuild_start_finish_chains ();
  if (new_pseudos_p)
    {
      sparseset objects_live;

      /* Rebuild conflicts.  */
      FOR_EACH_ALLOCNO (a, ai)
	{
	  ira_allocno_object_iterator oi;
	  ira_object_t obj;

	  if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
	      || ALLOCNO_CAP_MEMBER (a) != NULL)
	    continue;
	  FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
	    {
	      for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
		ira_assert (r->object == obj);
	      clear_conflicts (obj);
	    }
	}
      /* Sweep over all program points maintaining the set of live
	 objects, adding a conflict for every simultaneously-live pair
	 with intersecting classes.  */
      objects_live = sparseset_alloc (ira_objects_num);
      for (i = 0; i < ira_max_point; i++)
	{
	  for (r = ira_start_point_ranges[i]; r != NULL; r = r->start_next)
	    {
	      ira_object_t obj = r->object;

	      a = OBJECT_ALLOCNO (obj);
	      if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
		  || ALLOCNO_CAP_MEMBER (a) != NULL)
		continue;

	      aclass = ALLOCNO_CLASS (a);
	      sparseset_set_bit (objects_live, OBJECT_CONFLICT_ID (obj));
	      EXECUTE_IF_SET_IN_SPARSESET (objects_live, n)
		{
		  ira_object_t live_obj = ira_object_id_map[n];
		  ira_allocno_t live_a = OBJECT_ALLOCNO (live_obj);
		  enum reg_class live_aclass = ALLOCNO_CLASS (live_a);

		  if (ira_reg_classes_intersect_p[aclass][live_aclass]
		      /* Don't set up conflict for the allocno with itself.  */
		      && live_a != a)
		    ira_add_conflict (obj, live_obj);
		}
	    }

	  for (r = ira_finish_point_ranges[i]; r != NULL; r = r->finish_next)
	    sparseset_clear_bit (objects_live, OBJECT_CONFLICT_ID (r->object));
	}
      sparseset_free (objects_live);
      compress_conflict_vecs ();
    }
  /* Mark some copies for removing and change allocnos in the rest
     copies.  */
  FOR_EACH_COPY (cp, ci)
    {
      if (ALLOCNO_CAP_MEMBER (cp->first) != NULL
	  || ALLOCNO_CAP_MEMBER (cp->second) != NULL)
	{
	  if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	    fprintf
	      (ira_dump_file, "      Remove cp%d:%c%dr%d-%c%dr%d\n",
	       cp->num, ALLOCNO_CAP_MEMBER (cp->first) != NULL ? 'c' : 'a',
	       ALLOCNO_NUM (cp->first),
	       REGNO (allocno_emit_reg (cp->first)),
	       ALLOCNO_CAP_MEMBER (cp->second) != NULL ? 'c' : 'a',
	       ALLOCNO_NUM (cp->second),
	       REGNO (allocno_emit_reg (cp->second)));
	  cp->loop_tree_node = NULL;
	  continue;
	}
      first
	= regno_top_level_allocno_map[REGNO (allocno_emit_reg (cp->first))];
      second
	= regno_top_level_allocno_map[REGNO (allocno_emit_reg (cp->second))];
      node = cp->loop_tree_node;
      if (node == NULL)
	keep_p = true; /* The copy was generated in ira-emit.c.  */
      else
	{
	  /* Check that the copy was not propagated from level on
	     which we will have different pseudos.  */
	  node_first = node->regno_allocno_map[ALLOCNO_REGNO (cp->first)];
	  node_second = node->regno_allocno_map[ALLOCNO_REGNO (cp->second)];
	  keep_p = ((REGNO (allocno_emit_reg (first))
		     == REGNO (allocno_emit_reg (node_first)))
		    && (REGNO (allocno_emit_reg (second))
			== REGNO (allocno_emit_reg (node_second))));
	}
      if (keep_p)
	{
	  cp->loop_tree_node = ira_loop_tree_root;
	  cp->first = first;
	  cp->second = second;
	}
      else
	{
	  cp->loop_tree_node = NULL;
	  if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	    fprintf (ira_dump_file, "      Remove cp%d:a%dr%d-a%dr%d\n",
		     cp->num, ALLOCNO_NUM (cp->first),
		     REGNO (allocno_emit_reg (cp->first)),
		     ALLOCNO_NUM (cp->second),
		     REGNO (allocno_emit_reg (cp->second)));
	}
    }
  /* Remove unnecessary allocnos on lower levels of the loop tree.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
	  || ALLOCNO_CAP_MEMBER (a) != NULL)
	{
	  if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	    fprintf (ira_dump_file, "      Remove a%dr%d\n",
		     ALLOCNO_NUM (a), REGNO (allocno_emit_reg (a)));
	  finish_allocno (a);
	  continue;
	}
      ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
      ALLOCNO_REGNO (a) = REGNO (allocno_emit_reg (a));
      ALLOCNO_CAP (a) = NULL;
      /* Restore updated costs for assignments from reload.  */
      ALLOCNO_UPDATED_MEMORY_COST (a) = ALLOCNO_MEMORY_COST (a);
      ALLOCNO_UPDATED_CLASS_COST (a) = ALLOCNO_CLASS_COST (a);
      if (! ALLOCNO_ASSIGNED_P (a))
	ira_free_allocno_updated_costs (a);
      ira_assert (ALLOCNO_UPDATED_HARD_REG_COSTS (a) == NULL);
      ira_assert (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) == NULL);
    }
  /* Remove unnecessary copies.  */
  FOR_EACH_COPY (cp, ci)
    {
      if (cp->loop_tree_node == NULL)
	{
	  ira_copies[cp->num] = NULL;
	  finish_copy (cp);
	  continue;
	}
      ira_assert
	(ALLOCNO_LOOP_TREE_NODE (cp->first) == ira_loop_tree_root
	 && ALLOCNO_LOOP_TREE_NODE (cp->second) == ira_loop_tree_root);
      ira_add_allocno_copy_to_list (cp);
      ira_swap_allocno_copy_ends_if_necessary (cp);
    }
  rebuild_regno_allocno_maps ();
  if (ira_max_point != ira_max_point_before_emit)
    ira_compress_allocno_live_ranges ();
  ira_free (regno_top_level_allocno_map);
}
#ifdef ENABLE_IRA_CHECKING
/* Check creation of all allocnos.  Allocnos on lower levels should
   have allocnos or caps on all upper levels.  */
static void
check_allocno_creation (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;
  ira_loop_tree_node_t loop_tree_node;

  FOR_EACH_ALLOCNO (a, ai)
    {
      loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
      ira_assert (bitmap_bit_p (loop_tree_node->all_allocnos,
				ALLOCNO_NUM (a)));
      if (loop_tree_node == ira_loop_tree_root)
	continue;
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	ira_assert (ALLOCNO_CAP (a) != NULL);
      else if (ALLOCNO_CAP (a) == NULL)
	/* An uncapped non-root allocno must have a parent allocno and
	   must live on the loop border.  */
	ira_assert (loop_tree_node->parent
		    ->regno_allocno_map[ALLOCNO_REGNO (a)] != NULL
		    && bitmap_bit_p (loop_tree_node->border_allocnos,
				     ALLOCNO_NUM (a)));
    }
}
#endif
/* Identify allocnos which prefer a register class with a single hard
   register.  Adjust ALLOCNO_CONFLICT_HARD_REG_COSTS so that
   conflicting allocnos are less likely to use the preferred singleton
   register.  */
static void
update_conflict_hard_reg_costs (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;
  int i, index, min;

  FOR_EACH_ALLOCNO (a, ai)
    {
      reg_class_t aclass = ALLOCNO_CLASS (a);
      reg_class_t pref = reg_preferred_class (ALLOCNO_REGNO (a));

      /* Only singleton preferred classes are interesting here.  */
      if (reg_class_size[(int) pref] != 1)
	continue;
      index = ira_class_hard_reg_index[(int) aclass]
	      [ira_class_hard_regs[(int) pref][0]];
      if (index < 0)
	continue;
      if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) == NULL
	  || ALLOCNO_HARD_REG_COSTS (a) == NULL)
	continue;
      /* Find the cheapest cost among the registers that are worse
	 than the class cost; if none, the preference carries no
	 penalty information.  */
      min = INT_MAX;
      for (i = ira_class_hard_regs_num[(int) aclass] - 1; i >= 0; i--)
	if (ALLOCNO_HARD_REG_COSTS (a)[i] > ALLOCNO_CLASS_COST (a)
	    && min > ALLOCNO_HARD_REG_COSTS (a)[i])
	  min = ALLOCNO_HARD_REG_COSTS (a)[i];
      if (min == INT_MAX)
	continue;
      ira_allocate_and_set_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
				  aclass, 0);
      ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[index]
	-= min - ALLOCNO_CLASS_COST (a);
    }
}
/* Create an internal representation (IR) for IRA (allocnos, copies,
   loop tree nodes).  The function returns TRUE if we generate loop
   structure (besides nodes representing all function and the basic
   blocks) for regional allocation.  A true return means that we
   really need to flatten IR before the reload.  */
bool
ira_build (void)
{
  bool loops_p;

  df_analyze ();
  initiate_cost_vectors ();
  initiate_allocnos ();
  initiate_copies ();
  create_loop_tree_nodes ();
  form_loop_tree ();
  create_allocnos ();
  ira_costs ();
  create_allocno_objects ();
  ira_create_allocno_live_ranges ();
  /* Loop removal needs register pressure, so it must come after costs
     and live ranges are known.  */
  remove_unnecessary_regions (false);
  ira_compress_allocno_live_ranges ();
  update_bad_spill_attribute ();
  loops_p = more_one_region_p ();
  if (loops_p)
    {
      propagate_allocno_info ();
      create_caps ();
    }
  ira_tune_allocno_costs ();
#ifdef ENABLE_IRA_CHECKING
  check_allocno_creation ();
#endif
  setup_min_max_allocno_live_range_point ();
  sort_conflict_id_map ();
  setup_min_max_conflict_allocno_ids ();
  ira_build_conflicts ();
  update_conflict_hard_reg_costs ();
  if (! ira_conflicts_p)
    {
      ira_allocno_t a;
      ira_allocno_iterator ai;

      /* Remove all regions but root one.  */
      if (loops_p)
	{
	  remove_unnecessary_regions (true);
	  loops_p = false;
	}
      /* We don't save hard registers around calls for fast allocation
	 -- add caller clobbered registers as conflicting ones to
	 allocno crossing calls.  */
      FOR_EACH_ALLOCNO (a, ai)
	if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
	  ior_hard_reg_conflicts (a, &call_used_reg_set);
    }
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    print_copies (ira_dump_file);
  if (internal_flag_ira_verbose > 0 && ira_dump_file != NULL)
    {
      int n, nr, nr_big;
      ira_allocno_t a;
      live_range_t r;
      ira_allocno_iterator ai;

      /* Gather and dump IR statistics: conflicts, ranges, and
	 multi-object ("big") allocnos.  */
      n = 0;
      nr = 0;
      nr_big = 0;
      FOR_EACH_ALLOCNO (a, ai)
	{
	  int j, nobj = ALLOCNO_NUM_OBJECTS (a);

	  if (nobj > 1)
	    nr_big++;
	  for (j = 0; j < nobj; j++)
	    {
	      ira_object_t obj = ALLOCNO_OBJECT (a, j);
	      n += OBJECT_NUM_CONFLICTS (obj);
	      for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
		nr++;
	    }
	}
      fprintf (ira_dump_file, "  regions=%d, blocks=%d, points=%d\n",
	       current_loops == NULL ? 1 : VEC_length (loop_p, ira_loops.larray),
	       n_basic_blocks, ira_max_point);
      fprintf (ira_dump_file,
	       "    allocnos=%d (big %d), copies=%d, conflicts=%d, ranges=%d\n",
	       ira_allocnos_num, nr_big, ira_copies_num, n, nr);
    }
  return loops_p;
}
/* Release the data created by function ira_build.  The teardown order
   mirrors the dependencies between the sub-structures and should not
   be changed.  */
void
ira_destroy (void)
{
  finish_loop_tree_nodes ();
  finish_copies ();
  finish_allocnos ();
  finish_cost_vectors ();
  ira_finish_allocno_live_ranges ();
}