/* Building internal representation for IRA.
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "target.h"
#include "regs.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "diagnostic-core.h"
#include "params.h"
#include "df.h"
#include "reload.h"
#include "sparseset.h"
#include "ira-int.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
static ira_copy_t find_allocno_copy (ira_allocno_t, ira_allocno_t, rtx,
                                     ira_loop_tree_node_t);

/* The root of the loop tree corresponding to the whole function.  */
ira_loop_tree_node_t ira_loop_tree_root;

/* Height of the loop tree.  */
int ira_loop_tree_height;

/* All nodes representing basic blocks are referred through the
   following array.  We can not use basic block member `aux' for this
   because it is used for insertion of insns on edges.  */
ira_loop_tree_node_t ira_bb_nodes;

/* All nodes representing loops are referred through the following
   array.  */
ira_loop_tree_node_t ira_loop_nodes;

/* Map regno -> allocnos with given regno (see comments for
   allocno member `next_regno_allocno').  */
ira_allocno_t *ira_regno_allocno_map;

/* Array of references to all allocnos.  The order number of the
   allocno corresponds to the index in the array.  Removed allocnos
   have NULL element value.  */
ira_allocno_t *ira_allocnos;

/* Size of the previous array.  */
int ira_allocnos_num;

/* Count of conflict record structures we've created, used when creating
   a new conflict id.  */
int ira_objects_num;

/* Map a conflict id to its conflict record.  */
ira_object_t *ira_object_id_map;

/* Array of references to all copies.  The order number of the copy
   corresponds to the index in the array.  Removed copies have NULL
   element value.  */
ira_copy_t *ira_copies;

/* Size of the previous array.  */
int ira_copies_num;

/* LAST_BASIC_BLOCK before generating additional insns because of live
   range splitting.  Emitting insns on a critical edge creates a new
   basic block.  */
static int last_basic_block_before_change;
/* Initialize some members in loop tree node NODE.  Use LOOP_NUM for
   the member loop_num.  */
static void
init_loop_tree_node (struct ira_loop_tree_node *node, int loop_num)
{
  int max_regno = max_reg_num ();

  node->regno_allocno_map
    = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t) * max_regno);
  memset (node->regno_allocno_map, 0, sizeof (ira_allocno_t) * max_regno);
  memset (node->reg_pressure, 0, sizeof (node->reg_pressure));
  node->all_allocnos = ira_allocate_bitmap ();
  node->modified_regnos = ira_allocate_bitmap ();
  node->border_allocnos = ira_allocate_bitmap ();
  node->local_copies = ira_allocate_bitmap ();
  node->loop_num = loop_num;
  node->children = NULL;
  node->subloops = NULL;
}
/* The following function allocates the loop tree nodes.  If
   CURRENT_LOOPS is NULL, the nodes corresponding to the loops (except
   the root, which corresponds to the whole function) will not be
   allocated, but nodes will still be allocated for basic blocks.  */
static void
create_loop_tree_nodes (void)
{
  unsigned int i, j;
  bool skip_p;
  edge_iterator ei;
  edge e;
  VEC (edge, heap) *edges;
  loop_p loop;

  ira_bb_nodes
    = ((struct ira_loop_tree_node *)
       ira_allocate (sizeof (struct ira_loop_tree_node) * last_basic_block));
  last_basic_block_before_change = last_basic_block;
  for (i = 0; i < (unsigned int) last_basic_block; i++)
    {
      ira_bb_nodes[i].regno_allocno_map = NULL;
      memset (ira_bb_nodes[i].reg_pressure, 0,
              sizeof (ira_bb_nodes[i].reg_pressure));
      ira_bb_nodes[i].all_allocnos = NULL;
      ira_bb_nodes[i].modified_regnos = NULL;
      ira_bb_nodes[i].border_allocnos = NULL;
      ira_bb_nodes[i].local_copies = NULL;
    }
  if (current_loops == NULL)
    {
      ira_loop_nodes = ((struct ira_loop_tree_node *)
                        ira_allocate (sizeof (struct ira_loop_tree_node)));
      init_loop_tree_node (ira_loop_nodes, 0);
      return;
    }
  ira_loop_nodes = ((struct ira_loop_tree_node *)
                    ira_allocate (sizeof (struct ira_loop_tree_node)
                                  * VEC_length (loop_p, ira_loops.larray)));
  FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
    {
      if (loop != ira_loops.tree_root)
        {
          ira_loop_nodes[i].regno_allocno_map = NULL;
          skip_p = false;
          FOR_EACH_EDGE (e, ei, loop->header->preds)
            if (e->src != loop->latch
                && (e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
              {
                skip_p = true;
                break;
              }
          if (skip_p)
            continue;
          edges = get_loop_exit_edges (loop);
          FOR_EACH_VEC_ELT (edge, edges, j, e)
            if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
              {
                skip_p = true;
                break;
              }
          VEC_free (edge, heap, edges);
          if (skip_p)
            continue;
        }
      init_loop_tree_node (&ira_loop_nodes[i], loop->num);
    }
}
/* The function returns TRUE if there is more than one allocation
   region.  */
static bool
more_one_region_p (void)
{
  unsigned int i;
  loop_p loop;

  if (current_loops != NULL)
    FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
      if (ira_loop_nodes[i].regno_allocno_map != NULL
          && ira_loop_tree_root != &ira_loop_nodes[i])
        return true;
  return false;
}
/* Free the loop tree node of a loop.  */
static void
finish_loop_tree_node (ira_loop_tree_node_t loop)
{
  if (loop->regno_allocno_map != NULL)
    {
      ira_assert (loop->bb == NULL);
      ira_free_bitmap (loop->local_copies);
      ira_free_bitmap (loop->border_allocnos);
      ira_free_bitmap (loop->modified_regnos);
      ira_free_bitmap (loop->all_allocnos);
      ira_free (loop->regno_allocno_map);
      loop->regno_allocno_map = NULL;
    }
}

/* Free the loop tree nodes.  */
static void
finish_loop_tree_nodes (void)
{
  unsigned int i;
  loop_p loop;

  if (current_loops == NULL)
    finish_loop_tree_node (&ira_loop_nodes[0]);
  else
    FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
      finish_loop_tree_node (&ira_loop_nodes[i]);
  ira_free (ira_loop_nodes);
  for (i = 0; i < (unsigned int) last_basic_block_before_change; i++)
    {
      if (ira_bb_nodes[i].local_copies != NULL)
        ira_free_bitmap (ira_bb_nodes[i].local_copies);
      if (ira_bb_nodes[i].border_allocnos != NULL)
        ira_free_bitmap (ira_bb_nodes[i].border_allocnos);
      if (ira_bb_nodes[i].modified_regnos != NULL)
        ira_free_bitmap (ira_bb_nodes[i].modified_regnos);
      if (ira_bb_nodes[i].all_allocnos != NULL)
        ira_free_bitmap (ira_bb_nodes[i].all_allocnos);
      if (ira_bb_nodes[i].regno_allocno_map != NULL)
        ira_free (ira_bb_nodes[i].regno_allocno_map);
    }
  ira_free (ira_bb_nodes);
}
/* The following recursive function adds LOOP to the loop tree
   hierarchy.  LOOP is added only once.  If LOOP is NULL, we add the
   loop designating the whole function when CFG loops are not
   built.  */
static void
add_loop_to_tree (struct loop *loop)
{
  int loop_num;
  struct loop *parent;
  ira_loop_tree_node_t loop_node, parent_node;

  /* We can not use loop node access macros here because of potential
     checking and because the nodes are not initialized enough
     yet.  */
  if (loop != NULL && loop_outer (loop) != NULL)
    add_loop_to_tree (loop_outer (loop));
  loop_num = loop != NULL ? loop->num : 0;
  if (ira_loop_nodes[loop_num].regno_allocno_map != NULL
      && ira_loop_nodes[loop_num].children == NULL)
    {
      /* We have not added loop node to the tree yet.  */
      loop_node = &ira_loop_nodes[loop_num];
      loop_node->loop = loop;
      loop_node->bb = NULL;
      if (loop == NULL)
        parent = NULL;
      else
        {
          for (parent = loop_outer (loop);
               parent != NULL;
               parent = loop_outer (parent))
            if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
              break;
        }
      if (parent == NULL)
        {
          loop_node->next = NULL;
          loop_node->subloop_next = NULL;
          loop_node->parent = NULL;
        }
      else
        {
          parent_node = &ira_loop_nodes[parent->num];
          loop_node->next = parent_node->children;
          parent_node->children = loop_node;
          loop_node->subloop_next = parent_node->subloops;
          parent_node->subloops = loop_node;
          loop_node->parent = parent_node;
        }
    }
}
/* The following recursive function sets up levels of nodes of the
   tree given its root LOOP_NODE.  The enumeration starts with LEVEL.
   The function returns the maximal level in the tree plus 1.  */
static int
setup_loop_tree_level (ira_loop_tree_node_t loop_node, int level)
{
  int height, max_height;
  ira_loop_tree_node_t subloop_node;

  ira_assert (loop_node->bb == NULL);
  loop_node->level = level;
  max_height = level + 1;
  for (subloop_node = loop_node->subloops;
       subloop_node != NULL;
       subloop_node = subloop_node->subloop_next)
    {
      ira_assert (subloop_node->bb == NULL);
      height = setup_loop_tree_level (subloop_node, level + 1);
      if (height > max_height)
        max_height = height;
    }
  return max_height;
}
/* Create the loop tree.  The algorithm is designed to provide correct
   order of loops (they are ordered by their last loop BB) and basic
   blocks in the chain formed by member next.  */
static void
form_loop_tree (void)
{
  basic_block bb;
  struct loop *parent;
  ira_loop_tree_node_t bb_node, loop_node;

  /* We can not use loop/bb node access macros because of potential
     checking and because the nodes are not initialized enough
     yet.  */
  FOR_EACH_BB (bb)
    {
      bb_node = &ira_bb_nodes[bb->index];
      bb_node->bb = bb;
      bb_node->loop = NULL;
      bb_node->subloops = NULL;
      bb_node->children = NULL;
      bb_node->subloop_next = NULL;
      bb_node->next = NULL;
      if (current_loops == NULL)
        parent = NULL;
      else
        {
          for (parent = bb->loop_father;
               parent != NULL;
               parent = loop_outer (parent))
            if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
              break;
        }
      add_loop_to_tree (parent);
      loop_node = &ira_loop_nodes[parent == NULL ? 0 : parent->num];
      bb_node->next = loop_node->children;
      bb_node->parent = loop_node;
      loop_node->children = bb_node;
    }
  ira_loop_tree_root = IRA_LOOP_NODE_BY_INDEX (0);
  ira_loop_tree_height = setup_loop_tree_level (ira_loop_tree_root, 0);
  ira_assert (ira_loop_tree_root->regno_allocno_map != NULL);
}
/* Rebuild IRA_REGNO_ALLOCNO_MAP and REGNO_ALLOCNO_MAPs of the loop
   tree nodes.  */
static void
rebuild_regno_allocno_maps (void)
{
  unsigned int l;
  int max_regno, regno;
  ira_allocno_t a;
  ira_loop_tree_node_t loop_tree_node;
  loop_p loop;
  ira_allocno_iterator ai;

  ira_assert (current_loops != NULL);
  max_regno = max_reg_num ();
  FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, l, loop)
    if (ira_loop_nodes[l].regno_allocno_map != NULL)
      {
        ira_free (ira_loop_nodes[l].regno_allocno_map);
        ira_loop_nodes[l].regno_allocno_map
          = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
                                            * max_regno);
        memset (ira_loop_nodes[l].regno_allocno_map, 0,
                sizeof (ira_allocno_t) * max_regno);
      }
  ira_free (ira_regno_allocno_map);
  ira_regno_allocno_map
    = (ira_allocno_t *) ira_allocate (max_regno * sizeof (ira_allocno_t));
  memset (ira_regno_allocno_map, 0, max_regno * sizeof (ira_allocno_t));
  FOR_EACH_ALLOCNO (a, ai)
    {
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
        /* Caps are not in the regno allocno maps.  */
        continue;
      regno = ALLOCNO_REGNO (a);
      loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
      ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
      ira_regno_allocno_map[regno] = a;
      if (loop_tree_node->regno_allocno_map[regno] == NULL)
        /* Remember that we can create temporary allocnos to break
           cycles in register shuffle.  */
        loop_tree_node->regno_allocno_map[regno] = a;
    }
}
/* Pools for allocnos, allocno live ranges and objects.  */
static alloc_pool allocno_pool, live_range_pool, object_pool;

/* Vec containing references to all created allocnos.  It is a
   container of array allocnos.  */
static VEC(ira_allocno_t,heap) *allocno_vec;

/* Vec containing references to all created ira_objects.  It is a
   container of ira_object_id_map.  */
static VEC(ira_object_t,heap) *ira_object_id_map_vec;

/* Initialize data concerning allocnos.  */
static void
initiate_allocnos (void)
{
  live_range_pool
    = create_alloc_pool ("live ranges",
                         sizeof (struct live_range), 100);
  allocno_pool
    = create_alloc_pool ("allocnos", sizeof (struct ira_allocno), 100);
  object_pool
    = create_alloc_pool ("objects", sizeof (struct ira_object), 100);
  allocno_vec = VEC_alloc (ira_allocno_t, heap, max_reg_num () * 2);
  ira_allocnos = NULL;
  ira_allocnos_num = 0;
  ira_objects_num = 0;
  ira_object_id_map_vec
    = VEC_alloc (ira_object_t, heap, max_reg_num () * 2);
  ira_object_id_map = NULL;
  ira_regno_allocno_map
    = (ira_allocno_t *) ira_allocate (max_reg_num ()
                                      * sizeof (ira_allocno_t));
  memset (ira_regno_allocno_map, 0, max_reg_num () * sizeof (ira_allocno_t));
}
/* Create and return an object corresponding to a new allocno A.  */
static ira_object_t
ira_create_object (ira_allocno_t a, int subword)
{
  enum reg_class aclass = ALLOCNO_CLASS (a);
  ira_object_t obj = (ira_object_t) pool_alloc (object_pool);

  OBJECT_ALLOCNO (obj) = a;
  OBJECT_SUBWORD (obj) = subword;
  OBJECT_CONFLICT_ID (obj) = ira_objects_num;
  OBJECT_CONFLICT_VEC_P (obj) = false;
  OBJECT_CONFLICT_ARRAY (obj) = NULL;
  OBJECT_NUM_CONFLICTS (obj) = 0;
  COPY_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), ira_no_alloc_regs);
  COPY_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), ira_no_alloc_regs);
  IOR_COMPL_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
                          reg_class_contents[aclass]);
  IOR_COMPL_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
                          reg_class_contents[aclass]);
  OBJECT_MIN (obj) = INT_MAX;
  OBJECT_MAX (obj) = -1;
  OBJECT_LIVE_RANGES (obj) = NULL;

  VEC_safe_push (ira_object_t, heap, ira_object_id_map_vec, obj);
  ira_object_id_map
    = VEC_address (ira_object_t, ira_object_id_map_vec);
  ira_objects_num = VEC_length (ira_object_t, ira_object_id_map_vec);

  return obj;
}
/* Create and return the allocno corresponding to REGNO in
   LOOP_TREE_NODE.  Add the allocno to the list of allocnos with the
   same regno if CAP_P is FALSE.  */
ira_allocno_t
ira_create_allocno (int regno, bool cap_p,
                    ira_loop_tree_node_t loop_tree_node)
{
  ira_allocno_t a;

  a = (ira_allocno_t) pool_alloc (allocno_pool);
  ALLOCNO_REGNO (a) = regno;
  ALLOCNO_LOOP_TREE_NODE (a) = loop_tree_node;
  if (! cap_p)
    {
      ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
      ira_regno_allocno_map[regno] = a;
      if (loop_tree_node->regno_allocno_map[regno] == NULL)
        /* Remember that we can create temporary allocnos to break
           cycles in register shuffle on region borders (see
           ira-emit.c).  */
        loop_tree_node->regno_allocno_map[regno] = a;
    }
  ALLOCNO_CAP (a) = NULL;
  ALLOCNO_CAP_MEMBER (a) = NULL;
  ALLOCNO_NUM (a) = ira_allocnos_num;
  bitmap_set_bit (loop_tree_node->all_allocnos, ALLOCNO_NUM (a));
  ALLOCNO_NREFS (a) = 0;
  ALLOCNO_FREQ (a) = 0;
  ALLOCNO_HARD_REGNO (a) = -1;
  ALLOCNO_CALL_FREQ (a) = 0;
  ALLOCNO_CALLS_CROSSED_NUM (a) = 0;
  ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a) = 0;
#ifdef STACK_REGS
  ALLOCNO_NO_STACK_REG_P (a) = false;
  ALLOCNO_TOTAL_NO_STACK_REG_P (a) = false;
#endif
  ALLOCNO_DONT_REASSIGN_P (a) = false;
  ALLOCNO_BAD_SPILL_P (a) = false;
  ALLOCNO_ASSIGNED_P (a) = false;
  ALLOCNO_MODE (a) = (regno < 0 ? VOIDmode : PSEUDO_REGNO_MODE (regno));
  ALLOCNO_COPIES (a) = NULL;
  ALLOCNO_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_CLASS (a) = NO_REGS;
  ALLOCNO_UPDATED_CLASS_COST (a) = 0;
  ALLOCNO_CLASS_COST (a) = 0;
  ALLOCNO_MEMORY_COST (a) = 0;
  ALLOCNO_UPDATED_MEMORY_COST (a) = 0;
  ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) = 0;
  ALLOCNO_NUM_OBJECTS (a) = 0;

  ALLOCNO_ADD_DATA (a) = NULL;
  VEC_safe_push (ira_allocno_t, heap, allocno_vec, a);
  ira_allocnos = VEC_address (ira_allocno_t, allocno_vec);
  ira_allocnos_num = VEC_length (ira_allocno_t, allocno_vec);

  return a;
}
/* Set up register class for A and update its conflict hard
   registers.  */
void
ira_set_allocno_class (ira_allocno_t a, enum reg_class aclass)
{
  ira_allocno_object_iterator oi;
  ira_object_t obj;

  ALLOCNO_CLASS (a) = aclass;
  FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
    {
      IOR_COMPL_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
                              reg_class_contents[aclass]);
      IOR_COMPL_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
                              reg_class_contents[aclass]);
    }
}
/* Determine the number of objects we should associate with allocno A
   and allocate them.  */
void
ira_create_allocno_objects (ira_allocno_t a)
{
  enum machine_mode mode = ALLOCNO_MODE (a);
  enum reg_class aclass = ALLOCNO_CLASS (a);
  int n = ira_reg_class_max_nregs[aclass][mode];
  int i;

  if (GET_MODE_SIZE (mode) != 2 * UNITS_PER_WORD || n != 2)
    n = 1;

  ALLOCNO_NUM_OBJECTS (a) = n;
  for (i = 0; i < n; i++)
    ALLOCNO_OBJECT (a, i) = ira_create_object (a, i);
}
/* For each allocno, set ALLOCNO_NUM_OBJECTS and create the
   ALLOCNO_OBJECT structures.  This must be called after the allocno
   classes are known.  */
static void
create_allocno_objects (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;

  FOR_EACH_ALLOCNO (a, ai)
    ira_create_allocno_objects (a);
}
/* Merge hard register conflict information for all objects associated with
   allocno FROM into the corresponding objects associated with TO.
   If TOTAL_ONLY is true, we only merge OBJECT_TOTAL_CONFLICT_HARD_REGS.  */
static void
merge_hard_reg_conflicts (ira_allocno_t from, ira_allocno_t to,
                          bool total_only)
{
  int i;

  gcc_assert (ALLOCNO_NUM_OBJECTS (to) == ALLOCNO_NUM_OBJECTS (from));
  for (i = 0; i < ALLOCNO_NUM_OBJECTS (to); i++)
    {
      ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
      ira_object_t to_obj = ALLOCNO_OBJECT (to, i);

      if (!total_only)
        IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (to_obj),
                          OBJECT_CONFLICT_HARD_REGS (from_obj));
      IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (to_obj),
                        OBJECT_TOTAL_CONFLICT_HARD_REGS (from_obj));
    }
#ifdef STACK_REGS
  if (!total_only && ALLOCNO_NO_STACK_REG_P (from))
    ALLOCNO_NO_STACK_REG_P (to) = true;
  if (ALLOCNO_TOTAL_NO_STACK_REG_P (from))
    ALLOCNO_TOTAL_NO_STACK_REG_P (to) = true;
#endif
}
/* Update hard register conflict information for all objects associated with
   A to include the regs in SET.  */
void
ior_hard_reg_conflicts (ira_allocno_t a, HARD_REG_SET *set)
{
  ira_allocno_object_iterator i;
  ira_object_t obj;

  FOR_EACH_ALLOCNO_OBJECT (a, obj, i)
    {
      IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), *set);
      IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), *set);
    }
}
/* Return TRUE if a conflict vector with NUM elements is more
   profitable than a conflict bit vector for OBJ.  */
bool
ira_conflict_vector_profitable_p (ira_object_t obj, int num)
{
  int nw;
  int max = OBJECT_MAX (obj);
  int min = OBJECT_MIN (obj);

  if (max < min)
    /* We prefer a bit vector in such case because it does not result
       in allocation.  */
    return false;

  nw = (max - min + IRA_INT_BITS) / IRA_INT_BITS;
  return (2 * sizeof (ira_object_t) * (num + 1)
          < 3 * nw * sizeof (IRA_INT_TYPE));
}
/* Allocate and initialize the conflict vector of OBJ for NUM
   conflicting objects.  */
void
ira_allocate_conflict_vec (ira_object_t obj, int num)
{
  int size;
  ira_object_t *vec;

  ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
  num++; /* for NULL end marker  */
  size = sizeof (ira_object_t) * num;
  OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
  vec = (ira_object_t *) OBJECT_CONFLICT_ARRAY (obj);
  vec[0] = NULL;
  OBJECT_NUM_CONFLICTS (obj) = 0;
  OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
  OBJECT_CONFLICT_VEC_P (obj) = true;
}

/* Allocate and initialize the conflict bit vector of OBJ.  */
static void
allocate_conflict_bit_vec (ira_object_t obj)
{
  unsigned int size;

  ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
  size = ((OBJECT_MAX (obj) - OBJECT_MIN (obj) + IRA_INT_BITS)
          / IRA_INT_BITS * sizeof (IRA_INT_TYPE));
  OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
  memset (OBJECT_CONFLICT_ARRAY (obj), 0, size);
  OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
  OBJECT_CONFLICT_VEC_P (obj) = false;
}
/* Allocate and initialize the conflict vector or conflict bit vector
   of OBJ for NUM conflicting allocnos, whichever is more profitable.  */
void
ira_allocate_object_conflicts (ira_object_t obj, int num)
{
  if (ira_conflict_vector_profitable_p (obj, num))
    ira_allocate_conflict_vec (obj, num);
  else
    allocate_conflict_bit_vec (obj);
}
/* Add OBJ2 to the conflicts of OBJ1.  */
static void
add_to_conflicts (ira_object_t obj1, ira_object_t obj2)
{
  int num;
  size_t size;

  if (OBJECT_CONFLICT_VEC_P (obj1))
    {
      ira_object_t *vec = OBJECT_CONFLICT_VEC (obj1);
      int curr_num = OBJECT_NUM_CONFLICTS (obj1);

      num = curr_num + 2;
      if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < num * sizeof (ira_object_t))
        {
          ira_object_t *newvec;

          size = (3 * num / 2 + 1) * sizeof (ira_allocno_t);
          newvec = (ira_object_t *) ira_allocate (size);
          memcpy (newvec, vec, curr_num * sizeof (ira_object_t));
          ira_free (vec);
          vec = newvec;
          OBJECT_CONFLICT_ARRAY (obj1) = vec;
          OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
        }
      vec[num - 2] = obj2;
      vec[num - 1] = NULL;
      OBJECT_NUM_CONFLICTS (obj1)++;
    }
  else
    {
      int nw, added_head_nw, id;
      IRA_INT_TYPE *vec = OBJECT_CONFLICT_BITVEC (obj1);

      id = OBJECT_CONFLICT_ID (obj2);
      if (OBJECT_MIN (obj1) > id)
        {
          /* Expand head of the bit vector.  */
          added_head_nw = (OBJECT_MIN (obj1) - id - 1) / IRA_INT_BITS + 1;
          nw = (OBJECT_MAX (obj1) - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
          size = (nw + added_head_nw) * sizeof (IRA_INT_TYPE);
          if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) >= size)
            {
              memmove ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
                       vec, nw * sizeof (IRA_INT_TYPE));
              memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
            }
          else
            {
              size
                = (3 * (nw + added_head_nw) / 2 + 1) * sizeof (IRA_INT_TYPE);
              vec = (IRA_INT_TYPE *) ira_allocate (size);
              memcpy ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
                      OBJECT_CONFLICT_ARRAY (obj1),
                      nw * sizeof (IRA_INT_TYPE));
              memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
              memset ((char *) vec
                      + (nw + added_head_nw) * sizeof (IRA_INT_TYPE),
                      0, size - (nw + added_head_nw) * sizeof (IRA_INT_TYPE));
              ira_free (OBJECT_CONFLICT_ARRAY (obj1));
              OBJECT_CONFLICT_ARRAY (obj1) = vec;
              OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
            }
          OBJECT_MIN (obj1) -= added_head_nw * IRA_INT_BITS;
        }
      else if (OBJECT_MAX (obj1) < id)
        {
          nw = (id - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
          size = nw * sizeof (IRA_INT_TYPE);
          if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < size)
            {
              /* Expand tail of the bit vector.  */
              size = (3 * nw / 2 + 1) * sizeof (IRA_INT_TYPE);
              vec = (IRA_INT_TYPE *) ira_allocate (size);
              memcpy (vec, OBJECT_CONFLICT_ARRAY (obj1),
                      OBJECT_CONFLICT_ARRAY_SIZE (obj1));
              memset ((char *) vec + OBJECT_CONFLICT_ARRAY_SIZE (obj1),
                      0, size - OBJECT_CONFLICT_ARRAY_SIZE (obj1));
              ira_free (OBJECT_CONFLICT_ARRAY (obj1));
              OBJECT_CONFLICT_ARRAY (obj1) = vec;
              OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
            }
          OBJECT_MAX (obj1) = id;
        }
      SET_MINMAX_SET_BIT (vec, id, OBJECT_MIN (obj1), OBJECT_MAX (obj1));
    }
}
/* Add OBJ1 to the conflicts of OBJ2 and vice versa.  */
void
ira_add_conflict (ira_object_t obj1, ira_object_t obj2)
{
  add_to_conflicts (obj1, obj2);
  add_to_conflicts (obj2, obj1);
}

/* Clear all conflicts of OBJ.  */
static void
clear_conflicts (ira_object_t obj)
{
  if (OBJECT_CONFLICT_VEC_P (obj))
    {
      OBJECT_NUM_CONFLICTS (obj) = 0;
      OBJECT_CONFLICT_VEC (obj)[0] = NULL;
    }
  else if (OBJECT_CONFLICT_ARRAY_SIZE (obj) != 0)
    {
      int nw;

      nw = (OBJECT_MAX (obj) - OBJECT_MIN (obj)) / IRA_INT_BITS + 1;
      memset (OBJECT_CONFLICT_BITVEC (obj), 0, nw * sizeof (IRA_INT_TYPE));
    }
}
/* The array used to find duplications in conflict vectors of
   allocnos.  */
static int *conflict_check;

/* The value used to mark allocation presence in conflict vector of
   the current allocno.  */
static int curr_conflict_check_tick;

/* Remove duplications in conflict vector of OBJ.  */
static void
compress_conflict_vec (ira_object_t obj)
{
  ira_object_t *vec, conflict_obj;
  int i, j;

  ira_assert (OBJECT_CONFLICT_VEC_P (obj));
  vec = OBJECT_CONFLICT_VEC (obj);
  curr_conflict_check_tick++;
  for (i = j = 0; (conflict_obj = vec[i]) != NULL; i++)
    {
      int id = OBJECT_CONFLICT_ID (conflict_obj);
      if (conflict_check[id] != curr_conflict_check_tick)
        {
          conflict_check[id] = curr_conflict_check_tick;
          vec[j++] = conflict_obj;
        }
    }
  OBJECT_NUM_CONFLICTS (obj) = j;
  vec[j] = NULL;
}

/* Remove duplications in conflict vectors of all allocnos.  */
static void
compress_conflict_vecs (void)
{
  ira_object_t obj;
  ira_object_iterator oi;

  conflict_check = (int *) ira_allocate (sizeof (int) * ira_objects_num);
  memset (conflict_check, 0, sizeof (int) * ira_objects_num);
  curr_conflict_check_tick = 0;
  FOR_EACH_OBJECT (obj, oi)
    {
      if (OBJECT_CONFLICT_VEC_P (obj))
        compress_conflict_vec (obj);
    }
  ira_free (conflict_check);
}
/* This recursive function outputs allocno A and if it is a cap the
   function outputs its members.  */
void
ira_print_expanded_allocno (ira_allocno_t a)
{
  basic_block bb;

  fprintf (ira_dump_file, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
    fprintf (ira_dump_file, ",b%d", bb->index);
  else
    fprintf (ira_dump_file, ",l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop_num);
  if (ALLOCNO_CAP_MEMBER (a) != NULL)
    {
      fprintf (ira_dump_file, ":");
      ira_print_expanded_allocno (ALLOCNO_CAP_MEMBER (a));
    }
  fprintf (ira_dump_file, ")");
}
/* Create and return the cap representing allocno A in the
   parent loop.  */
static ira_allocno_t
create_cap_allocno (ira_allocno_t a)
{
  ira_allocno_t cap;
  ira_loop_tree_node_t parent;
  enum reg_class aclass;

  parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
  cap = ira_create_allocno (ALLOCNO_REGNO (a), true, parent);
  ALLOCNO_MODE (cap) = ALLOCNO_MODE (a);
  aclass = ALLOCNO_CLASS (a);
  ira_set_allocno_class (cap, aclass);
  ira_create_allocno_objects (cap);
  ALLOCNO_CAP_MEMBER (cap) = a;
  ALLOCNO_CAP (a) = cap;
  ALLOCNO_CLASS_COST (cap) = ALLOCNO_CLASS_COST (a);
  ALLOCNO_MEMORY_COST (cap) = ALLOCNO_MEMORY_COST (a);
  ira_allocate_and_copy_costs
    (&ALLOCNO_HARD_REG_COSTS (cap), aclass, ALLOCNO_HARD_REG_COSTS (a));
  ira_allocate_and_copy_costs
    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (cap), aclass,
     ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
  ALLOCNO_BAD_SPILL_P (cap) = ALLOCNO_BAD_SPILL_P (a);
  ALLOCNO_NREFS (cap) = ALLOCNO_NREFS (a);
  ALLOCNO_FREQ (cap) = ALLOCNO_FREQ (a);
  ALLOCNO_CALL_FREQ (cap) = ALLOCNO_CALL_FREQ (a);

  merge_hard_reg_conflicts (a, cap, false);

  ALLOCNO_CALLS_CROSSED_NUM (cap) = ALLOCNO_CALLS_CROSSED_NUM (a);
  ALLOCNO_CHEAP_CALLS_CROSSED_NUM (cap) = ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    {
      fprintf (ira_dump_file, "    Creating cap ");
      ira_print_expanded_allocno (cap);
      fprintf (ira_dump_file, "\n");
    }
  return cap;
}
/* Create and return a live range for OBJECT with given attributes.  */
live_range_t
ira_create_live_range (ira_object_t obj, int start, int finish,
                       live_range_t next)
{
  live_range_t p;

  p = (live_range_t) pool_alloc (live_range_pool);
  p->object = obj;
  p->start = start;
  p->finish = finish;
  p->next = next;
  return p;
}

/* Create a new live range for OBJECT and queue it at the head of its
   live range list.  */
void
ira_add_live_range_to_object (ira_object_t object, int start, int finish)
{
  live_range_t p;

  p = ira_create_live_range (object, start, finish,
                             OBJECT_LIVE_RANGES (object));
  OBJECT_LIVE_RANGES (object) = p;
}

/* Copy allocno live range R and return the result.  */
static live_range_t
copy_live_range (live_range_t r)
{
  live_range_t p;

  p = (live_range_t) pool_alloc (live_range_pool);
  *p = *r;
  return p;
}

/* Copy allocno live range list given by its head R and return the
   result.  */
live_range_t
ira_copy_live_range_list (live_range_t r)
{
  live_range_t p, first, last;

  if (r == NULL)
    return NULL;
  for (first = last = NULL; r != NULL; r = r->next)
    {
      p = copy_live_range (r);
      if (first == NULL)
        first = p;
      else
        last->next = p;
      last = p;
    }
  return first;
}
/* Merge ranges R1 and R2 and return the result.  The function
   maintains the order of ranges and tries to minimize the number of
   result ranges.  */
live_range_t
ira_merge_live_ranges (live_range_t r1, live_range_t r2)
{
  live_range_t first, last, temp;

  if (r1 == NULL)
    return r2;
  if (r2 == NULL)
    return r1;
  for (first = last = NULL; r1 != NULL && r2 != NULL;)
    {
      if (r1->start < r2->start)
        {
          temp = r1;
          r1 = r2;
          r2 = temp;
        }
      if (r1->start <= r2->finish + 1)
        {
          /* Intersected ranges: merge r1 and r2 into r1.  */
          r1->start = r2->start;
          if (r1->finish < r2->finish)
            r1->finish = r2->finish;
          temp = r2;
          r2 = r2->next;
          ira_finish_live_range (temp);
          if (r2 == NULL)
            {
              /* To try to merge with subsequent ranges in r1.  */
              r2 = r1->next;
              r1->next = NULL;
            }
        }
      else
        {
          /* Add r1 to the result.  */
          if (first == NULL)
            first = last = r1;
          else
            {
              last->next = r1;
              last = r1;
            }
          r1 = r1->next;
          if (r1 == NULL)
            {
              /* To try to merge with subsequent ranges in r2.  */
              r1 = r2->next;
              r2->next = NULL;
            }
        }
    }
  if (r1 != NULL)
    {
      if (first == NULL)
        first = r1;
      else
        last->next = r1;
      ira_assert (r1->next == NULL);
    }
  else if (r2 != NULL)
    {
      if (first == NULL)
        first = r2;
      else
        last->next = r2;
      ira_assert (r2->next == NULL);
    }
  else
    ira_assert (last->next == NULL);
  return first;
}
/* Return TRUE if live ranges R1 and R2 intersect.  */
bool
ira_live_ranges_intersect_p (live_range_t r1, live_range_t r2)
{
  /* Remember the live ranges are always kept ordered.  */
  while (r1 != NULL && r2 != NULL)
    {
      if (r1->start > r2->finish)
        r1 = r1->next;
      else if (r2->start > r1->finish)
        r2 = r2->next;
      else
        return true;
    }
  return false;
}

/* Free allocno live range R.  */
void
ira_finish_live_range (live_range_t r)
{
  pool_free (live_range_pool, r);
}

/* Free list of allocno live ranges starting with R.  */
void
ira_finish_live_range_list (live_range_t r)
{
  live_range_t next_r;

  for (; r != NULL; r = next_r)
    {
      next_r = r->next;
      ira_finish_live_range (r);
    }
}

/* Free updated register costs of allocno A.  */
void
ira_free_allocno_updated_costs (ira_allocno_t a)
{
  enum reg_class aclass;

  aclass = ALLOCNO_CLASS (a);
  if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), aclass);
  ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
  if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
                          aclass);
  ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
}
/* Free and nullify all cost vectors allocated earlier for allocno
   A.  */
static void
ira_free_allocno_costs (ira_allocno_t a)
{
  enum reg_class aclass = ALLOCNO_CLASS (a);
  ira_object_t obj;
  ira_allocno_object_iterator oi;

  FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
    {
      ira_finish_live_range_list (OBJECT_LIVE_RANGES (obj));
      ira_object_id_map[OBJECT_CONFLICT_ID (obj)] = NULL;
      if (OBJECT_CONFLICT_ARRAY (obj) != NULL)
        ira_free (OBJECT_CONFLICT_ARRAY (obj));
      pool_free (object_pool, obj);
    }

  ira_allocnos[ALLOCNO_NUM (a)] = NULL;
  if (ALLOCNO_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_HARD_REG_COSTS (a), aclass);
  if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_CONFLICT_HARD_REG_COSTS (a), aclass);
  if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), aclass);
  if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
                          aclass);
  ALLOCNO_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
}

/* Free the memory allocated for allocno A.  */
static void
finish_allocno (ira_allocno_t a)
{
  ira_free_allocno_costs (a);
  pool_free (allocno_pool, a);
}

/* Free the memory allocated for all allocnos.  */
static void
finish_allocnos (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;

  FOR_EACH_ALLOCNO (a, ai)
    finish_allocno (a);
  ira_free (ira_regno_allocno_map);
  VEC_free (ira_object_t, heap, ira_object_id_map_vec);
  VEC_free (ira_allocno_t, heap, allocno_vec);
  free_alloc_pool (allocno_pool);
  free_alloc_pool (object_pool);
  free_alloc_pool (live_range_pool);
}
/* Pool for copies.  */
static alloc_pool copy_pool;

/* Vec containing references to all created copies.  It is a
   container of array ira_copies.  */
static VEC(ira_copy_t,heap) *copy_vec;

/* The function initializes data concerning allocno copies.  */
static void
initiate_copies (void)
{
  copy_pool
    = create_alloc_pool ("copies", sizeof (struct ira_allocno_copy), 100);
  copy_vec = VEC_alloc (ira_copy_t, heap, get_max_uid ());
  ira_copies = NULL;
  ira_copies_num = 0;
}
/* Return copy connecting A1 and A2 and originated from INSN of
   LOOP_TREE_NODE if any.  */
static ira_copy_t
find_allocno_copy (ira_allocno_t a1, ira_allocno_t a2, rtx insn,
                   ira_loop_tree_node_t loop_tree_node)
{
  ira_copy_t cp, next_cp;
  ira_allocno_t another_a;

  for (cp = ALLOCNO_COPIES (a1); cp != NULL; cp = next_cp)
    {
      if (cp->first == a1)
        {
          next_cp = cp->next_first_allocno_copy;
          another_a = cp->second;
        }
      else if (cp->second == a1)
        {
          next_cp = cp->next_second_allocno_copy;
          another_a = cp->first;
        }
      else
        gcc_unreachable ();
      if (another_a == a2 && cp->insn == insn
          && cp->loop_tree_node == loop_tree_node)
        return cp;
    }
  return NULL;
}
/* Create and return copy with given attributes LOOP_TREE_NODE, FIRST,
   SECOND, FREQ, CONSTRAINT_P, and INSN.  */
ira_copy_t
ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq,
                 bool constraint_p, rtx insn,
                 ira_loop_tree_node_t loop_tree_node)
{
  ira_copy_t cp;

  cp = (ira_copy_t) pool_alloc (copy_pool);
  cp->num = ira_copies_num;
  cp->first = first;
  cp->second = second;
  cp->freq = freq;
  cp->constraint_p = constraint_p;
  cp->insn = insn;
  cp->loop_tree_node = loop_tree_node;
  VEC_safe_push (ira_copy_t, heap, copy_vec, cp);
  ira_copies = VEC_address (ira_copy_t, copy_vec);
  ira_copies_num = VEC_length (ira_copy_t, copy_vec);

  return cp;
}
/* Attach a copy CP to the allocnos involved in the copy.  */
static void
ira_add_allocno_copy_to_list (ira_copy_t cp)
{
  ira_allocno_t first = cp->first, second = cp->second;

  cp->prev_first_allocno_copy = NULL;
  cp->prev_second_allocno_copy = NULL;
  cp->next_first_allocno_copy = ALLOCNO_COPIES (first);
  if (cp->next_first_allocno_copy != NULL)
    {
      if (cp->next_first_allocno_copy->first == first)
        cp->next_first_allocno_copy->prev_first_allocno_copy = cp;
      else
        cp->next_first_allocno_copy->prev_second_allocno_copy = cp;
    }
  cp->next_second_allocno_copy = ALLOCNO_COPIES (second);
  if (cp->next_second_allocno_copy != NULL)
    {
      if (cp->next_second_allocno_copy->second == second)
        cp->next_second_allocno_copy->prev_second_allocno_copy = cp;
      else
        cp->next_second_allocno_copy->prev_first_allocno_copy = cp;
    }
  ALLOCNO_COPIES (first) = cp;
  ALLOCNO_COPIES (second) = cp;
}

/* Make copy CP a canonical copy where the number of the first
   allocno is less than that of the second one.  */
static void
ira_swap_allocno_copy_ends_if_necessary (ira_copy_t cp)
{
  ira_allocno_t temp;
  ira_copy_t temp_cp;

  if (ALLOCNO_NUM (cp->first) <= ALLOCNO_NUM (cp->second))
    return;

  temp = cp->first;
  cp->first = cp->second;
  cp->second = temp;

  temp_cp = cp->prev_first_allocno_copy;
  cp->prev_first_allocno_copy = cp->prev_second_allocno_copy;
  cp->prev_second_allocno_copy = temp_cp;

  temp_cp = cp->next_first_allocno_copy;
  cp->next_first_allocno_copy = cp->next_second_allocno_copy;
  cp->next_second_allocno_copy = temp_cp;
}
/* Create (or update frequency if the copy already exists) and return
   the copy of allocnos FIRST and SECOND with frequency FREQ
   corresponding to move insn INSN (if any) and originated from
   LOOP_TREE_NODE.  */
ira_copy_t
ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
                      bool constraint_p, rtx insn,
                      ira_loop_tree_node_t loop_tree_node)
{
  ira_copy_t cp;

  if ((cp = find_allocno_copy (first, second, insn, loop_tree_node)) != NULL)
    {
      cp->freq += freq;
      return cp;
    }
  cp = ira_create_copy (first, second, freq, constraint_p, insn,
                        loop_tree_node);
  ira_assert (first != NULL && second != NULL);
  ira_add_allocno_copy_to_list (cp);
  ira_swap_allocno_copy_ends_if_necessary (cp);
  return cp;
}
/* Print info about copy CP into file F.  */
static void
print_copy (FILE *f, ira_copy_t cp)
{
  fprintf (f, "  cp%d:a%d(r%d)<->a%d(r%d)@%d:%s\n", cp->num,
           ALLOCNO_NUM (cp->first), ALLOCNO_REGNO (cp->first),
           ALLOCNO_NUM (cp->second), ALLOCNO_REGNO (cp->second), cp->freq,
           cp->insn != NULL
           ? "move" : cp->constraint_p ? "constraint" : "shuffle");
}

/* Print info about copy CP into stderr.  */
void
ira_debug_copy (ira_copy_t cp)
{
  print_copy (stderr, cp);
}

/* Print info about all copies into file F.  */
static void
print_copies (FILE *f)
{
  ira_copy_t cp;
  ira_copy_iterator ci;

  FOR_EACH_COPY (cp, ci)
    print_copy (f, cp);
}

/* Print info about all copies into stderr.  */
void
ira_debug_copies (void)
{
  print_copies (stderr);
}

/* Print info about copies involving allocno A into file F.  */
static void
print_allocno_copies (FILE *f, ira_allocno_t a)
{
  ira_allocno_t another_a;
  ira_copy_t cp, next_cp;

  fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
  for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
    {
      if (cp->first == a)
        {
          next_cp = cp->next_first_allocno_copy;
          another_a = cp->second;
        }
      else if (cp->second == a)
        {
          next_cp = cp->next_second_allocno_copy;
          another_a = cp->first;
        }
      else
        gcc_unreachable ();
      fprintf (f, " cp%d:a%d(r%d)@%d", cp->num,
               ALLOCNO_NUM (another_a), ALLOCNO_REGNO (another_a), cp->freq);
    }
  fprintf (f, "\n");
}

/* Print info about copies involving allocno A into stderr.  */
void
ira_debug_allocno_copies (ira_allocno_t a)
{
  print_allocno_copies (stderr, a);
}
/* The function frees memory allocated for copy CP.  */
static void
finish_copy (ira_copy_t cp)
{
  pool_free (copy_pool, cp);
}

/* Free memory allocated for all copies.  */
static void
finish_copies (void)
{
  ira_copy_t cp;
  ira_copy_iterator ci;

  FOR_EACH_COPY (cp, ci)
    finish_copy (cp);
  VEC_free (ira_copy_t, heap, copy_vec);
  free_alloc_pool (copy_pool);
}
/* Pools for cost vectors.  It is defined only for allocno classes.  */
static alloc_pool cost_vector_pool[N_REG_CLASSES];

/* The function initiates work with hard register cost vectors.  It
   creates allocation pool for each allocno class.  */
static void
initiate_cost_vectors (void)
{
  int i;
  enum reg_class aclass;

  for (i = 0; i < ira_allocno_classes_num; i++)
    {
      aclass = ira_allocno_classes[i];
      cost_vector_pool[aclass]
        = create_alloc_pool ("cost vectors",
                             sizeof (int) * ira_class_hard_regs_num[aclass],
                             100);
    }
}

/* Allocate and return a cost vector VEC for ACLASS.  */
int *
ira_allocate_cost_vector (reg_class_t aclass)
{
  return (int *) pool_alloc (cost_vector_pool[(int) aclass]);
}

/* Free a cost vector VEC for ACLASS.  */
void
ira_free_cost_vector (int *vec, reg_class_t aclass)
{
  ira_assert (vec != NULL);
  pool_free (cost_vector_pool[(int) aclass], vec);
}

/* Finish work with hard register cost vectors.  Release allocation
   pool for each allocno class.  */
static void
finish_cost_vectors (void)
{
  int i;
  enum reg_class aclass;

  for (i = 0; i < ira_allocno_classes_num; i++)
    {
      aclass = ira_allocno_classes[i];
      free_alloc_pool (cost_vector_pool[aclass]);
    }
}
/* Compute a post-ordering of the reverse control flow of the loop body
   designated by the children nodes of LOOP_NODE, whose body nodes in
   pre-order are input as LOOP_PREORDER.  Return a VEC with a post-order
   of the reverse loop body.

   For the post-order of the reverse CFG, we visit the basic blocks in
   LOOP_PREORDER array in the reverse order of where they appear.
   This is important: We do not just want to compute a post-order of
   the reverse CFG, we want to make a best-guess for a visiting order that
   minimizes the number of chain elements per allocno live range.  If the
   blocks would be visited in a different order, we would still compute a
   correct post-ordering but it would be less likely that two nodes
   connected by an edge in the CFG are neighbours in the topsort.  */

static VEC (ira_loop_tree_node_t, heap) *
ira_loop_tree_body_rev_postorder (ira_loop_tree_node_t loop_node ATTRIBUTE_UNUSED,
                                  VEC (ira_loop_tree_node_t, heap) *loop_preorder)
{
  VEC (ira_loop_tree_node_t, heap) *topsort_nodes = NULL;
  unsigned int n_loop_preorder;

  n_loop_preorder = VEC_length (ira_loop_tree_node_t, loop_preorder);
  if (n_loop_preorder != 0)
    {
      ira_loop_tree_node_t subloop_node;
      unsigned int i;
      VEC (ira_loop_tree_node_t, heap) *dfs_stack;

      /* This is a bit of strange abuse of the BB_VISITED flag:  We use
         the flag to mark blocks we still have to visit to add them to
         our post-order.  Define an alias to avoid confusion.  */
#define BB_TO_VISIT BB_VISITED

      FOR_EACH_VEC_ELT (ira_loop_tree_node_t, loop_preorder, i, subloop_node)
        {
          gcc_checking_assert (! (subloop_node->bb->flags & BB_TO_VISIT));
          subloop_node->bb->flags |= BB_TO_VISIT;
        }

      topsort_nodes = VEC_alloc (ira_loop_tree_node_t, heap, n_loop_preorder);
      dfs_stack = VEC_alloc (ira_loop_tree_node_t, heap, n_loop_preorder);

      FOR_EACH_VEC_ELT_REVERSE (ira_loop_tree_node_t, loop_preorder,
                                i, subloop_node)
        {
          if (! (subloop_node->bb->flags & BB_TO_VISIT))
            continue;

          subloop_node->bb->flags &= ~BB_TO_VISIT;
          VEC_quick_push (ira_loop_tree_node_t, dfs_stack, subloop_node);
          while (! VEC_empty (ira_loop_tree_node_t, dfs_stack))
            {
              edge e;
              edge_iterator ei;

              ira_loop_tree_node_t n = VEC_last (ira_loop_tree_node_t,
                                                 dfs_stack);
              FOR_EACH_EDGE (e, ei, n->bb->preds)
                {
                  ira_loop_tree_node_t pred_node;
                  basic_block pred_bb = e->src;

                  if (e->src == ENTRY_BLOCK_PTR)
                    continue;

                  pred_node = IRA_BB_NODE_BY_INDEX (pred_bb->index);
                  if (pred_node != n
                      && (pred_node->bb->flags & BB_TO_VISIT))
                    {
                      pred_node->bb->flags &= ~BB_TO_VISIT;
                      VEC_quick_push (ira_loop_tree_node_t, dfs_stack,
                                      pred_node);
                    }
                }
              if (n == VEC_last (ira_loop_tree_node_t, dfs_stack))
                {
                  VEC_pop (ira_loop_tree_node_t, dfs_stack);
                  VEC_quick_push (ira_loop_tree_node_t, topsort_nodes, n);
                }
            }
        }

#undef BB_TO_VISIT
      VEC_free (ira_loop_tree_node_t, heap, dfs_stack);
    }

  gcc_assert (VEC_length (ira_loop_tree_node_t, topsort_nodes)
              == n_loop_preorder);
  return topsort_nodes;
}
/* The current loop tree node and its regno allocno map.  */
ira_loop_tree_node_t ira_curr_loop_tree_node;
ira_allocno_t *ira_curr_regno_allocno_map;

/* This recursive function traverses the loop tree with root LOOP_NODE,
   calling non-null functions PREORDER_FUNC and POSTORDER_FUNC
   correspondingly in preorder and postorder.  The function sets up
   IRA_CURR_LOOP_TREE_NODE and IRA_CURR_REGNO_ALLOCNO_MAP.  If BB_P,
   basic block nodes of LOOP_NODE are also processed (before its
   subloop nodes).

   If BB_P is set and POSTORDER_FUNC is given, the basic blocks in
   the loop are passed in the *reverse* post-order of the *reverse*
   CFG.  This is only used by ira_create_allocno_live_ranges, which
   wants to visit basic blocks in this order to minimize the number
   of elements per live range chain.
   Note that the loop tree nodes are still visited in the normal,
   forward post-order of the loop tree.  */

void
ira_traverse_loop_tree (bool bb_p, ira_loop_tree_node_t loop_node,
                        void (*preorder_func) (ira_loop_tree_node_t),
                        void (*postorder_func) (ira_loop_tree_node_t))
{
  ira_loop_tree_node_t subloop_node;

  ira_assert (loop_node->bb == NULL);
  ira_curr_loop_tree_node = loop_node;
  ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;

  if (preorder_func != NULL)
    (*preorder_func) (loop_node);

  if (bb_p)
    {
      VEC (ira_loop_tree_node_t, heap) *loop_preorder = NULL;
      unsigned int i;

      /* Add all nodes to the set of nodes to visit.  The IRA loop tree
         is set up such that nodes in the loop body appear in a pre-order
         of their place in the CFG.  */
      for (subloop_node = loop_node->children;
           subloop_node != NULL;
           subloop_node = subloop_node->next)
        if (subloop_node->bb != NULL)
          VEC_safe_push (ira_loop_tree_node_t, heap,
                         loop_preorder, subloop_node);

      if (preorder_func != NULL)
        FOR_EACH_VEC_ELT (ira_loop_tree_node_t, loop_preorder, i,
                          subloop_node)
          (*preorder_func) (subloop_node);

      if (postorder_func != NULL)
        {
          VEC (ira_loop_tree_node_t, heap) *loop_rev_postorder =
            ira_loop_tree_body_rev_postorder (loop_node, loop_preorder);
          FOR_EACH_VEC_ELT_REVERSE (ira_loop_tree_node_t, loop_rev_postorder,
                                    i, subloop_node)
            (*postorder_func) (subloop_node);
          VEC_free (ira_loop_tree_node_t, heap, loop_rev_postorder);
        }

      VEC_free (ira_loop_tree_node_t, heap, loop_preorder);
    }

  for (subloop_node = loop_node->subloops;
       subloop_node != NULL;
       subloop_node = subloop_node->subloop_next)
    {
      ira_assert (subloop_node->bb == NULL);
      ira_traverse_loop_tree (bb_p, subloop_node,
                              preorder_func, postorder_func);
    }

  ira_curr_loop_tree_node = loop_node;
  ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;

  if (postorder_func != NULL)
    (*postorder_func) (loop_node);
}
/* The basic block currently being processed.  */
static basic_block curr_bb;

/* This recursive function creates allocnos corresponding to
   pseudo-registers contained in X.  True OUTPUT_P means that X is
   an lvalue.  */
static void
create_insn_allocnos (rtx x, bool output_p)
{
  int i, j;
  const char *fmt;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      int regno;

      if ((regno = REGNO (x)) >= FIRST_PSEUDO_REGISTER)
        {
          ira_allocno_t a;

          if ((a = ira_curr_regno_allocno_map[regno]) == NULL)
            a = ira_create_allocno (regno, false, ira_curr_loop_tree_node);

          ALLOCNO_NREFS (a)++;
          ALLOCNO_FREQ (a) += REG_FREQ_FROM_BB (curr_bb);
          if (output_p)
            bitmap_set_bit (ira_curr_loop_tree_node->modified_regnos, regno);
        }
      return;
    }
  else if (code == SET)
    {
      create_insn_allocnos (SET_DEST (x), true);
      create_insn_allocnos (SET_SRC (x), false);
      return;
    }
  else if (code == CLOBBER)
    {
      create_insn_allocnos (XEXP (x, 0), true);
      return;
    }
  else if (code == MEM)
    {
      create_insn_allocnos (XEXP (x, 0), false);
      return;
    }
  else if (code == PRE_DEC || code == POST_DEC || code == PRE_INC ||
           code == POST_INC || code == POST_MODIFY || code == PRE_MODIFY)
    {
      create_insn_allocnos (XEXP (x, 0), true);
      create_insn_allocnos (XEXP (x, 0), false);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        create_insn_allocnos (XEXP (x, i), output_p);
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          create_insn_allocnos (XVECEXP (x, i, j), output_p);
    }
}
/* Create allocnos corresponding to pseudo-registers living in the
   basic block represented by the corresponding loop tree node
   BB_NODE.  */
static void
create_bb_allocnos (ira_loop_tree_node_t bb_node)
{
  basic_block bb;
  rtx insn;
  unsigned int i;
  bitmap_iterator bi;

  curr_bb = bb = bb_node->bb;
  ira_assert (bb != NULL);
  FOR_BB_INSNS_REVERSE (bb, insn)
    if (NONDEBUG_INSN_P (insn))
      create_insn_allocnos (PATTERN (insn), false);
  /* It might be an allocno living through from one subloop to
     another.  */
  EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (bb), FIRST_PSEUDO_REGISTER, i, bi)
    if (ira_curr_regno_allocno_map[i] == NULL)
      ira_create_allocno (i, false, ira_curr_loop_tree_node);
}
/* Create allocnos corresponding to pseudo-registers living on edge E
   (a loop entry or exit).  Also mark the allocnos as living on the
   loop border.  */
static void
create_loop_allocnos (edge e)
{
  unsigned int i;
  bitmap live_in_regs, border_allocnos;
  bitmap_iterator bi;
  ira_loop_tree_node_t parent;

  live_in_regs = df_get_live_in (e->dest);
  border_allocnos = ira_curr_loop_tree_node->border_allocnos;
  EXECUTE_IF_SET_IN_REG_SET (df_get_live_out (e->src),
                             FIRST_PSEUDO_REGISTER, i, bi)
    if (bitmap_bit_p (live_in_regs, i))
      {
        if (ira_curr_regno_allocno_map[i] == NULL)
          {
            /* The order of creations is important for a correct
               ira_regno_allocno_map.  */
            if ((parent = ira_curr_loop_tree_node->parent) != NULL
                && parent->regno_allocno_map[i] == NULL)
              ira_create_allocno (i, false, parent);
            ira_create_allocno (i, false, ira_curr_loop_tree_node);
          }
        bitmap_set_bit (border_allocnos,
                        ALLOCNO_NUM (ira_curr_regno_allocno_map[i]));
      }
}
/* Create allocnos corresponding to pseudo-registers living in the loop
   represented by the corresponding loop tree node LOOP_NODE.  This
   function is called by ira_traverse_loop_tree.  */
static void
create_loop_tree_node_allocnos (ira_loop_tree_node_t loop_node)
{
  if (loop_node->bb != NULL)
    create_bb_allocnos (loop_node);
  else if (loop_node != ira_loop_tree_root)
    {
      int i;
      edge_iterator ei;
      edge e;
      VEC (edge, heap) *edges;

      ira_assert (current_loops != NULL);
      FOR_EACH_EDGE (e, ei, loop_node->loop->header->preds)
        if (e->src != loop_node->loop->latch)
          create_loop_allocnos (e);

      edges = get_loop_exit_edges (loop_node->loop);
      FOR_EACH_VEC_ELT (edge, edges, i, e)
        create_loop_allocnos (e);
      VEC_free (edge, heap, edges);
    }
}

/* Propagate information about allocnos modified inside the loop given
   by its LOOP_TREE_NODE to its parent.  */
static void
propagate_modified_regnos (ira_loop_tree_node_t loop_tree_node)
{
  if (loop_tree_node == ira_loop_tree_root)
    return;
  ira_assert (loop_tree_node->bb == NULL);
  bitmap_ior_into (loop_tree_node->parent->modified_regnos,
                   loop_tree_node->modified_regnos);
}
/* Propagate new info about allocno A (see comments about accumulated
   info in allocno definition) to the corresponding allocno on upper
   loop tree level.  So allocnos on upper levels accumulate
   information about the corresponding allocnos in nested regions.
   The new info means allocno info finally calculated in this
   file.  */
static void
propagate_allocno_info (void)
{
  int i;
  ira_allocno_t a, parent_a;
  ira_loop_tree_node_t parent;
  enum reg_class aclass;

  if (flag_ira_region != IRA_REGION_ALL
      && flag_ira_region != IRA_REGION_MIXED)
    return;
  for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    for (a = ira_regno_allocno_map[i];
         a != NULL;
         a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
      if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
          && (parent_a = parent->regno_allocno_map[i]) != NULL
          /* There are no caps yet at this point.  So use
             border_allocnos to find allocnos for the propagation.  */
          && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE (a)->border_allocnos,
                           ALLOCNO_NUM (a)))
        {
          if (! ALLOCNO_BAD_SPILL_P (a))
            ALLOCNO_BAD_SPILL_P (parent_a) = false;
          ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
          ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
          ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
          merge_hard_reg_conflicts (a, parent_a, true);
          ALLOCNO_CALLS_CROSSED_NUM (parent_a)
            += ALLOCNO_CALLS_CROSSED_NUM (a);
          ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
            += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
          ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
            += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
          aclass = ALLOCNO_CLASS (a);
          ira_assert (aclass == ALLOCNO_CLASS (parent_a));
          ira_allocate_and_accumulate_costs
            (&ALLOCNO_HARD_REG_COSTS (parent_a), aclass,
             ALLOCNO_HARD_REG_COSTS (a));
          ira_allocate_and_accumulate_costs
            (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
             aclass,
             ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
          ALLOCNO_CLASS_COST (parent_a)
            += ALLOCNO_CLASS_COST (a);
          ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
        }
}

/* Create allocnos corresponding to pseudo-registers in the current
   function.  Traverse the loop tree for this.  */
static void
create_allocnos (void)
{
  /* We need to process BB first to correctly link allocnos by member
     next_regno_allocno.  */
  ira_traverse_loop_tree (true, ira_loop_tree_root,
                          create_loop_tree_node_allocnos, NULL);
  if (optimize)
    ira_traverse_loop_tree (false, ira_loop_tree_root, NULL,
                            propagate_modified_regnos);
}
/* The page contains functions to remove some regions from a separate
   register allocation.  We remove regions whose separate allocation
   will hardly improve the result.  As a result we speed up regional
   register allocation.  */

/* The function changes the object in range list given by R to OBJ.  */
static void
change_object_in_range_list (live_range_t r, ira_object_t obj)
{
  for (; r != NULL; r = r->next)
    r->object = obj;
}
/* Move all live ranges associated with allocno FROM to allocno TO.  */
static void
move_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
{
  int i;
  int n = ALLOCNO_NUM_OBJECTS (from);

  gcc_assert (n == ALLOCNO_NUM_OBJECTS (to));

  for (i = 0; i < n; i++)
    {
      ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
      ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
      live_range_t lr = OBJECT_LIVE_RANGES (from_obj);

      if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
        {
          fprintf (ira_dump_file,
                   "      Moving ranges of a%dr%d to a%dr%d: ",
                   ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
                   ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
          ira_print_live_range_list (ira_dump_file, lr);
        }
      change_object_in_range_list (lr, to_obj);
      OBJECT_LIVE_RANGES (to_obj)
        = ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
      OBJECT_LIVE_RANGES (from_obj) = NULL;
    }
}
/* Copy all live ranges associated with allocno FROM to allocno TO.  */
static void
copy_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
{
  int i;
  int n = ALLOCNO_NUM_OBJECTS (from);

  gcc_assert (n == ALLOCNO_NUM_OBJECTS (to));

  for (i = 0; i < n; i++)
    {
      ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
      ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
      live_range_t lr = OBJECT_LIVE_RANGES (from_obj);

      if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
        {
          fprintf (ira_dump_file, "      Copying ranges of a%dr%d to a%dr%d: ",
                   ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
                   ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
          ira_print_live_range_list (ira_dump_file, lr);
        }
      lr = ira_copy_live_range_list (lr);
      change_object_in_range_list (lr, to_obj);
      OBJECT_LIVE_RANGES (to_obj)
        = ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
    }
}
/* Return TRUE if NODE represents a loop with low register
   pressure.  */
static bool
low_pressure_loop_node_p (ira_loop_tree_node_t node)
{
  int i;
  enum reg_class pclass;

  if (node->bb != NULL)
    return false;

  for (i = 0; i < ira_pressure_classes_num; i++)
    {
      pclass = ira_pressure_classes[i];
      if (node->reg_pressure[pclass] > ira_class_hard_regs_num[pclass]
          && ira_class_hard_regs_num[pclass] > 1)
        return false;
    }
  return true;
}
#ifdef STACK_REGS
/* Return TRUE if LOOP has a complex enter or exit edge.  We don't
   form a region from such a loop if the target uses stack registers,
   because reg-stack.c can not deal with such edges.  */
static bool
loop_with_complex_edge_p (struct loop *loop)
{
  int i;
  edge_iterator ei;
  edge e;
  VEC (edge, heap) *edges;
  bool res;

  FOR_EACH_EDGE (e, ei, loop->header->preds)
    if (e->flags & EDGE_EH)
      return true;
  edges = get_loop_exit_edges (loop);
  res = false;
  FOR_EACH_VEC_ELT (edge, edges, i, e)
    if (e->flags & EDGE_COMPLEX)
      {
        res = true;
        break;
      }
  VEC_free (edge, heap, edges);
  return res;
}
#endif
/* Sort loops for marking them for removal.  We put already marked
   loops first, then less frequent loops next, and then outer loops
   next.  */
static int
loop_compare_func (const void *v1p, const void *v2p)
{
  int diff;
  ira_loop_tree_node_t l1 = *(const ira_loop_tree_node_t *) v1p;
  ira_loop_tree_node_t l2 = *(const ira_loop_tree_node_t *) v2p;

  ira_assert (l1->parent != NULL && l2->parent != NULL);
  if (l1->to_remove_p && ! l2->to_remove_p)
    return -1;
  if (! l1->to_remove_p && l2->to_remove_p)
    return 1;
  if ((diff = l1->loop->header->frequency - l2->loop->header->frequency) != 0)
    return diff;
  if ((diff = (int) loop_depth (l1->loop) - (int) loop_depth (l2->loop)) != 0)
    return diff;
  /* Make sorting stable.  */
  return l1->loop_num - l2->loop_num;
}

/* Mark loops which should be removed from regional allocation.  We
   remove a loop with low register pressure inside another loop with
   low register pressure.  In this case a separate allocation of the
   loop hardly helps (for an irregular register file architecture it
   could help by choosing a better hard register in the loop, but we
   prefer faster allocation even in this case).  We also remove cheap
   loops if there are more than IRA_MAX_LOOPS_NUM of them.  Loops with
   EH enter or exit edges are removed too, because the allocation
   might require putting pseudo moves on the EH edges (we could still
   do this for pseudos with caller-saved hard registers in some cases,
   but it is impossible to say here or during the top-down allocation
   pass what hard register the pseudos get finally).  */
static void
mark_loops_for_removal (void)
{
  int i, n;
  ira_loop_tree_node_t *sorted_loops;
  loop_p loop;

  ira_assert (current_loops != NULL);
  sorted_loops
    = (ira_loop_tree_node_t *) ira_allocate (sizeof (ira_loop_tree_node_t)
                                             * VEC_length (loop_p,
                                                           ira_loops.larray));
  for (n = i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
    if (ira_loop_nodes[i].regno_allocno_map != NULL)
      {
        if (ira_loop_nodes[i].parent == NULL)
          {
            /* Don't remove the root.  */
            ira_loop_nodes[i].to_remove_p = false;
            continue;
          }
        sorted_loops[n++] = &ira_loop_nodes[i];
        ira_loop_nodes[i].to_remove_p
          = ((low_pressure_loop_node_p (ira_loop_nodes[i].parent)
              && low_pressure_loop_node_p (&ira_loop_nodes[i]))
#ifdef STACK_REGS
             || loop_with_complex_edge_p (ira_loop_nodes[i].loop)
#endif
             );
      }
  qsort (sorted_loops, n, sizeof (ira_loop_tree_node_t), loop_compare_func);
  for (i = 0; n - i + 1 > IRA_MAX_LOOPS_NUM; i++)
    {
      sorted_loops[i]->to_remove_p = true;
      if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
        fprintf
          (ira_dump_file,
           "  Mark loop %d (header %d, freq %d, depth %d) for removal (%s)\n",
           sorted_loops[i]->loop_num, sorted_loops[i]->loop->header->index,
           sorted_loops[i]->loop->header->frequency,
           loop_depth (sorted_loops[i]->loop),
           low_pressure_loop_node_p (sorted_loops[i]->parent)
           && low_pressure_loop_node_p (sorted_loops[i])
           ? "low pressure" : "cheap loop");
    }
  ira_free (sorted_loops);
}
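
/* The qsort above puts the already doomed loops first and the
   cheapest surviving loops right behind them, so trimming to the
   IRA_MAX_LOOPS_NUM budget only has to mark a prefix of
   sorted_loops.  In the loop condition, n - i + 1 counts the regions
   still alive including the root, which never appears in
   sorted_loops but occupies one of the available slots.  */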

/* Mark all loops but the root for removal.  */
static void
mark_all_loops_for_removal (void)
{
  int i;
  loop_p loop;

  ira_assert (current_loops != NULL);
  FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
    if (ira_loop_nodes[i].regno_allocno_map != NULL)
      {
        if (ira_loop_nodes[i].parent == NULL)
          {
            /* Don't remove the root.  */
            ira_loop_nodes[i].to_remove_p = false;
            continue;
          }
        ira_loop_nodes[i].to_remove_p = true;
        if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
          fprintf
            (ira_dump_file,
             "  Mark loop %d (header %d, freq %d, depth %d) for removal\n",
             ira_loop_nodes[i].loop_num,
             ira_loop_nodes[i].loop->header->index,
             ira_loop_nodes[i].loop->header->frequency,
             loop_depth (ira_loop_nodes[i].loop));
      }
}

/* Definition of vector of loop tree nodes.  */
DEF_VEC_P(ira_loop_tree_node_t);
DEF_VEC_ALLOC_P(ira_loop_tree_node_t, heap);

/* Vec containing references to all removed loop tree nodes.  */
static VEC(ira_loop_tree_node_t,heap) *removed_loop_vec;

/* Vec containing references to all children of loop tree nodes.  */
static VEC(ira_loop_tree_node_t,heap) *children_vec;

/* Remove subregions of NODE if their separate allocation will not
   improve the result.  */
static void
remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_node_t node)
{
  unsigned int start;
  bool remove_p;
  ira_loop_tree_node_t subnode;

  remove_p = node->to_remove_p;
  if (! remove_p)
    VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, node);
  start = VEC_length (ira_loop_tree_node_t, children_vec);
  for (subnode = node->children; subnode != NULL; subnode = subnode->next)
    if (subnode->bb == NULL)
      remove_uneccesary_loop_nodes_from_loop_tree (subnode);
    else
      VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, subnode);
  node->children = node->subloops = NULL;
  if (remove_p)
    {
      VEC_safe_push (ira_loop_tree_node_t, heap, removed_loop_vec, node);
      return;
    }
  while (VEC_length (ira_loop_tree_node_t, children_vec) > start)
    {
      subnode = VEC_pop (ira_loop_tree_node_t, children_vec);
      subnode->parent = node;
      subnode->next = node->children;
      node->children = subnode;
      if (subnode->bb == NULL)
        {
          subnode->subloop_next = node->subloops;
          node->subloops = subnode;
        }
    }
}

/* Return TRUE if NODE is inside PARENT.  */
static bool
loop_is_inside_p (ira_loop_tree_node_t node, ira_loop_tree_node_t parent)
{
  for (node = node->parent; node != NULL; node = node->parent)
    if (node == parent)
      return true;
  return false;
}

/* Sort allocnos according to their order in the regno allocno
   list.  */
static int
regno_allocno_order_compare_func (const void *v1p, const void *v2p)
{
  ira_allocno_t a1 = *(const ira_allocno_t *) v1p;
  ira_allocno_t a2 = *(const ira_allocno_t *) v2p;
  ira_loop_tree_node_t n1 = ALLOCNO_LOOP_TREE_NODE (a1);
  ira_loop_tree_node_t n2 = ALLOCNO_LOOP_TREE_NODE (a2);

  if (loop_is_inside_p (n1, n2))
    return -1;
  else if (loop_is_inside_p (n2, n1))
    return 1;
  /* If allocnos are equally good, sort by allocno numbers, so that
     the results of qsort leave nothing to chance.  We put allocnos
     with a higher number first in the list because it is the original
     order for allocnos from loops on the same level.  */
  return ALLOCNO_NUM (a2) - ALLOCNO_NUM (a1);
}

/* This array is used to sort allocnos to restore allocno order in
   the regno allocno list.  */
static ira_allocno_t *regno_allocnos;

/* Restore allocno order for REGNO in the regno allocno list.  */
static void
ira_rebuild_regno_allocno_list (int regno)
{
  int i, n;
  ira_allocno_t a;

  for (n = 0, a = ira_regno_allocno_map[regno];
       a != NULL;
       a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
    regno_allocnos[n++] = a;
  ira_assert (n > 0);
  qsort (regno_allocnos, n, sizeof (ira_allocno_t),
         regno_allocno_order_compare_func);
  for (i = 1; i < n; i++)
    ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[i - 1]) = regno_allocnos[i];
  ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[n - 1]) = NULL;
  ira_regno_allocno_map[regno] = regno_allocnos[0];
  if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
    fprintf (ira_dump_file,
             "    Rebuilding regno allocno list for %d\n", regno);
}

/* Propagate info from allocno FROM_A to allocno A.  */
static void
propagate_some_info_from_allocno (ira_allocno_t a, ira_allocno_t from_a)
{
  enum reg_class aclass;

  merge_hard_reg_conflicts (from_a, a, false);
  ALLOCNO_NREFS (a) += ALLOCNO_NREFS (from_a);
  ALLOCNO_FREQ (a) += ALLOCNO_FREQ (from_a);
  ALLOCNO_CALL_FREQ (a) += ALLOCNO_CALL_FREQ (from_a);
  ALLOCNO_CALLS_CROSSED_NUM (a) += ALLOCNO_CALLS_CROSSED_NUM (from_a);
  ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a)
    += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (from_a);
  ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a)
    += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (from_a);
  if (! ALLOCNO_BAD_SPILL_P (from_a))
    ALLOCNO_BAD_SPILL_P (a) = false;
  aclass = ALLOCNO_CLASS (from_a);
  ira_assert (aclass == ALLOCNO_CLASS (a));
  ira_allocate_and_accumulate_costs (&ALLOCNO_HARD_REG_COSTS (a), aclass,
                                     ALLOCNO_HARD_REG_COSTS (from_a));
  ira_allocate_and_accumulate_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
                                     aclass,
                                     ALLOCNO_CONFLICT_HARD_REG_COSTS (from_a));
  ALLOCNO_CLASS_COST (a) += ALLOCNO_CLASS_COST (from_a);
  ALLOCNO_MEMORY_COST (a) += ALLOCNO_MEMORY_COST (from_a);
}

/* Remove allocnos from loops removed from the allocation
   consideration.  */
static void
remove_unnecessary_allocnos (void)
{
  int regno;
  bool merged_p, rebuild_p;
  ira_allocno_t a, prev_a, next_a, parent_a;
  ira_loop_tree_node_t a_node, parent;

  merged_p = false;
  regno_allocnos = NULL;
  for (regno = max_reg_num () - 1; regno >= FIRST_PSEUDO_REGISTER; regno--)
    {
      rebuild_p = false;
      for (prev_a = NULL, a = ira_regno_allocno_map[regno];
           a != NULL;
           a = next_a)
        {
          next_a = ALLOCNO_NEXT_REGNO_ALLOCNO (a);
          a_node = ALLOCNO_LOOP_TREE_NODE (a);
          if (! a_node->to_remove_p)
            prev_a = a;
          else
            {
              for (parent = a_node->parent;
                   (parent_a = parent->regno_allocno_map[regno]) == NULL
                     && parent->to_remove_p;
                   parent = parent->parent)
                ;
              if (parent_a == NULL)
                {
                  /* There are no allocnos with the same regno in an
                     upper region -- just move the allocno to the
                     upper region.  */
                  prev_a = a;
                  ALLOCNO_LOOP_TREE_NODE (a) = parent;
                  parent->regno_allocno_map[regno] = a;
                  bitmap_set_bit (parent->all_allocnos, ALLOCNO_NUM (a));
                  rebuild_p = true;
                }
              else
                {
                  /* Remove the allocno and update info of the allocno
                     in the upper region.  */
                  if (prev_a == NULL)
                    ira_regno_allocno_map[regno] = next_a;
                  else
                    ALLOCNO_NEXT_REGNO_ALLOCNO (prev_a) = next_a;
                  move_allocno_live_ranges (a, parent_a);
                  merged_p = true;
                  propagate_some_info_from_allocno (parent_a, a);
                  /* Remove it from the corresponding regno allocno
                     map to avoid info propagation of a subsequent
                     allocno into this already removed allocno.  */
                  a_node->regno_allocno_map[regno] = NULL;
                  finish_allocno (a);
                }
            }
        }
      if (rebuild_p)
        {
          /* We need to restore the order in the regno allocno
             list.  */
          if (regno_allocnos == NULL)
            regno_allocnos
              = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
                                                * ira_allocnos_num);
          ira_rebuild_regno_allocno_list (regno);
        }
    }
  if (merged_p)
    ira_rebuild_start_finish_chains ();
  if (regno_allocnos != NULL)
    ira_free (regno_allocnos);
}

/* Remove allocnos from all loops but the root.  */
static void
remove_low_level_allocnos (void)
{
  int regno;
  bool merged_p, propagate_p;
  ira_allocno_t a, top_a;
  ira_loop_tree_node_t a_node, parent;
  ira_allocno_iterator ai;

  merged_p = false;
  FOR_EACH_ALLOCNO (a, ai)
    {
      a_node = ALLOCNO_LOOP_TREE_NODE (a);
      if (a_node == ira_loop_tree_root || ALLOCNO_CAP_MEMBER (a) != NULL)
        continue;
      regno = ALLOCNO_REGNO (a);
      if ((top_a = ira_loop_tree_root->regno_allocno_map[regno]) == NULL)
        {
          ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
          ira_loop_tree_root->regno_allocno_map[regno] = a;
          continue;
        }
      propagate_p = a_node->parent->regno_allocno_map[regno] == NULL;
      /* Remove the allocno and update info of the allocno in the upper
         region.  */
      move_allocno_live_ranges (a, top_a);
      merged_p = true;
      if (propagate_p)
        propagate_some_info_from_allocno (top_a, a);
    }
  FOR_EACH_ALLOCNO (a, ai)
    {
      a_node = ALLOCNO_LOOP_TREE_NODE (a);
      if (a_node == ira_loop_tree_root)
        continue;
      parent = a_node->parent;
      regno = ALLOCNO_REGNO (a);
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
        ira_assert (ALLOCNO_CAP (a) != NULL);
      else if (ALLOCNO_CAP (a) == NULL)
        ira_assert (parent->regno_allocno_map[regno] != NULL);
    }
  FOR_EACH_ALLOCNO (a, ai)
    {
      regno = ALLOCNO_REGNO (a);
      if (ira_loop_tree_root->regno_allocno_map[regno] == a)
        {
          ira_object_t obj;
          ira_allocno_object_iterator oi;

          ira_regno_allocno_map[regno] = a;
          ALLOCNO_NEXT_REGNO_ALLOCNO (a) = NULL;
          ALLOCNO_CAP_MEMBER (a) = NULL;
          FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
            COPY_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
                               OBJECT_TOTAL_CONFLICT_HARD_REGS (obj));
#ifdef STACK_REGS
          if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
            ALLOCNO_NO_STACK_REG_P (a) = true;
#endif
        }
      else
        finish_allocno (a);
    }
  if (merged_p)
    ira_rebuild_start_finish_chains ();
}

/* Remove loops from consideration.  We remove all loops except for
   the root if ALL_P, or loops for which a separate allocation will
   not improve the result.  We have to do this after allocno creation
   and the evaluation of allocno costs and classes, because only after
   that can the register pressure be known and calculated.  */
static void
remove_unnecessary_regions (bool all_p)
{
  if (current_loops == NULL)
    return;
  if (all_p)
    mark_all_loops_for_removal ();
  else
    mark_loops_for_removal ();
  children_vec
    = VEC_alloc (ira_loop_tree_node_t, heap,
                 last_basic_block + VEC_length (loop_p, ira_loops.larray));
  removed_loop_vec
    = VEC_alloc (ira_loop_tree_node_t, heap,
                 last_basic_block + VEC_length (loop_p, ira_loops.larray));
  remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_root);
  VEC_free (ira_loop_tree_node_t, heap, children_vec);
  if (all_p)
    remove_low_level_allocnos ();
  else
    remove_unnecessary_allocnos ();
  while (VEC_length (ira_loop_tree_node_t, removed_loop_vec) > 0)
    finish_loop_tree_node (VEC_pop (ira_loop_tree_node_t, removed_loop_vec));
  VEC_free (ira_loop_tree_node_t, heap, removed_loop_vec);
}

/* At this point a true value of the allocno attribute bad_spill_p
   means that there is an insn where the allocno occurs and where the
   allocno cannot be used as memory.  The function updates the
   attribute: afterwards it can be true only for allocnos which cannot
   be used as memory in an insn and in whose live ranges there are no
   other allocno deaths.  Spilling allocnos with a true value will not
   improve the code, because it will not make other allocnos
   colorable, while additional reloads for the corresponding pseudo
   will be generated in the reload pass for each insn where it occurs.

   This is a trick mentioned in the classic article of Chaitin et al.
   which is frequently omitted in other implementations of RA based on
   graph coloring.  */
static void
update_bad_spill_attribute (void)
{
  int i;
  ira_allocno_t a;
  ira_allocno_iterator ai;
  ira_allocno_object_iterator aoi;
  ira_object_t obj;
  live_range_t r;
  enum reg_class aclass;
  bitmap_head dead_points[N_REG_CLASSES];

  for (i = 0; i < ira_allocno_classes_num; i++)
    {
      aclass = ira_allocno_classes[i];
      bitmap_initialize (&dead_points[aclass], &reg_obstack);
    }
  FOR_EACH_ALLOCNO (a, ai)
    {
      aclass = ALLOCNO_CLASS (a);
      if (aclass == NO_REGS)
        continue;
      FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
        for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
          bitmap_set_bit (&dead_points[aclass], r->finish);
    }
  FOR_EACH_ALLOCNO (a, ai)
    {
      aclass = ALLOCNO_CLASS (a);
      if (aclass == NO_REGS)
        continue;
      if (! ALLOCNO_BAD_SPILL_P (a))
        continue;
      FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
        {
          for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
            {
              for (i = r->start + 1; i < r->finish; i++)
                if (bitmap_bit_p (&dead_points[aclass], i))
                  break;
              if (i < r->finish)
                break;
            }
          if (r != NULL)
            {
              ALLOCNO_BAD_SPILL_P (a) = false;
              break;
            }
        }
    }
  for (i = 0; i < ira_allocno_classes_num; i++)
    {
      aclass = ira_allocno_classes[i];
      bitmap_clear (&dead_points[aclass]);
    }
}
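
/* Example: suppose a pseudo must sit in a register at some insn (so
   its bad_spill_p starts out true) and occupies points [10, 50], and
   no allocno of the same class has a range finishing at any of the
   points 11..49.  Then spilling it cannot free a hard register at a
   point where another allocno could take it over, yet it would add a
   reload at every insn where the pseudo occurs -- so the attribute
   stays true and the colorer avoids this spill candidate.  */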

/* Set up minimal and maximal live range points for allocnos.  */
static void
setup_min_max_allocno_live_range_point (void)
{
  int i;
  ira_allocno_t a, parent_a, cap;
  ira_allocno_iterator ai;
#ifdef ENABLE_IRA_CHECKING
  ira_object_iterator oi;
  ira_object_t obj;
#endif
  live_range_t r;
  ira_loop_tree_node_t parent;

  FOR_EACH_ALLOCNO (a, ai)
    {
      int n = ALLOCNO_NUM_OBJECTS (a);

      for (i = 0; i < n; i++)
        {
          ira_object_t obj = ALLOCNO_OBJECT (a, i);
          r = OBJECT_LIVE_RANGES (obj);
          if (r == NULL)
            continue;
          OBJECT_MAX (obj) = r->finish;
          for (; r->next != NULL; r = r->next)
            ;
          OBJECT_MIN (obj) = r->start;
        }
    }
  for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    for (a = ira_regno_allocno_map[i];
         a != NULL;
         a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
      {
        int j;
        int n = ALLOCNO_NUM_OBJECTS (a);

        for (j = 0; j < n; j++)
          {
            ira_object_t obj = ALLOCNO_OBJECT (a, j);
            ira_object_t parent_obj;

            if (OBJECT_MAX (obj) < 0)
              continue;
            ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
            /* Accumulation of range info.  */
            if (ALLOCNO_CAP (a) != NULL)
              {
                for (cap = ALLOCNO_CAP (a);
                     cap != NULL;
                     cap = ALLOCNO_CAP (cap))
                  {
                    ira_object_t cap_obj = ALLOCNO_OBJECT (cap, j);
                    if (OBJECT_MAX (cap_obj) < OBJECT_MAX (obj))
                      OBJECT_MAX (cap_obj) = OBJECT_MAX (obj);
                    if (OBJECT_MIN (cap_obj) > OBJECT_MIN (obj))
                      OBJECT_MIN (cap_obj) = OBJECT_MIN (obj);
                  }
                continue;
              }
            if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) == NULL)
              continue;
            parent_a = parent->regno_allocno_map[i];
            parent_obj = ALLOCNO_OBJECT (parent_a, j);
            if (OBJECT_MAX (parent_obj) < OBJECT_MAX (obj))
              OBJECT_MAX (parent_obj) = OBJECT_MAX (obj);
            if (OBJECT_MIN (parent_obj) > OBJECT_MIN (obj))
              OBJECT_MIN (parent_obj) = OBJECT_MIN (obj);
          }
      }
#ifdef ENABLE_IRA_CHECKING
  FOR_EACH_OBJECT (obj, oi)
    {
      if ((0 <= OBJECT_MIN (obj) && OBJECT_MIN (obj) <= ira_max_point)
          && (0 <= OBJECT_MAX (obj) && OBJECT_MAX (obj) <= ira_max_point))
        continue;
      gcc_unreachable ();
    }
#endif
}

/* Sort objects according to their live ranges.  Objects are ordered
   according to their start point (min); objects with the same start
   are ordered according to their finish point (max); remaining ties
   are broken by allocno number to make the sort stable.  */
static int
object_range_compare_func (const void *v1p, const void *v2p)
{
  int diff;
  ira_object_t obj1 = *(const ira_object_t *) v1p;
  ira_object_t obj2 = *(const ira_object_t *) v2p;
  ira_allocno_t a1 = OBJECT_ALLOCNO (obj1);
  ira_allocno_t a2 = OBJECT_ALLOCNO (obj2);

  if ((diff = OBJECT_MIN (obj1) - OBJECT_MIN (obj2)) != 0)
    return diff;
  if ((diff = OBJECT_MAX (obj1) - OBJECT_MAX (obj2)) != 0)
    return diff;
  return ALLOCNO_NUM (a1) - ALLOCNO_NUM (a2);
}
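
/* Because conflict ids are assigned from the map sorted this way,
   objects with bigger conflict ids have range starts that are not
   smaller than those of objects with smaller ids -- the monotonicity
   that setup_min_max_conflict_allocno_ids below relies on when it
   sweeps the map in id order.  */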

/* Sort ira_object_id_map and set up conflict ids of allocnos.  */
static void
sort_conflict_id_map (void)
{
  int i, num;
  ira_allocno_t a;
  ira_allocno_iterator ai;

  num = 0;
  FOR_EACH_ALLOCNO (a, ai)
    {
      ira_allocno_object_iterator oi;
      ira_object_t obj;

      FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
        ira_object_id_map[num++] = obj;
    }
  qsort (ira_object_id_map, num, sizeof (ira_object_t),
         object_range_compare_func);
  for (i = 0; i < num; i++)
    {
      ira_object_t obj = ira_object_id_map[i];

      gcc_assert (obj != NULL);
      OBJECT_CONFLICT_ID (obj) = i;
    }
  for (i = num; i < ira_objects_num; i++)
    ira_object_id_map[i] = NULL;
}

/* Set up minimal and maximal conflict ids of allocnos with which a
   given allocno can conflict.  */
static void
setup_min_max_conflict_allocno_ids (void)
{
  int aclass;
  int i, j, min, max, start, finish, first_not_finished, filled_area_start;
  int *live_range_min, *last_lived;
  int word0_min, word0_max;
  ira_allocno_t a;
  ira_allocno_iterator ai;

  live_range_min = (int *) ira_allocate (sizeof (int) * ira_objects_num);
  aclass = -1;
  first_not_finished = -1;
  for (i = 0; i < ira_objects_num; i++)
    {
      ira_object_t obj = ira_object_id_map[i];

      if (obj == NULL)
        continue;

      a = OBJECT_ALLOCNO (obj);

      if (aclass < 0)
        {
          aclass = ALLOCNO_CLASS (a);
          min = i;
          first_not_finished = i;
        }
      else
        {
          start = OBJECT_MIN (obj);
          /* If we skip an allocno, the allocnos with smaller ids will
             also be skipped because of the secondary sorting by range
             finishes (see object_range_compare_func).  */
          while (first_not_finished < i
                 && start > OBJECT_MAX (ira_object_id_map
                                        [first_not_finished]))
            first_not_finished++;
          min = first_not_finished;
          if (min == i)
            /* We could increase min further in this case but it is
               good enough.  */
            min = i - 1;
        }
      live_range_min[i] = OBJECT_MIN (obj);
      OBJECT_MIN (obj) = min;
    }
  last_lived = (int *) ira_allocate (sizeof (int) * ira_max_point);
  aclass = -1;
  filled_area_start = -1;
  for (i = ira_objects_num - 1; i >= 0; i--)
    {
      ira_object_t obj = ira_object_id_map[i];

      if (obj == NULL)
        continue;

      a = OBJECT_ALLOCNO (obj);
      if (aclass < 0)
        {
          aclass = ALLOCNO_CLASS (a);
          for (j = 0; j < ira_max_point; j++)
            last_lived[j] = -1;
          filled_area_start = ira_max_point;
        }
      min = live_range_min[i];
      finish = OBJECT_MAX (obj);
      max = last_lived[finish];
      if (max < 0)
        /* We could decrease max further in this case but it is good
           enough.  */
        max = OBJECT_CONFLICT_ID (obj) - 1;
      OBJECT_MAX (obj) = max;
      /* In filling, we can go further than the range finish of the
         object to recognize intersection quickly: if the finish of a
         subsequently processed object (which has a smaller conflict
         id) lies beyond this object's range finish, the two ranges
         definitely intersect (objects with bigger conflict ids have
         range starts not smaller than objects with smaller ids).  */
      for (j = min; j < filled_area_start; j++)
        last_lived[j] = i;
      filled_area_start = min;
    }
  ira_free (last_lived);
  ira_free (live_range_min);

  /* For allocnos with more than one object, we may later record
     extra conflicts in subobject 0 that we cannot really know about
     here.  For now, simply widen the min/max range of these
     subobjects.  */

  word0_min = INT_MAX;
  word0_max = INT_MIN;
  FOR_EACH_ALLOCNO (a, ai)
    {
      int n = ALLOCNO_NUM_OBJECTS (a);
      ira_object_t obj0;

      if (n < 2)
        continue;
      obj0 = ALLOCNO_OBJECT (a, 0);
      if (OBJECT_CONFLICT_ID (obj0) < word0_min)
        word0_min = OBJECT_CONFLICT_ID (obj0);
      if (OBJECT_CONFLICT_ID (obj0) > word0_max)
        word0_max = OBJECT_CONFLICT_ID (obj0);
    }
  FOR_EACH_ALLOCNO (a, ai)
    {
      int n = ALLOCNO_NUM_OBJECTS (a);
      ira_object_t obj0;

      if (n < 2)
        continue;
      obj0 = ALLOCNO_OBJECT (a, 0);
      if (OBJECT_MIN (obj0) > word0_min)
        OBJECT_MIN (obj0) = word0_min;
      if (OBJECT_MAX (obj0) < word0_max)
        OBJECT_MAX (obj0) = word0_max;
    }
}
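
/* From this point on OBJECT_MIN and OBJECT_MAX no longer hold live
   range points: they have been recycled into conservative bounds on
   the conflict ids a given object can conflict with, which lets the
   conflict bit vectors built later span only [OBJECT_MIN, OBJECT_MAX]
   instead of all ira_objects_num objects.  */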

/* Create caps representing allocnos living only inside a loop node on
   a higher loop level.  */
static void
create_caps (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;
  ira_loop_tree_node_t loop_tree_node;

  FOR_EACH_ALLOCNO (a, ai)
    {
      if (ALLOCNO_LOOP_TREE_NODE (a) == ira_loop_tree_root)
        continue;
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
        create_cap_allocno (a);
      else if (ALLOCNO_CAP (a) == NULL)
        {
          loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
          if (!bitmap_bit_p (loop_tree_node->border_allocnos,
                             ALLOCNO_NUM (a)))
            create_cap_allocno (a);
        }
    }
}


/* The page below contains code transforming the internal
   representation (IR) built for more than one region into a
   one-region IR, which is what reload requires.  This transformation
   is called IR flattening.  We might just rebuild the IR for one
   region, but we don't do it because rebuilding takes a lot of
   time.  */

/* Map: regno -> allocno which will finally represent the regno in the
   IR with one region.  */
static ira_allocno_t *regno_top_level_allocno_map;

/* Find the allocno that corresponds to A at a level one higher up in
   the loop tree.  Returns NULL if A is a cap, or if it has no
   parent.  */
ira_allocno_t
ira_parent_allocno (ira_allocno_t a)
{
  ira_loop_tree_node_t parent;

  if (ALLOCNO_CAP (a) != NULL)
    return NULL;

  parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
  if (parent == NULL)
    return NULL;

  return parent->regno_allocno_map[ALLOCNO_REGNO (a)];
}

/* Find the allocno that corresponds to A at a level one higher up in
   the loop tree.  If ALLOCNO_CAP is set for A, return that.  */
ira_allocno_t
ira_parent_or_cap_allocno (ira_allocno_t a)
{
  if (ALLOCNO_CAP (a) != NULL)
    return ALLOCNO_CAP (a);

  return ira_parent_allocno (a);
}

/* Process all allocnos originating from pseudo REGNO and copy live
   ranges, hard reg conflicts, and allocno stack reg attributes from
   low level allocnos to final allocnos which are destinations of
   removed stores at a loop exit.  Return true if we copied live
   ranges.  */
static bool
copy_info_to_removed_store_destinations (int regno)
{
  ira_allocno_t a;
  ira_allocno_t parent_a = NULL;
  ira_loop_tree_node_t parent;
  bool merged_p;

  merged_p = false;
  for (a = ira_regno_allocno_map[regno];
       a != NULL;
       a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
    {
      if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))])
        /* This allocno will be removed.  */
        continue;

      /* Caps will be removed.  */
      ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
      for (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
           parent != NULL;
           parent = parent->parent)
        if ((parent_a = parent->regno_allocno_map[regno]) == NULL
            || (parent_a
                == regno_top_level_allocno_map[REGNO
                                               (allocno_emit_reg
                                                (parent_a))]
                && ALLOCNO_EMIT_DATA (parent_a)->mem_optimized_dest_p))
          break;
      if (parent == NULL || parent_a == NULL)
        continue;

      copy_allocno_live_ranges (a, parent_a);
      merge_hard_reg_conflicts (a, parent_a, true);

      ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
      ALLOCNO_CALLS_CROSSED_NUM (parent_a)
        += ALLOCNO_CALLS_CROSSED_NUM (a);
      ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
        += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
      ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
        += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
      merged_p = true;
    }
  return merged_p;
}

/* Flatten the IR.  In other words, this function transforms the IR
   as if it were built with one region (without loops).  We could make
   it much simpler by rebuilding the IR with one region, but
   unfortunately that takes a lot of time.  MAX_REGNO_BEFORE_EMIT and
   IRA_MAX_POINT_BEFORE_EMIT are correspondingly MAX_REG_NUM () and
   IRA_MAX_POINT before emitting insns on the loop borders.  */
void
ira_flattening (int max_regno_before_emit, int ira_max_point_before_emit)
{
  int i, j;
  bool keep_p;
  int hard_regs_num;
  bool new_pseudos_p, merged_p, mem_dest_p;
  unsigned int n;
  enum reg_class aclass;
  ira_allocno_t a, parent_a, first, second, node_first, node_second;
  ira_copy_t cp;
  ira_loop_tree_node_t node;
  live_range_t r;
  ira_allocno_iterator ai;
  ira_copy_iterator ci;

  regno_top_level_allocno_map
    = (ira_allocno_t *) ira_allocate (max_reg_num ()
                                      * sizeof (ira_allocno_t));
  memset (regno_top_level_allocno_map, 0,
          max_reg_num () * sizeof (ira_allocno_t));
  new_pseudos_p = merged_p = false;
  FOR_EACH_ALLOCNO (a, ai)
    {
      ira_allocno_object_iterator oi;
      ira_object_t obj;

      if (ALLOCNO_CAP_MEMBER (a) != NULL)
        /* Caps are not in the regno allocno maps and they will never
           be transformed into allocnos existing after IR
           flattening.  */
        continue;
      FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
        COPY_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
                           OBJECT_CONFLICT_HARD_REGS (obj));
#ifdef STACK_REGS
      ALLOCNO_TOTAL_NO_STACK_REG_P (a) = ALLOCNO_NO_STACK_REG_P (a);
#endif
    }
  /* Fix final allocno attributes.  */
  for (i = max_regno_before_emit - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    {
      mem_dest_p = false;
      for (a = ira_regno_allocno_map[i];
           a != NULL;
           a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
        {
          ira_emit_data_t parent_data, data = ALLOCNO_EMIT_DATA (a);

          ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
          if (data->somewhere_renamed_p)
            new_pseudos_p = true;
          parent_a = ira_parent_allocno (a);
          if (parent_a == NULL)
            {
              ALLOCNO_COPIES (a) = NULL;
              regno_top_level_allocno_map[REGNO (data->reg)] = a;
              continue;
            }
          ira_assert (ALLOCNO_CAP_MEMBER (parent_a) == NULL);

          if (data->mem_optimized_dest != NULL)
            mem_dest_p = true;
          parent_data = ALLOCNO_EMIT_DATA (parent_a);
          if (REGNO (data->reg) == REGNO (parent_data->reg))
            {
              merge_hard_reg_conflicts (a, parent_a, true);
              move_allocno_live_ranges (a, parent_a);
              merged_p = true;
              parent_data->mem_optimized_dest_p
                = (parent_data->mem_optimized_dest_p
                   || data->mem_optimized_dest_p);
              continue;
            }
          new_pseudos_p = true;
          for (;;)
            {
              ALLOCNO_NREFS (parent_a) -= ALLOCNO_NREFS (a);
              ALLOCNO_FREQ (parent_a) -= ALLOCNO_FREQ (a);
              ALLOCNO_CALL_FREQ (parent_a) -= ALLOCNO_CALL_FREQ (a);
              ALLOCNO_CALLS_CROSSED_NUM (parent_a)
                -= ALLOCNO_CALLS_CROSSED_NUM (a);
              ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
                -= ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
              ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
                -= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
              ira_assert (ALLOCNO_CALLS_CROSSED_NUM (parent_a) >= 0
                          && ALLOCNO_NREFS (parent_a) >= 0
                          && ALLOCNO_FREQ (parent_a) >= 0);
              aclass = ALLOCNO_CLASS (parent_a);
              hard_regs_num = ira_class_hard_regs_num[aclass];
              if (ALLOCNO_HARD_REG_COSTS (a) != NULL
                  && ALLOCNO_HARD_REG_COSTS (parent_a) != NULL)
                for (j = 0; j < hard_regs_num; j++)
                  ALLOCNO_HARD_REG_COSTS (parent_a)[j]
                    -= ALLOCNO_HARD_REG_COSTS (a)[j];
              if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL
                  && ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a) != NULL)
                for (j = 0; j < hard_regs_num; j++)
                  ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a)[j]
                    -= ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[j];
              ALLOCNO_CLASS_COST (parent_a)
                -= ALLOCNO_CLASS_COST (a);
              ALLOCNO_MEMORY_COST (parent_a) -= ALLOCNO_MEMORY_COST (a);
              parent_a = ira_parent_allocno (parent_a);
              if (parent_a == NULL)
                break;
            }
          ALLOCNO_COPIES (a) = NULL;
          regno_top_level_allocno_map[REGNO (data->reg)] = a;
        }
      if (mem_dest_p && copy_info_to_removed_store_destinations (i))
        merged_p = true;
    }
  ira_assert (new_pseudos_p || ira_max_point_before_emit == ira_max_point);
  if (merged_p || ira_max_point_before_emit != ira_max_point)
    ira_rebuild_start_finish_chains ();
  if (new_pseudos_p)
    {
      sparseset objects_live;

      /* Rebuild conflicts.  */
      FOR_EACH_ALLOCNO (a, ai)
        {
          ira_allocno_object_iterator oi;
          ira_object_t obj;

          if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
              || ALLOCNO_CAP_MEMBER (a) != NULL)
            continue;
          FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
            {
              for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
                ira_assert (r->object == obj);
              clear_conflicts (obj);
            }
        }
      objects_live = sparseset_alloc (ira_objects_num);
      for (i = 0; i < ira_max_point; i++)
        {
          for (r = ira_start_point_ranges[i]; r != NULL; r = r->start_next)
            {
              ira_object_t obj = r->object;

              a = OBJECT_ALLOCNO (obj);
              if (a != regno_top_level_allocno_map[REGNO
                                                   (allocno_emit_reg (a))]
                  || ALLOCNO_CAP_MEMBER (a) != NULL)
                continue;

              aclass = ALLOCNO_CLASS (a);
              sparseset_set_bit (objects_live, OBJECT_CONFLICT_ID (obj));
              EXECUTE_IF_SET_IN_SPARSESET (objects_live, n)
                {
                  ira_object_t live_obj = ira_object_id_map[n];
                  ira_allocno_t live_a = OBJECT_ALLOCNO (live_obj);
                  enum reg_class live_aclass = ALLOCNO_CLASS (live_a);

                  if (ira_reg_classes_intersect_p[aclass][live_aclass]
                      /* Don't set up a conflict of the allocno with
                         itself.  */
                      && live_a != a)
                    ira_add_conflict (obj, live_obj);
                }
            }

          for (r = ira_finish_point_ranges[i]; r != NULL; r = r->finish_next)
            sparseset_clear_bit (objects_live,
                                 OBJECT_CONFLICT_ID (r->object));
        }
      sparseset_free (objects_live);
      compress_conflict_vecs ();
    }
  /* Mark some copies for removal and change allocnos in the rest of
     the copies.  */
  FOR_EACH_COPY (cp, ci)
    {
      if (ALLOCNO_CAP_MEMBER (cp->first) != NULL
          || ALLOCNO_CAP_MEMBER (cp->second) != NULL)
        {
          if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
            fprintf
              (ira_dump_file, "      Remove cp%d:%c%dr%d-%c%dr%d\n",
               cp->num, ALLOCNO_CAP_MEMBER (cp->first) != NULL ? 'c' : 'a',
               ALLOCNO_NUM (cp->first),
               REGNO (allocno_emit_reg (cp->first)),
               ALLOCNO_CAP_MEMBER (cp->second) != NULL ? 'c' : 'a',
               ALLOCNO_NUM (cp->second),
               REGNO (allocno_emit_reg (cp->second)));
          cp->loop_tree_node = NULL;
          continue;
        }
      first
        = regno_top_level_allocno_map[REGNO (allocno_emit_reg (cp->first))];
      second
        = regno_top_level_allocno_map[REGNO (allocno_emit_reg (cp->second))];
      node = cp->loop_tree_node;
      if (node == NULL)
        keep_p = true; /* The copy was generated in ira-emit.c.  */
      else
        {
          /* Check that the copy was not propagated from a level on
             which we will have different pseudos.  */
          node_first = node->regno_allocno_map[ALLOCNO_REGNO (cp->first)];
          node_second = node->regno_allocno_map[ALLOCNO_REGNO (cp->second)];
          keep_p = ((REGNO (allocno_emit_reg (first))
                     == REGNO (allocno_emit_reg (node_first)))
                    && (REGNO (allocno_emit_reg (second))
                        == REGNO (allocno_emit_reg (node_second))));
        }
      if (keep_p)
        {
          cp->loop_tree_node = ira_loop_tree_root;
          cp->first = first;
          cp->second = second;
        }
      else
        {
          cp->loop_tree_node = NULL;
          if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
            fprintf (ira_dump_file, "      Remove cp%d:a%dr%d-a%dr%d\n",
                     cp->num, ALLOCNO_NUM (cp->first),
                     REGNO (allocno_emit_reg (cp->first)),
                     ALLOCNO_NUM (cp->second),
                     REGNO (allocno_emit_reg (cp->second)));
        }
    }
  /* Remove unnecessary allocnos on lower levels of the loop tree.  */
  FOR_EACH_ALLOCNO (a, ai)
    {
      if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
          || ALLOCNO_CAP_MEMBER (a) != NULL)
        {
          if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
            fprintf (ira_dump_file, "      Remove a%dr%d\n",
                     ALLOCNO_NUM (a), REGNO (allocno_emit_reg (a)));
          finish_allocno (a);
          continue;
        }
      ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
      ALLOCNO_REGNO (a) = REGNO (allocno_emit_reg (a));
      ALLOCNO_CAP (a) = NULL;
      /* Restore updated costs for assignments from reload.  */
      ALLOCNO_UPDATED_MEMORY_COST (a) = ALLOCNO_MEMORY_COST (a);
      ALLOCNO_UPDATED_CLASS_COST (a) = ALLOCNO_CLASS_COST (a);
      if (! ALLOCNO_ASSIGNED_P (a))
        ira_free_allocno_updated_costs (a);
      ira_assert (ALLOCNO_UPDATED_HARD_REG_COSTS (a) == NULL);
      ira_assert (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) == NULL);
    }
  /* Remove unnecessary copies.  */
  FOR_EACH_COPY (cp, ci)
    {
      if (cp->loop_tree_node == NULL)
        {
          ira_copies[cp->num] = NULL;
          finish_copy (cp);
          continue;
        }
      ira_assert
        (ALLOCNO_LOOP_TREE_NODE (cp->first) == ira_loop_tree_root
         && ALLOCNO_LOOP_TREE_NODE (cp->second) == ira_loop_tree_root);
      ira_add_allocno_copy_to_list (cp);
      ira_swap_allocno_copy_ends_if_necessary (cp);
    }
  rebuild_regno_allocno_maps ();
  if (ira_max_point != ira_max_point_before_emit)
    ira_compress_allocno_live_ranges ();
  ira_free (regno_top_level_allocno_map);
}
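
/* After ira_flattening the surviving allocnos all live in
   ira_loop_tree_root, carry the regnos of the pseudos that ira-emit.c
   finally left in the insn stream, and have conflicts, copies and
   live ranges expressed purely in one-region terms -- the shape of IR
   that reload expects.  */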

#ifdef ENABLE_IRA_CHECKING
/* Check the creation of all allocnos.  Allocnos on lower levels
   should have allocnos or caps on all upper levels.  */
static void
check_allocno_creation (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;
  ira_loop_tree_node_t loop_tree_node;

  FOR_EACH_ALLOCNO (a, ai)
    {
      loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
      ira_assert (bitmap_bit_p (loop_tree_node->all_allocnos,
                                ALLOCNO_NUM (a)));
      if (loop_tree_node == ira_loop_tree_root)
        continue;
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
        ira_assert (ALLOCNO_CAP (a) != NULL);
      else if (ALLOCNO_CAP (a) == NULL)
        ira_assert (loop_tree_node->parent
                    ->regno_allocno_map[ALLOCNO_REGNO (a)] != NULL
                    && bitmap_bit_p (loop_tree_node->border_allocnos,
                                     ALLOCNO_NUM (a)));
    }
}
#endif

/* Identify allocnos which prefer a register class with a single hard
   register.  Adjust ALLOCNO_CONFLICT_HARD_REG_COSTS so that
   conflicting allocnos are less likely to use the preferred singleton
   register.  */
static void
update_conflict_hard_reg_costs (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;
  int i, index, min;

  FOR_EACH_ALLOCNO (a, ai)
    {
      reg_class_t aclass = ALLOCNO_CLASS (a);
      reg_class_t pref = reg_preferred_class (ALLOCNO_REGNO (a));
      int singleton = ira_class_singleton[pref][ALLOCNO_MODE (a)];
      if (singleton < 0)
        continue;
      index = ira_class_hard_reg_index[(int) aclass][singleton];
      if (index < 0)
        continue;
      if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) == NULL
          || ALLOCNO_HARD_REG_COSTS (a) == NULL)
        continue;
      min = INT_MAX;
      for (i = ira_class_hard_regs_num[(int) aclass] - 1; i >= 0; i--)
        if (ALLOCNO_HARD_REG_COSTS (a)[i] > ALLOCNO_CLASS_COST (a)
            && min > ALLOCNO_HARD_REG_COSTS (a)[i])
          min = ALLOCNO_HARD_REG_COSTS (a)[i];
      if (min == INT_MAX)
        continue;
      ira_allocate_and_set_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
                                  aclass, 0);
      ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[index]
        -= min - ALLOCNO_CLASS_COST (a);
    }
}
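
/* Roughly: MIN above is the cheapest cost the allocno would pay for a
   hard register other than those as cheap as its class cost, so
   MIN - ALLOCNO_CLASS_COST (a) is the penalty it suffers if pushed
   off its preferred singleton register.  Charging that penalty
   against the singleton's entry in ALLOCNO_CONFLICT_HARD_REG_COSTS
   lets conflicting allocnos see the cost of taking the register away,
   without making the register unusable for them.  */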

/* Create an internal representation (IR) for IRA (allocnos, copies,
   loop tree nodes).  The function returns TRUE if we generate the
   loop structure (besides the nodes representing the whole function
   and the basic blocks) for regional allocation.  A true return means
   that we really need to flatten the IR before reload.  */
bool
ira_build (void)
{
  bool loops_p;

  df_analyze ();
  initiate_cost_vectors ();
  initiate_allocnos ();
  initiate_copies ();
  create_loop_tree_nodes ();
  form_loop_tree ();
  create_allocnos ();
  ira_costs ();
  create_allocno_objects ();
  ira_create_allocno_live_ranges ();
  remove_unnecessary_regions (false);
  ira_compress_allocno_live_ranges ();
  update_bad_spill_attribute ();
  loops_p = more_one_region_p ();
  if (loops_p)
    {
      propagate_allocno_info ();
      create_caps ();
    }
  ira_tune_allocno_costs ();
#ifdef ENABLE_IRA_CHECKING
  check_allocno_creation ();
#endif
  setup_min_max_allocno_live_range_point ();
  sort_conflict_id_map ();
  setup_min_max_conflict_allocno_ids ();
  ira_build_conflicts ();
  update_conflict_hard_reg_costs ();
  if (! ira_conflicts_p)
    {
      ira_allocno_t a;
      ira_allocno_iterator ai;

      /* Remove all regions but the root one.  */
      if (loops_p)
        {
          remove_unnecessary_regions (true);
          loops_p = false;
        }
      /* We don't save hard registers around calls for fast allocation
         -- add caller clobbered registers as conflicting ones to
         allocnos crossing calls.  */
      FOR_EACH_ALLOCNO (a, ai)
        if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
          ior_hard_reg_conflicts (a, &call_used_reg_set);
    }
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    print_copies (ira_dump_file);
  if (internal_flag_ira_verbose > 0 && ira_dump_file != NULL)
    {
      int n, nr, nr_big;
      ira_allocno_t a;
      live_range_t r;
      ira_allocno_iterator ai;

      n = 0;
      nr = 0;
      nr_big = 0;
      FOR_EACH_ALLOCNO (a, ai)
        {
          int j, nobj = ALLOCNO_NUM_OBJECTS (a);

          if (nobj > 1)
            nr_big++;
          for (j = 0; j < nobj; j++)
            {
              ira_object_t obj = ALLOCNO_OBJECT (a, j);
              n += OBJECT_NUM_CONFLICTS (obj);
              for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
                nr++;
            }
        }
      fprintf (ira_dump_file, "  regions=%d, blocks=%d, points=%d\n",
               current_loops == NULL
               ? 1 : VEC_length (loop_p, ira_loops.larray),
               n_basic_blocks, ira_max_point);
      fprintf (ira_dump_file,
               "    allocnos=%d (big %d), copies=%d, conflicts=%d, ranges=%d\n",
               ira_allocnos_num, nr_big, ira_copies_num, n, nr);
    }
  return loops_p;
}

/* Release the data created by function ira_build.  */
void
ira_destroy (void)
{
  finish_loop_tree_nodes ();
  finish_copies ();
  finish_allocnos ();
  finish_cost_vectors ();
  ira_finish_allocno_live_ranges ();
}