1 /* IRA hard register and memory cost calculation for allocnos.
2 Copyright (C) 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "hard-reg-set.h"
27 #include "rtl.h"
28 #include "expr.h"
29 #include "tm_p.h"
30 #include "flags.h"
31 #include "basic-block.h"
32 #include "regs.h"
33 #include "addresses.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "toplev.h"
37 #include "target.h"
38 #include "params.h"
39 #include "ira-int.h"
 41 /* This file contains code similar to that in regclass.c, but the
 42    code here works on the allocno basis.  */
44 #ifdef FORBIDDEN_INC_DEC_CLASSES
 45 /* Indexed by allocno number N; TRUE if the allocno with number N is
 46    used in an auto-inc or auto-dec context.  */
47 static bool *in_inc_dec;
48 #endif
50 /* The `costs' struct records the cost of using hard registers of each
51 class considered for the calculation and of using memory for each
52 allocno. */
53 struct costs
55 int mem_cost;
56 /* Costs for register classes start here. We process only some
57 register classes (cover classes on the 1st cost calculation
58 iteration and important classes on the 2nd iteration). */
59 int cost[1];
 62 /* Initialized once.  It is the maximal possible size of the allocated
 63    struct costs.  */
64 static int max_struct_costs_size;
66 /* Allocated and initialized once, and used to initialize cost values
67 for each insn. */
68 static struct costs *init_cost;
70 /* Allocated once, and used for temporary purposes. */
71 static struct costs *temp_costs;
73 /* Allocated once, and used for the cost calculation. */
74 static struct costs *op_costs[MAX_RECOG_OPERANDS];
75 static struct costs *this_op_costs[MAX_RECOG_OPERANDS];
77 /* Original and accumulated costs of each class for each allocno. */
78 static struct costs *allocno_costs, *total_costs;
80 /* Classes used for cost calculation. They may be different on
81 different iterations of the cost calculations or in different
82 optimization modes. */
83 static enum reg_class *cost_classes;
85 /* The size of the previous array. */
86 static int cost_classes_num;
88 /* Map: cost class -> order number (they start with 0) of the cost
89 class. */
90 static int cost_class_nums[N_REG_CLASSES];
 92 /* The current size of struct costs.  */
93 static int struct_costs_size;
95 /* Return pointer to structure containing costs of allocno with given
96 NUM in array ARR. */
97 #define COSTS_OF_ALLOCNO(arr, num) \
98 ((struct costs *) ((char *) (arr) + (num) * struct_costs_size))
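/* Illustrative note (an addition, not from the original sources):
   `struct costs' is allocated with a trailing array whose real length is
   only known at run time, so records in the ALLOCNO_COSTS and TOTAL_COSTS
   arrays are STRUCT_COSTS_SIZE bytes apart rather than
   sizeof (struct costs).  For example, with three cost classes the record
   of allocno 5 starts at (char *) allocno_costs + 5 * struct_costs_size,
   and COSTS_OF_ALLOCNO (allocno_costs, 5)->cost[2] reads its cost for the
   third cost class.  */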
100 /* Record register class preferences of each allocno.  A null value
101    means no preferences; this happens on the 1st iteration of the cost
102    calculation.  */
103 static enum reg_class *allocno_pref;
105 /* Allocated buffers for allocno_pref. */
106 static enum reg_class *allocno_pref_buffer;
108 /* Execution frequency of the current insn. */
109 static int frequency;
113 /* Compute the cost of loading X into (if TO_P is TRUE) or from (if
114 TO_P is FALSE) a register of class RCLASS in mode MODE. X must not
115 be a pseudo register. */
116 static int
117 copy_cost (rtx x, enum machine_mode mode, enum reg_class rclass, bool to_p,
118 secondary_reload_info *prev_sri)
120 secondary_reload_info sri;
121 enum reg_class secondary_class = NO_REGS;
123 /* If X is a SCRATCH, there is actually nothing to move since we are
124 assuming optimal allocation. */
125 if (GET_CODE (x) == SCRATCH)
126 return 0;
128 /* Get the class we will actually use for a reload. */
129 rclass = PREFERRED_RELOAD_CLASS (x, rclass);
131 /* If we need a secondary reload for an intermediate, the cost is
132 that to load the input into the intermediate register, then to
133 copy it. */
134 sri.prev_sri = prev_sri;
135 sri.extra_cost = 0;
136 secondary_class = targetm.secondary_reload (to_p, x, rclass, mode, &sri);
138 if (ira_register_move_cost[mode] == NULL)
139 ira_init_register_move_cost (mode);
141 if (secondary_class != NO_REGS)
142 return (move_cost[mode][secondary_class][rclass] + sri.extra_cost
143 + copy_cost (x, mode, secondary_class, to_p, &sri));
145 /* For memory, use the memory move cost, for (hard) registers, use
146 the cost to move between the register classes, and use 2 for
147 everything else (constants). */
148 if (MEM_P (x) || rclass == NO_REGS)
149 return sri.extra_cost + ira_memory_move_cost[mode][rclass][to_p != 0];
150 else if (REG_P (x))
151 return
152 (sri.extra_cost + move_cost[mode][REGNO_REG_CLASS (REGNO (x))][rclass]);
153 else
154 /* If this is a constant, we may eventually want to call rtx_cost
155 here. */
156 return sri.extra_cost + COSTS_N_INSNS (1);
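/* Worked example (a sketch added for illustration): if moving X into a
   register of RCLASS needs an intermediate register of SECONDARY_CLASS,
   the result above is move_cost[mode][secondary_class][rclass] plus any
   target-supplied sri.extra_cost plus a recursive copy_cost of getting X
   into the intermediate class.  Without a secondary reload the cost
   degenerates to a memory move cost for MEM or NO_REGS, a register move
   cost for a hard register, and COSTS_N_INSNS (1) for constants.  */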
161 /* Record the cost of using memory or hard registers of various
162 classes for the operands in INSN.
164 N_ALTS is the number of alternatives.
165 N_OPS is the number of operands.
166 OPS is an array of the operands.
167 MODES are the modes of the operands, in case any are VOIDmode.
168 CONSTRAINTS are the constraints to use for the operands. This array
169 is modified by this procedure.
171 This procedure works alternative by alternative. For each
172 alternative we assume that we will be able to allocate all allocnos
173 to their ideal register class and calculate the cost of using that
174 alternative. Then we compute, for each operand that is a
175 pseudo-register, the cost of having the allocno allocated to each
176 register class and using it in that alternative. To this cost is
177 added the cost of the alternative.
179 The cost of each class for this insn is its lowest cost among all
180 the alternatives. */
181 static void
182 record_reg_classes (int n_alts, int n_ops, rtx *ops,
183 enum machine_mode *modes, const char **constraints,
184 rtx insn, struct costs **op_costs,
185 enum reg_class *allocno_pref)
187 int alt;
188 int i, j, k;
189 rtx set;
191 /* Process each alternative, each time minimizing an operand's cost
192 with the cost for each operand in that alternative. */
193 for (alt = 0; alt < n_alts; alt++)
195 enum reg_class classes[MAX_RECOG_OPERANDS];
196 int allows_mem[MAX_RECOG_OPERANDS];
197 int rclass;
198 int alt_fail = 0;
199 int alt_cost = 0, op_cost_add;
201 for (i = 0; i < n_ops; i++)
203 unsigned char c;
204 const char *p = constraints[i];
205 rtx op = ops[i];
206 enum machine_mode mode = modes[i];
207 int allows_addr = 0;
208 int win = 0;
210 /* Initially show we know nothing about the register class. */
211 classes[i] = NO_REGS;
212 allows_mem[i] = 0;
214 /* If this operand has no constraints at all, we can
215 conclude nothing about it since anything is valid. */
216 if (*p == 0)
218 if (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER)
219 memset (this_op_costs[i], 0, struct_costs_size);
220 continue;
223 /* If this alternative is only relevant when this operand
224 matches a previous operand, we do different things
 225       depending on whether this operand is an allocno-reg or not.
226 We must process any modifiers for the operand before we
227 can make this test. */
228 while (*p == '%' || *p == '=' || *p == '+' || *p == '&')
229 p++;
231 if (p[0] >= '0' && p[0] <= '0' + i && (p[1] == ',' || p[1] == 0))
233 /* Copy class and whether memory is allowed from the
234 matching alternative. Then perform any needed cost
235 computations and/or adjustments. */
236 j = p[0] - '0';
237 classes[i] = classes[j];
238 allows_mem[i] = allows_mem[j];
240 if (! REG_P (op) || REGNO (op) < FIRST_PSEUDO_REGISTER)
242 /* If this matches the other operand, we have no
243 added cost and we win. */
244 if (rtx_equal_p (ops[j], op))
245 win = 1;
246 /* If we can put the other operand into a register,
247 add to the cost of this alternative the cost to
248 copy this operand to the register used for the
249 other operand. */
250 else if (classes[j] != NO_REGS)
252 alt_cost += copy_cost (op, mode, classes[j], 1, NULL);
253 win = 1;
256 else if (! REG_P (ops[j])
257 || REGNO (ops[j]) < FIRST_PSEUDO_REGISTER)
259 /* This op is an allocno but the one it matches is
260 not. */
262 /* If we can't put the other operand into a
263 register, this alternative can't be used. */
265 if (classes[j] == NO_REGS)
266 alt_fail = 1;
267 /* Otherwise, add to the cost of this alternative
268 the cost to copy the other operand to the hard
269 register used for this operand. */
270 else
271 alt_cost += copy_cost (ops[j], mode, classes[j], 1, NULL);
273 else
275 /* The costs of this operand are not the same as the
276 other operand since move costs are not symmetric.
277 Moreover, if we cannot tie them, this alternative
278 needs to do a copy, which is one insn. */
279 struct costs *pp = this_op_costs[i];
281 if (ira_register_move_cost[mode] == NULL)
282 ira_init_register_move_cost (mode);
284 for (k = 0; k < cost_classes_num; k++)
286 rclass = cost_classes[k];
287 pp->cost[k]
288 = ((recog_data.operand_type[i] != OP_OUT
289 ? ira_may_move_in_cost[mode][rclass]
290 [classes[i]] * frequency : 0)
291 + (recog_data.operand_type[i] != OP_IN
292 ? ira_may_move_out_cost[mode][classes[i]]
293 [rclass] * frequency : 0));
296 /* If the alternative actually allows memory, make
297 things a bit cheaper since we won't need an extra
298 insn to load it. */
299 pp->mem_cost
300 = ((recog_data.operand_type[i] != OP_IN
301 ? ira_memory_move_cost[mode][classes[i]][0] : 0)
302 + (recog_data.operand_type[i] != OP_OUT
303 ? ira_memory_move_cost[mode][classes[i]][1] : 0)
304 - allows_mem[i]) * frequency;
305 /* If we have assigned a class to this allocno in our
306 first pass, add a cost to this alternative
307 corresponding to what we would add if this allocno
308 were not in the appropriate class. We could use
 309            the cover class here, but that would be a less accurate
 310            approximation.  */
311 if (allocno_pref)
313 enum reg_class pref_class
314 = allocno_pref[ALLOCNO_NUM
315 (ira_curr_regno_allocno_map
316 [REGNO (op)])];
318 if (pref_class == NO_REGS)
319 alt_cost
320 += ((recog_data.operand_type[i] != OP_IN
321 ? ira_memory_move_cost[mode][classes[i]][0]
322 : 0)
323 + (recog_data.operand_type[i] != OP_OUT
324 ? ira_memory_move_cost[mode][classes[i]][1]
325 : 0));
326 else if (ira_reg_class_intersect
327 [pref_class][classes[i]] == NO_REGS)
328 alt_cost += (ira_register_move_cost
329 [mode][pref_class][classes[i]]);
331 if (REGNO (ops[i]) != REGNO (ops[j])
332 && ! find_reg_note (insn, REG_DEAD, op))
333 alt_cost += 2;
335 /* This is in place of ordinary cost computation for
336 this operand, so skip to the end of the
337 alternative (should be just one character). */
338 while (*p && *p++ != ',')
341 constraints[i] = p;
342 continue;
346 /* Scan all the constraint letters. See if the operand
347 matches any of the constraints. Collect the valid
348 register classes and see if this operand accepts
349 memory. */
350 while ((c = *p))
352 switch (c)
354 case ',':
355 break;
356 case '*':
357 /* Ignore the next letter for this pass. */
358 c = *++p;
359 break;
361 case '?':
362 alt_cost += 2;
363 case '!': case '#': case '&':
364 case '0': case '1': case '2': case '3': case '4':
365 case '5': case '6': case '7': case '8': case '9':
366 break;
368 case 'p':
369 allows_addr = 1;
370 win = address_operand (op, GET_MODE (op));
371 /* We know this operand is an address, so we want it
372 to be allocated to a register that can be the
373 base of an address, i.e. BASE_REG_CLASS. */
374 classes[i]
375 = ira_reg_class_union[classes[i]]
376 [base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
377 break;
379 case 'm': case 'o': case 'V':
380 /* It doesn't seem worth distinguishing between
381 offsettable and non-offsettable addresses
382 here. */
383 allows_mem[i] = 1;
384 if (MEM_P (op))
385 win = 1;
386 break;
388 case '<':
389 if (MEM_P (op)
390 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
391 || GET_CODE (XEXP (op, 0)) == POST_DEC))
392 win = 1;
393 break;
395 case '>':
396 if (MEM_P (op)
397 && (GET_CODE (XEXP (op, 0)) == PRE_INC
398 || GET_CODE (XEXP (op, 0)) == POST_INC))
399 win = 1;
400 break;
402 case 'E':
403 case 'F':
404 if (GET_CODE (op) == CONST_DOUBLE
405 || (GET_CODE (op) == CONST_VECTOR
406 && (GET_MODE_CLASS (GET_MODE (op))
407 == MODE_VECTOR_FLOAT)))
408 win = 1;
409 break;
411 case 'G':
412 case 'H':
413 if (GET_CODE (op) == CONST_DOUBLE
414 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
415 win = 1;
416 break;
418 case 's':
419 if (GET_CODE (op) == CONST_INT
420 || (GET_CODE (op) == CONST_DOUBLE
421 && GET_MODE (op) == VOIDmode))
422 break;
424 case 'i':
425 if (CONSTANT_P (op)
426 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
427 win = 1;
428 break;
430 case 'n':
431 if (GET_CODE (op) == CONST_INT
432 || (GET_CODE (op) == CONST_DOUBLE
433 && GET_MODE (op) == VOIDmode))
434 win = 1;
435 break;
437 case 'I':
438 case 'J':
439 case 'K':
440 case 'L':
441 case 'M':
442 case 'N':
443 case 'O':
444 case 'P':
445 if (GET_CODE (op) == CONST_INT
446 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
447 win = 1;
448 break;
450 case 'X':
451 win = 1;
452 break;
454 case 'g':
455 if (MEM_P (op)
456 || (CONSTANT_P (op)
457 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))))
458 win = 1;
459 allows_mem[i] = 1;
460 case 'r':
461 classes[i] = ira_reg_class_union[classes[i]][GENERAL_REGS];
462 break;
464 default:
465 if (REG_CLASS_FROM_CONSTRAINT (c, p) != NO_REGS)
466 classes[i] = ira_reg_class_union[classes[i]]
467 [REG_CLASS_FROM_CONSTRAINT (c, p)];
468 #ifdef EXTRA_CONSTRAINT_STR
469 else if (EXTRA_CONSTRAINT_STR (op, c, p))
470 win = 1;
472 if (EXTRA_MEMORY_CONSTRAINT (c, p))
474 /* Every MEM can be reloaded to fit. */
475 allows_mem[i] = 1;
476 if (MEM_P (op))
477 win = 1;
479 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
481 /* Every address can be reloaded to fit. */
482 allows_addr = 1;
483 if (address_operand (op, GET_MODE (op)))
484 win = 1;
485 /* We know this operand is an address, so we
486 want it to be allocated to a hard register
487 that can be the base of an address,
488 i.e. BASE_REG_CLASS. */
489 classes[i]
490 = ira_reg_class_union[classes[i]]
491 [base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
493 #endif
494 break;
496 p += CONSTRAINT_LEN (c, p);
497 if (c == ',')
498 break;
501 constraints[i] = p;
503 /* How we account for this operand now depends on whether it
504 is a pseudo register or not. If it is, we first check if
505 any register classes are valid. If not, we ignore this
506 alternative, since we want to assume that all allocnos get
507 allocated for register preferencing. If some register
508 class is valid, compute the costs of moving the allocno
509 into that class. */
510 if (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER)
512 if (classes[i] == NO_REGS)
514 /* We must always fail if the operand is a REG, but
515 we did not find a suitable class.
517 Otherwise we may perform an uninitialized read
518 from this_op_costs after the `continue' statement
519 below. */
520 alt_fail = 1;
522 else
524 struct costs *pp = this_op_costs[i];
526 if (ira_register_move_cost[mode] == NULL)
527 ira_init_register_move_cost (mode);
529 for (k = 0; k < cost_classes_num; k++)
531 rclass = cost_classes[k];
532 pp->cost[k]
533 = ((recog_data.operand_type[i] != OP_OUT
534 ? ira_may_move_in_cost[mode][rclass]
535 [classes[i]] * frequency : 0)
536 + (recog_data.operand_type[i] != OP_IN
537 ? ira_may_move_out_cost[mode][classes[i]]
538 [rclass] * frequency : 0));
541 /* If the alternative actually allows memory, make
542 things a bit cheaper since we won't need an extra
543 insn to load it. */
544 pp->mem_cost
545 = ((recog_data.operand_type[i] != OP_IN
546 ? ira_memory_move_cost[mode][classes[i]][0] : 0)
547 + (recog_data.operand_type[i] != OP_OUT
548 ? ira_memory_move_cost[mode][classes[i]][1] : 0)
549 - allows_mem[i]) * frequency;
550 /* If we have assigned a class to this allocno in our
551 first pass, add a cost to this alternative
552 corresponding to what we would add if this allocno
553 were not in the appropriate class. We could use
 554            the cover class here, but that would be a less accurate
 555            approximation.  */
556 if (allocno_pref)
558 enum reg_class pref_class
559 = allocno_pref[ALLOCNO_NUM
560 (ira_curr_regno_allocno_map
561 [REGNO (op)])];
563 if (pref_class == NO_REGS)
564 alt_cost
565 += ((recog_data.operand_type[i] != OP_IN
566 ? ira_memory_move_cost[mode][classes[i]][0]
567 : 0)
568 + (recog_data.operand_type[i] != OP_OUT
569 ? ira_memory_move_cost[mode][classes[i]][1]
570 : 0));
571 else if (ira_reg_class_intersect[pref_class][classes[i]]
572 == NO_REGS)
573 alt_cost += (ira_register_move_cost
574 [mode][pref_class][classes[i]]);
579 /* Otherwise, if this alternative wins, either because we
580 have already determined that or if we have a hard
581 register of the proper class, there is no cost for this
582 alternative. */
583 else if (win || (REG_P (op)
584 && reg_fits_class_p (op, classes[i],
585 0, GET_MODE (op))))
588 /* If registers are valid, the cost of this alternative
589 includes copying the object to and/or from a
590 register. */
591 else if (classes[i] != NO_REGS)
593 if (recog_data.operand_type[i] != OP_OUT)
594 alt_cost += copy_cost (op, mode, classes[i], 1, NULL);
596 if (recog_data.operand_type[i] != OP_IN)
597 alt_cost += copy_cost (op, mode, classes[i], 0, NULL);
599 /* The only other way this alternative can be used is if
600 this is a constant that could be placed into memory. */
601 else if (CONSTANT_P (op) && (allows_addr || allows_mem[i]))
602 alt_cost += ira_memory_move_cost[mode][classes[i]][1];
603 else
604 alt_fail = 1;
607 if (alt_fail)
608 continue;
610 op_cost_add = alt_cost * frequency;
611 /* Finally, update the costs with the information we've
612 calculated about this alternative. */
613 for (i = 0; i < n_ops; i++)
614 if (REG_P (ops[i]) && REGNO (ops[i]) >= FIRST_PSEUDO_REGISTER)
616 struct costs *pp = op_costs[i], *qq = this_op_costs[i];
617 int scale = 1 + (recog_data.operand_type[i] == OP_INOUT);
619 pp->mem_cost = MIN (pp->mem_cost,
620 (qq->mem_cost + op_cost_add) * scale);
622 for (k = 0; k < cost_classes_num; k++)
623 pp->cost[k]
624 = MIN (pp->cost[k], (qq->cost[k] + op_cost_add) * scale);
628 /* If this insn is a single set copying operand 1 to operand 0 and
629 one operand is an allocno with the other a hard reg or an allocno
630 that prefers a hard register that is in its own register class
631 then we may want to adjust the cost of that register class to -1.
 633    Avoid the adjustment if the source does not die, to avoid
 634    stressing the register allocator by preferencing two colliding
 635    registers into a single class.
637 Also avoid the adjustment if a copy between hard registers of the
638 class is expensive (ten times the cost of a default copy is
639 considered arbitrarily expensive). This avoids losing when the
640 preferred class is very expensive as the source of a copy
641 instruction. */
642 if ((set = single_set (insn)) != 0
643 && ops[0] == SET_DEST (set) && ops[1] == SET_SRC (set)
644 && REG_P (ops[0]) && REG_P (ops[1])
645 && find_regno_note (insn, REG_DEAD, REGNO (ops[1])))
646 for (i = 0; i <= 1; i++)
647 if (REGNO (ops[i]) >= FIRST_PSEUDO_REGISTER)
649 unsigned int regno = REGNO (ops[!i]);
650 enum machine_mode mode = GET_MODE (ops[!i]);
651 int rclass;
652 unsigned int nr;
654 if (regno < FIRST_PSEUDO_REGISTER)
655 for (k = 0; k < cost_classes_num; k++)
657 rclass = cost_classes[k];
658 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
659 && (reg_class_size[rclass]
660 == (unsigned) CLASS_MAX_NREGS (rclass, mode)))
662 if (reg_class_size[rclass] == 1)
663 op_costs[i]->cost[k] = -frequency;
664 else
666 for (nr = 0;
667 nr < (unsigned) hard_regno_nregs[regno][mode];
668 nr++)
669 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass],
670 regno + nr))
671 break;
673 if (nr == (unsigned) hard_regno_nregs[regno][mode])
674 op_costs[i]->cost[k] = -frequency;
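/* Illustrative example (an addition, not from the original sources): for
   a pseudo operand with the constraint "rm", the single alternative
   unions GENERAL_REGS into classes[i] and sets allows_mem[i].  Each
   pp->cost[k] is then filled from ira_may_move_in_cost and
   ira_may_move_out_cost scaled by the insn frequency, while pp->mem_cost
   subtracts allows_mem[i] before the frequency scaling because no extra
   load insn would be needed.  At the end of the alternative loop each
   cost is folded into op_costs[i] as
   MIN (pp->cost[k], (qq->cost[k] + alt_cost * frequency) * scale), with
   scale being 2 for an OP_INOUT operand.  */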
683 /* Wrapper around REGNO_OK_FOR_INDEX_P, to allow pseudo registers. */
684 static inline bool
685 ok_for_index_p_nonstrict (rtx reg)
687 unsigned regno = REGNO (reg);
689 return regno >= FIRST_PSEUDO_REGISTER || REGNO_OK_FOR_INDEX_P (regno);
692 /* A version of regno_ok_for_base_p for use here, when all
693 pseudo-registers should count as OK. Arguments as for
694 regno_ok_for_base_p. */
695 static inline bool
696 ok_for_base_p_nonstrict (rtx reg, enum machine_mode mode,
697 enum rtx_code outer_code, enum rtx_code index_code)
699 unsigned regno = REGNO (reg);
701 if (regno >= FIRST_PSEUDO_REGISTER)
702 return true;
703 return ok_for_base_p_1 (regno, mode, outer_code, index_code);
706 /* Record the pseudo registers we must reload into hard registers in a
707 subexpression of a memory address, X.
709 If CONTEXT is 0, we are looking at the base part of an address,
710 otherwise we are looking at the index part.
712 MODE is the mode of the memory reference; OUTER_CODE and INDEX_CODE
713 give the context that the rtx appears in. These three arguments
714 are passed down to base_reg_class.
716 SCALE is twice the amount to multiply the cost by (it is twice so
717 we can represent half-cost adjustments). */
718 static void
719 record_address_regs (enum machine_mode mode, rtx x, int context,
720 enum rtx_code outer_code, enum rtx_code index_code,
721 int scale)
723 enum rtx_code code = GET_CODE (x);
724 enum reg_class rclass;
726 if (context == 1)
727 rclass = INDEX_REG_CLASS;
728 else
729 rclass = base_reg_class (mode, outer_code, index_code);
731 switch (code)
733 case CONST_INT:
734 case CONST:
735 case CC0:
736 case PC:
737 case SYMBOL_REF:
738 case LABEL_REF:
739 return;
741 case PLUS:
742 /* When we have an address that is a sum, we must determine
743 whether registers are "base" or "index" regs. If there is a
744 sum of two registers, we must choose one to be the "base".
745 Luckily, we can use the REG_POINTER to make a good choice
746 most of the time. We only need to do this on machines that
747 can have two registers in an address and where the base and
748 index register classes are different.
750 ??? This code used to set REGNO_POINTER_FLAG in some cases,
751 but that seems bogus since it should only be set when we are
752 sure the register is being used as a pointer. */
754 rtx arg0 = XEXP (x, 0);
755 rtx arg1 = XEXP (x, 1);
756 enum rtx_code code0 = GET_CODE (arg0);
757 enum rtx_code code1 = GET_CODE (arg1);
759 /* Look inside subregs. */
760 if (code0 == SUBREG)
761 arg0 = SUBREG_REG (arg0), code0 = GET_CODE (arg0);
762 if (code1 == SUBREG)
763 arg1 = SUBREG_REG (arg1), code1 = GET_CODE (arg1);
765 /* If this machine only allows one register per address, it
766 must be in the first operand. */
767 if (MAX_REGS_PER_ADDRESS == 1)
768 record_address_regs (mode, arg0, 0, PLUS, code1, scale);
770 /* If index and base registers are the same on this machine,
771 just record registers in any non-constant operands. We
772 assume here, as well as in the tests below, that all
773 addresses are in canonical form. */
774 else if (INDEX_REG_CLASS == base_reg_class (VOIDmode, PLUS, SCRATCH))
776 record_address_regs (mode, arg0, context, PLUS, code1, scale);
777 if (! CONSTANT_P (arg1))
778 record_address_regs (mode, arg1, context, PLUS, code0, scale);
781 /* If the second operand is a constant integer, it doesn't
782 change what class the first operand must be. */
783 else if (code1 == CONST_INT || code1 == CONST_DOUBLE)
784 record_address_regs (mode, arg0, context, PLUS, code1, scale);
785 /* If the second operand is a symbolic constant, the first
786 operand must be an index register. */
787 else if (code1 == SYMBOL_REF || code1 == CONST || code1 == LABEL_REF)
788 record_address_regs (mode, arg0, 1, PLUS, code1, scale);
789 /* If both operands are registers but one is already a hard
790 register of index or reg-base class, give the other the
791 class that the hard register is not. */
792 else if (code0 == REG && code1 == REG
793 && REGNO (arg0) < FIRST_PSEUDO_REGISTER
794 && (ok_for_base_p_nonstrict (arg0, mode, PLUS, REG)
795 || ok_for_index_p_nonstrict (arg0)))
796 record_address_regs (mode, arg1,
797 ok_for_base_p_nonstrict (arg0, mode, PLUS, REG)
798 ? 1 : 0,
799 PLUS, REG, scale);
800 else if (code0 == REG && code1 == REG
801 && REGNO (arg1) < FIRST_PSEUDO_REGISTER
802 && (ok_for_base_p_nonstrict (arg1, mode, PLUS, REG)
803 || ok_for_index_p_nonstrict (arg1)))
804 record_address_regs (mode, arg0,
805 ok_for_base_p_nonstrict (arg1, mode, PLUS, REG)
806 ? 1 : 0,
807 PLUS, REG, scale);
808 /* If one operand is known to be a pointer, it must be the
809 base with the other operand the index. Likewise if the
810 other operand is a MULT. */
811 else if ((code0 == REG && REG_POINTER (arg0)) || code1 == MULT)
813 record_address_regs (mode, arg0, 0, PLUS, code1, scale);
814 record_address_regs (mode, arg1, 1, PLUS, code0, scale);
816 else if ((code1 == REG && REG_POINTER (arg1)) || code0 == MULT)
818 record_address_regs (mode, arg0, 1, PLUS, code1, scale);
819 record_address_regs (mode, arg1, 0, PLUS, code0, scale);
821 /* Otherwise, count equal chances that each might be a base or
822 index register. This case should be rare. */
823 else
825 record_address_regs (mode, arg0, 0, PLUS, code1, scale / 2);
826 record_address_regs (mode, arg0, 1, PLUS, code1, scale / 2);
827 record_address_regs (mode, arg1, 0, PLUS, code0, scale / 2);
828 record_address_regs (mode, arg1, 1, PLUS, code0, scale / 2);
831 break;
833 /* Double the importance of an allocno that is incremented or
834 decremented, since it would take two extra insns if it ends
835 up in the wrong place. */
836 case POST_MODIFY:
837 case PRE_MODIFY:
838 record_address_regs (mode, XEXP (x, 0), 0, code,
839 GET_CODE (XEXP (XEXP (x, 1), 1)), 2 * scale);
840 if (REG_P (XEXP (XEXP (x, 1), 1)))
841 record_address_regs (mode, XEXP (XEXP (x, 1), 1), 1, code, REG,
842 2 * scale);
843 break;
845 case POST_INC:
846 case PRE_INC:
847 case POST_DEC:
848 case PRE_DEC:
849 /* Double the importance of an allocno that is incremented or
850 decremented, since it would take two extra insns if it ends
851 up in the wrong place. If the operand is a pseudo-register,
852 show it is being used in an INC_DEC context. */
853 #ifdef FORBIDDEN_INC_DEC_CLASSES
854 if (REG_P (XEXP (x, 0))
855 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER)
856 in_inc_dec[ALLOCNO_NUM (ira_curr_regno_allocno_map
857 [REGNO (XEXP (x, 0))])] = true;
858 #endif
859 record_address_regs (mode, XEXP (x, 0), 0, code, SCRATCH, 2 * scale);
860 break;
862 case REG:
864 struct costs *pp;
865 int i, k;
867 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
868 break;
870 pp = COSTS_OF_ALLOCNO (allocno_costs,
871 ALLOCNO_NUM (ira_curr_regno_allocno_map
872 [REGNO (x)]));
873 pp->mem_cost += (ira_memory_move_cost[Pmode][rclass][1] * scale) / 2;
874 if (ira_register_move_cost[Pmode] == NULL)
875 ira_init_register_move_cost (Pmode);
876 for (k = 0; k < cost_classes_num; k++)
878 i = cost_classes[k];
879 pp->cost[k]
880 += (ira_may_move_in_cost[Pmode][i][rclass] * scale) / 2;
883 break;
885 default:
887 const char *fmt = GET_RTX_FORMAT (code);
888 int i;
889 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
890 if (fmt[i] == 'e')
891 record_address_regs (mode, XEXP (x, i), context, code, SCRATCH,
892 scale);
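/* Illustrative examples (additions, not from the original sources): for
   the address (plus (reg A) (const_int 4)) the constant does not change
   the required class, so A is recorded in the context of the whole
   address (the base register class when the PLUS is a complete memory
   address).  For (plus (reg A) (reg B)) where neither register is a
   suitable hard register, has REG_POINTER set, or sits under a MULT,
   each of A and B is recorded once as a base and once as an index with
   half of SCALE, reflecting that either assignment is possible.  */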
899 /* Calculate the costs of insn operands. */
900 static void
901 record_operand_costs (rtx insn, struct costs **op_costs,
902 enum reg_class *allocno_pref)
904 const char *constraints[MAX_RECOG_OPERANDS];
905 enum machine_mode modes[MAX_RECOG_OPERANDS];
906 int i;
908 for (i = 0; i < recog_data.n_operands; i++)
910 constraints[i] = recog_data.constraints[i];
911 modes[i] = recog_data.operand_mode[i];
914 /* If we get here, we are set up to record the costs of all the
915 operands for this insn. Start by initializing the costs. Then
916 handle any address registers. Finally record the desired classes
917 for any allocnos, doing it twice if some pair of operands are
918 commutative. */
919 for (i = 0; i < recog_data.n_operands; i++)
921 memcpy (op_costs[i], init_cost, struct_costs_size);
923 if (GET_CODE (recog_data.operand[i]) == SUBREG)
924 recog_data.operand[i] = SUBREG_REG (recog_data.operand[i]);
926 if (MEM_P (recog_data.operand[i]))
927 record_address_regs (GET_MODE (recog_data.operand[i]),
928 XEXP (recog_data.operand[i], 0),
929 0, MEM, SCRATCH, frequency * 2);
930 else if (constraints[i][0] == 'p'
931 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0],
932 constraints[i]))
933 record_address_regs (VOIDmode, recog_data.operand[i], 0, ADDRESS,
934 SCRATCH, frequency * 2);
937 /* Check for commutative in a separate loop so everything will have
938 been initialized. We must do this even if one operand is a
939 constant--see addsi3 in m68k.md. */
940 for (i = 0; i < (int) recog_data.n_operands - 1; i++)
941 if (constraints[i][0] == '%')
943 const char *xconstraints[MAX_RECOG_OPERANDS];
944 int j;
946 /* Handle commutative operands by swapping the constraints.
947 We assume the modes are the same. */
948 for (j = 0; j < recog_data.n_operands; j++)
949 xconstraints[j] = constraints[j];
951 xconstraints[i] = constraints[i+1];
952 xconstraints[i+1] = constraints[i];
953 record_reg_classes (recog_data.n_alternatives, recog_data.n_operands,
954 recog_data.operand, modes,
955 xconstraints, insn, op_costs, allocno_pref);
957 record_reg_classes (recog_data.n_alternatives, recog_data.n_operands,
958 recog_data.operand, modes,
959 constraints, insn, op_costs, allocno_pref);
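/* Note (an illustration, not from the original sources): for an insn such
   as m68k's addsi3, whose constraint string starts with '%', the loop
   above calls record_reg_classes a second time with the constraints of
   operands I and I+1 swapped, so the recorded class costs cover both
   orderings of the commutative pair.  */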
964 /* Process one insn INSN. Scan it and record each time it would save
 965    code to put a certain allocno in a certain class.  Return the last
966 insn processed, so that the scan can be continued from there. */
967 static rtx
968 scan_one_insn (rtx insn)
970 enum rtx_code pat_code;
971 rtx set, note;
972 int i, k;
974 if (!INSN_P (insn))
975 return insn;
977 pat_code = GET_CODE (PATTERN (insn));
978 if (pat_code == USE || pat_code == CLOBBER || pat_code == ASM_INPUT
979 || pat_code == ADDR_VEC || pat_code == ADDR_DIFF_VEC)
980 return insn;
982 set = single_set (insn);
983 extract_insn (insn);
985 /* If this insn loads a parameter from its stack slot, then it
986 represents a savings, rather than a cost, if the parameter is
987 stored in memory. Record this fact. */
988 if (set != 0 && REG_P (SET_DEST (set)) && MEM_P (SET_SRC (set))
989 && (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != NULL_RTX
990 && MEM_P (XEXP (note, 0)))
992 COSTS_OF_ALLOCNO (allocno_costs,
993 ALLOCNO_NUM (ira_curr_regno_allocno_map
994 [REGNO (SET_DEST (set))]))->mem_cost
995 -= (ira_memory_move_cost[GET_MODE (SET_DEST (set))][GENERAL_REGS][1]
996 * frequency);
997 record_address_regs (GET_MODE (SET_SRC (set)), XEXP (SET_SRC (set), 0),
998 0, MEM, SCRATCH, frequency * 2);
1001 record_operand_costs (insn, op_costs, allocno_pref);
1003 /* Now add the cost for each operand to the total costs for its
1004 allocno. */
1005 for (i = 0; i < recog_data.n_operands; i++)
1006 if (REG_P (recog_data.operand[i])
1007 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER)
1009 int regno = REGNO (recog_data.operand[i]);
1010 struct costs *p
1011 = COSTS_OF_ALLOCNO (allocno_costs,
1012 ALLOCNO_NUM (ira_curr_regno_allocno_map[regno]));
1013 struct costs *q = op_costs[i];
1015 p->mem_cost += q->mem_cost;
1016 for (k = 0; k < cost_classes_num; k++)
1017 p->cost[k] += q->cost[k];
1020 return insn;
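/* Illustrative note (an addition, not from the original sources): for an
   insn (set (reg 100) (mem ...)) whose REG_EQUIV note refers to the same
   stack slot, the code above subtracts
   ira_memory_move_cost[mode][GENERAL_REGS][1] * frequency from the memory
   cost of allocno 100, so keeping the parameter in its slot is counted as
   a saving rather than a cost.  */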
 1025 /* Print allocno costs to file F.  */
1026 static void
1027 print_costs (FILE *f)
1029 int k;
1030 ira_allocno_t a;
1031 ira_allocno_iterator ai;
1033 fprintf (f, "\n");
1034 FOR_EACH_ALLOCNO (a, ai)
1036 int i, rclass;
1037 basic_block bb;
1038 int regno = ALLOCNO_REGNO (a);
1040 i = ALLOCNO_NUM (a);
1041 fprintf (f, " a%d(r%d,", i, regno);
1042 if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
1043 fprintf (f, "b%d", bb->index);
1044 else
1045 fprintf (f, "l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop->num);
1046 fprintf (f, ") costs:");
1047 for (k = 0; k < cost_classes_num; k++)
1049 rclass = cost_classes[k];
1050 if (contains_reg_of_mode[rclass][PSEUDO_REGNO_MODE (regno)]
1051 #ifdef FORBIDDEN_INC_DEC_CLASSES
1052 && (! in_inc_dec[i] || ! forbidden_inc_dec_class[rclass])
1053 #endif
1054 #ifdef CANNOT_CHANGE_MODE_CLASS
1055 && ! invalid_mode_change_p (regno, (enum reg_class) rclass,
1056 PSEUDO_REGNO_MODE (regno))
1057 #endif
1060 fprintf (f, " %s:%d", reg_class_names[rclass],
1061 COSTS_OF_ALLOCNO (allocno_costs, i)->cost[k]);
1062 if (flag_ira_algorithm == IRA_ALGORITHM_REGIONAL
1063 || flag_ira_algorithm == IRA_ALGORITHM_MIXED)
1064 fprintf (f, ",%d", COSTS_OF_ALLOCNO (total_costs, i)->cost[k]);
1067 fprintf (f, " MEM:%i\n", COSTS_OF_ALLOCNO (allocno_costs, i)->mem_cost);
1071 /* Traverse the BB represented by LOOP_TREE_NODE to update the allocno
1072 costs. */
1073 static void
1074 process_bb_node_for_costs (ira_loop_tree_node_t loop_tree_node)
1076 basic_block bb;
1077 rtx insn;
1079 bb = loop_tree_node->bb;
1080 if (bb == NULL)
1081 return;
1082 frequency = REG_FREQ_FROM_BB (bb);
1083 if (frequency == 0)
1084 frequency = 1;
1085 FOR_BB_INSNS (bb, insn)
1086 insn = scan_one_insn (insn);
1089 /* Find costs of register classes and memory for allocnos and their
1090 best costs. */
1091 static void
1092 find_allocno_class_costs (void)
1094 int i, k;
1095 int pass;
1096 basic_block bb;
1098 init_recog ();
1099 #ifdef FORBIDDEN_INC_DEC_CLASSES
1100 in_inc_dec = ira_allocate (sizeof (bool) * ira_allocnos_num);
1101 #endif /* FORBIDDEN_INC_DEC_CLASSES */
1102 allocno_pref = NULL;
1103 /* Normally we scan the insns once and determine the best class to
1104 use for each allocno. However, if -fexpensive-optimizations are
1105 on, we do so twice, the second time using the tentative best
1106 classes to guide the selection. */
1107 for (pass = 0; pass <= flag_expensive_optimizations; pass++)
1109 if (internal_flag_ira_verbose > 0 && ira_dump_file)
1110 fprintf (ira_dump_file, "\nPass %i for finding allocno costs\n\n",
1111 pass);
 1112       /* We could use only cover classes.  Unfortunately that does not
 1113          work well for some targets, where some subclass of a cover
 1114          class is costly and the wrong cover class would be chosen.  */
1115 for (cost_classes_num = 0;
1116 cost_classes_num < ira_important_classes_num;
1117 cost_classes_num++)
1119 cost_classes[cost_classes_num]
1120 = ira_important_classes[cost_classes_num];
1121 cost_class_nums[cost_classes[cost_classes_num]]
1122 = cost_classes_num;
1124 struct_costs_size
1125 = sizeof (struct costs) + sizeof (int) * (cost_classes_num - 1);
1126 /* Zero out our accumulation of the cost of each class for each
1127 allocno. */
1128 memset (allocno_costs, 0, ira_allocnos_num * struct_costs_size);
1129 #ifdef FORBIDDEN_INC_DEC_CLASSES
1130 memset (in_inc_dec, 0, ira_allocnos_num * sizeof (bool));
1131 #endif
1133 /* Scan the instructions and record each time it would save code
1134 to put a certain allocno in a certain class. */
1135 ira_traverse_loop_tree (true, ira_loop_tree_root,
1136 process_bb_node_for_costs, NULL);
1138 memcpy (total_costs, allocno_costs,
1139 max_struct_costs_size * ira_allocnos_num);
1140 if (pass == 0)
1141 allocno_pref = allocno_pref_buffer;
1143 /* Now for each allocno look at how desirable each class is and
1144 find which class is preferred. */
1145 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
1147 ira_allocno_t a, parent_a;
1148 int rclass, a_num, parent_a_num;
1149 ira_loop_tree_node_t parent;
1150 int best_cost;
1151 enum reg_class best, alt_class, common_class;
1152 #ifdef FORBIDDEN_INC_DEC_CLASSES
1153 int inc_dec_p = false;
1154 #endif
1156 if (ira_regno_allocno_map[i] == NULL)
1157 continue;
1158 memset (temp_costs, 0, struct_costs_size);
1159 /* Find cost of all allocnos with the same regno. */
1160 for (a = ira_regno_allocno_map[i];
1161 a != NULL;
1162 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1164 a_num = ALLOCNO_NUM (a);
1165 if ((flag_ira_algorithm == IRA_ALGORITHM_REGIONAL
1166 || flag_ira_algorithm == IRA_ALGORITHM_MIXED)
1167 && (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
1168 && (parent_a = parent->regno_allocno_map[i]) != NULL
1169 /* There are no caps yet. */
1170 && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE (a)->border_allocnos,
1171 ALLOCNO_NUM (a)))
1173 /* Propagate costs to upper levels in the region
1174 tree. */
1175 parent_a_num = ALLOCNO_NUM (parent_a);
1176 for (k = 0; k < cost_classes_num; k++)
1177 COSTS_OF_ALLOCNO (total_costs, parent_a_num)->cost[k]
1178 += COSTS_OF_ALLOCNO (total_costs, a_num)->cost[k];
1179 COSTS_OF_ALLOCNO (total_costs, parent_a_num)->mem_cost
1180 += COSTS_OF_ALLOCNO (total_costs, a_num)->mem_cost;
1182 for (k = 0; k < cost_classes_num; k++)
1183 temp_costs->cost[k]
1184 += COSTS_OF_ALLOCNO (allocno_costs, a_num)->cost[k];
1185 temp_costs->mem_cost
1186 += COSTS_OF_ALLOCNO (allocno_costs, a_num)->mem_cost;
1187 #ifdef FORBIDDEN_INC_DEC_CLASSES
1188 if (in_inc_dec[a_num])
1189 inc_dec_p = true;
1190 #endif
1192 best_cost = (1 << (HOST_BITS_PER_INT - 2)) - 1;
1193 best = ALL_REGS;
1194 alt_class = NO_REGS;
1195 /* Find best common class for all allocnos with the same
1196 regno. */
1197 for (k = 0; k < cost_classes_num; k++)
1199 rclass = cost_classes[k];
1200 /* Ignore classes that are too small for this operand or
1201 invalid for an operand that was auto-incremented. */
1202 if (! contains_reg_of_mode[rclass][PSEUDO_REGNO_MODE (i)]
1203 #ifdef FORBIDDEN_INC_DEC_CLASSES
1204 || (inc_dec_p && forbidden_inc_dec_class[rclass])
1205 #endif
1206 #ifdef CANNOT_CHANGE_MODE_CLASS
1207 || invalid_mode_change_p (i, (enum reg_class) rclass,
1208 PSEUDO_REGNO_MODE (i))
1209 #endif
1211 continue;
1212 if (temp_costs->cost[k] < best_cost)
1214 best_cost = temp_costs->cost[k];
1215 best = (enum reg_class) rclass;
1217 else if (temp_costs->cost[k] == best_cost)
1218 best = ira_reg_class_union[best][rclass];
1219 if (pass == flag_expensive_optimizations
1220 && temp_costs->cost[k] < temp_costs->mem_cost
1221 && (reg_class_size[reg_class_subunion[alt_class][rclass]]
1222 > reg_class_size[alt_class]))
1223 alt_class = reg_class_subunion[alt_class][rclass];
1225 if (pass == flag_expensive_optimizations)
1227 if (best_cost > temp_costs->mem_cost)
1228 best = alt_class = NO_REGS;
1229 else if (best == alt_class)
1230 alt_class = NO_REGS;
1231 setup_reg_classes (i, best, alt_class);
1232 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
1233 fprintf (ira_dump_file,
1234 " r%d: preferred %s, alternative %s\n",
1235 i, reg_class_names[best], reg_class_names[alt_class]);
1237 if (best_cost > temp_costs->mem_cost)
1238 common_class = NO_REGS;
1239 else
1240 /* Make the common class a cover class. Remember all
1241 allocnos with the same regno should have the same cover
1242 class. */
1243 common_class = ira_class_translate[best];
1244 for (a = ira_regno_allocno_map[i];
1245 a != NULL;
1246 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1248 a_num = ALLOCNO_NUM (a);
1249 if (common_class == NO_REGS)
1250 best = NO_REGS;
1251 else
 1253               /* Find the best class which is a subset of the common
 1254                  class.  */
1255 best_cost = (1 << (HOST_BITS_PER_INT - 2)) - 1;
1256 best = ALL_REGS;
1257 for (k = 0; k < cost_classes_num; k++)
1259 rclass = cost_classes[k];
1260 if (! ira_class_subset_p[rclass][common_class])
1261 continue;
1262 /* Ignore classes that are too small for this
1263 operand or invalid for an operand that was
1264 auto-incremented. */
1265 if (! contains_reg_of_mode[rclass][PSEUDO_REGNO_MODE (i)]
1266 #ifdef FORBIDDEN_INC_DEC_CLASSES
1267 || (inc_dec_p && forbidden_inc_dec_class[rclass])
1268 #endif
1269 #ifdef CANNOT_CHANGE_MODE_CLASS
1270 || invalid_mode_change_p (i, (enum reg_class) rclass,
1271 PSEUDO_REGNO_MODE (i))
1272 #endif
1275 else if (COSTS_OF_ALLOCNO (total_costs, a_num)->cost[k]
1276 < best_cost)
1278 best_cost
1279 = COSTS_OF_ALLOCNO (total_costs, a_num)->cost[k];
1280 best = (enum reg_class) rclass;
1282 else if (COSTS_OF_ALLOCNO (total_costs, a_num)->cost[k]
1283 == best_cost)
1284 best = ira_reg_class_union[best][rclass];
1287 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL
1288 && (pass == 0 || allocno_pref[a_num] != best))
1290 fprintf (ira_dump_file, " a%d (r%d,", a_num, i);
1291 if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
1292 fprintf (ira_dump_file, "b%d", bb->index);
1293 else
1294 fprintf (ira_dump_file, "l%d",
1295 ALLOCNO_LOOP_TREE_NODE (a)->loop->num);
1296 fprintf (ira_dump_file, ") best %s, cover %s\n",
1297 reg_class_names[best],
1298 reg_class_names[ira_class_translate[best]]);
1300 allocno_pref[a_num] = best;
1304 if (internal_flag_ira_verbose > 4 && ira_dump_file)
1306 print_costs (ira_dump_file);
1307 fprintf (ira_dump_file,"\n");
1310 #ifdef FORBIDDEN_INC_DEC_CLASSES
1311 ira_free (in_inc_dec);
1312 #endif
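/* Worked example (a sketch added for illustration): suppose the
   accumulated temp_costs for a pseudo are 2000 for GENERAL_REGS, 6000 for
   FLOAT_REGS and a mem_cost of 4000.  BEST becomes GENERAL_REGS, and on
   the last pass FLOAT_REGS is not merged into ALT_CLASS because its cost
   is not below the memory cost; had the best cost exceeded the memory
   cost, both BEST and ALT_CLASS would have been reset to NO_REGS.  */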
1317 /* Process moves involving hard regs to modify allocno hard register
 1318    costs.  We can do this only after determining the allocno cover
 1319    classes.  If a hard register forms a register class, then moves
 1320    with that hard register are already taken into account in the
 1321    class costs for the allocno.  */
1322 static void
1323 process_bb_node_for_hard_reg_moves (ira_loop_tree_node_t loop_tree_node)
1325 int i, freq, cost, src_regno, dst_regno, hard_regno;
1326 bool to_p;
1327 ira_allocno_t a;
1328 enum reg_class rclass, hard_reg_class;
1329 enum machine_mode mode;
1330 basic_block bb;
1331 rtx insn, set, src, dst;
1333 bb = loop_tree_node->bb;
1334 if (bb == NULL)
1335 return;
1336 freq = REG_FREQ_FROM_BB (bb);
1337 if (freq == 0)
1338 freq = 1;
1339 FOR_BB_INSNS (bb, insn)
1341 if (! INSN_P (insn))
1342 continue;
1343 set = single_set (insn);
1344 if (set == NULL_RTX)
1345 continue;
1346 dst = SET_DEST (set);
1347 src = SET_SRC (set);
1348 if (! REG_P (dst) || ! REG_P (src))
1349 continue;
1350 dst_regno = REGNO (dst);
1351 src_regno = REGNO (src);
1352 if (dst_regno >= FIRST_PSEUDO_REGISTER
1353 && src_regno < FIRST_PSEUDO_REGISTER)
1355 hard_regno = src_regno;
1356 to_p = true;
1357 a = ira_curr_regno_allocno_map[dst_regno];
1359 else if (src_regno >= FIRST_PSEUDO_REGISTER
1360 && dst_regno < FIRST_PSEUDO_REGISTER)
1362 hard_regno = dst_regno;
1363 to_p = false;
1364 a = ira_curr_regno_allocno_map[src_regno];
1366 else
1367 continue;
1368 rclass = ALLOCNO_COVER_CLASS (a);
1369 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], hard_regno))
1370 continue;
1371 i = ira_class_hard_reg_index[rclass][hard_regno];
1372 if (i < 0)
1373 continue;
1374 mode = ALLOCNO_MODE (a);
1375 hard_reg_class = REGNO_REG_CLASS (hard_regno);
1376 cost = (to_p ? ira_register_move_cost[mode][hard_reg_class][rclass]
1377 : ira_register_move_cost[mode][rclass][hard_reg_class]) * freq;
1378 ira_allocate_and_set_costs (&ALLOCNO_HARD_REG_COSTS (a), rclass,
1379 ALLOCNO_COVER_CLASS_COST (a));
1380 ira_allocate_and_set_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
1381 rclass, 0);
1382 ALLOCNO_HARD_REG_COSTS (a)[i] -= cost;
1383 ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[i] -= cost;
1384 ALLOCNO_COVER_CLASS_COST (a) = MIN (ALLOCNO_COVER_CLASS_COST (a),
1385 ALLOCNO_HARD_REG_COSTS (a)[i]);
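/* Illustrative example (an addition, not from the original sources): for
   a copy (set (reg 100) (reg 1)) where hard register 1 belongs to the
   cover class of allocno 100, the loop above subtracts
   ira_register_move_cost[mode][REGNO_REG_CLASS (1)][rclass] * freq from
   both the hard register cost and the conflict hard register cost of
   register 1 for that allocno, making an allocation to register 1 more
   attractive.  */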
 1389 /* After we find the hard register and memory costs for allocnos,
 1390    define their cover classes and modify the hard register costs to
 1391    account for insns that move an allocno to/from hard registers.  */
1392 static void
1393 setup_allocno_cover_class_and_costs (void)
1395 int i, j, n, regno;
1396 int *reg_costs;
1397 enum reg_class cover_class, rclass;
1398 enum machine_mode mode;
1399 ira_allocno_t a;
1400 ira_allocno_iterator ai;
1402 FOR_EACH_ALLOCNO (a, ai)
1404 i = ALLOCNO_NUM (a);
1405 mode = ALLOCNO_MODE (a);
1406 cover_class = ira_class_translate[allocno_pref[i]];
1407 ira_assert (allocno_pref[i] == NO_REGS || cover_class != NO_REGS);
1408 ALLOCNO_MEMORY_COST (a) = ALLOCNO_UPDATED_MEMORY_COST (a)
1409 = COSTS_OF_ALLOCNO (allocno_costs, i)->mem_cost;
1410 ira_set_allocno_cover_class (a, cover_class);
1411 if (cover_class == NO_REGS)
1412 continue;
1413 ALLOCNO_AVAILABLE_REGS_NUM (a) = ira_available_class_regs[cover_class];
1414 ALLOCNO_COVER_CLASS_COST (a)
1415 = (COSTS_OF_ALLOCNO (allocno_costs, i)
1416 ->cost[cost_class_nums[allocno_pref[i]]]);
1417 if (optimize && ALLOCNO_COVER_CLASS (a) != allocno_pref[i])
1419 n = ira_class_hard_regs_num[cover_class];
1420 ALLOCNO_HARD_REG_COSTS (a)
1421 = reg_costs = ira_allocate_cost_vector (cover_class);
1422 for (j = n - 1; j >= 0; j--)
1424 regno = ira_class_hard_regs[cover_class][j];
1425 rclass = REGNO_REG_CLASS (regno);
1426 reg_costs[j] = (COSTS_OF_ALLOCNO (allocno_costs, i)
1427 ->cost[cost_class_nums[rclass]]);
1431 if (optimize)
1432 ira_traverse_loop_tree (true, ira_loop_tree_root,
1433 process_bb_node_for_hard_reg_moves, NULL);
1438 /* Function called once during compiler work. */
1439 void
1440 ira_init_costs_once (void)
1442 int i;
1444 init_cost = NULL;
1445 for (i = 0; i < MAX_RECOG_OPERANDS; i++)
1447 op_costs[i] = NULL;
1448 this_op_costs[i] = NULL;
1450 temp_costs = NULL;
1451 cost_classes = NULL;
1454 /* Free allocated temporary cost vectors. */
1455 static void
1456 free_ira_costs (void)
1458 int i;
1460 if (init_cost != NULL)
1461 free (init_cost);
1462 init_cost = NULL;
1463 for (i = 0; i < MAX_RECOG_OPERANDS; i++)
1465 if (op_costs[i] != NULL)
1466 free (op_costs[i]);
1467 if (this_op_costs[i] != NULL)
1468 free (this_op_costs[i]);
1469 op_costs[i] = this_op_costs[i] = NULL;
1471 if (temp_costs != NULL)
1472 free (temp_costs);
1473 temp_costs = NULL;
1474 if (cost_classes != NULL)
1475 free (cost_classes);
1476 cost_classes = NULL;
1479 /* This is called each time register related information is
1480 changed. */
1481 void
1482 ira_init_costs (void)
1484 int i;
1486 free_ira_costs ();
1487 max_struct_costs_size
1488 = sizeof (struct costs) + sizeof (int) * (ira_important_classes_num - 1);
1489 /* Don't use ira_allocate because vectors live through several IRA calls. */
1490 init_cost = (struct costs *) xmalloc (max_struct_costs_size);
1491 init_cost->mem_cost = 1000000;
1492 for (i = 0; i < ira_important_classes_num; i++)
1493 init_cost->cost[i] = 1000000;
1494 for (i = 0; i < MAX_RECOG_OPERANDS; i++)
1496 op_costs[i] = (struct costs *) xmalloc (max_struct_costs_size);
1497 this_op_costs[i] = (struct costs *) xmalloc (max_struct_costs_size);
1499 temp_costs = (struct costs *) xmalloc (max_struct_costs_size);
1500 cost_classes = (enum reg_class *) xmalloc (sizeof (enum reg_class)
1501 * ira_important_classes_num);
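/* Note (an observation added for clarity): the buffers above are sized
   with ira_important_classes_num - 1 extra ints because `struct costs'
   already declares cost[1]; every per-pass struct_costs_size is therefore
   bounded by max_struct_costs_size, which is what allows allocno_costs
   and total_costs to be allocated once per invocation in ira_costs.  */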
1504 /* Function called once at the end of compiler work. */
1505 void
1506 ira_finish_costs_once (void)
1508 free_ira_costs ();
 1513 /* Entry function which defines the cover class, memory cost and hard
 1514    register costs for each allocno.  */
1515 void
1516 ira_costs (void)
1518 ira_allocno_t a;
1519 ira_allocno_iterator ai;
1521 allocno_costs = (struct costs *) ira_allocate (max_struct_costs_size
1522 * ira_allocnos_num);
1523 total_costs = (struct costs *) ira_allocate (max_struct_costs_size
1524 * ira_allocnos_num);
1525 allocno_pref_buffer
1526 = (enum reg_class *) ira_allocate (sizeof (enum reg_class)
1527 * ira_allocnos_num);
1528 find_allocno_class_costs ();
1529 setup_allocno_cover_class_and_costs ();
 1530   /* Because we could have processed operands only as subregs, check
 1531      the mode of the registers themselves too.  */
1532 FOR_EACH_ALLOCNO (a, ai)
1533 if (ira_register_move_cost[ALLOCNO_MODE (a)] == NULL
1534 && have_regs_of_mode[ALLOCNO_MODE (a)])
1535 ira_init_register_move_cost (ALLOCNO_MODE (a));
1536 ira_free (allocno_pref_buffer);
1537 ira_free (total_costs);
1538 ira_free (allocno_costs);
 1543 /* Change the hard register costs for allocnos which live through
 1544    function calls.  This is called only after all intersecting calls
 1545    have been found while building the allocno live ranges.  */
1546 void
1547 ira_tune_allocno_costs_and_cover_classes (void)
1549 int j, n, regno;
1550 int cost, min_cost, *reg_costs;
1551 enum reg_class cover_class, rclass;
1552 enum machine_mode mode;
1553 ira_allocno_t a;
1554 ira_allocno_iterator ai;
1556 FOR_EACH_ALLOCNO (a, ai)
1558 cover_class = ALLOCNO_COVER_CLASS (a);
1559 if (cover_class == NO_REGS)
1560 continue;
1561 mode = ALLOCNO_MODE (a);
1562 n = ira_class_hard_regs_num[cover_class];
1563 min_cost = INT_MAX;
1564 if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
1566 ira_allocate_and_set_costs
1567 (&ALLOCNO_HARD_REG_COSTS (a), cover_class,
1568 ALLOCNO_COVER_CLASS_COST (a));
1569 reg_costs = ALLOCNO_HARD_REG_COSTS (a);
1570 for (j = n - 1; j >= 0; j--)
1572 regno = ira_class_hard_regs[cover_class][j];
1573 rclass = REGNO_REG_CLASS (regno);
1574 cost = 0;
1575 /* ??? If only part is call clobbered. */
1576 if (! ira_hard_reg_not_in_set_p (regno, mode, call_used_reg_set))
1577 cost += (ALLOCNO_CALL_FREQ (a)
1578 * (ira_memory_move_cost[mode][rclass][0]
1579 + ira_memory_move_cost[mode][rclass][1]));
1580 #ifdef IRA_HARD_REGNO_ADD_COST_MULTIPLIER
1581 cost += ((ira_memory_move_cost[mode][rclass][0]
1582 + ira_memory_move_cost[mode][rclass][1])
1583 * ALLOCNO_FREQ (a)
1584 * IRA_HARD_REGNO_ADD_COST_MULTIPLIER (regno) / 2);
1585 #endif
1586 reg_costs[j] += cost;
1587 if (min_cost > reg_costs[j])
1588 min_cost = reg_costs[j];
1591 if (min_cost != INT_MAX)
1592 ALLOCNO_COVER_CLASS_COST (a) = min_cost;
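/* Worked example (a sketch added for illustration): if an allocno crosses
   calls and hard register R of its cover class is call-clobbered, the
   loop above adds ALLOCNO_CALL_FREQ (a) times the cost of saving and
   restoring R through memory to reg_costs for R; the cover class cost is
   then set to the cheapest of the adjusted per-register costs, so a
   call-preserved register or memory can win.  */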