gcc/ira-costs.c
1 /* IRA hard register and memory cost calculation for allocnos or pseudos.
2 Copyright (C) 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "hard-reg-set.h"
27 #include "rtl.h"
28 #include "expr.h"
29 #include "tm_p.h"
30 #include "flags.h"
31 #include "basic-block.h"
32 #include "regs.h"
33 #include "addresses.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "toplev.h"
37 #include "target.h"
38 #include "params.h"
39 #include "ira-int.h"
 41 /* This flag is set up every time we calculate pseudo register
 42    classes through function ira_set_pseudo_classes.  */
43 static bool pseudo_classes_defined_p = false;
45 /* TRUE if we work with allocnos. Otherwise we work with pseudos. */
46 static bool allocno_p;
48 /* Number of elements in arrays `in_inc_dec' and `costs'. */
49 static int cost_elements_num;
51 #ifdef FORBIDDEN_INC_DEC_CLASSES
 52 /* Indexed by N; TRUE if the allocno or pseudo with number N is used
 53    in an auto-inc or auto-dec context.  */
54 static bool *in_inc_dec;
55 #endif
57 /* The `costs' struct records the cost of using hard registers of each
58 class considered for the calculation and of using memory for each
59 allocno or pseudo. */
60 struct costs
62 int mem_cost;
63 /* Costs for register classes start here. We process only some
64 register classes (cover classes on the 1st cost calculation
65 iteration and important classes on the 2nd iteration). */
66 int cost[1];
 69 /* Initialized once.  It is the maximal possible size of an allocated
 70    struct costs.  */
71 static int max_struct_costs_size;
73 /* Allocated and initialized once, and used to initialize cost values
74 for each insn. */
75 static struct costs *init_cost;
77 /* Allocated once, and used for temporary purposes. */
78 static struct costs *temp_costs;
80 /* Allocated once, and used for the cost calculation. */
81 static struct costs *op_costs[MAX_RECOG_OPERANDS];
82 static struct costs *this_op_costs[MAX_RECOG_OPERANDS];
84 /* Costs of each class for each allocno or pseudo. */
85 static struct costs *costs;
87 /* Accumulated costs of each class for each allocno. */
88 static struct costs *total_allocno_costs;
90 /* Classes used for cost calculation. They may be different on
91 different iterations of the cost calculations or in different
92 optimization modes. */
93 static enum reg_class *cost_classes;
95 /* The size of the previous array. */
96 static int cost_classes_num;
 98 /* Map: cost class -> order number (starting at 0) of the cost
 99    class.  The order number is negative for non-cost classes.  */
100 static int cost_class_nums[N_REG_CLASSES];
102 /* It is the current size of struct costs. */
103 static int struct_costs_size;
105 /* Return pointer to structure containing costs of allocno or pseudo
106 with given NUM in array ARR. */
107 #define COSTS(arr, num) \
108 ((struct costs *) ((char *) (arr) + (num) * struct_costs_size))
110 /* Return index in COSTS when processing reg with REGNO. */
111 #define COST_INDEX(regno) (allocno_p \
112 ? ALLOCNO_NUM (ira_curr_regno_allocno_map[regno]) \
113 : (int) regno)
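/* Illustration (schematic): entries of `costs', `total_allocno_costs'
   etc. are laid out back to back with a run-time element size, so
   entry NUM starts at byte offset NUM * struct_costs_size; a typical
   access is

     COSTS (costs, COST_INDEX (REGNO (op)))->cost[k]

   where K is the order number of a cost class in cost_class_nums.  */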
115 /* Record register class preferences of each allocno or pseudo.  A
116    null value means no preference; that happens on the 1st iteration
117    of the cost calculation.  */
118 static enum reg_class *pref;
120 /* Allocated buffers for pref. */
121 static enum reg_class *pref_buffer;
123 /* Record the cover register class shared by all allocnos with the same regno.  */
124 static enum reg_class *regno_cover_class;
126 /* Execution frequency of the current insn. */
127 static int frequency;
131 /* Compute the cost of loading X into (if TO_P is TRUE) or from (if
132 TO_P is FALSE) a register of class RCLASS in mode MODE. X must not
133 be a pseudo register. */
134 static int
135 copy_cost (rtx x, enum machine_mode mode, enum reg_class rclass, bool to_p,
136 secondary_reload_info *prev_sri)
138 secondary_reload_info sri;
139 enum reg_class secondary_class = NO_REGS;
141 /* If X is a SCRATCH, there is actually nothing to move since we are
142 assuming optimal allocation. */
143 if (GET_CODE (x) == SCRATCH)
144 return 0;
146 /* Get the class we will actually use for a reload. */
147 rclass = PREFERRED_RELOAD_CLASS (x, rclass);
149   /* If we need a secondary reload for an intermediate, the cost is
150      that of loading the input into the intermediate register plus
151      that of copying it.  */
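  /* Schematically, when moving X into RCLASS needs an intermediate
     class SECONDARY, the total below becomes
       move_cost[mode][SECONDARY][RCLASS] + sri.extra_cost
         + copy_cost (x, mode, SECONDARY, to_p, &sri),
     and the recursion stops once no further secondary reload is
     needed.  */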
152 sri.prev_sri = prev_sri;
153 sri.extra_cost = 0;
154 secondary_class = targetm.secondary_reload (to_p, x, rclass, mode, &sri);
156 if (secondary_class != NO_REGS)
158 if (!move_cost[mode])
159 init_move_cost (mode);
160 return (move_cost[mode][secondary_class][rclass] + sri.extra_cost
161 + copy_cost (x, mode, secondary_class, to_p, &sri));
164   /* For memory, use the memory move cost, for (hard) registers, use
165      the cost to move between the register classes, and use the cost
166      of one insn for everything else (constants).  */
167 if (MEM_P (x) || rclass == NO_REGS)
168 return sri.extra_cost + ira_memory_move_cost[mode][rclass][to_p != 0];
169 else if (REG_P (x))
171 if (!move_cost[mode])
172 init_move_cost (mode);
173 return (sri.extra_cost + move_cost[mode][REGNO_REG_CLASS (REGNO (x))][rclass]);
175 else
176 /* If this is a constant, we may eventually want to call rtx_cost
177 here. */
178 return sri.extra_cost + COSTS_N_INSNS (1);
183 /* Record the cost of using memory or hard registers of various
184 classes for the operands in INSN.
186 N_ALTS is the number of alternatives.
187 N_OPS is the number of operands.
188 OPS is an array of the operands.
189 MODES are the modes of the operands, in case any are VOIDmode.
190 CONSTRAINTS are the constraints to use for the operands. This array
191 is modified by this procedure.
193 This procedure works alternative by alternative. For each
194 alternative we assume that we will be able to allocate all allocnos
195 to their ideal register class and calculate the cost of using that
196 alternative. Then we compute, for each operand that is a
197 pseudo-register, the cost of having the allocno allocated to each
198 register class and using it in that alternative. To this cost is
199 added the cost of the alternative.
201 The cost of each class for this insn is its lowest cost among all
202 the alternatives. */
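/* Roughly, for a pseudo operand whose class in the current
   alternative is CL, each cost-class entry K gets
     (may_move_cost (K -> CL) if the operand is read
      + may_move_cost (CL -> K) if it is written) * frequency,
   the alternative's own cost is added, and the value finally kept is
   the minimum over all alternatives.  */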
203 static void
204 record_reg_classes (int n_alts, int n_ops, rtx *ops,
205 enum machine_mode *modes, const char **constraints,
206 rtx insn, struct costs **op_costs,
207 enum reg_class *pref)
209 int alt;
210 int i, j, k;
211 rtx set;
212 int insn_allows_mem[MAX_RECOG_OPERANDS];
214 for (i = 0; i < n_ops; i++)
215 insn_allows_mem[i] = 0;
217 /* Process each alternative, each time minimizing an operand's cost
218 with the cost for each operand in that alternative. */
219 for (alt = 0; alt < n_alts; alt++)
221 enum reg_class classes[MAX_RECOG_OPERANDS];
222 int allows_mem[MAX_RECOG_OPERANDS];
223 enum reg_class rclass;
224 int alt_fail = 0;
225 int alt_cost = 0, op_cost_add;
227 for (i = 0; i < n_ops; i++)
229 unsigned char c;
230 const char *p = constraints[i];
231 rtx op = ops[i];
232 enum machine_mode mode = modes[i];
233 int allows_addr = 0;
234 int win = 0;
236 /* Initially show we know nothing about the register class. */
237 classes[i] = NO_REGS;
238 allows_mem[i] = 0;
240 /* If this operand has no constraints at all, we can
241 conclude nothing about it since anything is valid. */
242 if (*p == 0)
244 if (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER)
245 memset (this_op_costs[i], 0, struct_costs_size);
246 continue;
249 	  /* If this alternative is only relevant when this operand
250 	     matches a previous operand, we do different things
251 	     depending on whether this operand is an allocno-reg or not.
252 	     We must process any modifiers for the operand before we
253 	     can make this test.  */
254 while (*p == '%' || *p == '=' || *p == '+' || *p == '&')
255 p++;
257 if (p[0] >= '0' && p[0] <= '0' + i && (p[1] == ',' || p[1] == 0))
259 /* Copy class and whether memory is allowed from the
260 matching alternative. Then perform any needed cost
261 computations and/or adjustments. */
262 j = p[0] - '0';
263 classes[i] = classes[j];
264 allows_mem[i] = allows_mem[j];
265 if (allows_mem[i])
266 insn_allows_mem[i] = 1;
268 if (! REG_P (op) || REGNO (op) < FIRST_PSEUDO_REGISTER)
270 /* If this matches the other operand, we have no
271 added cost and we win. */
272 if (rtx_equal_p (ops[j], op))
273 win = 1;
274 /* If we can put the other operand into a register,
275 add to the cost of this alternative the cost to
276 copy this operand to the register used for the
277 other operand. */
278 else if (classes[j] != NO_REGS)
280 alt_cost += copy_cost (op, mode, classes[j], 1, NULL);
281 win = 1;
284 else if (! REG_P (ops[j])
285 || REGNO (ops[j]) < FIRST_PSEUDO_REGISTER)
287 /* This op is an allocno but the one it matches is
288 not. */
290 /* If we can't put the other operand into a
291 register, this alternative can't be used. */
293 if (classes[j] == NO_REGS)
294 alt_fail = 1;
295 /* Otherwise, add to the cost of this alternative
296 the cost to copy the other operand to the hard
297 register used for this operand. */
298 else
299 alt_cost += copy_cost (ops[j], mode, classes[j], 1, NULL);
301 else
303 /* The costs of this operand are not the same as the
304 other operand since move costs are not symmetric.
305 Moreover, if we cannot tie them, this alternative
306 needs to do a copy, which is one insn. */
307 struct costs *pp = this_op_costs[i];
309 for (k = 0; k < cost_classes_num; k++)
311 rclass = cost_classes[k];
312 pp->cost[k]
313 = (((recog_data.operand_type[i] != OP_OUT
314 ? ira_get_may_move_cost (mode, rclass,
315 classes[i], true) : 0)
316 + (recog_data.operand_type[i] != OP_IN
317 ? ira_get_may_move_cost (mode, classes[i],
318 rclass, false) : 0))
319 * frequency);
322 /* If the alternative actually allows memory, make
323 things a bit cheaper since we won't need an extra
324 insn to load it. */
325 pp->mem_cost
326 = ((recog_data.operand_type[i] != OP_IN
327 ? ira_memory_move_cost[mode][classes[i]][0] : 0)
328 + (recog_data.operand_type[i] != OP_OUT
329 ? ira_memory_move_cost[mode][classes[i]][1] : 0)
330 - allows_mem[i]) * frequency;
332 		  /* If we have assigned a class to this allocno in our
333 		     first pass, add a cost to this alternative
334 		     corresponding to what we would add if this allocno
335 		     were not in the appropriate class.  We could use the
336 		     cover class here but it is a less accurate
337 		     approximation.  */
338 if (pref)
340 enum reg_class pref_class = pref[COST_INDEX (REGNO (op))];
342 if (pref_class == NO_REGS)
343 alt_cost
344 += ((recog_data.operand_type[i] != OP_IN
345 ? ira_memory_move_cost[mode][classes[i]][0]
346 : 0)
347 + (recog_data.operand_type[i] != OP_OUT
348 ? ira_memory_move_cost[mode][classes[i]][1]
349 : 0));
350 else if (ira_reg_class_intersect
351 [pref_class][classes[i]] == NO_REGS)
352 alt_cost += ira_get_register_move_cost (mode,
353 pref_class,
354 classes[i]);
356 if (REGNO (ops[i]) != REGNO (ops[j])
357 && ! find_reg_note (insn, REG_DEAD, op))
358 alt_cost += 2;
360 /* This is in place of ordinary cost computation for
361 this operand, so skip to the end of the
362 alternative (should be just one character). */
363 while (*p && *p++ != ',')
366 constraints[i] = p;
367 continue;
371 /* Scan all the constraint letters. See if the operand
372 matches any of the constraints. Collect the valid
373 register classes and see if this operand accepts
374 memory. */
375 while ((c = *p))
377 switch (c)
379 case ',':
380 break;
381 case '*':
382 /* Ignore the next letter for this pass. */
383 c = *++p;
384 break;
386 case '?':
387 alt_cost += 2;
388 case '!': case '#': case '&':
389 case '0': case '1': case '2': case '3': case '4':
390 case '5': case '6': case '7': case '8': case '9':
391 break;
393 case 'p':
394 allows_addr = 1;
395 win = address_operand (op, GET_MODE (op));
396 /* We know this operand is an address, so we want it
397 to be allocated to a register that can be the
398 base of an address, i.e. BASE_REG_CLASS. */
399 classes[i]
400 = ira_reg_class_union[classes[i]]
401 [base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
402 break;
404 case 'm': case 'o': case 'V':
405 /* It doesn't seem worth distinguishing between
406 offsettable and non-offsettable addresses
407 here. */
408 insn_allows_mem[i] = allows_mem[i] = 1;
409 if (MEM_P (op))
410 win = 1;
411 break;
413 case '<':
414 if (MEM_P (op)
415 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
416 || GET_CODE (XEXP (op, 0)) == POST_DEC))
417 win = 1;
418 break;
420 case '>':
421 if (MEM_P (op)
422 && (GET_CODE (XEXP (op, 0)) == PRE_INC
423 || GET_CODE (XEXP (op, 0)) == POST_INC))
424 win = 1;
425 break;
427 case 'E':
428 case 'F':
429 if (GET_CODE (op) == CONST_DOUBLE
430 || (GET_CODE (op) == CONST_VECTOR
431 && (GET_MODE_CLASS (GET_MODE (op))
432 == MODE_VECTOR_FLOAT)))
433 win = 1;
434 break;
436 case 'G':
437 case 'H':
438 if (GET_CODE (op) == CONST_DOUBLE
439 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
440 win = 1;
441 break;
443 case 's':
444 if (CONST_INT_P (op)
445 || (GET_CODE (op) == CONST_DOUBLE
446 && GET_MODE (op) == VOIDmode))
447 break;
449 case 'i':
450 if (CONSTANT_P (op)
451 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
452 win = 1;
453 break;
455 case 'n':
456 if (CONST_INT_P (op)
457 || (GET_CODE (op) == CONST_DOUBLE
458 && GET_MODE (op) == VOIDmode))
459 win = 1;
460 break;
462 case 'I':
463 case 'J':
464 case 'K':
465 case 'L':
466 case 'M':
467 case 'N':
468 case 'O':
469 case 'P':
470 if (CONST_INT_P (op)
471 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
472 win = 1;
473 break;
475 case 'X':
476 win = 1;
477 break;
479 case 'g':
480 if (MEM_P (op)
481 || (CONSTANT_P (op)
482 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))))
483 win = 1;
484 insn_allows_mem[i] = allows_mem[i] = 1;
485 case 'r':
486 classes[i] = ira_reg_class_union[classes[i]][GENERAL_REGS];
487 break;
489 default:
490 if (REG_CLASS_FROM_CONSTRAINT (c, p) != NO_REGS)
491 classes[i] = ira_reg_class_union[classes[i]]
492 [REG_CLASS_FROM_CONSTRAINT (c, p)];
493 #ifdef EXTRA_CONSTRAINT_STR
494 else if (EXTRA_CONSTRAINT_STR (op, c, p))
495 win = 1;
497 if (EXTRA_MEMORY_CONSTRAINT (c, p))
499 /* Every MEM can be reloaded to fit. */
500 insn_allows_mem[i] = allows_mem[i] = 1;
501 if (MEM_P (op))
502 win = 1;
504 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
506 /* Every address can be reloaded to fit. */
507 allows_addr = 1;
508 if (address_operand (op, GET_MODE (op)))
509 win = 1;
510 /* We know this operand is an address, so we
511 want it to be allocated to a hard register
512 that can be the base of an address,
513 i.e. BASE_REG_CLASS. */
514 classes[i]
515 = ira_reg_class_union[classes[i]]
516 [base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
518 #endif
519 break;
521 p += CONSTRAINT_LEN (c, p);
522 if (c == ',')
523 break;
526 constraints[i] = p;
528 /* How we account for this operand now depends on whether it
529 is a pseudo register or not. If it is, we first check if
530 any register classes are valid. If not, we ignore this
531 alternative, since we want to assume that all allocnos get
532 allocated for register preferencing. If some register
533 class is valid, compute the costs of moving the allocno
534 into that class. */
535 if (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER)
537 if (classes[i] == NO_REGS)
539 /* We must always fail if the operand is a REG, but
540 we did not find a suitable class.
542 Otherwise we may perform an uninitialized read
543 from this_op_costs after the `continue' statement
544 below. */
545 alt_fail = 1;
547 else
549 struct costs *pp = this_op_costs[i];
551 for (k = 0; k < cost_classes_num; k++)
553 rclass = cost_classes[k];
554 pp->cost[k]
555 = (((recog_data.operand_type[i] != OP_OUT
556 ? ira_get_may_move_cost (mode, rclass,
557 classes[i], true) : 0)
558 + (recog_data.operand_type[i] != OP_IN
559 ? ira_get_may_move_cost (mode, classes[i],
560 rclass, false) : 0))
561 * frequency);
564 /* If the alternative actually allows memory, make
565 things a bit cheaper since we won't need an extra
566 insn to load it. */
567 pp->mem_cost
568 = ((recog_data.operand_type[i] != OP_IN
569 ? ira_memory_move_cost[mode][classes[i]][0] : 0)
570 + (recog_data.operand_type[i] != OP_OUT
571 ? ira_memory_move_cost[mode][classes[i]][1] : 0)
572 - allows_mem[i]) * frequency;
573 	      /* If we have assigned a class to this allocno in our
574 		 first pass, add a cost to this alternative
575 		 corresponding to what we would add if this allocno
576 		 were not in the appropriate class.  We could use the
577 		 cover class here but it is a less accurate
578 		 approximation.  */
579 if (pref)
581 enum reg_class pref_class = pref[COST_INDEX (REGNO (op))];
583 if (pref_class == NO_REGS)
584 alt_cost
585 += ((recog_data.operand_type[i] != OP_IN
586 ? ira_memory_move_cost[mode][classes[i]][0]
587 : 0)
588 + (recog_data.operand_type[i] != OP_OUT
589 ? ira_memory_move_cost[mode][classes[i]][1]
590 : 0));
591 else if (ira_reg_class_intersect[pref_class][classes[i]]
592 == NO_REGS)
593 alt_cost += ira_get_register_move_cost (mode,
594 pref_class,
595 classes[i]);
600 	  /* Otherwise, if this alternative wins, either because we
601 	     have already determined that or because we have a hard
602 	     register of the proper class, there is no cost for this
603 	     alternative.  */
604 else if (win || (REG_P (op)
605 && reg_fits_class_p (op, classes[i],
606 0, GET_MODE (op))))
609 /* If registers are valid, the cost of this alternative
610 includes copying the object to and/or from a
611 register. */
612 else if (classes[i] != NO_REGS)
614 if (recog_data.operand_type[i] != OP_OUT)
615 alt_cost += copy_cost (op, mode, classes[i], 1, NULL);
617 if (recog_data.operand_type[i] != OP_IN)
618 alt_cost += copy_cost (op, mode, classes[i], 0, NULL);
620 /* The only other way this alternative can be used is if
621 this is a constant that could be placed into memory. */
622 else if (CONSTANT_P (op) && (allows_addr || allows_mem[i]))
623 alt_cost += ira_memory_move_cost[mode][classes[i]][1];
624 else
625 alt_fail = 1;
628 if (alt_fail)
629 continue;
631 op_cost_add = alt_cost * frequency;
632 /* Finally, update the costs with the information we've
633 calculated about this alternative. */
634 for (i = 0; i < n_ops; i++)
635 if (REG_P (ops[i]) && REGNO (ops[i]) >= FIRST_PSEUDO_REGISTER)
637 struct costs *pp = op_costs[i], *qq = this_op_costs[i];
638 int scale = 1 + (recog_data.operand_type[i] == OP_INOUT);
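	  /* SCALE is 2 for an in/out operand since it is effectively both
	     read and written; e.g. a per-class cost C with the
	     alternative's added cost A then contributes
	     MIN (previous, (C + A) * 2).  */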
640 pp->mem_cost = MIN (pp->mem_cost,
641 (qq->mem_cost + op_cost_add) * scale);
643 for (k = 0; k < cost_classes_num; k++)
644 pp->cost[k]
645 = MIN (pp->cost[k], (qq->cost[k] + op_cost_add) * scale);
649 if (allocno_p)
650 for (i = 0; i < n_ops; i++)
652 ira_allocno_t a;
653 rtx op = ops[i];
655 if (! REG_P (op) || REGNO (op) < FIRST_PSEUDO_REGISTER)
656 continue;
657 a = ira_curr_regno_allocno_map [REGNO (op)];
658 if (! ALLOCNO_BAD_SPILL_P (a) && insn_allows_mem[i] == 0)
659 ALLOCNO_BAD_SPILL_P (a) = true;
662   /* If this insn is a single set copying operand 1 to operand 0, and
663      one operand is an allocno with the other a hard reg or an allocno
664      that prefers a hard register that is in its own register class,
665      then we may want to adjust the cost of that register class to -1.
667      Avoid the adjustment if the source does not die, to avoid
668      stressing the register allocator by preferencing two colliding
669      registers into a single class.
671      Also avoid the adjustment if a copy between hard registers of the
672      class is expensive (ten times the cost of a default copy is
673      considered arbitrarily expensive).  This avoids losing when the
674      preferred class is very expensive as the source of a copy
675      instruction.  */
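  /* Illustration (register numbers are made up): for a dying copy
     "r100 <- r3" where r3 belongs to a class that exactly fits the
     mode, the code below sets r100's cost for that class to
     -frequency, gently preferring that class for r100.  */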
676 if ((set = single_set (insn)) != 0
677 && ops[0] == SET_DEST (set) && ops[1] == SET_SRC (set)
678 && REG_P (ops[0]) && REG_P (ops[1])
679 && find_regno_note (insn, REG_DEAD, REGNO (ops[1])))
680 for (i = 0; i <= 1; i++)
681 if (REGNO (ops[i]) >= FIRST_PSEUDO_REGISTER)
683 unsigned int regno = REGNO (ops[!i]);
684 enum machine_mode mode = GET_MODE (ops[!i]);
685 enum reg_class rclass;
686 unsigned int nr;
688 if (regno < FIRST_PSEUDO_REGISTER)
689 for (k = 0; k < cost_classes_num; k++)
691 rclass = cost_classes[k];
692 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
693 && (reg_class_size[rclass]
694 == (unsigned) CLASS_MAX_NREGS (rclass, mode)))
696 if (reg_class_size[rclass] == 1)
697 op_costs[i]->cost[k] = -frequency;
698 else
700 for (nr = 0;
701 nr < (unsigned) hard_regno_nregs[regno][mode];
702 nr++)
703 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass],
704 regno + nr))
705 break;
707 if (nr == (unsigned) hard_regno_nregs[regno][mode])
708 op_costs[i]->cost[k] = -frequency;
717 /* Wrapper around REGNO_OK_FOR_INDEX_P, to allow pseudo registers. */
718 static inline bool
719 ok_for_index_p_nonstrict (rtx reg)
721 unsigned regno = REGNO (reg);
723 return regno >= FIRST_PSEUDO_REGISTER || REGNO_OK_FOR_INDEX_P (regno);
726 /* A version of regno_ok_for_base_p for use here, when all
727 pseudo-registers should count as OK. Arguments as for
728 regno_ok_for_base_p. */
729 static inline bool
730 ok_for_base_p_nonstrict (rtx reg, enum machine_mode mode,
731 enum rtx_code outer_code, enum rtx_code index_code)
733 unsigned regno = REGNO (reg);
735 if (regno >= FIRST_PSEUDO_REGISTER)
736 return true;
737 return ok_for_base_p_1 (regno, mode, outer_code, index_code);
740 /* Record the pseudo registers we must reload into hard registers in a
741 subexpression of a memory address, X.
743 If CONTEXT is 0, we are looking at the base part of an address,
744 otherwise we are looking at the index part.
746 MODE is the mode of the memory reference; OUTER_CODE and INDEX_CODE
747 give the context that the rtx appears in. These three arguments
748 are passed down to base_reg_class.
750 SCALE is twice the amount to multiply the cost by (it is twice so
751 we can represent half-cost adjustments). */
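/* For example, when a PLUS has two register operands and we cannot
   tell which is the base and which the index, each operand is
   recorded both as base and as index with SCALE / 2 below, i.e. half
   weight for each possibility.  */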
752 static void
753 record_address_regs (enum machine_mode mode, rtx x, int context,
754 enum rtx_code outer_code, enum rtx_code index_code,
755 int scale)
757 enum rtx_code code = GET_CODE (x);
758 enum reg_class rclass;
760 if (context == 1)
761 rclass = INDEX_REG_CLASS;
762 else
763 rclass = base_reg_class (mode, outer_code, index_code);
765 switch (code)
767 case CONST_INT:
768 case CONST:
769 case CC0:
770 case PC:
771 case SYMBOL_REF:
772 case LABEL_REF:
773 return;
775 case PLUS:
776 /* When we have an address that is a sum, we must determine
777 whether registers are "base" or "index" regs. If there is a
778 sum of two registers, we must choose one to be the "base".
779 Luckily, we can use the REG_POINTER to make a good choice
780 most of the time. We only need to do this on machines that
781 can have two registers in an address and where the base and
782 index register classes are different.
784 ??? This code used to set REGNO_POINTER_FLAG in some cases,
785 but that seems bogus since it should only be set when we are
786 sure the register is being used as a pointer. */
788 rtx arg0 = XEXP (x, 0);
789 rtx arg1 = XEXP (x, 1);
790 enum rtx_code code0 = GET_CODE (arg0);
791 enum rtx_code code1 = GET_CODE (arg1);
793 /* Look inside subregs. */
794 if (code0 == SUBREG)
795 arg0 = SUBREG_REG (arg0), code0 = GET_CODE (arg0);
796 if (code1 == SUBREG)
797 arg1 = SUBREG_REG (arg1), code1 = GET_CODE (arg1);
799 /* If this machine only allows one register per address, it
800 must be in the first operand. */
801 if (MAX_REGS_PER_ADDRESS == 1)
802 record_address_regs (mode, arg0, 0, PLUS, code1, scale);
804 /* If index and base registers are the same on this machine,
805 just record registers in any non-constant operands. We
806 assume here, as well as in the tests below, that all
807 addresses are in canonical form. */
808 else if (INDEX_REG_CLASS == base_reg_class (VOIDmode, PLUS, SCRATCH))
810 record_address_regs (mode, arg0, context, PLUS, code1, scale);
811 if (! CONSTANT_P (arg1))
812 record_address_regs (mode, arg1, context, PLUS, code0, scale);
815 /* If the second operand is a constant integer, it doesn't
816 change what class the first operand must be. */
817 else if (code1 == CONST_INT || code1 == CONST_DOUBLE)
818 record_address_regs (mode, arg0, context, PLUS, code1, scale);
819 /* If the second operand is a symbolic constant, the first
820 operand must be an index register. */
821 else if (code1 == SYMBOL_REF || code1 == CONST || code1 == LABEL_REF)
822 record_address_regs (mode, arg0, 1, PLUS, code1, scale);
823 	/* If both operands are registers but one is already a hard
824 	   register of index or base class, give the other the role
825 	   (base or index) that the hard register does not fill.  */
826 else if (code0 == REG && code1 == REG
827 && REGNO (arg0) < FIRST_PSEUDO_REGISTER
828 && (ok_for_base_p_nonstrict (arg0, mode, PLUS, REG)
829 || ok_for_index_p_nonstrict (arg0)))
830 record_address_regs (mode, arg1,
831 ok_for_base_p_nonstrict (arg0, mode, PLUS, REG)
832 ? 1 : 0,
833 PLUS, REG, scale);
834 else if (code0 == REG && code1 == REG
835 && REGNO (arg1) < FIRST_PSEUDO_REGISTER
836 && (ok_for_base_p_nonstrict (arg1, mode, PLUS, REG)
837 || ok_for_index_p_nonstrict (arg1)))
838 record_address_regs (mode, arg0,
839 ok_for_base_p_nonstrict (arg1, mode, PLUS, REG)
840 ? 1 : 0,
841 PLUS, REG, scale);
842 /* If one operand is known to be a pointer, it must be the
843 base with the other operand the index. Likewise if the
844 other operand is a MULT. */
845 else if ((code0 == REG && REG_POINTER (arg0)) || code1 == MULT)
847 record_address_regs (mode, arg0, 0, PLUS, code1, scale);
848 record_address_regs (mode, arg1, 1, PLUS, code0, scale);
850 else if ((code1 == REG && REG_POINTER (arg1)) || code0 == MULT)
852 record_address_regs (mode, arg0, 1, PLUS, code1, scale);
853 record_address_regs (mode, arg1, 0, PLUS, code0, scale);
855 /* Otherwise, count equal chances that each might be a base or
856 index register. This case should be rare. */
857 else
859 record_address_regs (mode, arg0, 0, PLUS, code1, scale / 2);
860 record_address_regs (mode, arg0, 1, PLUS, code1, scale / 2);
861 record_address_regs (mode, arg1, 0, PLUS, code0, scale / 2);
862 record_address_regs (mode, arg1, 1, PLUS, code0, scale / 2);
865 break;
867 /* Double the importance of an allocno that is incremented or
868 decremented, since it would take two extra insns if it ends
869 up in the wrong place. */
870 case POST_MODIFY:
871 case PRE_MODIFY:
872 record_address_regs (mode, XEXP (x, 0), 0, code,
873 GET_CODE (XEXP (XEXP (x, 1), 1)), 2 * scale);
874 if (REG_P (XEXP (XEXP (x, 1), 1)))
875 record_address_regs (mode, XEXP (XEXP (x, 1), 1), 1, code, REG,
876 2 * scale);
877 break;
879 case POST_INC:
880 case PRE_INC:
881 case POST_DEC:
882 case PRE_DEC:
883 /* Double the importance of an allocno that is incremented or
884 decremented, since it would take two extra insns if it ends
885 up in the wrong place. If the operand is a pseudo-register,
886 show it is being used in an INC_DEC context. */
887 #ifdef FORBIDDEN_INC_DEC_CLASSES
888 if (REG_P (XEXP (x, 0))
889 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER)
890 in_inc_dec[COST_INDEX (REGNO (XEXP (x, 0)))] = true;
891 #endif
892 record_address_regs (mode, XEXP (x, 0), 0, code, SCRATCH, 2 * scale);
893 break;
895 case REG:
897 struct costs *pp;
898 enum reg_class i;
899 int k;
901 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
902 break;
904 if (allocno_p)
905 ALLOCNO_BAD_SPILL_P (ira_curr_regno_allocno_map[REGNO (x)]) = true;
906 pp = COSTS (costs, COST_INDEX (REGNO (x)));
907 pp->mem_cost += (ira_memory_move_cost[Pmode][rclass][1] * scale) / 2;
908 for (k = 0; k < cost_classes_num; k++)
910 i = cost_classes[k];
911 pp->cost[k]
912 += (ira_get_may_move_cost (Pmode, i, rclass, true) * scale) / 2;
915 break;
917 default:
919 const char *fmt = GET_RTX_FORMAT (code);
920 int i;
921 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
922 if (fmt[i] == 'e')
923 record_address_regs (mode, XEXP (x, i), context, code, SCRATCH,
924 scale);
931 /* Calculate the costs of insn operands. */
932 static void
933 record_operand_costs (rtx insn, struct costs **op_costs, enum reg_class *pref)
935 const char *constraints[MAX_RECOG_OPERANDS];
936 enum machine_mode modes[MAX_RECOG_OPERANDS];
937 int i;
939 for (i = 0; i < recog_data.n_operands; i++)
941 constraints[i] = recog_data.constraints[i];
942 modes[i] = recog_data.operand_mode[i];
945 /* If we get here, we are set up to record the costs of all the
946 operands for this insn. Start by initializing the costs. Then
947 handle any address registers. Finally record the desired classes
948 for any allocnos, doing it twice if some pair of operands are
949 commutative. */
950 for (i = 0; i < recog_data.n_operands; i++)
952 memcpy (op_costs[i], init_cost, struct_costs_size);
954 if (GET_CODE (recog_data.operand[i]) == SUBREG)
955 recog_data.operand[i] = SUBREG_REG (recog_data.operand[i]);
957 if (MEM_P (recog_data.operand[i]))
958 record_address_regs (GET_MODE (recog_data.operand[i]),
959 XEXP (recog_data.operand[i], 0),
960 0, MEM, SCRATCH, frequency * 2);
961 else if (constraints[i][0] == 'p'
962 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0],
963 constraints[i]))
964 record_address_regs (VOIDmode, recog_data.operand[i], 0, ADDRESS,
965 SCRATCH, frequency * 2);
968 /* Check for commutative in a separate loop so everything will have
969 been initialized. We must do this even if one operand is a
970 constant--see addsi3 in m68k.md. */
971 for (i = 0; i < (int) recog_data.n_operands - 1; i++)
972 if (constraints[i][0] == '%')
974 const char *xconstraints[MAX_RECOG_OPERANDS];
975 int j;
977 /* Handle commutative operands by swapping the constraints.
978 We assume the modes are the same. */
979 for (j = 0; j < recog_data.n_operands; j++)
980 xconstraints[j] = constraints[j];
982 xconstraints[i] = constraints[i+1];
983 xconstraints[i+1] = constraints[i];
984 record_reg_classes (recog_data.n_alternatives, recog_data.n_operands,
985 recog_data.operand, modes,
986 xconstraints, insn, op_costs, pref);
988 record_reg_classes (recog_data.n_alternatives, recog_data.n_operands,
989 recog_data.operand, modes,
990 constraints, insn, op_costs, pref);
995 /* Process one insn INSN.  Scan it and record each time it would save
996    code to put a certain allocno in a certain class.  Return the last
997    insn processed, so that the scan can be continued from there.  */
998 static rtx
999 scan_one_insn (rtx insn)
1001 enum rtx_code pat_code;
1002 rtx set, note;
1003 int i, k;
1005 if (!NONDEBUG_INSN_P (insn))
1006 return insn;
1008 pat_code = GET_CODE (PATTERN (insn));
1009 if (pat_code == USE || pat_code == CLOBBER || pat_code == ASM_INPUT
1010 || pat_code == ADDR_VEC || pat_code == ADDR_DIFF_VEC)
1011 return insn;
1013 set = single_set (insn);
1014 extract_insn (insn);
1016 /* If this insn loads a parameter from its stack slot, then it
1017 represents a savings, rather than a cost, if the parameter is
1018 stored in memory. Record this fact. */
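  /* Schematically: for "(set (reg R) (mem SLOT))" where R has a
     REG_EQUIV memory note, keeping R in memory makes this load go
     away, so one memory-load cost times the frequency is subtracted
     from R's mem_cost below.  */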
1019 if (set != 0 && REG_P (SET_DEST (set)) && MEM_P (SET_SRC (set))
1020 && (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != NULL_RTX
1021 && MEM_P (XEXP (note, 0)))
1023 enum reg_class cl = GENERAL_REGS;
1024 rtx reg = SET_DEST (set);
1025 int num = COST_INDEX (REGNO (reg));
1027 if (pref)
1028 cl = pref[num];
1029 COSTS (costs, num)->mem_cost
1030 -= ira_memory_move_cost[GET_MODE (reg)][cl][1] * frequency;
1031 record_address_regs (GET_MODE (SET_SRC (set)), XEXP (SET_SRC (set), 0),
1032 0, MEM, SCRATCH, frequency * 2);
1035 record_operand_costs (insn, op_costs, pref);
1037 /* Now add the cost for each operand to the total costs for its
1038 allocno. */
1039 for (i = 0; i < recog_data.n_operands; i++)
1040 if (REG_P (recog_data.operand[i])
1041 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER)
1043 int regno = REGNO (recog_data.operand[i]);
1044 struct costs *p = COSTS (costs, COST_INDEX (regno));
1045 struct costs *q = op_costs[i];
1047 p->mem_cost += q->mem_cost;
1048 for (k = 0; k < cost_classes_num; k++)
1049 p->cost[k] += q->cost[k];
1052 return insn;
1057 /* Print allocnos costs to file F. */
1058 static void
1059 print_allocno_costs (FILE *f)
1061 int k;
1062 ira_allocno_t a;
1063 ira_allocno_iterator ai;
1065 ira_assert (allocno_p);
1066 fprintf (f, "\n");
1067 FOR_EACH_ALLOCNO (a, ai)
1069 int i, rclass;
1070 basic_block bb;
1071 int regno = ALLOCNO_REGNO (a);
1073 i = ALLOCNO_NUM (a);
1074 fprintf (f, " a%d(r%d,", i, regno);
1075 if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
1076 fprintf (f, "b%d", bb->index);
1077 else
1078 fprintf (f, "l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop->num);
1079 fprintf (f, ") costs:");
1080 for (k = 0; k < cost_classes_num; k++)
1082 rclass = cost_classes[k];
1083 if (contains_reg_of_mode[rclass][PSEUDO_REGNO_MODE (regno)]
1084 #ifdef FORBIDDEN_INC_DEC_CLASSES
1085 && (! in_inc_dec[i] || ! forbidden_inc_dec_class[rclass])
1086 #endif
1087 #ifdef CANNOT_CHANGE_MODE_CLASS
1088 && ! invalid_mode_change_p (regno, (enum reg_class) rclass,
1089 PSEUDO_REGNO_MODE (regno))
1090 #endif
1093 fprintf (f, " %s:%d", reg_class_names[rclass],
1094 COSTS (costs, i)->cost[k]);
1095 if (flag_ira_region == IRA_REGION_ALL
1096 || flag_ira_region == IRA_REGION_MIXED)
1097 fprintf (f, ",%d", COSTS (total_allocno_costs, i)->cost[k]);
1100 fprintf (f, " MEM:%i\n", COSTS (costs, i)->mem_cost);
1104 /* Print pseudo costs to file F. */
1105 static void
1106 print_pseudo_costs (FILE *f)
1108 int regno, k;
1109 int rclass;
1111 ira_assert (! allocno_p);
1112 fprintf (f, "\n");
1113 for (regno = max_reg_num () - 1; regno >= FIRST_PSEUDO_REGISTER; regno--)
1115 if (regno_reg_rtx[regno] == NULL_RTX)
1116 continue;
1117 fprintf (f, " r%d costs:", regno);
1118 for (k = 0; k < cost_classes_num; k++)
1120 rclass = cost_classes[k];
1121 if (contains_reg_of_mode[rclass][PSEUDO_REGNO_MODE (regno)]
1122 #ifdef FORBIDDEN_INC_DEC_CLASSES
1123 && (! in_inc_dec[regno] || ! forbidden_inc_dec_class[rclass])
1124 #endif
1125 #ifdef CANNOT_CHANGE_MODE_CLASS
1126 && ! invalid_mode_change_p (regno, (enum reg_class) rclass,
1127 PSEUDO_REGNO_MODE (regno))
1128 #endif
1130 fprintf (f, " %s:%d", reg_class_names[rclass],
1131 COSTS (costs, regno)->cost[k]);
1133 fprintf (f, " MEM:%i\n", COSTS (costs, regno)->mem_cost);
1137 /* Process the insns of basic block BB to update the allocno or
1138    pseudo costs.  */
1139 static void
1140 process_bb_for_costs (basic_block bb)
1142 rtx insn;
1144 frequency = REG_FREQ_FROM_BB (bb);
1145 if (frequency == 0)
1146 frequency = 1;
1147 FOR_BB_INSNS (bb, insn)
1148 insn = scan_one_insn (insn);
1151 /* Traverse the BB represented by LOOP_TREE_NODE to update the allocno
1152 costs. */
1153 static void
1154 process_bb_node_for_costs (ira_loop_tree_node_t loop_tree_node)
1156 basic_block bb;
1158 bb = loop_tree_node->bb;
1159 if (bb != NULL)
1160 process_bb_for_costs (bb);
1163 /* Find costs of register classes and memory for allocnos or pseudos
1164 and their best costs. Set up preferred, alternative and cover
1165 classes for pseudos. */
1166 static void
1167 find_costs_and_classes (FILE *dump_file)
1169 int i, k, start;
1170 int pass;
1171 basic_block bb;
1173 init_recog ();
1174 #ifdef FORBIDDEN_INC_DEC_CLASSES
1175 in_inc_dec = ira_allocate (sizeof (bool) * cost_elements_num);
1176 #endif /* FORBIDDEN_INC_DEC_CLASSES */
1177 pref = NULL;
1178 start = 0;
1179 if (!resize_reg_info () && allocno_p && pseudo_classes_defined_p)
1181 ira_allocno_t a;
1182 ira_allocno_iterator ai;
1184 pref = pref_buffer;
1185 FOR_EACH_ALLOCNO (a, ai)
1186 pref[ALLOCNO_NUM (a)] = reg_preferred_class (ALLOCNO_REGNO (a));
1187 if (flag_expensive_optimizations)
1188 start = 1;
1190 if (allocno_p)
1191 /* Clear the flag for the next compiled function. */
1192 pseudo_classes_defined_p = false;
1193   /* Normally we scan the insns once and determine the best class to
1194      use for each allocno.  However, if -fexpensive-optimizations is
1195      on, we do so twice, the second time using the tentative best
1196      classes to guide the selection.  */
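  /* Sketch of the two passes: on the first pass PREF is usually NULL,
     so record_reg_classes applies no preference penalties; the classes
     found are stored into PREF, and with -fexpensive-optimizations a
     second pass rescans the insns using those tentative classes.  */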
1197 for (pass = start; pass <= flag_expensive_optimizations; pass++)
1199 if ((!allocno_p || internal_flag_ira_verbose > 0) && dump_file)
1200 fprintf (dump_file,
1201 "\nPass %i for finding pseudo/allocno costs\n\n", pass);
1202       /* We could use only cover classes.  Unfortunately it does not
1203 	 work well for some targets where some subclass of a cover class
1204 	 is costly and the wrong cover class is chosen.  */
1205 for (i = 0; i < N_REG_CLASSES; i++)
1206 cost_class_nums[i] = -1;
1207 for (cost_classes_num = 0;
1208 cost_classes_num < ira_important_classes_num;
1209 cost_classes_num++)
1211 cost_classes[cost_classes_num]
1212 = ira_important_classes[cost_classes_num];
1213 cost_class_nums[cost_classes[cost_classes_num]]
1214 = cost_classes_num;
1216 struct_costs_size
1217 = sizeof (struct costs) + sizeof (int) * (cost_classes_num - 1);
1218 /* Zero out our accumulation of the cost of each class for each
1219 allocno. */
1220 memset (costs, 0, cost_elements_num * struct_costs_size);
1221 #ifdef FORBIDDEN_INC_DEC_CLASSES
1222 memset (in_inc_dec, 0, cost_elements_num * sizeof (bool));
1223 #endif
1225 if (allocno_p)
1227 /* Scan the instructions and record each time it would save code
1228 to put a certain allocno in a certain class. */
1229 ira_traverse_loop_tree (true, ira_loop_tree_root,
1230 process_bb_node_for_costs, NULL);
1232 memcpy (total_allocno_costs, costs,
1233 max_struct_costs_size * ira_allocnos_num);
1235 else
1237 basic_block bb;
1239 FOR_EACH_BB (bb)
1240 process_bb_for_costs (bb);
1243 if (pass == 0)
1244 pref = pref_buffer;
1246 /* Now for each allocno look at how desirable each class is and
1247 find which class is preferred. */
1248 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
1250 ira_allocno_t a, parent_a;
1251 int rclass, a_num, parent_a_num;
1252 ira_loop_tree_node_t parent;
1253 int best_cost, allocno_cost;
1254 enum reg_class best, alt_class;
1255 #ifdef FORBIDDEN_INC_DEC_CLASSES
1256 int inc_dec_p = false;
1257 #endif
1259 if (! allocno_p)
1261 if (regno_reg_rtx[i] == NULL_RTX)
1262 continue;
1263 #ifdef FORBIDDEN_INC_DEC_CLASSES
1264 inc_dec_p = in_inc_dec[i];
1265 #endif
1266 memcpy (temp_costs, COSTS (costs, i), struct_costs_size);
1268 else
1270 if (ira_regno_allocno_map[i] == NULL)
1271 continue;
1272 memset (temp_costs, 0, struct_costs_size);
1273 /* Find cost of all allocnos with the same regno. */
1274 for (a = ira_regno_allocno_map[i];
1275 a != NULL;
1276 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1278 a_num = ALLOCNO_NUM (a);
1279 if ((flag_ira_region == IRA_REGION_ALL
1280 || flag_ira_region == IRA_REGION_MIXED)
1281 && (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
1282 && (parent_a = parent->regno_allocno_map[i]) != NULL
1283 /* There are no caps yet. */
1284 && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE
1285 (a)->border_allocnos,
1286 ALLOCNO_NUM (a)))
1288 /* Propagate costs to upper levels in the region
1289 tree. */
1290 parent_a_num = ALLOCNO_NUM (parent_a);
1291 for (k = 0; k < cost_classes_num; k++)
1292 COSTS (total_allocno_costs, parent_a_num)->cost[k]
1293 += COSTS (total_allocno_costs, a_num)->cost[k];
1294 COSTS (total_allocno_costs, parent_a_num)->mem_cost
1295 += COSTS (total_allocno_costs, a_num)->mem_cost;
1297 for (k = 0; k < cost_classes_num; k++)
1298 temp_costs->cost[k] += COSTS (costs, a_num)->cost[k];
1299 temp_costs->mem_cost += COSTS (costs, a_num)->mem_cost;
1300 #ifdef FORBIDDEN_INC_DEC_CLASSES
1301 if (in_inc_dec[a_num])
1302 inc_dec_p = true;
1303 #endif
1306 best_cost = (1 << (HOST_BITS_PER_INT - 2)) - 1;
1307 best = ALL_REGS;
1308 alt_class = NO_REGS;
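      /* BEST_COST starts as a large sentinel (roughly INT_MAX / 2);
	 BEST is widened through class unions when equally cheap
	 classes are found, and ALT_CLASS collects, on the last pass,
	 classes whose cost beats memory.  */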
1309 /* Find best common class for all allocnos with the same
1310 regno. */
1311 for (k = 0; k < cost_classes_num; k++)
1313 rclass = cost_classes[k];
1314 	  /* Ignore classes that are too small for this pseudo or
1315 	     invalid for a pseudo that was auto-incremented.  */
1316 if (! contains_reg_of_mode[rclass][PSEUDO_REGNO_MODE (i)]
1317 #ifdef FORBIDDEN_INC_DEC_CLASSES
1318 || (inc_dec_p && forbidden_inc_dec_class[rclass])
1319 #endif
1320 #ifdef CANNOT_CHANGE_MODE_CLASS
1321 || invalid_mode_change_p (i, (enum reg_class) rclass,
1322 PSEUDO_REGNO_MODE (i))
1323 #endif
1325 continue;
1326 if (temp_costs->cost[k] < best_cost)
1328 best_cost = temp_costs->cost[k];
1329 best = (enum reg_class) rclass;
1331 else if (temp_costs->cost[k] == best_cost)
1332 best = ira_reg_class_union[best][rclass];
1333 if (pass == flag_expensive_optimizations
1334 && temp_costs->cost[k] < temp_costs->mem_cost
1335 && (reg_class_size[reg_class_subunion[alt_class][rclass]]
1336 > reg_class_size[alt_class]))
1337 alt_class = reg_class_subunion[alt_class][rclass];
1339 alt_class = ira_class_translate[alt_class];
1340 if (best_cost > temp_costs->mem_cost)
1341 regno_cover_class[i] = NO_REGS;
1342 else if (flag_ira_algorithm == IRA_ALGORITHM_PRIORITY)
1343 /* Make the common class the biggest class of best and
1344 alt_class. */
1345 regno_cover_class[i] = alt_class == NO_REGS ? best : alt_class;
1346 else
1347 /* Make the common class a cover class. Remember all
1348 allocnos with the same regno should have the same cover
1349 class. */
1350 regno_cover_class[i] = ira_class_translate[best];
1351 if (pass == flag_expensive_optimizations)
1353 if (best_cost > temp_costs->mem_cost)
1354 best = alt_class = NO_REGS;
1355 else if (best == alt_class)
1356 alt_class = NO_REGS;
1357 setup_reg_classes (i, best, alt_class, regno_cover_class[i]);
1358 if ((!allocno_p || internal_flag_ira_verbose > 2)
1359 && dump_file != NULL)
1360 fprintf (dump_file,
1361 " r%d: preferred %s, alternative %s, cover %s\n",
1362 i, reg_class_names[best], reg_class_names[alt_class],
1363 reg_class_names[regno_cover_class[i]]);
1365 if (! allocno_p)
1367 pref[i] = best_cost > temp_costs->mem_cost ? NO_REGS : best;
1368 continue;
1370 for (a = ira_regno_allocno_map[i];
1371 a != NULL;
1372 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1374 a_num = ALLOCNO_NUM (a);
1375 if (regno_cover_class[i] == NO_REGS)
1376 best = NO_REGS;
1377 else
1379 	      /* Find the best class which is a subset of the common
1380 		 class.  */
1381 best_cost = (1 << (HOST_BITS_PER_INT - 2)) - 1;
1382 allocno_cost = best_cost;
1383 best = ALL_REGS;
1384 for (k = 0; k < cost_classes_num; k++)
1386 rclass = cost_classes[k];
1387 if (! ira_class_subset_p[rclass][regno_cover_class[i]])
1388 continue;
1389 /* Ignore classes that are too small for this
1390 operand or invalid for an operand that was
1391 auto-incremented. */
1392 if (! contains_reg_of_mode[rclass][PSEUDO_REGNO_MODE (i)]
1393 #ifdef FORBIDDEN_INC_DEC_CLASSES
1394 || (inc_dec_p && forbidden_inc_dec_class[rclass])
1395 #endif
1396 #ifdef CANNOT_CHANGE_MODE_CLASS
1397 || invalid_mode_change_p (i, (enum reg_class) rclass,
1398 PSEUDO_REGNO_MODE (i))
1399 #endif
1402 else if (COSTS (total_allocno_costs, a_num)->cost[k]
1403 < best_cost)
1405 best_cost
1406 = COSTS (total_allocno_costs, a_num)->cost[k];
1407 allocno_cost = COSTS (costs, a_num)->cost[k];
1408 best = (enum reg_class) rclass;
1410 else if (COSTS (total_allocno_costs, a_num)->cost[k]
1411 == best_cost)
1413 best = ira_reg_class_union[best][rclass];
1414 allocno_cost
1415 = MAX (allocno_cost, COSTS (costs, a_num)->cost[k]);
1418 ALLOCNO_COVER_CLASS_COST (a) = allocno_cost;
1420 ira_assert (flag_ira_algorithm == IRA_ALGORITHM_PRIORITY
1421 || ira_class_translate[best] == regno_cover_class[i]);
1422 if (internal_flag_ira_verbose > 2 && dump_file != NULL
1423 && (pass == 0 || pref[a_num] != best))
1425 fprintf (dump_file, " a%d (r%d,", a_num, i);
1426 if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
1427 fprintf (dump_file, "b%d", bb->index);
1428 else
1429 fprintf (dump_file, "l%d",
1430 ALLOCNO_LOOP_TREE_NODE (a)->loop->num);
1431 fprintf (dump_file, ") best %s, cover %s\n",
1432 reg_class_names[best],
1433 reg_class_names[regno_cover_class[i]]);
1435 pref[a_num] = best;
1439 if (internal_flag_ira_verbose > 4 && dump_file)
1441 if (allocno_p)
1442 print_allocno_costs (dump_file);
1443 else
1444 print_pseudo_costs (dump_file);
1445 fprintf (dump_file,"\n");
1448 #ifdef FORBIDDEN_INC_DEC_CLASSES
1449 ira_free (in_inc_dec);
1450 #endif
1455 /* Process moves involving hard regs to modify allocno hard register
1456    costs.  We can do this only after determining the allocno cover
1457    class.  If a hard register forms a register class, then moves with
1458    the hard register are already taken into account in class costs
1459    for the allocno.  */
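/* Illustration (schematic): for a copy between an allocno A and a
   hard register H of A's cover class, the register move cost times
   the block frequency is subtracted from A's cost for H below,
   nudging the allocator toward assigning H to A.  */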
1460 static void
1461 process_bb_node_for_hard_reg_moves (ira_loop_tree_node_t loop_tree_node)
1463 int i, freq, cost, src_regno, dst_regno, hard_regno;
1464 bool to_p;
1465 ira_allocno_t a;
1466 enum reg_class rclass, hard_reg_class;
1467 enum machine_mode mode;
1468 basic_block bb;
1469 rtx insn, set, src, dst;
1471 bb = loop_tree_node->bb;
1472 if (bb == NULL)
1473 return;
1474 freq = REG_FREQ_FROM_BB (bb);
1475 if (freq == 0)
1476 freq = 1;
1477 FOR_BB_INSNS (bb, insn)
1479 if (!NONDEBUG_INSN_P (insn))
1480 continue;
1481 set = single_set (insn);
1482 if (set == NULL_RTX)
1483 continue;
1484 dst = SET_DEST (set);
1485 src = SET_SRC (set);
1486 if (! REG_P (dst) || ! REG_P (src))
1487 continue;
1488 dst_regno = REGNO (dst);
1489 src_regno = REGNO (src);
1490 if (dst_regno >= FIRST_PSEUDO_REGISTER
1491 && src_regno < FIRST_PSEUDO_REGISTER)
1493 hard_regno = src_regno;
1494 to_p = true;
1495 a = ira_curr_regno_allocno_map[dst_regno];
1497 else if (src_regno >= FIRST_PSEUDO_REGISTER
1498 && dst_regno < FIRST_PSEUDO_REGISTER)
1500 hard_regno = dst_regno;
1501 to_p = false;
1502 a = ira_curr_regno_allocno_map[src_regno];
1504 else
1505 continue;
1506 rclass = ALLOCNO_COVER_CLASS (a);
1507 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], hard_regno))
1508 continue;
1509 i = ira_class_hard_reg_index[rclass][hard_regno];
1510 if (i < 0)
1511 continue;
1512 mode = ALLOCNO_MODE (a);
1513 hard_reg_class = REGNO_REG_CLASS (hard_regno);
1514 cost
1515 = (to_p ? ira_get_register_move_cost (mode, hard_reg_class, rclass)
1516 : ira_get_register_move_cost (mode, rclass, hard_reg_class)) * freq;
1517 ira_allocate_and_set_costs (&ALLOCNO_HARD_REG_COSTS (a), rclass,
1518 ALLOCNO_COVER_CLASS_COST (a));
1519 ira_allocate_and_set_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
1520 rclass, 0);
1521 ALLOCNO_HARD_REG_COSTS (a)[i] -= cost;
1522 ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[i] -= cost;
1523 ALLOCNO_COVER_CLASS_COST (a) = MIN (ALLOCNO_COVER_CLASS_COST (a),
1524 ALLOCNO_HARD_REG_COSTS (a)[i]);
1528 /* After we find hard register and memory costs for allocnos, define
1529    their cover classes and modify the hard register costs to account
1530    for insns moving allocnos to/from hard registers.  */
1531 static void
1532 setup_allocno_cover_class_and_costs (void)
1534 int i, j, n, regno, num;
1535 int *reg_costs;
1536 enum reg_class cover_class, rclass;
1537 enum machine_mode mode;
1538 ira_allocno_t a;
1539 ira_allocno_iterator ai;
1541 ira_assert (allocno_p);
1542 FOR_EACH_ALLOCNO (a, ai)
1544 i = ALLOCNO_NUM (a);
1545 mode = ALLOCNO_MODE (a);
1546 cover_class = regno_cover_class[ALLOCNO_REGNO (a)];
1547 ira_assert (pref[i] == NO_REGS || cover_class != NO_REGS);
1548 ALLOCNO_MEMORY_COST (a) = COSTS (costs, i)->mem_cost;
1549 ira_set_allocno_cover_class (a, cover_class);
1550 if (cover_class == NO_REGS)
1551 continue;
1552 ALLOCNO_AVAILABLE_REGS_NUM (a) = ira_available_class_regs[cover_class];
1553 if (optimize && ALLOCNO_COVER_CLASS (a) != pref[i])
1555 n = ira_class_hard_regs_num[cover_class];
1556 ALLOCNO_HARD_REG_COSTS (a)
1557 = reg_costs = ira_allocate_cost_vector (cover_class);
1558 for (j = n - 1; j >= 0; j--)
1560 regno = ira_class_hard_regs[cover_class][j];
1561 if (TEST_HARD_REG_BIT (reg_class_contents[pref[i]], regno))
1562 reg_costs[j] = ALLOCNO_COVER_CLASS_COST (a);
1563 else
1565 rclass = REGNO_REG_CLASS (regno);
1566 num = cost_class_nums[rclass];
1567 if (num < 0)
1569 		  /* The hard register class is not a cover class, or is
1570 		     a class not fully inside a cover class -- use
1571 		     the allocno cover class.  */
1572 ira_assert (ira_hard_regno_cover_class[regno]
1573 == cover_class);
1574 num = cost_class_nums[cover_class];
1576 reg_costs[j] = COSTS (costs, i)->cost[num];
1581 if (optimize)
1582 ira_traverse_loop_tree (true, ira_loop_tree_root,
1583 process_bb_node_for_hard_reg_moves, NULL);
1588 /* Function called once during compiler work. */
1589 void
1590 ira_init_costs_once (void)
1592 int i;
1594 init_cost = NULL;
1595 for (i = 0; i < MAX_RECOG_OPERANDS; i++)
1597 op_costs[i] = NULL;
1598 this_op_costs[i] = NULL;
1600 temp_costs = NULL;
1601 cost_classes = NULL;
1604 /* Free allocated temporary cost vectors. */
1605 static void
1606 free_ira_costs (void)
1608 int i;
1610 if (init_cost != NULL)
1611 free (init_cost);
1612 init_cost = NULL;
1613 for (i = 0; i < MAX_RECOG_OPERANDS; i++)
1615 if (op_costs[i] != NULL)
1616 free (op_costs[i]);
1617 if (this_op_costs[i] != NULL)
1618 free (this_op_costs[i]);
1619 op_costs[i] = this_op_costs[i] = NULL;
1621 if (temp_costs != NULL)
1622 free (temp_costs);
1623 temp_costs = NULL;
1624 if (cost_classes != NULL)
1625 free (cost_classes);
1626 cost_classes = NULL;
1629 /* This is called each time register related information is
1630 changed. */
1631 void
1632 ira_init_costs (void)
1634 int i;
1636 free_ira_costs ();
1637 max_struct_costs_size
1638 = sizeof (struct costs) + sizeof (int) * (ira_important_classes_num - 1);
1639 /* Don't use ira_allocate because vectors live through several IRA calls. */
1640 init_cost = (struct costs *) xmalloc (max_struct_costs_size);
1641 init_cost->mem_cost = 1000000;
1642 for (i = 0; i < ira_important_classes_num; i++)
1643 init_cost->cost[i] = 1000000;
1644 for (i = 0; i < MAX_RECOG_OPERANDS; i++)
1646 op_costs[i] = (struct costs *) xmalloc (max_struct_costs_size);
1647 this_op_costs[i] = (struct costs *) xmalloc (max_struct_costs_size);
1649 temp_costs = (struct costs *) xmalloc (max_struct_costs_size);
1650 cost_classes = (enum reg_class *) xmalloc (sizeof (enum reg_class)
1651 * ira_important_classes_num);
1654 /* Function called once at the end of compiler work. */
1655 void
1656 ira_finish_costs_once (void)
1658 free_ira_costs ();
1663 /* Common initialization function for ira_costs and
1664 ira_set_pseudo_classes. */
1665 static void
1666 init_costs (void)
1668 costs = (struct costs *) ira_allocate (max_struct_costs_size
1669 * cost_elements_num);
1670 pref_buffer
1671 = (enum reg_class *) ira_allocate (sizeof (enum reg_class)
1672 * cost_elements_num);
1673 regno_cover_class
1674 = (enum reg_class *) ira_allocate (sizeof (enum reg_class)
1675 * max_reg_num ());
1678 /* Common finalization function for ira_costs and
1679 ira_set_pseudo_classes. */
1680 static void
1681 finish_costs (void)
1683 ira_free (regno_cover_class);
1684 ira_free (pref_buffer);
1685 ira_free (costs);
1688 /* Entry function which defines cover class, memory and hard register
1689 costs for each allocno. */
1690 void
1691 ira_costs (void)
1693 allocno_p = true;
1694 cost_elements_num = ira_allocnos_num;
1695 init_costs ();
1696 total_allocno_costs = (struct costs *) ira_allocate (max_struct_costs_size
1697 * ira_allocnos_num);
1698 find_costs_and_classes (ira_dump_file);
1699 setup_allocno_cover_class_and_costs ();
1700 finish_costs ();
1701 ira_free (total_allocno_costs);
1704 /* Entry function which defines classes for pseudos. */
1705 void
1706 ira_set_pseudo_classes (FILE *dump_file)
1708 allocno_p = false;
1709 internal_flag_ira_verbose = flag_ira_verbose;
1710 cost_elements_num = max_reg_num ();
1711 init_costs ();
1712 find_costs_and_classes (dump_file);
1713 pseudo_classes_defined_p = true;
1714 finish_costs ();
1719 /* Change hard register costs for allocnos which live through
1720    function calls.  This is called only after we have found all the
1721    intersecting calls during building of allocno live ranges.  */
1722 void
1723 ira_tune_allocno_costs_and_cover_classes (void)
1725 int j, n, regno;
1726 int cost, min_cost, *reg_costs;
1727 enum reg_class cover_class, rclass;
1728 enum machine_mode mode;
1729 ira_allocno_t a;
1730 ira_allocno_iterator ai;
1732 FOR_EACH_ALLOCNO (a, ai)
1734 cover_class = ALLOCNO_COVER_CLASS (a);
1735 if (cover_class == NO_REGS)
1736 continue;
1737 mode = ALLOCNO_MODE (a);
1738 n = ira_class_hard_regs_num[cover_class];
1739 min_cost = INT_MAX;
1740 if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
1742 ira_allocate_and_set_costs
1743 (&ALLOCNO_HARD_REG_COSTS (a), cover_class,
1744 ALLOCNO_COVER_CLASS_COST (a));
1745 reg_costs = ALLOCNO_HARD_REG_COSTS (a);
1746 for (j = n - 1; j >= 0; j--)
1748 regno = ira_class_hard_regs[cover_class][j];
1749 rclass = REGNO_REG_CLASS (regno);
1750 cost = 0;
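	      /* A hard register that is call-clobbered (even partially)
		 for this mode would have to be saved and restored around
		 every call the allocno crosses; this is approximated
		 below by one memory store plus one load scaled by the
		 allocno's call frequency.  */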
1751 if (! ira_hard_reg_not_in_set_p (regno, mode, call_used_reg_set)
1752 || HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
1753 cost += (ALLOCNO_CALL_FREQ (a)
1754 * (ira_memory_move_cost[mode][rclass][0]
1755 + ira_memory_move_cost[mode][rclass][1]));
1756 #ifdef IRA_HARD_REGNO_ADD_COST_MULTIPLIER
1757 cost += ((ira_memory_move_cost[mode][rclass][0]
1758 + ira_memory_move_cost[mode][rclass][1])
1759 * ALLOCNO_FREQ (a)
1760 * IRA_HARD_REGNO_ADD_COST_MULTIPLIER (regno) / 2);
1761 #endif
1762 reg_costs[j] += cost;
1763 if (min_cost > reg_costs[j])
1764 min_cost = reg_costs[j];
1767 if (min_cost != INT_MAX)
1768 ALLOCNO_COVER_CLASS_COST (a) = min_cost;