/* Perform simple optimizations to clean up the result of reload.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "machmode.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-config.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "regs.h"
#include "basic-block.h"
#include "reload.h"
#include "recog.h"
#include "output.h"
#include "cselib.h"
#include "diagnostic-core.h"
#include "toplev.h"
#include "except.h"
#include "tree.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"
#include "dbgcnt.h"
static int reload_cse_noop_set_p (rtx);
static void reload_cse_simplify (rtx, rtx);
static void reload_cse_regs_1 (rtx);
static int reload_cse_simplify_set (rtx, rtx);
static int reload_cse_simplify_operands (rtx, rtx);

static void reload_combine (void);
static void reload_combine_note_use (rtx *, rtx, int, rtx);
static void reload_combine_note_store (rtx, const_rtx, void *);

static bool reload_cse_move2add (rtx);
static void move2add_note_store (rtx, const_rtx, void *);
/* Call cse / combine like post-reload optimization phases.
   FIRST is the first instruction.  */
void
reload_cse_regs (rtx first ATTRIBUTE_UNUSED)
{
  bool moves_converted;
  reload_cse_regs_1 (first);
  reload_combine ();
  moves_converted = reload_cse_move2add (first);
  if (flag_expensive_optimizations)
    {
      if (moves_converted)
        reload_combine ();
      reload_cse_regs_1 (first);
    }
}
/* See whether a single set SET is a noop.  */
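/* For illustration (the hard register numbers here are made up): once
   reload has assigned two pseudos to the same hard register, a copy
   between them degenerates into
       (set (reg:SI 0) (reg:SI 0))
   Such a set is recognized through cselib value equivalence rather than
   by a purely textual comparison of the operands.  */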
static int
reload_cse_noop_set_p (rtx set)
{
  if (cselib_reg_set_mode (SET_DEST (set)) != GET_MODE (SET_DEST (set)))
    return 0;

  return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
}
/* Try to simplify INSN.  */
static void
reload_cse_simplify (rtx insn, rtx testreg)
{
  rtx body = PATTERN (insn);

  if (GET_CODE (body) == SET)
    {
      int count = 0;

      /* Simplify even if we may think it is a no-op.
         We may think a memory load of a value smaller than WORD_SIZE
         is redundant because we haven't taken into account possible
         implicit extension.  reload_cse_simplify_set() will bring
         this out, so it's safer to simplify before we delete.  */
      count += reload_cse_simplify_set (body, insn);

      if (!count && reload_cse_noop_set_p (body))
        {
          rtx value = SET_DEST (body);
          if (REG_P (value)
              && ! REG_FUNCTION_VALUE_P (value))
            value = 0;
          delete_insn_and_edges (insn);
          return;
        }

      if (count > 0)
        apply_change_group ();
      else
        reload_cse_simplify_operands (insn, testreg);
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      int count = 0;
      rtx value = NULL_RTX;

      /* Registers mentioned in the clobber list for an asm cannot be reused
         within the body of the asm.  Invalidate those registers now so that
         we don't try to substitute values for them.  */
      if (asm_noperands (body) >= 0)
        {
          for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
            {
              rtx part = XVECEXP (body, 0, i);
              if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
                cselib_invalidate_rtx (XEXP (part, 0));
            }
        }

      /* If every action in a PARALLEL is a noop, we can delete
         the entire PARALLEL.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
        {
          rtx part = XVECEXP (body, 0, i);
          if (GET_CODE (part) == SET)
            {
              if (! reload_cse_noop_set_p (part))
                break;
              if (REG_P (SET_DEST (part))
                  && REG_FUNCTION_VALUE_P (SET_DEST (part)))
                {
                  if (value)
                    break;
                  value = SET_DEST (part);
                }
            }
          else if (GET_CODE (part) != CLOBBER)
            break;
        }

      if (i < 0)
        {
          delete_insn_and_edges (insn);
          /* We're done with this insn.  */
          return;
        }

      /* It's not a no-op, but we can try to simplify it.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
        if (GET_CODE (XVECEXP (body, 0, i)) == SET)
          count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);

      if (count > 0)
        apply_change_group ();
      else
        reload_cse_simplify_operands (insn, testreg);
    }
}
/* Do a very simple CSE pass over the hard registers.

   This function detects no-op moves where we happened to assign two
   different pseudo-registers to the same hard register, and then
   copied one to the other.  Reload will generate a useless
   instruction copying a register to itself.

   This function also detects cases where we load a value from memory
   into two different registers, and (if memory is more expensive than
   registers) changes it to simply copy the first register into the
   second register.

   Another optimization is performed that scans the operands of each
   instruction to see whether the value is already available in a
   hard register.  It then replaces the operand with the hard register
   if possible, much like an optional reload would.  */
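/* A sketch of the second case above, with illustrative register numbers:
       (set (reg:SI 1) (mem:SI (reg:SI 0)))
       ...
       (set (reg:SI 2) (mem:SI (reg:SI 0)))
   becomes, when a register-register copy is cheaper than the repeated
   load,
       (set (reg:SI 1) (mem:SI (reg:SI 0)))
       ...
       (set (reg:SI 2) (reg:SI 1))  */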
static void
reload_cse_regs_1 (rtx first)
{
  rtx insn;
  rtx testreg = gen_rtx_REG (VOIDmode, -1);

  cselib_init (CSELIB_RECORD_MEMORY);
  init_alias_analysis ();

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        reload_cse_simplify (insn, testreg);

      cselib_process_insn (insn);
    }

  /* Clean up.  */
  end_alias_analysis ();
  cselib_finish ();
}
/* Try to simplify a single SET instruction.  SET is the set pattern.
   INSN is the instruction it came from.
   This function only handles one case: if we set a register to a value
   which is not a register, we try to find that value in some other register
   and change the set into a register copy.  */
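/* For example (illustrative RTL only): if cselib records that (reg:SI 1)
   currently holds the same value as the memory source below, then
       (set (reg:SI 2) (mem:SI (symbol_ref:SI ("x"))))
   may become
       (set (reg:SI 2) (reg:SI 1))
   provided the register copy costs no more than the load.  */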
static int
reload_cse_simplify_set (rtx set, rtx insn)
{
  int did_change = 0;
  int dreg;
  rtx src;
  enum reg_class dclass;
  int old_cost;
  cselib_val *val;
  struct elt_loc_list *l;
#ifdef LOAD_EXTEND_OP
  enum rtx_code extend_op = UNKNOWN;
#endif
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

  dreg = true_regnum (SET_DEST (set));
  if (dreg < 0)
    return 0;

  src = SET_SRC (set);
  if (side_effects_p (src) || true_regnum (src) >= 0)
    return 0;

  dclass = REGNO_REG_CLASS (dreg);

#ifdef LOAD_EXTEND_OP
  /* When replacing a memory with a register, we need to honor assumptions
     that combine made wrt the contents of sign bits.  We'll do this by
     generating an extend instruction instead of a reg->reg copy.  Thus
     the destination must be a register that we can widen.  */
  if (MEM_P (src)
      && GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
      && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != UNKNOWN
      && !REG_P (SET_DEST (set)))
    return 0;
#endif

  val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0);
  if (! val)
    return 0;

  /* If memory loads are cheaper than register copies, don't change them.  */
  if (MEM_P (src))
    old_cost = memory_move_cost (GET_MODE (src), dclass, true);
  else if (REG_P (src))
    old_cost = register_move_cost (GET_MODE (src),
                                   REGNO_REG_CLASS (REGNO (src)), dclass);
  else
    old_cost = rtx_cost (src, SET, speed);

  for (l = val->locs; l; l = l->next)
    {
      rtx this_rtx = l->loc;
      int this_cost;

      if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
        {
#ifdef LOAD_EXTEND_OP
          if (extend_op != UNKNOWN)
            {
              HOST_WIDE_INT this_val;

              /* ??? I'm lazy and don't wish to handle CONST_DOUBLE.  Other
                 constants, such as SYMBOL_REF, cannot be extended.  */
              if (!CONST_INT_P (this_rtx))
                continue;

              this_val = INTVAL (this_rtx);
              switch (extend_op)
                {
                case ZERO_EXTEND:
                  this_val &= GET_MODE_MASK (GET_MODE (src));
                  break;
                case SIGN_EXTEND:
                  /* ??? In theory we're already extended.  */
                  if (this_val == trunc_int_for_mode (this_val, GET_MODE (src)))
                    break;
                default:
                  gcc_unreachable ();
                }
              this_rtx = GEN_INT (this_val);
            }
#endif
          this_cost = rtx_cost (this_rtx, SET, speed);
        }
      else if (REG_P (this_rtx))
        {
#ifdef LOAD_EXTEND_OP
          if (extend_op != UNKNOWN)
            {
              this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
              this_cost = rtx_cost (this_rtx, SET, speed);
            }
          else
#endif
            this_cost = register_move_cost (GET_MODE (this_rtx),
                                            REGNO_REG_CLASS (REGNO (this_rtx)),
                                            dclass);
        }
      else
        continue;

      /* If equal costs, prefer registers over anything else.  That
         tends to lead to smaller instructions on some machines.  */
      if (this_cost < old_cost
          || (this_cost == old_cost
              && REG_P (this_rtx)
              && !REG_P (SET_SRC (set))))
        {
#ifdef LOAD_EXTEND_OP
          if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
              && extend_op != UNKNOWN
#ifdef CANNOT_CHANGE_MODE_CLASS
              && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
                                            word_mode,
                                            REGNO_REG_CLASS (REGNO (SET_DEST (set))))
#endif
              )
            {
              rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
              ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
              validate_change (insn, &SET_DEST (set), wide_dest, 1);
            }
#endif

          validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
          old_cost = this_cost, did_change = 1;
        }
    }

  return did_change;
}
/* Try to replace operands in INSN with equivalent values that are already
   in registers.  This can be viewed as optional reloading.

   For each non-register operand in the insn, see if any hard regs are
   known to be equivalent to that operand.  Record the alternatives which
   can accept these hard registers.  Among all alternatives, select the
   ones which are better or equal to the one currently matching, where
   "better" is in terms of '?' and '!' constraints.  Among the remaining
   alternatives, select the one which replaces most operands with
   hard registers.  */
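/* For instance, with a made-up constraint string "r,m" for some operand
   that is currently a memory reference, if cselib knows a hard register
   holding the same value, the "r" alternative can be selected and the
   operand replaced by that register, just as an optional reload would
   have done.  */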
static int
reload_cse_simplify_operands (rtx insn, rtx testreg)
{
  int i, j;

  /* For each operand, all registers that are equivalent to it.  */
  HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];

  const char *constraints[MAX_RECOG_OPERANDS];

  /* Vector recording how bad an alternative is.  */
  int *alternative_reject;
  /* Vector recording how many registers can be introduced by choosing
     this alternative.  */
  int *alternative_nregs;
  /* Array of vectors recording, for each operand and each alternative,
     which hard register to substitute, or -1 if the operand should be
     left as it is.  */
  int *op_alt_regno[MAX_RECOG_OPERANDS];
  /* Array of alternatives, sorted in order of decreasing desirability.  */
  int *alternative_order;

  extract_insn (insn);

  if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
    return 0;

  /* Figure out which alternative currently matches.  */
  if (! constrain_operands (1))
    fatal_insn_not_found (insn);

  alternative_reject = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_nregs = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_order = XALLOCAVEC (int, recog_data.n_alternatives);
  memset (alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
  memset (alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));

  /* For each operand, find out which regs are equivalent.  */
  for (i = 0; i < recog_data.n_operands; i++)
    {
      cselib_val *v;
      struct elt_loc_list *l;
      rtx op;

      CLEAR_HARD_REG_SET (equiv_regs[i]);

      /* cselib blows up on CODE_LABELs.  Trying to fix that doesn't seem
         right, so avoid the problem here.  Likewise if we have a constant
         and the insn pattern doesn't tell us the mode we need.  */
      if (LABEL_P (recog_data.operand[i])
          || (CONSTANT_P (recog_data.operand[i])
              && recog_data.operand_mode[i] == VOIDmode))
        continue;

      op = recog_data.operand[i];
#ifdef LOAD_EXTEND_OP
      if (MEM_P (op)
          && GET_MODE_BITSIZE (GET_MODE (op)) < BITS_PER_WORD
          && LOAD_EXTEND_OP (GET_MODE (op)) != UNKNOWN)
        {
          rtx set = single_set (insn);

          /* We might have multiple sets, some of which do implicit
             extension.  Punt on this for now.  */
          if (! set)
            continue;
          /* If the destination is also a MEM or a STRICT_LOW_PART, no
             extension applies.
             Also, if there is an explicit extension, we don't have to
             worry about an implicit one.  */
          else if (MEM_P (SET_DEST (set))
                   || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
                   || GET_CODE (SET_SRC (set)) == ZERO_EXTEND
                   || GET_CODE (SET_SRC (set)) == SIGN_EXTEND)
            ; /* Continue ordinary processing.  */
#ifdef CANNOT_CHANGE_MODE_CLASS
          /* If the register cannot change mode to word_mode, it follows that
             it cannot have been used in word_mode.  */
          else if (REG_P (SET_DEST (set))
                   && CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
                                                word_mode,
                                                REGNO_REG_CLASS (REGNO (SET_DEST (set)))))
            ; /* Continue ordinary processing.  */
#endif
          /* If this is a straight load, make the extension explicit.  */
          else if (REG_P (SET_DEST (set))
                   && recog_data.n_operands == 2
                   && SET_SRC (set) == op
                   && SET_DEST (set) == recog_data.operand[1-i])
            {
              validate_change (insn, recog_data.operand_loc[i],
                               gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (op)),
                                              word_mode, op),
                               1);
              validate_change (insn, recog_data.operand_loc[1-i],
                               gen_rtx_REG (word_mode, REGNO (SET_DEST (set))),
                               1);
              if (! apply_change_group ())
                return 0;
              return reload_cse_simplify_operands (insn, testreg);
            }
          else
            /* ??? There might be arithmetic operations with memory that are
               safe to optimize, but is it worth the trouble?  */
            continue;
        }
#endif /* LOAD_EXTEND_OP */
      v = cselib_lookup (op, recog_data.operand_mode[i], 0);
      if (! v)
        continue;

      for (l = v->locs; l; l = l->next)
        if (REG_P (l->loc))
          SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
    }

  for (i = 0; i < recog_data.n_operands; i++)
    {
      enum machine_mode mode;
      int regno;
      const char *p;

      op_alt_regno[i] = XALLOCAVEC (int, recog_data.n_alternatives);
      for (j = 0; j < recog_data.n_alternatives; j++)
        op_alt_regno[i][j] = -1;

      p = constraints[i] = recog_data.constraints[i];
      mode = recog_data.operand_mode[i];

      /* Add the reject values for each alternative given by the constraints
         for this operand.  */
      j = 0;
      while (*p != '\0')
        {
          char c = *p++;
          if (c == ',')
            j++;
          else if (c == '?')
            alternative_reject[j] += 3;
          else if (c == '!')
            alternative_reject[j] += 300;
        }

      /* We won't change operands which are already registers.  We
         also don't want to modify output operands.  */
      regno = true_regnum (recog_data.operand[i]);
      if (regno >= 0
          || constraints[i][0] == '='
          || constraints[i][0] == '+')
        continue;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          enum reg_class rclass = NO_REGS;

          if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
            continue;

          SET_REGNO (testreg, regno);
          PUT_MODE (testreg, mode);

          /* We found a register equal to this operand.  Now look for all
             alternatives that can accept this register and have not been
             assigned a register they can use yet.  */
          j = 0;
          p = constraints[i];
          for (;;)
            {
              char c = *p;

              switch (c)
                {
                case '=':  case '+':  case '?':
                case '#':  case '&':  case '!':
                case '*':  case '%':
                case '0':  case '1':  case '2':  case '3':  case '4':
                case '5':  case '6':  case '7':  case '8':  case '9':
                case '<':  case '>':  case 'V':  case 'o':
                case 'E':  case 'F':  case 'G':  case 'H':
                case 's':  case 'i':  case 'n':
                case 'I':  case 'J':  case 'K':  case 'L':
                case 'M':  case 'N':  case 'O':  case 'P':
                case 'p':  case 'X':  case TARGET_MEM_CONSTRAINT:
                  /* These don't say anything we care about.  */
                  break;

                case 'g': case 'r':
                  rclass = reg_class_subunion[(int) rclass][(int) GENERAL_REGS];
                  break;

                default:
                  rclass
                    = (reg_class_subunion
                       [(int) rclass]
                       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
                  break;

                case ',': case '\0':
                  /* See if REGNO fits this alternative, and set it up as the
                     replacement register if we don't have one for this
                     alternative yet and the operand being replaced is not
                     a cheap CONST_INT.  */
                  if (op_alt_regno[i][j] == -1
                      && reg_fits_class_p (testreg, rclass, 0, mode)
                      && (!CONST_INT_P (recog_data.operand[i])
                          || (rtx_cost (recog_data.operand[i], SET,
                                        optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
                              > rtx_cost (testreg, SET,
                                          optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn))))))
                    {
                      alternative_nregs[j]++;
                      op_alt_regno[i][j] = regno;
                    }
                  j++;
                  rclass = NO_REGS;
                  break;
                }
              p += CONSTRAINT_LEN (c, p);

              if (c == '\0')
                break;
            }
        }
    }

  /* Record all alternatives which are better or equal to the currently
     matching one in the alternative_order array.  */
  for (i = j = 0; i < recog_data.n_alternatives; i++)
    if (alternative_reject[i] <= alternative_reject[which_alternative])
      alternative_order[j++] = i;
  recog_data.n_alternatives = j;

  /* Sort it.  Given a small number of alternatives, a dumb algorithm
     won't hurt too much.  */
  for (i = 0; i < recog_data.n_alternatives - 1; i++)
    {
      int best = i;
      int best_reject = alternative_reject[alternative_order[i]];
      int best_nregs = alternative_nregs[alternative_order[i]];
      int tmp;

      for (j = i + 1; j < recog_data.n_alternatives; j++)
        {
          int this_reject = alternative_reject[alternative_order[j]];
          int this_nregs = alternative_nregs[alternative_order[j]];

          if (this_reject < best_reject
              || (this_reject == best_reject && this_nregs > best_nregs))
            {
              best = j;
              best_reject = this_reject;
              best_nregs = this_nregs;
            }
        }

      tmp = alternative_order[best];
      alternative_order[best] = alternative_order[i];
      alternative_order[i] = tmp;
    }

  /* Substitute the operands as determined by op_alt_regno for the best
     alternative.  */
  j = alternative_order[0];

  for (i = 0; i < recog_data.n_operands; i++)
    {
      enum machine_mode mode = recog_data.operand_mode[i];
      if (op_alt_regno[i][j] == -1)
        continue;

      validate_change (insn, recog_data.operand_loc[i],
                       gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
    }

  for (i = recog_data.n_dups - 1; i >= 0; i--)
    {
      int op = recog_data.dup_num[i];
      enum machine_mode mode = recog_data.operand_mode[op];

      if (op_alt_regno[op][j] == -1)
        continue;

      validate_change (insn, recog_data.dup_loc[i],
                       gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
    }

  return apply_change_group ();
}
/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
   addressing now.
   This code might also be useful when reload gave up on reg+reg addressing
   because of clashes between the return register and INDEX_REG_CLASS.  */
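/* The typical transformation, illustrated in full before
   reload_combine_recognize_pattern below, is
       (set (reg X) (const_int N))
       (set (reg X) (plus (reg X) (reg Y)))
       ... (mem (reg X)) ...
   =>
       (set (reg Z) (const_int N))
       ... (mem (plus (reg Z) (reg Y))) ...  */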
/* The maximum number of uses of a register we can keep track of to
   replace them with reg+reg addressing.  */
#define RELOAD_COMBINE_MAX_USES 16

/* Describes a recorded use of a register.  */
struct reg_use
{
  /* The insn where a register has been used.  */
  rtx insn;
  /* Points to the memory reference enclosing the use, if any, NULL_RTX
     otherwise.  */
  rtx containing_mem;
  /* Location of the register within INSN.  */
  rtx *usep;
  /* The reverse uid of the insn.  */
  int ruid;
};

/* If the register is used in some unknown fashion, USE_INDEX is negative.
   If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
   indicates where it is first set or clobbered.
   Otherwise, USE_INDEX is the index of the last encountered use of the
   register (which is first among these we have seen since we scan backwards).
   USE_RUID indicates the first encountered, i.e. last, of these uses.
   If ALL_OFFSETS_MATCH is true, all encountered uses were inside a PLUS
   with a constant offset; OFFSET contains this constant in that case.
   STORE_RUID is always meaningful if we only want to use a value in a
   register in a different place: it denotes the next insn in the insn
   stream (i.e. the last encountered) that sets or clobbers the register.
   REAL_STORE_RUID is similar, but clobbers are ignored when updating it.  */
static struct
{
  struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
  rtx offset;
  int use_index;
  int store_ruid;
  int real_store_ruid;
  int use_ruid;
  bool all_offsets_match;
} reg_state[FIRST_PSEUDO_REGISTER];
/* Reverse linear uid.  This is increased in reload_combine while scanning
   the instructions from last to first.  It is used to set last_label_ruid
   and the store_ruid / use_ruid fields in reg_state.  */
static int reload_combine_ruid;

/* The RUID of the last label we encountered in reload_combine.  */
static int last_label_ruid;

/* The RUID of the last jump we encountered in reload_combine.  */
static int last_jump_ruid;

/* The register numbers of the first and last index register.  A value of
   -1 in LAST_INDEX_REG indicates that we've previously computed these
   values and found no suitable index registers.  */
static int first_index_reg = -1;
static int last_index_reg;

#define LABEL_LIVE(LABEL) \
  (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
/* Subroutine of reload_combine_split_ruids, called to fix up a single
   ruid pointed to by *PRUID if it is higher than SPLIT_RUID.  */

static inline void
reload_combine_split_one_ruid (int *pruid, int split_ruid)
{
  if (*pruid > split_ruid)
    (*pruid)++;
}
/* Called when we insert a new insn in a position we've already passed in
   the scan.  Examine all our state, increasing all ruids that are higher
   than SPLIT_RUID by one in order to make room for a new insn.  */

static void
reload_combine_split_ruids (int split_ruid)
{
  unsigned i;

  reload_combine_split_one_ruid (&reload_combine_ruid, split_ruid);
  reload_combine_split_one_ruid (&last_label_ruid, split_ruid);
  reload_combine_split_one_ruid (&last_jump_ruid, split_ruid);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, idx = reg_state[i].use_index;
      reload_combine_split_one_ruid (&reg_state[i].use_ruid, split_ruid);
      reload_combine_split_one_ruid (&reg_state[i].store_ruid, split_ruid);
      reload_combine_split_one_ruid (&reg_state[i].real_store_ruid,
                                     split_ruid);
      if (idx < 0)
        continue;
      for (j = idx; j < RELOAD_COMBINE_MAX_USES; j++)
        {
          reload_combine_split_one_ruid (&reg_state[i].reg_use[j].ruid,
                                         split_ruid);
        }
    }
}
/* Called when we are about to rescan a previously encountered insn with
   reload_combine_note_use after modifying some part of it.  This clears all
   information about uses in that particular insn.  */

static void
reload_combine_purge_insn_uses (rtx insn)
{
  unsigned i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, k, idx = reg_state[i].use_index;
      if (idx < 0)
        continue;
      j = k = RELOAD_COMBINE_MAX_USES;
      while (j-- > idx)
        {
          if (reg_state[i].reg_use[j].insn != insn)
            {
              k--;
              if (k != j)
                reg_state[i].reg_use[k] = reg_state[i].reg_use[j];
            }
        }
      reg_state[i].use_index = k;
    }
}
/* Called when we need to forget about all uses of REGNO after an insn
   which is identified by RUID.  */

static void
reload_combine_purge_reg_uses_after_ruid (unsigned regno, int ruid)
{
  int j, k, idx = reg_state[regno].use_index;
  if (idx < 0)
    return;
  j = k = RELOAD_COMBINE_MAX_USES;
  while (j-- > idx)
    {
      if (reg_state[regno].reg_use[j].ruid >= ruid)
        {
          k--;
          if (k != j)
            reg_state[regno].reg_use[k] = reg_state[regno].reg_use[j];
        }
    }
  reg_state[regno].use_index = k;
}
/* Find the use of REGNO with the ruid that is highest among those
   lower than RUID_LIMIT, and return it if it is the only use of this
   reg in the insn (or if the insn is a debug insn).  Return NULL
   otherwise.  */

static struct reg_use *
reload_combine_closest_single_use (unsigned regno, int ruid_limit)
{
  int i, best_ruid = 0;
  int use_idx = reg_state[regno].use_index;
  struct reg_use *retval;

  if (use_idx < 0)
    return NULL;
  retval = NULL;
  for (i = use_idx; i < RELOAD_COMBINE_MAX_USES; i++)
    {
      struct reg_use *use = reg_state[regno].reg_use + i;
      int this_ruid = use->ruid;
      if (this_ruid >= ruid_limit)
        continue;
      if (this_ruid > best_ruid)
        {
          best_ruid = this_ruid;
          retval = reg_state[regno].reg_use + i;
        }
      else if (this_ruid == best_ruid && !DEBUG_INSN_P (use->insn))
        retval = NULL;
    }
  if (last_label_ruid >= best_ruid)
    return NULL;
  return retval;
}
/* After we've moved an add insn, fix up any debug insns that occur between
   the old location of the add and the new location.  REGNO is the destination
   register of the add insn; REG is the corresponding RTX.  REPLACEMENT is
   the SET_SRC of the add.  MIN_RUID specifies the ruid of the insn after
   which we've placed the add, we ignore any debug insns after it.  */

static void
fixup_debug_insns (unsigned regno, rtx reg, rtx replacement, int min_ruid)
{
  struct reg_use *use;
  int from = reload_combine_ruid;
  for (;;)
    {
      rtx t;
      rtx use_insn = NULL_RTX;
      if (from < min_ruid)
        break;
      use = reload_combine_closest_single_use (regno, from);
      if (use)
        {
          from = use->ruid;
          use_insn = use->insn;
        }
      else
        break;

      if (NONDEBUG_INSN_P (use->insn))
        continue;
      t = INSN_VAR_LOCATION_LOC (use_insn);
      t = simplify_replace_rtx (t, reg, copy_rtx (replacement));
      validate_change (use->insn,
                       &INSN_VAR_LOCATION_LOC (use->insn), t, 0);
      reload_combine_purge_insn_uses (use_insn);
      reload_combine_note_use (&PATTERN (use_insn), use_insn,
                               use->ruid, NULL_RTX);
    }
}
/* Called by reload_combine when scanning INSN.  This function tries to detect
   patterns where a constant is added to a register, and the result is used
   in an address.
   Return true if no further processing is needed on INSN; false if it wasn't
   recognized and should be handled normally.  */
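/* A sketch of the transformation (illustrative RTL only):
       (set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4)))
       ...
       (set (reg:SI 0) (mem:SI (reg:SI 1)))
   can become
       (set (reg:SI 0) (mem:SI (plus:SI (reg:SI 1) (const_int 4))))
   with the add moved past the use, or deleted outright when every
   recorded use absorbs the constant.  */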
static bool
reload_combine_recognize_const_pattern (rtx insn)
{
  int from_ruid = reload_combine_ruid;
  rtx set, pat, reg, src, addreg;
  unsigned int regno;
  struct reg_use *use;
  bool must_move_add;
  rtx add_moved_after_insn = NULL_RTX;
  int add_moved_after_ruid = 0;
  int clobbered_regno = -1;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg)
      || hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] != 1
      || GET_MODE (reg) != Pmode
      || reg == stack_pointer_rtx)
    return false;

  regno = REGNO (reg);

  /* We look for a REG1 = REG2 + CONSTANT insn, followed by either
     uses of REG1 inside an address, or inside another add insn.  If
     possible and profitable, merge the addition into subsequent
     uses.  */
  if (GET_CODE (src) != PLUS
      || !REG_P (XEXP (src, 0))
      || !CONSTANT_P (XEXP (src, 1)))
    return false;

  addreg = XEXP (src, 0);
  must_move_add = rtx_equal_p (reg, addreg);

  pat = PATTERN (insn);
  if (must_move_add && set != pat)
    {
      /* We have to be careful when moving the add; apart from the
         single_set there may also be clobbers.  Recognize one special
         case, that of one clobber alongside the set (likely a clobber
         of the CC register).  */
      gcc_assert (GET_CODE (PATTERN (insn)) == PARALLEL);
      if (XVECLEN (pat, 0) != 2 || XVECEXP (pat, 0, 0) != set
          || GET_CODE (XVECEXP (pat, 0, 1)) != CLOBBER
          || !REG_P (XEXP (XVECEXP (pat, 0, 1), 0)))
        return false;
      clobbered_regno = REGNO (XEXP (XVECEXP (pat, 0, 1), 0));
    }

  do
    {
      use = reload_combine_closest_single_use (regno, from_ruid);

      if (use)
        /* Start the search for the next use from here.  */
        from_ruid = use->ruid;

      /* We'll fix up DEBUG_INSNs after we're done.  */
      if (use && DEBUG_INSN_P (use->insn))
        continue;

      if (use && GET_MODE (*use->usep) == Pmode)
        {
          rtx use_insn = use->insn;
          int use_ruid = use->ruid;
          rtx mem = use->containing_mem;
          bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));

          /* Avoid moving the add insn past a jump.  */
          if (must_move_add && use_ruid <= last_jump_ruid)
            break;

          /* If the add clobbers another hard reg in parallel, don't move
             it past a real set of this hard reg.  */
          if (must_move_add && clobbered_regno >= 0
              && reg_state[clobbered_regno].real_store_ruid >= use_ruid)
            break;

          /* Avoid moving a use of ADDREG past a point where it
             is stored.  */
          if (reg_state[REGNO (addreg)].store_ruid >= use_ruid)
            break;

          if (mem != NULL_RTX)
            {
              addr_space_t as = MEM_ADDR_SPACE (mem);
              rtx oldaddr = XEXP (mem, 0);
              rtx newaddr = NULL_RTX;
              int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed);
              int new_cost;

              newaddr = simplify_replace_rtx (oldaddr, reg, copy_rtx (src));
              if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as))
                {
                  XEXP (mem, 0) = newaddr;
                  new_cost = address_cost (newaddr, GET_MODE (mem), as, speed);
                  XEXP (mem, 0) = oldaddr;
                  if (new_cost <= old_cost
                      && validate_change (use_insn,
                                          &XEXP (mem, 0), newaddr, 0))
                    {
                      reload_combine_purge_insn_uses (use_insn);
                      reload_combine_note_use (&PATTERN (use_insn), use_insn,
                                               use_ruid, NULL_RTX);

                      if (must_move_add)
                        {
                          add_moved_after_insn = use_insn;
                          add_moved_after_ruid = use_ruid;
                        }
                      continue;
                    }
                }
            }
          else
            {
              rtx new_set = single_set (use_insn);
              if (new_set
                  && REG_P (SET_DEST (new_set))
                  && GET_CODE (SET_SRC (new_set)) == PLUS
                  && REG_P (XEXP (SET_SRC (new_set), 0))
                  && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
                {
                  rtx new_src;
                  int old_cost = rtx_cost (SET_SRC (new_set), SET, speed);

                  gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
                  new_src = simplify_replace_rtx (SET_SRC (new_set), reg,
                                                  copy_rtx (src));

                  if (rtx_cost (new_src, SET, speed) <= old_cost
                      && validate_change (use_insn, &SET_SRC (new_set),
                                          new_src, 0))
                    {
                      reload_combine_purge_insn_uses (use_insn);
                      reload_combine_note_use (&SET_SRC (new_set), use_insn,
                                               use_ruid, NULL_RTX);

                      if (must_move_add)
                        {
                          /* See if that took care of the add insn.  */
                          if (rtx_equal_p (SET_DEST (new_set), reg))
                            {
                              delete_insn (insn);
                              return true;
                            }
                          else
                            {
                              add_moved_after_insn = use_insn;
                              add_moved_after_ruid = use_ruid;
                            }
                        }
                      continue;
                    }
                }
            }
          /* If we get here, we couldn't handle this use.  */
          if (must_move_add)
            break;
        }
    }
  while (use);

  if (!must_move_add || add_moved_after_insn == NULL_RTX)
    /* Process the add normally.  */
    return false;

  fixup_debug_insns (regno, reg, src, add_moved_after_ruid);

  reorder_insns (insn, insn, add_moved_after_insn);
  reload_combine_purge_reg_uses_after_ruid (regno, add_moved_after_ruid);
  reload_combine_split_ruids (add_moved_after_ruid - 1);
  reload_combine_note_use (&PATTERN (insn), insn,
                           add_moved_after_ruid, NULL_RTX);
  reg_state[regno].store_ruid = add_moved_after_ruid;

  return true;
}
/* Called by reload_combine when scanning INSN.  Try to detect a pattern we
   can handle and improve.  Return true if no further processing is needed on
   INSN; false if it wasn't recognized and should be handled normally.  */
static bool
reload_combine_recognize_pattern (rtx insn)
{
  rtx set, reg, src;
  unsigned int regno;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg)
      || hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] != 1)
    return false;

  regno = REGNO (reg);

  /* Look for (set (REGX) (CONST_INT))
     (set (REGX) (PLUS (REGX) (REGY)))
     ...
     ... (MEM (REGX)) ...
     and convert it to
     (set (REGZ) (CONST_INT))
     ...
     ... (MEM (PLUS (REGZ) (REGY)))... .

     First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
     and that we know all uses of REGX before it dies.
     Also, explicitly check that REGX != REGY; our life information
     does not yet show whether REGY changes in this insn.  */

  if (GET_CODE (src) == PLUS
      && reg_state[regno].all_offsets_match
      && last_index_reg != -1
      && REG_P (XEXP (src, 1))
      && rtx_equal_p (XEXP (src, 0), reg)
      && !rtx_equal_p (XEXP (src, 1), reg)
      && last_label_ruid < reg_state[regno].use_ruid)
    {
      rtx base = XEXP (src, 1);
      rtx prev = prev_nonnote_insn (insn);
      rtx prev_set = prev ? single_set (prev) : NULL_RTX;
      rtx index_reg = NULL_RTX;
      rtx reg_sum = NULL_RTX;
      int i;

      /* Now we need to set INDEX_REG to an index register (denoted as
         REGZ in the illustration above) and REG_SUM to the expression
         register+register that we want to use to substitute uses of REG
         (typically in MEMs) with.  First check REG and BASE for being
         index registers; we can use them even if they are not dead.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
          || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
                                REGNO (base)))
        {
          index_reg = reg;
          reg_sum = src;
        }
      else
        {
          /* Otherwise, look for a free index register.  Since we have
             checked above that neither REG nor BASE are index registers,
             if we find anything at all, it will be different from these
             two registers.  */
          for (i = first_index_reg; i <= last_index_reg; i++)
            {
              if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
                  && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
                  && reg_state[i].store_ruid <= reg_state[regno].use_ruid
                  && hard_regno_nregs[i][GET_MODE (reg)] == 1)
                {
                  index_reg = gen_rtx_REG (GET_MODE (reg), i);
                  reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
                  break;
                }
            }
        }

      /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
         (REGY), i.e. BASE, is not clobbered before the last use we'll
         create.  */
      if (reg_sum
          && prev_set
          && CONST_INT_P (SET_SRC (prev_set))
          && rtx_equal_p (SET_DEST (prev_set), reg)
          && reg_state[regno].use_index >= 0
          && (reg_state[REGNO (base)].store_ruid
              <= reg_state[regno].use_ruid))
        {
          /* Change destination register and, if necessary, the constant
             value in PREV, the constant loading instruction.  */
          validate_change (prev, &SET_DEST (prev_set), index_reg, 1);
          if (reg_state[regno].offset != const0_rtx)
            validate_change (prev,
                             &SET_SRC (prev_set),
                             GEN_INT (INTVAL (SET_SRC (prev_set))
                                      + INTVAL (reg_state[regno].offset)),
                             1);

          /* Now for every use of REG that we have recorded, replace REG
             with REG_SUM.  */
          for (i = reg_state[regno].use_index;
               i < RELOAD_COMBINE_MAX_USES; i++)
            validate_unshare_change (reg_state[regno].reg_use[i].insn,
                                     reg_state[regno].reg_use[i].usep,
                                     /* Each change must have its own
                                        replacement.  */
                                     reg_sum, 1);

          if (apply_change_group ())
            {
              /* For every new use of REG_SUM, we have to record the use
                 of BASE therein, i.e. operand 1.  */
              for (i = reg_state[regno].use_index;
                   i < RELOAD_COMBINE_MAX_USES; i++)
                reload_combine_note_use
                  (&XEXP (*reg_state[regno].reg_use[i].usep, 1),
                   reg_state[regno].reg_use[i].insn,
                   reg_state[regno].reg_use[i].ruid,
                   reg_state[regno].reg_use[i].containing_mem);

              /* Delete the reg-reg addition.  */
              delete_insn (insn);

              if (reg_state[regno].offset != const0_rtx)
                /* Previous REG_EQUIV / REG_EQUAL notes for PREV
                   are now invalid.  */
                remove_reg_equal_equiv_notes (prev);

              reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
              reg_state[REGNO (index_reg)].store_ruid
                = reload_combine_ruid;
              return true;
            }
        }
    }

  return false;
}
static void
reload_combine (void)
{
  rtx insn, prev;
  int i;
  basic_block bb;
  unsigned int r;
  int min_labelno, n_labels;
  HARD_REG_SET ever_live_at_start, *label_live;

  /* To avoid wasting too much time later searching for an index register,
     determine the minimum and maximum index register numbers.  */
  if (INDEX_REG_CLASS == NO_REGS)
    last_index_reg = -1;
  else if (first_index_reg == -1 && last_index_reg == 0)
    {
      for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
        if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
          {
            if (first_index_reg == -1)
              first_index_reg = r;

            last_index_reg = r;
          }

      /* If no index register is available, we can quit now.  Set LAST_INDEX_REG
         to -1 so we'll know to quit early the next time we get here.  */
      if (first_index_reg == -1)
        {
          last_index_reg = -1;
          return;
        }
    }

#if 0
  /* If reg+reg can be used in offsetable memory addresses, the main chunk of
     reload has already used it where appropriate, so there is no use in
     trying to generate it now.  */
  if (double_reg_address_ok || last_index_reg == -1)
    return;
#endif

  /* Set up LABEL_LIVE and EVER_LIVE_AT_START.  The register lifetime
     information is a bit fuzzy immediately after reload, but it's
     still good enough to determine which registers are live at a jump
     destination.  */
  min_labelno = get_first_label_num ();
  n_labels = max_label_num () - min_labelno;
  label_live = XNEWVEC (HARD_REG_SET, n_labels);
  CLEAR_HARD_REG_SET (ever_live_at_start);

  FOR_EACH_BB_REVERSE (bb)
    {
      insn = BB_HEAD (bb);
      if (LABEL_P (insn))
        {
          HARD_REG_SET live;
          bitmap live_in = df_get_live_in (bb);

          REG_SET_TO_HARD_REG_SET (live, live_in);
          compute_use_by_pseudos (&live, live_in);
          COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
          IOR_HARD_REG_SET (ever_live_at_start, live);
        }
    }

  /* Initialize last_label_ruid, reload_combine_ruid and reg_state.  */
  last_label_ruid = last_jump_ruid = reload_combine_ruid = 0;
  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
    {
      reg_state[r].store_ruid = 0;
      reg_state[r].real_store_ruid = 0;
      if (fixed_regs[r])
        reg_state[r].use_index = -1;
      else
        reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
    }

  for (insn = get_last_insn (); insn; insn = prev)
    {
      rtx note;

      prev = PREV_INSN (insn);

      /* We cannot do our optimization across labels.  Invalidating all the use
         information we have would be costly, so we just note where the label
         is and then later disable any optimization that would cross it.  */
      if (LABEL_P (insn))
        last_label_ruid = reload_combine_ruid;
      else if (BARRIER_P (insn))
        for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
          if (! fixed_regs[r])
            reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;

      if (! INSN_P (insn))
        continue;

      reload_combine_ruid++;

      if (control_flow_insn_p (insn))
        last_jump_ruid = reload_combine_ruid;

      if (reload_combine_recognize_const_pattern (insn)
          || reload_combine_recognize_pattern (insn))
        continue;

      note_stores (PATTERN (insn), reload_combine_note_store, NULL);

      if (CALL_P (insn))
        {
          rtx link;

          for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
            if (call_used_regs[r])
              {
                reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
                reg_state[r].store_ruid = reload_combine_ruid;
              }

          for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
               link = XEXP (link, 1))
            {
              rtx usage_rtx = XEXP (XEXP (link, 0), 0);
              if (REG_P (usage_rtx))
                {
                  unsigned int i;
                  unsigned int start_reg = REGNO (usage_rtx);
                  unsigned int num_regs =
                    hard_regno_nregs[start_reg][GET_MODE (usage_rtx)];
                  unsigned int end_reg = start_reg + num_regs - 1;
                  for (i = start_reg; i <= end_reg; i++)
                    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
                      {
                        reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
                        reg_state[i].store_ruid = reload_combine_ruid;
                      }
                    else
                      reg_state[i].use_index = -1;
                }
            }
        }
      else if (JUMP_P (insn)
               && GET_CODE (PATTERN (insn)) != RETURN)
        {
          /* Non-spill registers might be used at the call destination in
             some unknown fashion, so we have to mark the unknown use.  */
          HARD_REG_SET *live;

          if ((condjump_p (insn) || condjump_in_parallel_p (insn))
              && JUMP_LABEL (insn))
            live = &LABEL_LIVE (JUMP_LABEL (insn));
          else
            live = &ever_live_at_start;

          for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
            if (TEST_HARD_REG_BIT (*live, i))
              reg_state[i].use_index = -1;
        }

      reload_combine_note_use (&PATTERN (insn), insn,
                               reload_combine_ruid, NULL_RTX);
      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
        {
          if (REG_NOTE_KIND (note) == REG_INC
              && REG_P (XEXP (note, 0)))
            {
              int regno = REGNO (XEXP (note, 0));

              reg_state[regno].store_ruid = reload_combine_ruid;
              reg_state[regno].real_store_ruid = reload_combine_ruid;
              reg_state[regno].use_index = -1;
            }
        }
    }

  free (label_live);
}
/* Check if DST is a register or a subreg of a register; if it is,
   update store_ruid, real_store_ruid and use_index in the reg_state
   structure accordingly.  Called via note_stores from reload_combine.  */

static void
reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
  int regno = 0;
  int i;
  enum machine_mode mode = GET_MODE (dst);

  if (GET_CODE (dst) == SUBREG)
    {
      regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
                                   GET_MODE (SUBREG_REG (dst)),
                                   SUBREG_BYTE (dst),
                                   GET_MODE (dst));
      dst = SUBREG_REG (dst);
    }
  if (!REG_P (dst))
    return;
  regno += REGNO (dst);

  /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
     careful with registers / register parts that are not full words.
     Similarly for ZERO_EXTRACT.  */
  if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
      || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
    {
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
        {
          reg_state[i].use_index = -1;
          reg_state[i].store_ruid = reload_combine_ruid;
          reg_state[i].real_store_ruid = reload_combine_ruid;
        }
    }
  else
    {
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
        {
          reg_state[i].store_ruid = reload_combine_ruid;
          if (GET_CODE (set) == SET)
            reg_state[i].real_store_ruid = reload_combine_ruid;
          reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
        }
    }
}
/* XP points to a piece of rtl that has to be checked for any uses of
   registers.
   *XP is the pattern of INSN, or a part of it.
   Called from reload_combine, and recursively by itself.  */
static void
reload_combine_note_use (rtx *xp, rtx insn, int ruid, rtx containing_mem)
{
  rtx x = *xp;
  enum rtx_code code = x->code;
  const char *fmt;
  int i, j;
  rtx offset = const0_rtx; /* For the REG case below.  */

  switch (code)
    {
    case SET:
      if (REG_P (SET_DEST (x)))
        {
          reload_combine_note_use (&SET_SRC (x), insn, ruid, NULL_RTX);
          return;
        }
      break;

    case USE:
      /* If this is the USE of a return value, we can't change it.  */
      if (REG_P (XEXP (x, 0)) && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
        {
          /* Mark the return register as used in an unknown fashion.  */
          rtx reg = XEXP (x, 0);
          int regno = REGNO (reg);
          int nregs = hard_regno_nregs[regno][GET_MODE (reg)];

          while (--nregs >= 0)
            reg_state[regno + nregs].use_index = -1;
          return;
        }
      break;

    case CLOBBER:
      if (REG_P (SET_DEST (x)))
        {
          /* No spurious CLOBBERs of pseudo registers may remain.  */
          gcc_assert (REGNO (SET_DEST (x)) < FIRST_PSEUDO_REGISTER);
          return;
        }
      break;

    case PLUS:
      /* We are interested in (plus (reg) (const_int)) .  */
      if (!REG_P (XEXP (x, 0))
          || !CONST_INT_P (XEXP (x, 1)))
        break;
      offset = XEXP (x, 1);
      x = XEXP (x, 0);
      /* Fall through.  */
    case REG:
      {
        int regno = REGNO (x);
        int use_index;
        int nregs;

        /* No spurious USEs of pseudo registers may remain.  */
        gcc_assert (regno < FIRST_PSEUDO_REGISTER);

        nregs = hard_regno_nregs[regno][GET_MODE (x)];

        /* We can't substitute into multi-hard-reg uses.  */
        if (nregs > 1)
          {
            while (--nregs >= 0)
              reg_state[regno + nregs].use_index = -1;
            return;
          }

        /* If this register is already used in some unknown fashion, we
           can't do anything.
           If we decrement the index from zero to -1, we can't store more
           uses, so this register becomes used in an unknown fashion.  */
        use_index = --reg_state[regno].use_index;
        if (use_index < 0)
          return;

        if (use_index == RELOAD_COMBINE_MAX_USES - 1)
          {
            /* This is the first use of this register we have seen since we
               marked it as dead.  */
            reg_state[regno].offset = offset;
            reg_state[regno].all_offsets_match = true;
            reg_state[regno].use_ruid = ruid;
          }
        else
          {
            if (reg_state[regno].use_ruid > ruid)
              reg_state[regno].use_ruid = ruid;

            if (! rtx_equal_p (offset, reg_state[regno].offset))
              reg_state[regno].all_offsets_match = false;
          }

        reg_state[regno].reg_use[use_index].insn = insn;
        reg_state[regno].reg_use[use_index].ruid = ruid;
        reg_state[regno].reg_use[use_index].containing_mem = containing_mem;
        reg_state[regno].reg_use[use_index].usep = xp;
        return;
      }

    case MEM:
      containing_mem = x;
      break;

    default:
      break;
    }

  /* Recursively process the components of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        reload_combine_note_use (&XEXP (x, i), insn, ruid, containing_mem);
      else if (fmt[i] == 'E')
        {
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            reload_combine_note_use (&XVECEXP (x, i, j), insn, ruid,
                                     containing_mem);
        }
    }
}
/* See if we can reduce the cost of a constant by replacing a move
   with an add.  We track situations in which a register is set to a
   constant or to a register plus a constant.  */
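/* A sketch of the idea (illustrative RTL only):
       (set (reg:SI 0) (const_int 100))
       ...
       (set (reg:SI 0) (const_int 104))
   can become
       (set (reg:SI 0) (const_int 100))
       ...
       (set (reg:SI 0) (plus:SI (reg:SI 0) (const_int 4)))
   when the small add is cheaper than materializing the full constant.  */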
/* We cannot do our optimization across labels.  Invalidating all the
   information about register contents we have would be costly, so we
   use move2add_last_label_luid to note where the label is and then
   later disable any optimization that would cross it.
   reg_offset[n] / reg_base_reg[n] / reg_symbol_ref[n] / reg_mode[n]
   are only valid if reg_set_luid[n] is greater than
   move2add_last_label_luid.  */
static int reg_set_luid[FIRST_PSEUDO_REGISTER];

/* If reg_base_reg[n] is negative, register n has been set to
   reg_offset[n] or reg_symbol_ref[n] + reg_offset[n] in mode reg_mode[n].
   If reg_base_reg[n] is non-negative, register n has been set to the
   sum of reg_offset[n] and the value of register reg_base_reg[n]
   before reg_set_luid[n], calculated in mode reg_mode[n].  */
static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
static rtx reg_symbol_ref[FIRST_PSEUDO_REGISTER];
static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];

/* move2add_luid is linearly increased while scanning the instructions
   from first to last.  It is used to set reg_set_luid in
   reload_cse_move2add and move2add_note_store.  */
static int move2add_luid;

/* move2add_last_label_luid is set whenever a label is found.  Labels
   invalidate all previously collected reg_offset data.  */
static int move2add_last_label_luid;

/* ??? We don't know how zero / sign extension is handled, hence we
   can't go from a narrower to a wider mode.  */
#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
  (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
   || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
       && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (OUTMODE), \
                                 GET_MODE_BITSIZE (INMODE))))
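/* Reading of the macro above (not an additional constraint): a value
   tracked in SImode may, for instance, be reused in QImode on targets
   where truncation is a no-op, but not the other way around, since we
   do not know how the bits beyond the narrower mode were produced.  */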
/* This function is called with INSN that sets REG to (SYM + OFF),
   while REG is known to already have value (SYM + offset).
   This function tries to change INSN into an add instruction
   (set (REG) (plus (REG) (OFF - offset))) using the known value.
   It also updates the information about REG's known value.
   Return true if we made a change.  */
static bool
move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  rtx new_src = gen_int_mode (INTVAL (off) - reg_offset[regno],
                              GET_MODE (reg));
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  bool changed = false;

  /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
     use (set (reg) (reg)) instead.
     We don't delete this insn, nor do we convert it into a
     note, to avoid losing register notes or the return
     value flag.  jump2 already knows how to get rid of
     no-op moves.  */
  if (new_src == const0_rtx)
    {
      /* If the constants are different, this is a
         truncation, that, if turned into (set (reg)
         (reg)), would be discarded.  Maybe we should
         try a truncMN pattern?  */
      if (INTVAL (off) == reg_offset [regno])
        changed = validate_change (insn, &SET_SRC (pat), reg, 0);
    }
  else if (rtx_cost (new_src, PLUS, speed) < rtx_cost (src, SET, speed)
           && have_add2_insn (reg, new_src))
    {
      rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);
      changed = validate_change (insn, &SET_SRC (pat), tem, 0);
    }
  else if (sym == NULL_RTX && GET_MODE (reg) != BImode)
    {
      enum machine_mode narrow_mode;
      for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           narrow_mode != VOIDmode
             && narrow_mode != GET_MODE (reg);
           narrow_mode = GET_MODE_WIDER_MODE (narrow_mode))
        {
          if (have_insn_for (STRICT_LOW_PART, narrow_mode)
              && ((reg_offset[regno]
                   & ~GET_MODE_MASK (narrow_mode))
                  == (INTVAL (off)
                      & ~GET_MODE_MASK (narrow_mode))))
            {
              rtx narrow_reg = gen_rtx_REG (narrow_mode,
                                            REGNO (reg));
              rtx narrow_src = gen_int_mode (INTVAL (off),
                                             narrow_mode);
              rtx new_set =
                gen_rtx_SET (VOIDmode,
                             gen_rtx_STRICT_LOW_PART (VOIDmode,
                                                      narrow_reg),
                             narrow_src);
              changed = validate_change (insn, &PATTERN (insn),
                                         new_set, 0);
              if (changed)
                break;
            }
        }
    }
  reg_set_luid[regno] = move2add_luid;
  reg_base_reg[regno] = -1;
  reg_mode[regno] = GET_MODE (reg);
  reg_symbol_ref[regno] = sym;
  reg_offset[regno] = INTVAL (off);
  return changed;
}
/* This function is called with INSN that sets REG to (SYM + OFF),
   but REG doesn't have known value (SYM + offset).  This function
   tries to find another register which is known to already have
   value (SYM + offset) and change INSN into an add instruction
   (set (REG) (plus (the found register) (OFF - offset))) if such
   a register is found.  It also updates the information about
   REG's known value.
   Return true iff we made a change.  */
static bool
move2add_use_add3_insn (rtx reg, rtx sym, rtx off, rtx insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  int min_cost = INT_MAX;
  int min_regno = 0;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  int i;
  bool changed = false;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (reg_set_luid[i] > move2add_last_label_luid
        && reg_mode[i] == GET_MODE (reg)
        && reg_base_reg[i] < 0
        && reg_symbol_ref[i] != NULL_RTX
        && rtx_equal_p (sym, reg_symbol_ref[i]))
      {
        rtx new_src = gen_int_mode (INTVAL (off) - reg_offset[i],
                                    GET_MODE (reg));
        /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
           use (set (reg) (reg)) instead.
           We don't delete this insn, nor do we convert it into a
           note, to avoid losing register notes or the return
           value flag.  jump2 already knows how to get rid of
           no-op moves.  */
        if (new_src == const0_rtx)
          {
            min_cost = 0;
            min_regno = i;
            break;
          }
        else
          {
            int cost = rtx_cost (new_src, PLUS, speed);
            if (cost < min_cost)
              {
                min_cost = cost;
                min_regno = i;
              }
          }
      }

  if (min_cost < rtx_cost (src, SET, speed))
    {
      rtx tem;

      tem = gen_rtx_REG (GET_MODE (reg), min_regno);
      if (i != min_regno)
        {
          rtx new_src = gen_int_mode (INTVAL (off) - reg_offset[min_regno],
                                      GET_MODE (reg));
          tem = gen_rtx_PLUS (GET_MODE (reg), tem, new_src);
        }
      if (validate_change (insn, &SET_SRC (pat), tem, 0))
        changed = true;
    }
  reg_set_luid[regno] = move2add_luid;
  reg_base_reg[regno] = -1;
  reg_mode[regno] = GET_MODE (reg);
  reg_symbol_ref[regno] = sym;
  reg_offset[regno] = INTVAL (off);
  return changed;
}
1770 /* Convert move insns with constant inputs to additions if they are cheaper.
1771 Return true if any changes were made. */
1772 static bool
1773 reload_cse_move2add (rtx first)
1775 int i;
1776 rtx insn;
1777 bool changed = false;
1779 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1781 reg_set_luid[i] = 0;
1782 reg_offset[i] = 0;
1783 reg_base_reg[i] = 0;
1784 reg_symbol_ref[i] = NULL_RTX;
1785 reg_mode[i] = VOIDmode;
1788 move2add_last_label_luid = 0;
1789 move2add_luid = 2;
1790 for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
1792 rtx pat, note;
1794 if (LABEL_P (insn))
1796 move2add_last_label_luid = move2add_luid;
1797 /* We're going to increment move2add_luid twice after a
1798 label, so that we can use move2add_last_label_luid + 1 as
1799 the luid for constants. */
1800 move2add_luid++;
1801 continue;
1803 if (! INSN_P (insn))
1804 continue;
1805 pat = PATTERN (insn);
1806 /* For simplicity, we only perform this optimization on
1807 straightforward SETs. */
1808 if (GET_CODE (pat) == SET
1809 && REG_P (SET_DEST (pat)))
1811 rtx reg = SET_DEST (pat);
1812 int regno = REGNO (reg);
1813 rtx src = SET_SRC (pat);
1815 /* Check if we have valid information on the contents of this
1816 register in the mode of REG. */
1817 if (reg_set_luid[regno] > move2add_last_label_luid
1818 && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg), reg_mode[regno])
1819 && dbg_cnt (cse2_move2add))
1821 /* Try to transform (set (REGX) (CONST_INT A))
1823 (set (REGX) (CONST_INT B))
1825 (set (REGX) (CONST_INT A))
1827 (set (REGX) (plus (REGX) (CONST_INT B-A)))
1829 (set (REGX) (CONST_INT A))
1831 (set (STRICT_LOW_PART (REGX)) (CONST_INT B))
1834 if (CONST_INT_P (src)
1835 && reg_base_reg[regno] < 0
1836 && reg_symbol_ref[regno] == NULL_RTX)
1838 changed |= move2add_use_add2_insn (reg, NULL_RTX, src, insn);
1839 continue;
1842 /* Try to transform (set (REGX) (REGY))
1843 (set (REGX) (PLUS (REGX) (CONST_INT A)))
1845 (set (REGX) (REGY))
1846 (set (REGX) (PLUS (REGX) (CONST_INT B)))
1848 (set (REGX) (REGY))
1849 (set (REGX) (PLUS (REGX) (CONST_INT A)))
1851 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
              else if (REG_P (src)
                       && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
                       && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
                       && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg),
                                                 reg_mode[REGNO (src)]))
                {
                  rtx next = next_nonnote_insn (insn);
                  rtx set = NULL_RTX;
                  if (next)
                    set = single_set (next);
                  if (set
                      && SET_DEST (set) == reg
                      && GET_CODE (SET_SRC (set)) == PLUS
                      && XEXP (SET_SRC (set), 0) == reg
                      && CONST_INT_P (XEXP (SET_SRC (set), 1)))
                    {
                      rtx src3 = XEXP (SET_SRC (set), 1);
                      HOST_WIDE_INT added_offset = INTVAL (src3);
                      HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
                      HOST_WIDE_INT regno_offset = reg_offset[regno];
                      rtx new_src =
                        gen_int_mode (added_offset
                                      + base_offset
                                      - regno_offset,
                                      GET_MODE (reg));
                      bool success = false;
                      bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

                      if (new_src == const0_rtx)
                        /* See above why we create (set (reg) (reg)) here.  */
                        success
                          = validate_change (next, &SET_SRC (set), reg, 0);
                      else if ((rtx_cost (new_src, PLUS, speed)
                                < COSTS_N_INSNS (1) + rtx_cost (src3, SET, speed))
                               && have_add2_insn (reg, new_src))
                        {
                          rtx newpat = gen_rtx_SET (VOIDmode,
                                                    reg,
                                                    gen_rtx_PLUS (GET_MODE (reg),
                                                                  reg,
                                                                  new_src));
                          success
                            = validate_change (next, &PATTERN (next),
                                               newpat, 0);
                        }
                      if (success)
                        delete_insn (insn);
                      changed |= success;
                      insn = next;
                      reg_mode[regno] = GET_MODE (reg);
                      reg_offset[regno] =
                        trunc_int_for_mode (added_offset + base_offset,
                                            GET_MODE (reg));
                      continue;
                    }
                }
            }
          /* Try to transform
             (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
             ...
             (set (REGY) (CONST (PLUS (SYMBOL_REF) (CONST_INT B))))
             to
             (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
             ...
             (set (REGY) (CONST (PLUS (REGX) (CONST_INT B-A))))  */
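          /* Illustration (symbol and offsets invented): if reg 0 is
             known to hold x + 4, then
                (set (reg:SI 1) (const:SI (plus:SI (symbol_ref:SI ("x"))
                                                   (const_int 12))))
             may become
                (set (reg:SI 1) (plus:SI (reg:SI 0) (const_int 8)))
             avoiding a second load of the symbol's address.  */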
          if ((GET_CODE (src) == SYMBOL_REF
               || (GET_CODE (src) == CONST
                   && GET_CODE (XEXP (src, 0)) == PLUS
                   && GET_CODE (XEXP (XEXP (src, 0), 0)) == SYMBOL_REF
                   && CONST_INT_P (XEXP (XEXP (src, 0), 1))))
              && dbg_cnt (cse2_move2add))
            {
              rtx sym, off;

              if (GET_CODE (src) == SYMBOL_REF)
                {
                  sym = src;
                  off = const0_rtx;
                }
              else
                {
                  sym = XEXP (XEXP (src, 0), 0);
                  off = XEXP (XEXP (src, 0), 1);
                }

              /* If the reg already contains the value which is sum of
                 sym and some constant value, we can use an add2 insn.  */
              if (reg_set_luid[regno] > move2add_last_label_luid
                  && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg), reg_mode[regno])
                  && reg_base_reg[regno] < 0
                  && reg_symbol_ref[regno] != NULL_RTX
                  && rtx_equal_p (sym, reg_symbol_ref[regno]))
                changed |= move2add_use_add2_insn (reg, sym, off, insn);

              /* Otherwise, we have to find a register whose value is sum
                 of sym and some constant value.  */
              else
                changed |= move2add_use_add3_insn (reg, sym, off, insn);

              continue;
            }
        }
      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
        {
          if (REG_NOTE_KIND (note) == REG_INC
              && REG_P (XEXP (note, 0)))
            {
              /* Reset the information about this register.  */
              int regno = REGNO (XEXP (note, 0));
              if (regno < FIRST_PSEUDO_REGISTER)
                reg_set_luid[regno] = 0;
            }
        }
      note_stores (PATTERN (insn), move2add_note_store, insn);
      /* If INSN is a conditional branch, we try to extract an
         implicit set out of it.  */
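      /* E.g. for
            (set (pc) (if_then_else (ne (reg:SI 0) (const_int 5))
                                    (label_ref 23) (pc)))
         the fall-through path can only be reached with reg 0 equal to
         5, so it is recorded below as if (set (reg:SI 0) (const_int 5))
         had been seen (the label number is illustrative).  */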
      if (any_condjump_p (insn))
        {
          rtx cnd = fis_get_condition (insn);

          if (cnd != NULL_RTX
              && GET_CODE (cnd) == NE
              && REG_P (XEXP (cnd, 0))
              && !reg_set_p (XEXP (cnd, 0), insn)
              /* The following two checks, which are also in
                 move2add_note_store, are intended to reduce the
                 number of calls to gen_rtx_SET to avoid memory
                 allocation if possible.  */
              && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
              && hard_regno_nregs[REGNO (XEXP (cnd, 0))][GET_MODE (XEXP (cnd, 0))] == 1
              && CONST_INT_P (XEXP (cnd, 1)))
            {
              rtx implicit_set =
                gen_rtx_SET (VOIDmode, XEXP (cnd, 0), XEXP (cnd, 1));
              move2add_note_store (SET_DEST (implicit_set), implicit_set, insn);
            }
        }
      /* If this is a CALL_INSN, all call used registers are stored with
         unknown values.  */
      if (CALL_P (insn))
        {
          for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
            {
              if (call_used_regs[i])
                /* Reset the information about this register.  */
                reg_set_luid[i] = 0;
            }
        }
    }
  return changed;
}
/* SET is a SET or CLOBBER that sets DST.  DATA is the insn which
   contains SET.
   Update reg_set_luid, reg_offset and reg_base_reg accordingly.
   Called from reload_cse_move2add via note_stores.  */
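/* (A caveat inferred from note_stores's behavior: DST may have had a
   ZERO_EXTRACT or STRICT_LOW_PART wrapper stripped off, and may be a
   hard-register SUBREG, while SET still holds the original pattern;
   hence the SUBREG handling and the separate checks on SET_DEST (SET)
   below.)  */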
static void
move2add_note_store (rtx dst, const_rtx set, void *data)
{
  rtx insn = (rtx) data;
  unsigned int regno = 0;
  unsigned int nregs = 0;
  unsigned int i;
  enum machine_mode mode = GET_MODE (dst);
  if (GET_CODE (dst) == SUBREG)
    {
      regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
                                   GET_MODE (SUBREG_REG (dst)),
                                   SUBREG_BYTE (dst),
                                   GET_MODE (dst));
      nregs = subreg_nregs (dst);
      dst = SUBREG_REG (dst);
    }

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
          || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
        reg_set_luid[REGNO (XEXP (dst, 0))] = 0;
      return;
    }
  if (!REG_P (dst))
    return;

  regno += REGNO (dst);
  if (!nregs)
    nregs = hard_regno_nregs[regno][mode];
  if (SCALAR_INT_MODE_P (GET_MODE (dst))
      && nregs == 1 && GET_CODE (set) == SET)
    {
      rtx note, sym = NULL_RTX;
      HOST_WIDE_INT off;
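      /* The two REG_EQUAL/REG_EQUIV note shapes recognized below are a
         bare symbol, e.g. (symbol_ref:SI ("x")), and a symbol plus
         constant, e.g.
            (const:SI (plus:SI (symbol_ref:SI ("x")) (const_int 4)));
         the symbol name "x" is purely illustrative.  */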
      note = find_reg_equal_equiv_note (insn);
      if (note && GET_CODE (XEXP (note, 0)) == SYMBOL_REF)
        {
          sym = XEXP (note, 0);
          off = 0;
        }
      else if (note && GET_CODE (XEXP (note, 0)) == CONST
               && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
               && GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0)) == SYMBOL_REF
               && CONST_INT_P (XEXP (XEXP (XEXP (note, 0), 0), 1)))
        {
          sym = XEXP (XEXP (XEXP (note, 0), 0), 0);
          off = INTVAL (XEXP (XEXP (XEXP (note, 0), 0), 1));
        }

      if (sym != NULL_RTX)
        {
          reg_base_reg[regno] = -1;
          reg_symbol_ref[regno] = sym;
          reg_offset[regno] = off;
          reg_mode[regno] = mode;
          reg_set_luid[regno] = move2add_luid;
          return;
        }
    }
  if (SCALAR_INT_MODE_P (GET_MODE (dst))
      && nregs == 1 && GET_CODE (set) == SET
      && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
    {
      rtx src = SET_SRC (set);
      rtx base_reg;
      HOST_WIDE_INT offset;
      int base_regno;
      /* This may be different from mode, if SET_DEST (set) is a
         SUBREG.  */
      enum machine_mode dst_mode = GET_MODE (dst);
      switch (GET_CODE (src))
        {
        case PLUS:
          if (REG_P (XEXP (src, 0)))
            {
              base_reg = XEXP (src, 0);

              if (CONST_INT_P (XEXP (src, 1)))
                offset = INTVAL (XEXP (src, 1));
              else if (REG_P (XEXP (src, 1))
                       && (reg_set_luid[REGNO (XEXP (src, 1))]
                           > move2add_last_label_luid)
                       && (MODES_OK_FOR_MOVE2ADD
                           (dst_mode, reg_mode[REGNO (XEXP (src, 1))])))
                {
                  if (reg_base_reg[REGNO (XEXP (src, 1))] < 0)
                    offset = reg_offset[REGNO (XEXP (src, 1))];
                  /* Maybe the first register is known to be a
                     constant.  */
                  else if (reg_set_luid[REGNO (base_reg)]
                           > move2add_last_label_luid
                           && (MODES_OK_FOR_MOVE2ADD
                               (dst_mode, reg_mode[REGNO (XEXP (src, 1))]))
                           && reg_base_reg[REGNO (base_reg)] < 0)
                    {
                      offset = reg_offset[REGNO (base_reg)];
                      base_reg = XEXP (src, 1);
                    }
                  else
                    goto invalidate;
                }
              else
                goto invalidate;

              break;
            }

          goto invalidate;

        case REG:
          base_reg = src;
          offset = 0;
          break;

        case CONST_INT:
          /* Start tracking the register as a constant.  */
          reg_base_reg[regno] = -1;
          reg_symbol_ref[regno] = NULL_RTX;
          reg_offset[regno] = INTVAL (SET_SRC (set));
          /* We assign the same luid to all registers set to constants.  */
          reg_set_luid[regno] = move2add_last_label_luid + 1;
          reg_mode[regno] = mode;
          return;

        default:
        invalidate:
          /* Invalidate the contents of the register.  */
          reg_set_luid[regno] = 0;
          return;
        }
      base_regno = REGNO (base_reg);
      /* If information about the base register is not valid, set it
         up as a new base register, pretending its value is known
         starting from the current insn.  */
      if (reg_set_luid[base_regno] <= move2add_last_label_luid)
        {
          reg_base_reg[base_regno] = base_regno;
          reg_symbol_ref[base_regno] = NULL_RTX;
          reg_offset[base_regno] = 0;
          reg_set_luid[base_regno] = move2add_luid;
          reg_mode[base_regno] = mode;
        }
      else if (! MODES_OK_FOR_MOVE2ADD (dst_mode,
                                        reg_mode[base_regno]))
        goto invalidate;

      reg_mode[regno] = mode;

      /* Copy base information from our base register.  */
      reg_set_luid[regno] = reg_set_luid[base_regno];
      reg_base_reg[regno] = reg_base_reg[base_regno];
      reg_symbol_ref[regno] = reg_symbol_ref[base_regno];

      /* Compute the sum of the offsets or constants.  */
      reg_offset[regno] = trunc_int_for_mode (offset
                                              + reg_offset[base_regno],
                                              dst_mode);
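      /* E.g. with a QImode destination, an accumulated offset of 0x90
         would be recorded as the sign-extended value -0x70 (numbers
         invented); truncating here keeps later B-A differences exact
         modulo the mode's precision.  */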
    }
  else
    {
      unsigned int endregno = regno + nregs;

      for (i = regno; i < endregno; i++)
        /* Reset the information about this register.  */
        reg_set_luid[i] = 0;
    }
}
static bool
gate_handle_postreload (void)
{
  return (optimize > 0 && reload_completed);
}

static unsigned int
rest_of_handle_postreload (void)
{
  if (!dbg_cnt (postreload_cse))
    return 0;

  /* Do a very simple CSE pass over just the hard registers.  */
  reload_cse_regs (get_insns ());
  /* Reload_cse_regs can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them.  */
  if (cfun->can_throw_non_call_exceptions)
    purge_all_dead_edges ();

  return 0;
}
struct rtl_opt_pass pass_postreload_cse =
{
 {
  RTL_PASS,
  "postreload",                         /* name */
  gate_handle_postreload,               /* gate */
  rest_of_handle_postreload,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_RELOAD_CSE_REGS,                   /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func                        /* todo_flags_finish */
 }
};