gcc/regmove.c
1 /* Move registers around to reduce number of move instructions needed.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
23 /* This module looks for cases where matching constraints would force
24 an instruction to need a reload, and this reload would be a register
25 to register move. It then attempts to change the registers used by the
26 instruction to avoid the move instruction. */
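/* An illustrative sketch of the situation described above; the insn,
   constraint and register numbers are invented and not taken from any
   particular target.  Suppose an add pattern uses the constraint "0" so
   that its output must be the same register as its first input, and we
   have

     (set (reg 58) (plus:SI (reg 59) (reg 60)))

   with reg 58 != reg 59.  Reload would have to emit a register-register
   move "reg 58 <- reg 59" before the add.  This pass instead tries to
   rewrite the surrounding insns so that the same register can be used
   for both operands, making the move unnecessary.  */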
28 #include "config.h"
29 #include "system.h"
30 #include "coretypes.h"
31 #include "tm.h"
32 #include "rtl.h" /* stdio.h must precede rtl.h for FFS. */
33 #include "tm_p.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "output.h"
37 #include "regs.h"
38 #include "hard-reg-set.h"
39 #include "flags.h"
40 #include "function.h"
41 #include "expr.h"
42 #include "basic-block.h"
43 #include "except.h"
44 #include "toplev.h"
45 #include "reload.h"
46 #include "timevar.h"
47 #include "tree-pass.h"
50 /* Turn STACK_GROWS_DOWNWARD into a boolean. */
51 #ifdef STACK_GROWS_DOWNWARD
52 #undef STACK_GROWS_DOWNWARD
53 #define STACK_GROWS_DOWNWARD 1
54 #else
55 #define STACK_GROWS_DOWNWARD 0
56 #endif
58 static int perhaps_ends_bb_p (rtx);
59 static int optimize_reg_copy_1 (rtx, rtx, rtx);
60 static void optimize_reg_copy_2 (rtx, rtx, rtx);
61 static void optimize_reg_copy_3 (rtx, rtx, rtx);
62 static void copy_src_to_dest (rtx, rtx, rtx, int);
63 static int *regmove_bb_head;
65 struct match {
66 int with[MAX_RECOG_OPERANDS];
67 enum { READ, WRITE, READWRITE } use[MAX_RECOG_OPERANDS];
68 int commutative[MAX_RECOG_OPERANDS];
69 int early_clobber[MAX_RECOG_OPERANDS];
72 static rtx discover_flags_reg (void);
73 static void mark_flags_life_zones (rtx);
74 static void flags_set_1 (rtx, rtx, void *);
76 static int try_auto_increment (rtx, rtx, rtx, rtx, HOST_WIDE_INT, int);
77 static int find_matches (rtx, struct match *);
78 static void replace_in_call_usage (rtx *, unsigned int, rtx, rtx);
79 static int fixup_match_1 (rtx, rtx, rtx, rtx, rtx, int, int, int);
80 static int reg_is_remote_constant_p (rtx, rtx, rtx);
81 static int stable_and_no_regs_but_for_p (rtx, rtx, rtx);
82 static int regclass_compatible_p (int, int);
83 static int replacement_quality (rtx);
84 static int fixup_match_2 (rtx, rtx, rtx, rtx);
86 /* Return nonzero if registers with CLASS1 and CLASS2 can be merged without
87 causing too much register allocation problems. */
88 static int
89 regclass_compatible_p (int class0, int class1)
91 return (class0 == class1
92 || (reg_class_subset_p (class0, class1)
93 && ! CLASS_LIKELY_SPILLED_P (class0))
94 || (reg_class_subset_p (class1, class0)
95 && ! CLASS_LIKELY_SPILLED_P (class1)));
98 /* INC_INSN is an instruction that adds INCREMENT to REG.
99 Try to fold INC_INSN as a post/pre in/decrement into INSN.
100 Iff INC_INSN_SET is nonzero, inc_insn has a destination different from src.
101 Return nonzero for success. */
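/* A sketch of the transformation attempted here, assuming a target with
   HAVE_POST_INCREMENT and a 4-byte SImode; register numbers are invented:

     INSN:      (set (mem:SI (reg 100)) (reg 101))
     INC_INSN:  (set (reg 100) (plus:SI (reg 100) (const_int 4)))

   becomes

     INSN:      (set (mem:SI (post_inc:SI (reg 100))) (reg 101))

   and INC_INSN is deleted (when INC_INSN_SET is zero).  */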
102 static int
103 try_auto_increment (rtx insn, rtx inc_insn, rtx inc_insn_set, rtx reg,
104 HOST_WIDE_INT increment, int pre)
106 enum rtx_code inc_code;
108 rtx pset = single_set (insn);
109 if (pset)
111 /* Can't use the size of SET_SRC, we might have something like
112 (sign_extend:SI (mem:QI ... */
113 rtx use = find_use_as_address (pset, reg, 0);
114 if (use != 0 && use != (rtx) (size_t) 1)
116 int size = GET_MODE_SIZE (GET_MODE (use));
117 if (0
118 || (HAVE_POST_INCREMENT
119 && pre == 0 && (inc_code = POST_INC, increment == size))
120 || (HAVE_PRE_INCREMENT
121 && pre == 1 && (inc_code = PRE_INC, increment == size))
122 || (HAVE_POST_DECREMENT
123 && pre == 0 && (inc_code = POST_DEC, increment == -size))
124 || (HAVE_PRE_DECREMENT
125 && pre == 1 && (inc_code = PRE_DEC, increment == -size))
128 if (inc_insn_set)
129 validate_change
130 (inc_insn,
131 &SET_SRC (inc_insn_set),
132 XEXP (SET_SRC (inc_insn_set), 0), 1);
133 validate_change (insn, &XEXP (use, 0),
134 gen_rtx_fmt_e (inc_code, Pmode, reg), 1);
135 if (apply_change_group ())
137 /* If there is a REG_DEAD note on this insn, we must
138 change this note to REG_UNUSED, meaning that the register
139 is set, but the value is dead. Failure to do so will
140 result in sched1 dying -- when it recomputes lifetime
141 information, the number of REG_DEAD notes will have
142 changed. */
143 rtx note = find_reg_note (insn, REG_DEAD, reg);
144 if (note)
145 PUT_MODE (note, REG_UNUSED);
147 REG_NOTES (insn)
148 = gen_rtx_EXPR_LIST (REG_INC,
149 reg, REG_NOTES (insn));
150 if (! inc_insn_set)
151 delete_insn (inc_insn);
152 return 1;
157 return 0;
160 /* Determine if the pattern generated by add_optab has a clobber,
161 such as might be issued for a flags hard register. To make the
162 code elsewhere simpler, we handle cc0 in this same framework.
164 Return the register if one was discovered. Return NULL_RTX
165 if no flags were found. Return pc_rtx if we got confused. */
167 static rtx
168 discover_flags_reg (void)
170 rtx tmp;
171 tmp = gen_rtx_REG (word_mode, 10000);
172 tmp = gen_add3_insn (tmp, tmp, const2_rtx);
174 /* If we get something that isn't a simple set, or a
175 [(set ..) (clobber ..)], this whole function will go wrong. */
176 if (GET_CODE (tmp) == SET)
177 return NULL_RTX;
178 else if (GET_CODE (tmp) == PARALLEL)
180 int found;
182 if (XVECLEN (tmp, 0) != 2)
183 return pc_rtx;
184 tmp = XVECEXP (tmp, 0, 1);
185 if (GET_CODE (tmp) != CLOBBER)
186 return pc_rtx;
187 tmp = XEXP (tmp, 0);
189 /* Don't do anything foolish if the md wanted to clobber a
190 scratch or something. We only care about hard regs.
191 Moreover we don't like the notion of subregs of hard regs. */
192 if (GET_CODE (tmp) == SUBREG
193 && REG_P (SUBREG_REG (tmp))
194 && REGNO (SUBREG_REG (tmp)) < FIRST_PSEUDO_REGISTER)
195 return pc_rtx;
196 found = (REG_P (tmp) && REGNO (tmp) < FIRST_PSEUDO_REGISTER);
198 return (found ? tmp : NULL_RTX);
201 return pc_rtx;
204 /* It is a tedious task identifying when the flags register is live and
205 when it is safe to optimize. Since we process the instruction stream
206 multiple times, locate and record these live zones by marking the
207 mode of the instructions --
209 QImode is used on the instruction at which the flags becomes live.
211 HImode is used within the range (exclusive) that the flags are
212 live. Thus the user of the flags is not marked.
214 All other instructions are cleared to VOIDmode. */
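/* For instance (a sketch; the insns are invented):

     insn A: sets the flags register             -> marked QImode
     insn B: unrelated arithmetic in between     -> marked HImode
     insn C: conditional branch using the flags  -> marked VOIDmode,
                                                     since the user is not marked

   Insns outside any such zone are likewise VOIDmode.  */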
216 /* Used to communicate with flags_set_1. */
217 static rtx flags_set_1_rtx;
218 static int flags_set_1_set;
220 static void
221 mark_flags_life_zones (rtx flags)
223 int flags_regno;
224 int flags_nregs;
225 basic_block block;
227 #ifdef HAVE_cc0
228 /* If we found a flags register on a cc0 host, bail. */
229 if (flags == NULL_RTX)
230 flags = cc0_rtx;
231 else if (flags != cc0_rtx)
232 flags = pc_rtx;
233 #endif
235 /* Simple cases first: if no flags, clear all modes. If confusing,
236 mark the entire function as being in a flags shadow. */
237 if (flags == NULL_RTX || flags == pc_rtx)
239 enum machine_mode mode = (flags ? HImode : VOIDmode);
240 rtx insn;
241 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
242 PUT_MODE (insn, mode);
243 return;
246 #ifdef HAVE_cc0
247 flags_regno = -1;
248 flags_nregs = 1;
249 #else
250 flags_regno = REGNO (flags);
251 flags_nregs = hard_regno_nregs[flags_regno][GET_MODE (flags)];
252 #endif
253 flags_set_1_rtx = flags;
255 /* Process each basic block. */
256 FOR_EACH_BB_REVERSE (block)
258 rtx insn, end;
259 int live;
261 insn = BB_HEAD (block);
262 end = BB_END (block);
264 /* Look out for the (unlikely) case of flags being live across
265 basic block boundaries. */
266 live = 0;
267 #ifndef HAVE_cc0
269 int i;
270 for (i = 0; i < flags_nregs; ++i)
271 live |= REGNO_REG_SET_P (block->il.rtl->global_live_at_start,
272 flags_regno + i);
274 #endif
276 while (1)
278 /* Process liveness in reverse order of importance --
279 alive, death, birth. This lets more important info
280 overwrite the mode of lesser info. */
282 if (INSN_P (insn))
284 #ifdef HAVE_cc0
285 /* In the cc0 case, death is not marked in reg notes,
286 but is instead the mere use of cc0 when it is alive. */
287 if (live && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
288 live = 0;
289 #else
290 /* In the hard reg case, we watch death notes. */
291 if (live && find_regno_note (insn, REG_DEAD, flags_regno))
292 live = 0;
293 #endif
294 PUT_MODE (insn, (live ? HImode : VOIDmode));
296 /* In either case, birth is denoted simply by its presence
297 as the destination of a set. */
298 flags_set_1_set = 0;
299 note_stores (PATTERN (insn), flags_set_1, NULL);
300 if (flags_set_1_set)
302 live = 1;
303 PUT_MODE (insn, QImode);
306 else
307 PUT_MODE (insn, (live ? HImode : VOIDmode));
309 if (insn == end)
310 break;
311 insn = NEXT_INSN (insn);
316 /* A subroutine of mark_flags_life_zones, called through note_stores. */
318 static void
319 flags_set_1 (rtx x, rtx pat, void *data ATTRIBUTE_UNUSED)
321 if (GET_CODE (pat) == SET
322 && reg_overlap_mentioned_p (x, flags_set_1_rtx))
323 flags_set_1_set = 1;
326 static int *regno_src_regno;
328 /* Indicate how good a choice REG (which appears as a source) is to replace
329 a destination register with. The higher the returned value, the better
330 the choice. The main objective is to avoid using a register that is
331 a candidate for tying to a hard register, since the output might in
332 turn be a candidate to be tied to a different hard register. */
333 static int
334 replacement_quality (rtx reg)
336 int src_regno;
338 /* Bad if this isn't a register at all. */
339 if (!REG_P (reg))
340 return 0;
342 /* If this register is not meant to get a hard register,
343 it is a poor choice. */
344 if (REG_LIVE_LENGTH (REGNO (reg)) < 0)
345 return 0;
347 src_regno = regno_src_regno[REGNO (reg)];
349 /* If it was not copied from another register, it is fine. */
350 if (src_regno < 0)
351 return 3;
353 /* Copied from a hard register? */
354 if (src_regno < FIRST_PSEUDO_REGISTER)
355 return 1;
357 /* Copied from a pseudo register - not as bad as from a hard register,
358 yet still cumbersome, since the register live length will be lengthened
359 when the registers get tied. */
360 return 2;
363 /* Return 1 if INSN might end a basic block. */
365 static int perhaps_ends_bb_p (rtx insn)
367 switch (GET_CODE (insn))
369 case CODE_LABEL:
370 case JUMP_INSN:
371 /* These always end a basic block. */
372 return 1;
374 case CALL_INSN:
375 /* A CALL_INSN might be the last insn of a basic block, if it is inside
376 an EH region or if there are nonlocal gotos. Note that this test is
377 very conservative. */
378 if (nonlocal_goto_handler_labels)
379 return 1;
380 /* Fall through. */
381 default:
382 return can_throw_internal (insn);
386 /* INSN is a copy from SRC to DEST, both registers, and SRC does not die
387 in INSN.
389 Search forward to see if SRC dies before either it or DEST is modified,
390 but don't scan past the end of a basic block. If so, we can replace SRC
391 with DEST and let SRC die in INSN.
393 This will reduce the number of registers live in that range and may enable
394 DEST to be tied to SRC, thus often saving one register in addition to a
395 register-register copy. */
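/* A sketch of the rewrite, with invented register numbers and modes:

     INSN: (set (reg 200) (reg 100))                          ; reg 100 stays live
     ...
     P:    (set (reg 300) (plus:SI (reg 100) (const_int 1)))  ; reg 100 dies in P

   becomes

     INSN: (set (reg 200) (reg 100))                          ; reg 100 now dies here
     ...
     P:    (set (reg 300) (plus:SI (reg 200) (const_int 1)))

   which shortens the life of reg 100 and may let reg 100 and reg 200 be
   tied by the allocator, removing the copy altogether.  */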
397 static int
398 optimize_reg_copy_1 (rtx insn, rtx dest, rtx src)
400 rtx p, q;
401 rtx note;
402 rtx dest_death = 0;
403 int sregno = REGNO (src);
404 int dregno = REGNO (dest);
406 /* We don't want to mess with hard regs if register classes are small. */
407 if (sregno == dregno
408 || (SMALL_REGISTER_CLASSES
409 && (sregno < FIRST_PSEUDO_REGISTER
410 || dregno < FIRST_PSEUDO_REGISTER))
411 /* We don't see all updates to SP if they are in an auto-inc memory
412 reference, so we must disallow this optimization on them. */
413 || sregno == STACK_POINTER_REGNUM || dregno == STACK_POINTER_REGNUM)
414 return 0;
416 for (p = NEXT_INSN (insn); p; p = NEXT_INSN (p))
418 /* ??? We can't scan past the end of a basic block without updating
419 the register lifetime info (REG_DEAD/basic_block_live_at_start). */
420 if (perhaps_ends_bb_p (p))
421 break;
422 else if (! INSN_P (p))
423 continue;
425 if (reg_set_p (src, p) || reg_set_p (dest, p)
426 /* If SRC is an asm-declared register, it must not be replaced
427 in any asm. Unfortunately, the REG_EXPR tree for the asm
428 variable may be absent in the SRC rtx, so we can't check the
429 actual register declaration easily (the asm operand will have
430 it, though). To avoid complicating the test for a rare case,
431 we just don't perform register replacement for a hard reg
432 mentioned in an asm. */
433 || (sregno < FIRST_PSEUDO_REGISTER
434 && asm_noperands (PATTERN (p)) >= 0
435 && reg_overlap_mentioned_p (src, PATTERN (p)))
436 /* Don't change hard registers used by a call. */
437 || (CALL_P (p) && sregno < FIRST_PSEUDO_REGISTER
438 && find_reg_fusage (p, USE, src))
439 /* Don't change a USE of a register. */
440 || (GET_CODE (PATTERN (p)) == USE
441 && reg_overlap_mentioned_p (src, XEXP (PATTERN (p), 0))))
442 break;
444 /* See if all of SRC dies in P. This test is slightly more
445 conservative than it needs to be. */
446 if ((note = find_regno_note (p, REG_DEAD, sregno)) != 0
447 && GET_MODE (XEXP (note, 0)) == GET_MODE (src))
449 int failed = 0;
450 int d_length = 0;
451 int s_length = 0;
452 int d_n_calls = 0;
453 int s_n_calls = 0;
455 /* We can do the optimization. Scan forward from INSN again,
456 replacing regs as we go. Set FAILED if a replacement can't
457 be done. In that case, we can't move the death note for SRC.
458 This should be rare. */
460 /* Set to stop at next insn. */
461 for (q = next_real_insn (insn);
462 q != next_real_insn (p);
463 q = next_real_insn (q))
465 if (reg_overlap_mentioned_p (src, PATTERN (q)))
467 /* If SRC is a hard register, we might miss some
468 overlapping registers with validate_replace_rtx,
469 so we would have to undo it. We can't if DEST is
470 present in the insn, so fail in that combination
471 of cases. */
472 if (sregno < FIRST_PSEUDO_REGISTER
473 && reg_mentioned_p (dest, PATTERN (q)))
474 failed = 1;
476 /* Replace all uses and make sure that the register
477 isn't still present. */
478 else if (validate_replace_rtx (src, dest, q)
479 && (sregno >= FIRST_PSEUDO_REGISTER
480 || ! reg_overlap_mentioned_p (src,
481 PATTERN (q))))
482 ;
483 else
485 validate_replace_rtx (dest, src, q);
486 failed = 1;
490 /* For SREGNO, count the total number of insns scanned.
491 For DREGNO, count the total number of insns scanned after
492 passing the death note for DREGNO. */
493 s_length++;
494 if (dest_death)
495 d_length++;
497 /* If the insn in which SRC dies is a CALL_INSN, don't count it
498 as a call that has been crossed. Otherwise, count it. */
499 if (q != p && CALL_P (q))
501 /* Similarly, total calls for SREGNO, total calls beyond
502 the death note for DREGNO. */
503 s_n_calls++;
504 if (dest_death)
505 d_n_calls++;
508 /* If DEST dies here, remove the death note and save it for
509 later. Make sure ALL of DEST dies here; again, this is
510 overly conservative. */
511 if (dest_death == 0
512 && (dest_death = find_regno_note (q, REG_DEAD, dregno)) != 0)
514 if (GET_MODE (XEXP (dest_death, 0)) != GET_MODE (dest))
515 failed = 1, dest_death = 0;
516 else
517 remove_note (q, dest_death);
521 if (! failed)
523 /* These counters need to be updated if and only if we are
524 going to move the REG_DEAD note. */
525 if (sregno >= FIRST_PSEUDO_REGISTER)
527 if (REG_LIVE_LENGTH (sregno) >= 0)
529 REG_LIVE_LENGTH (sregno) -= s_length;
530 /* REG_LIVE_LENGTH is only an approximation after
531 combine if sched is not run, so make sure that we
532 still have a reasonable value. */
533 if (REG_LIVE_LENGTH (sregno) < 2)
534 REG_LIVE_LENGTH (sregno) = 2;
537 REG_N_CALLS_CROSSED (sregno) -= s_n_calls;
540 /* Move death note of SRC from P to INSN. */
541 remove_note (p, note);
542 XEXP (note, 1) = REG_NOTES (insn);
543 REG_NOTES (insn) = note;
546 /* DEST is also dead if INSN has a REG_UNUSED note for DEST. */
547 if (! dest_death
548 && (dest_death = find_regno_note (insn, REG_UNUSED, dregno)))
550 PUT_REG_NOTE_KIND (dest_death, REG_DEAD);
551 remove_note (insn, dest_death);
554 /* Put death note of DEST on P if we saw it die. */
555 if (dest_death)
557 XEXP (dest_death, 1) = REG_NOTES (p);
558 REG_NOTES (p) = dest_death;
560 if (dregno >= FIRST_PSEUDO_REGISTER)
562 /* If and only if we are moving the death note for DREGNO,
563 then we need to update its counters. */
564 if (REG_LIVE_LENGTH (dregno) >= 0)
565 REG_LIVE_LENGTH (dregno) += d_length;
566 REG_N_CALLS_CROSSED (dregno) += d_n_calls;
570 return ! failed;
573 /* If SRC is a hard register which is set or killed in some other
574 way, we can't do this optimization. */
575 else if (sregno < FIRST_PSEUDO_REGISTER
576 && dead_or_set_p (p, src))
577 break;
579 return 0;
582 /* INSN is a copy of SRC to DEST, in which SRC dies. See if we now have
583 a sequence of insns that modify DEST followed by an insn that sets
584 SRC to DEST in which DEST dies, with no prior modification of DEST.
585 (There is no need to check if the insns in between actually modify
586 DEST. We should not have cases where DEST is not modified, but
587 the optimization is safe if no such modification is detected.)
588 In that case, we can replace all uses of DEST, starting with INSN and
589 ending with the set of SRC to DEST, with SRC. We do not do this
590 optimization if a CALL_INSN is crossed unless SRC already crosses a
591 call or if DEST dies before the copy back to SRC.
593 It is assumed that DEST and SRC are pseudos; it is too complicated to do
594 this for hard registers since the substitutions we may make might fail. */
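/* A sketch with invented register numbers:

     INSN: (set (reg 200) (reg 100))   ; reg 100 dies here
     ...                               ; insns that set and use reg 200
     P:    (set (reg 100) (reg 200))   ; reg 200 dies here

   Every use of reg 200 from INSN through P is replaced by reg 100, so both
   copies become no-ops that later passes can remove.  */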
596 static void
597 optimize_reg_copy_2 (rtx insn, rtx dest, rtx src)
599 rtx p, q;
600 rtx set;
601 int sregno = REGNO (src);
602 int dregno = REGNO (dest);
604 for (p = NEXT_INSN (insn); p; p = NEXT_INSN (p))
606 /* ??? We can't scan past the end of a basic block without updating
607 the register lifetime info (REG_DEAD/basic_block_live_at_start). */
608 if (perhaps_ends_bb_p (p))
609 break;
610 else if (! INSN_P (p))
611 continue;
613 set = single_set (p);
614 if (set && SET_SRC (set) == dest && SET_DEST (set) == src
615 && find_reg_note (p, REG_DEAD, dest))
617 /* We can do the optimization. Scan forward from INSN again,
618 replacing regs as we go. */
620 /* Set to stop at next insn. */
621 for (q = insn; q != NEXT_INSN (p); q = NEXT_INSN (q))
622 if (INSN_P (q))
624 if (reg_mentioned_p (dest, PATTERN (q)))
625 PATTERN (q) = replace_rtx (PATTERN (q), dest, src);
628 if (CALL_P (q))
630 REG_N_CALLS_CROSSED (dregno)--;
631 REG_N_CALLS_CROSSED (sregno)++;
635 remove_note (p, find_reg_note (p, REG_DEAD, dest));
636 REG_N_DEATHS (dregno)--;
637 remove_note (insn, find_reg_note (insn, REG_DEAD, src));
638 REG_N_DEATHS (sregno)--;
639 return;
642 if (reg_set_p (src, p)
643 || find_reg_note (p, REG_DEAD, dest)
644 || (CALL_P (p) && REG_N_CALLS_CROSSED (sregno) == 0))
645 break;
648 /* INSN is a ZERO_EXTEND or SIGN_EXTEND of SRC to DEST.
649 Look if SRC dies there, and if it is only set once, by loading
650 it from memory. If so, try to incorporate the zero/sign extension
651 into the memory read, change SRC to the mode of DEST, and alter
652 the remaining accesses to use the appropriate SUBREG. This allows
653 SRC and DEST to be tied later. */
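/* A sketch, assuming HImode loaded and sign-extended to SImode; register
   numbers are invented:

     P:    (set (reg:HI 100) (mem:HI addr))                   ; only set of reg 100
     ...
     INSN: (set (reg:SI 200) (sign_extend:SI (reg:HI 100)))   ; reg 100 dies here

   becomes

     P:    (set (reg:SI 100) (sign_extend:SI (mem:HI addr)))
     ...
     INSN: (set (reg:SI 200) (reg:SI 100))

   with any intervening uses of reg 100 rewritten as (subreg:HI (reg:SI 100)),
   leaving INSN a plain copy whose registers can be tied.  */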
654 static void
655 optimize_reg_copy_3 (rtx insn, rtx dest, rtx src)
657 rtx src_reg = XEXP (src, 0);
658 int src_no = REGNO (src_reg);
659 int dst_no = REGNO (dest);
660 rtx p, set;
661 enum machine_mode old_mode;
663 if (src_no < FIRST_PSEUDO_REGISTER
664 || dst_no < FIRST_PSEUDO_REGISTER
665 || ! find_reg_note (insn, REG_DEAD, src_reg)
666 || REG_N_DEATHS (src_no) != 1
667 || REG_N_SETS (src_no) != 1)
668 return;
669 for (p = PREV_INSN (insn); p && ! reg_set_p (src_reg, p); p = PREV_INSN (p))
670 /* ??? We can't scan past the end of a basic block without updating
671 the register lifetime info (REG_DEAD/basic_block_live_at_start). */
672 if (perhaps_ends_bb_p (p))
673 break;
675 if (! p)
676 return;
678 if (! (set = single_set (p))
679 || !MEM_P (SET_SRC (set))
680 /* If there's a REG_EQUIV note, this must be an insn that loads an
681 argument. Prefer keeping the note over doing this optimization. */
682 || find_reg_note (p, REG_EQUIV, NULL_RTX)
683 || SET_DEST (set) != src_reg)
684 return;
686 /* Be conservative: although this optimization is also valid for
687 volatile memory references, that could cause trouble in later passes. */
688 if (MEM_VOLATILE_P (SET_SRC (set)))
689 return;
691 /* Do not use a SUBREG to truncate from one mode to another if truncation
692 is not a nop. */
693 if (GET_MODE_BITSIZE (GET_MODE (src_reg)) <= GET_MODE_BITSIZE (GET_MODE (src))
694 && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (src)),
695 GET_MODE_BITSIZE (GET_MODE (src_reg))))
696 return;
698 old_mode = GET_MODE (src_reg);
699 PUT_MODE (src_reg, GET_MODE (src));
700 XEXP (src, 0) = SET_SRC (set);
702 /* Include this change in the group so that it's easily undone if
703 one of the changes in the group is invalid. */
704 validate_change (p, &SET_SRC (set), src, 1);
706 /* Now walk forward making additional replacements. We want to be able
707 to undo all the changes if a later substitution fails. */
708 while (p = NEXT_INSN (p), p != insn)
710 if (! INSN_P (p))
711 continue;
713 /* Make a tentative change. */
714 validate_replace_rtx_group (src_reg,
715 gen_lowpart_SUBREG (old_mode, src_reg),
716 p);
719 validate_replace_rtx_group (src, src_reg, insn);
721 /* Now see if all the changes are valid. */
722 if (! apply_change_group ())
724 /* One or more changes were no good. Back out everything. */
725 PUT_MODE (src_reg, old_mode);
726 XEXP (src, 0) = src_reg;
728 else
730 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
731 if (note)
732 remove_note (p, note);
737 /* If we were not able to update the users of src to use dest directly, try
738 instead moving the value to dest directly before the operation. */
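/* A sketch with invented register numbers: if INSN is

     (set (reg 200) (plus:SI (reg 100) (const_int 1)))

   and the matching constraint wants the input to be the same register as
   the output reg 200, emit

     (set (reg 200) (reg 100))

   immediately before INSN and rewrite INSN to read reg 200, so the
   constraint is satisfied without a reload-generated move later.  */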
740 static void
741 copy_src_to_dest (rtx insn, rtx src, rtx dest, int old_max_uid)
743 rtx seq;
744 rtx link;
745 rtx next;
746 rtx set;
747 rtx move_insn;
748 rtx *p_insn_notes;
749 rtx *p_move_notes;
750 int src_regno;
751 int dest_regno;
752 int bb;
753 int insn_uid;
754 int move_uid;
756 /* A REG_LIVE_LENGTH of -1 indicates the register is equivalent to a constant
757 or memory location and is used infrequently; a REG_LIVE_LENGTH of -2 marks
758 a parameter that is not allocated a register when there is no frame pointer.
759 For now, we just reject them, rather than incrementing the live length. */
761 if (REG_P (src)
762 && REG_LIVE_LENGTH (REGNO (src)) > 0
763 && REG_P (dest)
764 && REG_LIVE_LENGTH (REGNO (dest)) > 0
765 && (set = single_set (insn)) != NULL_RTX
766 && !reg_mentioned_p (dest, SET_SRC (set))
767 && GET_MODE (src) == GET_MODE (dest))
769 int old_num_regs = reg_rtx_no;
771 /* Generate the src->dest move. */
772 start_sequence ();
773 emit_move_insn (dest, src);
774 seq = get_insns ();
775 end_sequence ();
776 /* If this sequence uses new registers, we may not use it. */
777 if (old_num_regs != reg_rtx_no
778 || ! validate_replace_rtx (src, dest, insn))
780 /* We have to restore reg_rtx_no to its old value, lest
781 recompute_reg_usage will try to compute the usage of the
782 new regs, yet reg_n_info is not valid for them. */
783 reg_rtx_no = old_num_regs;
784 return;
786 emit_insn_before (seq, insn);
787 move_insn = PREV_INSN (insn);
788 p_move_notes = &REG_NOTES (move_insn);
789 p_insn_notes = &REG_NOTES (insn);
791 /* Move any notes mentioning src to the move instruction. */
792 for (link = REG_NOTES (insn); link != NULL_RTX; link = next)
794 next = XEXP (link, 1);
795 if (XEXP (link, 0) == src)
797 *p_move_notes = link;
798 p_move_notes = &XEXP (link, 1);
800 else
802 *p_insn_notes = link;
803 p_insn_notes = &XEXP (link, 1);
807 *p_move_notes = NULL_RTX;
808 *p_insn_notes = NULL_RTX;
810 /* Is the insn the head of a basic block? If so extend it. */
811 insn_uid = INSN_UID (insn);
812 move_uid = INSN_UID (move_insn);
813 if (insn_uid < old_max_uid)
815 bb = regmove_bb_head[insn_uid];
816 if (bb >= 0)
818 BB_HEAD (BASIC_BLOCK (bb)) = move_insn;
819 regmove_bb_head[insn_uid] = -1;
823 /* Update the various register tables. */
824 dest_regno = REGNO (dest);
825 REG_N_SETS (dest_regno) ++;
826 REG_LIVE_LENGTH (dest_regno)++;
827 if (REGNO_FIRST_UID (dest_regno) == insn_uid)
828 REGNO_FIRST_UID (dest_regno) = move_uid;
830 src_regno = REGNO (src);
831 if (! find_reg_note (move_insn, REG_DEAD, src))
832 REG_LIVE_LENGTH (src_regno)++;
834 if (REGNO_FIRST_UID (src_regno) == insn_uid)
835 REGNO_FIRST_UID (src_regno) = move_uid;
837 if (REGNO_LAST_UID (src_regno) == insn_uid)
838 REGNO_LAST_UID (src_regno) = move_uid;
843 /* Return whether REG is set in only one location, and is set to a
844 constant, but is set in a different basic block from INSN (an
845 instructions which uses REG). In this case REG is equivalent to a
846 constant, and we don't want to break that equivalence, because that
847 may increase register pressure and make reload harder. If REG is
848 set in the same basic block as INSN, we don't worry about it,
849 because we'll probably need a register anyhow (??? but what if REG
850 is used in a different basic block as well as this one?). FIRST is
851 the first insn in the function. */
853 static int
854 reg_is_remote_constant_p (rtx reg, rtx insn, rtx first)
856 rtx p;
858 if (REG_N_SETS (REGNO (reg)) != 1)
859 return 0;
861 /* Look for the set. */
862 for (p = BB_HEAD (BLOCK_FOR_INSN (insn)); p != insn; p = NEXT_INSN (p))
864 rtx s;
866 if (!INSN_P (p))
867 continue;
868 s = single_set (p);
869 if (s != 0
870 && REG_P (SET_DEST (s))
871 && REGNO (SET_DEST (s)) == REGNO (reg))
873 /* The register is set in the same basic block. */
874 return 0;
878 for (p = first; p && p != insn; p = NEXT_INSN (p))
880 rtx s;
882 if (! INSN_P (p))
883 continue;
884 s = single_set (p);
885 if (s != 0
886 && REG_P (SET_DEST (s))
887 && REGNO (SET_DEST (s)) == REGNO (reg))
889 /* This is the instruction which sets REG. If there is a
890 REG_EQUAL note, then REG is equivalent to a constant. */
891 if (find_reg_note (p, REG_EQUAL, NULL_RTX))
892 return 1;
893 return 0;
897 return 0;
900 /* INSN is adding a CONST_INT to a REG. We search backwards looking for
901 another add immediate instruction with the same source and dest registers,
902 and if we find one, we change INSN to an increment, and return 1. If
903 no changes are made, we return 0.
905 This changes
906 (set (reg100) (plus reg1 offset1))
907 ...
908 (set (reg100) (plus reg1 offset2))
909 to
910 (set (reg100) (plus reg1 offset1))
911 ...
912 (set (reg100) (plus reg100 offset2-offset1)) */
914 /* ??? What does this comment mean? */
915 /* cse disrupts preincrement / postdecrement sequences when it finds a
916 hard register as ultimate source, like the frame pointer. */
918 static int
919 fixup_match_2 (rtx insn, rtx dst, rtx src, rtx offset)
921 rtx p, dst_death = 0;
922 int length, num_calls = 0;
924 /* If SRC dies in INSN, we'd have to move the death note. This is
925 considered to be very unlikely, so we just skip the optimization
926 in this case. */
927 if (find_regno_note (insn, REG_DEAD, REGNO (src)))
928 return 0;
930 /* Scan backward to find the first instruction that sets DST. */
932 for (length = 0, p = PREV_INSN (insn); p; p = PREV_INSN (p))
934 rtx pset;
936 /* ??? We can't scan past the end of a basic block without updating
937 the register lifetime info (REG_DEAD/basic_block_live_at_start). */
938 if (perhaps_ends_bb_p (p))
939 break;
940 else if (! INSN_P (p))
941 continue;
943 if (find_regno_note (p, REG_DEAD, REGNO (dst)))
944 dst_death = p;
945 if (! dst_death)
946 length++;
948 pset = single_set (p);
949 if (pset && SET_DEST (pset) == dst
950 && GET_CODE (SET_SRC (pset)) == PLUS
951 && XEXP (SET_SRC (pset), 0) == src
952 && GET_CODE (XEXP (SET_SRC (pset), 1)) == CONST_INT)
954 HOST_WIDE_INT newconst
955 = INTVAL (offset) - INTVAL (XEXP (SET_SRC (pset), 1));
956 rtx add = gen_add3_insn (dst, dst, GEN_INT (newconst));
958 if (add && validate_change (insn, &PATTERN (insn), add, 0))
960 /* Remove the death note for DST from DST_DEATH. */
961 if (dst_death)
963 remove_death (REGNO (dst), dst_death);
964 REG_LIVE_LENGTH (REGNO (dst)) += length;
965 REG_N_CALLS_CROSSED (REGNO (dst)) += num_calls;
968 if (dump_file)
969 fprintf (dump_file,
970 "Fixed operand of insn %d.\n",
971 INSN_UID (insn));
973 #ifdef AUTO_INC_DEC
974 for (p = PREV_INSN (insn); p; p = PREV_INSN (p))
976 if (LABEL_P (p)
977 || JUMP_P (p))
978 break;
979 if (! INSN_P (p))
980 continue;
981 if (reg_overlap_mentioned_p (dst, PATTERN (p)))
983 if (try_auto_increment (p, insn, 0, dst, newconst, 0))
984 return 1;
985 break;
988 for (p = NEXT_INSN (insn); p; p = NEXT_INSN (p))
990 if (LABEL_P (p)
991 || JUMP_P (p))
992 break;
993 if (! INSN_P (p))
994 continue;
995 if (reg_overlap_mentioned_p (dst, PATTERN (p)))
997 try_auto_increment (p, insn, 0, dst, newconst, 1);
998 break;
1001 #endif
1002 return 1;
1006 if (reg_set_p (dst, PATTERN (p)))
1007 break;
1009 /* If we have passed a call instruction, and the
1010 pseudo-reg SRC is not already live across a call,
1011 then don't perform the optimization. */
1012 /* reg_set_p is overly conservative for CALL_INSNS, thinks that all
1013 hard regs are clobbered. Thus, we only use it for src for
1014 non-call insns. */
1015 if (CALL_P (p))
1017 if (! dst_death)
1018 num_calls++;
1020 if (REG_N_CALLS_CROSSED (REGNO (src)) == 0)
1021 break;
1023 if (call_used_regs [REGNO (dst)]
1024 || find_reg_fusage (p, CLOBBER, dst))
1025 break;
1027 else if (reg_set_p (src, PATTERN (p)))
1028 break;
1031 return 0;
1034 /* Main entry for the register move optimization.
1035 F is the first instruction.
1036 NREGS is one plus the highest pseudo-reg number used in the function.
1037 Trace output, when enabled, is written to dump_file. */
1040 static void
1041 regmove_optimize (rtx f, int nregs)
1043 int old_max_uid = get_max_uid ();
1044 rtx insn;
1045 struct match match;
1046 int pass;
1047 int i;
1048 rtx copy_src, copy_dst;
1049 basic_block bb;
1051 /* ??? Hack. Regmove doesn't examine the CFG, and gets mightily
1052 confused by non-call exceptions ending blocks. */
1053 if (flag_non_call_exceptions)
1054 return;
1056 /* Find out where a potential flags register is live, and so that we
1057 can suppress some optimizations in those zones. */
1058 mark_flags_life_zones (discover_flags_reg ());
1060 regno_src_regno = XNEWVEC (int, nregs);
1061 for (i = nregs; --i >= 0; ) regno_src_regno[i] = -1;
1063 regmove_bb_head = XNEWVEC (int, old_max_uid + 1);
1064 for (i = old_max_uid; i >= 0; i--) regmove_bb_head[i] = -1;
1065 FOR_EACH_BB (bb)
1066 regmove_bb_head[INSN_UID (BB_HEAD (bb))] = bb->index;
1068 /* A forward/backward pass. Replace output operands with input operands. */
1070 for (pass = 0; pass <= 2; pass++)
1072 if (! flag_regmove && pass >= flag_expensive_optimizations)
1073 goto done;
1075 if (dump_file)
1076 fprintf (dump_file, "Starting %s pass...\n",
1077 pass ? "backward" : "forward");
1079 for (insn = pass ? get_last_insn () : f; insn;
1080 insn = pass ? PREV_INSN (insn) : NEXT_INSN (insn))
1082 rtx set;
1083 int op_no, match_no;
1085 set = single_set (insn);
1086 if (! set)
1087 continue;
1089 if (flag_expensive_optimizations && ! pass
1090 && (GET_CODE (SET_SRC (set)) == SIGN_EXTEND
1091 || GET_CODE (SET_SRC (set)) == ZERO_EXTEND)
1092 && REG_P (XEXP (SET_SRC (set), 0))
1093 && REG_P (SET_DEST (set)))
1094 optimize_reg_copy_3 (insn, SET_DEST (set), SET_SRC (set));
1096 if (flag_expensive_optimizations && ! pass
1097 && REG_P (SET_SRC (set))
1098 && REG_P (SET_DEST (set)))
1100 /* If this is a register-register copy where SRC is not dead,
1101 see if we can optimize it. If this optimization succeeds,
1102 it will become a copy where SRC is dead. */
1103 if ((find_reg_note (insn, REG_DEAD, SET_SRC (set))
1104 || optimize_reg_copy_1 (insn, SET_DEST (set), SET_SRC (set)))
1105 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
1107 /* Similarly for a pseudo-pseudo copy when SRC is dead. */
1108 if (REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1109 optimize_reg_copy_2 (insn, SET_DEST (set), SET_SRC (set));
1110 if (regno_src_regno[REGNO (SET_DEST (set))] < 0
1111 && SET_SRC (set) != SET_DEST (set))
1113 int srcregno = REGNO (SET_SRC (set));
1114 if (regno_src_regno[srcregno] >= 0)
1115 srcregno = regno_src_regno[srcregno];
1116 regno_src_regno[REGNO (SET_DEST (set))] = srcregno;
1120 if (! flag_regmove)
1121 continue;
1123 if (! find_matches (insn, &match))
1124 continue;
1126 /* Now scan through the operands looking for a source operand
1127 which is supposed to match the destination operand.
1128 Then scan forward for an instruction which uses the dest
1129 operand.
1130 If it dies there, then replace the dest in both instructions with
1131 the source operand. */
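/* A sketch of the intended forward-pass rewrite (register numbers invented):

     INSN: (set (reg 200) (op (reg 100) ...))   ; reg 100 must match reg 200
     ...
     P:    insn using (reg 200)                 ; reg 200 dies in P

   becomes

     INSN: (set (reg 100) (op (reg 100) ...))
     ...
     P:    insn using (reg 100)

   so the matching constraint is met without a move (see fixup_match_1).  */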
1133 for (op_no = 0; op_no < recog_data.n_operands; op_no++)
1135 rtx src, dst, src_subreg;
1136 enum reg_class src_class, dst_class;
1138 match_no = match.with[op_no];
1140 /* Nothing to do if the two operands aren't supposed to match. */
1141 if (match_no < 0)
1142 continue;
1144 src = recog_data.operand[op_no];
1145 dst = recog_data.operand[match_no];
1147 if (!REG_P (src))
1148 continue;
1150 src_subreg = src;
1151 if (GET_CODE (dst) == SUBREG
1152 && GET_MODE_SIZE (GET_MODE (dst))
1153 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dst))))
1155 dst = SUBREG_REG (dst);
1156 src_subreg = lowpart_subreg (GET_MODE (dst),
1157 src, GET_MODE (src));
1158 if (!src_subreg)
1159 continue;
1161 if (!REG_P (dst)
1162 || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1163 continue;
1165 if (REGNO (src) < FIRST_PSEUDO_REGISTER)
1167 if (match.commutative[op_no] < op_no)
1168 regno_src_regno[REGNO (dst)] = REGNO (src);
1169 continue;
1172 if (REG_LIVE_LENGTH (REGNO (src)) < 0)
1173 continue;
1175 /* op_no/src must be a read-only operand, and
1176 match_operand/dst must be a write-only operand. */
1177 if (match.use[op_no] != READ
1178 || match.use[match_no] != WRITE)
1179 continue;
1181 if (match.early_clobber[match_no]
1182 && count_occurrences (PATTERN (insn), src, 0) > 1)
1183 continue;
1185 /* Make sure match_operand is the destination. */
1186 if (recog_data.operand[match_no] != SET_DEST (set))
1187 continue;
1189 /* If the operands already match, then there is nothing to do. */
1190 if (operands_match_p (src, dst))
1191 continue;
1193 /* But in the commutative case, we might find a better match. */
1194 if (match.commutative[op_no] >= 0)
1196 rtx comm = recog_data.operand[match.commutative[op_no]];
1197 if (operands_match_p (comm, dst)
1198 && (replacement_quality (comm)
1199 >= replacement_quality (src)))
1200 continue;
1203 src_class = reg_preferred_class (REGNO (src));
1204 dst_class = reg_preferred_class (REGNO (dst));
1205 if (! regclass_compatible_p (src_class, dst_class))
1206 continue;
1208 if (GET_MODE (src) != GET_MODE (dst))
1209 continue;
1211 if (fixup_match_1 (insn, set, src, src_subreg, dst, pass,
1212 op_no, match_no))
1213 break;
1218 /* A backward pass. Replace input operands with output operands. */
1220 if (dump_file)
1221 fprintf (dump_file, "Starting backward pass...\n");
1223 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1225 if (INSN_P (insn))
1227 int op_no, match_no;
1228 int success = 0;
1230 if (! find_matches (insn, &match))
1231 continue;
1233 /* Now scan through the operands looking for a destination operand
1234 which is supposed to match a source operand.
1235 Then scan backward for an instruction which sets the source
1236 operand. If safe, then replace the source operand with the
1237 dest operand in both instructions. */
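/* A sketch of the intended backward-pass rewrite (register numbers invented):

     P:    (set (reg 100) ...)                  ; sets the source operand
     ...
     INSN: (set (reg 200) (op (reg 100) ...))   ; reg 100 must match reg 200

   becomes

     P:    (set (reg 200) ...)
     ...
     INSN: (set (reg 200) (op (reg 200) ...))  */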
1239 copy_src = NULL_RTX;
1240 copy_dst = NULL_RTX;
1241 for (op_no = 0; op_no < recog_data.n_operands; op_no++)
1243 rtx set, p, src, dst;
1244 rtx src_note, dst_note;
1245 int num_calls = 0;
1246 enum reg_class src_class, dst_class;
1247 int length;
1249 match_no = match.with[op_no];
1251 /* Nothing to do if the two operands aren't supposed to match. */
1252 if (match_no < 0)
1253 continue;
1255 dst = recog_data.operand[match_no];
1256 src = recog_data.operand[op_no];
1258 if (!REG_P (src))
1259 continue;
1261 if (!REG_P (dst)
1262 || REGNO (dst) < FIRST_PSEUDO_REGISTER
1263 || REG_LIVE_LENGTH (REGNO (dst)) < 0
1264 || GET_MODE (src) != GET_MODE (dst))
1265 continue;
1267 /* If the operands already match, then there is nothing to do. */
1268 if (operands_match_p (src, dst))
1269 continue;
1271 if (match.commutative[op_no] >= 0)
1273 rtx comm = recog_data.operand[match.commutative[op_no]];
1274 if (operands_match_p (comm, dst))
1275 continue;
1278 set = single_set (insn);
1279 if (! set)
1280 continue;
1282 /* Note that single_set ignores parts of a parallel set for
1283 which one of the destinations is REG_UNUSED. We can't
1284 handle that here, since we can wind up rewriting things
1285 such that a single register is set twice within a single
1286 parallel. */
1287 if (reg_set_p (src, insn))
1288 continue;
1290 /* match_no/dst must be a write-only operand, and
1291 op_no/src must be a read-only operand. */
1292 if (match.use[op_no] != READ
1293 || match.use[match_no] != WRITE)
1294 continue;
1296 if (match.early_clobber[match_no]
1297 && count_occurrences (PATTERN (insn), src, 0) > 1)
1298 continue;
1300 /* Make sure match_no is the destination. */
1301 if (recog_data.operand[match_no] != SET_DEST (set))
1302 continue;
1304 if (REGNO (src) < FIRST_PSEUDO_REGISTER)
1306 if (GET_CODE (SET_SRC (set)) == PLUS
1307 && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT
1308 && XEXP (SET_SRC (set), 0) == src
1309 && fixup_match_2 (insn, dst, src,
1310 XEXP (SET_SRC (set), 1)))
1311 break;
1312 continue;
1314 src_class = reg_preferred_class (REGNO (src));
1315 dst_class = reg_preferred_class (REGNO (dst));
1317 if (! (src_note = find_reg_note (insn, REG_DEAD, src)))
1319 /* We used to force the copy here like in other cases, but
1320 it produces worse code, as it eliminates no copy
1321 instructions and the copy emitted will be produced by
1322 reload anyway. On patterns with multiple alternatives,
1323 there may be better solution available.
1325 In particular this change produced slower code for numeric
1326 i387 programs. */
1328 continue;
1331 if (! regclass_compatible_p (src_class, dst_class))
1333 if (!copy_src)
1335 copy_src = src;
1336 copy_dst = dst;
1338 continue;
1341 /* Can not modify an earlier insn to set dst if this insn
1342 uses an old value in the source. */
1343 if (reg_overlap_mentioned_p (dst, SET_SRC (set)))
1345 if (!copy_src)
1347 copy_src = src;
1348 copy_dst = dst;
1350 continue;
1353 /* If src is set once in a different basic block,
1354 and is set equal to a constant, then do not use
1355 it for this optimization, as this would make it
1356 no longer equivalent to a constant. */
1358 if (reg_is_remote_constant_p (src, insn, f))
1360 if (!copy_src)
1362 copy_src = src;
1363 copy_dst = dst;
1365 continue;
1369 if (dump_file)
1370 fprintf (dump_file,
1371 "Could fix operand %d of insn %d matching operand %d.\n",
1372 op_no, INSN_UID (insn), match_no);
1374 /* Scan backward to find the first instruction that uses
1375 the input operand. If the operand is set here, then
1376 replace it in both instructions with match_no. */
1378 for (length = 0, p = PREV_INSN (insn); p; p = PREV_INSN (p))
1380 rtx pset;
1382 /* ??? We can't scan past the end of a basic block without
1383 updating the register lifetime info
1384 (REG_DEAD/basic_block_live_at_start). */
1385 if (perhaps_ends_bb_p (p))
1386 break;
1387 else if (! INSN_P (p))
1388 continue;
1390 length++;
1392 /* ??? See if all of SRC is set in P. This test is much
1393 more conservative than it needs to be. */
1394 pset = single_set (p);
1395 if (pset && SET_DEST (pset) == src)
1397 /* We use validate_replace_rtx, in case there
1398 are multiple identical source operands. All of
1399 them have to be changed at the same time. */
1400 if (validate_replace_rtx (src, dst, insn))
1402 if (validate_change (p, &SET_DEST (pset),
1403 dst, 0))
1404 success = 1;
1405 else
1407 /* Change all source operands back.
1408 This modifies the dst as a side-effect. */
1409 validate_replace_rtx (dst, src, insn);
1410 /* Now make sure the dst is right. */
1411 validate_change (insn,
1412 recog_data.operand_loc[match_no],
1413 dst, 0);
1416 break;
1419 if (reg_overlap_mentioned_p (src, PATTERN (p))
1420 || reg_overlap_mentioned_p (dst, PATTERN (p)))
1421 break;
1423 /* If we have passed a call instruction, and the
1424 pseudo-reg DST is not already live across a call,
1425 then don't perform the optimization. */
1426 if (CALL_P (p))
1428 num_calls++;
1430 if (REG_N_CALLS_CROSSED (REGNO (dst)) == 0)
1431 break;
1435 if (success)
1437 int dstno, srcno;
1439 /* Remove the death note for SRC from INSN. */
1440 remove_note (insn, src_note);
1441 /* Move the death note for SRC to P if it is used
1442 there. */
1443 if (reg_overlap_mentioned_p (src, PATTERN (p)))
1445 XEXP (src_note, 1) = REG_NOTES (p);
1446 REG_NOTES (p) = src_note;
1448 /* If there is a REG_DEAD note for DST on P, then remove
1449 it, because DST is now set there. */
1450 if ((dst_note = find_reg_note (p, REG_DEAD, dst)))
1451 remove_note (p, dst_note);
1453 dstno = REGNO (dst);
1454 srcno = REGNO (src);
1456 REG_N_SETS (dstno)++;
1457 REG_N_SETS (srcno)--;
1459 REG_N_CALLS_CROSSED (dstno) += num_calls;
1460 REG_N_CALLS_CROSSED (srcno) -= num_calls;
1462 REG_LIVE_LENGTH (dstno) += length;
1463 if (REG_LIVE_LENGTH (srcno) >= 0)
1465 REG_LIVE_LENGTH (srcno) -= length;
1466 /* REG_LIVE_LENGTH is only an approximation after
1467 combine if sched is not run, so make sure that we
1468 still have a reasonable value. */
1469 if (REG_LIVE_LENGTH (srcno) < 2)
1470 REG_LIVE_LENGTH (srcno) = 2;
1473 if (dump_file)
1474 fprintf (dump_file,
1475 "Fixed operand %d of insn %d matching operand %d.\n",
1476 op_no, INSN_UID (insn), match_no);
1478 break;
1482 /* If we weren't able to replace any of the alternatives, try an
1483 alternative approach of copying the source to the destination. */
1484 if (!success && copy_src != NULL_RTX)
1485 copy_src_to_dest (insn, copy_src, copy_dst, old_max_uid);
1490 /* In fixup_match_1, some insns may have been inserted after basic block
1491 ends. Fix that here. */
1492 FOR_EACH_BB (bb)
1494 rtx end = BB_END (bb);
1495 rtx new = end;
1496 rtx next = NEXT_INSN (new);
1497 while (next != 0 && INSN_UID (next) >= old_max_uid
1498 && (bb->next_bb == EXIT_BLOCK_PTR || BB_HEAD (bb->next_bb) != next))
1499 new = next, next = NEXT_INSN (new);
1500 BB_END (bb) = new;
1503 done:
1504 /* Clean up. */
1505 free (regno_src_regno);
1506 free (regmove_bb_head);
1509 /* Returns nonzero if INSN's pattern has matching constraints for any operand.
1510 Returns 0 if INSN can't be recognized, or if the alternative can't be
1511 determined.
1513 Initialize the info in MATCHP based on the constraints. */
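/* For example -- the constraint strings below are invented, using the
   standard constraint syntax -- an insn whose operands carry

     operand 0: "=r"   write-only register output
     operand 1: "%0"   commutative with operand 2, must match operand 0
     operand 2: "ri"   read-only register-or-immediate input

   yields matchp->use = { WRITE, READ, READ }, matchp->with[1] == 0 (and,
   through the commutative pairing, matchp->with[2] == 0 as well), and
   find_matches returns nonzero.  */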
1515 static int
1516 find_matches (rtx insn, struct match *matchp)
1518 int likely_spilled[MAX_RECOG_OPERANDS];
1519 int op_no;
1520 int any_matches = 0;
1522 extract_insn (insn);
1523 if (! constrain_operands (0))
1524 return 0;
1526 /* Must initialize this before main loop, because the code for
1527 the commutative case may set matches for operands other than
1528 the current one. */
1529 for (op_no = recog_data.n_operands; --op_no >= 0; )
1530 matchp->with[op_no] = matchp->commutative[op_no] = -1;
1532 for (op_no = 0; op_no < recog_data.n_operands; op_no++)
1534 const char *p;
1535 char c;
1536 int i = 0;
1538 p = recog_data.constraints[op_no];
1540 likely_spilled[op_no] = 0;
1541 matchp->use[op_no] = READ;
1542 matchp->early_clobber[op_no] = 0;
1543 if (*p == '=')
1544 matchp->use[op_no] = WRITE;
1545 else if (*p == '+')
1546 matchp->use[op_no] = READWRITE;
1548 for (;*p && i < which_alternative; p++)
1549 if (*p == ',')
1550 i++;
1552 while ((c = *p) != '\0' && c != ',')
1554 switch (c)
1556 case '=':
1557 break;
1558 case '+':
1559 break;
1560 case '&':
1561 matchp->early_clobber[op_no] = 1;
1562 break;
1563 case '%':
1564 matchp->commutative[op_no] = op_no + 1;
1565 matchp->commutative[op_no + 1] = op_no;
1566 break;
1568 case '0': case '1': case '2': case '3': case '4':
1569 case '5': case '6': case '7': case '8': case '9':
1571 char *end;
1572 unsigned long match_ul = strtoul (p, &end, 10);
1573 int match = match_ul;
1575 p = end;
1577 if (match < op_no && likely_spilled[match])
1578 continue;
1579 matchp->with[op_no] = match;
1580 any_matches = 1;
1581 if (matchp->commutative[op_no] >= 0)
1582 matchp->with[matchp->commutative[op_no]] = match;
1584 continue;
1586 case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'h':
1587 case 'j': case 'k': case 'l': case 'p': case 'q': case 't': case 'u':
1588 case 'v': case 'w': case 'x': case 'y': case 'z': case 'A': case 'B':
1589 case 'C': case 'D': case 'W': case 'Y': case 'Z':
1590 if (CLASS_LIKELY_SPILLED_P (REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p) ))
1591 likely_spilled[op_no] = 1;
1592 break;
1594 p += CONSTRAINT_LEN (c, p);
1597 return any_matches;
1600 /* Try to replace all occurrences of DST_REG with SRC in LOC, that is
1601 assumed to be in INSN. */
1603 static void
1604 replace_in_call_usage (rtx *loc, unsigned int dst_reg, rtx src, rtx insn)
1606 rtx x = *loc;
1607 enum rtx_code code;
1608 const char *fmt;
1609 int i, j;
1611 if (! x)
1612 return;
1614 code = GET_CODE (x);
1615 if (code == REG)
1617 if (REGNO (x) != dst_reg)
1618 return;
1620 validate_change (insn, loc, src, 1);
1622 return;
1625 /* Process each of our operands recursively. */
1626 fmt = GET_RTX_FORMAT (code);
1627 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1628 if (*fmt == 'e')
1629 replace_in_call_usage (&XEXP (x, i), dst_reg, src, insn);
1630 else if (*fmt == 'E')
1631 for (j = 0; j < XVECLEN (x, i); j++)
1632 replace_in_call_usage (& XVECEXP (x, i, j), dst_reg, src, insn);
1635 /* Try to replace output operand DST in SET, with input operand SRC. SET is
1636 the only set in INSN. INSN has just been recognized and constrained.
1637 SRC is operand number OPERAND_NUMBER in INSN.
1638 DST is operand number MATCH_NUMBER in INSN.
1639 If BACKWARD is nonzero, we have been called in a backward pass.
1640 Return nonzero for success. */
1642 static int
1643 fixup_match_1 (rtx insn, rtx set, rtx src, rtx src_subreg, rtx dst,
1644 int backward, int operand_number, int match_number)
1646 rtx p;
1647 rtx post_inc = 0, post_inc_set = 0, search_end = 0;
1648 int success = 0;
1649 int num_calls = 0, s_num_calls = 0;
1650 enum rtx_code code = NOTE;
1651 HOST_WIDE_INT insn_const = 0, newconst = 0;
1652 rtx overlap = 0; /* need to move insn ? */
1653 rtx src_note = find_reg_note (insn, REG_DEAD, src), dst_note = NULL_RTX;
1654 int length, s_length;
1656 if (! src_note)
1658 /* Look for (set (regX) (op regA constX))
1659 (set (regY) (op regA constY))
1660 and change that to
1661 (set (regA) (op regA constX)).
1662 (set (regY) (op regA constY-constX)).
1663 This works for add and shift operations, if
1664 regA is dead after or set by the second insn. */
1666 code = GET_CODE (SET_SRC (set));
1667 if ((code == PLUS || code == LSHIFTRT
1668 || code == ASHIFT || code == ASHIFTRT)
1669 && XEXP (SET_SRC (set), 0) == src
1670 && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
1671 insn_const = INTVAL (XEXP (SET_SRC (set), 1));
1672 else if (! stable_and_no_regs_but_for_p (SET_SRC (set), src, dst))
1673 return 0;
1674 else
1675 /* We might find a src_note while scanning. */
1676 code = NOTE;
1679 if (dump_file)
1680 fprintf (dump_file,
1681 "Could fix operand %d of insn %d matching operand %d.\n",
1682 operand_number, INSN_UID (insn), match_number);
1684 /* If SRC is equivalent to a constant set in a different basic block,
1685 then do not use it for this optimization. We want the equivalence
1686 so that if we have to reload this register, we can reload the
1687 constant, rather than extending the lifespan of the register. */
1688 if (reg_is_remote_constant_p (src, insn, get_insns ()))
1689 return 0;
1691 /* Scan forward to find the next instruction that
1692 uses the output operand. If the operand dies here,
1693 then replace it in both instructions with
1694 operand_number. */
1696 for (length = s_length = 0, p = NEXT_INSN (insn); p; p = NEXT_INSN (p))
1698 if (CALL_P (p))
1699 replace_in_call_usage (& CALL_INSN_FUNCTION_USAGE (p),
1700 REGNO (dst), src, p);
1702 /* ??? We can't scan past the end of a basic block without updating
1703 the register lifetime info (REG_DEAD/basic_block_live_at_start). */
1704 if (perhaps_ends_bb_p (p))
1705 break;
1706 else if (! INSN_P (p))
1707 continue;
1709 length++;
1710 if (src_note)
1711 s_length++;
1713 if (reg_set_p (src, p) || reg_set_p (dst, p)
1714 || (GET_CODE (PATTERN (p)) == USE
1715 && reg_overlap_mentioned_p (src, XEXP (PATTERN (p), 0))))
1716 break;
1718 /* See if all of DST dies in P. This test is
1719 slightly more conservative than it needs to be. */
1720 if ((dst_note = find_regno_note (p, REG_DEAD, REGNO (dst)))
1721 && (GET_MODE (XEXP (dst_note, 0)) == GET_MODE (dst)))
1723 /* If we would be moving INSN, check that we won't move it
1724 into the shadow of a live flags register. */
1725 /* ??? We only try to move it in front of P, although
1726 we could move it anywhere between OVERLAP and P. */
1727 if (overlap && GET_MODE (PREV_INSN (p)) != VOIDmode)
1728 break;
1730 if (! src_note)
1732 rtx q;
1733 rtx set2 = NULL_RTX;
1735 /* If an optimization is done, the value of SRC while P
1736 is executed will be changed. Check that this is OK. */
1737 if (reg_overlap_mentioned_p (src, PATTERN (p)))
1738 break;
1739 for (q = p; q; q = NEXT_INSN (q))
1741 /* ??? We can't scan past the end of a basic block without
1742 updating the register lifetime info
1743 (REG_DEAD/basic_block_live_at_start). */
1744 if (perhaps_ends_bb_p (q))
1746 q = 0;
1747 break;
1749 else if (! INSN_P (q))
1750 continue;
1751 else if (reg_overlap_mentioned_p (src, PATTERN (q))
1752 || reg_set_p (src, q))
1753 break;
1755 if (q)
1756 set2 = single_set (q);
1757 if (! q || ! set2 || GET_CODE (SET_SRC (set2)) != code
1758 || XEXP (SET_SRC (set2), 0) != src
1759 || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT
1760 || (SET_DEST (set2) != src
1761 && ! find_reg_note (q, REG_DEAD, src)))
1763 /* If this is a PLUS, we can still save a register by doing
1764 src += insn_const;
1765 P;
1766 src -= insn_const;
1767 This also gives opportunities for subsequent
1768 optimizations in the backward pass, so do it there. */
1769 if (code == PLUS && backward
1770 /* Don't do this if we can likely tie DST to SET_DEST
1771 of P later; we can't do this tying here if we got a
1772 hard register. */
1773 && ! (dst_note && ! REG_N_CALLS_CROSSED (REGNO (dst))
1774 && single_set (p)
1775 && REG_P (SET_DEST (single_set (p)))
1776 && (REGNO (SET_DEST (single_set (p)))
1777 < FIRST_PSEUDO_REGISTER))
1778 /* We may only emit an insn directly after P if we
1779 are not in the shadow of a live flags register. */
1780 && GET_MODE (p) == VOIDmode)
1782 search_end = q;
1783 q = insn;
1784 set2 = set;
1785 newconst = -insn_const;
1786 code = MINUS;
1788 else
1789 break;
1791 else
1793 newconst = INTVAL (XEXP (SET_SRC (set2), 1)) - insn_const;
1794 /* Reject out of range shifts. */
1795 if (code != PLUS
1796 && (newconst < 0
1797 || ((unsigned HOST_WIDE_INT) newconst
1798 >= (GET_MODE_BITSIZE (GET_MODE
1799 (SET_SRC (set2)))))))
1800 break;
1801 if (code == PLUS)
1803 post_inc = q;
1804 if (SET_DEST (set2) != src)
1805 post_inc_set = set2;
1808 /* We use 1 as last argument to validate_change so that all
1809 changes are accepted or rejected together by apply_change_group
1810 when it is called by validate_replace_rtx. */
1811 validate_change (q, &XEXP (SET_SRC (set2), 1),
1812 GEN_INT (newconst), 1);
1814 validate_change (insn, recog_data.operand_loc[match_number], src, 1);
1815 if (validate_replace_rtx (dst, src_subreg, p))
1816 success = 1;
1817 break;
1820 if (reg_overlap_mentioned_p (dst, PATTERN (p)))
1821 break;
1822 if (! src_note && reg_overlap_mentioned_p (src, PATTERN (p)))
1824 /* INSN was already checked to be movable wrt. the registers that it
1825 sets / uses when we found no REG_DEAD note for src on it, but it
1826 still might clobber the flags register. We'll have to check that
1827 we won't insert it into the shadow of a live flags register when
1828 we finally know where we are to move it. */
1829 overlap = p;
1830 src_note = find_reg_note (p, REG_DEAD, src);
1833 /* If we have passed a call instruction, and the pseudo-reg SRC is not
1834 already live across a call, then don't perform the optimization. */
1835 if (CALL_P (p))
1837 if (REG_N_CALLS_CROSSED (REGNO (src)) == 0)
1838 break;
1840 num_calls++;
1842 if (src_note)
1843 s_num_calls++;
1848 if (! success)
1849 return 0;
1851 /* Remove the death note for DST from P. */
1852 remove_note (p, dst_note);
1853 if (code == MINUS)
1855 post_inc = emit_insn_after (copy_rtx (PATTERN (insn)), p);
1856 if ((HAVE_PRE_INCREMENT || HAVE_PRE_DECREMENT)
1857 && search_end
1858 && try_auto_increment (search_end, post_inc, 0, src, newconst, 1))
1859 post_inc = 0;
1860 validate_change (insn, &XEXP (SET_SRC (set), 1), GEN_INT (insn_const), 0);
1861 REG_N_SETS (REGNO (src))++;
1862 REG_LIVE_LENGTH (REGNO (src))++;
1864 if (overlap)
1866 /* The lifetime of src and dest overlap,
1867 but we can change this by moving insn. */
1868 rtx pat = PATTERN (insn);
1869 if (src_note)
1870 remove_note (overlap, src_note);
1871 if ((HAVE_POST_INCREMENT || HAVE_POST_DECREMENT)
1872 && code == PLUS
1873 && try_auto_increment (overlap, insn, 0, src, insn_const, 0))
1874 insn = overlap;
1875 else
1877 rtx notes = REG_NOTES (insn);
1879 emit_insn_after_with_line_notes (pat, PREV_INSN (p), insn);
1880 delete_insn (insn);
1881 /* emit_insn_after_with_line_notes has no
1882 return value, so search for the new insn. */
1883 insn = p;
1884 while (! INSN_P (insn) || PATTERN (insn) != pat)
1885 insn = PREV_INSN (insn);
1887 REG_NOTES (insn) = notes;
1890 /* Sometimes we'd generate src = const; src += n;
1891 if so, replace the instruction that set src
1892 in the first place. */
1894 if (! overlap && (code == PLUS || code == MINUS))
1896 rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
1897 rtx q, set2 = NULL_RTX;
1898 int num_calls2 = 0, s_length2 = 0;
1900 if (note && CONSTANT_P (XEXP (note, 0)))
1902 for (q = PREV_INSN (insn); q; q = PREV_INSN (q))
1904 /* ??? We can't scan past the end of a basic block without
1905 updating the register lifetime info
1906 (REG_DEAD/basic_block_live_at_start). */
1907 if (perhaps_ends_bb_p (q))
1909 q = 0;
1910 break;
1912 else if (! INSN_P (q))
1913 continue;
1915 s_length2++;
1916 if (reg_set_p (src, q))
1918 set2 = single_set (q);
1919 break;
1921 if (reg_overlap_mentioned_p (src, PATTERN (q)))
1923 q = 0;
1924 break;
1926 if (CALL_P (p))
1927 num_calls2++;
1929 if (q && set2 && SET_DEST (set2) == src && CONSTANT_P (SET_SRC (set2))
1930 && validate_change (insn, &SET_SRC (set), XEXP (note, 0), 0))
1932 delete_insn (q);
1933 REG_N_SETS (REGNO (src))--;
1934 REG_N_CALLS_CROSSED (REGNO (src)) -= num_calls2;
1935 REG_LIVE_LENGTH (REGNO (src)) -= s_length2;
1936 insn_const = 0;
1941 if ((HAVE_PRE_INCREMENT || HAVE_PRE_DECREMENT)
1942 && (code == PLUS || code == MINUS) && insn_const
1943 && try_auto_increment (p, insn, 0, src, insn_const, 1))
1944 insn = p;
1945 else if ((HAVE_POST_INCREMENT || HAVE_POST_DECREMENT)
1946 && post_inc
1947 && try_auto_increment (p, post_inc, post_inc_set, src, newconst, 0))
1948 post_inc = 0;
1949 /* If post_inc still prevails, try to find an
1950 insn where it can be used as a pre-in/decrement.
1951 If code is MINUS, this was already tried. */
1952 if (post_inc && code == PLUS
1953 /* Check that newconst is likely to be usable
1954 in a pre-in/decrement before starting the search. */
1955 && ((HAVE_PRE_INCREMENT && newconst > 0 && newconst <= MOVE_MAX)
1956 || (HAVE_PRE_DECREMENT && newconst < 0 && newconst >= -MOVE_MAX))
1957 && exact_log2 (newconst))
1959 rtx q, inc_dest;
1961 inc_dest = post_inc_set ? SET_DEST (post_inc_set) : src;
1962 for (q = post_inc; (q = NEXT_INSN (q)); )
1964 /* ??? We can't scan past the end of a basic block without updating
1965 the register lifetime info
1966 (REG_DEAD/basic_block_live_at_start). */
1967 if (perhaps_ends_bb_p (q))
1968 break;
1969 else if (! INSN_P (q))
1970 continue;
1971 else if (src != inc_dest
1972 && (reg_overlap_mentioned_p (src, PATTERN (q))
1973 || reg_set_p (src, q)))
1974 break;
1975 else if (reg_set_p (inc_dest, q))
1976 break;
1977 else if (reg_overlap_mentioned_p (inc_dest, PATTERN (q)))
1979 try_auto_increment (q, post_inc,
1980 post_inc_set, inc_dest, newconst, 1);
1981 break;
1986 /* Move the death note for DST to INSN if it is used
1987 there. */
1988 if (reg_overlap_mentioned_p (dst, PATTERN (insn)))
1990 XEXP (dst_note, 1) = REG_NOTES (insn);
1991 REG_NOTES (insn) = dst_note;
1994 if (src_note)
1996 /* Move the death note for SRC from INSN to P. */
1997 if (! overlap)
1998 remove_note (insn, src_note);
1999 XEXP (src_note, 1) = REG_NOTES (p);
2000 REG_NOTES (p) = src_note;
2002 REG_N_CALLS_CROSSED (REGNO (src)) += s_num_calls;
2005 REG_N_SETS (REGNO (src))++;
2006 REG_N_SETS (REGNO (dst))--;
2008 REG_N_CALLS_CROSSED (REGNO (dst)) -= num_calls;
2010 REG_LIVE_LENGTH (REGNO (src)) += s_length;
2011 if (REG_LIVE_LENGTH (REGNO (dst)) >= 0)
2013 REG_LIVE_LENGTH (REGNO (dst)) -= length;
2014 /* REG_LIVE_LENGTH is only an approximation after
2015 combine if sched is not run, so make sure that we
2016 still have a reasonable value. */
2017 if (REG_LIVE_LENGTH (REGNO (dst)) < 2)
2018 REG_LIVE_LENGTH (REGNO (dst)) = 2;
2020 if (dump_file)
2021 fprintf (dump_file,
2022 "Fixed operand %d of insn %d matching operand %d.\n",
2023 operand_number, INSN_UID (insn), match_number);
2024 return 1;
2028 /* Return nonzero if X is stable and mentions no registers except for
2029 mentioning SRC or mentioning / changing DST.  If in doubt, presume
2030 it is unstable.
2031 The rationale is that we want to check whether we can move an insn
2032 easily while just paying attention to SRC and DST. */
2033 static int
2034 stable_and_no_regs_but_for_p (rtx x, rtx src, rtx dst)
2036 RTX_CODE code = GET_CODE (x);
2037 switch (GET_RTX_CLASS (code))
2039 case RTX_UNARY:
2040 case RTX_BIN_ARITH:
2041 case RTX_COMM_ARITH:
2042 case RTX_COMPARE:
2043 case RTX_COMM_COMPARE:
2044 case RTX_TERNARY:
2045 case RTX_BITFIELD_OPS:
2047 int i;
2048 const char *fmt = GET_RTX_FORMAT (code);
2049 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2050 if (fmt[i] == 'e'
2051 && ! stable_and_no_regs_but_for_p (XEXP (x, i), src, dst))
2052 return 0;
2053 return 1;
2055 case RTX_OBJ:
2056 if (code == REG)
2057 return x == src || x == dst;
2058 /* If this is a MEM, look inside - there might be a register hidden in
2059 the address of an unchanging MEM. */
2060 if (code == MEM
2061 && ! stable_and_no_regs_but_for_p (XEXP (x, 0), src, dst))
2062 return 0;
2063 /* Fall through. */
2064 default:
2065 return ! rtx_unstable_p (x);
2069 /* Track stack adjustments and stack memory references. Attempt to
2070 reduce the number of stack adjustments by back-propagating across
2071 the memory references.
2073 This is intended primarily for use with targets that do not define
2074 ACCUMULATE_OUTGOING_ARGS.  It is of significantly more value to
2075 targets that define PREFERRED_STACK_BOUNDARY to be more aligned than
2076 STACK_BOUNDARY (e.g. x86), or on which not all registers can be
2077 pushed (e.g. x86 fp regs), since such pushes would ordinarily have to
2078 be implemented as a sub/mov pair due to restrictions in calls.c.
2080 Propagation stops when any of the insns that need adjusting are
2081 (a) no longer valid because we've exceeded their range, (b) a
2082 non-trivial push instruction, or (c) a call instruction.
2084 Restriction (b) is based on the assumption that push instructions
2085 are smaller or faster.  If a port really wants to remove all
2086 pushes, it should have defined ACCUMULATE_OUTGOING_ARGS.  The
2087 one exception made is for an add immediately followed
2088 by a push (sketched below). */
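/* For instance (an illustrative sketch, assuming a downward-growing
   stack), the add-followed-by-push sequence
	sp := sp + 4			(deallocate 4 bytes)
	mem[pre_dec (sp)] := x		(push 4 bytes)
   can be turned into the single direct store
	mem[sp] := x
   with the deallocation deleted; see the predecrement handling in
   combine_stack_adjustments_for_block below.  */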
2090 /* This structure records stack memory references between stack adjusting
2091 instructions. */
2093 struct csa_memlist
2095 HOST_WIDE_INT sp_offset;
2096 rtx insn, *mem;
2097 struct csa_memlist *next;
2100 static int stack_memref_p (rtx);
2101 static rtx single_set_for_csa (rtx);
2102 static void free_csa_memlist (struct csa_memlist *);
2103 static struct csa_memlist *record_one_stack_memref (rtx, rtx *,
2104 struct csa_memlist *);
2105 static int try_apply_stack_adjustment (rtx, struct csa_memlist *,
2106 HOST_WIDE_INT, HOST_WIDE_INT);
2107 static void combine_stack_adjustments_for_block (basic_block);
2108 static int record_stack_memrefs (rtx *, void *);
2111 /* Main entry point for stack adjustment combination. */
2113 static void
2114 combine_stack_adjustments (void)
2116 basic_block bb;
2118 FOR_EACH_BB (bb)
2119 combine_stack_adjustments_for_block (bb);
2122 /* Recognize a MEM of the form (sp) or (plus sp const). */
2124 static int
2125 stack_memref_p (rtx x)
2127 if (!MEM_P (x))
2128 return 0;
2129 x = XEXP (x, 0);
2131 if (x == stack_pointer_rtx)
2132 return 1;
2133 if (GET_CODE (x) == PLUS
2134 && XEXP (x, 0) == stack_pointer_rtx
2135 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2136 return 1;
2138 return 0;
2141 /* Recognize either normal single_set or the hack in i386.md for
2142 tying fp and sp adjustments. */
2144 static rtx
2145 single_set_for_csa (rtx insn)
2147 int i;
2148 rtx tmp = single_set (insn);
2149 if (tmp)
2150 return tmp;
2152 if (!NONJUMP_INSN_P (insn)
2153 || GET_CODE (PATTERN (insn)) != PARALLEL)
2154 return NULL_RTX;
2156 tmp = PATTERN (insn);
2157 if (GET_CODE (XVECEXP (tmp, 0, 0)) != SET)
2158 return NULL_RTX;
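/* An illustrative sketch of the PARALLEL shape accepted below: the first
   element is the SET we return, and every remaining element must be a
   no-op SET, a CLOBBER or a USE, e.g.
	(parallel [(set (reg sp) (plus (reg sp) (const_int -4)))
		   (set (reg bp) (reg bp))
		   (clobber (reg cc))])  */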
2160 for (i = 1; i < XVECLEN (tmp, 0); ++i)
2162 rtx this = XVECEXP (tmp, 0, i);
2164 /* The special case is allowing a no-op set. */
2165 if (GET_CODE (this) == SET
2166 && SET_SRC (this) == SET_DEST (this))
2168 else if (GET_CODE (this) != CLOBBER
2169 && GET_CODE (this) != USE)
2170 return NULL_RTX;
2173 return XVECEXP (tmp, 0, 0);
2176 /* Free the list of csa_memlist nodes. */
2178 static void
2179 free_csa_memlist (struct csa_memlist *memlist)
2181 struct csa_memlist *next;
2182 for (; memlist ; memlist = next)
2184 next = memlist->next;
2185 free (memlist);
2189 /* Create a new csa_memlist node from the given memory reference.
2190 It is already known that the memory is stack_memref_p. */
2192 static struct csa_memlist *
2193 record_one_stack_memref (rtx insn, rtx *mem, struct csa_memlist *next_memlist)
2195 struct csa_memlist *ml;
2197 ml = XNEW (struct csa_memlist);
2199 if (XEXP (*mem, 0) == stack_pointer_rtx)
2200 ml->sp_offset = 0;
2201 else
2202 ml->sp_offset = INTVAL (XEXP (XEXP (*mem, 0), 1));
2204 ml->insn = insn;
2205 ml->mem = mem;
2206 ml->next = next_memlist;
2208 return ml;
2211 /* Attempt to change the stack adjusting insn INSN so that it adjusts by
2212 NEW_ADJUST, rewriting each memory in MEMLIST so that its stack pointer
offset is reduced by DELTA.  Return true on success. */
2214 static int
2215 try_apply_stack_adjustment (rtx insn, struct csa_memlist *memlist, HOST_WIDE_INT new_adjust,
2216 HOST_WIDE_INT delta)
2218 struct csa_memlist *ml;
2219 rtx set;
2221 set = single_set_for_csa (insn);
2222 validate_change (insn, &XEXP (SET_SRC (set), 1), GEN_INT (new_adjust), 1);
2224 for (ml = memlist; ml ; ml = ml->next)
2225 validate_change
2226 (ml->insn, ml->mem,
2227 replace_equiv_address_nv (*ml->mem,
2228 plus_constant (stack_pointer_rtx,
2229 ml->sp_offset - delta)), 1);
2231 if (apply_change_group ())
2233 /* Succeeded. Update our knowledge of the memory references. */
2234 for (ml = memlist; ml ; ml = ml->next)
2235 ml->sp_offset -= delta;
2237 return 1;
2239 else
2240 return 0;
2243 /* Called via for_each_rtx.  Record all stack memory references in
2244 the insn; return nonzero for any stack pointer use we cannot handle,
so that the caller gives up on the insn. */
2245 struct record_stack_memrefs_data
2247 rtx insn;
2248 struct csa_memlist *memlist;
2251 static int
2252 record_stack_memrefs (rtx *xp, void *data)
2254 rtx x = *xp;
2255 struct record_stack_memrefs_data *d =
2256 (struct record_stack_memrefs_data *) data;
2257 if (!x)
2258 return 0;
2259 switch (GET_CODE (x))
2261 case MEM:
2262 if (!reg_mentioned_p (stack_pointer_rtx, x))
2263 return -1;
2264 /* We cannot correctly handle all possible memrefs containing the
2265 stack pointer, so this check is necessary. */
2266 if (stack_memref_p (x))
2268 d->memlist = record_one_stack_memref (d->insn, xp, d->memlist);
2269 return -1;
2271 return 1;
2272 case REG:
2273 /* ??? We want to be able to handle non-memory stack pointer
2274 references later.  For now just discard all insns referring to the
2275 stack pointer outside mem expressions. We would probably
2276 want to teach validate_replace to simplify expressions first.
2278 We can't just compare with STACK_POINTER_RTX because the
2279 reference to the stack pointer might be in some other mode.
2280 In particular, an explicit clobber in an asm statement will
2281 result in a QImode clobber. */
2282 if (REGNO (x) == STACK_POINTER_REGNUM)
2283 return 1;
2284 break;
2285 default:
2286 break;
2288 return 0;
2291 /* Subroutine of combine_stack_adjustments, called for each basic block. */
2293 static void
2294 combine_stack_adjustments_for_block (basic_block bb)
2296 HOST_WIDE_INT last_sp_adjust = 0;
2297 rtx last_sp_set = NULL_RTX;
2298 struct csa_memlist *memlist = NULL;
2299 rtx insn, next, set;
2300 struct record_stack_memrefs_data data;
2301 bool end_of_block = false;
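/* Walk the insns of the block, remembering the last stack adjustment
   seen (last_sp_set / last_sp_adjust) and the stack memory references
   recorded since then (memlist).  */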
2303 for (insn = BB_HEAD (bb); !end_of_block ; insn = next)
2305 end_of_block = insn == BB_END (bb);
2306 next = NEXT_INSN (insn);
2308 if (! INSN_P (insn))
2309 continue;
2311 set = single_set_for_csa (insn);
2312 if (set)
2314 rtx dest = SET_DEST (set);
2315 rtx src = SET_SRC (set);
2317 /* Find constant additions to the stack pointer. */
2318 if (dest == stack_pointer_rtx
2319 && GET_CODE (src) == PLUS
2320 && XEXP (src, 0) == stack_pointer_rtx
2321 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2323 HOST_WIDE_INT this_adjust = INTVAL (XEXP (src, 1));
2325 /* If we've not seen an adjustment previously, record
2326 it now and continue. */
2327 if (! last_sp_set)
2329 last_sp_set = insn;
2330 last_sp_adjust = this_adjust;
2331 continue;
2334 /* If not all recorded memrefs can be adjusted, or the
2335 adjustment is now too large for a constant addition,
2336 we cannot merge the two stack adjustments.
2338 Also we need to be careful to not move stack pointer
2339 such that we create stack accesses outside the allocated
2340 area. We can combine an allocation into the first insn,
2341 or a deallocation into the second insn.  We cannot
2342 combine an allocation followed by a deallocation.
2344 The only somewhat frequent occurrence of the latter is when
2345 a function allocates a stack frame but does not use it.
2346 For this case, we would need to analyze the rtl stream to be
2347 sure that the allocated area is really unused. This means not
2348 only checking the memory references, but also all registers
2349 or global memory references possibly containing a stack
2350 frame address.
2352 Perhaps the best way to address this problem is to teach
2353 gcc not to allocate stack for objects never used. */
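/* For instance (an illustrative sketch, assuming STACK_GROWS_DOWNWARD),
   two consecutive allocations with a stack store between them
	sp := sp - 16
	mem[sp + 0] := x
	sp := sp - 8
   can be merged by try_apply_stack_adjustment into
	sp := sp - 24
	mem[sp + 8] := x
   with the second adjustment deleted.  */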
2355 /* Combine an allocation into the first instruction. */
2356 if (STACK_GROWS_DOWNWARD ? this_adjust <= 0 : this_adjust >= 0)
2358 if (try_apply_stack_adjustment (last_sp_set, memlist,
2359 last_sp_adjust + this_adjust,
2360 this_adjust))
2362 /* It worked! */
2363 delete_insn (insn);
2364 last_sp_adjust += this_adjust;
2365 continue;
2369 /* Otherwise we have a deallocation. Do not combine with
2370 a previous allocation. Combine into the second insn. */
2371 else if (STACK_GROWS_DOWNWARD
2372 ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
2374 if (try_apply_stack_adjustment (insn, memlist,
2375 last_sp_adjust + this_adjust,
2376 -last_sp_adjust))
2378 /* It worked! */
2379 delete_insn (last_sp_set);
2380 last_sp_set = insn;
2381 last_sp_adjust += this_adjust;
2382 free_csa_memlist (memlist);
2383 memlist = NULL;
2384 continue;
2388 /* Combination failed. Restart processing from here. If
2389 deallocation+allocation conspired to cancel, we can
2390 delete the old deallocation insn. */
2391 if (last_sp_set && last_sp_adjust == 0)
2392 delete_insn (last_sp_set);
2393 free_csa_memlist (memlist);
2394 memlist = NULL;
2395 last_sp_set = insn;
2396 last_sp_adjust = this_adjust;
2397 continue;
2400 /* Find a predecrement of exactly the previous adjustment and
2401 turn it into a direct store. Obviously we can't do this if
2402 there were any intervening uses of the stack pointer. */
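/* Two address forms are accepted here: a PRE_DEC of the stack pointer
   whose mode size equals the previous adjustment, or a PRE_MODIFY that
   adds exactly -last_sp_adjust to the stack pointer.  */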
2403 if (memlist == NULL
2404 && MEM_P (dest)
2405 && ((GET_CODE (XEXP (dest, 0)) == PRE_DEC
2406 && (last_sp_adjust
2407 == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest))))
2408 || (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY
2409 && GET_CODE (XEXP (XEXP (dest, 0), 1)) == PLUS
2410 && XEXP (XEXP (XEXP (dest, 0), 1), 0) == stack_pointer_rtx
2411 && (GET_CODE (XEXP (XEXP (XEXP (dest, 0), 1), 1))
2412 == CONST_INT)
2413 && (INTVAL (XEXP (XEXP (XEXP (dest, 0), 1), 1))
2414 == -last_sp_adjust)))
2415 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx
2416 && ! reg_mentioned_p (stack_pointer_rtx, src)
2417 && memory_address_p (GET_MODE (dest), stack_pointer_rtx)
2418 && validate_change (insn, &SET_DEST (set),
2419 replace_equiv_address (dest,
2420 stack_pointer_rtx),
2423 delete_insn (last_sp_set);
2424 free_csa_memlist (memlist);
2425 memlist = NULL;
2426 last_sp_set = NULL_RTX;
2427 last_sp_adjust = 0;
2428 continue;
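/* Record stack memory references in this insn for later adjustment.
   record_stack_memrefs makes for_each_rtx return nonzero when the stack
   pointer is used in a form we cannot handle; in that case we fall
   through to the checks below and stop combining.  */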
2432 data.insn = insn;
2433 data.memlist = memlist;
2434 if (!CALL_P (insn) && last_sp_set
2435 && !for_each_rtx (&PATTERN (insn), record_stack_memrefs, &data))
2437 memlist = data.memlist;
2438 continue;
2440 memlist = data.memlist;
2442 /* Otherwise, we were not able to process the instruction.
2443 Do not continue collecting data across such an insn. */
2444 if (last_sp_set
2445 && (CALL_P (insn)
2446 || reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))))
2448 if (last_sp_set && last_sp_adjust == 0)
2449 delete_insn (last_sp_set);
2450 free_csa_memlist (memlist);
2451 memlist = NULL;
2452 last_sp_set = NULL_RTX;
2453 last_sp_adjust = 0;
2457 if (last_sp_set && last_sp_adjust == 0)
2458 delete_insn (last_sp_set);
2460 if (memlist)
2461 free_csa_memlist (memlist);
2464 static bool
2465 gate_handle_regmove (void)
2467 return (optimize > 0 && flag_regmove);
2471 /* Register allocation pre-pass, to reduce the number of moves necessary
2472 for two-address machines. */
2473 static unsigned int
2474 rest_of_handle_regmove (void)
2476 regmove_optimize (get_insns (), max_reg_num ());
2477 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
2478 return 0;
2481 struct tree_opt_pass pass_regmove =
2483 "regmove", /* name */
2484 gate_handle_regmove, /* gate */
2485 rest_of_handle_regmove, /* execute */
2486 NULL, /* sub */
2487 NULL, /* next */
2488 0, /* static_pass_number */
2489 TV_REGMOVE, /* tv_id */
2490 0, /* properties_required */
2491 0, /* properties_provided */
2492 0, /* properties_destroyed */
2493 0, /* todo_flags_start */
2494 TODO_dump_func |
2495 TODO_ggc_collect, /* todo_flags_finish */
2496 'N' /* letter */
2500 static bool
2501 gate_handle_stack_adjustments (void)
2503 return (optimize > 0);
2506 static unsigned int
2507 rest_of_handle_stack_adjustments (void)
2509 life_analysis (PROP_POSTRELOAD);
2510 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE
2511 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
2513 /* This is kind of a heuristic. We need to run combine_stack_adjustments
2514 even for machines with possibly nonzero RETURN_POPS_ARGS
2515 and ACCUMULATE_OUTGOING_ARGS. We expect that only ports having
2516 push instructions will have popping returns. */
2517 #ifndef PUSH_ROUNDING
2518 if (!ACCUMULATE_OUTGOING_ARGS)
2519 #endif
2520 combine_stack_adjustments ();
2521 return 0;
2524 struct tree_opt_pass pass_stack_adjustments =
2526 "csa", /* name */
2527 gate_handle_stack_adjustments, /* gate */
2528 rest_of_handle_stack_adjustments, /* execute */
2529 NULL, /* sub */
2530 NULL, /* next */
2531 0, /* static_pass_number */
2532 0, /* tv_id */
2533 0, /* properties_required */
2534 0, /* properties_provided */
2535 0, /* properties_destroyed */
2536 0, /* todo_flags_start */
2537 TODO_dump_func |
2538 TODO_ggc_collect, /* todo_flags_finish */
2539 0 /* letter */