gcc/postreload.c
1 /* Perform simple optimizations to clean up the result of reload.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "predict.h"
28 #include "df.h"
29 #include "tm_p.h"
30 #include "optabs.h"
31 #include "regs.h"
32 #include "emit-rtl.h"
33 #include "recog.h"
35 #include "cfgrtl.h"
36 #include "cfgbuild.h"
37 #include "cfgcleanup.h"
38 #include "reload.h"
39 #include "cselib.h"
40 #include "tree-pass.h"
41 #include "dbgcnt.h"
43 #ifndef LOAD_EXTEND_OP
44 #define LOAD_EXTEND_OP(M) UNKNOWN
45 #endif
47 static int reload_cse_noop_set_p (rtx);
48 static bool reload_cse_simplify (rtx_insn *, rtx);
49 static void reload_cse_regs_1 (void);
50 static int reload_cse_simplify_set (rtx, rtx_insn *);
51 static int reload_cse_simplify_operands (rtx_insn *, rtx);
53 static void reload_combine (void);
54 static void reload_combine_note_use (rtx *, rtx_insn *, int, rtx);
55 static void reload_combine_note_store (rtx, const_rtx, void *);
57 static bool reload_cse_move2add (rtx_insn *);
58 static void move2add_note_store (rtx, const_rtx, void *);
60 /* Call cse / combine like post-reload optimization phases.
61 FIRST is the first instruction. */
63 static void
64 reload_cse_regs (rtx_insn *first ATTRIBUTE_UNUSED)
66 bool moves_converted;
67 reload_cse_regs_1 ();
68 reload_combine ();
69 moves_converted = reload_cse_move2add (first);
70 if (flag_expensive_optimizations)
72 if (moves_converted)
73 reload_combine ();
74 reload_cse_regs_1 ();
78 /* See whether a single set SET is a noop. */
79 static int
80 reload_cse_noop_set_p (rtx set)
82 if (cselib_reg_set_mode (SET_DEST (set)) != GET_MODE (SET_DEST (set)))
83 return 0;
85 return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
88 /* Try to simplify INSN. Return true if the CFG may have changed. */
89 static bool
90 reload_cse_simplify (rtx_insn *insn, rtx testreg)
92 rtx body = PATTERN (insn);
93 basic_block insn_bb = BLOCK_FOR_INSN (insn);
94 unsigned insn_bb_succs = EDGE_COUNT (insn_bb->succs);
96 if (GET_CODE (body) == SET)
98 int count = 0;
100 /* Simplify even if we may think it is a no-op.
101 We may think a memory load of a value smaller than WORD_SIZE
102 is redundant because we haven't taken into account possible
103 implicit extension. reload_cse_simplify_set() will bring
104 this out, so it's safer to simplify before we delete. */
105 count += reload_cse_simplify_set (body, insn);
107 if (!count && reload_cse_noop_set_p (body))
109 if (check_for_inc_dec (insn))
110 delete_insn_and_edges (insn);
111 /* We're done with this insn. */
112 goto done;
115 if (count > 0)
116 apply_change_group ();
117 else
118 reload_cse_simplify_operands (insn, testreg);
120 else if (GET_CODE (body) == PARALLEL)
122 int i;
123 int count = 0;
124 rtx value = NULL_RTX;
126 /* Registers mentioned in the clobber list for an asm cannot be reused
127 within the body of the asm. Invalidate those registers now so that
128 we don't try to substitute values for them. */
129 if (asm_noperands (body) >= 0)
131 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
133 rtx part = XVECEXP (body, 0, i);
134 if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
135 cselib_invalidate_rtx (XEXP (part, 0));
139 /* If every action in a PARALLEL is a noop, we can delete
140 the entire PARALLEL. */
141 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
143 rtx part = XVECEXP (body, 0, i);
144 if (GET_CODE (part) == SET)
146 if (! reload_cse_noop_set_p (part))
147 break;
148 if (REG_P (SET_DEST (part))
149 && REG_FUNCTION_VALUE_P (SET_DEST (part)))
151 if (value)
152 break;
153 value = SET_DEST (part);
156 else if (GET_CODE (part) != CLOBBER
157 && GET_CODE (part) != USE)
158 break;
161 if (i < 0)
163 if (check_for_inc_dec (insn))
164 delete_insn_and_edges (insn);
165 /* We're done with this insn. */
166 goto done;
169 /* It's not a no-op, but we can try to simplify it. */
170 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
171 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
172 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
174 if (count > 0)
175 apply_change_group ();
176 else
177 reload_cse_simplify_operands (insn, testreg);
180 done:
181 return (EDGE_COUNT (insn_bb->succs) != insn_bb_succs);
184 /* Do a very simple CSE pass over the hard registers.
186 This function detects no-op moves where we happened to assign two
187 different pseudo-registers to the same hard register, and then
188 copied one to the other. Reload will generate a useless
189 instruction copying a register to itself.
191 This function also detects cases where we load a value from memory
192 into two different registers, and (if memory is more expensive than
193 registers) changes it to simply copy the first register into the
194 second register.
196 Another optimization is performed that scans the operands of each
197 instruction to see whether the value is already available in a
198 hard register. It then replaces the operand with the hard register
199 if possible, much like an optional reload would. */
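/* Illustrative sketch (not part of the original source; register numbers
   are hypothetical).  A no-op move left behind by reload, e.g.
       (set (reg:SI 1) (reg:SI 1))
   is deleted, and a repeated load such as
       (set (reg:SI 1) (mem:SI (reg:SI 2)))
       ...
       (set (reg:SI 3) (mem:SI (reg:SI 2)))
   may have its second load turned into
       (set (reg:SI 3) (reg:SI 1))
   when the register copy is cheaper than the memory access.  */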
201 static void
202 reload_cse_regs_1 (void)
204 bool cfg_changed = false;
205 basic_block bb;
206 rtx_insn *insn;
207 rtx testreg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
209 cselib_init (CSELIB_RECORD_MEMORY);
210 init_alias_analysis ();
212 FOR_EACH_BB_FN (bb, cfun)
213 FOR_BB_INSNS (bb, insn)
215 if (INSN_P (insn))
216 cfg_changed |= reload_cse_simplify (insn, testreg);
218 cselib_process_insn (insn);
221 /* Clean up. */
222 end_alias_analysis ();
223 cselib_finish ();
224 if (cfg_changed)
225 cleanup_cfg (0);
228 /* Try to simplify a single SET instruction. SET is the set pattern.
229 INSN is the instruction it came from.
230 This function only handles one case: if we set a register to a value
231 which is not a register, we try to find that value in some other register
232 and change the set into a register copy. */
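/* Illustrative sketch (hypothetical RTL, not from the original source).
   If cselib knows that hard register 2 already holds the value loaded by
       (set (reg:SI 1) (mem:SI (reg:SI 3)))
   the insn may be changed into the register copy
       (set (reg:SI 1) (reg:SI 2))
   provided the copy is no more expensive than the original source.  */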
234 static int
235 reload_cse_simplify_set (rtx set, rtx_insn *insn)
237 int did_change = 0;
238 int dreg;
239 rtx src;
240 reg_class_t dclass;
241 int old_cost;
242 cselib_val *val;
243 struct elt_loc_list *l;
244 enum rtx_code extend_op = UNKNOWN;
245 bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
247 dreg = true_regnum (SET_DEST (set));
248 if (dreg < 0)
249 return 0;
251 src = SET_SRC (set);
252 if (side_effects_p (src) || true_regnum (src) >= 0)
253 return 0;
255 dclass = REGNO_REG_CLASS (dreg);
257 /* When replacing a memory with a register, we need to honor assumptions
258 that combine made wrt the contents of sign bits. We'll do this by
259 generating an extend instruction instead of a reg->reg copy. Thus
260 the destination must be a register that we can widen. */
261 if (MEM_P (src)
262 && GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
263 && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != UNKNOWN
264 && !REG_P (SET_DEST (set)))
265 return 0;
267 val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
268 if (! val)
269 return 0;
271 /* If memory loads are cheaper than register copies, don't change them. */
272 if (MEM_P (src))
273 old_cost = memory_move_cost (GET_MODE (src), dclass, true);
274 else if (REG_P (src))
275 old_cost = register_move_cost (GET_MODE (src),
276 REGNO_REG_CLASS (REGNO (src)), dclass);
277 else
278 old_cost = set_src_cost (src, GET_MODE (SET_DEST (set)), speed);
280 for (l = val->locs; l; l = l->next)
282 rtx this_rtx = l->loc;
283 int this_cost;
285 if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
287 if (extend_op != UNKNOWN)
289 wide_int result;
291 if (!CONST_SCALAR_INT_P (this_rtx))
292 continue;
294 switch (extend_op)
296 case ZERO_EXTEND:
297 result = wide_int::from (std::make_pair (this_rtx,
298 GET_MODE (src)),
299 BITS_PER_WORD, UNSIGNED);
300 break;
301 case SIGN_EXTEND:
302 result = wide_int::from (std::make_pair (this_rtx,
303 GET_MODE (src)),
304 BITS_PER_WORD, SIGNED);
305 break;
306 default:
307 gcc_unreachable ();
309 this_rtx = immed_wide_int_const (result, word_mode);
312 this_cost = set_src_cost (this_rtx, GET_MODE (SET_DEST (set)), speed);
314 else if (REG_P (this_rtx))
316 if (extend_op != UNKNOWN)
318 this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
319 this_cost = set_src_cost (this_rtx, word_mode, speed);
321 else
322 this_cost = register_move_cost (GET_MODE (this_rtx),
323 REGNO_REG_CLASS (REGNO (this_rtx)),
324 dclass);
326 else
327 continue;
329 /* If equal costs, prefer registers over anything else. That
330 tends to lead to smaller instructions on some machines. */
331 if (this_cost < old_cost
332 || (this_cost == old_cost
333 && REG_P (this_rtx)
334 && !REG_P (SET_SRC (set))))
336 if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
337 && extend_op != UNKNOWN
338 #ifdef CANNOT_CHANGE_MODE_CLASS
339 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
340 word_mode,
341 REGNO_REG_CLASS (REGNO (SET_DEST (set))))
342 #endif
345 rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
346 ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
347 validate_change (insn, &SET_DEST (set), wide_dest, 1);
350 validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
351 old_cost = this_cost, did_change = 1;
355 return did_change;
358 /* Try to replace operands in INSN with equivalent values that are already
359 in registers. This can be viewed as optional reloading.
361 For each non-register operand in the insn, see if any hard regs are
362 known to be equivalent to that operand. Record the alternatives which
363 can accept these hard registers. Among all alternatives, select the
364 ones which are better or equal to the one currently matching, where
365 "better" is in terms of '?' and '!' constraints. Among the remaining
366 alternatives, select the one which replaces most operands with
367 hard registers. */
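/* Hypothetical example (not from the original source): if an insn has a
   constant operand such as (const_int 10) and some hard register is known
   to hold that value, an alternative that accepts a register in that
   position may be chosen and the constant replaced by the register, as
   long as that alternative is no worse (in terms of '?' and '!') than the
   one currently matching.  */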
369 static int
370 reload_cse_simplify_operands (rtx_insn *insn, rtx testreg)
372 int i, j;
374 /* For each operand, all registers that are equivalent to it. */
375 HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];
377 const char *constraints[MAX_RECOG_OPERANDS];
379 /* Vector recording how bad an alternative is. */
380 int *alternative_reject;
381 /* Vector recording how many registers can be introduced by choosing
382 this alternative. */
383 int *alternative_nregs;
384 /* Array of vectors recording, for each operand and each alternative,
385 which hard register to substitute, or -1 if the operand should be
386 left as it is. */
387 int *op_alt_regno[MAX_RECOG_OPERANDS];
388 /* Array of alternatives, sorted in order of decreasing desirability. */
389 int *alternative_order;
391 extract_constrain_insn (insn);
393 if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
394 return 0;
396 alternative_reject = XALLOCAVEC (int, recog_data.n_alternatives);
397 alternative_nregs = XALLOCAVEC (int, recog_data.n_alternatives);
398 alternative_order = XALLOCAVEC (int, recog_data.n_alternatives);
399 memset (alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
400 memset (alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));
402 /* For each operand, find out which regs are equivalent. */
403 for (i = 0; i < recog_data.n_operands; i++)
405 cselib_val *v;
406 struct elt_loc_list *l;
407 rtx op;
409 CLEAR_HARD_REG_SET (equiv_regs[i]);
411 /* cselib blows up on CODE_LABELs. Trying to fix that doesn't seem
412 right, so avoid the problem here. Likewise if we have a constant
413 and the insn pattern doesn't tell us the mode we need. */
414 if (LABEL_P (recog_data.operand[i])
415 || (CONSTANT_P (recog_data.operand[i])
416 && recog_data.operand_mode[i] == VOIDmode))
417 continue;
419 op = recog_data.operand[i];
420 if (MEM_P (op)
421 && GET_MODE_BITSIZE (GET_MODE (op)) < BITS_PER_WORD
422 && LOAD_EXTEND_OP (GET_MODE (op)) != UNKNOWN)
424 rtx set = single_set (insn);
426 /* We might have multiple sets, some of which do implicit
427 extension. Punt on this for now. */
428 if (! set)
429 continue;
430 /* If the destination is also a MEM or a STRICT_LOW_PART, no
431 extension applies.
432 Also, if there is an explicit extension, we don't have to
433 worry about an implicit one. */
434 else if (MEM_P (SET_DEST (set))
435 || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
436 || GET_CODE (SET_SRC (set)) == ZERO_EXTEND
437 || GET_CODE (SET_SRC (set)) == SIGN_EXTEND)
438 ; /* Continue ordinary processing. */
439 #ifdef CANNOT_CHANGE_MODE_CLASS
440 /* If the register cannot change mode to word_mode, it follows that
441 it cannot have been used in word_mode. */
442 else if (REG_P (SET_DEST (set))
443 && CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
444 word_mode,
445 REGNO_REG_CLASS (REGNO (SET_DEST (set)))))
446 ; /* Continue ordinary processing. */
447 #endif
448 /* If this is a straight load, make the extension explicit. */
449 else if (REG_P (SET_DEST (set))
450 && recog_data.n_operands == 2
451 && SET_SRC (set) == op
452 && SET_DEST (set) == recog_data.operand[1-i])
454 validate_change (insn, recog_data.operand_loc[i],
455 gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (op)),
456 word_mode, op),
457 1);
458 validate_change (insn, recog_data.operand_loc[1-i],
459 gen_rtx_REG (word_mode, REGNO (SET_DEST (set))),
460 1);
461 if (! apply_change_group ())
462 return 0;
463 return reload_cse_simplify_operands (insn, testreg);
465 else
466 /* ??? There might be arithmetic operations with memory that are
467 safe to optimize, but is it worth the trouble? */
468 continue;
471 if (side_effects_p (op))
472 continue;
473 v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
474 if (! v)
475 continue;
477 for (l = v->locs; l; l = l->next)
478 if (REG_P (l->loc))
479 SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
482 alternative_mask preferred = get_preferred_alternatives (insn);
483 for (i = 0; i < recog_data.n_operands; i++)
485 machine_mode mode;
486 int regno;
487 const char *p;
489 op_alt_regno[i] = XALLOCAVEC (int, recog_data.n_alternatives);
490 for (j = 0; j < recog_data.n_alternatives; j++)
491 op_alt_regno[i][j] = -1;
493 p = constraints[i] = recog_data.constraints[i];
494 mode = recog_data.operand_mode[i];
496 /* Add the reject values for each alternative given by the constraints
497 for this operand. */
498 j = 0;
499 while (*p != '\0')
501 char c = *p++;
502 if (c == ',')
503 j++;
504 else if (c == '?')
505 alternative_reject[j] += 3;
506 else if (c == '!')
507 alternative_reject[j] += 300;
510 /* We won't change operands which are already registers. We
511 also don't want to modify output operands. */
512 regno = true_regnum (recog_data.operand[i]);
513 if (regno >= 0
514 || constraints[i][0] == '='
515 || constraints[i][0] == '+')
516 continue;
518 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
520 enum reg_class rclass = NO_REGS;
522 if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
523 continue;
525 set_mode_and_regno (testreg, mode, regno);
527 /* We found a register equal to this operand. Now look for all
528 alternatives that can accept this register and have not been
529 assigned a register they can use yet. */
530 j = 0;
531 p = constraints[i];
532 for (;;)
534 char c = *p;
536 switch (c)
538 case 'g':
539 rclass = reg_class_subunion[rclass][GENERAL_REGS];
540 break;
542 default:
543 rclass
544 = (reg_class_subunion
545 [rclass]
546 [reg_class_for_constraint (lookup_constraint (p))]);
547 break;
549 case ',': case '\0':
550 /* See if REGNO fits this alternative, and set it up as the
551 replacement register if we don't have one for this
552 alternative yet and the operand being replaced is not
553 a cheap CONST_INT. */
554 if (op_alt_regno[i][j] == -1
555 && TEST_BIT (preferred, j)
556 && reg_fits_class_p (testreg, rclass, 0, mode)
557 && (!CONST_INT_P (recog_data.operand[i])
558 || (set_src_cost (recog_data.operand[i], mode,
559 optimize_bb_for_speed_p
560 (BLOCK_FOR_INSN (insn)))
561 > set_src_cost (testreg, mode,
562 optimize_bb_for_speed_p
563 (BLOCK_FOR_INSN (insn))))))
565 alternative_nregs[j]++;
566 op_alt_regno[i][j] = regno;
568 j++;
569 rclass = NO_REGS;
570 break;
572 p += CONSTRAINT_LEN (c, p);
574 if (c == '\0')
575 break;
580 /* Record all alternatives which are better or equal to the currently
581 matching one in the alternative_order array. */
582 for (i = j = 0; i < recog_data.n_alternatives; i++)
583 if (alternative_reject[i] <= alternative_reject[which_alternative])
584 alternative_order[j++] = i;
585 recog_data.n_alternatives = j;
587 /* Sort it. Given a small number of alternatives, a dumb algorithm
588 won't hurt too much. */
589 for (i = 0; i < recog_data.n_alternatives - 1; i++)
591 int best = i;
592 int best_reject = alternative_reject[alternative_order[i]];
593 int best_nregs = alternative_nregs[alternative_order[i]];
595 for (j = i + 1; j < recog_data.n_alternatives; j++)
597 int this_reject = alternative_reject[alternative_order[j]];
598 int this_nregs = alternative_nregs[alternative_order[j]];
600 if (this_reject < best_reject
601 || (this_reject == best_reject && this_nregs > best_nregs))
603 best = j;
604 best_reject = this_reject;
605 best_nregs = this_nregs;
609 std::swap (alternative_order[best], alternative_order[i]);
612 /* Substitute the operands as determined by op_alt_regno for the best
613 alternative. */
614 j = alternative_order[0];
616 for (i = 0; i < recog_data.n_operands; i++)
618 machine_mode mode = recog_data.operand_mode[i];
619 if (op_alt_regno[i][j] == -1)
620 continue;
622 validate_change (insn, recog_data.operand_loc[i],
623 gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
626 for (i = recog_data.n_dups - 1; i >= 0; i--)
628 int op = recog_data.dup_num[i];
629 machine_mode mode = recog_data.operand_mode[op];
631 if (op_alt_regno[op][j] == -1)
632 continue;
634 validate_change (insn, recog_data.dup_loc[i],
635 gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
638 return apply_change_group ();
641 /* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
642 addressing now.
643 This code might also be useful when reload gave up on reg+reg addressing
644 because of clashes between the return register and INDEX_REG_CLASS. */
646 /* The maximum number of uses of a register we can keep track of to
647 replace them with reg+reg addressing. */
648 #define RELOAD_COMBINE_MAX_USES 16
650 /* Describes a recorded use of a register. */
651 struct reg_use
653 /* The insn where a register has been used. */
654 rtx_insn *insn;
655 /* Points to the memory reference enclosing the use, if any, NULL_RTX
656 otherwise. */
657 rtx containing_mem;
658 /* Location of the register within INSN. */
659 rtx *usep;
660 /* The reverse uid of the insn. */
661 int ruid;
664 /* If the register is used in some unknown fashion, USE_INDEX is negative.
665 If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
666 indicates where it is first set or clobbered.
667 Otherwise, USE_INDEX is the index of the last encountered use of the
668 register (which is first among these we have seen since we scan backwards).
669 USE_RUID indicates the first encountered, i.e. last, of these uses.
670 If ALL_OFFSETS_MATCH is true, all encountered uses were inside a PLUS
671 with a constant offset; OFFSET contains this constant in that case.
672 STORE_RUID is always meaningful if we only want to use a value in a
673 register in a different place: it denotes the next insn in the insn
674 stream (i.e. the last encountered) that sets or clobbers the register.
675 REAL_STORE_RUID is similar, but clobbers are ignored when updating it. */
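/* Example of the bookkeeping above (not part of the original source): a
   register with three recorded uses has USE_INDEX equal to
   RELOAD_COMBINE_MAX_USES - 3, and the uses occupy
   reg_use[USE_INDEX] .. reg_use[RELOAD_COMBINE_MAX_USES - 1].  Recording a
   seventeenth use would decrement USE_INDEX to -1, i.e. the register is
   then treated as used in an unknown fashion.  */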
676 static struct
678 struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
679 rtx offset;
680 int use_index;
681 int store_ruid;
682 int real_store_ruid;
683 int use_ruid;
684 bool all_offsets_match;
685 } reg_state[FIRST_PSEUDO_REGISTER];
687 /* Reverse linear uid. This is increased in reload_combine while scanning
688 the instructions from last to first. It is used to set last_label_ruid
689 and the store_ruid / use_ruid fields in reg_state. */
690 static int reload_combine_ruid;
692 /* The RUID of the last label we encountered in reload_combine. */
693 static int last_label_ruid;
695 /* The RUID of the last jump we encountered in reload_combine. */
696 static int last_jump_ruid;
698 /* The register numbers of the first and last index register. A value of
699 -1 in LAST_INDEX_REG indicates that we've previously computed these
700 values and found no suitable index registers. */
701 static int first_index_reg = -1;
702 static int last_index_reg;
704 #define LABEL_LIVE(LABEL) \
705 (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
707 /* Subroutine of reload_combine_split_ruids, called to fix up a single
708 ruid pointed to by *PRUID if it is higher than SPLIT_RUID. */
710 static inline void
711 reload_combine_split_one_ruid (int *pruid, int split_ruid)
713 if (*pruid > split_ruid)
714 (*pruid)++;
717 /* Called when we insert a new insn in a position we've already passed in
718 the scan. Examine all our state, increasing all ruids that are higher
719 than SPLIT_RUID by one in order to make room for a new insn. */
721 static void
722 reload_combine_split_ruids (int split_ruid)
724 unsigned i;
726 reload_combine_split_one_ruid (&reload_combine_ruid, split_ruid);
727 reload_combine_split_one_ruid (&last_label_ruid, split_ruid);
728 reload_combine_split_one_ruid (&last_jump_ruid, split_ruid);
730 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
732 int j, idx = reg_state[i].use_index;
733 reload_combine_split_one_ruid (&reg_state[i].use_ruid, split_ruid);
734 reload_combine_split_one_ruid (&reg_state[i].store_ruid, split_ruid);
735 reload_combine_split_one_ruid (&reg_state[i].real_store_ruid,
736 split_ruid);
737 if (idx < 0)
738 continue;
739 for (j = idx; j < RELOAD_COMBINE_MAX_USES; j++)
741 reload_combine_split_one_ruid (&reg_state[i].reg_use[j].ruid,
742 split_ruid);
747 /* Called when we are about to rescan a previously encountered insn with
748 reload_combine_note_use after modifying some part of it. This clears all
749 information about uses in that particular insn. */
751 static void
752 reload_combine_purge_insn_uses (rtx_insn *insn)
754 unsigned i;
756 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
758 int j, k, idx = reg_state[i].use_index;
759 if (idx < 0)
760 continue;
761 j = k = RELOAD_COMBINE_MAX_USES;
762 while (j-- > idx)
764 if (reg_state[i].reg_use[j].insn != insn)
766 k--;
767 if (k != j)
768 reg_state[i].reg_use[k] = reg_state[i].reg_use[j];
771 reg_state[i].use_index = k;
775 /* Called when we need to forget about all uses of REGNO after an insn
776 which is identified by RUID. */
778 static void
779 reload_combine_purge_reg_uses_after_ruid (unsigned regno, int ruid)
781 int j, k, idx = reg_state[regno].use_index;
782 if (idx < 0)
783 return;
784 j = k = RELOAD_COMBINE_MAX_USES;
785 while (j-- > idx)
787 if (reg_state[regno].reg_use[j].ruid >= ruid)
789 k--;
790 if (k != j)
791 reg_state[regno].reg_use[k] = reg_state[regno].reg_use[j];
794 reg_state[regno].use_index = k;
797 /* Find the use of REGNO with the ruid that is highest among those
798 lower than RUID_LIMIT, and return it if it is the only use of this
799 reg in the insn. Return NULL otherwise. */
801 static struct reg_use *
802 reload_combine_closest_single_use (unsigned regno, int ruid_limit)
804 int i, best_ruid = 0;
805 int use_idx = reg_state[regno].use_index;
806 struct reg_use *retval;
808 if (use_idx < 0)
809 return NULL;
810 retval = NULL;
811 for (i = use_idx; i < RELOAD_COMBINE_MAX_USES; i++)
813 struct reg_use *use = reg_state[regno].reg_use + i;
814 int this_ruid = use->ruid;
815 if (this_ruid >= ruid_limit)
816 continue;
817 if (this_ruid > best_ruid)
819 best_ruid = this_ruid;
820 retval = use;
822 else if (this_ruid == best_ruid)
823 retval = NULL;
825 if (last_label_ruid >= best_ruid)
826 return NULL;
827 return retval;
830 /* After we've moved an add insn, fix up any debug insns that occur
831 between the old location of the add and the new location. REG is
832 the destination register of the add insn; REPLACEMENT is the
833 SET_SRC of the add. FROM and TO specify the range in which we
834 should make this change on debug insns. */
836 static void
837 fixup_debug_insns (rtx reg, rtx replacement, rtx_insn *from, rtx_insn *to)
839 rtx_insn *insn;
840 for (insn = from; insn != to; insn = NEXT_INSN (insn))
842 rtx t;
844 if (!DEBUG_INSN_P (insn))
845 continue;
847 t = INSN_VAR_LOCATION_LOC (insn);
848 t = simplify_replace_rtx (t, reg, replacement);
849 validate_change (insn, &INSN_VAR_LOCATION_LOC (insn), t, 0);
853 /* Subroutine of reload_combine_recognize_const_pattern. Try to replace REG
854 with SRC in the insn described by USE, taking costs into account. Return
855 true if we made the replacement. */
857 static bool
858 try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
860 rtx_insn *use_insn = use->insn;
861 rtx mem = use->containing_mem;
862 bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));
864 if (mem != NULL_RTX)
866 addr_space_t as = MEM_ADDR_SPACE (mem);
867 rtx oldaddr = XEXP (mem, 0);
868 rtx newaddr = NULL_RTX;
869 int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed);
870 int new_cost;
872 newaddr = simplify_replace_rtx (oldaddr, reg, src);
873 if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as))
875 XEXP (mem, 0) = newaddr;
876 new_cost = address_cost (newaddr, GET_MODE (mem), as, speed);
877 XEXP (mem, 0) = oldaddr;
878 if (new_cost <= old_cost
879 && validate_change (use_insn,
880 &XEXP (mem, 0), newaddr, 0))
881 return true;
884 else
886 rtx new_set = single_set (use_insn);
887 if (new_set
888 && REG_P (SET_DEST (new_set))
889 && GET_CODE (SET_SRC (new_set)) == PLUS
890 && REG_P (XEXP (SET_SRC (new_set), 0))
891 && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
893 rtx new_src;
894 machine_mode mode = GET_MODE (SET_DEST (new_set));
895 int old_cost = set_src_cost (SET_SRC (new_set), mode, speed);
897 gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
898 new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);
900 if (set_src_cost (new_src, mode, speed) <= old_cost
901 && validate_change (use_insn, &SET_SRC (new_set),
902 new_src, 0))
903 return true;
906 return false;
909 /* Called by reload_combine when scanning INSN. This function tries to detect
910 patterns where a constant is added to a register, and the result is used
911 in an address.
912 Return true if no further processing is needed on INSN; false if it wasn't
913 recognized and should be handled normally. */
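/* Illustrative sketch (hypothetical RTL, not from the original source).
   Given
       (set (reg:SI 1) (plus:SI (reg:SI 2) (const_int 4)))
       ...
       (set (reg:SI 3) (mem:SI (reg:SI 1)))
   the addition may be folded into the address when the new address is
   valid and no more expensive:
       ...
       (set (reg:SI 3) (mem:SI (plus:SI (reg:SI 2) (const_int 4))))
   When the addition has the form (set (reg X) (plus (reg X) (const)))
   and register X is still needed, the add insn is moved below the
   rewritten uses instead of being deleted.  */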
915 static bool
916 reload_combine_recognize_const_pattern (rtx_insn *insn)
918 int from_ruid = reload_combine_ruid;
919 rtx set, pat, reg, src, addreg;
920 unsigned int regno;
921 struct reg_use *use;
922 bool must_move_add;
923 rtx_insn *add_moved_after_insn = NULL;
924 int add_moved_after_ruid = 0;
925 int clobbered_regno = -1;
927 set = single_set (insn);
928 if (set == NULL_RTX)
929 return false;
931 reg = SET_DEST (set);
932 src = SET_SRC (set);
933 if (!REG_P (reg)
934 || REG_NREGS (reg) != 1
935 || GET_MODE (reg) != Pmode
936 || reg == stack_pointer_rtx)
937 return false;
939 regno = REGNO (reg);
941 /* We look for a REG1 = REG2 + CONSTANT insn, followed by either
942 uses of REG1 inside an address, or inside another add insn. If
943 possible and profitable, merge the addition into subsequent
944 uses. */
945 if (GET_CODE (src) != PLUS
946 || !REG_P (XEXP (src, 0))
947 || !CONSTANT_P (XEXP (src, 1)))
948 return false;
950 addreg = XEXP (src, 0);
951 must_move_add = rtx_equal_p (reg, addreg);
953 pat = PATTERN (insn);
954 if (must_move_add && set != pat)
956 /* We have to be careful when moving the add; apart from the
957 single_set there may also be clobbers. Recognize one special
958 case, that of one clobber alongside the set (likely a clobber
959 of the CC register). */
960 gcc_assert (GET_CODE (PATTERN (insn)) == PARALLEL);
961 if (XVECLEN (pat, 0) != 2 || XVECEXP (pat, 0, 0) != set
962 || GET_CODE (XVECEXP (pat, 0, 1)) != CLOBBER
963 || !REG_P (XEXP (XVECEXP (pat, 0, 1), 0)))
964 return false;
965 clobbered_regno = REGNO (XEXP (XVECEXP (pat, 0, 1), 0));
966 }
968 do
969 {
970 use = reload_combine_closest_single_use (regno, from_ruid);
972 if (use)
973 /* Start the search for the next use from here. */
974 from_ruid = use->ruid;
976 if (use && GET_MODE (*use->usep) == Pmode)
978 bool delete_add = false;
979 rtx_insn *use_insn = use->insn;
980 int use_ruid = use->ruid;
982 /* Avoid moving the add insn past a jump. */
983 if (must_move_add && use_ruid <= last_jump_ruid)
984 break;
986 /* If the add clobbers another hard reg in parallel, don't move
987 it past a real set of this hard reg. */
988 if (must_move_add && clobbered_regno >= 0
989 && reg_state[clobbered_regno].real_store_ruid >= use_ruid)
990 break;
992 /* Do not separate cc0 setter and cc0 user on HAVE_cc0 targets. */
993 if (HAVE_cc0 && must_move_add && sets_cc0_p (PATTERN (use_insn)))
994 break;
996 gcc_assert (reg_state[regno].store_ruid <= use_ruid);
997 /* Avoid moving a use of ADDREG past a point where it is stored. */
998 if (reg_state[REGNO (addreg)].store_ruid > use_ruid)
999 break;
1001 /* We also must not move the addition past an insn that sets
1002 the same register, unless we can combine two add insns. */
1003 if (must_move_add && reg_state[regno].store_ruid == use_ruid)
1005 if (use->containing_mem == NULL_RTX)
1006 delete_add = true;
1007 else
1008 break;
1011 if (try_replace_in_use (use, reg, src))
1013 reload_combine_purge_insn_uses (use_insn);
1014 reload_combine_note_use (&PATTERN (use_insn), use_insn,
1015 use_ruid, NULL_RTX);
1017 if (delete_add)
1019 fixup_debug_insns (reg, src, insn, use_insn);
1020 delete_insn (insn);
1021 return true;
1023 if (must_move_add)
1025 add_moved_after_insn = use_insn;
1026 add_moved_after_ruid = use_ruid;
1028 continue;
1031 /* If we get here, we couldn't handle this use. */
1032 if (must_move_add)
1033 break;
1035 while (use);
1037 if (!must_move_add || add_moved_after_insn == NULL_RTX)
1038 /* Process the add normally. */
1039 return false;
1041 fixup_debug_insns (reg, src, insn, add_moved_after_insn);
1043 reorder_insns (insn, insn, add_moved_after_insn);
1044 reload_combine_purge_reg_uses_after_ruid (regno, add_moved_after_ruid);
1045 reload_combine_split_ruids (add_moved_after_ruid - 1);
1046 reload_combine_note_use (&PATTERN (insn), insn,
1047 add_moved_after_ruid, NULL_RTX);
1048 reg_state[regno].store_ruid = add_moved_after_ruid;
1050 return true;
1053 /* Called by reload_combine when scanning INSN. Try to detect a pattern we
1054 can handle and improve. Return true if no further processing is needed on
1055 INSN; false if it wasn't recognized and should be handled normally. */
1057 static bool
1058 reload_combine_recognize_pattern (rtx_insn *insn)
1060 rtx set, reg, src;
1062 set = single_set (insn);
1063 if (set == NULL_RTX)
1064 return false;
1066 reg = SET_DEST (set);
1067 src = SET_SRC (set);
1068 if (!REG_P (reg) || REG_NREGS (reg) != 1)
1069 return false;
1071 unsigned int regno = REGNO (reg);
1072 machine_mode mode = GET_MODE (reg);
1074 if (reg_state[regno].use_index < 0
1075 || reg_state[regno].use_index >= RELOAD_COMBINE_MAX_USES)
1076 return false;
1078 for (int i = reg_state[regno].use_index;
1079 i < RELOAD_COMBINE_MAX_USES; i++)
1081 struct reg_use *use = reg_state[regno].reg_use + i;
1082 if (GET_MODE (*use->usep) != mode)
1083 return false;
1086 /* Look for (set (REGX) (CONST_INT))
1087 (set (REGX) (PLUS (REGX) (REGY)))
1089 ... (MEM (REGX)) ...
1090 and convert it to
1091 (set (REGZ) (CONST_INT))
1093 ... (MEM (PLUS (REGZ) (REGY)))... .
1095 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
1096 and that we know all uses of REGX before it dies.
1097 Also, explicitly check that REGX != REGY; our life information
1098 does not yet show whether REGY changes in this insn. */
1100 if (GET_CODE (src) == PLUS
1101 && reg_state[regno].all_offsets_match
1102 && last_index_reg != -1
1103 && REG_P (XEXP (src, 1))
1104 && rtx_equal_p (XEXP (src, 0), reg)
1105 && !rtx_equal_p (XEXP (src, 1), reg)
1106 && last_label_ruid < reg_state[regno].use_ruid)
1108 rtx base = XEXP (src, 1);
1109 rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
1110 rtx prev_set = prev ? single_set (prev) : NULL_RTX;
1111 rtx index_reg = NULL_RTX;
1112 rtx reg_sum = NULL_RTX;
1113 int i;
1115 /* Now we need to set INDEX_REG to an index register (denoted as
1116 REGZ in the illustration above) and REG_SUM to the expression
1117 register+register that we want to use to substitute uses of REG
1118 (typically in MEMs) with. First check REG and BASE for being
1119 index registers; we can use them even if they are not dead. */
1120 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
1121 || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
1122 REGNO (base)))
1124 index_reg = reg;
1125 reg_sum = src;
1127 else
1129 /* Otherwise, look for a free index register. Since we have
1130 checked above that neither REG nor BASE are index registers,
1131 if we find anything at all, it will be different from these
1132 two registers. */
1133 for (i = first_index_reg; i <= last_index_reg; i++)
1135 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
1136 && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
1137 && reg_state[i].store_ruid <= reg_state[regno].use_ruid
1138 && (call_used_regs[i] || df_regs_ever_live_p (i))
1139 && (!frame_pointer_needed || i != HARD_FRAME_POINTER_REGNUM)
1140 && !fixed_regs[i] && !global_regs[i]
1141 && hard_regno_nregs[i][GET_MODE (reg)] == 1
1142 && targetm.hard_regno_scratch_ok (i))
1144 index_reg = gen_rtx_REG (GET_MODE (reg), i);
1145 reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
1146 break;
1151 /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
1152 (REGY), i.e. BASE, is not clobbered before the last use we'll
1153 create. */
1154 if (reg_sum
1155 && prev_set
1156 && CONST_INT_P (SET_SRC (prev_set))
1157 && rtx_equal_p (SET_DEST (prev_set), reg)
1158 && (reg_state[REGNO (base)].store_ruid
1159 <= reg_state[regno].use_ruid))
1161 /* Change destination register and, if necessary, the constant
1162 value in PREV, the constant loading instruction. */
1163 validate_change (prev, &SET_DEST (prev_set), index_reg, 1);
1164 if (reg_state[regno].offset != const0_rtx)
1165 validate_change (prev,
1166 &SET_SRC (prev_set),
1167 GEN_INT (INTVAL (SET_SRC (prev_set))
1168 + INTVAL (reg_state[regno].offset)),
1169 1);
1171 /* Now for every use of REG that we have recorded, replace REG
1172 with REG_SUM. */
1173 for (i = reg_state[regno].use_index;
1174 i < RELOAD_COMBINE_MAX_USES; i++)
1175 validate_unshare_change (reg_state[regno].reg_use[i].insn,
1176 reg_state[regno].reg_use[i].usep,
1177 /* Each change must have its own
1178 replacement. */
1179 reg_sum, 1);
1181 if (apply_change_group ())
1183 struct reg_use *lowest_ruid = NULL;
1185 /* For every new use of REG_SUM, we have to record the use
1186 of BASE therein, i.e. operand 1. */
1187 for (i = reg_state[regno].use_index;
1188 i < RELOAD_COMBINE_MAX_USES; i++)
1190 struct reg_use *use = reg_state[regno].reg_use + i;
1191 reload_combine_note_use (&XEXP (*use->usep, 1), use->insn,
1192 use->ruid, use->containing_mem);
1193 if (lowest_ruid == NULL || use->ruid < lowest_ruid->ruid)
1194 lowest_ruid = use;
1197 fixup_debug_insns (reg, reg_sum, insn, lowest_ruid->insn);
1199 /* Delete the reg-reg addition. */
1200 delete_insn (insn);
1202 if (reg_state[regno].offset != const0_rtx)
1203 /* Previous REG_EQUIV / REG_EQUAL notes for PREV
1204 are now invalid. */
1205 remove_reg_equal_equiv_notes (prev);
1207 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
1208 return true;
1212 return false;
1215 static void
1216 reload_combine (void)
1218 rtx_insn *insn, *prev;
1219 basic_block bb;
1220 unsigned int r;
1221 int min_labelno, n_labels;
1222 HARD_REG_SET ever_live_at_start, *label_live;
1224 /* To avoid wasting too much time later searching for an index register,
1225 determine the minimum and maximum index register numbers. */
1226 if (INDEX_REG_CLASS == NO_REGS)
1227 last_index_reg = -1;
1228 else if (first_index_reg == -1 && last_index_reg == 0)
1230 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
1231 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
1233 if (first_index_reg == -1)
1234 first_index_reg = r;
1236 last_index_reg = r;
1239 /* If no index register is available, we can quit now. Set LAST_INDEX_REG
1240 to -1 so we'll know to quit early the next time we get here. */
1241 if (first_index_reg == -1)
1243 last_index_reg = -1;
1244 return;
1248 /* Set up LABEL_LIVE and EVER_LIVE_AT_START. The register lifetime
1249 information is a bit fuzzy immediately after reload, but it's
1250 still good enough to determine which registers are live at a jump
1251 destination. */
1252 min_labelno = get_first_label_num ();
1253 n_labels = max_label_num () - min_labelno;
1254 label_live = XNEWVEC (HARD_REG_SET, n_labels);
1255 CLEAR_HARD_REG_SET (ever_live_at_start);
1257 FOR_EACH_BB_REVERSE_FN (bb, cfun)
1259 insn = BB_HEAD (bb);
1260 if (LABEL_P (insn))
1262 HARD_REG_SET live;
1263 bitmap live_in = df_get_live_in (bb);
1265 REG_SET_TO_HARD_REG_SET (live, live_in);
1266 compute_use_by_pseudos (&live, live_in);
1267 COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
1268 IOR_HARD_REG_SET (ever_live_at_start, live);
1272 /* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
1273 last_label_ruid = last_jump_ruid = reload_combine_ruid = 0;
1274 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
1276 reg_state[r].store_ruid = 0;
1277 reg_state[r].real_store_ruid = 0;
1278 if (fixed_regs[r])
1279 reg_state[r].use_index = -1;
1280 else
1281 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
1284 for (insn = get_last_insn (); insn; insn = prev)
1286 bool control_flow_insn;
1287 rtx note;
1289 prev = PREV_INSN (insn);
1291 /* We cannot do our optimization across labels. Invalidating all the use
1292 information we have would be costly, so we just note where the label
1293 is and then later disable any optimization that would cross it. */
1294 if (LABEL_P (insn))
1295 last_label_ruid = reload_combine_ruid;
1296 else if (BARRIER_P (insn))
1298 /* Crossing a barrier resets all the use information. */
1299 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
1300 if (! fixed_regs[r])
1301 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
1303 else if (INSN_P (insn) && volatile_insn_p (PATTERN (insn)))
1304 /* Optimizations across insns being marked as volatile must be
1305 prevented. All the usage information is invalidated
1306 here. */
1307 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
1308 if (! fixed_regs[r]
1309 && reg_state[r].use_index != RELOAD_COMBINE_MAX_USES)
1310 reg_state[r].use_index = -1;
1312 if (! NONDEBUG_INSN_P (insn))
1313 continue;
1315 reload_combine_ruid++;
1317 control_flow_insn = control_flow_insn_p (insn);
1318 if (control_flow_insn)
1319 last_jump_ruid = reload_combine_ruid;
1321 if (reload_combine_recognize_const_pattern (insn)
1322 || reload_combine_recognize_pattern (insn))
1323 continue;
1325 note_stores (PATTERN (insn), reload_combine_note_store, NULL);
1327 if (CALL_P (insn))
1329 rtx link;
1330 HARD_REG_SET used_regs;
1332 get_call_reg_set_usage (insn, &used_regs, call_used_reg_set);
1334 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
1335 if (TEST_HARD_REG_BIT (used_regs, r))
1337 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
1338 reg_state[r].store_ruid = reload_combine_ruid;
1341 for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
1342 link = XEXP (link, 1))
1344 rtx setuse = XEXP (link, 0);
1345 rtx usage_rtx = XEXP (setuse, 0);
1346 if ((GET_CODE (setuse) == USE || GET_CODE (setuse) == CLOBBER)
1347 && REG_P (usage_rtx))
1349 unsigned int end_regno = END_REGNO (usage_rtx);
1350 for (unsigned int i = REGNO (usage_rtx); i < end_regno; ++i)
1351 if (GET_CODE (XEXP (link, 0)) == CLOBBER)
1353 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
1354 reg_state[i].store_ruid = reload_combine_ruid;
1356 else
1357 reg_state[i].use_index = -1;
1362 if (control_flow_insn && !ANY_RETURN_P (PATTERN (insn)))
1364 /* Non-spill registers might be used at the call destination in
1365 some unknown fashion, so we have to mark the unknown use. */
1366 HARD_REG_SET *live;
1368 if ((condjump_p (insn) || condjump_in_parallel_p (insn))
1369 && JUMP_LABEL (insn))
1371 if (ANY_RETURN_P (JUMP_LABEL (insn)))
1372 live = NULL;
1373 else
1374 live = &LABEL_LIVE (JUMP_LABEL (insn));
1376 else
1377 live = &ever_live_at_start;
1379 if (live)
1380 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
1381 if (TEST_HARD_REG_BIT (*live, r))
1382 reg_state[r].use_index = -1;
1385 reload_combine_note_use (&PATTERN (insn), insn, reload_combine_ruid,
1386 NULL_RTX);
1388 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1390 if (REG_NOTE_KIND (note) == REG_INC && REG_P (XEXP (note, 0)))
1392 int regno = REGNO (XEXP (note, 0));
1393 reg_state[regno].store_ruid = reload_combine_ruid;
1394 reg_state[regno].real_store_ruid = reload_combine_ruid;
1395 reg_state[regno].use_index = -1;
1400 free (label_live);
1403 /* Check if DST is a register or a subreg of a register; if it is,
1404 update store_ruid, real_store_ruid and use_index in the reg_state
1405 structure accordingly. Called via note_stores from reload_combine. */
1407 static void
1408 reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
1410 int regno = 0;
1411 int i;
1412 machine_mode mode = GET_MODE (dst);
1414 if (GET_CODE (dst) == SUBREG)
1416 regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
1417 GET_MODE (SUBREG_REG (dst)),
1418 SUBREG_BYTE (dst),
1419 GET_MODE (dst));
1420 dst = SUBREG_REG (dst);
1423 /* Some targets do argument pushes without adding REG_INC notes. */
1425 if (MEM_P (dst))
1427 dst = XEXP (dst, 0);
1428 if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
1429 || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC
1430 || GET_CODE (dst) == PRE_MODIFY || GET_CODE (dst) == POST_MODIFY)
1432 unsigned int end_regno = END_REGNO (XEXP (dst, 0));
1433 for (unsigned int i = REGNO (XEXP (dst, 0)); i < end_regno; ++i)
1435 /* We could probably do better, but for now mark the register
1436 as used in an unknown fashion and set/clobbered at this
1437 insn. */
1438 reg_state[i].use_index = -1;
1439 reg_state[i].store_ruid = reload_combine_ruid;
1440 reg_state[i].real_store_ruid = reload_combine_ruid;
1443 else
1444 return;
1447 if (!REG_P (dst))
1448 return;
1449 regno += REGNO (dst);
1451 /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
1452 careful with registers / register parts that are not full words.
1453 Similarly for ZERO_EXTRACT. */
1454 if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
1455 || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
1457 for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
1459 reg_state[i].use_index = -1;
1460 reg_state[i].store_ruid = reload_combine_ruid;
1461 reg_state[i].real_store_ruid = reload_combine_ruid;
1464 else
1466 for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
1468 reg_state[i].store_ruid = reload_combine_ruid;
1469 if (GET_CODE (set) == SET)
1470 reg_state[i].real_store_ruid = reload_combine_ruid;
1471 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
1476 /* XP points to a piece of rtl that has to be checked for any uses of
1477 registers.
1478 *XP is the pattern of INSN, or a part of it.
1479 Called from reload_combine, and recursively by itself. */
1480 static void
1481 reload_combine_note_use (rtx *xp, rtx_insn *insn, int ruid, rtx containing_mem)
1483 rtx x = *xp;
1484 enum rtx_code code = x->code;
1485 const char *fmt;
1486 int i, j;
1487 rtx offset = const0_rtx; /* For the REG case below. */
1489 switch (code)
1491 case SET:
1492 if (REG_P (SET_DEST (x)))
1494 reload_combine_note_use (&SET_SRC (x), insn, ruid, NULL_RTX);
1495 return;
1497 break;
1499 case USE:
1500 /* If this is the USE of a return value, we can't change it. */
1501 if (REG_P (XEXP (x, 0)) && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
1503 /* Mark the return register as used in an unknown fashion. */
1504 rtx reg = XEXP (x, 0);
1505 unsigned int end_regno = END_REGNO (reg);
1506 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
1507 reg_state[regno].use_index = -1;
1508 return;
1510 break;
1512 case CLOBBER:
1513 if (REG_P (SET_DEST (x)))
1515 /* No spurious CLOBBERs of pseudo registers may remain. */
1516 gcc_assert (REGNO (SET_DEST (x)) < FIRST_PSEUDO_REGISTER);
1517 return;
1519 break;
1521 case PLUS:
1522 /* We are interested in (plus (reg) (const_int)) . */
1523 if (!REG_P (XEXP (x, 0))
1524 || !CONST_INT_P (XEXP (x, 1)))
1525 break;
1526 offset = XEXP (x, 1);
1527 x = XEXP (x, 0);
1528 /* Fall through. */
1529 case REG:
1531 int regno = REGNO (x);
1532 int use_index;
1533 int nregs;
1535 /* No spurious USEs of pseudo registers may remain. */
1536 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
1538 nregs = REG_NREGS (x);
1540 /* We can't substitute into multi-hard-reg uses. */
1541 if (nregs > 1)
1543 while (--nregs >= 0)
1544 reg_state[regno + nregs].use_index = -1;
1545 return;
1548 /* We may be called to update uses in previously seen insns.
1549 Don't add uses beyond the last store we saw. */
1550 if (ruid < reg_state[regno].store_ruid)
1551 return;
1553 /* If this register is already used in some unknown fashion, we
1554 can't do anything.
1555 If we decrement the index from zero to -1, we can't store more
1556 uses, so this register becomes used in an unknown fashion. */
1557 use_index = --reg_state[regno].use_index;
1558 if (use_index < 0)
1559 return;
1561 if (use_index == RELOAD_COMBINE_MAX_USES - 1)
1563 /* This is the first use of this register we have seen since we
1564 marked it as dead. */
1565 reg_state[regno].offset = offset;
1566 reg_state[regno].all_offsets_match = true;
1567 reg_state[regno].use_ruid = ruid;
1569 else
1571 if (reg_state[regno].use_ruid > ruid)
1572 reg_state[regno].use_ruid = ruid;
1574 if (! rtx_equal_p (offset, reg_state[regno].offset))
1575 reg_state[regno].all_offsets_match = false;
1578 reg_state[regno].reg_use[use_index].insn = insn;
1579 reg_state[regno].reg_use[use_index].ruid = ruid;
1580 reg_state[regno].reg_use[use_index].containing_mem = containing_mem;
1581 reg_state[regno].reg_use[use_index].usep = xp;
1582 return;
1585 case MEM:
1586 containing_mem = x;
1587 break;
1589 default:
1590 break;
1593 /* Recursively process the components of X. */
1594 fmt = GET_RTX_FORMAT (code);
1595 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1597 if (fmt[i] == 'e')
1598 reload_combine_note_use (&XEXP (x, i), insn, ruid, containing_mem);
1599 else if (fmt[i] == 'E')
1601 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1602 reload_combine_note_use (&XVECEXP (x, i, j), insn, ruid,
1603 containing_mem);
1608 /* See if we can reduce the cost of a constant by replacing a move
1609 with an add. We track situations in which a register is set to a
1610 constant or to a register plus a constant. */
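/* Illustrative sketch (hypothetical RTL, not from the original source).
   Once
       (set (reg:SI 1) (const_int 100))
   has been recorded, a later
       (set (reg:SI 1) (const_int 104))
   may be replaced by
       (set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4)))
   when the add is cheaper than loading the full constant again.  */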
1611 /* We cannot do our optimization across labels. Invalidating all the
1612 information about register contents we have would be costly, so we
1613 use move2add_last_label_luid to note where the label is and then
1614 later disable any optimization that would cross it.
1615 reg_offset[n] / reg_base_reg[n] / reg_symbol_ref[n] / reg_mode[n]
1616 are only valid if reg_set_luid[n] is greater than
1617 move2add_last_label_luid.
1618 For a set that established a new (potential) base register with
1619 non-constant value, we use move2add_luid from the place where the
1620 setting insn is encountered; registers based off that base then
1621 get the same reg_set_luid. Constants all get
1622 move2add_last_label_luid + 1 as their reg_set_luid. */
1623 static int reg_set_luid[FIRST_PSEUDO_REGISTER];
1625 /* If reg_base_reg[n] is negative, register n has been set to
1626 reg_offset[n] or reg_symbol_ref[n] + reg_offset[n] in mode reg_mode[n].
1627 If reg_base_reg[n] is non-negative, register n has been set to the
1628 sum of reg_offset[n] and the value of register reg_base_reg[n]
1629 before reg_set_luid[n], calculated in mode reg_mode[n] .
1630 For multi-hard-register registers, all but the first one are
1631 recorded as BLKmode in reg_mode. Setting reg_mode to VOIDmode
1632 marks it as invalid. */
1633 static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
1634 static int reg_base_reg[FIRST_PSEUDO_REGISTER];
1635 static rtx reg_symbol_ref[FIRST_PSEUDO_REGISTER];
1636 static machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
1638 /* move2add_luid is linearly increased while scanning the instructions
1639 from first to last. It is used to set reg_set_luid in
1640 reload_cse_move2add and move2add_note_store. */
1641 static int move2add_luid;
1643 /* move2add_last_label_luid is set whenever a label is found. Labels
1644 invalidate all previously collected reg_offset data. */
1645 static int move2add_last_label_luid;
1647 /* ??? We don't know how zero / sign extension is handled, hence we
1648 can't go from a narrower to a wider mode. */
1649 #define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
1650 (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
1651 || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
1652 && TRULY_NOOP_TRUNCATION_MODES_P (OUTMODE, INMODE)))
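/* For example (not part of the original source): on a target where
   truncation is a no-op, a value recorded for a register in SImode may be
   reused when the register is read in QImode, but a value recorded in
   QImode cannot be reused for a wider SImode reference, because we do not
   know how the upper bits were produced.  */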
1654 /* Record that REG is being set to a value with the mode of REG. */
1656 static void
1657 move2add_record_mode (rtx reg)
1659 int regno, nregs;
1660 machine_mode mode = GET_MODE (reg);
1662 if (GET_CODE (reg) == SUBREG)
1664 regno = subreg_regno (reg);
1665 nregs = subreg_nregs (reg);
1667 else if (REG_P (reg))
1669 regno = REGNO (reg);
1670 nregs = REG_NREGS (reg);
1672 else
1673 gcc_unreachable ();
1674 for (int i = nregs - 1; i > 0; i--)
1675 reg_mode[regno + i] = BLKmode;
1676 reg_mode[regno] = mode;
1679 /* Record that REG is being set to the sum of SYM and OFF. */
1681 static void
1682 move2add_record_sym_value (rtx reg, rtx sym, rtx off)
1684 int regno = REGNO (reg);
1686 move2add_record_mode (reg);
1687 reg_set_luid[regno] = move2add_luid;
1688 reg_base_reg[regno] = -1;
1689 reg_symbol_ref[regno] = sym;
1690 reg_offset[regno] = INTVAL (off);
1693 /* Check if REGNO contains a valid value in MODE. */
1695 static bool
1696 move2add_valid_value_p (int regno, machine_mode mode)
1698 if (reg_set_luid[regno] <= move2add_last_label_luid)
1699 return false;
1701 if (mode != reg_mode[regno])
1703 if (!MODES_OK_FOR_MOVE2ADD (mode, reg_mode[regno]))
1704 return false;
1705 /* The value loaded into regno in reg_mode[regno] is also valid in
1706 mode after truncation only if (REG:mode regno) is the lowpart of
1707 (REG:reg_mode[regno] regno). Now, for big endian, the starting
1708 regno of the lowpart might be different. */
1709 int s_off = subreg_lowpart_offset (mode, reg_mode[regno]);
1710 s_off = subreg_regno_offset (regno, reg_mode[regno], s_off, mode);
1711 if (s_off != 0)
1712 /* We could in principle adjust regno, check reg_mode[regno] to be
1713 BLKmode, and return s_off to the caller (vs. -1 for failure),
1714 but we currently have no callers that could make use of this
1715 information. */
1716 return false;
1719 for (int i = hard_regno_nregs[regno][mode] - 1; i > 0; i--)
1720 if (reg_mode[regno + i] != BLKmode)
1721 return false;
1722 return true;
1725 /* This function is called with INSN that sets REG to (SYM + OFF),
1726 while REG is known to already have value (SYM + offset).
1727 This function tries to change INSN into an add instruction
1728 (set (REG) (plus (REG) (OFF - offset))) using the known value.
1729 It also updates the information about REG's known value.
1730 Return true if we made a change. */
1732 static bool
1733 move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
1735 rtx pat = PATTERN (insn);
1736 rtx src = SET_SRC (pat);
1737 int regno = REGNO (reg);
1738 rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[regno],
1739 GET_MODE (reg));
1740 bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
1741 bool changed = false;
1743 /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
1744 use (set (reg) (reg)) instead.
1745 We don't delete this insn, nor do we convert it into a
1746 note, to avoid losing register notes or the return
1747 value flag. jump2 already knows how to get rid of
1748 no-op moves. */
1749 if (new_src == const0_rtx)
1751 /* If the constants are different, this is a
1752 truncation, that, if turned into (set (reg)
1753 (reg)), would be discarded. Maybe we should
1754 try a truncMN pattern? */
1755 if (INTVAL (off) == reg_offset [regno])
1756 changed = validate_change (insn, &SET_SRC (pat), reg, 0);
1758 else
1760 struct full_rtx_costs oldcst, newcst;
1761 rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);
1763 get_full_set_rtx_cost (pat, &oldcst);
1764 SET_SRC (pat) = tem;
1765 get_full_set_rtx_cost (pat, &newcst);
1766 SET_SRC (pat) = src;
1768 if (costs_lt_p (&newcst, &oldcst, speed)
1769 && have_add2_insn (reg, new_src))
1770 changed = validate_change (insn, &SET_SRC (pat), tem, 0);
1771 else if (sym == NULL_RTX && GET_MODE (reg) != BImode)
1773 machine_mode narrow_mode;
1774 for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1775 narrow_mode != VOIDmode
1776 && narrow_mode != GET_MODE (reg);
1777 narrow_mode = GET_MODE_WIDER_MODE (narrow_mode))
1779 if (have_insn_for (STRICT_LOW_PART, narrow_mode)
1780 && ((reg_offset[regno] & ~GET_MODE_MASK (narrow_mode))
1781 == (INTVAL (off) & ~GET_MODE_MASK (narrow_mode))))
1783 rtx narrow_reg = gen_lowpart_common (narrow_mode, reg);
1784 rtx narrow_src = gen_int_mode (INTVAL (off),
1785 narrow_mode);
1786 rtx new_set
1787 = gen_rtx_SET (gen_rtx_STRICT_LOW_PART (VOIDmode,
1788 narrow_reg),
1789 narrow_src);
1790 get_full_set_rtx_cost (new_set, &newcst);
1791 if (costs_lt_p (&newcst, &oldcst, speed))
1793 changed = validate_change (insn, &PATTERN (insn),
1794 new_set, 0);
1795 if (changed)
1796 break;
1802 move2add_record_sym_value (reg, sym, off);
1803 return changed;
1807 /* This function is called with INSN that sets REG to (SYM + OFF),
1808 but REG doesn't have known value (SYM + offset). This function
1809 tries to find another register which is known to already have
1810 value (SYM + offset) and change INSN into an add instruction
1811 (set (REG) (plus (the found register) (OFF - offset))) if such
1812 a register is found. It also updates the information about
1813 REG's known value.
1814 Return true iff we made a change. */
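/* Illustrative sketch (hypothetical, not from the original source): if
   register 5 is known to hold SYM + 8 and INSN sets register 3 to SYM + 12,
   the insn may be rewritten as
       (set (reg:SI 3) (plus:SI (reg:SI 5) (const_int 4)))
   choosing, among all registers known to hold SYM plus some offset, the
   one that yields the cheapest replacement.  */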
static bool
move2add_use_add3_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  int min_regno = 0;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  int i;
  bool changed = false;
  struct full_rtx_costs oldcst, newcst, mincst;
  rtx plus_expr;

  init_costs_to_max (&mincst);
  get_full_set_rtx_cost (pat, &oldcst);

  plus_expr = gen_rtx_PLUS (GET_MODE (reg), reg, const0_rtx);
  SET_SRC (pat) = plus_expr;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (move2add_valid_value_p (i, GET_MODE (reg))
        && reg_base_reg[i] < 0
        && reg_symbol_ref[i] != NULL_RTX
        && rtx_equal_p (sym, reg_symbol_ref[i]))
      {
        rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[i],
                                    GET_MODE (reg));
        /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
           use (set (reg) (reg)) instead.
           We don't delete this insn, nor do we convert it into a
           note, to avoid losing register notes or the return
           value flag.  jump2 already knows how to get rid of
           no-op moves.  */
        if (new_src == const0_rtx)
          {
            init_costs_to_zero (&mincst);
            min_regno = i;
            break;
          }
        else
          {
            XEXP (plus_expr, 1) = new_src;
            get_full_set_rtx_cost (pat, &newcst);

            if (costs_lt_p (&newcst, &mincst, speed))
              {
                mincst = newcst;
                min_regno = i;
              }
          }
      }
  SET_SRC (pat) = src;

  if (costs_lt_p (&mincst, &oldcst, speed))
    {
      rtx tem;

      tem = gen_rtx_REG (GET_MODE (reg), min_regno);
      if (i != min_regno)
        {
          rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[min_regno],
                                      GET_MODE (reg));
          tem = gen_rtx_PLUS (GET_MODE (reg), tem, new_src);
        }
      if (validate_change (insn, &SET_SRC (pat), tem, 0))
        changed = true;
    }
  reg_set_luid[regno] = move2add_luid;
  move2add_record_sym_value (reg, sym, off);
  return changed;
}

/* Convert move insns with constant inputs to additions if they are cheaper.
   Return true if any changes were made.  */
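/* A typical case (illustrative only, not from the original sources): after
   reload, a sequence such as
     (set (reg:SI 0) (const_int 0x12345000))
     ...
     (set (reg:SI 0) (const_int 0x12345008))
   can have its second insn rewritten as
     (set (reg:SI 0) (plus:SI (reg:SI 0) (const_int 8)))
   which wins on targets where materialising the full constant needs more
   than one insn or a larger encoding; the target's rtx cost hooks are
   consulted before any change is made.  */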
static bool
reload_cse_move2add (rtx_insn *first)
{
  int i;
  rtx_insn *insn;
  bool changed = false;

  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
    {
      reg_set_luid[i] = 0;
      reg_offset[i] = 0;
      reg_base_reg[i] = 0;
      reg_symbol_ref[i] = NULL_RTX;
      reg_mode[i] = VOIDmode;
    }

  move2add_last_label_luid = 0;
  move2add_luid = 2;
  for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
    {
      rtx pat, note;

      if (LABEL_P (insn))
        {
          move2add_last_label_luid = move2add_luid;
          /* We're going to increment move2add_luid twice after a
             label, so that we can use move2add_last_label_luid + 1 as
             the luid for constants.  */
          move2add_luid++;
          continue;
        }
      if (! INSN_P (insn))
        continue;
      pat = PATTERN (insn);
      /* For simplicity, we only perform this optimization on
         straightforward SETs.  */
      if (GET_CODE (pat) == SET
          && REG_P (SET_DEST (pat)))
        {
          rtx reg = SET_DEST (pat);
          int regno = REGNO (reg);
          rtx src = SET_SRC (pat);

          /* Check if we have valid information on the contents of this
             register in the mode of REG.  */
          if (move2add_valid_value_p (regno, GET_MODE (reg))
              && dbg_cnt (cse2_move2add))
            {
              /* Try to transform (set (REGX) (CONST_INT A))
                                  ...
                                  (set (REGX) (CONST_INT B))
                 to
                                  (set (REGX) (CONST_INT A))
                                  ...
                                  (set (REGX) (plus (REGX) (CONST_INT B-A)))
                 or
                                  (set (REGX) (CONST_INT A))
                                  ...
                                  (set (STRICT_LOW_PART (REGX)) (CONST_INT B))
              */

              if (CONST_INT_P (src)
                  && reg_base_reg[regno] < 0
                  && reg_symbol_ref[regno] == NULL_RTX)
                {
                  changed |= move2add_use_add2_insn (reg, NULL_RTX, src, insn);
                  continue;
                }

              /* Try to transform (set (REGX) (REGY))
                                  (set (REGX) (PLUS (REGX) (CONST_INT A)))
                                  ...
                                  (set (REGX) (REGY))
                                  (set (REGX) (PLUS (REGX) (CONST_INT B)))
                 to
                                  (set (REGX) (REGY))
                                  (set (REGX) (PLUS (REGX) (CONST_INT A)))
                                  ...
                                  (set (REGX) (plus (REGX) (CONST_INT B-A)))  */
              else if (REG_P (src)
                       && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
                       && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
                       && move2add_valid_value_p (REGNO (src), GET_MODE (reg)))
                {
                  rtx_insn *next = next_nonnote_nondebug_insn (insn);
                  rtx set = NULL_RTX;
                  if (next)
                    set = single_set (next);
                  if (set
                      && SET_DEST (set) == reg
                      && GET_CODE (SET_SRC (set)) == PLUS
                      && XEXP (SET_SRC (set), 0) == reg
                      && CONST_INT_P (XEXP (SET_SRC (set), 1)))
                    {
                      rtx src3 = XEXP (SET_SRC (set), 1);
                      unsigned HOST_WIDE_INT added_offset = UINTVAL (src3);
                      HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
                      HOST_WIDE_INT regno_offset = reg_offset[regno];
                      rtx new_src =
                        gen_int_mode (added_offset
                                      + base_offset
                                      - regno_offset,
                                      GET_MODE (reg));
                      bool success = false;
                      bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

                      if (new_src == const0_rtx)
                        /* See above why we create (set (reg) (reg)) here.  */
                        success
                          = validate_change (next, &SET_SRC (set), reg, 0);
                      else
                        {
                          rtx old_src = SET_SRC (set);
                          struct full_rtx_costs oldcst, newcst;
                          rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);

                          get_full_set_rtx_cost (set, &oldcst);
                          SET_SRC (set) = tem;
                          get_full_set_src_cost (tem, GET_MODE (reg), &newcst);
                          SET_SRC (set) = old_src;
                          costs_add_n_insns (&oldcst, 1);

                          if (costs_lt_p (&newcst, &oldcst, speed)
                              && have_add2_insn (reg, new_src))
                            {
                              rtx newpat = gen_rtx_SET (reg, tem);
                              success
                                = validate_change (next, &PATTERN (next),
                                                   newpat, 0);
                            }
                        }
                      if (success)
                        delete_insn (insn);
                      changed |= success;
                      insn = next;
                      move2add_record_mode (reg);
                      reg_offset[regno]
                        = trunc_int_for_mode (added_offset + base_offset,
                                              GET_MODE (reg));
                      continue;
                    }
                }
            }

          /* Try to transform
             (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
             ...
             (set (REGY) (CONST (PLUS (SYMBOL_REF) (CONST_INT B))))
             to
             (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
             ...
             (set (REGY) (CONST (PLUS (REGX) (CONST_INT B-A))))  */
          if ((GET_CODE (src) == SYMBOL_REF
               || (GET_CODE (src) == CONST
                   && GET_CODE (XEXP (src, 0)) == PLUS
                   && GET_CODE (XEXP (XEXP (src, 0), 0)) == SYMBOL_REF
                   && CONST_INT_P (XEXP (XEXP (src, 0), 1))))
              && dbg_cnt (cse2_move2add))
            {
              rtx sym, off;

              if (GET_CODE (src) == SYMBOL_REF)
                {
                  sym = src;
                  off = const0_rtx;
                }
              else
                {
                  sym = XEXP (XEXP (src, 0), 0);
                  off = XEXP (XEXP (src, 0), 1);
                }

              /* If the reg already contains the value which is sum of
                 sym and some constant value, we can use an add2 insn.  */
              if (move2add_valid_value_p (regno, GET_MODE (reg))
                  && reg_base_reg[regno] < 0
                  && reg_symbol_ref[regno] != NULL_RTX
                  && rtx_equal_p (sym, reg_symbol_ref[regno]))
                changed |= move2add_use_add2_insn (reg, sym, off, insn);

              /* Otherwise, we have to find a register whose value is sum
                 of sym and some constant value.  */
              else
                changed |= move2add_use_add3_insn (reg, sym, off, insn);

              continue;
            }
        }

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
        {
          if (REG_NOTE_KIND (note) == REG_INC
              && REG_P (XEXP (note, 0)))
            {
              /* Reset the information about this register.  */
              int regno = REGNO (XEXP (note, 0));
              if (regno < FIRST_PSEUDO_REGISTER)
                {
                  move2add_record_mode (XEXP (note, 0));
                  reg_mode[regno] = VOIDmode;
                }
            }
        }
      note_stores (PATTERN (insn), move2add_note_store, insn);

      /* If INSN is a conditional branch, we try to extract an
         implicit set out of it.  */
      if (any_condjump_p (insn))
        {
          rtx cnd = fis_get_condition (insn);

          if (cnd != NULL_RTX
              && GET_CODE (cnd) == NE
              && REG_P (XEXP (cnd, 0))
              && !reg_set_p (XEXP (cnd, 0), insn)
              /* The following two checks, which are also in
                 move2add_note_store, are intended to reduce the
                 number of calls to gen_rtx_SET to avoid memory
                 allocation if possible.  */
              && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
              && REG_NREGS (XEXP (cnd, 0)) == 1
              && CONST_INT_P (XEXP (cnd, 1)))
            {
              rtx implicit_set =
                gen_rtx_SET (XEXP (cnd, 0), XEXP (cnd, 1));
              move2add_note_store (SET_DEST (implicit_set), implicit_set, insn);
            }
        }

      /* If this is a CALL_INSN, all call used registers are stored with
         unknown values.  */
      if (CALL_P (insn))
        {
          rtx link;

          for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
            {
              if (call_used_regs[i])
                /* Reset the information about this register.  */
                reg_mode[i] = VOIDmode;
            }

          for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
               link = XEXP (link, 1))
            {
              rtx setuse = XEXP (link, 0);
              rtx usage_rtx = XEXP (setuse, 0);
              if (GET_CODE (setuse) == CLOBBER
                  && REG_P (usage_rtx))
                {
                  unsigned int end_regno = END_REGNO (usage_rtx);
                  for (unsigned int r = REGNO (usage_rtx); r < end_regno; ++r)
                    /* Reset the information about this register.  */
                    reg_mode[r] = VOIDmode;
                }
            }
        }
    }
  return changed;
}

/* SET is a SET or CLOBBER that sets DST.  DATA is the insn which
   contains SET.
   Update reg_set_luid, reg_offset and reg_base_reg accordingly.
   Called from reload_cse_move2add via note_stores.  */
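/* For instance (illustrative only, not from the original sources): on seeing
     (set (reg:SI 4) (plus:SI (reg:SI 7) (const_int 8)))
   with no information yet recorded for register 7, register 7 is set up as
   its own base with offset 0, and register 4 is then recorded as having
   value (reg 7) + 8 in SImode.  A later CONST_INT store into register 4
   would instead record it as a plain constant.  */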
static void
move2add_note_store (rtx dst, const_rtx set, void *data)
{
  rtx_insn *insn = (rtx_insn *) data;
  unsigned int regno = 0;
  machine_mode mode = GET_MODE (dst);

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
          || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
        reg_mode[REGNO (XEXP (dst, 0))] = VOIDmode;
      return;
    }

  if (GET_CODE (dst) == SUBREG)
    regno = subreg_regno (dst);
  else if (REG_P (dst))
    regno = REGNO (dst);
  else
    return;

  if (SCALAR_INT_MODE_P (mode)
      && GET_CODE (set) == SET)
    {
      rtx note, sym = NULL_RTX;
      rtx off;

      note = find_reg_equal_equiv_note (insn);
      if (note && GET_CODE (XEXP (note, 0)) == SYMBOL_REF)
        {
          sym = XEXP (note, 0);
          off = const0_rtx;
        }
      else if (note && GET_CODE (XEXP (note, 0)) == CONST
               && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
               && GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0)) == SYMBOL_REF
               && CONST_INT_P (XEXP (XEXP (XEXP (note, 0), 0), 1)))
        {
          sym = XEXP (XEXP (XEXP (note, 0), 0), 0);
          off = XEXP (XEXP (XEXP (note, 0), 0), 1);
        }

      if (sym != NULL_RTX)
        {
          move2add_record_sym_value (dst, sym, off);
          return;
        }
    }

  if (SCALAR_INT_MODE_P (mode)
      && GET_CODE (set) == SET
      && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
    {
      rtx src = SET_SRC (set);
      rtx base_reg;
      unsigned HOST_WIDE_INT offset;
      int base_regno;

      switch (GET_CODE (src))
        {
        case PLUS:
          if (REG_P (XEXP (src, 0)))
            {
              base_reg = XEXP (src, 0);

              if (CONST_INT_P (XEXP (src, 1)))
                offset = UINTVAL (XEXP (src, 1));
              else if (REG_P (XEXP (src, 1))
                       && move2add_valid_value_p (REGNO (XEXP (src, 1)), mode))
                {
                  if (reg_base_reg[REGNO (XEXP (src, 1))] < 0
                      && reg_symbol_ref[REGNO (XEXP (src, 1))] == NULL_RTX)
                    offset = reg_offset[REGNO (XEXP (src, 1))];
                  /* Maybe the first register is known to be a
                     constant.  */
                  else if (move2add_valid_value_p (REGNO (base_reg), mode)
                           && reg_base_reg[REGNO (base_reg)] < 0
                           && reg_symbol_ref[REGNO (base_reg)] == NULL_RTX)
                    {
                      offset = reg_offset[REGNO (base_reg)];
                      base_reg = XEXP (src, 1);
                    }
                  else
                    goto invalidate;
                }
              else
                goto invalidate;

              break;
            }

          goto invalidate;

        case REG:
          base_reg = src;
          offset = 0;
          break;

        case CONST_INT:
          /* Start tracking the register as a constant.  */
          reg_base_reg[regno] = -1;
          reg_symbol_ref[regno] = NULL_RTX;
          reg_offset[regno] = INTVAL (SET_SRC (set));
          /* We assign the same luid to all registers set to constants.  */
          reg_set_luid[regno] = move2add_last_label_luid + 1;
          move2add_record_mode (dst);
          return;

        default:
          goto invalidate;
        }

      base_regno = REGNO (base_reg);
      /* If information about the base register is not valid, set it
         up as a new base register, pretending its value is known
         starting from the current insn.  */
      if (!move2add_valid_value_p (base_regno, mode))
        {
          reg_base_reg[base_regno] = base_regno;
          reg_symbol_ref[base_regno] = NULL_RTX;
          reg_offset[base_regno] = 0;
          reg_set_luid[base_regno] = move2add_luid;
          gcc_assert (GET_MODE (base_reg) == mode);
          move2add_record_mode (base_reg);
        }

      /* Copy base information from our base register.  */
      reg_set_luid[regno] = reg_set_luid[base_regno];
      reg_base_reg[regno] = reg_base_reg[base_regno];
      reg_symbol_ref[regno] = reg_symbol_ref[base_regno];

      /* Compute the sum of the offsets or constants.  */
      reg_offset[regno]
        = trunc_int_for_mode (offset + reg_offset[base_regno], mode);

      move2add_record_mode (dst);
    }
  else
    {
    invalidate:
      /* Invalidate the contents of the register.  */
      move2add_record_mode (dst);
      reg_mode[regno] = VOIDmode;
    }
}

namespace {

const pass_data pass_data_postreload_cse =
{
  RTL_PASS, /* type */
  "postreload", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_RELOAD_CSE_REGS, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_postreload_cse : public rtl_opt_pass
{
public:
  pass_postreload_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_postreload_cse, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return (optimize > 0 && reload_completed); }

  virtual unsigned int execute (function *);

}; // class pass_postreload_cse

unsigned int
pass_postreload_cse::execute (function *fun)
{
  if (!dbg_cnt (postreload_cse))
    return 0;

  /* Do a very simple CSE pass over just the hard registers.  */
  reload_cse_regs (get_insns ());
  /* Reload_cse_regs can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them.  */
  if (fun->can_throw_non_call_exceptions
      && purge_all_dead_edges ())
    cleanup_cfg (0);

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_postreload_cse (gcc::context *ctxt)
{
  return new pass_postreload_cse (ctxt);
}