1 /* Perform simple optimizations to clean up the result of reload.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
32 #include "insn-config.h"
38 #include "basic-block.h"
/* Forward declarations for this file's static helpers.  They use the
   pre-ISO PARAMS macro (K&R compatibility, GCC 3.x era).
   NOTE(review): this text is a line-numbered listing with elided lines;
   all tokens below are preserved byte-for-byte.  */
48 static int reload_cse_noop_set_p
PARAMS ((rtx
));
49 static void reload_cse_simplify
PARAMS ((rtx
, rtx
));
50 static void reload_cse_regs_1
PARAMS ((rtx
));
51 static int reload_cse_simplify_set
PARAMS ((rtx
, rtx
));
52 static int reload_cse_simplify_operands
PARAMS ((rtx
, rtx
));
54 static void reload_combine
PARAMS ((void));
55 static void reload_combine_note_use
PARAMS ((rtx
*, rtx
));
56 static void reload_combine_note_store
PARAMS ((rtx
, rtx
, void *));
58 static void reload_cse_move2add
PARAMS ((rtx
));
59 static void move2add_note_store
PARAMS ((rtx
, rtx
, void *));
61 /* Call cse / combine like post-reload optimization phases.
62 FIRST is the first instruction. */
/* Driver for the post-reload CSE-like passes: runs reload_cse_regs_1,
   then reload_cse_move2add, and repeats reload_cse_regs_1 when
   -fexpensive-optimizations is enabled (move2add may have exposed new
   redundancies).  NOTE(review): listing is elided; tokens unchanged.  */
64 reload_cse_regs (first
)
65 rtx first ATTRIBUTE_UNUSED
;
67 reload_cse_regs_1 (first
);
69 reload_cse_move2add (first
);
70 if (flag_expensive_optimizations
)
71 reload_cse_regs_1 (first
);
74 /* See whether a single set SET is a noop. */
/* Return nonzero if SET's destination and source are equal according to
   cselib, i.e. the single set is a no-op.  The first test guards against
   a destination set in a different (narrower/wider) mode than cselib
   recorded; its elided branch presumably returns 0 — confirm against the
   full source.  NOTE(review): listing is elided; tokens unchanged.  */
76 reload_cse_noop_set_p (set
)
79 if (cselib_reg_set_mode (SET_DEST (set
)) != GET_MODE (SET_DEST (set
)))
82 return rtx_equal_for_cselib_p (SET_DEST (set
), SET_SRC (set
));
85 /* Try to simplify INSN. */
87 reload_cse_simplify (insn
, testreg
)
91 rtx body
= PATTERN (insn
);
93 if (GET_CODE (body
) == SET
)
97 /* Simplify even if we may think it is a no-op.
98 We may think a memory load of a value smaller than WORD_SIZE
99 is redundant because we haven't taken into account possible
100 implicit extension. reload_cse_simplify_set() will bring
101 this out, so it's safer to simplify before we delete. */
102 count
+= reload_cse_simplify_set (body
, insn
);
104 if (!count
&& reload_cse_noop_set_p (body
))
106 rtx value
= SET_DEST (body
);
108 && ! REG_FUNCTION_VALUE_P (value
))
110 delete_insn_and_edges (insn
);
115 apply_change_group ();
117 reload_cse_simplify_operands (insn
, testreg
);
119 else if (GET_CODE (body
) == PARALLEL
)
123 rtx value
= NULL_RTX
;
125 /* If every action in a PARALLEL is a noop, we can delete
126 the entire PARALLEL. */
127 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; --i
)
129 rtx part
= XVECEXP (body
, 0, i
);
130 if (GET_CODE (part
) == SET
)
132 if (! reload_cse_noop_set_p (part
))
134 if (REG_P (SET_DEST (part
))
135 && REG_FUNCTION_VALUE_P (SET_DEST (part
)))
139 value
= SET_DEST (part
);
142 else if (GET_CODE (part
) != CLOBBER
)
148 delete_insn_and_edges (insn
);
149 /* We're done with this insn. */
153 /* It's not a no-op, but we can try to simplify it. */
154 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; --i
)
155 if (GET_CODE (XVECEXP (body
, 0, i
)) == SET
)
156 count
+= reload_cse_simplify_set (XVECEXP (body
, 0, i
), insn
);
159 apply_change_group ();
161 reload_cse_simplify_operands (insn
, testreg
);
165 /* Do a very simple CSE pass over the hard registers.
167 This function detects no-op moves where we happened to assign two
168 different pseudo-registers to the same hard register, and then
169 copied one to the other. Reload will generate a useless
170 instruction copying a register to itself.
172 This function also detects cases where we load a value from memory
173 into two different registers, and (if memory is more expensive than
174 registers) changes it to simply copy the first register into the
177 Another optimization is performed that scans the operands of each
178 instruction to see whether the value is already available in a
179 hard register. It then replaces the operand with the hard register
180 if possible, much like an optional reload would. */
/* Walk the insn chain from FIRST, trying to simplify each insn with
   reload_cse_simplify; TESTREG is a scratch REG rtx (mode VOIDmode,
   regno -1) reused while testing operand alternatives.  Each insn is
   fed to cselib_process_insn to keep value tracking current.  The walk
   is bracketed by init_alias_analysis/end_alias_analysis; cselib
   init/teardown lines are elided from this listing — confirm against
   the full source.  NOTE(review): tokens below are unchanged.  */
183 reload_cse_regs_1 (first
)
187 rtx testreg
= gen_rtx_REG (VOIDmode
, -1);
190 init_alias_analysis ();
192 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
195 reload_cse_simplify (insn
, testreg
);
197 cselib_process_insn (insn
);
201 end_alias_analysis ();
205 /* Try to simplify a single SET instruction. SET is the set pattern.
206 INSN is the instruction it came from.
207 This function only handles one case: if we set a register to a value
208 which is not a register, we try to find that value in some other register
209 and change the set into a register copy. */
212 reload_cse_simplify_set (set
, insn
)
219 enum reg_class dclass
;
222 struct elt_loc_list
*l
;
223 #ifdef LOAD_EXTEND_OP
224 enum rtx_code extend_op
= NIL
;
227 dreg
= true_regnum (SET_DEST (set
));
232 if (side_effects_p (src
) || true_regnum (src
) >= 0)
235 dclass
= REGNO_REG_CLASS (dreg
);
237 #ifdef LOAD_EXTEND_OP
238 /* When replacing a memory with a register, we need to honor assumptions
239 that combine made wrt the contents of sign bits. We'll do this by
240 generating an extend instruction instead of a reg->reg copy. Thus
241 the destination must be a register that we can widen. */
242 if (GET_CODE (src
) == MEM
243 && GET_MODE_BITSIZE (GET_MODE (src
)) < BITS_PER_WORD
244 && (extend_op
= LOAD_EXTEND_OP (GET_MODE (src
))) != NIL
245 && GET_CODE (SET_DEST (set
)) != REG
)
249 val
= cselib_lookup (src
, GET_MODE (SET_DEST (set
)), 0);
253 /* If memory loads are cheaper than register copies, don't change them. */
254 if (GET_CODE (src
) == MEM
)
255 old_cost
= MEMORY_MOVE_COST (GET_MODE (src
), dclass
, 1);
256 else if (GET_CODE (src
) == REG
)
257 old_cost
= REGISTER_MOVE_COST (GET_MODE (src
),
258 REGNO_REG_CLASS (REGNO (src
)), dclass
);
260 old_cost
= rtx_cost (src
, SET
);
262 for (l
= val
->locs
; l
; l
= l
->next
)
264 rtx this_rtx
= l
->loc
;
267 if (CONSTANT_P (this_rtx
) && ! references_value_p (this_rtx
, 0))
269 #ifdef LOAD_EXTEND_OP
270 if (extend_op
!= NIL
)
272 HOST_WIDE_INT this_val
;
274 /* ??? I'm lazy and don't wish to handle CONST_DOUBLE. Other
275 constants, such as SYMBOL_REF, cannot be extended. */
276 if (GET_CODE (this_rtx
) != CONST_INT
)
279 this_val
= INTVAL (this_rtx
);
283 this_val
&= GET_MODE_MASK (GET_MODE (src
));
286 /* ??? In theory we're already extended. */
287 if (this_val
== trunc_int_for_mode (this_val
, GET_MODE (src
)))
292 this_rtx
= GEN_INT (this_val
);
295 this_cost
= rtx_cost (this_rtx
, SET
);
297 else if (GET_CODE (this_rtx
) == REG
)
299 #ifdef LOAD_EXTEND_OP
300 if (extend_op
!= NIL
)
302 this_rtx
= gen_rtx_fmt_e (extend_op
, word_mode
, this_rtx
);
303 this_cost
= rtx_cost (this_rtx
, SET
);
307 this_cost
= REGISTER_MOVE_COST (GET_MODE (this_rtx
),
308 REGNO_REG_CLASS (REGNO (this_rtx
)),
314 /* If equal costs, prefer registers over anything else. That
315 tends to lead to smaller instructions on some machines. */
316 if (this_cost
< old_cost
317 || (this_cost
== old_cost
318 && GET_CODE (this_rtx
) == REG
319 && GET_CODE (SET_SRC (set
)) != REG
))
321 #ifdef LOAD_EXTEND_OP
322 if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set
))) < BITS_PER_WORD
324 #ifdef CANNOT_CHANGE_MODE_CLASS
325 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set
)),
327 REGNO_REG_CLASS (REGNO (SET_DEST (set
))))
331 rtx wide_dest
= gen_rtx_REG (word_mode
, REGNO (SET_DEST (set
)));
332 ORIGINAL_REGNO (wide_dest
) = ORIGINAL_REGNO (SET_DEST (set
));
333 validate_change (insn
, &SET_DEST (set
), wide_dest
, 1);
337 validate_change (insn
, &SET_SRC (set
), copy_rtx (this_rtx
), 1);
338 old_cost
= this_cost
, did_change
= 1;
345 /* Try to replace operands in INSN with equivalent values that are already
346 in registers. This can be viewed as optional reloading.
348 For each non-register operand in the insn, see if any hard regs are
349 known to be equivalent to that operand. Record the alternatives which
350 can accept these hard registers. Among all alternatives, select the
351 ones which are better or equal to the one currently matching, where
352 "better" is in terms of '?' and '!' constraints. Among the remaining
353 alternatives, select the one which replaces most operands with
357 reload_cse_simplify_operands (insn
, testreg
)
363 /* For each operand, all registers that are equivalent to it. */
364 HARD_REG_SET equiv_regs
[MAX_RECOG_OPERANDS
];
366 const char *constraints
[MAX_RECOG_OPERANDS
];
368 /* Vector recording how bad an alternative is. */
369 int *alternative_reject
;
370 /* Vector recording how many registers can be introduced by choosing
372 int *alternative_nregs
;
373 /* Array of vectors recording, for each operand and each alternative,
374 which hard register to substitute, or -1 if the operand should be
376 int *op_alt_regno
[MAX_RECOG_OPERANDS
];
377 /* Array of alternatives, sorted in order of decreasing desirability. */
378 int *alternative_order
;
382 if (recog_data
.n_alternatives
== 0 || recog_data
.n_operands
== 0)
385 /* Figure out which alternative currently matches. */
386 if (! constrain_operands (1))
387 fatal_insn_not_found (insn
);
389 alternative_reject
= (int *) alloca (recog_data
.n_alternatives
* sizeof (int));
390 alternative_nregs
= (int *) alloca (recog_data
.n_alternatives
* sizeof (int));
391 alternative_order
= (int *) alloca (recog_data
.n_alternatives
* sizeof (int));
392 memset ((char *) alternative_reject
, 0, recog_data
.n_alternatives
* sizeof (int));
393 memset ((char *) alternative_nregs
, 0, recog_data
.n_alternatives
* sizeof (int));
395 /* For each operand, find out which regs are equivalent. */
396 for (i
= 0; i
< recog_data
.n_operands
; i
++)
399 struct elt_loc_list
*l
;
401 CLEAR_HARD_REG_SET (equiv_regs
[i
]);
403 /* cselib blows up on CODE_LABELs. Trying to fix that doesn't seem
404 right, so avoid the problem here. Likewise if we have a constant
405 and the insn pattern doesn't tell us the mode we need. */
406 if (GET_CODE (recog_data
.operand
[i
]) == CODE_LABEL
407 || (CONSTANT_P (recog_data
.operand
[i
])
408 && recog_data
.operand_mode
[i
] == VOIDmode
))
411 v
= cselib_lookup (recog_data
.operand
[i
], recog_data
.operand_mode
[i
], 0);
415 for (l
= v
->locs
; l
; l
= l
->next
)
416 if (GET_CODE (l
->loc
) == REG
)
417 SET_HARD_REG_BIT (equiv_regs
[i
], REGNO (l
->loc
));
420 for (i
= 0; i
< recog_data
.n_operands
; i
++)
422 enum machine_mode mode
;
426 op_alt_regno
[i
] = (int *) alloca (recog_data
.n_alternatives
* sizeof (int));
427 for (j
= 0; j
< recog_data
.n_alternatives
; j
++)
428 op_alt_regno
[i
][j
] = -1;
430 p
= constraints
[i
] = recog_data
.constraints
[i
];
431 mode
= recog_data
.operand_mode
[i
];
433 /* Add the reject values for each alternative given by the constraints
442 alternative_reject
[j
] += 3;
444 alternative_reject
[j
] += 300;
447 /* We won't change operands which are already registers. We
448 also don't want to modify output operands. */
449 regno
= true_regnum (recog_data
.operand
[i
]);
451 || constraints
[i
][0] == '='
452 || constraints
[i
][0] == '+')
455 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
457 int class = (int) NO_REGS
;
459 if (! TEST_HARD_REG_BIT (equiv_regs
[i
], regno
))
462 REGNO (testreg
) = regno
;
463 PUT_MODE (testreg
, mode
);
465 /* We found a register equal to this operand. Now look for all
466 alternatives that can accept this register and have not been
467 assigned a register they can use yet. */
476 case '=': case '+': case '?':
477 case '#': case '&': case '!':
479 case '0': case '1': case '2': case '3': case '4':
480 case '5': case '6': case '7': case '8': case '9':
481 case 'm': case '<': case '>': case 'V': case 'o':
482 case 'E': case 'F': case 'G': case 'H':
483 case 's': case 'i': case 'n':
484 case 'I': case 'J': case 'K': case 'L':
485 case 'M': case 'N': case 'O': case 'P':
487 /* These don't say anything we care about. */
491 class = reg_class_subunion
[(int) class][(int) GENERAL_REGS
];
496 = (reg_class_subunion
498 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c
, p
)]);
502 /* See if REGNO fits this alternative, and set it up as the
503 replacement register if we don't have one for this
504 alternative yet and the operand being replaced is not
505 a cheap CONST_INT. */
506 if (op_alt_regno
[i
][j
] == -1
507 && reg_fits_class_p (testreg
, class, 0, mode
)
508 && (GET_CODE (recog_data
.operand
[i
]) != CONST_INT
509 || (rtx_cost (recog_data
.operand
[i
], SET
)
510 > rtx_cost (testreg
, SET
))))
512 alternative_nregs
[j
]++;
513 op_alt_regno
[i
][j
] = regno
;
518 p
+= CONSTRAINT_LEN (c
, p
);
526 /* Record all alternatives which are better or equal to the currently
527 matching one in the alternative_order array. */
528 for (i
= j
= 0; i
< recog_data
.n_alternatives
; i
++)
529 if (alternative_reject
[i
] <= alternative_reject
[which_alternative
])
530 alternative_order
[j
++] = i
;
531 recog_data
.n_alternatives
= j
;
533 /* Sort it. Given a small number of alternatives, a dumb algorithm
534 won't hurt too much. */
535 for (i
= 0; i
< recog_data
.n_alternatives
- 1; i
++)
538 int best_reject
= alternative_reject
[alternative_order
[i
]];
539 int best_nregs
= alternative_nregs
[alternative_order
[i
]];
542 for (j
= i
+ 1; j
< recog_data
.n_alternatives
; j
++)
544 int this_reject
= alternative_reject
[alternative_order
[j
]];
545 int this_nregs
= alternative_nregs
[alternative_order
[j
]];
547 if (this_reject
< best_reject
548 || (this_reject
== best_reject
&& this_nregs
< best_nregs
))
551 best_reject
= this_reject
;
552 best_nregs
= this_nregs
;
556 tmp
= alternative_order
[best
];
557 alternative_order
[best
] = alternative_order
[i
];
558 alternative_order
[i
] = tmp
;
561 /* Substitute the operands as determined by op_alt_regno for the best
563 j
= alternative_order
[0];
565 for (i
= 0; i
< recog_data
.n_operands
; i
++)
567 enum machine_mode mode
= recog_data
.operand_mode
[i
];
568 if (op_alt_regno
[i
][j
] == -1)
571 validate_change (insn
, recog_data
.operand_loc
[i
],
572 gen_rtx_REG (mode
, op_alt_regno
[i
][j
]), 1);
575 for (i
= recog_data
.n_dups
- 1; i
>= 0; i
--)
577 int op
= recog_data
.dup_num
[i
];
578 enum machine_mode mode
= recog_data
.operand_mode
[op
];
580 if (op_alt_regno
[op
][j
] == -1)
583 validate_change (insn
, recog_data
.dup_loc
[i
],
584 gen_rtx_REG (mode
, op_alt_regno
[op
][j
]), 1);
587 return apply_change_group ();
590 /* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
592 This code might also be useful when reload gave up on reg+reg addressing
593 because of clashes between the return register and INDEX_REG_CLASS. */
595 /* The maximum number of uses of a register we can keep track of to
596 replace them with reg+reg addressing. */
597 #define RELOAD_COMBINE_MAX_USES 6
599 /* INSN is the insn where a register has been used, and USEP points to the
600 location of the register within the rtl. */
601 struct reg_use
{ rtx insn
, *usep
; };
603 /* If the register is used in some unknown fashion, USE_INDEX is negative.
604 If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
605 indicates where it becomes live again.
606 Otherwise, USE_INDEX is the index of the last encountered use of the
607 register (which is first among these we have seen since we scan backwards),
608 OFFSET contains the constant offset that is added to the register in
609 all encountered uses, and USE_RUID indicates the first encountered, i.e.
611 STORE_RUID is always meaningful if we only want to use a value in a
612 register in a different place: it denotes the next insn in the insn
613 stream (i.e. the last encountered) that sets or clobbers the register. */
616 struct reg_use reg_use
[RELOAD_COMBINE_MAX_USES
];
621 } reg_state
[FIRST_PSEUDO_REGISTER
];
623 /* Reverse linear uid. This is increased in reload_combine while scanning
624 the instructions from last to first. It is used to set last_label_ruid
625 and the store_ruid / use_ruid fields in reg_state. */
626 static int reload_combine_ruid
;
628 #define LABEL_LIVE(LABEL) \
629 (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
635 int first_index_reg
= -1;
636 int last_index_reg
= 0;
641 int min_labelno
, n_labels
;
642 HARD_REG_SET ever_live_at_start
, *label_live
;
644 /* If reg+reg can be used in offsetable memory addresses, the main chunk of
645 reload has already used it where appropriate, so there is no use in
646 trying to generate it now. */
647 if (double_reg_address_ok
&& INDEX_REG_CLASS
!= NO_REGS
)
650 /* To avoid wasting too much time later searching for an index register,
651 determine the minimum and maximum index register numbers. */
652 for (r
= 0; r
< FIRST_PSEUDO_REGISTER
; r
++)
653 if (TEST_HARD_REG_BIT (reg_class_contents
[INDEX_REG_CLASS
], r
))
655 if (first_index_reg
== -1)
661 /* If no index register is available, we can quit now. */
662 if (first_index_reg
== -1)
665 /* Set up LABEL_LIVE and EVER_LIVE_AT_START. The register lifetime
666 information is a bit fuzzy immediately after reload, but it's
667 still good enough to determine which registers are live at a jump
669 min_labelno
= get_first_label_num ();
670 n_labels
= max_label_num () - min_labelno
;
671 label_live
= (HARD_REG_SET
*) xmalloc (n_labels
* sizeof (HARD_REG_SET
));
672 CLEAR_HARD_REG_SET (ever_live_at_start
);
674 FOR_EACH_BB_REVERSE (bb
)
677 if (GET_CODE (insn
) == CODE_LABEL
)
681 REG_SET_TO_HARD_REG_SET (live
,
682 bb
->global_live_at_start
);
683 compute_use_by_pseudos (&live
,
684 bb
->global_live_at_start
);
685 COPY_HARD_REG_SET (LABEL_LIVE (insn
), live
);
686 IOR_HARD_REG_SET (ever_live_at_start
, live
);
690 /* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
691 last_label_ruid
= reload_combine_ruid
= 0;
692 for (r
= 0; r
< FIRST_PSEUDO_REGISTER
; r
++)
694 reg_state
[r
].store_ruid
= reload_combine_ruid
;
696 reg_state
[r
].use_index
= -1;
698 reg_state
[r
].use_index
= RELOAD_COMBINE_MAX_USES
;
701 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
705 /* We cannot do our optimization across labels. Invalidating all the use
706 information we have would be costly, so we just note where the label
707 is and then later disable any optimization that would cross it. */
708 if (GET_CODE (insn
) == CODE_LABEL
)
709 last_label_ruid
= reload_combine_ruid
;
710 else if (GET_CODE (insn
) == BARRIER
)
711 for (r
= 0; r
< FIRST_PSEUDO_REGISTER
; r
++)
713 reg_state
[r
].use_index
= RELOAD_COMBINE_MAX_USES
;
718 reload_combine_ruid
++;
720 /* Look for (set (REGX) (CONST_INT))
721 (set (REGX) (PLUS (REGX) (REGY)))
725 (set (REGZ) (CONST_INT))
727 ... (MEM (PLUS (REGZ) (REGY)))... .
729 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
730 and that we know all uses of REGX before it dies. */
731 set
= single_set (insn
);
733 && GET_CODE (SET_DEST (set
)) == REG
734 && (HARD_REGNO_NREGS (REGNO (SET_DEST (set
)),
735 GET_MODE (SET_DEST (set
)))
737 && GET_CODE (SET_SRC (set
)) == PLUS
738 && GET_CODE (XEXP (SET_SRC (set
), 1)) == REG
739 && rtx_equal_p (XEXP (SET_SRC (set
), 0), SET_DEST (set
))
740 && last_label_ruid
< reg_state
[REGNO (SET_DEST (set
))].use_ruid
)
742 rtx reg
= SET_DEST (set
);
743 rtx plus
= SET_SRC (set
);
744 rtx base
= XEXP (plus
, 1);
745 rtx prev
= prev_nonnote_insn (insn
);
746 rtx prev_set
= prev
? single_set (prev
) : NULL_RTX
;
747 unsigned int regno
= REGNO (reg
);
748 rtx const_reg
= NULL_RTX
;
749 rtx reg_sum
= NULL_RTX
;
751 /* Now, we need an index register.
752 We'll set index_reg to this index register, const_reg to the
753 register that is to be loaded with the constant
754 (denoted as REGZ in the substitution illustration above),
755 and reg_sum to the register-register that we want to use to
756 substitute uses of REG (typically in MEMs) with.
757 First check REG and BASE for being index registers;
758 we can use them even if they are not dead. */
759 if (TEST_HARD_REG_BIT (reg_class_contents
[INDEX_REG_CLASS
], regno
)
760 || TEST_HARD_REG_BIT (reg_class_contents
[INDEX_REG_CLASS
],
768 /* Otherwise, look for a free index register. Since we have
769 checked above that neither REG nor BASE are index registers,
770 if we find anything at all, it will be different from these
772 for (i
= first_index_reg
; i
<= last_index_reg
; i
++)
774 if (TEST_HARD_REG_BIT (reg_class_contents
[INDEX_REG_CLASS
],
776 && reg_state
[i
].use_index
== RELOAD_COMBINE_MAX_USES
777 && reg_state
[i
].store_ruid
<= reg_state
[regno
].use_ruid
778 && HARD_REGNO_NREGS (i
, GET_MODE (reg
)) == 1)
780 rtx index_reg
= gen_rtx_REG (GET_MODE (reg
), i
);
782 const_reg
= index_reg
;
783 reg_sum
= gen_rtx_PLUS (GET_MODE (reg
), index_reg
, base
);
789 /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
790 (REGY), i.e. BASE, is not clobbered before the last use we'll
793 && GET_CODE (SET_SRC (prev_set
)) == CONST_INT
794 && rtx_equal_p (SET_DEST (prev_set
), reg
)
795 && reg_state
[regno
].use_index
>= 0
796 && (reg_state
[REGNO (base
)].store_ruid
797 <= reg_state
[regno
].use_ruid
)
802 /* Change destination register and, if necessary, the
803 constant value in PREV, the constant loading instruction. */
804 validate_change (prev
, &SET_DEST (prev_set
), const_reg
, 1);
805 if (reg_state
[regno
].offset
!= const0_rtx
)
806 validate_change (prev
,
808 GEN_INT (INTVAL (SET_SRC (prev_set
))
809 + INTVAL (reg_state
[regno
].offset
)),
812 /* Now for every use of REG that we have recorded, replace REG
814 for (i
= reg_state
[regno
].use_index
;
815 i
< RELOAD_COMBINE_MAX_USES
; i
++)
816 validate_change (reg_state
[regno
].reg_use
[i
].insn
,
817 reg_state
[regno
].reg_use
[i
].usep
,
818 /* Each change must have its own
820 copy_rtx (reg_sum
), 1);
822 if (apply_change_group ())
826 /* Delete the reg-reg addition. */
829 if (reg_state
[regno
].offset
!= const0_rtx
)
830 /* Previous REG_EQUIV / REG_EQUAL notes for PREV
832 for (np
= ®_NOTES (prev
); *np
;)
834 if (REG_NOTE_KIND (*np
) == REG_EQUAL
835 || REG_NOTE_KIND (*np
) == REG_EQUIV
)
841 reg_state
[regno
].use_index
= RELOAD_COMBINE_MAX_USES
;
842 reg_state
[REGNO (const_reg
)].store_ruid
843 = reload_combine_ruid
;
849 note_stores (PATTERN (insn
), reload_combine_note_store
, NULL
);
851 if (GET_CODE (insn
) == CALL_INSN
)
855 for (r
= 0; r
< FIRST_PSEUDO_REGISTER
; r
++)
856 if (call_used_regs
[r
])
858 reg_state
[r
].use_index
= RELOAD_COMBINE_MAX_USES
;
859 reg_state
[r
].store_ruid
= reload_combine_ruid
;
862 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
;
863 link
= XEXP (link
, 1))
865 rtx usage_rtx
= XEXP (XEXP (link
, 0), 0);
866 if (GET_CODE (usage_rtx
) == REG
)
869 unsigned int start_reg
= REGNO (usage_rtx
);
870 unsigned int num_regs
=
871 HARD_REGNO_NREGS (start_reg
, GET_MODE (usage_rtx
));
872 unsigned int end_reg
= start_reg
+ num_regs
- 1;
873 for (i
= start_reg
; i
<= end_reg
; i
++)
874 if (GET_CODE (XEXP (link
, 0)) == CLOBBER
)
876 reg_state
[i
].use_index
= RELOAD_COMBINE_MAX_USES
;
877 reg_state
[i
].store_ruid
= reload_combine_ruid
;
880 reg_state
[i
].use_index
= -1;
885 else if (GET_CODE (insn
) == JUMP_INSN
886 && GET_CODE (PATTERN (insn
)) != RETURN
)
888 /* Non-spill registers might be used at the call destination in
889 some unknown fashion, so we have to mark the unknown use. */
892 if ((condjump_p (insn
) || condjump_in_parallel_p (insn
))
893 && JUMP_LABEL (insn
))
894 live
= &LABEL_LIVE (JUMP_LABEL (insn
));
896 live
= &ever_live_at_start
;
898 for (i
= FIRST_PSEUDO_REGISTER
- 1; i
>= 0; --i
)
899 if (TEST_HARD_REG_BIT (*live
, i
))
900 reg_state
[i
].use_index
= -1;
903 reload_combine_note_use (&PATTERN (insn
), insn
);
904 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
906 if (REG_NOTE_KIND (note
) == REG_INC
907 && GET_CODE (XEXP (note
, 0)) == REG
)
909 int regno
= REGNO (XEXP (note
, 0));
911 reg_state
[regno
].store_ruid
= reload_combine_ruid
;
912 reg_state
[regno
].use_index
= -1;
920 /* Check if DST is a register or a subreg of a register; if it is,
921 update reg_state[regno].store_ruid and reg_state[regno].use_index
922 accordingly. Called via note_stores from reload_combine. */
/* note_stores callback (see the comment above): if DST is a REG or a
   SUBREG of a REG, record the store in reg_state[].  A SUBREG is
   resolved to its hard-reg offset via subreg_regno_offset and stripped.
   Partial stores (non-SET, or ZERO_EXTRACT / SIGN_EXTRACT /
   STRICT_LOW_PART destinations) invalidate the affected registers
   (use_index = -1) since the old contents partly survive; full stores
   mark them dead (use_index = RELOAD_COMBINE_MAX_USES).  Both loops
   update store_ruid from reload_combine_ruid.
   NOTE(review): listing is elided; tokens below are unchanged.  */
925 reload_combine_note_store (dst
, set
, data
)
927 void *data ATTRIBUTE_UNUSED
;
931 enum machine_mode mode
= GET_MODE (dst
);
933 if (GET_CODE (dst
) == SUBREG
)
935 regno
= subreg_regno_offset (REGNO (SUBREG_REG (dst
)),
936 GET_MODE (SUBREG_REG (dst
)),
939 dst
= SUBREG_REG (dst
);
941 if (GET_CODE (dst
) != REG
)
943 regno
+= REGNO (dst
);
945 /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
946 careful with registers / register parts that are not full words.
948 Similarly for ZERO_EXTRACT and SIGN_EXTRACT. */
949 if (GET_CODE (set
) != SET
950 || GET_CODE (SET_DEST (set
)) == ZERO_EXTRACT
951 || GET_CODE (SET_DEST (set
)) == SIGN_EXTRACT
952 || GET_CODE (SET_DEST (set
)) == STRICT_LOW_PART
)
954 for (i
= HARD_REGNO_NREGS (regno
, mode
) - 1 + regno
; i
>= regno
; i
--)
956 reg_state
[i
].use_index
= -1;
957 reg_state
[i
].store_ruid
= reload_combine_ruid
;
962 for (i
= HARD_REGNO_NREGS (regno
, mode
) - 1 + regno
; i
>= regno
; i
--)
964 reg_state
[i
].store_ruid
= reload_combine_ruid
;
965 reg_state
[i
].use_index
= RELOAD_COMBINE_MAX_USES
;
970 /* XP points to a piece of rtl that has to be checked for any uses of
972 *XP is the pattern of INSN, or a part of it.
973 Called from reload_combine, and recursively by itself. */
975 reload_combine_note_use (xp
, insn
)
979 enum rtx_code code
= x
->code
;
982 rtx offset
= const0_rtx
; /* For the REG case below. */
987 if (GET_CODE (SET_DEST (x
)) == REG
)
989 reload_combine_note_use (&SET_SRC (x
), insn
);
995 /* If this is the USE of a return value, we can't change it. */
996 if (GET_CODE (XEXP (x
, 0)) == REG
&& REG_FUNCTION_VALUE_P (XEXP (x
, 0)))
998 /* Mark the return register as used in an unknown fashion. */
999 rtx reg
= XEXP (x
, 0);
1000 int regno
= REGNO (reg
);
1001 int nregs
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
1003 while (--nregs
>= 0)
1004 reg_state
[regno
+ nregs
].use_index
= -1;
1010 if (GET_CODE (SET_DEST (x
)) == REG
)
1012 /* No spurious CLOBBERs of pseudo registers may remain. */
1013 if (REGNO (SET_DEST (x
)) >= FIRST_PSEUDO_REGISTER
)
1020 /* We are interested in (plus (reg) (const_int)) . */
1021 if (GET_CODE (XEXP (x
, 0)) != REG
1022 || GET_CODE (XEXP (x
, 1)) != CONST_INT
)
1024 offset
= XEXP (x
, 1);
1029 int regno
= REGNO (x
);
1033 /* No spurious USEs of pseudo registers may remain. */
1034 if (regno
>= FIRST_PSEUDO_REGISTER
)
1037 nregs
= HARD_REGNO_NREGS (regno
, GET_MODE (x
));
1039 /* We can't substitute into multi-hard-reg uses. */
1042 while (--nregs
>= 0)
1043 reg_state
[regno
+ nregs
].use_index
= -1;
1047 /* If this register is already used in some unknown fashion, we
1049 If we decrement the index from zero to -1, we can't store more
1050 uses, so this register becomes used in an unknown fashion. */
1051 use_index
= --reg_state
[regno
].use_index
;
1055 if (use_index
!= RELOAD_COMBINE_MAX_USES
- 1)
1057 /* We have found another use for a register that is already
1058 used later. Check if the offsets match; if not, mark the
1059 register as used in an unknown fashion. */
1060 if (! rtx_equal_p (offset
, reg_state
[regno
].offset
))
1062 reg_state
[regno
].use_index
= -1;
1068 /* This is the first use of this register we have seen since we
1069 marked it as dead. */
1070 reg_state
[regno
].offset
= offset
;
1071 reg_state
[regno
].use_ruid
= reload_combine_ruid
;
1073 reg_state
[regno
].reg_use
[use_index
].insn
= insn
;
1074 reg_state
[regno
].reg_use
[use_index
].usep
= xp
;
1082 /* Recursively process the components of X. */
1083 fmt
= GET_RTX_FORMAT (code
);
1084 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1087 reload_combine_note_use (&XEXP (x
, i
), insn
);
1088 else if (fmt
[i
] == 'E')
1090 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1091 reload_combine_note_use (&XVECEXP (x
, i
, j
), insn
);
1096 /* See if we can reduce the cost of a constant by replacing a move
1097 with an add. We track situations in which a register is set to a
1098 constant or to a register plus a constant. */
1099 /* We cannot do our optimization across labels. Invalidating all the
1100 information about register contents we have would be costly, so we
1101 use move2add_last_label_luid to note where the label is and then
1102 later disable any optimization that would cross it.
1103 reg_offset[n] / reg_base_reg[n] / reg_mode[n] are only valid if
1104 reg_set_luid[n] is greater than move2add_last_label_luid. */
1105 static int reg_set_luid
[FIRST_PSEUDO_REGISTER
];
1107 /* If reg_base_reg[n] is negative, register n has been set to
1108 reg_offset[n] in mode reg_mode[n] .
1109 If reg_base_reg[n] is non-negative, register n has been set to the
1110 sum of reg_offset[n] and the value of register reg_base_reg[n]
1111 before reg_set_luid[n], calculated in mode reg_mode[n] . */
1112 static HOST_WIDE_INT reg_offset
[FIRST_PSEUDO_REGISTER
];
1113 static int reg_base_reg
[FIRST_PSEUDO_REGISTER
];
1114 static enum machine_mode reg_mode
[FIRST_PSEUDO_REGISTER
];
1116 /* move2add_luid is linearly increased while scanning the instructions
1117 from first to last. It is used to set reg_set_luid in
1118 reload_cse_move2add and move2add_note_store. */
1119 static int move2add_luid
;
1121 /* move2add_last_label_luid is set whenever a label is found. Labels
1122 invalidate all previously collected reg_offset data. */
1123 static int move2add_last_label_luid
;
1125 /* ??? We don't know how zero / sign extension is handled, hence we
1126 can't go from a narrower to a wider mode. */
1127 #define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
1128 (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
1129 || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
1130 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (OUTMODE), \
1131 GET_MODE_BITSIZE (INMODE))))
1134 reload_cse_move2add (first
)
1140 for (i
= FIRST_PSEUDO_REGISTER
- 1; i
>= 0; i
--)
1141 reg_set_luid
[i
] = 0;
1143 move2add_last_label_luid
= 0;
1145 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
), move2add_luid
++)
1149 if (GET_CODE (insn
) == CODE_LABEL
)
1151 move2add_last_label_luid
= move2add_luid
;
1152 /* We're going to increment move2add_luid twice after a
1153 label, so that we can use move2add_last_label_luid + 1 as
1154 the luid for constants. */
1158 if (! INSN_P (insn
))
1160 pat
= PATTERN (insn
);
1161 /* For simplicity, we only perform this optimization on
1162 straightforward SETs. */
1163 if (GET_CODE (pat
) == SET
1164 && GET_CODE (SET_DEST (pat
)) == REG
)
1166 rtx reg
= SET_DEST (pat
);
1167 int regno
= REGNO (reg
);
1168 rtx src
= SET_SRC (pat
);
1170 /* Check if we have valid information on the contents of this
1171 register in the mode of REG. */
1172 if (reg_set_luid
[regno
] > move2add_last_label_luid
1173 && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg
), reg_mode
[regno
]))
1175 /* Try to transform (set (REGX) (CONST_INT A))
1177 (set (REGX) (CONST_INT B))
1179 (set (REGX) (CONST_INT A))
1181 (set (REGX) (plus (REGX) (CONST_INT B-A)))
1183 (set (REGX) (CONST_INT A))
1185 (set (STRICT_LOW_PART (REGX)) (CONST_INT B))
1188 if (GET_CODE (src
) == CONST_INT
&& reg_base_reg
[regno
] < 0)
1191 GEN_INT (trunc_int_for_mode (INTVAL (src
)
1192 - reg_offset
[regno
],
1194 /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
1195 use (set (reg) (reg)) instead.
1196 We don't delete this insn, nor do we convert it into a
1197 note, to avoid losing register notes or the return
1198 value flag. jump2 already knows how to get rid of
1200 if (new_src
== const0_rtx
)
1202 /* If the constants are different, this is a
1203 truncation, that, if turned into (set (reg)
1204 (reg)), would be discarded. Maybe we should
1205 try a truncMN pattern? */
1206 if (INTVAL (src
) == reg_offset
[regno
])
1207 validate_change (insn
, &SET_SRC (pat
), reg
, 0);
1209 else if (rtx_cost (new_src
, PLUS
) < rtx_cost (src
, SET
)
1210 && have_add2_insn (reg
, new_src
))
1212 rtx newpat
= gen_add2_insn (reg
, new_src
);
1213 if (INSN_P (newpat
) && NEXT_INSN (newpat
) == NULL_RTX
)
1214 newpat
= PATTERN (newpat
);
1215 /* If it was the first insn of a sequence or
1216 some other emitted insn, validate_change will
1218 validate_change (insn
, &PATTERN (insn
),
1223 enum machine_mode narrow_mode
;
1224 for (narrow_mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1225 narrow_mode
!= GET_MODE (reg
);
1226 narrow_mode
= GET_MODE_WIDER_MODE (narrow_mode
))
1228 if (have_insn_for (STRICT_LOW_PART
, narrow_mode
)
1229 && ((reg_offset
[regno
]
1230 & ~GET_MODE_MASK (narrow_mode
))
1232 & ~GET_MODE_MASK (narrow_mode
))))
1234 rtx narrow_reg
= gen_rtx_REG (narrow_mode
,
1237 GEN_INT (trunc_int_for_mode (INTVAL (src
),
1240 gen_rtx_SET (VOIDmode
,
1241 gen_rtx_STRICT_LOW_PART (VOIDmode
,
1244 if (validate_change (insn
, &PATTERN (insn
),
1250 reg_set_luid
[regno
] = move2add_luid
;
1251 reg_mode
[regno
] = GET_MODE (reg
);
1252 reg_offset
[regno
] = INTVAL (src
);
1256 /* Try to transform (set (REGX) (REGY))
1257 (set (REGX) (PLUS (REGX) (CONST_INT A)))
1260 (set (REGX) (PLUS (REGX) (CONST_INT B)))
1263 (set (REGX) (PLUS (REGX) (CONST_INT A)))
1265 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
1266 else if (GET_CODE (src
) == REG
1267 && reg_set_luid
[regno
] == reg_set_luid
[REGNO (src
)]
1268 && reg_base_reg
[regno
] == reg_base_reg
[REGNO (src
)]
1269 && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg
),
1270 reg_mode
[REGNO (src
)]))
1272 rtx next
= next_nonnote_insn (insn
);
1275 set
= single_set (next
);
1277 && SET_DEST (set
) == reg
1278 && GET_CODE (SET_SRC (set
)) == PLUS
1279 && XEXP (SET_SRC (set
), 0) == reg
1280 && GET_CODE (XEXP (SET_SRC (set
), 1)) == CONST_INT
)
1282 rtx src3
= XEXP (SET_SRC (set
), 1);
1283 HOST_WIDE_INT added_offset
= INTVAL (src3
);
1284 HOST_WIDE_INT base_offset
= reg_offset
[REGNO (src
)];
1285 HOST_WIDE_INT regno_offset
= reg_offset
[regno
];
1287 GEN_INT (trunc_int_for_mode (added_offset
1293 if (new_src
== const0_rtx
)
1294 /* See above why we create (set (reg) (reg)) here. */
1296 = validate_change (next
, &SET_SRC (set
), reg
, 0);
1297 else if ((rtx_cost (new_src
, PLUS
)
1298 < COSTS_N_INSNS (1) + rtx_cost (src3
, SET
))
1299 && have_add2_insn (reg
, new_src
))
1301 rtx newpat
= gen_add2_insn (reg
, new_src
);
1303 && NEXT_INSN (newpat
) == NULL_RTX
)
1304 newpat
= PATTERN (newpat
);
1306 = validate_change (next
, &PATTERN (next
),
1312 reg_mode
[regno
] = GET_MODE (reg
);
1314 trunc_int_for_mode (added_offset
+ base_offset
,
1322 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
1324 if (REG_NOTE_KIND (note
) == REG_INC
1325 && GET_CODE (XEXP (note
, 0)) == REG
)
1327 /* Reset the information about this register. */
1328 int regno
= REGNO (XEXP (note
, 0));
1329 if (regno
< FIRST_PSEUDO_REGISTER
)
1330 reg_set_luid
[regno
] = 0;
1333 note_stores (PATTERN (insn
), move2add_note_store
, NULL
);
1335 /* If INSN is a conditional branch, we try to extract an
1336 implicit set out of it. */
1337 if (any_condjump_p (insn
) && onlyjump_p (insn
))
1339 rtx cnd
= fis_get_condition (insn
);
1342 && GET_CODE (cnd
) == NE
1343 && GET_CODE (XEXP (cnd
, 0)) == REG
1344 /* The following two checks, which are also in
1345 move2add_note_store, are intended to reduce the
1346 number of calls to gen_rtx_SET to avoid memory
1347 allocation if possible. */
1348 && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd
, 0)))
1349 && HARD_REGNO_NREGS (REGNO (XEXP (cnd
, 0)), GET_MODE (XEXP (cnd
, 0))) == 1
1350 && GET_CODE (XEXP (cnd
, 1)) == CONST_INT
)
1353 gen_rtx_SET (VOIDmode
, XEXP (cnd
, 0), XEXP (cnd
, 1));
1354 move2add_note_store (SET_DEST (implicit_set
), implicit_set
, 0);
1358 /* If this is a CALL_INSN, all call used registers are stored with
1360 if (GET_CODE (insn
) == CALL_INSN
)
1362 for (i
= FIRST_PSEUDO_REGISTER
- 1; i
>= 0; i
--)
1364 if (call_used_regs
[i
])
1365 /* Reset the information about this register. */
1366 reg_set_luid
[i
] = 0;
1372 /* SET is a SET or CLOBBER that sets DST.
1373 Update reg_set_luid, reg_offset and reg_base_reg accordingly.
1374 Called from reload_cse_move2add via note_stores. */
1377 move2add_note_store (dst
, set
, data
)
1379 void *data ATTRIBUTE_UNUSED
;
1381 unsigned int regno
= 0;
1383 enum machine_mode mode
= GET_MODE (dst
);
1385 if (GET_CODE (dst
) == SUBREG
)
1387 regno
= subreg_regno_offset (REGNO (SUBREG_REG (dst
)),
1388 GET_MODE (SUBREG_REG (dst
)),
1391 dst
= SUBREG_REG (dst
);
1394 /* Some targets do argument pushes without adding REG_INC notes. */
1396 if (GET_CODE (dst
) == MEM
)
1398 dst
= XEXP (dst
, 0);
1399 if (GET_CODE (dst
) == PRE_INC
|| GET_CODE (dst
) == POST_INC
1400 || GET_CODE (dst
) == PRE_DEC
|| GET_CODE (dst
) == POST_DEC
)
1401 reg_set_luid
[REGNO (XEXP (dst
, 0))] = 0;
1404 if (GET_CODE (dst
) != REG
)
1407 regno
+= REGNO (dst
);
1409 if (SCALAR_INT_MODE_P (mode
)
1410 && HARD_REGNO_NREGS (regno
, mode
) == 1 && GET_CODE (set
) == SET
1411 && GET_CODE (SET_DEST (set
)) != ZERO_EXTRACT
1412 && GET_CODE (SET_DEST (set
)) != SIGN_EXTRACT
1413 && GET_CODE (SET_DEST (set
)) != STRICT_LOW_PART
)
1415 rtx src
= SET_SRC (set
);
1417 HOST_WIDE_INT offset
;
1419 /* This may be different from mode, if SET_DEST (set) is a
1421 enum machine_mode dst_mode
= GET_MODE (dst
);
1423 switch (GET_CODE (src
))
1426 if (GET_CODE (XEXP (src
, 0)) == REG
)
1428 base_reg
= XEXP (src
, 0);
1430 if (GET_CODE (XEXP (src
, 1)) == CONST_INT
)
1431 offset
= INTVAL (XEXP (src
, 1));
1432 else if (GET_CODE (XEXP (src
, 1)) == REG
1433 && (reg_set_luid
[REGNO (XEXP (src
, 1))]
1434 > move2add_last_label_luid
)
1435 && (MODES_OK_FOR_MOVE2ADD
1436 (dst_mode
, reg_mode
[REGNO (XEXP (src
, 1))])))
1438 if (reg_base_reg
[REGNO (XEXP (src
, 1))] < 0)
1439 offset
= reg_offset
[REGNO (XEXP (src
, 1))];
1440 /* Maybe the first register is known to be a
1442 else if (reg_set_luid
[REGNO (base_reg
)]
1443 > move2add_last_label_luid
1444 && (MODES_OK_FOR_MOVE2ADD
1445 (dst_mode
, reg_mode
[REGNO (XEXP (src
, 1))]))
1446 && reg_base_reg
[REGNO (base_reg
)] < 0)
1448 offset
= reg_offset
[REGNO (base_reg
)];
1449 base_reg
= XEXP (src
, 1);
1468 /* Start tracking the register as a constant. */
1469 reg_base_reg
[regno
] = -1;
1470 reg_offset
[regno
] = INTVAL (SET_SRC (set
));
1471 /* We assign the same luid to all registers set to constants. */
1472 reg_set_luid
[regno
] = move2add_last_label_luid
+ 1;
1473 reg_mode
[regno
] = mode
;
1478 /* Invalidate the contents of the register. */
1479 reg_set_luid
[regno
] = 0;
1483 base_regno
= REGNO (base_reg
);
1484 /* If information about the base register is not valid, set it
1485 up as a new base register, pretending its value is known
1486 starting from the current insn. */
1487 if (reg_set_luid
[base_regno
] <= move2add_last_label_luid
)
1489 reg_base_reg
[base_regno
] = base_regno
;
1490 reg_offset
[base_regno
] = 0;
1491 reg_set_luid
[base_regno
] = move2add_luid
;
1492 reg_mode
[base_regno
] = mode
;
1494 else if (! MODES_OK_FOR_MOVE2ADD (dst_mode
,
1495 reg_mode
[base_regno
]))
1498 reg_mode
[regno
] = mode
;
1500 /* Copy base information from our base register. */
1501 reg_set_luid
[regno
] = reg_set_luid
[base_regno
];
1502 reg_base_reg
[regno
] = reg_base_reg
[base_regno
];
1504 /* Compute the sum of the offsets or constants. */
1505 reg_offset
[regno
] = trunc_int_for_mode (offset
1506 + reg_offset
[base_regno
],
1511 unsigned int endregno
= regno
+ HARD_REGNO_NREGS (regno
, mode
);
1513 for (i
= regno
; i
< endregno
; i
++)
1514 /* Reset the information about this register. */
1515 reg_set_luid
[i
] = 0;