/* Procedure integration for GCC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "debug.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "params.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"

/* Round to the next highest integer that meets the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
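/* Illustrative example (not part of the original source):
   CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 16.  ALIGN must be a power
   of 2 for the mask arithmetic to be exact.  */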
/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;

static void subst_constants (rtx *, rtx, struct inline_remap *, int);
static void set_block_origin_self (tree);
static void set_block_abstract_flags (tree, int);
static void mark_stores (rtx, rtx, void *);

/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (struct inline_remap *map, int i)
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}

/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
bool
function_attribute_inlinable_p (tree fndecl)
{
  if (targetm.attribute_table)
    {
      tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
        {
          tree name = TREE_PURPOSE (a);
          int i;

          for (i = 0; targetm.attribute_table[i].name != NULL; i++)
            if (is_attribute_p (targetm.attribute_table[i].name, name))
              return targetm.function_attribute_inlinable_p (fndecl);
        }
    }

  return true;
}
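/* Hypothetical example: if a target's attribute table lists
   "interrupt", then for a function declared
       void handler (void) __attribute__ ((interrupt));
   the loop above finds the attribute and defers to the target hook
   targetm.function_attribute_inlinable_p, which may veto inlining.  */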
/* Copy NODE (which must be a DECL, but not a PARM_DECL).  The DECL
   originally was in the FROM_FN, but now it will be in the
   TO_FN.  */

tree
copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      tree type;
      int invisiref = 0;

      /* See if the frontend wants to pass this by invisible reference.  */
      if (TREE_CODE (decl) == PARM_DECL
          && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
          && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
          && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
        {
          invisiref = 1;
          type = DECL_ARG_TYPE (decl);
        }
      else
        type = TREE_TYPE (decl);

      /* For a parameter, we must make an equivalent VAR_DECL, not a
         new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      if (!invisiref)
        {
          TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
          TREE_READONLY (copy) = TREE_READONLY (decl);
          TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
        }
      else
        {
          TREE_ADDRESSABLE (copy) = 0;
          TREE_READONLY (copy) = 1;
          TREE_THIS_VOLATILE (copy) = 0;
        }
    }
  else
    {
      copy = copy_node (decl);
      /* The COPY is not abstract; it will be generated in TO_FN.  */
      DECL_ABSTRACT (copy) = 0;
      lang_hooks.dup_lang_specific_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
         address has been taken; it's for internal bookkeeping in
         expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
        {
          TREE_ADDRESSABLE (copy) = 0;
          DECL_TOO_LATE (copy) = 0;
        }
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
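/* Illustrative example (not from the original source): for a
   parameter of a large aggregate type that the front end passes by
   invisible reference, the copy built above is a VAR_DECL with the
   pointer type taken from DECL_ARG_TYPE, marked TREE_READONLY and not
   TREE_ADDRESSABLE, so later passes treat the incoming pointer as
   unchanging.  */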
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even if
   inlining since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
{
  rtx copy, temp;
  int i, j;
  RTX_CODE code;
  enum machine_mode mode;
  const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
         stack-adjustments (*not* because we eliminated the frame pointer!).
         Small hard registers are returned as-is.  Pseudo-registers
         go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
        {
          /* Some hard registers are also mapped,
             but others are not translated.  */
          if (map->reg_map[regno] != 0)
            return map->reg_map[regno];

          /* If this is the virtual frame pointer, make space in current
             function's stack frame for the stack frame of the inline function.

             Copy the address of this area into a pseudo.  Map
             virtual_stack_vars_rtx to this pseudo and set up a constant
             equivalence for it to be the address.  This will substitute the
             address into insns where it can be substituted and use the new
             pseudo where it can't.  */
          else if (regno == VIRTUAL_STACK_VARS_REGNUM)
            {
              rtx loc, seq;
              int size
                = get_func_frame_size (DECL_STRUCT_FUNCTION (map->fndecl));
#ifdef FRAME_GROWS_DOWNWARD
              int alignment
                = (DECL_STRUCT_FUNCTION (map->fndecl)->stack_alignment_needed
                   / BITS_PER_UNIT);

              /* In this case, virtual_stack_vars_rtx points to one byte
                 higher than the top of the frame area.  So make sure we
                 allocate a big enough chunk to keep the frame pointer
                 aligned like a real one.  */
              if (alignment)
                size = CEIL_ROUND (size, alignment);
#endif
              start_sequence ();
              loc = assign_stack_temp (BLKmode, size, 1);
              loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
              /* In this case, virtual_stack_vars_rtx points to one byte
                 higher than the top of the frame area.  So compute the offset
                 to one byte higher than our substitute frame.  */
              loc = plus_constant (loc, size);
#endif
              map->reg_map[regno] = temp
                = force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
              mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

              SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

              seq = get_insns ();
              end_sequence ();
              emit_insn_after (seq, map->insns_at_start);
              return temp;
            }
          else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
            {
              /* Do the same for a block to contain any arguments referenced
                 in memory.  */
              rtx loc, seq;
              int size = DECL_STRUCT_FUNCTION (map->fndecl)->args_size;

              start_sequence ();
              loc = assign_stack_temp (BLKmode, size, 1);
              loc = XEXP (loc, 0);
              /* When arguments grow downward, the virtual incoming
                 args pointer points to the top of the argument block,
                 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
              loc = plus_constant (loc, size);
#endif
              map->reg_map[regno] = temp
                = force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
              mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

              SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

              seq = get_insns ();
              end_sequence ();
              emit_insn_after (seq, map->insns_at_start);
              return temp;
            }
          else if (REG_FUNCTION_VALUE_P (orig))
            {
              if (rtx_equal_function_value_matters)
                /* This is an ignored return value.  We must not
                   leave it in with REG_FUNCTION_VALUE_P set, since
                   that would confuse subsequent inlining of the
                   current function into a later function.  */
                return gen_rtx_REG (GET_MODE (orig), regno);
              else
                /* Must be unrolling loops or replicating code if we
                   reach here, so return the register unchanged.  */
                return orig;
            }
          else
            return orig;

          abort ();
        }

      if (map->reg_map[regno] == NULL)
        {
          map->reg_map[regno] = gen_reg_rtx (mode);
          REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
          REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
          RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
          /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

          if (REG_POINTER (map->x_regno_reg_rtx[regno]))
            mark_reg_pointer (map->reg_map[regno],
                              map->regno_pointer_align[regno]);
        }
      return map->reg_map[regno];
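      /* Illustrative example (hypothetical register numbers): the first
         time (reg:SI 61) from the inlined body is seen, a fresh caller
         pseudo such as (reg:SI 105) is created and recorded in
         map->reg_map[61]; every later occurrence of register 61 then
         returns that same pseudo.  */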
    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
      return simplify_gen_subreg (GET_MODE (orig), copy,
                                  GET_MODE (SUBREG_REG (orig)),
                                  SUBREG_BYTE (orig));

    case ADDRESSOF:
      copy = gen_rtx_ADDRESSOF (mode,
                                copy_rtx_and_substitute (XEXP (orig, 0),
                                                         map, for_lhs),
                                0, ADDRESSOF_DECL (orig));
      regno = ADDRESSOF_REGNO (orig);
      if (map->reg_map[regno])
        regno = REGNO (map->reg_map[regno]);
      else if (regno > LAST_VIRTUAL_REGISTER)
        {
          temp = XEXP (orig, 0);
          map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
          REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
          REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
          RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
          /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

          /* Objects may initially be represented as registers, but
             turned into a MEM if their address is taken by
             put_var_into_stack.  Therefore, the register table may have
             entries which are MEMs.

             We briefly tried to clear such entries, but that ended up
             cascading into many changes due to the optimizers not being
             prepared for empty entries in the register table.  So we've
             decided to allow the MEMs in the register table for now.  */
          if (REG_P (map->x_regno_reg_rtx[regno])
              && REG_POINTER (map->x_regno_reg_rtx[regno]))
            mark_reg_pointer (map->reg_map[regno],
                              map->regno_pointer_align[regno]);
          regno = REGNO (map->reg_map[regno]);
        }
      ADDRESSOF_REGNO (copy) = regno;
      return copy;

    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
         to (use foo) if the original insn didn't have a subreg.
         Removing the subreg distorts the VAX movstrhi pattern
         by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
        copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);

      /* We need to handle "deleted" labels that appear in the DECL_RTL
         of a LABEL_DECL.  */
    case NOTE:
      if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
        break;

      /* Fall through.  */
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
        = LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

    case LABEL_REF:
      copy
        = gen_rtx_LABEL_REF
          (mode,
           LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
           : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
         it still is, so we must check if it is within the range of
         this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
        = (LABEL_REF_NONLOCAL_P (orig)
           && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
                 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      /* If we have made a nonlocal label local, it means that this
         inlined call will be referring to our nonlocal goto handler.
         So make sure we create one for this block; we normally would
         not since this is not otherwise considered a "call".  */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
        function_call_count++;

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
    case CONST_VECTOR:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
         pool must be modified to point to a constant pool entry for the
         remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
        {
          struct function *f = cfun;
          rtx constant = get_pool_constant_for_function (f, orig);
          if (GET_CODE (constant) == LABEL_REF)
            return XEXP (force_const_mem
                         (GET_MODE (orig),
                          copy_rtx_and_substitute (constant, map, for_lhs)),
                         0);
        }
      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't
         want to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
         duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (orig),
                                   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      break;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands then
         it contains multiple ASM_OPERANDS rtx's that share the input
         and constraint vecs.  We must make sure that the copied insn
         continues to share it.  */
      if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
        {
          copy = rtx_alloc (ASM_OPERANDS);
          RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
          PUT_MODE (copy, GET_MODE (orig));
          ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
          ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
            = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
          ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
          ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
          ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
            = map->copy_asm_constraints_vector;
          ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
          ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
          return copy;
        }
      break;

    case CALL:
      /* This is given special treatment because the first
         operand of a CALL is a (MEM ...) which may get
         forced into a register for cse.  This is undesirable
         if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
        {
          rtx copy
            = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
                           copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
                                                    map, 0));

          MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));

          return
            gen_rtx_CALL (GET_MODE (orig), copy,
                          copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
        }
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
      /* ??? Is this for the old or the new unroller?  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
         Adjust the setting by the offset of the area we made.
         If the nonlocal goto is into the current function,
         this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
          || SET_DEST (orig) == virtual_incoming_args_rtx)
        {
          /* In case a translation hasn't occurred already, make one now.  */
          rtx equiv_reg;
          rtx equiv_loc;
          HOST_WIDE_INT loc_offset;

          copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
          equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
          equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
                                          REGNO (equiv_reg)).rtx;
          loc_offset
            = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));

          return gen_rtx_SET (VOIDmode, SET_DEST (orig),
                              force_operand
                              (plus_constant
                               (copy_rtx_and_substitute (SET_SRC (orig),
                                                         map, 0),
                                - loc_offset),
                               NULL_RTX));
        }
      else
        return gen_rtx_SET (VOIDmode,
                            copy_rtx_and_substitute (SET_DEST (orig), map, 1),
                            copy_rtx_and_substitute (SET_SRC (orig), map, 0));
      break;
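      /* Illustrative example (hypothetical offset): if the inlined frame
         was remapped to a block whose recorded equivalence is
         (plus fp (const_int 32)), then loc_offset is 32 and a
         nonlocal-goto restore of the frame pointer is rewritten with its
         source decremented by 32, so addresses computed from it still
         fall inside the substitute frame.  */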
    case MEM:
      copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
                                                         map, 0));
      MEM_COPY_ATTRIBUTES (copy, orig);
      return copy;

    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
          X0ANY (copy, i) = X0ANY (orig, i);
          break;

        case 'e':
          XEXP (copy, i)
            = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
          break;

        case 'E':
          XVEC (copy, i) = XVEC (orig, i);
          if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
            {
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
              for (j = 0; j < XVECLEN (copy, i); j++)
                XVECEXP (copy, i, j)
                  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
                                             map, for_lhs);
            }
          break;

        case 'w':
          XWINT (copy, i) = XWINT (orig, i);
          break;

        case 'i':
          XINT (copy, i) = XINT (orig, i);
          break;

        case 's':
          XSTR (copy, i) = XSTR (orig, i);
          break;

        case 't':
          XTREE (copy, i) = XTREE (orig, i);
          break;

        default:
          abort ();
        }
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      map->copy_asm_constraints_vector
        = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (rtx insn, struct inline_remap *map)
{
  int i;

  map->num_sets = 0;

  /* First try just updating addresses, then other things.  This is
     important when we have something like the store of a constant
     into memory and we can update the memory address but the machine
     does not support a constant source.  */
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();
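  /* Concrete illustration (hypothetical insn): given
         (set (mem:SI (reg 70)) (reg 71))
     where (reg 70) is equivalent to (plus fp (const_int -8)) and
     (reg 71) to (const_int 0), the first pass rewrites only the address
     inside the MEM; the second pass then proposes (const_int 0) as the
     source, and apply_change_group keeps that change only if the target
     recognizes a constant store.  */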
  /* Enforce consistency between the addresses in the regular insn flow
     and the ones in CALL_INSN_FUNCTION_USAGE lists, if any.  */
  if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
    {
      subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
      apply_change_group ();
    }

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (GET_CODE (map->equiv_sets[i].dest) == REG)
        {
          int regno = REGNO (map->equiv_sets[i].dest);

          MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
          if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
              /* The following clause is a hack to make the case work where
                 GNU C++ reassigns a variable to make cse work right.  */
              || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
                                                    regno).rtx,
                                map->equiv_sets[i].equiv))
            SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
                                  map->equiv_sets[i].equiv, map->const_age);
        }
      else if (map->equiv_sets[i].dest == pc_rtx)
        map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
        map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust addresses of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.

   If MEMONLY is nonzero, only make changes inside a MEM.  */

static void
subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
{
  rtx x = *loc;
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      if (! memonly)
        validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
         some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
         hard regs used as user variables with constants.  */
      if (! memonly)
        {
          int regno = REGNO (x);
          struct const_equiv_data *p;

          if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
              && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
              && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
                  p->rtx != 0)
              && p->age >= map->const_age)
            validate_change (insn, loc, p->rtx, 1);
        }
      return;

    case SUBREG:
      /* SUBREG applied to something other than a reg
         should be treated as ordinary, since that must
         be a special hack and we don't know how to treat it specially.
         Consider for example mulsidi3 in m68k.md.
         Ordinary SUBREG of a REG needs this special treatment.  */
      if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
        {
          rtx inner = SUBREG_REG (x);
          rtx new = 0;

          /* We can't call subst_constants on &SUBREG_REG (x) because any
             constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
             see what is inside, try to form the new SUBREG and see if that is
             valid.  We handle two cases: extracting a full word in an
             integral mode and extracting the low part.  */
          subst_constants (&inner, NULL_RTX, map, 0);
          new = simplify_gen_subreg (GET_MODE (x), inner,
                                     GET_MODE (SUBREG_REG (x)),
                                     SUBREG_BYTE (x));

          if (new)
            validate_change (insn, loc, new, 1);
          else
            cancel_changes (num_changes);

          return;
        }
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map, 0);

      /* If a memory address got spoiled, change it back.  */
      if (! memonly && insn != 0 && num_validated_changes () != num_changes
          && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
        cancel_changes (num_changes);
      return;

    case SET:
      {
        /* Substitute constants in our source, and in any arguments to a
           complex (e.g., ZERO_EXTRACT) destination, but not in the
           destination itself.  */
        rtx *dest_loc = &SET_DEST (x);
        rtx dest = *dest_loc;
        rtx src, tem;
        enum machine_mode compare_mode = VOIDmode;

        /* If SET_SRC is a COMPARE which subst_constants would turn into
           COMPARE of 2 VOIDmode constants, note the mode in which comparison
           is to be done.  */
        if (GET_CODE (SET_SRC (x)) == COMPARE)
          {
            src = SET_SRC (x);
            if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
                || CC0_P (dest))
              {
                compare_mode = GET_MODE (XEXP (src, 0));
                if (compare_mode == VOIDmode)
                  compare_mode = GET_MODE (XEXP (src, 1));
              }
          }

        subst_constants (&SET_SRC (x), insn, map, memonly);
        src = SET_SRC (x);

        while (GET_CODE (*dest_loc) == ZERO_EXTRACT
               || GET_CODE (*dest_loc) == SUBREG
               || GET_CODE (*dest_loc) == STRICT_LOW_PART)
          {
            if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
              {
                subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
                subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
              }
            dest_loc = &XEXP (*dest_loc, 0);
          }

        /* Do substitute in the address of a destination in memory.  */
        if (GET_CODE (*dest_loc) == MEM)
          subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);

        /* Check for the case of DEST a SUBREG, both it and the underlying
           register are less than one word, and the SUBREG has the wider mode.
           In that case, we are really setting the underlying register to the
           source converted to the mode of DEST.  So indicate that.  */
        if (GET_CODE (dest) == SUBREG
            && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
            && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
            && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                <= GET_MODE_SIZE (GET_MODE (dest)))
            && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
                                               src)))
          src = tem, dest = SUBREG_REG (dest);

        /* If storing a recognizable value save it for later recording.  */
        if ((map->num_sets < MAX_RECOG_OPERANDS)
            && (CONSTANT_P (src)
                || (GET_CODE (src) == REG
                    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
                || (GET_CODE (src) == PLUS
                    && GET_CODE (XEXP (src, 0)) == REG
                    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
                    && CONSTANT_P (XEXP (src, 1)))
                || GET_CODE (src) == COMPARE
                || CC0_P (dest)
                || (dest == pc_rtx
                    && (src == pc_rtx || GET_CODE (src) == RETURN
                        || GET_CODE (src) == LABEL_REF))))
          {
            /* Normally, this copy won't do anything.  But, if SRC is a
               COMPARE it will cause us to save the COMPARE with any
               constants substituted, which is what we want for later.  */
            rtx src_copy = copy_rtx (src);
            map->equiv_sets[map->num_sets].equiv = src_copy;
            map->equiv_sets[map->num_sets++].dest = dest;
            if (compare_mode != VOIDmode
                && GET_CODE (src) == COMPARE
                && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
                    || CC0_P (dest))
                && GET_MODE (XEXP (src, 0)) == VOIDmode
                && GET_MODE (XEXP (src, 1)) == VOIDmode)
              {
                map->compare_src = src_copy;
                map->compare_mode = compare_mode;
              }
          }
      }
      return;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
          break;

        case 'e':
          if (XEXP (x, i))
            subst_constants (&XEXP (x, i), insn, map, memonly);
          break;

        case 'u':
        case 'i':
        case 's':
        case 'w':
        case 'n':
        case 't':
        case 'B':
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            for (j = 0; j < XVECLEN (x, i); j++)
              subst_constants (&XVECEXP (x, i, j), insn, map, memonly);

          break;

        default:
          abort ();
        }
    }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if (! memonly
      && (GET_RTX_CLASS (code) == RTX_COMM_ARITH
          || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }
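  /* E.g. (plus:SI (const_int 4) (reg:SI 60)) becomes
     (plus:SI (reg:SI 60) (const_int 4)) (illustrative operands).  */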
  /* Simplify the expression in case we put in some constants.  */
  if (! memonly)
    switch (GET_RTX_CLASS (code))
      {
      case RTX_UNARY:
        if (op0_mode == MAX_MACHINE_MODE)
          abort ();
        new = simplify_unary_operation (code, GET_MODE (x),
                                        XEXP (x, 0), op0_mode);
        break;

      case RTX_COMPARE:
      case RTX_COMM_COMPARE:
        {
          enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

          if (op_mode == VOIDmode)
            op_mode = GET_MODE (XEXP (x, 1));

          new = simplify_relational_operation (code, GET_MODE (x), op_mode,
                                               XEXP (x, 0), XEXP (x, 1));
          break;
        }

      case RTX_BIN_ARITH:
      case RTX_COMM_ARITH:
        new = simplify_binary_operation (code, GET_MODE (x),
                                         XEXP (x, 0), XEXP (x, 1));
        break;

      case RTX_BITFIELD_OPS:
      case RTX_TERNARY:
        if (op0_mode == MAX_MACHINE_MODE)
          abort ();

        if (code == IF_THEN_ELSE)
          {
            rtx op0 = XEXP (x, 0);

            if (COMPARISON_P (op0)
                && GET_MODE (op0) == VOIDmode
                && ! side_effects_p (op0)
                && XEXP (op0, 0) == map->compare_src
                && GET_MODE (XEXP (op0, 1)) == VOIDmode)
              {
                /* We have compare of two VOIDmode constants for which
                   we recorded the comparison mode.  */
                rtx tem =
                  simplify_gen_relational (GET_CODE (op0), GET_MODE (op0),
                                           map->compare_mode, XEXP (op0, 0),
                                           XEXP (op0, 1));

                if (GET_CODE (tem) != CONST_INT)
                  new = simplify_ternary_operation (code, GET_MODE (x),
                                                    op0_mode, tem, XEXP (x, 1),
                                                    XEXP (x, 2));
                else if (tem == const0_rtx)
                  new = XEXP (x, 2);
                else
                  new = XEXP (x, 1);
              }
          }
        if (!new)
          new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
                                            XEXP (x, 0), XEXP (x, 1),
                                            XEXP (x, 2));
        break;

      default:
        break;
      }

  if (new)
    validate_change (insn, loc, new, 1);
}
/* Show that a register modified here no longer contains known constants.
   We are called from note_stores with parts of the new insn.  */

static void
mark_stores (rtx dest, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
    {
      regno = REGNO (SUBREG_REG (dest));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
                                      GET_MODE (SUBREG_REG (dest)),
                                      SUBREG_BYTE (dest),
                                      GET_MODE (dest));
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      unsigned int uregno = regno;
      unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
                               : uregno + hard_regno_nregs[uregno][mode] - 1);
      unsigned int i;

      /* Ignore virtual stack var or virtual arg register since those
         are handled separately.  */
      if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
          && uregno != VIRTUAL_STACK_VARS_REGNUM)
        for (i = uregno; i <= last_reg; i++)
          if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
            VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}
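/* Illustrative example (assuming 32-bit words): a store to a DImode
   hard register occupies hard_regno_nregs[uregno][DImode] == 2
   consecutive hard registers, so the loop above clears the recorded
   equivalences of both of them.  */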
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (tree stmt)
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
        tree local_decl;

        for (local_decl = BLOCK_VARS (stmt);
             local_decl != NULL_TREE;
             local_decl = TREE_CHAIN (local_decl))
          set_decl_origin_self (local_decl);    /* Potential recursion.  */
      }

      {
        tree subblock;

        for (subblock = BLOCK_SUBBLOCKS (stmt);
             subblock != NULL_TREE;
             subblock = BLOCK_CHAIN (subblock))
          set_block_origin_self (subblock);     /* Recurse.  */
      }
    }
}

/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

void
set_decl_origin_self (tree decl)
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
        {
          tree arg;

          for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
            DECL_ABSTRACT_ORIGIN (arg) = arg;
          if (DECL_INITIAL (decl) != NULL_TREE
              && DECL_INITIAL (decl) != error_mark_node)
            set_block_origin_self (DECL_INITIAL (decl));
        }
    }
}
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (tree stmt, int setting)
{
  tree local_decl;
  tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (tree decl, int setting)
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
        DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
          && DECL_INITIAL (decl) != error_mark_node)
        set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
/* Functions to keep track of the values hard regs had at the start of
   the function.  */

rtx
get_hard_reg_initial_reg (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

rtx
has_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
      return ivs->entries[i].pseudo;

  return NULL_RTX;
}

rtx
get_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  rtx rv = has_func_hard_reg_initial_val (fun, reg);

  if (rv)
    return rv;

  if (ivs == 0)
    {
      fun->hard_reg_initial_vals = ggc_alloc (sizeof (initial_value_struct));
      ivs = fun->hard_reg_initial_vals;
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair));
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = ggc_realloc (ivs->entries,
                                  ivs->max_entries
                                  * sizeof (initial_value_pair));
    }

  ivs->entries[ivs->num_entries].hard_reg = reg;
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));

  return ivs->entries[ivs->num_entries++].pseudo;
}

rtx
get_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}

rtx
has_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}
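/* Typical backend use (hypothetical macro name): to refer to the value
   the link register had on entry, a target might call
       get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
   the first call creates the pseudo, later calls return the same one,
   and emit_initial_value_sets below emits the hard-reg-to-pseudo
   copies at the start of the function.  */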
void
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;
  rtx seq;

  if (ivs == 0)
    return;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_after (seq, get_insns ());
}

/* If the backend knows where to allocate pseudos for hard
   register initial values, register these allocations now.  */
void
allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
{
#ifdef ALLOCATE_INITIAL_VALUE
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return;

  for (i = 0; i < ivs->num_entries; i++)
    {
      int regno = REGNO (ivs->entries[i].pseudo);
      rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);

      if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
        ; /* Do nothing.  */
      else if (GET_CODE (x) == MEM)
        reg_equiv_memory_loc[regno] = x;
      else if (GET_CODE (x) == REG)
        {
          reg_renumber[regno] = REGNO (x);
          /* Poke the regno right into regno_reg_rtx
             so that even fixed regs are accepted.  */
          REGNO (ivs->entries[i].pseudo) = REGNO (x);
        }
      else
        abort ();
    }
#endif
}

#include "gt-integrate.h"