1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 91, 93-98, 1999 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "insn-flags.h"
32 #include "expr.h"
33 #include "output.h"
34 #include "recog.h"
35 #include "integrate.h"
36 #include "real.h"
37 #include "except.h"
38 #include "function.h"
39 #include "toplev.h"
40 #include "intl.h"
42 #include "obstack.h"
43 #define obstack_chunk_alloc xmalloc
44 #define obstack_chunk_free free
46 extern struct obstack *function_maybepermanent_obstack;
48 /* Round to the next highest integer that meets the
49 alignment. */
50 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
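/* A worked example (illustrative; ALIGN must be a power of two):
   CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 16, and CEIL_ROUND (16, 8) == 16. */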
52 /* Default max number of insns a function can have and still be inline.
53 This is overridden on RISC machines. */
54 #ifndef INTEGRATE_THRESHOLD
55 /* Inlining small functions might save more space than not inlining at
56 all. Assume 1 instruction for the call and 1.5 insns per argument. */
57 #define INTEGRATE_THRESHOLD(DECL) \
58 (optimize_size \
59 ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
60 : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
61 #endif
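/* A worked example of the default threshold above: a function with two
   arguments may have up to 1 + (3 * 2) / 2 = 4 insns when optimizing
   for size, and 8 * (8 + 2) = 80 insns otherwise. */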
63 static rtx initialize_for_inline PROTO((tree, int, int, int, int));
64 static void finish_inline PROTO((tree, rtx));
65 static void adjust_copied_decl_tree PROTO((tree));
66 static tree copy_decl_list PROTO((tree));
67 static tree copy_decl_tree PROTO((tree));
68 static void copy_decl_rtls PROTO((tree));
69 static void save_constants PROTO((rtx *));
70 static void note_modified_parmregs PROTO((rtx, rtx));
71 static rtx copy_for_inline PROTO((rtx));
72 static void integrate_parm_decls PROTO((tree, struct inline_remap *,
73 rtvec));
74 static void integrate_decl_tree PROTO((tree, int,
75 struct inline_remap *));
76 static void save_constants_in_decl_trees PROTO ((tree));
77 static void subst_constants PROTO((rtx *, rtx,
78 struct inline_remap *));
79 static void restore_constants PROTO((rtx *));
80 static void set_block_origin_self PROTO((tree));
81 static void set_decl_origin_self PROTO((tree));
82 static void set_block_abstract_flags PROTO((tree, int));
83 static void process_reg_param PROTO((struct inline_remap *, rtx,
84 rtx));
87 void set_decl_abstract_flags PROTO((tree, int));
88 static tree copy_and_set_decl_abstract_origin PROTO((tree));
90 /* The maximum number of instructions accepted for inlining a
91 function. Increasing values mean more aggressive inlining.
92 This affects currently only functions explicitly marked as
93 inline (or methods defined within the class definition for C++).
94 The default value of 10000 is arbitrary but high to match the
95 previously unlimited gcc capabilities. */
97 int inline_max_insns = 10000;
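/* This limit is adjusted from the command line via -finline-limit-<n>
   (see its use in function_cannot_inline_p below). */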
100 /* Returns the Ith entry in the label_map contained in MAP. If the
101 Ith entry has not yet been set, return a fresh label. This function
102 performs a lazy initialization of label_map, thereby avoiding huge memory
103 explosions when the label_map gets very large. */
106 get_label_from_map (map, i)
107 struct inline_remap *map;
108 int i;
110 rtx x = map->label_map[i];
112 if (x == NULL_RTX)
113 x = map->label_map[i] = gen_label_rtx();
115 return x;
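/* Typical use (a sketch; see expand_inline_function_eh_labelmap below
   for a real caller): map an old label number to its copy, creating the
   copy on demand:

     rtx new_label = get_label_from_map (map, CODE_LABEL_NUMBER (old_label));
 */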
118 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
119 is safe and reasonable to integrate into other functions.
120 Nonzero means value is a warning msgid with a single %s
121 for the function's name. */
123 const char *
124 function_cannot_inline_p (fndecl)
125 register tree fndecl;
127 register rtx insn;
128 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
130 /* For functions marked as inline, increase the maximum size to
131 inline_max_insns (-finline-limit-<n>). For regular functions,
132 use the limit given by INTEGRATE_THRESHOLD. */
134 int max_insns = (DECL_INLINE (fndecl))
135 ? (inline_max_insns
136 + 8 * list_length (DECL_ARGUMENTS (fndecl)))
137 : INTEGRATE_THRESHOLD (fndecl);
139 register int ninsns = 0;
140 register tree parms;
141 rtx result;
143 /* No inlines with varargs. */
144 if ((last && TREE_VALUE (last) != void_type_node)
145 || current_function_varargs)
146 return N_("varargs function cannot be inline");
148 if (current_function_calls_alloca)
149 return N_("function using alloca cannot be inline");
151 if (current_function_contains_functions)
152 return N_("function with nested functions cannot be inline");
154 if (current_function_cannot_inline)
155 return current_function_cannot_inline;
157 /* If it's not even close, don't even look. */
158 if (get_max_uid () > 3 * max_insns)
159 return N_("function too large to be inline");
161 #if 0
162 /* Don't inline functions which do not specify a function prototype and
163 have a BLKmode argument or take the address of a parameter. */
164 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
166 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
167 TREE_ADDRESSABLE (parms) = 1;
168 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
169 return N_("no prototype, and parameter address used; cannot be inline");
171 #endif
173 /* We can't inline functions that return structures
174 the old-fashioned PCC way, copying into a static block. */
175 if (current_function_returns_pcc_struct)
176 return N_("inline functions not supported for this return value type");
178 /* We can't inline functions that return structures of varying size. */
179 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
180 return N_("function with varying-size return value cannot be inline");
182 /* Cannot inline a function with a varying size argument or one that
183 receives a transparent union. */
184 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
186 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
187 return N_("function with varying-size parameter cannot be inline");
188 else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
189 return N_("function with transparent unit parameter cannot be inline");
192 if (get_max_uid () > max_insns)
194 for (ninsns = 0, insn = get_first_nonparm_insn ();
195 insn && ninsns < max_insns;
196 insn = NEXT_INSN (insn))
197 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
198 ninsns++;
200 if (ninsns >= max_insns)
201 return N_("function too large to be inline");
204 /* We will not inline a function which uses computed goto. The addresses of
205 its local labels, which may be tucked into global storage, are of course
206 not constant across instantiations, which causes unexpected behaviour. */
207 if (current_function_has_computed_jump)
208 return N_("function with computed jump cannot inline");
210 /* We cannot inline a nested function that jumps to a nonlocal label. */
211 if (current_function_has_nonlocal_goto)
212 return N_("function with nonlocal goto cannot be inline");
214 /* This is a hack, until the inliner is taught about eh regions at
215 the start of the function. */
216 for (insn = get_insns ();
217 insn
218 && ! (GET_CODE (insn) == NOTE
219 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
220 insn = NEXT_INSN (insn))
222 if (insn && GET_CODE (insn) == NOTE
223 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
224 return N_("function with complex parameters cannot be inline");
227 /* We can't inline functions that return a PARALLEL rtx. */
228 result = DECL_RTL (DECL_RESULT (fndecl));
229 if (result && GET_CODE (result) == PARALLEL)
230 return N_("inline functions not supported for this return value type");
232 return 0;
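/* A sketch of how this predicate is typically consumed (the real caller
   lives in rest_of_compilation; this fragment is illustrative only):

     const char *lose = function_cannot_inline_p (fndecl);
     if (lose == 0)
       save_for_inline_nocopy (fndecl);
     else if (warn_inline && DECL_INLINE (fndecl))
       warning_with_decl (fndecl, lose);
 */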
235 /* Variables used within save_for_inline. */
237 /* Mapping from old pseudo-register to new pseudo-registers.
238 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
239 It is allocated in `save_for_inline' and `expand_inline_function',
240 and deallocated on exit from each of those routines. */
241 static rtx *reg_map;
243 /* Mapping from old code-labels to new code-labels.
244 The first element of this map is label_map[min_labelno].
245 It is allocated in `save_for_inline' and `expand_inline_function',
246 and deallocated on exit from each of those routines. */
247 static rtx *label_map;
249 /* Mapping from old insn uid's to copied insns.
250 It is allocated in `save_for_inline' and `expand_inline_function',
251 and deallocated on exit from each of those routines. */
252 static rtx *insn_map;
254 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
255 Zero for a reg that isn't a parm's home.
256 Only reg numbers less than max_parm_reg are mapped here. */
257 static tree *parmdecl_map;
259 /* Keep track of first pseudo-register beyond those that are parms. */
260 extern int max_parm_reg;
261 extern rtx *parm_reg_stack_loc;
263 /* When an insn is being copied by copy_for_inline,
264 this is nonzero if we have copied an ASM_OPERANDS.
265 In that case, it is the original input-operand vector. */
266 static rtvec orig_asm_operands_vector;
268 /* When an insn is being copied by copy_for_inline,
269 this is nonzero if we have copied an ASM_OPERANDS.
270 In that case, it is the copied input-operand vector. */
271 static rtvec copy_asm_operands_vector;
273 /* Likewise, this is the copied constraints vector. */
274 static rtvec copy_asm_constraints_vector;
276 /* In save_for_inline, nonzero if past the parm-initialization insns. */
277 static int in_nonparm_insns;
279 /* Subroutines passed to duplicate_eh_handlers to map exception labels. */
281 static rtx
282 save_for_inline_eh_labelmap (label)
283 rtx label;
285 int index = CODE_LABEL_NUMBER (label);
286 return label_map[index];
289 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
290 needed to save FNDECL's insns and info for future inline expansion. */
292 static rtx
293 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
294 tree fndecl;
295 int min_labelno;
296 int max_labelno;
297 int max_reg;
298 int copy;
300 int function_flags, i;
301 rtvec arg_vector;
302 tree parms;
304 /* Compute the values of any flags we must restore when inlining this. */
306 function_flags
307 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
308 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
309 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
310 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
311 + (current_function_returns_pcc_struct
312 * FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
313 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
314 + (current_function_has_nonlocal_label
315 * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
316 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
317 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
318 + (current_function_uses_pic_offset_table
319 * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
320 + current_function_has_computed_jump * FUNCTION_FLAGS_HAS_COMPUTED_JUMP);
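/* These flags are unpacked again at inline-expansion time with tests such
   as FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT; see
   expand_inline_function below. */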
322 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
323 bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
324 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
326 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
327 parms;
328 parms = TREE_CHAIN (parms), i++)
330 rtx p = DECL_RTL (parms);
331 int copied_incoming = 0;
333 /* If we have (mem (addressof (mem ...))), use the inner MEM since
334 otherwise the copy_rtx call below will not unshare the MEM since
335 it shares ADDRESSOF. */
336 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
337 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
338 p = XEXP (XEXP (p, 0), 0);
340 if (GET_CODE (p) == MEM && copy)
342 /* Copy the rtl so that modifications of the addresses
343 later in compilation won't affect this arg_vector.
344 Virtual register instantiation can screw the address
345 of the rtl. */
346 rtx new = copy_rtx (p);
348 /* Don't leave the old copy anywhere in this decl. */
349 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
350 || (GET_CODE (DECL_RTL (parms)) == MEM
351 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
352 && (XEXP (DECL_RTL (parms), 0)
353 == XEXP (DECL_INCOMING_RTL (parms), 0))))
354 DECL_INCOMING_RTL (parms) = new, copied_incoming = 1;
356 DECL_RTL (parms) = new;
359 RTVEC_ELT (arg_vector, i) = p;
361 if (GET_CODE (p) == REG)
362 parmdecl_map[REGNO (p)] = parms;
363 else if (GET_CODE (p) == CONCAT)
365 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
366 rtx pimag = gen_imagpart (GET_MODE (preal), p);
368 if (GET_CODE (preal) == REG)
369 parmdecl_map[REGNO (preal)] = parms;
370 if (GET_CODE (pimag) == REG)
371 parmdecl_map[REGNO (pimag)] = parms;
374 /* This flag is cleared later
375 if the function ever modifies the value of the parm. */
376 TREE_READONLY (parms) = 1;
378 /* Copy DECL_INCOMING_RTL if not done already. This can
379 happen if DECL_RTL is a reg. */
380 if (copy && ! copied_incoming)
382 p = DECL_INCOMING_RTL (parms);
384 /* If we have (mem (addressof (mem ...))), use the inner MEM since
385 otherwise the copy_rtx call below will not unshare the MEM since
386 it shares ADDRESSOF. */
387 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
388 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
389 p = XEXP (XEXP (p, 0), 0);
391 if (GET_CODE (p) == MEM)
392 DECL_INCOMING_RTL (parms) = copy_rtx (p);
396 /* Assume we start out in the insns that set up the parameters. */
397 in_nonparm_insns = 0;
399 /* The list of DECL_SAVED_INSNS starts off with a header which
400 contains the following information:
402 the first insn of the function (not including the insns that copy
403 parameters into registers),
404 the first parameter insn of the function,
405 the first label used by that function,
406 the last label used by that function,
407 the highest register number used for parameters,
408 the total number of registers used,
409 the size of the incoming stack area for parameters,
410 the number of bytes popped on return,
411 the stack slot list,
412 the labels that are forced to exist,
413 some flags that are used to restore compiler globals,
414 the value of current_function_outgoing_args_size,
415 the original argument vector,
416 the original DECL_INITIAL,
417 and pointers to the table of pseudo regs, pointer flags, and alignment. */
419 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
420 max_parm_reg, max_reg,
421 current_function_args_size,
422 current_function_pops_args,
423 stack_slot_list, forced_labels, function_flags,
424 current_function_outgoing_args_size,
425 arg_vector, (rtx) DECL_INITIAL (fndecl),
426 (rtvec) regno_reg_rtx, regno_pointer_flag,
427 regno_pointer_align,
428 (rtvec) parm_reg_stack_loc);
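/* The header built above is decoded through the accessor macros in
   integrate.h, e.g. FIRST_LABELNO, LAST_LABELNO, MAX_REGNUM,
   ORIGINAL_ARG_VECTOR and OUTGOING_ARGS_SIZE, all of which are used by
   expand_inline_function below. */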
431 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
432 things that must be done to make FNDECL expandable as an inline function.
433 HEAD contains the chain of insns to which FNDECL will expand. */
435 static void
436 finish_inline (fndecl, head)
437 tree fndecl;
438 rtx head;
440 FIRST_FUNCTION_INSN (head) = get_first_nonparm_insn ();
441 FIRST_PARM_INSN (head) = get_insns ();
442 DECL_SAVED_INSNS (fndecl) = head;
443 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
446 /* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
447 they all point to the new (copied) rtxs. */
449 static void
450 adjust_copied_decl_tree (block)
451 register tree block;
453 register tree subblock;
454 register rtx original_end;
456 original_end = BLOCK_END_NOTE (block);
457 if (original_end)
459 BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
460 NOTE_SOURCE_FILE (original_end) = 0;
463 /* Process all subblocks. */
464 for (subblock = BLOCK_SUBBLOCKS (block);
465 subblock;
466 subblock = TREE_CHAIN (subblock))
467 adjust_copied_decl_tree (subblock);
470 /* Make the insns and PARM_DECLs of the current function permanent
471 and record other information in DECL_SAVED_INSNS to allow inlining
472 of this function in subsequent calls.
474 This function is called when we are going to immediately compile
475 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
476 modified by the compilation process, so we copy all of them to
477 new storage and consider the new insns to be the insn chain to be
478 compiled. Our caller (rest_of_compilation) saves the original
479 DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
481 /* ??? The nonlocal_label list should be adjusted also. However, since
482 a function that contains a nested function never gets inlined currently,
483 the nonlocal_label list will always be empty, so we don't worry about
484 it for now. */
486 void
487 save_for_inline_copying (fndecl)
488 tree fndecl;
490 rtx first_insn, last_insn, insn;
491 rtx head, copy;
492 int max_labelno, min_labelno, i, len;
493 int max_reg;
494 int max_uid;
495 rtx first_nonparm_insn;
496 char *new, *new1;
497 rtx *new_parm_reg_stack_loc;
498 rtx *new2;
500 /* Make and emit a return-label if we have not already done so.
501 Do this before recording the bounds on label numbers. */
503 if (return_label == 0)
505 return_label = gen_label_rtx ();
506 emit_label (return_label);
509 /* Get some bounds on the labels and registers used. */
511 max_labelno = max_label_num ();
512 min_labelno = get_first_label_num ();
513 max_reg = max_reg_num ();
515 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
516 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
517 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
518 for the parms, prior to elimination of virtual registers.
519 These values are needed for substituting parms properly. */
521 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
523 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
525 if (current_function_uses_const_pool)
527 /* Replace any constant pool references with the actual constant. We
528 will put the constants back in the copy made below. */
529 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
530 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
532 save_constants (&PATTERN (insn));
533 if (REG_NOTES (insn))
534 save_constants (&REG_NOTES (insn));
537 /* Also scan all decls, and replace any constant pool references with the
538 actual constant. */
539 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
541 /* Clear out the constant pool so that we can recreate it with the
542 copied constants below. */
543 init_const_rtx_hash_table ();
544 clear_const_double_mem ();
547 max_uid = INSN_UID (head);
549 /* We have now allocated all that needs to be allocated permanently
550 on the rtx obstack. Set our high-water mark, so that we
551 can free the rest of this when the time comes. */
553 preserve_data ();
555 /* Copy the chain insns of this function.
556 Install the copied chain as the insns of this function,
557 for continued compilation;
558 the original chain is recorded as the DECL_SAVED_INSNS
559 for inlining future calls. */
561 /* If there are insns that copy parms from the stack into pseudo registers,
562 those insns are not copied. `expand_inline_function' must
563 emit the correct code to handle such things. */
565 insn = get_insns ();
566 if (GET_CODE (insn) != NOTE)
567 abort ();
568 first_insn = rtx_alloc (NOTE);
569 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
570 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
571 INSN_UID (first_insn) = INSN_UID (insn);
572 PREV_INSN (first_insn) = NULL;
573 NEXT_INSN (first_insn) = NULL;
574 last_insn = first_insn;
576 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
577 Make these new rtx's now, and install them in regno_reg_rtx, so they
578 will be the official pseudo-reg rtx's for the rest of compilation. */
580 reg_map = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));
582 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
583 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
584 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
585 regno_reg_rtx[i], len);
587 regno_reg_rtx = reg_map;
589 /* Put copies of all the virtual register rtx into the new regno_reg_rtx. */
590 init_virtual_regs ();
592 /* Likewise each label rtx must have a unique rtx as its copy. */
594 /* We used to use alloca here, but the size of what it would try to
595 allocate would occasionally cause it to exceed the stack limit and
596 cause unpredictable core dumps. Some examples were > 2Mb in size. */
597 label_map = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
599 for (i = min_labelno; i < max_labelno; i++)
600 label_map[i] = gen_label_rtx ();
602 /* Likewise for parm_reg_stack_loc. */
603 new_parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
604 for (i = 0; i < max_parm_reg; i++)
605 new_parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
607 parm_reg_stack_loc = new_parm_reg_stack_loc;
609 /* Record the mapping of old insns to copied insns. */
611 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
612 bzero ((char *) insn_map, max_uid * sizeof (rtx));
614 /* Get the insn which signals the end of parameter setup code. */
615 first_nonparm_insn = get_first_nonparm_insn ();
617 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
618 (the former occurs when a variable has its address taken)
619 since these may be shared and can be changed by virtual
620 register instantiation. DECL_RTL values for our arguments
621 have already been copied by initialize_for_inline. */
622 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
623 if (GET_CODE (regno_reg_rtx[i]) == MEM)
624 XEXP (regno_reg_rtx[i], 0)
625 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
627 /* Copy the parm_reg_stack_loc array, and substitute for all of the rtx
628 contained in it. */
629 new2 = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
630 bcopy ((char *) parm_reg_stack_loc, (char *) new2,
631 max_parm_reg * sizeof (rtx));
632 parm_reg_stack_loc = new2;
633 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; ++i)
634 if (parm_reg_stack_loc[i])
635 parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
637 /* Copy the tree of subblocks of the function, and the decls in them.
638 We will use the copy for compiling this function, then restore the original
639 subblocks and decls for use when inlining this function.
641 Several parts of the compiler modify BLOCK trees. In particular,
642 instantiate_virtual_regs will instantiate any virtual regs
643 mentioned in the DECL_RTLs of the decls, and loop
644 unrolling will replicate any BLOCK trees inside an unrolled loop.
646 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
647 which we will use for inlining. The rtl might even contain pseudoregs
648 whose space has been freed. */
650 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
651 DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
653 /* Now copy each DECL_RTL which is a MEM,
654 so it is safe to modify their addresses. */
655 copy_decl_rtls (DECL_INITIAL (fndecl));
657 /* The fndecl node acts as its own progenitor, so mark it as such. */
658 DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
660 /* Now copy the chain of insns. Do this twice: the first pass copies the
661 insn itself and its body; the second pass copies the REG_NOTES. This is
662 because a REG_NOTE may have a forward pointer to another insn. */
664 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
666 orig_asm_operands_vector = 0;
668 if (insn == first_nonparm_insn)
669 in_nonparm_insns = 1;
671 switch (GET_CODE (insn))
673 case NOTE:
674 /* No need to keep these. */
675 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
676 continue;
678 copy = rtx_alloc (NOTE);
679 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
680 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
681 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
682 else
684 NOTE_SOURCE_FILE (insn) = (char *) copy;
685 NOTE_SOURCE_FILE (copy) = 0;
687 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
688 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
690 int new_region = CODE_LABEL_NUMBER
691 (label_map[NOTE_BLOCK_NUMBER (copy)]);
693 /* We have to duplicate the handlers for the original region. */
694 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
695 duplicate_eh_handlers (NOTE_BLOCK_NUMBER (copy), new_region,
696 save_for_inline_eh_labelmap);
698 /* We have to forward these both to match the new exception
699 region. */
700 NOTE_BLOCK_NUMBER (copy) = new_region;
703 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
704 break;
706 case INSN:
707 case JUMP_INSN:
708 case CALL_INSN:
709 copy = rtx_alloc (GET_CODE (insn));
711 if (GET_CODE (insn) == CALL_INSN)
712 CALL_INSN_FUNCTION_USAGE (copy)
713 = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));
715 PATTERN (copy) = copy_for_inline (PATTERN (insn));
716 INSN_CODE (copy) = -1;
717 LOG_LINKS (copy) = NULL_RTX;
718 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
719 break;
721 case CODE_LABEL:
722 copy = label_map[CODE_LABEL_NUMBER (insn)];
723 LABEL_NAME (copy) = LABEL_NAME (insn);
724 break;
726 case BARRIER:
727 copy = rtx_alloc (BARRIER);
728 break;
730 default:
731 abort ();
733 INSN_UID (copy) = INSN_UID (insn);
734 insn_map[INSN_UID (insn)] = copy;
735 NEXT_INSN (last_insn) = copy;
736 PREV_INSN (copy) = last_insn;
737 last_insn = copy;
740 adjust_copied_decl_tree (DECL_INITIAL (fndecl));
742 /* Now copy the REG_NOTES. */
743 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
744 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
745 && insn_map[INSN_UID(insn)])
746 REG_NOTES (insn_map[INSN_UID (insn)])
747 = copy_for_inline (REG_NOTES (insn));
749 NEXT_INSN (last_insn) = NULL;
751 finish_inline (fndecl, head);
753 /* Make new versions of the register tables. */
754 new = (char *) savealloc (regno_pointer_flag_length);
755 bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
756 new1 = (char *) savealloc (regno_pointer_flag_length);
757 bcopy (regno_pointer_align, new1, regno_pointer_flag_length);
759 regno_pointer_flag = new;
760 regno_pointer_align = new1;
762 set_new_first_and_last_insn (first_insn, last_insn);
764 if (label_map)
765 free (label_map);
768 /* Copy NODE (as with copy_node). NODE must be a DECL. Set the
769 DECL_ABSTRACT_ORIGIN for the new node accordingly. */
771 static tree
772 copy_and_set_decl_abstract_origin (node)
773 tree node;
775 tree copy = copy_node (node);
776 if (DECL_ABSTRACT_ORIGIN (copy) != NULL_TREE)
777 /* That means that NODE already had a DECL_ABSTRACT_ORIGIN. (This
778 situation occurs if we inline a function which itself made
779 calls to inline functions.) Since DECL_ABSTRACT_ORIGIN is the
780 most distant ancestor, we don't have to do anything here. */
782 else
783 /* The most distant ancestor must be NODE. */
784 DECL_ABSTRACT_ORIGIN (copy) = node;
786 return copy;
789 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
790 For example, this can copy a list made of TREE_LIST nodes. While copying,
791 set DECL_ABSTRACT_ORIGIN appropriately. */
793 static tree
794 copy_decl_list (list)
795 tree list;
797 tree head;
798 register tree prev, next;
800 if (list == 0)
801 return 0;
803 head = prev = copy_and_set_decl_abstract_origin (list);
804 next = TREE_CHAIN (list);
805 while (next)
807 register tree copy;
809 copy = copy_and_set_decl_abstract_origin (next);
810 TREE_CHAIN (prev) = copy;
811 prev = copy;
812 next = TREE_CHAIN (next);
814 return head;
817 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
819 static tree
820 copy_decl_tree (block)
821 tree block;
823 tree t, vars, subblocks;
825 vars = copy_decl_list (BLOCK_VARS (block));
826 subblocks = 0;
828 /* Process all subblocks. */
829 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
831 tree copy = copy_decl_tree (t);
832 TREE_CHAIN (copy) = subblocks;
833 subblocks = copy;
836 t = copy_node (block);
837 BLOCK_VARS (t) = vars;
838 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
839 /* If the BLOCK being cloned is already marked as having been instantiated
840 from something else, then leave that `origin' marking alone. Otherwise,
841 mark the clone as having originated from the BLOCK we are cloning. */
842 if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
843 BLOCK_ABSTRACT_ORIGIN (t) = block;
844 return t;
847 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
849 static void
850 copy_decl_rtls (block)
851 tree block;
853 tree t;
855 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
856 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
857 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
859 /* Process all subblocks. */
860 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
861 copy_decl_rtls (t);
864 /* Make the insns and PARM_DECLs of the current function permanent
865 and record other information in DECL_SAVED_INSNS to allow inlining
866 of this function in subsequent calls.
868 This routine need not copy any insns because we are not going
869 to immediately compile the insns in the insn chain. There
870 are two cases when we would compile the insns for FNDECL:
871 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
872 be output at the end of other compilation, because somebody took
873 its address. In the first case, the insns of FNDECL are copied
874 as it is expanded inline, so FNDECL's saved insns are not
875 modified. In the second case, FNDECL is used for the last time,
876 so modifying the rtl is not a problem.
878 We don't have to worry about FNDECL being inline expanded by
879 other functions which are written at the end of compilation
880 because flag_no_inline is turned on when we begin writing
881 functions at the end of compilation. */
883 void
884 save_for_inline_nocopy (fndecl)
885 tree fndecl;
887 rtx insn;
888 rtx head;
889 rtx first_nonparm_insn;
891 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
892 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
893 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
894 for the parms, prior to elimination of virtual registers.
895 These values are needed for substituting parms properly. */
897 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
899 /* Make and emit a return-label if we have not already done so. */
901 if (return_label == 0)
903 return_label = gen_label_rtx ();
904 emit_label (return_label);
907 head = initialize_for_inline (fndecl, get_first_label_num (),
908 max_label_num (), max_reg_num (), 0);
910 /* If there are insns that copy parms from the stack into pseudo registers,
911 those insns are not copied. `expand_inline_function' must
912 emit the correct code to handle such things. */
914 insn = get_insns ();
915 if (GET_CODE (insn) != NOTE)
916 abort ();
918 /* Get the insn which signals the end of parameter setup code. */
919 first_nonparm_insn = get_first_nonparm_insn ();
921 /* Now just scan the chain of insns to see what happens to our
922 PARM_DECLs. If a PARM_DECL is used but never modified, we
923 can substitute its rtl directly when expanding inline (and
924 perform constant folding when its incoming value is constant).
925 Otherwise, we have to copy its value into a new register and track
926 the new register's life. */
928 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
930 if (insn == first_nonparm_insn)
931 in_nonparm_insns = 1;
933 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
935 if (current_function_uses_const_pool)
937 /* Replace any constant pool references with the actual constant.
938 We will put the constant back if we need to write the
939 function out after all. */
940 save_constants (&PATTERN (insn));
941 if (REG_NOTES (insn))
942 save_constants (&REG_NOTES (insn));
945 /* Record what interesting things happen to our parameters. */
946 note_stores (PATTERN (insn), note_modified_parmregs);
950 /* Also scan all decls, and replace any constant pool references with the
951 actual constant. */
952 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
954 /* We have now allocated all that needs to be allocated permanently
955 on the rtx obstack. Set our high-water mark, so that we
956 can free the rest of this when the time comes. */
958 preserve_data ();
960 finish_inline (fndecl, head);
963 /* Given PX, a pointer into an insn, search for references to the constant
964 pool. Replace each with a CONST that has the mode of the original
965 constant, contains the constant, and has RTX_INTEGRATED_P set.
966 Similarly, constant pool addresses not enclosed in a MEM are replaced
967 with an ADDRESS and CONST rtx which also gives the constant, its
968 mode, the mode of the address, and has RTX_INTEGRATED_P set. */
970 static void
971 save_constants (px)
972 rtx *px;
974 rtx x;
975 int i, j;
977 again:
978 x = *px;
980 /* If this is a CONST_DOUBLE, don't try to fix things up in
981 CONST_DOUBLE_MEM, because doing so would recurse infinitely. */
982 if (GET_CODE (x) == CONST_DOUBLE)
983 return;
984 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
985 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
987 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
988 rtx new = gen_rtx_CONST (const_mode, get_pool_constant (XEXP (x, 0)));
989 RTX_INTEGRATED_P (new) = 1;
991 /* If the MEM was in a different mode than the constant (perhaps we
992 were only looking at the low-order part), surround it with a
993 SUBREG so we can save both modes. */
995 if (GET_MODE (x) != const_mode)
997 new = gen_rtx_SUBREG (GET_MODE (x), new, 0);
998 RTX_INTEGRATED_P (new) = 1;
1001 *px = new;
1002 save_constants (&XEXP (*px, 0));
1004 else if (GET_CODE (x) == SYMBOL_REF
1005 && CONSTANT_POOL_ADDRESS_P (x))
1007 *px = gen_rtx_ADDRESS (GET_MODE (x),
1008 gen_rtx_CONST (get_pool_mode (x),
1009 get_pool_constant (x)));
1010 save_constants (&XEXP (*px, 0));
1011 RTX_INTEGRATED_P (*px) = 1;
1014 else
1016 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
1017 int len = GET_RTX_LENGTH (GET_CODE (x));
1019 for (i = len-1; i >= 0; i--)
1021 switch (fmt[i])
1023 case 'E':
1024 for (j = 0; j < XVECLEN (x, i); j++)
1025 save_constants (&XVECEXP (x, i, j));
1026 break;
1028 case 'e':
1029 if (XEXP (x, i) == 0)
1030 continue;
1031 if (i == 0)
1033 /* Hack tail-recursion here. */
1034 px = &XEXP (x, 0);
1035 goto again;
1037 save_constants (&XEXP (x, i));
1038 break;
1044 /* Note whether a parameter is modified or not. */
1046 static void
1047 note_modified_parmregs (reg, x)
1048 rtx reg;
1049 rtx x ATTRIBUTE_UNUSED;
1051 if (GET_CODE (reg) == REG && in_nonparm_insns
1052 && REGNO (reg) < max_parm_reg
1053 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1054 && parmdecl_map[REGNO (reg)] != 0)
1055 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
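/* This routine is handed to note_stores (see save_for_inline_nocopy
   above), which invokes it once for each store expression in an insn. */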
1058 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
1059 according to `reg_map' and `label_map'. The original rtl insns
1060 will be saved for inlining; this is used to make a copy
1061 which is used to finish compiling the inline function itself.
1063 If we find a "saved" constant pool entry, one which was replaced with
1064 the value of the constant, convert it back to a constant pool entry.
1065 Since the pool wasn't touched, this should simply restore the old
1066 address.
1068 All other kinds of rtx are copied except those that can never be
1069 changed during compilation. */
1071 static rtx
1072 copy_for_inline (orig)
1073 rtx orig;
1075 register rtx x = orig;
1076 register rtx new;
1077 register int i;
1078 register enum rtx_code code;
1079 register char *format_ptr;
1081 if (x == 0)
1082 return x;
1084 code = GET_CODE (x);
1086 /* These types may be freely shared. */
1088 switch (code)
1090 case QUEUED:
1091 case CONST_INT:
1092 case PC:
1093 case CC0:
1094 return x;
1096 case SYMBOL_REF:
1097 if (! SYMBOL_REF_NEED_ADJUST (x))
1098 return x;
1099 return rethrow_symbol_map (x, save_for_inline_eh_labelmap);
1101 case CONST_DOUBLE:
1102 /* We have to make a new CONST_DOUBLE to ensure that we account for
1103 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
1104 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
1106 REAL_VALUE_TYPE d;
1108 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1109 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
1111 else
1112 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
1113 VOIDmode);
1115 case CONST:
1116 /* Get constant pool entry for constant in the pool. */
1117 if (RTX_INTEGRATED_P (x))
1118 return validize_mem (force_const_mem (GET_MODE (x),
1119 copy_for_inline (XEXP (x, 0))));
1120 break;
1122 case SUBREG:
1123 /* Get constant pool entry, but access in different mode. */
1124 if (RTX_INTEGRATED_P (x))
1126 new = force_const_mem (GET_MODE (SUBREG_REG (x)),
1127 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
1129 PUT_MODE (new, GET_MODE (x));
1130 return validize_mem (new);
1132 break;
1134 case ADDRESS:
1135 /* If this rtx is not marked as being for the constant pool, it is an
1136 error. Otherwise, get the constant pool address. */
1137 if (! RTX_INTEGRATED_P (x))
1138 abort ();
1140 new = force_const_mem (GET_MODE (XEXP (x, 0)),
1141 copy_for_inline (XEXP (XEXP (x, 0), 0)));
1142 new = XEXP (new, 0);
1144 #ifdef POINTERS_EXTEND_UNSIGNED
1145 if (GET_MODE (new) != GET_MODE (x))
1146 new = convert_memory_address (GET_MODE (x), new);
1147 #endif
1149 return new;
1151 case ASM_OPERANDS:
1152 /* If a single asm insn contains multiple output operands
1153 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1154 We must make sure that the copied insn continues to share it. */
1155 if (orig_asm_operands_vector == XVEC (orig, 3))
1157 x = rtx_alloc (ASM_OPERANDS);
1158 x->volatil = orig->volatil;
1159 XSTR (x, 0) = XSTR (orig, 0);
1160 XSTR (x, 1) = XSTR (orig, 1);
1161 XINT (x, 2) = XINT (orig, 2);
1162 XVEC (x, 3) = copy_asm_operands_vector;
1163 XVEC (x, 4) = copy_asm_constraints_vector;
1164 XSTR (x, 5) = XSTR (orig, 5);
1165 XINT (x, 6) = XINT (orig, 6);
1166 return x;
1168 break;
1170 case MEM:
1171 /* A MEM is usually allowed to be shared if its address is constant
1172 or is a constant plus one of the special registers.
1174 We do not allow sharing of addresses that are either a special
1175 register or the sum of a constant and a special register because
1176 it is possible for unshare_all_rtl to copy the address into memory
1177 that won't be saved. Although the MEM can safely be shared, and
1178 won't be copied there, the address itself cannot be shared, and may
1179 need to be copied.
1181 There are also two exceptions with constants: The first is if the
1182 constant is a LABEL_REF or the sum of the LABEL_REF
1183 and an integer. This case can happen if we have an inline
1184 function that supplies a constant operand to the call of another
1185 inline function that uses it in a switch statement. In this case,
1186 we will be replacing the LABEL_REF, so we have to replace this MEM
1187 as well.
1189 The second case is if we have a (const (plus (address ..) ...)).
1190 In that case we need to put back the address of the constant pool
1191 entry. */
1193 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
1194 && GET_CODE (XEXP (x, 0)) != LABEL_REF
1195 && ! (GET_CODE (XEXP (x, 0)) == CONST
1196 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
1197 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1198 == LABEL_REF)
1199 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1200 == ADDRESS)))))
1201 return x;
1202 break;
1204 case LABEL_REF:
1205 /* If this is a non-local label, just make a new LABEL_REF.
1206 Otherwise, use the new label as well. */
1207 x = gen_rtx_LABEL_REF (GET_MODE (orig),
1208 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1209 : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
1210 LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
1211 LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
1212 return x;
1214 case REG:
1215 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
1216 return reg_map [REGNO (x)];
1217 else
1218 return x;
1220 case SET:
1221 /* If a parm that gets modified lives in a pseudo-reg,
1222 clear its TREE_READONLY to prevent certain optimizations. */
1224 rtx dest = SET_DEST (x);
1226 while (GET_CODE (dest) == STRICT_LOW_PART
1227 || GET_CODE (dest) == ZERO_EXTRACT
1228 || GET_CODE (dest) == SUBREG)
1229 dest = XEXP (dest, 0);
1231 if (GET_CODE (dest) == REG
1232 && REGNO (dest) < max_parm_reg
1233 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
1234 && parmdecl_map[REGNO (dest)] != 0
1235 /* The insn to load an arg pseudo from a stack slot
1236 does not count as modifying it. */
1237 && in_nonparm_insns)
1238 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
1240 break;
1242 #if 0 /* This is a good idea, but here is the wrong place for it. */
1243 /* Arrange that CONST_INTs always appear as the second operand
1244 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
1245 always appear as the first. */
1246 case PLUS:
1247 if (GET_CODE (XEXP (x, 0)) == CONST_INT
1248 || (XEXP (x, 1) == frame_pointer_rtx
1249 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1250 && XEXP (x, 1) == arg_pointer_rtx)))
1252 rtx t = XEXP (x, 0);
1253 XEXP (x, 0) = XEXP (x, 1);
1254 XEXP (x, 1) = t;
1256 break;
1257 #endif
1258 default:
1259 break;
1262 /* Replace this rtx with a copy of itself. */
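/* The copy size below is the base struct minus its one-element fld
   array, plus one fld slot for each operand of CODE. */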
1264 x = rtx_alloc (code);
1265 bcopy ((char *) orig, (char *) x,
1266 (sizeof (*x) - sizeof (x->fld)
1267 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
1269 /* Now scan the subexpressions recursively.
1270 We can store any replaced subexpressions directly into X
1271 since we know X is not shared! Any vectors in X
1272 must be copied if X was copied. */
1274 format_ptr = GET_RTX_FORMAT (code);
1276 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1278 switch (*format_ptr++)
1280 case 'e':
1281 XEXP (x, i) = copy_for_inline (XEXP (x, i));
1282 break;
1284 case 'u':
1285 /* Change any references to old-insns to point to the
1286 corresponding copied insns. */
1287 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1288 break;
1290 case 'E':
1291 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1293 register int j;
1295 XVEC (x, i) = gen_rtvec_vv (XVECLEN (x, i), XVEC (x, i)->elem);
1296 for (j = 0; j < XVECLEN (x, i); j++)
1297 XVECEXP (x, i, j)
1298 = copy_for_inline (XVECEXP (x, i, j));
1300 break;
1304 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1306 orig_asm_operands_vector = XVEC (orig, 3);
1307 copy_asm_operands_vector = XVEC (x, 3);
1308 copy_asm_constraints_vector = XVEC (x, 4);
1311 return x;
1314 /* Unfortunately, we need a global copy of const_equiv map for communication
1315 with a function called from note_stores. Be *very* careful that this
1316 is used properly in the presence of recursion. */
1318 varray_type global_const_equiv_varray;
1320 #define FIXED_BASE_PLUS_P(X) \
1321 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1322 && GET_CODE (XEXP (X, 0)) == REG \
1323 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1324 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
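/* For example, FIXED_BASE_PLUS_P matches an address of the form
   (plus (reg VIRTUAL_STACK_VARS_REGNUM) (const_int 8)): a constant
   offset from one of the virtual registers. */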
1326 /* Called to set up a mapping for the case where a parameter is in a
1327 register. If it is read-only and our argument is a constant, set up the
1328 constant equivalence.
1330 If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
1331 if it is a register.
1333 Also, don't allow hard registers here; they might not be valid when
1334 substituted into insns. */
1335 static void
1336 process_reg_param (map, loc, copy)
1337 struct inline_remap *map;
1338 rtx loc, copy;
1340 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1341 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
1342 && ! REG_USERVAR_P (copy))
1343 || (GET_CODE (copy) == REG
1344 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
1346 rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
1347 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
1348 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1349 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
1350 copy = temp;
1352 map->reg_map[REGNO (loc)] = copy;
1355 /* Used by duplicate_eh_handlers to map labels for the exception table. */
1356 static struct inline_remap *eif_eh_map;
1358 static rtx
1359 expand_inline_function_eh_labelmap (label)
1360 rtx label;
1362 int index = CODE_LABEL_NUMBER (label);
1363 return get_label_from_map (eif_eh_map, index);
1366 /* Integrate the procedure defined by FNDECL. Note that this function
1367 may wind up calling itself. Since the static variables are not
1368 reentrant, we do not assign them until after the possibility
1369 of recursion is eliminated.
1371 If IGNORE is nonzero, do not produce a value.
1372 Otherwise store the value in TARGET if it is nonzero and that is convenient.
1374 Value is:
1375 (rtx)-1 if we could not substitute the function
1376 0 if we substituted it and it does not produce a value
1377 else an rtx for where the value is stored. */
1380 expand_inline_function (fndecl, parms, target, ignore, type,
1381 structure_value_addr)
1382 tree fndecl, parms;
1383 rtx target;
1384 int ignore;
1385 tree type;
1386 rtx structure_value_addr;
1388 tree formal, actual, block;
1389 rtx header = DECL_SAVED_INSNS (fndecl);
1390 rtx insns = FIRST_FUNCTION_INSN (header);
1391 rtx parm_insns = FIRST_PARM_INSN (header);
1392 tree *arg_trees;
1393 rtx *arg_vals;
1394 rtx insn;
1395 int max_regno;
1396 register int i;
1397 int min_labelno = FIRST_LABELNO (header);
1398 int max_labelno = LAST_LABELNO (header);
1399 int nargs;
1400 rtx local_return_label = 0;
1401 rtx loc;
1402 rtx stack_save = 0;
1403 rtx temp;
1404 struct inline_remap *map = 0;
1405 #ifdef HAVE_cc0
1406 rtx cc0_insn = 0;
1407 #endif
1408 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1409 rtx static_chain_value = 0;
1411 /* The pointer used to track the true location of the memory used
1412 for MAP->LABEL_MAP. */
1413 rtx *real_label_map = 0;
1415 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1416 max_regno = MAX_REGNUM (header) + 3;
1417 if (max_regno < FIRST_PSEUDO_REGISTER)
1418 abort ();
1420 nargs = list_length (DECL_ARGUMENTS (fndecl));
1422 /* Check that the parm types match and that sufficient arguments were
1423 passed. Since the appropriate conversions or default promotions have
1424 already been applied, the machine modes should match exactly. */
1426 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
1427 formal;
1428 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
1430 tree arg;
1431 enum machine_mode mode;
1433 if (actual == 0)
1434 return (rtx) (HOST_WIDE_INT) -1;
1436 arg = TREE_VALUE (actual);
1437 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1439 if (mode != TYPE_MODE (TREE_TYPE (arg))
1440 /* If they are block mode, the types should match exactly.
1441 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1442 which could happen if the parameter has incomplete type. */
1443 || (mode == BLKmode
1444 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
1445 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
1446 return (rtx) (HOST_WIDE_INT) -1;
1449 /* Extra arguments are valid, but will be ignored below, so we must
1450 evaluate them here for side-effects. */
1451 for (; actual; actual = TREE_CHAIN (actual))
1452 expand_expr (TREE_VALUE (actual), const0_rtx,
1453 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
1455 /* Make a binding contour to keep inline cleanups called at
1456 outer function-scope level from looking like they are shadowing
1457 parameter declarations. */
1458 pushlevel (0);
1460 /* Expand the function arguments. Do this first so that any
1461 new registers get created before we allocate the maps. */
1463 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1464 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1466 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1467 formal;
1468 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1470 /* Actual parameter, converted to the type of the argument within the
1471 function. */
1472 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1473 /* Mode of the variable used within the function. */
1474 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1475 int invisiref = 0;
1477 arg_trees[i] = arg;
1478 loc = RTVEC_ELT (arg_vector, i);
1480 /* If this is an object passed by invisible reference, we copy the
1481 object into a stack slot and save its address. If this will go
1482 into memory, we do nothing now. Otherwise, we just expand the
1483 argument. */
1484 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1485 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1487 rtx stack_slot
1488 = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
1489 int_size_in_bytes (TREE_TYPE (arg)), 1);
1490 MEM_SET_IN_STRUCT_P (stack_slot,
1491 AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1493 store_expr (arg, stack_slot, 0);
1495 arg_vals[i] = XEXP (stack_slot, 0);
1496 invisiref = 1;
1498 else if (GET_CODE (loc) != MEM)
1500 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
1501 /* The modes of LOC and ARG can differ if LOC was a variable
1502 that had its mode promoted via PROMOTED_MODE. */
1503 arg_vals[i] = convert_modes (GET_MODE (loc),
1504 TYPE_MODE (TREE_TYPE (arg)),
1505 expand_expr (arg, NULL_RTX, mode,
1506 EXPAND_SUM),
1507 TREE_UNSIGNED (TREE_TYPE (formal)));
1508 else
1509 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
1511 else
1512 arg_vals[i] = 0;
1514 if (arg_vals[i] != 0
1515 && (! TREE_READONLY (formal)
1516 /* If the parameter is not read-only, copy our argument through
1517 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1518 TARGET in any way. In the inline function, they will likely
1519 be two different pseudos, and `safe_from_p' will make all
1520 sorts of smart assumptions about their not conflicting.
1521 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1522 wrong, so put ARG_VALS[I] into a fresh register.
1523 Don't worry about invisible references, since their stack
1524 temps will never overlap the target. */
1525 || (target != 0
1526 && ! invisiref
1527 && (GET_CODE (arg_vals[i]) == REG
1528 || GET_CODE (arg_vals[i]) == SUBREG
1529 || GET_CODE (arg_vals[i]) == MEM)
1530 && reg_overlap_mentioned_p (arg_vals[i], target))
1531 /* ??? We must always copy a SUBREG into a REG, because it might
1532 get substituted into an address, and not all ports correctly
1533 handle SUBREGs in addresses. */
1534 || (GET_CODE (arg_vals[i]) == SUBREG)))
1535 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
1537 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
1538 && POINTER_TYPE_P (TREE_TYPE (formal)))
1539 mark_reg_pointer (arg_vals[i],
1540 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
1541 / BITS_PER_UNIT));
1544 /* Allocate the structures we use to remap things. */
1546 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1547 map->fndecl = fndecl;
1549 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1550 bzero ((char *) map->reg_map, max_regno * sizeof (rtx));
1552 /* We used to use alloca here, but the size of what it would try to
1553 allocate would occasionally cause it to exceed the stack limit and
1554 cause unpredictable core dumps. */
1555 real_label_map
1556 = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
1557 map->label_map = real_label_map;
1559 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1560 bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
1561 map->min_insnno = 0;
1562 map->max_insnno = INSN_UID (header);
1564 map->integrating = 1;
1566 /* const_equiv_varray maps pseudos in our routine to constants, so
1567 it needs to be large enough for all our pseudos. This is the
1568 number we are currently using plus the number in the called
1569 routine, plus 15 for each arg, five to compute the virtual frame
1570 pointer, and five for the return value. This should be enough
1571 for most cases. We do not reference entries outside the range of
1572 the map.
1574 ??? These numbers are quite arbitrary and were obtained by
1575 experimentation. At some point, we should try to allocate the
1576 table after all the parameters are set up so we can more accurately
1577 estimate the number of pseudos we will need. */
1579 VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
1580 (max_reg_num ()
1581 + (max_regno - FIRST_PSEUDO_REGISTER)
1582 + 15 * nargs
1583 + 10),
1584 "expand_inline_function");
1585 map->const_age = 0;
1587 /* Record the current insn in case we have to set up pointers to frame
1588 and argument memory blocks. If there are no insns yet, add a dummy
1589 insn that can be used as an insertion point. */
1590 map->insns_at_start = get_last_insn ();
1591 if (map->insns_at_start == 0)
1592 map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
1594 map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
1595 map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);
1597 /* Update the outgoing argument size to allow for those in the inlined
1598 function. */
1599 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1600 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1602 /* If the inline function needs to make PIC references, that means
1603 that this function's PIC offset table must be used. */
1604 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1605 current_function_uses_pic_offset_table = 1;
1607 /* If this function needs a context, set it up. */
1608 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
1609 static_chain_value = lookup_static_chain (fndecl);
1611 if (GET_CODE (parm_insns) == NOTE
1612 && NOTE_LINE_NUMBER (parm_insns) > 0)
1614 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1615 NOTE_LINE_NUMBER (parm_insns));
1616 if (note)
1617 RTX_INTEGRATED_P (note) = 1;
1620 /* Process each argument. For each, set up things so that the function's
1621 reference to the argument will refer to the argument being passed.
1622 We only replace REG with REG here. Any simplifications are done
1623 via const_equiv_map.
1625 We make two passes: In the first, we deal with parameters that will
1626 be placed into registers, since we need to ensure that the allocated
1627 register number fits in const_equiv_map. Then we store all non-register
1628 parameters into their memory location. */
1630 /* Don't try to free temp stack slots here, because we may put one of the
1631 parameters into a temp stack slot. */
1633 for (i = 0; i < nargs; i++)
1635 rtx copy = arg_vals[i];
1637 loc = RTVEC_ELT (arg_vector, i);
1639 /* There are four cases, each handled separately. */
1640 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1641 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1643 /* This must be an object passed by invisible reference (it could
1644 also be a variable-sized object, but we forbid inlining functions
1645 with variable-sized arguments). COPY is the address of the
1646 actual value (this computation will cause it to be copied). We
1647 map that address for the register, noting the actual address as
1648 an equivalent in case it can be substituted into the insns. */
1650 if (GET_CODE (copy) != REG)
1652 temp = copy_addr_to_reg (copy);
1653 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1654 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
1655 copy = temp;
1657 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1659 else if (GET_CODE (loc) == MEM)
1661 /* This is the case of a parameter that lives in memory.
1662 It will live in the block we allocate in the called routine's
1663 frame that simulates the incoming argument area. Do nothing
1664 now; we will call store_expr later. */
1667 else if (GET_CODE (loc) == REG)
1668 process_reg_param (map, loc, copy);
1669 else if (GET_CODE (loc) == CONCAT)
1671 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
1672 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
1673 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
1674 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
1676 process_reg_param (map, locreal, copyreal);
1677 process_reg_param (map, locimag, copyimag);
1679 else
1680 abort ();
1683 /* Now do the parameters that will be placed in memory. */
1685 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1686 formal; formal = TREE_CHAIN (formal), i++)
1688 loc = RTVEC_ELT (arg_vector, i);
1690 if (GET_CODE (loc) == MEM
1691 /* Exclude case handled above. */
1692 && ! (GET_CODE (XEXP (loc, 0)) == REG
1693 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1695 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1696 DECL_SOURCE_LINE (formal));
1697 if (note)
1698 RTX_INTEGRATED_P (note) = 1;
1700 /* Compute the address in the area we reserved and store the
1701 value there. */
1702 temp = copy_rtx_and_substitute (loc, map);
1703 subst_constants (&temp, NULL_RTX, map);
1704 apply_change_group ();
1705 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1706 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1707 store_expr (arg_trees[i], temp, 0);
1711 /* Deal with the places that the function puts its result.
1712 We are driven by what is placed into DECL_RESULT.
1714 Initially, we assume that we don't have any special handling for
1715 REG_FUNCTION_VALUE_P. */
1717 map->inline_target = 0;
1718 loc = DECL_RTL (DECL_RESULT (fndecl));
1720 if (TYPE_MODE (type) == VOIDmode)
1721 /* There is no return value to worry about. */
1723 else if (GET_CODE (loc) == MEM)
1725 if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
1727 temp = copy_rtx_and_substitute (loc, map);
1728 subst_constants (&temp, NULL_RTX, map);
1729 apply_change_group ();
1730 target = temp;
1732 else
1734 if (! structure_value_addr
1735 || ! aggregate_value_p (DECL_RESULT (fndecl)))
1736 abort ();
1738 /* Pass the function the address in which to return a structure
1739 value. Note that a constructor can cause someone to call us
1740 with STRUCTURE_VALUE_ADDR, but the initialization takes place
1741 via the first parameter, rather than the struct return address.
1743 We have two cases: If the address is a simple register
1744 indirect, use the mapping mechanism to point that register to
1745 our structure return address. Otherwise, store the structure
1746 return value into the place that it will be referenced from. */
1748 if (GET_CODE (XEXP (loc, 0)) == REG)
1750 temp = force_operand (structure_value_addr, NULL_RTX);
1751 temp = force_reg (Pmode, temp);
1752 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1754 if (CONSTANT_P (structure_value_addr)
1755 || GET_CODE (structure_value_addr) == ADDRESSOF
1756 || (GET_CODE (structure_value_addr) == PLUS
1757 && (XEXP (structure_value_addr, 0)
1758 == virtual_stack_vars_rtx)
1759 && (GET_CODE (XEXP (structure_value_addr, 1))
1760 == CONST_INT)))
1762 SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
1763 CONST_AGE_PARM);
1766 else
1768 temp = copy_rtx_and_substitute (loc, map);
1769 subst_constants (&temp, NULL_RTX, map);
1770 apply_change_group ();
1771 emit_move_insn (temp, structure_value_addr);
1775 else if (ignore)
1776 /* We will ignore the result value, so don't look at its structure.
1777 Note that preparations for an aggregate return value
1778 do need to be made (above) even if it will be ignored. */
1780 else if (GET_CODE (loc) == REG)
1782 /* The function returns an object in a register and we use the return
1783 value. Set up our target for remapping. */
1785 /* Machine mode the function was declared to return. */
1786 enum machine_mode departing_mode = TYPE_MODE (type);
1787 /* (Possibly wider) machine mode it actually computes
1788 (for the sake of callers that fail to declare it right).
1789 We have to use the mode of the result's RTL, rather than
1790 its type, since expand_function_start may have promoted it. */
1791 enum machine_mode arriving_mode
1792 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1793 rtx reg_to_map;
1795 /* Don't use MEMs as direct targets because on some machines
1796 substituting a MEM for a REG makes invalid insns.
1797 Let the combiner substitute the MEM if that is valid. */
1798 if (target == 0 || GET_CODE (target) != REG
1799 || GET_MODE (target) != departing_mode)
1801 /* Don't make BLKmode registers. If this looks like
1802 a BLKmode object being returned in a register, get
1803 the mode from that, otherwise abort. */
1804 if (departing_mode == BLKmode)
1806 if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
1808 departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1809 arriving_mode = departing_mode;
1811 else
1812 abort ();
1815 target = gen_reg_rtx (departing_mode);
1818 /* If function's value was promoted before return,
1819 avoid machine mode mismatch when we substitute INLINE_TARGET.
1820 But TARGET is what we will return to the caller. */
1821 if (arriving_mode != departing_mode)
1823 /* Avoid creating a paradoxical subreg wider than
1824 BITS_PER_WORD, since that is illegal. */
1825 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1827 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1828 GET_MODE_BITSIZE (arriving_mode)))
1829 /* Maybe this could be handled by using convert_move ()? */
1830 abort ();
1831 reg_to_map = gen_reg_rtx (arriving_mode);
1832 target = gen_lowpart (departing_mode, reg_to_map);
1834 else
1835 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1837 else
1838 reg_to_map = target;
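/* Hypothetical example: for a function declared to return `short' on a
   target whose PROMOTE_MODE widens that to SImode, DEPARTING_MODE is
   HImode and ARRIVING_MODE is SImode; TARGET stays an HImode pseudo
   and REG_TO_MAP becomes (subreg:SI (reg:HI T) 0), a paradoxical
   SUBREG that is legal because it is no wider than a word.  */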
1840 /* Usually, the result value is the machine's return register.
1841 Sometimes it may be a pseudo. Handle both cases. */
1842 if (REG_FUNCTION_VALUE_P (loc))
1843 map->inline_target = reg_to_map;
1844 else
1845 map->reg_map[REGNO (loc)] = reg_to_map;
1847 else
1848 abort ();
1850 /* Make a fresh binding contour that we can easily remove. Do this after
1851 expanding our arguments so cleanups are properly scoped. */
1852 pushlevel (0);
1853 expand_start_bindings (0);
1855 /* Initialize label_map. get_label_from_map will actually make
1856 the labels. */
1857 bzero ((char *) &map->label_map [min_labelno],
1858 (max_labelno - min_labelno) * sizeof (rtx));
1860 /* Perform postincrements before actually calling the function. */
1861 emit_queue ();
1863 /* Clean up stack so that variables might have smaller offsets. */
1864 do_pending_stack_adjust ();
1866 /* Save a copy of the location of const_equiv_varray for
1867 mark_stores, called via note_stores. */
1868 global_const_equiv_varray = map->const_equiv_varray;
1870 /* If the called function does an alloca, save and restore the
1871 stack pointer around the call. This saves stack space, but
1872 also is required if this inline is being done between two
1873 pushes. */
1874 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1875 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1877 /* Now copy the insns one by one. Do this in two passes, first the insns and
1878 then their REG_NOTES, just like save_for_inline. */
1880 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1882 for (insn = insns; insn; insn = NEXT_INSN (insn))
1884 rtx copy, pattern, set;
1886 map->orig_asm_operands_vector = 0;
1888 switch (GET_CODE (insn))
1890 case INSN:
1891 pattern = PATTERN (insn);
1892 set = single_set (insn);
1893 copy = 0;
1894 if (GET_CODE (pattern) == USE
1895 && GET_CODE (XEXP (pattern, 0)) == REG
1896 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1897 /* The (USE (REG n)) at return from the function should
1898 be ignored since we are changing (REG n) into
1899 inline_target. */
1900 break;
1902 /* If the inline fn needs eh context, make sure that
1903 the current fn has one. */
1904 if (GET_CODE (pattern) == USE
1905 && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
1906 get_eh_context ();
1908 /* Ignore setting a function value that we don't want to use. */
1909 if (map->inline_target == 0
1910 && set != 0
1911 && GET_CODE (SET_DEST (set)) == REG
1912 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1914 if (volatile_refs_p (SET_SRC (set)))
1916 rtx new_set;
1918 /* If we must not delete the source,
1919 load it into a new temporary. */
1920 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1922 new_set = single_set (copy);
1923 if (new_set == 0)
1924 abort ();
1926 SET_DEST (new_set)
1927 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1929 /* If the source and destination are the same and it
1930 has a note on it, keep the insn. */
1931 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1932 && REG_NOTES (insn) != 0)
1933 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1934 else
1935 break;
1938 /* If this is setting the static chain rtx, omit it. */
1939 else if (static_chain_value != 0
1940 && set != 0
1941 && GET_CODE (SET_DEST (set)) == REG
1942 && rtx_equal_p (SET_DEST (set),
1943 static_chain_incoming_rtx))
1944 break;
1946 /* If this is setting the static chain pseudo, set it from
1947 the value we want to give it instead. */
1948 else if (static_chain_value != 0
1949 && set != 0
1950 && rtx_equal_p (SET_SRC (set),
1951 static_chain_incoming_rtx))
1953 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1955 copy = emit_move_insn (newdest, static_chain_value);
1956 static_chain_value = 0;
1958 else
1959 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1960 /* REG_NOTES will be copied later. */
1962 #ifdef HAVE_cc0
1963 /* If this insn is setting CC0, it may need to look at
1964 the insn that uses CC0 to see what type of insn it is.
1965 In that case, the call to recog via validate_change will
1966 fail. So don't substitute constants here. Instead,
1967 do it when we emit the following insn.
1969 For example, see the pyr.md file. That machine has signed and
1970 unsigned compares. The compare patterns must check the
1971 following branch insn to see what kind of compare to
1972 emit.
1974 If the previous insn set CC0, substitute constants on it as
1975 well. */
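/* Illustrative rtl for the situation described above, on a cc0 machine:
     (set (cc0) (compare (reg A) (reg B)))
     (set (pc) (if_then_else (gtu (cc0) (const_int 0)) ...))
   The compare can only be recognized together with its user, so the
   deferred try_constants below runs on it only after the following
   insn has been emitted.  */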
1976 if (sets_cc0_p (PATTERN (copy)) != 0)
1977 cc0_insn = copy;
1978 else
1980 if (cc0_insn)
1981 try_constants (cc0_insn, map);
1982 cc0_insn = 0;
1983 try_constants (copy, map);
1985 #else
1986 try_constants (copy, map);
1987 #endif
1988 break;
1990 case JUMP_INSN:
1991 if (GET_CODE (PATTERN (insn)) == RETURN
1992 || (GET_CODE (PATTERN (insn)) == PARALLEL
1993 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
1995 if (local_return_label == 0)
1996 local_return_label = gen_label_rtx ();
1997 pattern = gen_jump (local_return_label);
1999 else
2000 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
2002 copy = emit_jump_insn (pattern);
2004 #ifdef HAVE_cc0
2005 if (cc0_insn)
2006 try_constants (cc0_insn, map);
2007 cc0_insn = 0;
2008 #endif
2009 try_constants (copy, map);
2011 /* If this used to be a conditional jump insn whose branch
2012 direction is now known, we must do something special. */
2013 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
2015 #ifdef HAVE_cc0
2016 /* The previous insn set cc0 for us. So delete it. */
2017 delete_insn (PREV_INSN (copy));
2018 #endif
2020 /* If this is now a no-op, delete it. */
2021 if (map->last_pc_value == pc_rtx)
2023 delete_insn (copy);
2024 copy = 0;
2026 else
2027 /* Otherwise, this is an unconditional jump so we must put a
2028 BARRIER after it. We could do some dead code elimination
2029 here, but jump.c will do it just as well. */
2030 emit_barrier ();
2032 break;
2034 case CALL_INSN:
2035 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
2036 copy = emit_call_insn (pattern);
2038 /* Because the USAGE information potentially contains objects other
2039 than hard registers, we need to copy it. */
2040 CALL_INSN_FUNCTION_USAGE (copy)
2041 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
2043 #ifdef HAVE_cc0
2044 if (cc0_insn)
2045 try_constants (cc0_insn, map);
2046 cc0_insn = 0;
2047 #endif
2048 try_constants (copy, map);
2050 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
2051 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2052 VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
2053 break;
2055 case CODE_LABEL:
2056 copy = emit_label (get_label_from_map (map,
2057 CODE_LABEL_NUMBER (insn)));
2058 LABEL_NAME (copy) = LABEL_NAME (insn);
2059 map->const_age++;
2060 break;
2062 case BARRIER:
2063 copy = emit_barrier ();
2064 break;
2066 case NOTE:
2067 /* It is important to discard function-end and function-beg notes,
2068 so we have only one of each in the current function.
2069 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
2070 deleted these in the copy used for continuing compilation,
2071 not the copy used for inlining). */
2072 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
2073 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
2074 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
2076 copy = emit_note (NOTE_SOURCE_FILE (insn),
2077 NOTE_LINE_NUMBER (insn));
2078 if (copy
2079 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
2080 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
2082 rtx label
2083 = get_label_from_map (map, NOTE_BLOCK_NUMBER (copy));
2085 /* We have to duplicate the handlers for the original. */
2086 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
2088 /* We need to duplicate the handlers for the EH region
2089 and we need to indicate where the label map is. */
2090 eif_eh_map = map;
2091 duplicate_eh_handlers (NOTE_BLOCK_NUMBER (copy),
2092 CODE_LABEL_NUMBER (label),
2093 expand_inline_function_eh_labelmap);
2096 /* We have to forward these both to match the new exception
2097 region. */
2098 NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
2101 else
2102 copy = 0;
2103 break;
2105 default:
2106 abort ();
2107 break;
2110 if (copy)
2111 RTX_INTEGRATED_P (copy) = 1;
2113 map->insn_map[INSN_UID (insn)] = copy;
2116 /* Now copy the REG_NOTES. Increment const_age, so that only constants
2117 from parameters can be substituted in. These are the only ones that
2118 are valid across the entire function. */
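/* (Equivalences are checked with `p->age >= map->const_age' in
   subst_constants, so after this increment only the entries that were
   recorded with the higher CONST_AGE_PARM age remain usable.)  */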
2119 map->const_age++;
2120 for (insn = insns; insn; insn = NEXT_INSN (insn))
2121 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2122 && map->insn_map[INSN_UID (insn)]
2123 && REG_NOTES (insn))
2125 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
2126 /* We must also do subst_constants, in case one of our parameters
2127 has const type and constant value. */
2128 subst_constants (&tem, NULL_RTX, map);
2129 apply_change_group ();
2130 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
2133 if (local_return_label)
2134 emit_label (local_return_label);
2136 /* Restore the stack pointer if we saved it above. */
2137 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
2138 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
2140 /* Make copies of the decls of the symbols in the inline function, so that
2141 the copies of the variables get declared in the current function. Set
2142 up things so that lookup_static_chain knows to interpret registers
2143 in SAVE_EXPRs for TYPE_SIZEs as local. */
2145 inline_function_decl = fndecl;
2146 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
2147 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
2148 inline_function_decl = 0;
2150 /* End the scope containing the copied formal parameter variables
2151 and copied LABEL_DECLs. */
2153 expand_end_bindings (getdecls (), 1, 1);
2154 block = poplevel (1, 1, 0);
2155 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
2156 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
2157 poplevel (0, 0, 0);
2159 /* Must mark the line number note after inlined functions as a repeat, so
2160 that the test coverage code can avoid counting the call twice. This
2161 just tells the code to ignore the immediately following line note, since
2162 there already exists a copy of this note before the expanded inline call.
2163 This line number note is still needed for debugging though, so we can't
2164 delete it. */
2165 if (flag_test_coverage)
2166 emit_note (0, NOTE_REPEATED_LINE_NUMBER);
2168 emit_line_note (input_filename, lineno);
2170 /* If the function returns a BLKmode object in a register, copy it
2171 out of the temp register into a BLKmode memory object. */
2172 if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
2173 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
2174 target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));
2176 if (structure_value_addr)
2178 target = gen_rtx_MEM (TYPE_MODE (type),
2179 memory_address (TYPE_MODE (type),
2180 structure_value_addr));
2181 MEM_SET_IN_STRUCT_P (target, 1);
2184 /* Make sure we free the things we explicitly allocated with xmalloc. */
2185 if (real_label_map)
2186 free (real_label_map);
2187 if (map)
2188 VARRAY_FREE (map->const_equiv_varray);
2190 return target;
2193 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
2194 push all of those decls and give each one the corresponding home. */
2196 static void
2197 integrate_parm_decls (args, map, arg_vector)
2198 tree args;
2199 struct inline_remap *map;
2200 rtvec arg_vector;
2202 register tree tail;
2203 register int i;
2205 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
2207 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
2208 TREE_TYPE (tail));
2209 rtx new_decl_rtl
2210 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
2212 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
2213 /* We really should be setting DECL_INCOMING_RTL to something reasonable
2214 here, but that's going to require some more work. */
2215 /* DECL_INCOMING_RTL (decl) = ?; */
2216 /* These args would always appear unused, if not for this. */
2217 TREE_USED (decl) = 1;
2218 /* Prevent warning for shadowing with these. */
2219 DECL_ABSTRACT_ORIGIN (decl) = DECL_ORIGIN (tail);
2220 pushdecl (decl);
2221 /* Fully instantiate the address with the equivalent form so that the
2222 debugging information contains the actual register, instead of the
2223 virtual register. Do this by not passing an insn to
2224 subst_constants. */
2225 subst_constants (&new_decl_rtl, NULL_RTX, map);
2226 apply_change_group ();
2227 DECL_RTL (decl) = new_decl_rtl;
2231 /* Given a BLOCK node LET, push decls and levels so as to construct in the
2232 current function a tree of contexts isomorphic to the one that is given.
2234 LEVEL indicates how far down into the BLOCK tree is the node we are
2235 currently traversing. It is always zero except for recursive calls.
2237 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
2238 registers used in the DECL_RTL field should be remapped. If it is zero,
2239 no mapping is necessary. */
2241 static void
2242 integrate_decl_tree (let, level, map)
2243 tree let;
2244 int level;
2245 struct inline_remap *map;
2247 tree t, node;
2249 if (level > 0)
2250 pushlevel (0);
2252 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2254 tree d;
2256 push_obstacks_nochange ();
2257 saveable_allocation ();
2258 d = copy_and_set_decl_abstract_origin (t);
2259 pop_obstacks ();
2261 if (DECL_RTL (t) != 0)
2263 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
2264 /* Fully instantiate the address with the equivalent form so that the
2265 debugging information contains the actual register, instead of the
2266 virtual register. Do this by not passing an insn to
2267 subst_constants. */
2268 subst_constants (&DECL_RTL (d), NULL_RTX, map);
2269 apply_change_group ();
2271 /* These args would always appear unused, if not for this. */
2272 TREE_USED (d) = 1;
2274 if (DECL_LANG_SPECIFIC (d))
2275 copy_lang_decl (d);
2277 pushdecl (d);
2280 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2281 integrate_decl_tree (t, level + 1, map);
2283 if (level > 0)
2285 node = poplevel (1, 0, 0);
2286 if (node)
2288 TREE_USED (node) = TREE_USED (let);
2289 BLOCK_ABSTRACT_ORIGIN (node) = let;
2294 /* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
2295 through save_constants. */
2297 static void
2298 save_constants_in_decl_trees (let)
2299 tree let;
2301 tree t;
2303 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2304 if (DECL_RTL (t) != 0)
2305 save_constants (&DECL_RTL (t));
2307 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2308 save_constants_in_decl_trees (t);
2311 /* Create a new copy of an rtx.
2312 Recursively copies the operands of the rtx,
2313 except for those few rtx codes that are sharable.
2315 We always return an rtx that is similar to the incoming rtx, with the
2316 exception of possibly changing a REG to a SUBREG or vice versa. No
2317 rtl is ever emitted.
2319 Handle constants that need to be placed in the constant pool by
2320 calling `force_const_mem'. */
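/* A typical call, as an illustration: with ORIG being
   (plus:SI (reg 60) (const_int 4)) and a MAP whose reg_map sends
   pseudo 60 to (reg 75), the result is the fresh rtx
   (plus:SI (reg 75) (const_int 4)).  */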
2322 rtx
2323 copy_rtx_and_substitute (orig, map)
2324 register rtx orig;
2325 struct inline_remap *map;
2327 register rtx copy, temp;
2328 register int i, j;
2329 register RTX_CODE code;
2330 register enum machine_mode mode;
2331 register char *format_ptr;
2332 int regno;
2334 if (orig == 0)
2335 return 0;
2337 code = GET_CODE (orig);
2338 mode = GET_MODE (orig);
2340 switch (code)
2342 case REG:
2343 /* If the stack pointer register shows up, it must be part of
2344 stack-adjustments (*not* because we eliminated the frame pointer!).
2345 Small hard registers are returned as-is. Pseudo-registers
2346 go through their `reg_map'. */
2347 regno = REGNO (orig);
2348 if (regno <= LAST_VIRTUAL_REGISTER)
2350 /* Some hard registers are also mapped,
2351 but others are not translated. */
2352 if (map->reg_map[regno] != 0)
2353 return map->reg_map[regno];
2355 /* If this is the virtual frame pointer, make space in the current
2356 function's stack frame for the stack frame of the inline function.
2358 Copy the address of this area into a pseudo. Map
2359 virtual_stack_vars_rtx to this pseudo and set up a constant
2360 equivalence for it to be the address. This will substitute the
2361 address into insns where it can be substituted and use the new
2362 pseudo where it can't. */
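/* Worked example, assuming FRAME_GROWS_DOWNWARD and a BIGGEST_ALIGNMENT
   of 64 bits: an inline function with a 13-byte frame gets a BLKmode
   temporary of CEIL_ROUND (13, 8) = 16 bytes in the caller, and the
   new pseudo is set to that temporary's address plus 16 -- one byte
   past the area, mirroring what virtual_stack_vars_rtx points to.  */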
2363 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2365 rtx loc, seq;
2366 int size = DECL_FRAME_SIZE (map->fndecl);
2368 #ifdef FRAME_GROWS_DOWNWARD
2369 /* In this case, virtual_stack_vars_rtx points to one byte
2370 higher than the top of the frame area. So make sure we
2371 allocate a big enough chunk to keep the frame pointer
2372 aligned like a real one. */
2373 size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2374 #endif
2375 start_sequence ();
2376 loc = assign_stack_temp (BLKmode, size, 1);
2377 loc = XEXP (loc, 0);
2378 #ifdef FRAME_GROWS_DOWNWARD
2379 /* In this case, virtual_stack_vars_rtx points to one byte
2380 higher than the top of the frame area. So compute the offset
2381 to one byte higher than our substitute frame. */
2382 loc = plus_constant (loc, size);
2383 #endif
2384 map->reg_map[regno] = temp
2385 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2387 #ifdef STACK_BOUNDARY
2388 mark_reg_pointer (map->reg_map[regno],
2389 STACK_BOUNDARY / BITS_PER_UNIT);
2390 #endif
2392 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
2394 seq = gen_sequence ();
2395 end_sequence ();
2396 emit_insn_after (seq, map->insns_at_start);
2397 return temp;
2399 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2401 /* Do the same for a block to contain any arguments referenced
2402 in memory. */
2403 rtx loc, seq;
2404 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2406 start_sequence ();
2407 loc = assign_stack_temp (BLKmode, size, 1);
2408 loc = XEXP (loc, 0);
2409 /* When arguments grow downward, the virtual incoming
2410 args pointer points to the top of the argument block,
2411 so the remapped location had better do the same. */
2412 #ifdef ARGS_GROW_DOWNWARD
2413 loc = plus_constant (loc, size);
2414 #endif
2415 map->reg_map[regno] = temp
2416 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2418 #ifdef STACK_BOUNDARY
2419 mark_reg_pointer (map->reg_map[regno],
2420 STACK_BOUNDARY / BITS_PER_UNIT);
2421 #endif
2423 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
2425 seq = gen_sequence ();
2426 end_sequence ();
2427 emit_insn_after (seq, map->insns_at_start);
2428 return temp;
2430 else if (REG_FUNCTION_VALUE_P (orig))
2432 /* This is a reference to the function return value. If
2433 the function doesn't have a return value, error. If the
2434 mode doesn't agree, and it ain't BLKmode, make a SUBREG. */
2435 if (map->inline_target == 0)
2436 /* Must be unrolling loops or replicating code if we
2437 reach here, so return the register unchanged. */
2438 return orig;
2439 else if (GET_MODE (map->inline_target) != BLKmode
2440 && mode != GET_MODE (map->inline_target))
2441 return gen_lowpart (mode, map->inline_target);
2442 else
2443 return map->inline_target;
2445 return orig;
2447 if (map->reg_map[regno] == NULL)
2449 map->reg_map[regno] = gen_reg_rtx (mode);
2450 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2451 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2452 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2453 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2455 if (map->regno_pointer_flag[regno])
2456 mark_reg_pointer (map->reg_map[regno],
2457 map->regno_pointer_align[regno]);
2459 return map->reg_map[regno];
2461 case SUBREG:
2462 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2463 /* SUBREG is ordinary, but don't make nested SUBREGs. */
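/* E.g. (hypothetical): if ORIG is (subreg:SI (reg:DI P) 1) and P is
   remapped to (subreg:DI (reg:TI Q) 2), the result is flattened to
   (subreg:SI (reg:TI Q) 3); the two SUBREG_WORDs simply add.  */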
2464 if (GET_CODE (copy) == SUBREG)
2465 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
2466 SUBREG_WORD (orig) + SUBREG_WORD (copy));
2467 else if (GET_CODE (copy) == CONCAT)
2469 rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);
2471 if (GET_MODE (retval) == GET_MODE (orig))
2472 return retval;
2473 else
2474 return gen_rtx_SUBREG (GET_MODE (orig), retval,
2475 (SUBREG_WORD (orig) %
2476 (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)))
2477 / (unsigned) UNITS_PER_WORD)));
2479 else
2480 return gen_rtx_SUBREG (GET_MODE (orig), copy,
2481 SUBREG_WORD (orig));
2483 case ADDRESSOF:
2484 copy = gen_rtx_ADDRESSOF (mode,
2485 copy_rtx_and_substitute (XEXP (orig, 0), map), 0);
2486 SET_ADDRESSOF_DECL (copy, ADDRESSOF_DECL (orig));
2487 regno = ADDRESSOF_REGNO (orig);
2488 if (map->reg_map[regno])
2489 regno = REGNO (map->reg_map[regno]);
2490 else if (regno > LAST_VIRTUAL_REGISTER)
2492 temp = XEXP (orig, 0);
2493 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2494 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2495 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2496 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2497 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2499 if (map->regno_pointer_flag[regno])
2500 mark_reg_pointer (map->reg_map[regno],
2501 map->regno_pointer_align[regno]);
2502 regno = REGNO (map->reg_map[regno]);
2504 ADDRESSOF_REGNO (copy) = regno;
2505 return copy;
2507 case USE:
2508 case CLOBBER:
2509 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2510 to (use foo) if the original insn didn't have a subreg.
2511 Removing the subreg distorts the VAX movstrhi pattern
2512 by changing the mode of an operand. */
2513 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2514 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2515 copy = SUBREG_REG (copy);
2516 return gen_rtx_fmt_e (code, VOIDmode, copy);
2518 case CODE_LABEL:
2519 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2520 = LABEL_PRESERVE_P (orig);
2521 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2523 case LABEL_REF:
2524 copy = gen_rtx_LABEL_REF (mode,
2525 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2526 : get_label_from_map (map,
2527 CODE_LABEL_NUMBER (XEXP (orig, 0))));
2528 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2530 /* The fact that this label was previously nonlocal does not mean
2531 it still is, so we must check if it is within the range of
2532 this function's labels. */
2533 LABEL_REF_NONLOCAL_P (copy)
2534 = (LABEL_REF_NONLOCAL_P (orig)
2535 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2536 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2538 /* If we have made a nonlocal label local, it means that this
2539 inlined call will be referring to our nonlocal goto handler.
2540 So make sure we create one for this block; we normally would
2541 not since this is not otherwise considered a "call". */
2542 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2543 function_call_count++;
2545 return copy;
2547 case PC:
2548 case CC0:
2549 case CONST_INT:
2550 return orig;
2552 case SYMBOL_REF:
2553 /* Symbols which represent the address of a label stored in the constant
2554 pool must be modified to point to a constant pool entry for the
2555 remapped label. Otherwise, symbols are returned unchanged. */
2556 if (CONSTANT_POOL_ADDRESS_P (orig))
2558 rtx constant = get_pool_constant (orig);
2559 if (GET_CODE (constant) == LABEL_REF)
2560 return XEXP (force_const_mem (GET_MODE (orig),
2561 copy_rtx_and_substitute (constant,
2562 map)),
2563 0);
2565 else
2566 if (SYMBOL_REF_NEED_ADJUST (orig))
2568 eif_eh_map = map;
2569 return rethrow_symbol_map (orig,
2570 expand_inline_function_eh_labelmap);
2573 return orig;
2575 case CONST_DOUBLE:
2576 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2577 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2578 duplicate of a CONST_DOUBLE we have already seen. */
2579 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2581 REAL_VALUE_TYPE d;
2583 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2584 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2586 else
2587 return immed_double_const (CONST_DOUBLE_LOW (orig),
2588 CONST_DOUBLE_HIGH (orig), VOIDmode);
2590 case CONST:
2591 /* Make new constant pool entry for a constant
2592 that was in the pool of the inline function. */
2593 if (RTX_INTEGRATED_P (orig))
2595 /* If this was an address of a constant pool entry that itself
2596 had to be placed in the constant pool, it might not be a
2597 valid address. So the recursive call below might turn it
2598 into a register. In that case, it isn't a constant any
2599 more, so return it. This has the potential of changing a
2600 MEM into a REG, but we'll assume that it is safe. */
2601 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2602 if (! CONSTANT_P (temp))
2603 return temp;
2604 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2606 break;
2608 case ADDRESS:
2609 /* If from constant pool address, make new constant pool entry and
2610 return its address. */
2611 if (! RTX_INTEGRATED_P (orig))
2612 abort ();
2614 temp
2615 = force_const_mem (GET_MODE (XEXP (orig, 0)),
2616 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2617 map));
2619 #if 0
2620 /* Legitimizing the address here is incorrect.
2622 The only ADDRESS rtx's that can reach here are ones created by
2623 save_constants. Hence the operand of the ADDRESS is always valid
2624 in this position of the instruction, since the original rtx without
2625 the ADDRESS was valid.
2627 The reason we don't legitimize the address here is that on the
2628 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2629 This code forces the operand of the address to a register, which
2630 fails because we can not take the HIGH part of a register.
2632 Also, change_address may create new registers. These registers
2633 will not have valid reg_map entries. This can cause try_constants()
2634 to fail because it assumes that all registers in the rtx have valid
2635 reg_map entries, and it may end up replacing one of these new
2636 registers with junk. */
2638 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2639 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2640 #endif
2642 temp = XEXP (temp, 0);
2644 #ifdef POINTERS_EXTEND_UNSIGNED
2645 if (GET_MODE (temp) != GET_MODE (orig))
2646 temp = convert_memory_address (GET_MODE (orig), temp);
2647 #endif
2649 return temp;
2651 case ASM_OPERANDS:
2652 /* If a single asm insn contains multiple output operands
2653 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2654 We must make sure that the copied insn continues to share it. */
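/* Sketch: for  asm ("..." : "=r" (a), "=r" (b) : "r" (c));  the insn
   is a PARALLEL with one ASM_OPERANDS per output, all sharing one
   operand vector.  Copying the first records the old and new vectors
   in MAP (see the end of this function), and this clause lets each
   later ASM_OPERANDS of the same insn reuse the copies.  */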
2655 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2657 copy = rtx_alloc (ASM_OPERANDS);
2658 copy->volatil = orig->volatil;
2659 XSTR (copy, 0) = XSTR (orig, 0);
2660 XSTR (copy, 1) = XSTR (orig, 1);
2661 XINT (copy, 2) = XINT (orig, 2);
2662 XVEC (copy, 3) = map->copy_asm_operands_vector;
2663 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2664 XSTR (copy, 5) = XSTR (orig, 5);
2665 XINT (copy, 6) = XINT (orig, 6);
2666 return copy;
2668 break;
2670 case CALL:
2671 /* This is given special treatment because the first
2672 operand of a CALL is a (MEM ...) which may get
2673 forced into a register for cse. This is undesirable
2674 if function-address cse isn't wanted or if we won't do cse. */
2675 #ifndef NO_FUNCTION_CSE
2676 if (! (optimize && ! flag_no_function_cse))
2677 #endif
2678 return gen_rtx_CALL (GET_MODE (orig),
2679 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2680 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2681 copy_rtx_and_substitute (XEXP (orig, 1), map));
2682 break;
2684 #if 0
2685 /* Must be ifdefed out for loop unrolling to work. */
2686 case RETURN:
2687 abort ();
2688 #endif
2690 case SET:
2691 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2692 Adjust the setting by the offset of the area we made.
2693 If the nonlocal goto is into the current function,
2694 this will result in unnecessarily bad code, but should work. */
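/* For instance (illustrative): if the remapped frame pseudo has the
   recorded equivalence (plus (reg R) (const_int 16)), LOC_OFFSET is
   16 and the SET built below assigns the substituted source minus 16,
   so the eventual address arithmetic comes out the same.  */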
2695 if (SET_DEST (orig) == virtual_stack_vars_rtx
2696 || SET_DEST (orig) == virtual_incoming_args_rtx)
2698 /* In case a translation hasn't occurred already, make one now. */
2699 rtx equiv_reg;
2700 rtx equiv_loc;
2701 HOST_WIDE_INT loc_offset;
2703 copy_rtx_and_substitute (SET_DEST (orig), map);
2704 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2705 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray, REGNO (equiv_reg)).rtx;
2706 loc_offset
2707 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2708 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2709 force_operand
2710 (plus_constant
2711 (copy_rtx_and_substitute (SET_SRC (orig), map),
2712 - loc_offset),
2713 NULL_RTX));
2715 break;
2717 case MEM:
2718 copy = rtx_alloc (MEM);
2719 PUT_MODE (copy, mode);
2720 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2721 MEM_COPY_ATTRIBUTES (copy, orig);
2722 MEM_ALIAS_SET (copy) = MEM_ALIAS_SET (orig);
2724 /* If doing function inlining, this MEM might not be const in the
2725 function that it is being inlined into, and thus may not be
2726 unchanging after function inlining. Constant pool references are
2727 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
2728 for them. */
2729 if (! map->integrating)
2730 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2732 return copy;
2734 default:
2735 break;
2738 copy = rtx_alloc (code);
2739 PUT_MODE (copy, mode);
2740 copy->in_struct = orig->in_struct;
2741 copy->volatil = orig->volatil;
2742 copy->unchanging = orig->unchanging;
2744 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2746 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2748 switch (*format_ptr++)
2750 case '0':
2751 XEXP (copy, i) = XEXP (orig, i);
2752 break;
2754 case 'e':
2755 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2756 break;
2758 case 'u':
2759 /* Change any references to old-insns to point to the
2760 corresponding copied insns. */
2761 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2762 break;
2764 case 'E':
2765 XVEC (copy, i) = XVEC (orig, i);
2766 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2768 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2769 for (j = 0; j < XVECLEN (copy, i); j++)
2770 XVECEXP (copy, i, j)
2771 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2773 break;
2775 case 'w':
2776 XWINT (copy, i) = XWINT (orig, i);
2777 break;
2779 case 'i':
2780 XINT (copy, i) = XINT (orig, i);
2781 break;
2783 case 's':
2784 XSTR (copy, i) = XSTR (orig, i);
2785 break;
2787 default:
2788 abort ();
2792 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2794 map->orig_asm_operands_vector = XVEC (orig, 3);
2795 map->copy_asm_operands_vector = XVEC (copy, 3);
2796 map->copy_asm_constraints_vector = XVEC (copy, 4);
2799 return copy;
2802 /* Substitute known constant values into INSN, if that is valid. */
2804 void
2805 try_constants (insn, map)
2806 rtx insn;
2807 struct inline_remap *map;
2809 int i;
2811 map->num_sets = 0;
2812 subst_constants (&PATTERN (insn), insn, map);
2814 /* Apply the changes if they are valid; otherwise discard them. */
2815 apply_change_group ();
2817 /* Show we don't know the value of anything stored or clobbered. */
2818 note_stores (PATTERN (insn), mark_stores);
2819 map->last_pc_value = 0;
2820 #ifdef HAVE_cc0
2821 map->last_cc0_value = 0;
2822 #endif
2824 /* Set up any constant equivalences made in this insn. */
2825 for (i = 0; i < map->num_sets; i++)
2827 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2829 int regno = REGNO (map->equiv_sets[i].dest);
2831 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2832 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2833 /* The following clause is a hack to make the case work where GNU C++
2834 reassigns a variable to make cse work right. */
2835 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2836 regno).rtx,
2837 map->equiv_sets[i].equiv))
2838 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2839 map->equiv_sets[i].equiv, map->const_age);
2841 else if (map->equiv_sets[i].dest == pc_rtx)
2842 map->last_pc_value = map->equiv_sets[i].equiv;
2843 #ifdef HAVE_cc0
2844 else if (map->equiv_sets[i].dest == cc0_rtx)
2845 map->last_cc0_value = map->equiv_sets[i].equiv;
2846 #endif
2850 /* Substitute known constants for pseudo regs in the contents of LOC,
2851 which are part of INSN.
2852 If INSN is zero, the substitution should always be done (this is used to
2853 update DECL_RTL).
2854 These changes are taken out by try_constants if the result is not valid.
2856 Note that we are more concerned with determining when the result of a SET
2857 is a constant, for further propagation, than actually inserting constants
2858 into insns; cse will do the latter task better.
2860 This function is also used to adjust the addresses of items previously addressed
2861 via the virtual stack variable or virtual incoming arguments registers. */
2863 static void
2864 subst_constants (loc, insn, map)
2865 rtx *loc;
2866 rtx insn;
2867 struct inline_remap *map;
2869 rtx x = *loc;
2870 register int i;
2871 register enum rtx_code code;
2872 register char *format_ptr;
2873 int num_changes = num_validated_changes ();
2874 rtx new = 0;
2875 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2877 code = GET_CODE (x);
2879 switch (code)
2881 case PC:
2882 case CONST_INT:
2883 case CONST_DOUBLE:
2884 case SYMBOL_REF:
2885 case CONST:
2886 case LABEL_REF:
2887 case ADDRESS:
2888 return;
2890 #ifdef HAVE_cc0
2891 case CC0:
2892 validate_change (insn, loc, map->last_cc0_value, 1);
2893 return;
2894 #endif
2896 case USE:
2897 case CLOBBER:
2898 /* The only thing we can do with a USE or CLOBBER is possibly do
2899 some substitutions in a MEM within it. */
2900 if (GET_CODE (XEXP (x, 0)) == MEM)
2901 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2902 return;
2904 case REG:
2905 /* Substitute for parms and known constants. Don't replace
2906 hard regs used as user variables with constants. */
2908 int regno = REGNO (x);
2909 struct const_equiv_data *p;
2911 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2912 && regno < VARRAY_SIZE (map->const_equiv_varray)
2913 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2914 p->rtx != 0)
2915 && p->age >= map->const_age)
2916 validate_change (insn, loc, p->rtx, 1);
2917 return;
2920 case SUBREG:
2921 /* SUBREG applied to something other than a reg
2922 should be treated as ordinary, since that must
2923 be a special hack and we don't know how to treat it specially.
2924 Consider for example mulsidi3 in m68k.md.
2925 Ordinary SUBREG of a REG needs this special treatment. */
2926 if (GET_CODE (SUBREG_REG (x)) == REG)
2928 rtx inner = SUBREG_REG (x);
2929 rtx new = 0;
2931 /* We can't call subst_constants on &SUBREG_REG (x) because any
2932 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2933 see what is inside, try to form the new SUBREG and see if that is
2934 valid. We handle two cases: extracting a full word in an
2935 integral mode and extracting the low part. */
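/* Illustrative example, assuming 32-bit words and a little-endian
   target: if INNER substitutes to the DImode constant
   0x0000000500000007, then (subreg:SI (reg:DI N) 0) folds through
   operand_subword to (const_int 7).  */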
2936 subst_constants (&inner, NULL_RTX, map);
2938 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2939 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2940 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2941 new = operand_subword (inner, SUBREG_WORD (x), 0,
2942 GET_MODE (SUBREG_REG (x)));
2944 cancel_changes (num_changes);
2945 if (new == 0 && subreg_lowpart_p (x))
2946 new = gen_lowpart_common (GET_MODE (x), inner);
2948 if (new)
2949 validate_change (insn, loc, new, 1);
2951 return;
2953 break;
2955 case MEM:
2956 subst_constants (&XEXP (x, 0), insn, map);
2958 /* If a memory address got spoiled, change it back. */
2959 if (insn != 0 && num_validated_changes () != num_changes
2960 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2961 cancel_changes (num_changes);
2962 return;
2964 case SET:
2966 /* Substitute constants in our source, and in any arguments to a
2967 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2968 itself. */
2969 rtx *dest_loc = &SET_DEST (x);
2970 rtx dest = *dest_loc;
2971 rtx src, tem;
2973 subst_constants (&SET_SRC (x), insn, map);
2974 src = SET_SRC (x);
2976 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2977 || GET_CODE (*dest_loc) == SUBREG
2978 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2980 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2982 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2983 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2985 dest_loc = &XEXP (*dest_loc, 0);
2988 /* Do substitute in the address of a destination in memory. */
2989 if (GET_CODE (*dest_loc) == MEM)
2990 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2992 /* Check for the case of DEST a SUBREG, where both it and the underlying
2993 register are no larger than one word and the SUBREG is the wider of the
2994 two. In that case, we are really setting the underlying register to the
2995 source converted to the mode of DEST. So indicate that. */
2996 if (GET_CODE (dest) == SUBREG
2997 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2998 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2999 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
3000 <= GET_MODE_SIZE (GET_MODE (dest)))
3001 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
3002 src)))
3003 src = tem, dest = SUBREG_REG (dest);
3005 /* If storing a recognizable value, save it for later recording. */
3006 if ((map->num_sets < MAX_RECOG_OPERANDS)
3007 && (CONSTANT_P (src)
3008 || (GET_CODE (src) == REG
3009 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
3010 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
3011 || (GET_CODE (src) == PLUS
3012 && GET_CODE (XEXP (src, 0)) == REG
3013 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
3014 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
3015 && CONSTANT_P (XEXP (src, 1)))
3016 || GET_CODE (src) == COMPARE
3017 #ifdef HAVE_cc0
3018 || dest == cc0_rtx
3019 #endif
3020 || (dest == pc_rtx
3021 && (src == pc_rtx || GET_CODE (src) == RETURN
3022 || GET_CODE (src) == LABEL_REF))))
3024 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
3025 it will cause us to save the COMPARE with any constants
3026 substituted, which is what we want for later. */
3027 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
3028 map->equiv_sets[map->num_sets++].dest = dest;
3031 return;
3033 default:
3034 break;
3037 format_ptr = GET_RTX_FORMAT (code);
3039 /* If the first operand is an expression, save its mode for later. */
3040 if (*format_ptr == 'e')
3041 op0_mode = GET_MODE (XEXP (x, 0));
3043 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3045 switch (*format_ptr++)
3047 case '0':
3048 break;
3050 case 'e':
3051 if (XEXP (x, i))
3052 subst_constants (&XEXP (x, i), insn, map);
3053 break;
3055 case 'u':
3056 case 'i':
3057 case 's':
3058 case 'w':
3059 break;
3061 case 'E':
3062 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
3064 int j;
3065 for (j = 0; j < XVECLEN (x, i); j++)
3066 subst_constants (&XVECEXP (x, i, j), insn, map);
3068 break;
3070 default:
3071 abort ();
3075 /* If this is a commutative operation, move a constant to the second
3076 operand unless the second operand is already a CONST_INT. */
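/* E.g. (plus:SI (const_int 4) (reg 70)) becomes
   (plus:SI (reg 70) (const_int 4)), the canonical operand order for
   the simplifications below.  */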
3077 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
3078 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3080 rtx tem = XEXP (x, 0);
3081 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3082 validate_change (insn, &XEXP (x, 1), tem, 1);
3085 /* Simplify the expression in case we put in some constants. */
3086 switch (GET_RTX_CLASS (code))
3088 case '1':
3089 if (op0_mode == MAX_MACHINE_MODE)
3090 abort ();
3091 new = simplify_unary_operation (code, GET_MODE (x),
3092 XEXP (x, 0), op0_mode);
3093 break;
3095 case '<':
3097 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
3098 if (op_mode == VOIDmode)
3099 op_mode = GET_MODE (XEXP (x, 1));
3100 new = simplify_relational_operation (code, op_mode,
3101 XEXP (x, 0), XEXP (x, 1));
3102 #ifdef FLOAT_STORE_FLAG_VALUE
3103 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3104 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3105 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
3106 GET_MODE (x)));
3107 #endif
3108 break;
3111 case '2':
3112 case 'c':
3113 new = simplify_binary_operation (code, GET_MODE (x),
3114 XEXP (x, 0), XEXP (x, 1));
3115 break;
3117 case 'b':
3118 case '3':
3119 if (op0_mode == MAX_MACHINE_MODE)
3120 abort ();
3121 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
3122 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
3123 break;
3126 if (new)
3127 validate_change (insn, loc, new, 1);
3130 /* Show that the registers modified no longer contain known constants. We
3131 are called from note_stores with parts of the new insn. */
3133 void
3134 mark_stores (dest, x)
3135 rtx dest;
3136 rtx x ATTRIBUTE_UNUSED;
3138 int regno = -1;
3139 enum machine_mode mode;
3141 /* DEST is always the innermost thing set, except in the case of
3142 SUBREGs of hard registers. */
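/* Illustrative: on a typical 32-bit target where
   HARD_REGNO_NREGS (6, DImode) is 2, a store to (reg:DI 6) wipes the
   recorded equivalences for hard regs 6 and 7.  */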
3144 if (GET_CODE (dest) == REG)
3145 regno = REGNO (dest), mode = GET_MODE (dest);
3146 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
3148 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
3149 mode = GET_MODE (SUBREG_REG (dest));
3152 if (regno >= 0)
3154 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
3155 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
3156 int i;
3158 /* Ignore virtual stack var or virtual arg register since those
3159 are handled separately. */
3160 if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
3161 && regno != VIRTUAL_STACK_VARS_REGNUM)
3162 for (i = regno; i <= last_reg; i++)
3163 if (i < VARRAY_SIZE (global_const_equiv_varray))
3164 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
3168 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
3169 pointed to by PX, they represent constants in the constant pool.
3170 Replace these with a new memory reference obtained from force_const_mem.
3171 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
3172 address of a constant pool entry. Replace them with the address of
3173 a new constant pool entry obtained from force_const_mem. */
3175 static void
3176 restore_constants (px)
3177 rtx *px;
3179 rtx x = *px;
3180 int i, j;
3181 char *fmt;
3183 if (x == 0)
3184 return;
3186 if (GET_CODE (x) == CONST_DOUBLE)
3188 /* We have to make a new CONST_DOUBLE to ensure that we account for
3189 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
3190 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3192 REAL_VALUE_TYPE d;
3194 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
3195 *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
3197 else
3198 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
3199 VOIDmode);
3202 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
3204 restore_constants (&XEXP (x, 0));
3205 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
3207 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
3209 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
3210 rtx new = XEXP (SUBREG_REG (x), 0);
3212 restore_constants (&new);
3213 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
3214 PUT_MODE (new, GET_MODE (x));
3215 *px = validize_mem (new);
3217 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
3219 rtx new = XEXP (force_const_mem (GET_MODE (XEXP (x, 0)),
3220 XEXP (XEXP (x, 0), 0)),
3221 0);
3223 #ifdef POINTERS_EXTEND_UNSIGNED
3224 if (GET_MODE (new) != GET_MODE (x))
3225 new = convert_memory_address (GET_MODE (x), new);
3226 #endif
3228 *px = new;
3230 else
3232 fmt = GET_RTX_FORMAT (GET_CODE (x));
3233 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
3235 switch (*fmt++)
3237 case 'E':
3238 for (j = 0; j < XVECLEN (x, i); j++)
3239 restore_constants (&XVECEXP (x, i, j));
3240 break;
3242 case 'e':
3243 restore_constants (&XEXP (x, i));
3244 break;
3250 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
3251 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
3252 that it points to the node itself, thus indicating that the node is its
3253 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
3254 the given node is NULL, recursively descend the decl/block tree which
3255 it is the root of, and for each other ..._DECL or BLOCK node contained
3256 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
3257 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
3258 values to point to themselves. */
3260 static void
3261 set_block_origin_self (stmt)
3262 register tree stmt;
3264 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
3266 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
3269 register tree local_decl;
3271 for (local_decl = BLOCK_VARS (stmt);
3272 local_decl != NULL_TREE;
3273 local_decl = TREE_CHAIN (local_decl))
3274 set_decl_origin_self (local_decl); /* Potential recursion. */
3278 register tree subblock;
3280 for (subblock = BLOCK_SUBBLOCKS (stmt);
3281 subblock != NULL_TREE;
3282 subblock = BLOCK_CHAIN (subblock))
3283 set_block_origin_self (subblock); /* Recurse. */
3288 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
3289 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
3290 node so that it points to the node itself, thus indicating that the
3291 node represents its own (abstract) origin. Additionally, if the
3292 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
3293 the decl/block tree of which the given node is the root, and for
3294 each other ..._DECL or BLOCK node contained therein whose
3295 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
3296 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
3297 point to themselves. */
3299 static void
3300 set_decl_origin_self (decl)
3301 register tree decl;
3303 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
3305 DECL_ABSTRACT_ORIGIN (decl) = decl;
3306 if (TREE_CODE (decl) == FUNCTION_DECL)
3308 register tree arg;
3310 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3311 DECL_ABSTRACT_ORIGIN (arg) = arg;
3312 if (DECL_INITIAL (decl) != NULL_TREE
3313 && DECL_INITIAL (decl) != error_mark_node)
3314 set_block_origin_self (DECL_INITIAL (decl));
3319 /* Given a pointer to some BLOCK node, and a boolean value to set the
3320 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
3321 the given block, and for all local decls and all local sub-blocks
3322 (recursively) which are contained therein. */
3324 static void
3325 set_block_abstract_flags (stmt, setting)
3326 register tree stmt;
3327 register int setting;
3329 register tree local_decl;
3330 register tree subblock;
3332 BLOCK_ABSTRACT (stmt) = setting;
3334 for (local_decl = BLOCK_VARS (stmt);
3335 local_decl != NULL_TREE;
3336 local_decl = TREE_CHAIN (local_decl))
3337 set_decl_abstract_flags (local_decl, setting);
3339 for (subblock = BLOCK_SUBBLOCKS (stmt);
3340 subblock != NULL_TREE;
3341 subblock = BLOCK_CHAIN (subblock))
3342 set_block_abstract_flags (subblock, setting);
3345 /* Given a pointer to some ..._DECL node, and a boolean value to set the
3346 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
3347 given decl, and (in the case where the decl is a FUNCTION_DECL) also
3348 set the abstract flags for all of the parameters, local vars, local
3349 blocks and sub-blocks (recursively) to the same setting. */
3351 void
3352 set_decl_abstract_flags (decl, setting)
3353 register tree decl;
3354 register int setting;
3356 DECL_ABSTRACT (decl) = setting;
3357 if (TREE_CODE (decl) == FUNCTION_DECL)
3359 register tree arg;
3361 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3362 DECL_ABSTRACT (arg) = setting;
3363 if (DECL_INITIAL (decl) != NULL_TREE
3364 && DECL_INITIAL (decl) != error_mark_node)
3365 set_block_abstract_flags (DECL_INITIAL (decl), setting);
3369 /* Output the assembly language code for the function FNDECL
3370 from its DECL_SAVED_INSNS. Used for inline functions that are output
3371 at end of compilation instead of where they came in the source. */
3373 void
3374 output_inline_function (fndecl)
3375 tree fndecl;
3377 rtx head;
3378 rtx last;
3380 /* Things we allocate from here on are part of this function, not
3381 permanent. */
3382 temporary_allocation ();
3384 head = DECL_SAVED_INSNS (fndecl);
3385 current_function_decl = fndecl;
3387 /* This call is only used to initialize global variables. */
3388 init_function_start (fndecl, "lossage", 1);
3390 /* Redo parameter determinations in case the FUNCTION_...
3391 macros took machine-specific actions that need to be redone. */
3392 assign_parms (fndecl, 1);
3394 /* Set stack frame size. */
3395 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
3397 /* The first is a bit of a lie (the array may be larger), but it doesn't
3398 matter too much and it isn't worth saving the actual bound. */
3399 reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
3400 regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
3401 regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
3402 regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
3403 max_parm_reg = MAX_PARMREG (head);
3404 parm_reg_stack_loc = (rtx *) PARMREG_STACK_LOC (head);
3406 stack_slot_list = STACK_SLOT_LIST (head);
3407 forced_labels = FORCED_LABELS (head);
3409 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_COMPUTED_JUMP)
3410 current_function_has_computed_jump = 1;
3412 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
3413 current_function_calls_alloca = 1;
3415 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
3416 current_function_calls_setjmp = 1;
3418 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
3419 current_function_calls_longjmp = 1;
3421 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
3422 current_function_returns_struct = 1;
3424 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
3425 current_function_returns_pcc_struct = 1;
3427 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
3428 current_function_needs_context = 1;
3430 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
3431 current_function_has_nonlocal_label = 1;
3433 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
3434 current_function_returns_pointer = 1;
3436 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
3437 current_function_uses_const_pool = 1;
3439 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
3440 current_function_uses_pic_offset_table = 1;
3442 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
3443 current_function_pops_args = POPS_ARGS (head);
3445 /* This is the only thing that the expand_function_end call which used to
3446 be here actually did, and that call can cause problems. */
3447 immediate_size_expand--;
3449 /* Find last insn and rebuild the constant pool. */
3450 for (last = FIRST_PARM_INSN (head);
3451 NEXT_INSN (last); last = NEXT_INSN (last))
3453 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
3455 restore_constants (&PATTERN (last));
3456 restore_constants (&REG_NOTES (last));
3460 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
3461 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
3463 /* We must have already output DWARF debugging information for the
3464 original (abstract) inline function declaration/definition, so
3465 we want to make sure that the debugging information we generate
3466 for this special instance of the inline function refers back to
3467 the information we already generated. To make sure that happens,
3468 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
3469 node (and for all of the local ..._DECL nodes which are its children)
3470 so that they all point to themselves. */
3472 set_decl_origin_self (fndecl);
3474 /* We're not deferring this any longer. */
3475 DECL_DEFER_OUTPUT (fndecl) = 0;
3477 /* We can't inline this anymore. */
3478 DECL_INLINE (fndecl) = 0;
3480 /* Compile this function all the way down to assembly code. */
3481 rest_of_compilation (fndecl);
3483 current_function_decl = 0;