/* Procedure integration for GNU CC.
   Copyright (C) 1988, 91, 93-97, 1998 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;

extern tree pushdecl ();
extern tree poplevel ();
/* Round VALUE up to the next highest integer that meets the
   alignment ALIGN.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
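
/* For example, CEIL_ROUND (13, 8) is ((13 + 7) & ~7) = 16, while
   CEIL_ROUND (16, 8) stays 16.  ALIGN must be a power of two for the
   mask trick above to be valid.  */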
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL)) / 2)) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
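
/* As a worked example of the threshold above: a two-argument function
   may have at most 1 + (3 * 2) / 2 = 4 insns when optimizing for size,
   or 8 * (8 + 2) = 80 insns otherwise, and still qualify for inlining.  */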
static rtx initialize_for_inline PROTO((tree, int, int, int, int));
static void finish_inline PROTO((tree, rtx));
static void adjust_copied_decl_tree PROTO((tree));
static tree copy_decl_list PROTO((tree));
static tree copy_decl_tree PROTO((tree));
static void copy_decl_rtls PROTO((tree));
static void save_constants PROTO((rtx *));
static void note_modified_parmregs PROTO((rtx, rtx));
static rtx copy_for_inline PROTO((rtx));
static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
static void save_constants_in_decl_trees PROTO ((tree));
static void subst_constants PROTO((rtx *, rtx, struct inline_remap *));
static void restore_constants PROTO((rtx *));
static void set_block_origin_self PROTO((tree));
static void set_decl_origin_self PROTO((tree));
static void set_block_abstract_flags PROTO((tree, int));

void set_decl_abstract_flags PROTO((tree, int));
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    {
      push_obstacks_nochange ();
      end_temporary_allocation ();
      x = map->label_map[i] = gen_label_rtx ();
      pop_obstacks ();
    }

  return x;
}
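
/* Note that this lazy scheme relies on the caller having allocated
   map->label_map with every entry zeroed; a NULL_RTX entry means the
   label has not been requested yet, so callers must not store anything
   other than label rtxs (or NULL_RTX) in the map.  */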
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning message with a single %s
   for the function's name.  */

char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  int max_insns = INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;
  rtx result;

  /* No inlines with varargs.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return "varargs function cannot be inline";

  if (current_function_calls_alloca)
    return "function using alloca cannot be inline";

  if (current_function_contains_functions)
    return "function with nested functions cannot be inline";

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
    return "function too large to be inline";

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return "no prototype, and parameter address used; cannot be inline";
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return BLKmode structures in registers.  */
  if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return "function with varying-size return value cannot be inline";

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return "function with varying-size parameter cannot be inline";
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return "function with transparent union parameter cannot be inline";
    }

  if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  ninsns++;

      if (ninsns >= max_insns)
	return "function too large to be inline";
    }

  /* We cannot inline this function if forced_labels is non-zero.  This
     implies that a label in this function was used as an initializer.
     Because labels can not be duplicated, all labels in the function
     will be renamed when it is inlined.  However, there is no way to find
     and fix all variables initialized with addresses of labels in this
     function, hence inlining is impossible.  */

  if (forced_labels)
    return "function with label addresses used in initializers cannot inline";

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return "function with nonlocal goto cannot be inline";

  /* This is a hack, until the inliner is taught about eh regions at
     the start of the function.  */
  for (insn = get_insns ();
       insn
	 && ! (GET_CODE (insn) == NOTE
	       && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    {
      if (insn && GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	return "function with complex parameters cannot be inline";
    }

  /* We can't inline functions that return a PARALLEL rtx.  */
  result = DECL_RTL (DECL_RESULT (fndecl));
  if (result && GET_CODE (result) == PARALLEL)
    return "inline functions not supported for this return value type";

  return 0;
}
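
/* A minimal sketch of how a caller might use the predicate above (this
   fragment is illustrative, patterned after rest_of_compilation, and is
   not a quotation of it):

     char *lose = function_cannot_inline_p (fndecl);
     if (lose != 0)
       {
	 warning_with_decl (fndecl, lose);
	 DECL_INLINE (fndecl) = 0;
       }

   That is, a nonzero return value doubles as the warning format string,
   with one %s for the function's name.  */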
/* Variables used within save_for_inline.  */

/* Mapping from old pseudo-register to new pseudo-registers.
   The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *reg_map;

/* Mapping from old code-labels to new code-labels.
   The first element of this map is label_map[min_labelno].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *label_map;

/* Mapping from old insn uid's to copied insns.
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *insn_map;

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* Keep track of first pseudo-register beyond those that are parms.  */
extern int max_parm_reg;
extern rtx *parm_reg_stack_loc;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise, this is the copied constraints vector.  */
static rtvec copy_asm_constraints_vector;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
/* Subroutine for `save_for_inline{copying,nocopy}'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtx
initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
     tree fndecl;
     int min_labelno;
     int max_labelno;
     int max_reg;
     int copy;
{
  int function_flags, i;
  rtvec arg_vector;
  tree parms;

  /* Compute the values of any flags we must restore when inlining this.  */

  function_flags
    = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
       + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
       + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
       + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
       + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
       + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
       + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
       + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
       + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
       + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
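
  /* Each current_function_* value above is either 0 or 1, so the sum
     simply builds a bitmask.  For instance, a function that calls setjmp
     and returns a pointer ends up with
     FUNCTION_FLAGS_CALLS_SETJMP | FUNCTION_FLAGS_RETURNS_POINTER.  */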
  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);
      int copied_incoming = 0;

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
	 otherwise the copy_rtx call below will not unshare the MEM since
	 it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	  && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	p = XEXP (XEXP (p, 0), 0);

      if (GET_CODE (p) == MEM && copy)
	{
	  /* Copy the rtl so that modifications of the addresses
	     later in compilation won't affect this arg_vector.
	     Virtual register instantiation can screw the address
	     of the rtl.  */
	  rtx new = copy_rtx (p);

	  /* Don't leave the old copy anywhere in this decl.  */
	  if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
	      || (GET_CODE (DECL_RTL (parms)) == MEM
		  && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
		  && (XEXP (DECL_RTL (parms), 0)
		      == XEXP (DECL_INCOMING_RTL (parms), 0))))
	    DECL_INCOMING_RTL (parms) = new, copied_incoming = 1;

	  DECL_RTL (parms) = new;
	}

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;

      /* Copy DECL_INCOMING_RTL if not done already.  This can
	 happen if DECL_RTL is a reg.  */
      if (copy && ! copied_incoming)
	{
	  p = DECL_INCOMING_RTL (parms);

	  /* If we have (mem (addressof (mem ...))), use the inner MEM since
	     otherwise the copy_rtx call below will not unshare the MEM since
	     it shares ADDRESSOF.  */
	  if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	      && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	    p = XEXP (XEXP (p, 0), 0);

	  if (GET_CODE (p) == MEM)
	    DECL_INCOMING_RTL (parms) = copy_rtx (p);
	}
    }

  /* Assume we start out in the insns that set up the parameters.  */
  in_nonparm_insns = 0;

  /* The list of DECL_SAVED_INSNS starts off with a header which
     contains the following information:

     the first insn of the function (not including the insns that copy
     parameters into registers).
     the first parameter insn of the function,
     the first label used by that function,
     the last label used by that function,
     the highest register number used for parameters,
     the total number of registers used,
     the size of the incoming stack area for parameters,
     the number of bytes popped on return,
     the stack slot list,
     the labels that are forced to exist,
     some flags that are used to restore compiler globals,
     the value of current_function_outgoing_args_size,
     the original argument vector,
     the original DECL_INITIAL,
     and pointers to the table of pseudo regs, pointer flags, and alignment. */

  return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
				max_parm_reg, max_reg,
				current_function_args_size,
				current_function_pops_args,
				stack_slot_list, forced_labels, function_flags,
				current_function_outgoing_args_size,
				arg_vector, (rtx) DECL_INITIAL (fndecl),
				(rtvec) regno_reg_rtx, regno_pointer_flag,
				regno_pointer_align,
				(rtvec) parm_reg_stack_loc);
}
/* Subroutine for `save_for_inline{copying,nocopy}'.  Finishes up the
   things that must be done to make FNDECL expandable as an inline function.
   HEAD contains the chain of insns to which FNDECL will expand.  */

static void
finish_inline (fndecl, head)
     tree fndecl;
     rtx head;
{
  FIRST_FUNCTION_INSN (head) = get_first_nonparm_insn ();
  FIRST_PARM_INSN (head) = get_insns ();
  DECL_SAVED_INSNS (fndecl) = head;
  DECL_FRAME_SIZE (fndecl) = get_frame_size ();
}
/* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
   they all point to the new (copied) rtxs.  */

static void
adjust_copied_decl_tree (block)
     register tree block;
{
  register tree subblock;
  register rtx original_end;

  original_end = BLOCK_END_NOTE (block);
  if (original_end)
    {
      BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
      NOTE_SOURCE_FILE (original_end) = 0;
    }

  /* Process all subblocks.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = TREE_CHAIN (subblock))
    adjust_copied_decl_tree (subblock);
}
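
/* The NOTE_SOURCE_FILE manipulation above pairs with save_for_inline_copying:
   when that function copies a NOTE_INSN_BLOCK_END note, it stashes a pointer
   to the copy in the original note's NOTE_SOURCE_FILE field.  Here that
   pointer is moved into BLOCK_END_NOTE and the field is cleared, so the
   copied BLOCK tree ends up pointing at the copied notes.  */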
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This function is called when we are going to immediately compile
   the insns for FNDECL.  The insns in maybepermanent_obstack cannot be
   modified by the compilation process, so we copy all of them to
   new storage and consider the new insns to be the insn chain to be
   compiled.  Our caller (rest_of_compilation) saves the original
   DECL_INITIAL and DECL_ARGUMENTS; here we copy them.  */

/* ??? The nonlocal_label list should be adjusted also.  However, since
   a function that contains a nested function never gets inlined currently,
   the nonlocal_label list will always be empty, so we don't worry about
   it for now.  */

void
save_for_inline_copying (fndecl)
     tree fndecl;
{
  rtx first_insn, last_insn, insn;
  rtx head, copy;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;
  rtx first_nonparm_insn;
  char *new, *new1;
  rtx *new_parm_reg_stack_loc;
  rtx *new2;

  /* Make and emit a return-label if we have not already done so.
     Do this before recording the bounds on label numbers.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  /* Get some bounds on the labels and registers used.  */

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  max_reg = max_reg_num ();

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);

  if (current_function_uses_const_pool)
    {
      /* Replace any constant pool references with the actual constant.  We
	 will put the constants back in the copy made below.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  {
	    save_constants (&PATTERN (insn));
	    if (REG_NOTES (insn))
	      save_constants (&REG_NOTES (insn));
	  }

      /* Also scan all decls, and replace any constant pool references with the
	 actual constant.  */
      save_constants_in_decl_trees (DECL_INITIAL (fndecl));

      /* Clear out the constant pool so that we can recreate it with the
	 copied constants below.  */
      init_const_rtx_hash_table ();
      clear_const_double_mem ();
    }

  max_uid = INSN_UID (head);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  /* Copy the chain insns of this function.
     Install the copied chain as the insns of this function,
     for continued compilation;
     the original chain is recorded as the DECL_SAVED_INSNS
     for inlining future calls.  */

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
  first_insn = rtx_alloc (NOTE);
  NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
  NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
  INSN_UID (first_insn) = INSN_UID (insn);
  PREV_INSN (first_insn) = NULL;
  NEXT_INSN (first_insn) = NULL;
  last_insn = first_insn;

  /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
     Make these new rtx's now, and install them in regno_reg_rtx, so they
     will be the official pseudo-reg rtx's for the rest of compilation.  */

  reg_map = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));

  len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
  for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
    reg_map[i] = (rtx) obstack_copy (function_maybepermanent_obstack,
				     regno_reg_rtx[i], len);

  regno_reg_rtx = reg_map;

  /* Put copies of all the virtual register rtx into the new regno_reg_rtx.  */
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;

  /* Likewise each label rtx must have a unique rtx as its copy.  */

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  Some examples were > 2Mb in size.  */
  label_map = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();

  /* Likewise for parm_reg_stack_slot.  */
  new_parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
  for (i = 0; i < max_parm_reg; i++)
    new_parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);

  parm_reg_stack_loc = new_parm_reg_stack_loc;

  /* Record the mapping of old insns to copied insns.  */

  insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
  bzero ((char *) insn_map, max_uid * sizeof (rtx));

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
     (the former occurs when a variable has its address taken)
     since these may be shared and can be changed by virtual
     register instantiation.  DECL_RTL values for our arguments
     have already been copied by initialize_for_inline.  */
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
    if (GET_CODE (regno_reg_rtx[i]) == MEM)
      XEXP (regno_reg_rtx[i], 0)
	= copy_for_inline (XEXP (regno_reg_rtx[i], 0));

  /* Copy the parm_reg_stack_loc array, and substitute for all of the rtx
     contained in it.  */
  new2 = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
  bcopy ((char *) parm_reg_stack_loc, (char *) new2,
	 max_parm_reg * sizeof (rtx));
  parm_reg_stack_loc = new2;
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; ++i)
    if (parm_reg_stack_loc[i])
      parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);

  /* Copy the tree of subblocks of the function, and the decls in them.
     We will use the copy for compiling this function, then restore the original
     subblocks and decls for use when inlining this function.

     Several parts of the compiler modify BLOCK trees.  In particular,
     instantiate_virtual_regs will instantiate any virtual regs
     mentioned in the DECL_RTLs of the decls, and loop
     unrolling will replicate any BLOCK trees inside an unrolled loop.

     The modified subblocks or DECL_RTLs would be incorrect for the original rtl
     which we will use for inlining.  The rtl might even contain pseudoregs
     whose space has been freed.  */

  DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
  DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));

  /* Now copy each DECL_RTL which is a MEM,
     so it is safe to modify their addresses.  */
  copy_decl_rtls (DECL_INITIAL (fndecl));

  /* The fndecl node acts as its own progenitor, so mark it as such.  */
  DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
  /* Now copy the chain of insns.  Do this in two passes.  The first pass
     copies each insn and its body; the second pass copies the REG_NOTES.
     This is because a REG_NOTE may have a forward pointer to another insn.  */
  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      orig_asm_operands_vector = 0;

      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* No need to keep these.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
	    continue;

	  copy = rtx_alloc (NOTE);
	  NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
	    NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
	  else
	    {
	      NOTE_SOURCE_FILE (insn) = (char *) copy;
	      NOTE_SOURCE_FILE (copy) = 0;
	    }
	  if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
	      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
	    {
	      int new_region = CODE_LABEL_NUMBER
		(label_map[NOTE_BLOCK_NUMBER (copy)]);

	      /* We have to duplicate the handlers for the original.  */
	      if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
		{
		  handler_info *ptr, *temp;
		  int nr;
		  nr = new_eh_region_entry (new_region);
		  ptr = get_first_handler (NOTE_BLOCK_NUMBER (copy));
		  for ( ; ptr; ptr = ptr->next)
		    {
		      temp = get_new_handler (
			label_map[CODE_LABEL_NUMBER (ptr->handler_label)],
			ptr->type_info);
		      add_new_handler (nr, temp);
		    }
		}

	      /* We have to forward these both to match the new exception
		 region.  */
	      NOTE_BLOCK_NUMBER (copy) = new_region;
	    }
	  RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
	  break;

	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	  copy = rtx_alloc (GET_CODE (insn));

	  if (GET_CODE (insn) == CALL_INSN)
	    CALL_INSN_FUNCTION_USAGE (copy)
	      = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));

	  PATTERN (copy) = copy_for_inline (PATTERN (insn));
	  INSN_CODE (copy) = -1;
	  LOG_LINKS (copy) = NULL_RTX;
	  RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
	  break;

	case CODE_LABEL:
	  copy = label_map[CODE_LABEL_NUMBER (insn)];
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  break;

	case BARRIER:
	  copy = rtx_alloc (BARRIER);
	  break;

	default:
	  abort ();
	}
      INSN_UID (copy) = INSN_UID (insn);
      insn_map[INSN_UID (insn)] = copy;
      NEXT_INSN (last_insn) = copy;
      PREV_INSN (copy) = last_insn;
      last_insn = copy;
    }

  adjust_copied_decl_tree (DECL_INITIAL (fndecl));

  /* Now copy the REG_NOTES.  */
  for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	&& insn_map[INSN_UID (insn)])
      REG_NOTES (insn_map[INSN_UID (insn)])
	= copy_for_inline (REG_NOTES (insn));

  NEXT_INSN (last_insn) = NULL;

  finish_inline (fndecl, head);

  /* Make new versions of the register tables.  */
  new = (char *) savealloc (regno_pointer_flag_length);
  bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
  new1 = (char *) savealloc (regno_pointer_flag_length);
  bcopy (regno_pointer_align, new1, regno_pointer_flag_length);

  regno_pointer_flag = new;
  regno_pointer_align = new1;

  set_new_first_and_last_insn (first_insn, last_insn);

  if (label_map)
    free (label_map);
}
/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  While copying,
   for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
   set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
   point to the corresponding (abstract) original node.  */

static tree
copy_decl_list (list)
     tree list;
{
  tree head;
  register tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
    DECL_ABSTRACT_ORIGIN (head) = list;
  next = TREE_CHAIN (list);
  while (next)
    {
      register tree copy;

      copy = copy_node (next);
      if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
	DECL_ABSTRACT_ORIGIN (copy) = next;
      TREE_CHAIN (prev) = copy;
      prev = copy;
      next = TREE_CHAIN (next);
    }
  return head;
}
/* Make a copy of the entire tree of blocks BLOCK, and return it.  */

static tree
copy_decl_tree (block)
     tree block;
{
  tree t, vars, subblocks;

  vars = copy_decl_list (BLOCK_VARS (block));
  subblocks = 0;

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree copy = copy_decl_tree (t);
      TREE_CHAIN (copy) = subblocks;
      subblocks = copy;
    }

  t = copy_node (block);
  BLOCK_VARS (t) = vars;
  BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
  /* If the BLOCK being cloned is already marked as having been instantiated
     from something else, then leave that `origin' marking alone.  Otherwise,
     mark the clone as having originated from the BLOCK we are cloning.  */
  if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
    BLOCK_ABSTRACT_ORIGIN (t) = block;
  return t;
}
/* Copy DECL_RTLs in all decls in the given BLOCK node.  */

static void
copy_decl_rtls (block)
     tree block;
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
      DECL_RTL (t) = copy_for_inline (DECL_RTL (t));

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    copy_decl_rtls (t);
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtx head;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  head = initialize_for_inline (fndecl, get_first_label_num (),
				max_label_num (), max_reg_num (), 0);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  if (current_function_uses_const_pool)
	    {
	      /* Replace any constant pool references with the actual constant.
		 We will put the constant back if we need to write the
		 function out after all.  */
	      save_constants (&PATTERN (insn));
	      if (REG_NOTES (insn))
		save_constants (&REG_NOTES (insn));
	    }

	  /* Record what interesting things happen to our parameters.  */
	  note_stores (PATTERN (insn), note_modified_parmregs);
	}
    }

  /* Also scan all decls, and replace any constant pool references with the
     actual constant.  */
  save_constants_in_decl_trees (DECL_INITIAL (fndecl));

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  finish_inline (fndecl, head);
}
/* Given PX, a pointer into an insn, search for references to the constant
   pool.  Replace each with a CONST that has the mode of the original
   constant, contains the constant, and has RTX_INTEGRATED_P set.
   Similarly, constant pool addresses not enclosed in a MEM are replaced
   with an ADDRESS and CONST rtx which also gives the constant, its
   mode, the mode of the address, and has RTX_INTEGRATED_P set.  */

static void
save_constants (px)
     rtx *px;
{
  rtx x;
  int i, j;

 again:
  x = *px;

  /* If this is a CONST_DOUBLE, don't try to fix things up in
     CONST_DOUBLE_MEM, because this is an infinite recursion.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return;
  else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	   && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    {
      enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
      rtx new = gen_rtx_CONST (const_mode, get_pool_constant (XEXP (x, 0)));
      RTX_INTEGRATED_P (new) = 1;

      /* If the MEM was in a different mode than the constant (perhaps we
	 were only looking at the low-order part), surround it with a
	 SUBREG so we can save both modes.  */

      if (GET_MODE (x) != const_mode)
	{
	  new = gen_rtx_SUBREG (GET_MODE (x), new, 0);
	  RTX_INTEGRATED_P (new) = 1;
	}

      *px = new;
      save_constants (&XEXP (*px, 0));
    }
  else if (GET_CODE (x) == SYMBOL_REF
	   && CONSTANT_POOL_ADDRESS_P (x))
    {
      *px = gen_rtx_ADDRESS (GET_MODE (x),
			     gen_rtx_CONST (get_pool_mode (x),
					    get_pool_constant (x)));
      save_constants (&XEXP (*px, 0));
      RTX_INTEGRATED_P (*px) = 1;
    }

  else
    {
      char *fmt = GET_RTX_FORMAT (GET_CODE (x));
      int len = GET_RTX_LENGTH (GET_CODE (x));

      for (i = len - 1; i >= 0; i--)
	{
	  switch (fmt[i])
	    {
	    case 'E':
	      for (j = 0; j < XVECLEN (x, i); j++)
		save_constants (&XVECEXP (x, i, j));
	      break;

	    case 'e':
	      if (XEXP (x, i) == 0)
		continue;
	      if (i == 0)
		{
		  /* Hack tail-recursion here.  */
		  px = &XEXP (x, 0);
		  goto again;
		}
	      save_constants (&XEXP (x, i));
	      break;
	    }
	}
    }
}
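
/* As an illustration of the transformation above (the symbol name here is
   made up): a pool reference such as (mem:DF (symbol_ref/u "*.LC0")) becomes
   (const:DF <pool constant>) with RTX_INTEGRATED_P set, and a bare pool
   address becomes (address (const:DF <pool constant>)).  copy_for_inline
   and restore_constants recognize these markers and turn them back into
   real constant pool references.  */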
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.  The original rtl insns
   will be saved for inlining; this is used to make a copy
   which is used to finish compiling the inline function itself.

   If we find a "saved" constant pool entry, one which was replaced with
   the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.

   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */

static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register rtx new;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case SYMBOL_REF:
    case PC:
    case CC0:
      return x;

    case CONST_DOUBLE:
      /* We have to make a new CONST_DOUBLE to ensure that we account for
	 it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
				   VOIDmode);

    case CONST:
      /* Get constant pool entry for constant in the pool.  */
      if (RTX_INTEGRATED_P (x))
	return validize_mem (force_const_mem (GET_MODE (x),
					      copy_for_inline (XEXP (x, 0))));
      break;

    case SUBREG:
      /* Get constant pool entry, but access in different mode.  */
      if (RTX_INTEGRATED_P (x))
	{
	  new = force_const_mem (GET_MODE (SUBREG_REG (x)),
				 copy_for_inline (XEXP (SUBREG_REG (x), 0)));

	  PUT_MODE (new, GET_MODE (x));
	  return validize_mem (new);
	}
      break;
    case ADDRESS:
      /* If this ADDRESS is not a saved constant pool reference, it is an
	 error.  Otherwise get the constant pool address back.  */
      if (! RTX_INTEGRATED_P (x))
	abort ();

      new = force_const_mem (GET_MODE (XEXP (x, 0)),
			     copy_for_inline (XEXP (XEXP (x, 0), 0)));
      new = XEXP (new, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (new) != GET_MODE (x))
	new = convert_memory_address (GET_MODE (x), new);
#endif

      return new;
    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
	 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
	 We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
	{
	  x = rtx_alloc (ASM_OPERANDS);
	  x->volatil = orig->volatil;
	  XSTR (x, 0) = XSTR (orig, 0);
	  XSTR (x, 1) = XSTR (orig, 1);
	  XINT (x, 2) = XINT (orig, 2);
	  XVEC (x, 3) = copy_asm_operands_vector;
	  XVEC (x, 4) = copy_asm_constraints_vector;
	  XSTR (x, 5) = XSTR (orig, 5);
	  XINT (x, 6) = XINT (orig, 6);
	  return x;
	}
      break;

    case MEM:
      /* A MEM is usually allowed to be shared if its address is constant
	 or is a constant plus one of the special registers.

	 We do not allow sharing of addresses that are either a special
	 register or the sum of a constant and a special register because
	 it is possible for unshare_all_rtl to copy the address, into memory
	 that won't be saved.  Although the MEM can safely be shared, and
	 won't be copied there, the address itself cannot be shared, and may
	 need to be copied.

	 There are also two exceptions with constants: The first is if the
	 constant is a LABEL_REF or the sum of the LABEL_REF
	 and an integer.  This case can happen if we have an inline
	 function that supplies a constant operand to the call of another
	 inline function that uses it in a switch statement.  In this case,
	 we will be replacing the LABEL_REF, so we have to replace this MEM
	 as well.

	 The second case is if we have a (const (plus (address ..) ...)).
	 In that case we need to put back the address of the constant pool
	 entry.  */

      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  && GET_CODE (XEXP (x, 0)) != LABEL_REF
	  && ! (GET_CODE (XEXP (x, 0)) == CONST
		&& (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
		    && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
			 == LABEL_REF)
			|| (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
			    == ADDRESS)))))
	return x;
      break;

    case LABEL_REF:
      /* If this is a non-local label, just make a new LABEL_REF.
	 Otherwise, use the new label as well.  */
      x = gen_rtx_LABEL_REF (GET_MODE (orig),
			     LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
			     : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
      LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
      return x;

    case REG:
      if (REGNO (x) > LAST_VIRTUAL_REGISTER)
	return reg_map[REGNO (x)];
      else
	return x;

    case SET:
      /* If a parm that gets modified lives in a pseudo-reg,
	 clear its TREE_READONLY to prevent certain optimizations.  */
      {
	rtx dest = SET_DEST (x);

	while (GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == ZERO_EXTRACT
	       || GET_CODE (dest) == SUBREG)
	  dest = XEXP (dest, 0);

	if (GET_CODE (dest) == REG
	    && REGNO (dest) < max_parm_reg
	    && REGNO (dest) >= FIRST_PSEUDO_REGISTER
	    && parmdecl_map[REGNO (dest)] != 0
	    /* The insn to load an arg pseudo from a stack slot
	       does not count as modifying it.  */
	    && in_nonparm_insns)
	  TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      }
      break;

#if 0 /* This is a good idea, but here is the wrong place for it.  */
      /* Arrange that CONST_INTs always appear as the second operand
	 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
	 always appear as the first.  */
    case PLUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
	  || (XEXP (x, 1) == frame_pointer_rtx
	      || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
		  && XEXP (x, 1) == arg_pointer_rtx)))
	{
	  rtx t = XEXP (x, 0);
	  XEXP (x, 0) = XEXP (x, 1);
	  XEXP (x, 1) = t;
	}
      break;
#endif

    default:
      break;
    }

  /* Replace this rtx with a copy of itself.  */

  x = rtx_alloc (code);
  bcopy ((char *) orig, (char *) x,
	 (sizeof (*x) - sizeof (x->fld)
	  + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  XEXP (x, i) = copy_for_inline (XEXP (x, i));
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    {
	      register int j;

	      XVEC (x, i) = gen_rtvec_vv (XVECLEN (x, i), XVEC (x, i)->elem);
	      for (j = 0; j < XVECLEN (x, i); j++)
		XVECEXP (x, i, j)
		  = copy_for_inline (XVECEXP (x, i, j));
	    }
	  break;
	}
    }

  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

rtx *global_const_equiv_map;
int global_const_equiv_map_size;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT	\
   && GET_CODE (XEXP (X, 0)) == REG				\
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER		\
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
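
/* FIXED_BASE_PLUS_P above matches addresses of the form
   (plus (reg) (const_int)) where the register is one of the virtual
   registers, e.g. (plus:SI (reg:SI virtual-stack-vars) (const_int 8)).
   Such addresses are fixed for the duration of the function, which is
   why they are safe to record in const_equiv_map alongside constants.  */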
/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  tree formal, actual, block;
  rtx header = DECL_SAVED_INSNS (fndecl);
  rtx insns = FIRST_FUNCTION_INSN (header);
  rtx parm_insns = FIRST_PARM_INSN (header);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = FIRST_LABELNO (header);
  int max_labelno = LAST_LABELNO (header);
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
  rtx static_chain_value = 0;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = MAX_REGNUM (header) + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();
  nargs = list_length (DECL_ARGUMENTS (fndecl));

  /* Check that the parm types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
  arg_trees = (tree *) alloca (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot
	    = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
				 int_size_in_bytes (TREE_TYPE (arg)), 1);
	  MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));

	  store_expr (arg, stack_slot, 0);

	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    /* The mode of LOC and ARG can differ if LOC was a variable
	       that had its mode promoted via PROMOTED_MODE.  */
	    arg_vals[i] = convert_modes (GET_MODE (loc),
					 TYPE_MODE (TREE_TYPE (arg)),
					 expand_expr (arg, NULL_RTX, mode,
						      EXPAND_SUM),
					 TREE_UNSIGNED (TREE_TYPE (formal)));
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && POINTER_TYPE_P (TREE_TYPE (formal)))
	mark_reg_pointer (arg_vals[i],
			  (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
			   / BITS_PER_UNIT));
    }

  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) map->reg_map, max_regno * sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;

  map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
  bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = INSN_UID (header);

  map->integrating = 1;

  /* const_equiv_map maps pseudos in our routine to constants, so it needs to
     be large enough for all our pseudos.  This is the number we are currently
     using plus the number in the called routine, plus 15 for each arg,
     five to compute the virtual frame pointer, and five for the return value.
     This should be enough for most cases.  We do not reference entries
     outside the range of the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  map->const_equiv_map_size
    = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;

  map->const_equiv_map
    = (rtx *) alloca (map->const_equiv_map_size * sizeof (rtx));
  bzero ((char *) map->const_equiv_map,
	 map->const_equiv_map_size * sizeof (rtx));

  map->const_age_map
    = (unsigned *) alloca (map->const_equiv_map_size * sizeof (unsigned));
  bzero ((char *) map->const_age_map,
	 map->const_equiv_map_size * sizeof (unsigned));
  map->const_age = 0;
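
  /* A worked instance of the sizing formula (illustrative numbers): if
     max_reg_num () is 100, the inlined body uses 50 pseudos beyond
     FIRST_PSEUDO_REGISTER, and there are two args, the map gets
     100 + 50 + 15*2 + 10 = 190 entries.  */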
1523 /* Record the current insn in case we have to set up pointers to frame
1524 and argument memory blocks. If there are no insns yet, add a dummy
1525 insn that can be used as an insertion point. */
1526 map->insns_at_start = get_last_insn ();
1527 if (map->insns_at_start == 0)
1528 map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
1530 map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
1531 map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);
1533 /* Update the outgoing argument size to allow for those in the inlined
1534 function. */
1535 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1536 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1538 /* If the inline function needs to make PIC references, that means
1539 that this function's PIC offset table must be used. */
1540 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1541 current_function_uses_pic_offset_table = 1;
1543 /* If this function needs a context, set it up. */
1544 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
1545 static_chain_value = lookup_static_chain (fndecl);
1547 if (GET_CODE (parm_insns) == NOTE
1548 && NOTE_LINE_NUMBER (parm_insns) > 0)
1550 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1551 NOTE_LINE_NUMBER (parm_insns));
1552 if (note)
1553 RTX_INTEGRATED_P (note) = 1;
1556 /* Process each argument. For each, set up things so that the function's
1557 reference to the argument will refer to the argument being passed.
1558 We only replace REG with REG here. Any simplifications are done
1559 via const_equiv_map.
1561 We make two passes: In the first, we deal with parameters that will
1562 be placed into registers, since we need to ensure that the allocated
1563 register number fits in const_equiv_map. Then we store all non-register
1564 parameters into their memory location. */
1566 /* Don't try to free temp stack slots here, because we may put one of the
1567 parameters into a temp stack slot. */
1569 for (i = 0; i < nargs; i++)
1571 rtx copy = arg_vals[i];
1573 loc = RTVEC_ELT (arg_vector, i);
1575 /* There are three cases, each handled separately. */
1576 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1577 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1579 /* This must be an object passed by invisible reference (it could
1580 also be a variable-sized object, but we forbid inlining functions
1581 with variable-sized arguments). COPY is the address of the
1582 actual value (this computation will cause it to be copied). We
1583 map that address for the register, noting the actual address as
1584 an equivalent in case it can be substituted into the insns. */
1586 if (GET_CODE (copy) != REG)
1588 temp = copy_addr_to_reg (copy);
1589 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1590 && REGNO (temp) < map->const_equiv_map_size)
1592 map->const_equiv_map[REGNO (temp)] = copy;
1593 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1595 copy = temp;
1597 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1599 else if (GET_CODE (loc) == MEM)
1601 /* This is the case of a parameter that lives in memory.
1602 It will live in the block we allocate in the called routine's
1603 frame that simulates the incoming argument area. Do nothing
1604 now; we will call store_expr later. */
1607 else if (GET_CODE (loc) == REG)
1609 /* This is the good case where the parameter is in a register.
1610 If it is read-only and our argument is a constant, set up the
1611 constant equivalence.
1613 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1614 that flag set if it is a register.
1616 Also, don't allow hard registers here; they might not be valid
1617 when substituted into insns. */
1619 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1620 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
1621 && ! REG_USERVAR_P (copy))
1622 || (GET_CODE (copy) == REG
1623 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
1625 temp = copy_to_mode_reg (GET_MODE (loc), copy);
1626 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
1627 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1628 && REGNO (temp) < map->const_equiv_map_size)
1630 map->const_equiv_map[REGNO (temp)] = copy;
1631 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1633 copy = temp;
1635 map->reg_map[REGNO (loc)] = copy;
1637 else if (GET_CODE (loc) == CONCAT)
1639 /* This is the good case where the parameter is in a
1640 pair of separate pseudos.
1641 If it is read-only and our argument is a constant, set up the
1642 constant equivalence.
1644 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1645 that flag set if it is a register.
1647 Also, don't allow hard registers here; they might not be valid
1648 when substituted into insns. */
1649 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
1650 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
1651 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
1652 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
1654 if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
1655 || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
1656 && ! REG_USERVAR_P (copyreal))
1657 || (GET_CODE (copyreal) == REG
1658 && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
1660 temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
1661 REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
1662 if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
1663 && REGNO (temp) < map->const_equiv_map_size)
1665 map->const_equiv_map[REGNO (temp)] = copyreal;
1666 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1668 copyreal = temp;
1670 map->reg_map[REGNO (locreal)] = copyreal;
1672 if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
1673 || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
1674 && ! REG_USERVAR_P (copyimag))
1675 || (GET_CODE (copyimag) == REG
1676 && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
1678 temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
1679 REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
1680 if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
1681 && REGNO (temp) < map->const_equiv_map_size)
1683 map->const_equiv_map[REGNO (temp)] = copyimag;
1684 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1686 copyimag = temp;
1688 map->reg_map[REGNO (locimag)] = copyimag;
1690 else
1691 abort ();
1694 /* Now do the parameters that will be placed in memory. */
1696 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1697 formal; formal = TREE_CHAIN (formal), i++)
1699 loc = RTVEC_ELT (arg_vector, i);
1701 if (GET_CODE (loc) == MEM
1702 /* Exclude case handled above. */
1703 && ! (GET_CODE (XEXP (loc, 0)) == REG
1704 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1706 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1707 DECL_SOURCE_LINE (formal));
1708 if (note)
1709 RTX_INTEGRATED_P (note) = 1;
1711 /* Compute the address in the area we reserved and store the
1712 value there. */
1713 temp = copy_rtx_and_substitute (loc, map);
1714 subst_constants (&temp, NULL_RTX, map);
1715 apply_change_group ();
1716 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1717 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1718 store_expr (arg_trees[i], temp, 0);
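/* In effect this performs, for each stack-resident parameter, the
   equivalent of "*(remapped address of LOC) = arg_trees[i]", where the
   remapped address points into the block allocated in the caller's
   frame to simulate the inlined function's incoming argument area (see
   the VIRTUAL_INCOMING_ARGS_REGNUM handling in copy_rtx_and_substitute
   below).  */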
1722 /* Deal with the places that the function puts its result.
1723 We are driven by what is placed into DECL_RESULT.
1725 Initially, we assume that we don't have any special handling for
1726 REG_FUNCTION_VALUE_P. */
1728 map->inline_target = 0;
1729 loc = DECL_RTL (DECL_RESULT (fndecl));
1730 if (TYPE_MODE (type) == VOIDmode)
1731 /* There is no return value to worry about. */
1733 else if (GET_CODE (loc) == MEM)
1735 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1736 abort ();
1738 /* Pass the function the address in which to return a structure value.
1739 Note that a constructor can cause someone to call us with
1740 STRUCTURE_VALUE_ADDR, but the initialization takes place
1741 via the first parameter, rather than the struct return address.
1743 We have two cases: If the address is a simple register indirect,
1744 use the mapping mechanism to point that register to our structure
1745 return address. Otherwise, store the structure return value into
1746 the place that it will be referenced from. */
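/* Hedged example (hypothetical regno): for a register-indirect return
   slot LOC = (mem:BLK (reg:SI 70)), the first branch below points
   reg_map[70] at a pseudo holding STRUCTURE_VALUE_ADDR, so stores
   through the struct-return pointer in the copied body land directly in
   the caller's result object.  */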
1748 if (GET_CODE (XEXP (loc, 0)) == REG)
1750 temp = force_reg (Pmode,
1751 force_operand (structure_value_addr, NULL_RTX));
1752 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1753 if ((CONSTANT_P (structure_value_addr)
1754 || GET_CODE (structure_value_addr) == ADDRESSOF
1755 || (GET_CODE (structure_value_addr) == PLUS
1756 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1757 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1758 && REGNO (temp) < map->const_equiv_map_size)
1760 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1761 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1764 else
1766 temp = copy_rtx_and_substitute (loc, map);
1767 subst_constants (&temp, NULL_RTX, map);
1768 apply_change_group ();
1769 emit_move_insn (temp, structure_value_addr);
1772 else if (ignore)
1773 /* We will ignore the result value, so don't look at its structure.
1774 Note that preparations for an aggregate return value
1775 do need to be made (above) even if it will be ignored. */
1777 else if (GET_CODE (loc) == REG)
1779 /* The function returns an object in a register and we use the return
1780 value. Set up our target for remapping. */
1782 /* Machine mode the function was declared to return. */
1783 enum machine_mode departing_mode = TYPE_MODE (type);
1784 /* (Possibly wider) machine mode it actually computes
1785 (for the sake of callers that fail to declare it right).
1786 We have to use the mode of the result's RTL, rather than
1787 its type, since expand_function_start may have promoted it. */
1788 enum machine_mode arriving_mode
1789 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1790 rtx reg_to_map;
1792 /* Don't use MEMs as direct targets because on some machines
1793 substituting a MEM for a REG makes invalid insns.
1794 Let the combiner substitute the MEM if that is valid. */
1795 if (target == 0 || GET_CODE (target) != REG
1796 || GET_MODE (target) != departing_mode)
1797 target = gen_reg_rtx (departing_mode);
1799 /* If function's value was promoted before return,
1800 avoid machine mode mismatch when we substitute INLINE_TARGET.
1801 But TARGET is what we will return to the caller. */
1802 if (arriving_mode != departing_mode)
1804 /* Avoid creating a paradoxical subreg wider than
1805 BITS_PER_WORD, since that is illegal. */
1806 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1808 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1809 GET_MODE_BITSIZE (arriving_mode)))
1810 /* Maybe this could be handled by using convert_move ()? */
1811 abort ();
1812 reg_to_map = gen_reg_rtx (arriving_mode);
1813 target = gen_lowpart (departing_mode, reg_to_map);
1815 else
1816 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1818 else
1819 reg_to_map = target;
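/* A hedged example of the mode juggling above (hypothetical types): a
   function declared to return `short' gives departing_mode == HImode,
   but expand_function_start may have promoted the result rtx to SImode,
   making arriving_mode == SImode.  The copied body then computes into
   an SImode pseudo (or a paradoxical SUBREG of TARGET), and the caller
   sees only the HImode lowpart.  */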
1821 /* Usually, the result value is the machine's return register.
1822 Sometimes it may be a pseudo. Handle both cases. */
1823 if (REG_FUNCTION_VALUE_P (loc))
1824 map->inline_target = reg_to_map;
1825 else
1826 map->reg_map[REGNO (loc)] = reg_to_map;
1828 else
1829 abort ();
1831 /* Make a fresh binding contour that we can easily remove. Do this after
1832 expanding our arguments so cleanups are properly scoped. */
1833 pushlevel (0);
1834 expand_start_bindings (0);
1836 /* Initialize label_map. get_label_from_map will actually make
1837 the labels. */
1838 bzero ((char *) &map->label_map [min_labelno],
1839 (max_labelno - min_labelno) * sizeof (rtx));
1841 /* Perform postincrements before actually calling the function. */
1842 emit_queue ();
1844 /* Clean up stack so that variables might have smaller offsets. */
1845 do_pending_stack_adjust ();
1847 /* Save a copy of the location of const_equiv_map for mark_stores, called
1848 via note_stores. */
1849 global_const_equiv_map = map->const_equiv_map;
1850 global_const_equiv_map_size = map->const_equiv_map_size;
1852 /* If the called function does an alloca, save and restore the
1853 stack pointer around the call. This saves stack space, but
1854 also is required if this inline is being done between two
1855 pushes. */
1856 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1857 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1859 /* Now copy the insns one by one. Do this in two passes, first the insns and
1860 then their REG_NOTES, just like save_for_inline. */
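/* The second pass is what lets REG_NOTES refer forward: a note may
   mention an insn that appears later in the stream, and that insn's
   entry in map->insn_map exists only after the first pass has copied
   every insn.  */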
1862 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1864 for (insn = insns; insn; insn = NEXT_INSN (insn))
1866 rtx copy, pattern, set;
1868 map->orig_asm_operands_vector = 0;
1870 switch (GET_CODE (insn))
1872 case INSN:
1873 pattern = PATTERN (insn);
1874 set = single_set (insn);
1875 copy = 0;
1876 if (GET_CODE (pattern) == USE
1877 && GET_CODE (XEXP (pattern, 0)) == REG
1878 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1879 /* The (USE (REG n)) at return from the function should
1880 be ignored since we are changing (REG n) into
1881 inline_target. */
1882 break;
1884 /* If the inline fn needs eh context, make sure that
1885 the current fn has one. */
1886 if (GET_CODE (pattern) == USE
1887 && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
1888 get_eh_context ();
1890 /* Ignore setting a function value that we don't want to use. */
1891 if (map->inline_target == 0
1892 && set != 0
1893 && GET_CODE (SET_DEST (set)) == REG
1894 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1896 if (volatile_refs_p (SET_SRC (set)))
1898 rtx new_set;
1900 /* If we must not delete the source,
1901 load it into a new temporary. */
1902 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1904 new_set = single_set (copy);
1905 if (new_set == 0)
1906 abort ();
1908 SET_DEST (new_set)
1909 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1911 /* If the source and destination are the same and it
1912 has a note on it, keep the insn. */
1913 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1914 && REG_NOTES (insn) != 0)
1915 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1916 else
1917 break;
1920 /* If this is setting the static chain rtx, omit it. */
1921 else if (static_chain_value != 0
1922 && set != 0
1923 && GET_CODE (SET_DEST (set)) == REG
1924 && rtx_equal_p (SET_DEST (set),
1925 static_chain_incoming_rtx))
1926 break;
1928 /* If this is setting the static chain pseudo, set it from
1929 the value we want to give it instead. */
1930 else if (static_chain_value != 0
1931 && set != 0
1932 && rtx_equal_p (SET_SRC (set),
1933 static_chain_incoming_rtx))
1935 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1937 copy = emit_move_insn (newdest, static_chain_value);
1938 static_chain_value = 0;
1940 else
1941 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1942 /* REG_NOTES will be copied later. */
1944 #ifdef HAVE_cc0
1945 /* If this insn is setting CC0, it may need to look at
1946 the insn that uses CC0 to see what type of insn it is.
1947 In that case, the call to recog via validate_change will
1948 fail. So don't substitute constants here. Instead,
1949 do it when we emit the following insn.
1951 For example, see the pyr.md file. That machine has signed and
1952 unsigned compares. The compare patterns must check the
1953 following branch insn to see what kind of compare to
1954 emit.
1956 If the previous insn set CC0, substitute constants on it as
1957 well. */
1958 if (sets_cc0_p (PATTERN (copy)) != 0)
1959 cc0_insn = copy;
1960 else
1962 if (cc0_insn)
1963 try_constants (cc0_insn, map);
1964 cc0_insn = 0;
1965 try_constants (copy, map);
1967 #else
1968 try_constants (copy, map);
1969 #endif
1970 break;
1972 case JUMP_INSN:
1973 if (GET_CODE (PATTERN (insn)) == RETURN
1974 || (GET_CODE (PATTERN (insn)) == PARALLEL
1975 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
1977 if (local_return_label == 0)
1978 local_return_label = gen_label_rtx ();
1979 pattern = gen_jump (local_return_label);
1981 else
1982 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1984 copy = emit_jump_insn (pattern);
1986 #ifdef HAVE_cc0
1987 if (cc0_insn)
1988 try_constants (cc0_insn, map);
1989 cc0_insn = 0;
1990 #endif
1991 try_constants (copy, map);
1993 /* If this used to be a conditional jump insn whose branch
1994 direction is now known, we must do something special. */
1995 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1997 #ifdef HAVE_cc0
1998 /* The previous insn set cc0 for us. So delete it. */
1999 delete_insn (PREV_INSN (copy));
2000 #endif
2002 /* If this is now a no-op, delete it. */
2003 if (map->last_pc_value == pc_rtx)
2005 delete_insn (copy);
2006 copy = 0;
2008 else
2009 /* Otherwise, this is an unconditional jump so we must put a
2010 BARRIER after it. We could do some dead code elimination
2011 here, but jump.c will do it just as well. */
2012 emit_barrier ();
2014 break;
2016 case CALL_INSN:
2017 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
2018 copy = emit_call_insn (pattern);
2020 /* Because the USAGE information potentially contains objects other
2021 than hard registers, we need to copy it. */
2022 CALL_INSN_FUNCTION_USAGE (copy)
2023 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
2025 #ifdef HAVE_cc0
2026 if (cc0_insn)
2027 try_constants (cc0_insn, map);
2028 cc0_insn = 0;
2029 #endif
2030 try_constants (copy, map);
2032 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
2033 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2034 map->const_equiv_map[i] = 0;
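/* Only hard-register entries are invalidated here; equivalences
   recorded for pseudos (parameter constants and the remapped
   virtual-register addresses) survive the call, since a call cannot
   clobber a pseudo register.  */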
2035 break;
2037 case CODE_LABEL:
2038 copy = emit_label (get_label_from_map (map,
2039 CODE_LABEL_NUMBER (insn)));
2040 LABEL_NAME (copy) = LABEL_NAME (insn);
2041 map->const_age++;
2042 break;
2044 case BARRIER:
2045 copy = emit_barrier ();
2046 break;
2048 case NOTE:
2049 /* It is important to discard function-end and function-beg notes,
2050 so we have only one of each in the current function.
2051 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
2052 deleted these in the copy used for continuing compilation,
2053 not the copy used for inlining). */
2054 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
2055 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
2056 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
2058 copy = emit_note (NOTE_SOURCE_FILE (insn),
2059 NOTE_LINE_NUMBER (insn));
2060 if (copy
2061 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
2062 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
2064 rtx label
2065 = get_label_from_map (map, NOTE_BLOCK_NUMBER (copy));
2067 /* We have to duplicate the handlers for the original region. */
2068 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
2070 handler_info *ptr, *temp;
2071 int nr;
2072 nr = new_eh_region_entry (CODE_LABEL_NUMBER (label));
2073 ptr = get_first_handler (NOTE_BLOCK_NUMBER (copy));
2074 for ( ; ptr; ptr = ptr->next)
2076 temp = get_new_handler ( get_label_from_map (map,
2077 CODE_LABEL_NUMBER (ptr->handler_label)),
2078 ptr->type_info);
2079 add_new_handler (nr, temp);
2083 /* We have to forward these both to match the new exception
2084 region. */
2085 NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
2088 else
2089 copy = 0;
2090 break;
2092 default:
2093 abort ();
2094 break;
2097 if (copy)
2098 RTX_INTEGRATED_P (copy) = 1;
2100 map->insn_map[INSN_UID (insn)] = copy;
2103 /* Now copy the REG_NOTES. Increment const_age, so that only constants
2104 from parameters can be substituted in. These are the only ones that
2105 are valid across the entire function. */
2106 map->const_age++;
2107 for (insn = insns; insn; insn = NEXT_INSN (insn))
2108 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2109 && map->insn_map[INSN_UID (insn)]
2110 && REG_NOTES (insn))
2112 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
2113 /* We must also do subst_constants, in case one of our parameters
2114 has const type and constant value. */
2115 subst_constants (&tem, NULL_RTX, map);
2116 apply_change_group ();
2117 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
2120 if (local_return_label)
2121 emit_label (local_return_label);
2123 /* Restore the stack pointer if we saved it above. */
2124 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
2125 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
2127 /* Make copies of the decls of the symbols in the inline function, so that
2128 the copies of the variables get declared in the current function. Set
2129 up things so that lookup_static_chain knows to interpret registers
2130 in SAVE_EXPRs for TYPE_SIZEs as local. */
2132 inline_function_decl = fndecl;
2133 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
2134 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
2135 inline_function_decl = 0;
2137 /* End the scope containing the copied formal parameter variables
2138 and copied LABEL_DECLs. */
2140 expand_end_bindings (getdecls (), 1, 1);
2141 block = poplevel (1, 1, 0);
2142 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
2143 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
2144 poplevel (0, 0, 0);
2146 /* Must mark the line number note after inlined functions as a repeat, so
2147 that the test coverage code can avoid counting the call twice. This
2148 just tells the code to ignore the immediately following line note, since
2149 there already exists a copy of this note before the expanded inline call.
2150 This line number note is still needed for debugging though, so we can't
2151 delete it. */
2152 if (flag_test_coverage)
2153 emit_note (0, NOTE_REPEATED_LINE_NUMBER);
2155 emit_line_note (input_filename, lineno);
2157 if (structure_value_addr)
2159 target = gen_rtx_MEM (TYPE_MODE (type),
2160 memory_address (TYPE_MODE (type),
2161 structure_value_addr));
2162 MEM_IN_STRUCT_P (target) = 1;
2165 /* Make sure we free the things we explicitly allocated with xmalloc. */
2166 if (real_label_map)
2167 free (real_label_map);
2169 return target;
2172 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
2173 push all of those decls and give each one the corresponding home. */
2175 static void
2176 integrate_parm_decls (args, map, arg_vector)
2177 tree args;
2178 struct inline_remap *map;
2179 rtvec arg_vector;
2181 register tree tail;
2182 register int i;
2184 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
2186 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
2187 TREE_TYPE (tail));
2188 rtx new_decl_rtl
2189 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
2191 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
2192 /* We really should be setting DECL_INCOMING_RTL to something reasonable
2193 here, but that's going to require some more work. */
2194 /* DECL_INCOMING_RTL (decl) = ?; */
2195 /* These args would always appear unused, if not for this. */
2196 TREE_USED (decl) = 1;
2197 /* Prevent warning for shadowing with these. */
2198 DECL_ABSTRACT_ORIGIN (decl) = tail;
2199 pushdecl (decl);
2200 /* Fully instantiate the address with the equivalent form so that the
2201 debugging information contains the actual register, instead of the
2202 virtual register. Do this by not passing an insn to
2203 subst_constants. */
2204 subst_constants (&new_decl_rtl, NULL_RTX, map);
2205 apply_change_group ();
2206 DECL_RTL (decl) = new_decl_rtl;
2210 /* Given a BLOCK node LET, push decls and levels so as to construct in the
2211 current function a tree of contexts isomorphic to the one that is given.
2213 LEVEL indicates how far down into the BLOCK tree is the node we are
2214 currently traversing. It is always zero except for recursive calls.
2216 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
2217 registers used in the DECL_RTL field should be remapped. If it is zero,
2218 no mapping is necessary. */
2220 static void
2221 integrate_decl_tree (let, level, map)
2222 tree let;
2223 int level;
2224 struct inline_remap *map;
2226 tree t, node;
2228 if (level > 0)
2229 pushlevel (0);
2231 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2233 tree d;
2235 push_obstacks_nochange ();
2236 saveable_allocation ();
2237 d = copy_node (t);
2238 pop_obstacks ();
2240 if (DECL_RTL (t) != 0)
2242 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
2243 /* Fully instantiate the address with the equivalent form so that the
2244 debugging information contains the actual register, instead of the
2245 virtual register. Do this by not passing an insn to
2246 subst_constants. */
2247 subst_constants (&DECL_RTL (d), NULL_RTX, map);
2248 apply_change_group ();
2250 /* These args would always appear unused, if not for this. */
2251 TREE_USED (d) = 1;
2252 /* Prevent warning for shadowing with these. */
2253 DECL_ABSTRACT_ORIGIN (d) = t;
2255 if (DECL_LANG_SPECIFIC (d))
2256 copy_lang_decl (d);
2258 pushdecl (d);
2261 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2262 integrate_decl_tree (t, level + 1, map);
2264 if (level > 0)
2266 node = poplevel (1, 0, 0);
2267 if (node)
2269 TREE_USED (node) = TREE_USED (let);
2270 BLOCK_ABSTRACT_ORIGIN (node) = let;
2275 /* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
2276 through save_constants. */
2278 static void
2279 save_constants_in_decl_trees (let)
2280 tree let;
2282 tree t;
2284 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2285 if (DECL_RTL (t) != 0)
2286 save_constants (&DECL_RTL (t));
2288 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2289 save_constants_in_decl_trees (t);
2292 /* Create a new copy of an rtx.
2293 Recursively copies the operands of the rtx,
2294 except for those few rtx codes that are sharable.
2296 We always return an rtx that is similar to that incoming rtx, with the
2297 exception of possibly changing a REG to a SUBREG or vice versa. No
2298 rtl is ever emitted.
2300 Handle constants that need to be placed in the constant pool by
2301 calling `force_const_mem'. */
2303 rtx
2304 copy_rtx_and_substitute (orig, map)
2305 register rtx orig;
2306 struct inline_remap *map;
2308 register rtx copy, temp;
2309 register int i, j;
2310 register RTX_CODE code;
2311 register enum machine_mode mode;
2312 register char *format_ptr;
2313 int regno;
2315 if (orig == 0)
2316 return 0;
2318 code = GET_CODE (orig);
2319 mode = GET_MODE (orig);
2321 switch (code)
2323 case REG:
2324 /* If the stack pointer register shows up, it must be part of
2325 stack-adjustments (*not* because we eliminated the frame pointer!).
2326 Small hard registers are returned as-is. Pseudo-registers
2327 go through their `reg_map'. */
2328 regno = REGNO (orig);
2329 if (regno <= LAST_VIRTUAL_REGISTER)
2331 /* Some hard registers are also mapped,
2332 but others are not translated. */
2333 if (map->reg_map[regno] != 0)
2334 return map->reg_map[regno];
2336 /* If this is the virtual frame pointer, make space in current
2337 function's stack frame for the stack frame of the inline function.
2339 Copy the address of this area into a pseudo. Map
2340 virtual_stack_vars_rtx to this pseudo and set up a constant
2341 equivalence for it to be the address. This will substitute the
2342 address into insns where it can be substituted and use the new
2343 pseudo where it can't. */
2344 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2346 rtx loc, seq;
2347 int size = DECL_FRAME_SIZE (map->fndecl);
2349 #ifdef FRAME_GROWS_DOWNWARD
2350 /* In this case, virtual_stack_vars_rtx points to one byte
2351 higher than the top of the frame area. So make sure we
2352 allocate a big enough chunk to keep the frame pointer
2353 aligned like a real one. */
2354 size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2355 #endif
2356 start_sequence ();
2357 loc = assign_stack_temp (BLKmode, size, 1);
2358 loc = XEXP (loc, 0);
2359 #ifdef FRAME_GROWS_DOWNWARD
2360 /* In this case, virtual_stack_vars_rtx points to one byte
2361 higher than the top of the frame area. So compute the offset
2362 to one byte higher than our substitute frame. */
2363 loc = plus_constant (loc, size);
2364 #endif
2365 map->reg_map[regno] = temp
2366 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2368 #ifdef STACK_BOUNDARY
2369 mark_reg_pointer (map->reg_map[regno],
2370 STACK_BOUNDARY / BITS_PER_UNIT);
2371 #endif
2373 if (REGNO (temp) < map->const_equiv_map_size)
2375 map->const_equiv_map[REGNO (temp)] = loc;
2376 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2379 seq = gen_sequence ();
2380 end_sequence ();
2381 emit_insn_after (seq, map->insns_at_start);
2382 return temp;
2384 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2386 /* Do the same for a block to contain any arguments referenced
2387 in memory. */
2388 rtx loc, seq;
2389 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2391 start_sequence ();
2392 loc = assign_stack_temp (BLKmode, size, 1);
2393 loc = XEXP (loc, 0);
2394 /* When arguments grow downward, the virtual incoming
2395 args pointer points to the top of the argument block,
2396 so the remapped location better do the same. */
2397 #ifdef ARGS_GROW_DOWNWARD
2398 loc = plus_constant (loc, size);
2399 #endif
2400 map->reg_map[regno] = temp
2401 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2403 #ifdef STACK_BOUNDARY
2404 mark_reg_pointer (map->reg_map[regno],
2405 STACK_BOUNDARY / BITS_PER_UNIT);
2406 #endif
2408 if (REGNO (temp) < map->const_equiv_map_size)
2410 map->const_equiv_map[REGNO (temp)] = loc;
2411 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2414 seq = gen_sequence ();
2415 end_sequence ();
2416 emit_insn_after (seq, map->insns_at_start);
2417 return temp;
2419 else if (REG_FUNCTION_VALUE_P (orig))
2421 /* This is a reference to the function return value. If
2422 the function doesn't have a return value, error. If the
2423 mode doesn't agree, make a SUBREG. */
2424 if (map->inline_target == 0)
2425 /* Must be unrolling loops or replicating code if we
2426 reach here, so return the register unchanged. */
2427 return orig;
2428 else if (mode != GET_MODE (map->inline_target))
2429 return gen_lowpart (mode, map->inline_target);
2430 else
2431 return map->inline_target;
2433 return orig;
2435 if (map->reg_map[regno] == NULL)
2437 map->reg_map[regno] = gen_reg_rtx (mode);
2438 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2439 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2440 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2441 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2443 if (map->regno_pointer_flag[regno])
2444 mark_reg_pointer (map->reg_map[regno],
2445 map->regno_pointer_align[regno]);
2447 return map->reg_map[regno];
2449 case SUBREG:
2450 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2451 /* SUBREG is ordinary, but don't make nested SUBREGs. */
2452 if (GET_CODE (copy) == SUBREG)
2453 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
2454 SUBREG_WORD (orig) + SUBREG_WORD (copy));
2455 else if (GET_CODE (copy) == CONCAT)
2456 return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
2457 else
2458 return gen_rtx_SUBREG (GET_MODE (orig), copy,
2459 SUBREG_WORD (orig));
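/* Illustrative sketch (hypothetical regnos): if (subreg:QI (reg:HI 61) 0)
   is copied while reg 61 is remapped to (subreg:HI (reg:SI 130) 1), the
   result is (subreg:QI (reg:SI 130) 1): the SUBREG_WORD fields are added
   so we never build an invalid nested SUBREG.  */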
2461 case ADDRESSOF:
2462 copy = gen_rtx_ADDRESSOF (mode,
2463 copy_rtx_and_substitute (XEXP (orig, 0), map), 0);
2464 SET_ADDRESSOF_DECL (copy, ADDRESSOF_DECL (orig));
2465 regno = ADDRESSOF_REGNO (orig);
2466 if (map->reg_map[regno])
2467 regno = REGNO (map->reg_map[regno]);
2468 else if (regno > LAST_VIRTUAL_REGISTER)
2470 temp = XEXP (orig, 0);
2471 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2472 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2473 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2474 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2475 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2477 if (map->regno_pointer_flag[regno])
2478 mark_reg_pointer (map->reg_map[regno],
2479 map->regno_pointer_align[regno]);
2480 regno = REGNO (map->reg_map[regno]);
2482 ADDRESSOF_REGNO (copy) = regno;
2483 return copy;
2485 case USE:
2486 case CLOBBER:
2487 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2488 to (use foo) if the original insn didn't have a subreg.
2489 Removing the subreg distorts the VAX movstrhi pattern
2490 by changing the mode of an operand. */
2491 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2492 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2493 copy = SUBREG_REG (copy);
2494 return gen_rtx_fmt_e (code, VOIDmode, copy);
2496 case CODE_LABEL:
2497 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2498 = LABEL_PRESERVE_P (orig);
2499 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2501 case LABEL_REF:
2502 copy = gen_rtx_LABEL_REF (mode,
2503 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2504 : get_label_from_map (map,
2505 CODE_LABEL_NUMBER (XEXP (orig, 0))));
2506 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2508 /* The fact that this label was previously nonlocal does not mean
2509 it still is, so we must check if it is within the range of
2510 this function's labels. */
2511 LABEL_REF_NONLOCAL_P (copy)
2512 = (LABEL_REF_NONLOCAL_P (orig)
2513 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2514 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2516 /* If we have made a nonlocal label local, it means that this
2517 inlined call will be referring to our nonlocal goto handler.
2518 So make sure we create one for this block; we normally would
2519 not since this is not otherwise considered a "call". */
2520 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2521 function_call_count++;
2523 return copy;
2525 case PC:
2526 case CC0:
2527 case CONST_INT:
2528 return orig;
2530 case SYMBOL_REF:
2531 /* Symbols which represent the address of a label stored in the constant
2532 pool must be modified to point to a constant pool entry for the
2533 remapped label. Otherwise, symbols are returned unchanged. */
2534 if (CONSTANT_POOL_ADDRESS_P (orig))
2536 rtx constant = get_pool_constant (orig);
2537 if (GET_CODE (constant) == LABEL_REF)
2538 return XEXP (force_const_mem (GET_MODE (orig),
2539 copy_rtx_and_substitute (constant,
2540 map)),
2544 return orig;
2546 case CONST_DOUBLE:
2547 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2548 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2549 duplicate of a CONST_DOUBLE we have already seen. */
2550 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2552 REAL_VALUE_TYPE d;
2554 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2555 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2557 else
2558 return immed_double_const (CONST_DOUBLE_LOW (orig),
2559 CONST_DOUBLE_HIGH (orig), VOIDmode);
2561 case CONST:
2562 /* Make new constant pool entry for a constant
2563 that was in the pool of the inline function. */
2564 if (RTX_INTEGRATED_P (orig))
2566 /* If this was an address of a constant pool entry that itself
2567 had to be placed in the constant pool, it might not be a
2568 valid address. So the recursive call below might turn it
2569 into a register. In that case, it isn't a constant any
2570 more, so return it. This has the potential of changing a
2571 MEM into a REG, but we'll assume that it is safe. */
2572 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2573 if (! CONSTANT_P (temp))
2574 return temp;
2575 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2577 break;
2579 case ADDRESS:
2580 /* If from constant pool address, make new constant pool entry and
2581 return its address. */
2582 if (! RTX_INTEGRATED_P (orig))
2583 abort ();
2585 temp
2586 = force_const_mem (GET_MODE (XEXP (orig, 0)),
2587 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2588 map));
2590 #if 0
2591 /* Legitimizing the address here is incorrect.
2593 The only ADDRESS rtx's that can reach here are ones created by
2594 save_constants. Hence the operand of the ADDRESS is always valid
2595 in this position of the instruction, since the original rtx without
2596 the ADDRESS was valid.
2598 The reason we don't legitimize the address here is that on the
2599 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2600 This code forces the operand of the address to a register, which
2601 fails because we can not take the HIGH part of a register.
2603 Also, change_address may create new registers. These registers
2604 will not have valid reg_map entries. This can cause try_constants()
2605 to fail because it assumes that all registers in the rtx have valid
2606 reg_map entries, and it may end up replacing one of these new
2607 registers with junk. */
2609 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2610 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2611 #endif
2613 temp = XEXP (temp, 0);
2615 #ifdef POINTERS_EXTEND_UNSIGNED
2616 if (GET_MODE (temp) != GET_MODE (orig))
2617 temp = convert_memory_address (GET_MODE (orig), temp);
2618 #endif
2620 return temp;
2622 case ASM_OPERANDS:
2623 /* If a single asm insn contains multiple output operands
2624 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2625 We must make sure that the copied insn continues to share it. */
2626 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2628 copy = rtx_alloc (ASM_OPERANDS);
2629 copy->volatil = orig->volatil;
2630 XSTR (copy, 0) = XSTR (orig, 0);
2631 XSTR (copy, 1) = XSTR (orig, 1);
2632 XINT (copy, 2) = XINT (orig, 2);
2633 XVEC (copy, 3) = map->copy_asm_operands_vector;
2634 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2635 XSTR (copy, 5) = XSTR (orig, 5);
2636 XINT (copy, 6) = XINT (orig, 6);
2637 return copy;
2639 break;
2641 case CALL:
2642 /* This is given special treatment because the first
2643 operand of a CALL is a (MEM ...) which may get
2644 forced into a register for cse. This is undesirable
2645 if function-address cse isn't wanted or if we won't do cse. */
2646 #ifndef NO_FUNCTION_CSE
2647 if (! (optimize && ! flag_no_function_cse))
2648 #endif
2649 return gen_rtx_CALL (GET_MODE (orig),
2650 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2651 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2652 copy_rtx_and_substitute (XEXP (orig, 1), map));
2653 break;
2655 #if 0
2656 /* Must be ifdefed out for loop unrolling to work. */
2657 case RETURN:
2658 abort ();
2659 #endif
2661 case SET:
2662 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2663 Adjust the setting by the offset of the area we made.
2664 If the nonlocal goto is into the current function,
2665 this will result in unnecessarily bad code, but should work. */
2666 if (SET_DEST (orig) == virtual_stack_vars_rtx
2667 || SET_DEST (orig) == virtual_incoming_args_rtx)
2669 /* In case a translation hasn't occurred already, make one now. */
2670 rtx equiv_reg;
2671 rtx equiv_loc;
2672 HOST_WIDE_INT loc_offset;
2674 copy_rtx_and_substitute (SET_DEST (orig), map);
2675 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2676 equiv_loc = map->const_equiv_map[REGNO (equiv_reg)];
2677 loc_offset
2678 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2679 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2680 force_operand
2681 (plus_constant
2682 (copy_rtx_and_substitute (SET_SRC (orig), map),
2683 - loc_offset),
2684 NULL_RTX));
2686 break;
2688 case MEM:
2689 copy = rtx_alloc (MEM);
2690 PUT_MODE (copy, mode);
2691 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2692 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2693 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2695 /* If doing function inlining, this MEM might not be const in the
2696 function that it is being inlined into, and thus may not be
2697 unchanging after function inlining. Constant pool references are
2698 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
2699 for them. */
2700 if (! map->integrating)
2701 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2703 return copy;
2705 default:
2706 break;
2709 copy = rtx_alloc (code);
2710 PUT_MODE (copy, mode);
2711 copy->in_struct = orig->in_struct;
2712 copy->volatil = orig->volatil;
2713 copy->unchanging = orig->unchanging;
2715 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2717 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2719 switch (*format_ptr++)
2721 case '0':
2722 XEXP (copy, i) = XEXP (orig, i);
2723 break;
2725 case 'e':
2726 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2727 break;
2729 case 'u':
2730 /* Change any references to old-insns to point to the
2731 corresponding copied insns. */
2732 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2733 break;
2735 case 'E':
2736 XVEC (copy, i) = XVEC (orig, i);
2737 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2739 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2740 for (j = 0; j < XVECLEN (copy, i); j++)
2741 XVECEXP (copy, i, j)
2742 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2744 break;
2746 case 'w':
2747 XWINT (copy, i) = XWINT (orig, i);
2748 break;
2750 case 'i':
2751 XINT (copy, i) = XINT (orig, i);
2752 break;
2754 case 's':
2755 XSTR (copy, i) = XSTR (orig, i);
2756 break;
2758 default:
2759 abort ();
2763 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2765 map->orig_asm_operands_vector = XVEC (orig, 3);
2766 map->copy_asm_operands_vector = XVEC (copy, 3);
2767 map->copy_asm_constraints_vector = XVEC (copy, 4);
2770 return copy;
2773 /* Substitute known constant values into INSN, if that is valid. */
2775 void
2776 try_constants (insn, map)
2777 rtx insn;
2778 struct inline_remap *map;
2780 int i;
2782 map->num_sets = 0;
2783 subst_constants (&PATTERN (insn), insn, map);
2785 /* Apply the changes if they are valid; otherwise discard them. */
2786 apply_change_group ();
2788 /* Show we don't know the value of anything stored or clobbered. */
2789 note_stores (PATTERN (insn), mark_stores);
2790 map->last_pc_value = 0;
2791 #ifdef HAVE_cc0
2792 map->last_cc0_value = 0;
2793 #endif
2795 /* Set up any constant equivalences made in this insn. */
2796 for (i = 0; i < map->num_sets; i++)
2798 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2800 int regno = REGNO (map->equiv_sets[i].dest);
2802 if (regno < map->const_equiv_map_size
2803 && (map->const_equiv_map[regno] == 0
2804 /* The following clause is a hack to make the case work where GNU C++
2805 reassigns a variable to make cse work right. */
2806 || ! rtx_equal_p (map->const_equiv_map[regno],
2807 map->equiv_sets[i].equiv)))
2809 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2810 map->const_age_map[regno] = map->const_age;
2813 else if (map->equiv_sets[i].dest == pc_rtx)
2814 map->last_pc_value = map->equiv_sets[i].equiv;
2815 #ifdef HAVE_cc0
2816 else if (map->equiv_sets[i].dest == cc0_rtx)
2817 map->last_cc0_value = map->equiv_sets[i].equiv;
2818 #endif
2822 /* Substitute known constants for pseudo regs in the contents of LOC,
2823 which are part of INSN.
2824 If INSN is zero, the substitution should always be done (this is used to
2825 update DECL_RTL).
2826 These changes are taken out by try_constants if the result is not valid.
2828 Note that we are more concerned with determining when the result of a SET
2829 is a constant, for further propagation, than actually inserting constants
2830 into insns; cse will do the latter task better.
2832 This function is also used to adjust address of items previously addressed
2833 via the virtual stack variable or virtual incoming arguments registers. */
2835 static void
2836 subst_constants (loc, insn, map)
2837 rtx *loc;
2838 rtx insn;
2839 struct inline_remap *map;
2841 rtx x = *loc;
2842 register int i;
2843 register enum rtx_code code;
2844 register char *format_ptr;
2845 int num_changes = num_validated_changes ();
2846 rtx new = 0;
2847 enum machine_mode op0_mode;
2849 code = GET_CODE (x);
2851 switch (code)
2853 case PC:
2854 case CONST_INT:
2855 case CONST_DOUBLE:
2856 case SYMBOL_REF:
2857 case CONST:
2858 case LABEL_REF:
2859 case ADDRESS:
2860 return;
2862 #ifdef HAVE_cc0
2863 case CC0:
2864 validate_change (insn, loc, map->last_cc0_value, 1);
2865 return;
2866 #endif
2868 case USE:
2869 case CLOBBER:
2870 /* The only thing we can do with a USE or CLOBBER is possibly do
2871 some substitutions in a MEM within it. */
2872 if (GET_CODE (XEXP (x, 0)) == MEM)
2873 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2874 return;
2876 case REG:
2877 /* Substitute for parms and known constants. Don't replace
2878 hard regs used as user variables with constants. */
2880 int regno = REGNO (x);
2882 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2883 && regno < map->const_equiv_map_size
2884 && map->const_equiv_map[regno] != 0
2885 && map->const_age_map[regno] >= map->const_age)
2886 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2887 return;
2890 case SUBREG:
2891 /* SUBREG applied to something other than a reg
2892 should be treated as ordinary, since that must
2893 be a special hack and we don't know how to treat it specially.
2894 Consider for example mulsidi3 in m68k.md.
2895 Ordinary SUBREG of a REG needs this special treatment. */
2896 if (GET_CODE (SUBREG_REG (x)) == REG)
2898 rtx inner = SUBREG_REG (x);
2899 rtx new = 0;
2901 /* We can't call subst_constants on &SUBREG_REG (x) because any
2902 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2903 see what is inside, try to form the new SUBREG and see if that is
2904 valid. We handle two cases: extracting a full word in an
2905 integral mode and extracting the low part. */
2906 subst_constants (&inner, NULL_RTX, map);
2908 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2909 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2910 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2911 new = operand_subword (inner, SUBREG_WORD (x), 0,
2912 GET_MODE (SUBREG_REG (x)));
2914 cancel_changes (num_changes);
2915 if (new == 0 && subreg_lowpart_p (x))
2916 new = gen_lowpart_common (GET_MODE (x), inner);
2918 if (new)
2919 validate_change (insn, loc, new, 1);
2921 return;
2923 break;
2925 case MEM:
2926 subst_constants (&XEXP (x, 0), insn, map);
2928 /* If a memory address got spoiled, change it back. */
2929 if (insn != 0 && num_validated_changes () != num_changes
2930 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2931 cancel_changes (num_changes);
2932 return;
2934 case SET:
2936 /* Substitute constants in our source, and in any arguments to a
2937 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2938 itself. */
2939 rtx *dest_loc = &SET_DEST (x);
2940 rtx dest = *dest_loc;
2941 rtx src, tem;
2943 subst_constants (&SET_SRC (x), insn, map);
2944 src = SET_SRC (x);
2946 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2947 || GET_CODE (*dest_loc) == SUBREG
2948 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2950 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2952 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2953 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2955 dest_loc = &XEXP (*dest_loc, 0);
2958 /* Do substitute in the address of a destination in memory. */
2959 if (GET_CODE (*dest_loc) == MEM)
2960 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2962 /* Check for the case where DEST is a SUBREG, both it and the underlying
2963 register are no wider than one word, and the SUBREG is at least as wide
2964 as the underlying register. In that case, we are really setting the
2965 underlying register to the source converted to the mode of DEST. So indicate that. */
2966 if (GET_CODE (dest) == SUBREG
2967 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2968 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2969 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2970 <= GET_MODE_SIZE (GET_MODE (dest)))
2971 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2972 src)))
2973 src = tem, dest = SUBREG_REG (dest);
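/* A hypothetical instance of the narrowing above: for
   (set (subreg:SI (reg:QI 80) 0) (const_int 5))
   we record the equivalence as though the insn were
   (set (reg:QI 80) (const_int 5)),
   i.e. DEST becomes the inner register and SRC its lowpart.  */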
2975 /* If storing a recognizable value, save it for later recording. */
2976 if ((map->num_sets < MAX_RECOG_OPERANDS)
2977 && (CONSTANT_P (src)
2978 || (GET_CODE (src) == REG
2979 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2980 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2981 || (GET_CODE (src) == PLUS
2982 && GET_CODE (XEXP (src, 0)) == REG
2983 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2984 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2985 && CONSTANT_P (XEXP (src, 1)))
2986 || GET_CODE (src) == COMPARE
2987 #ifdef HAVE_cc0
2988 || dest == cc0_rtx
2989 #endif
2990 || (dest == pc_rtx
2991 && (src == pc_rtx || GET_CODE (src) == RETURN
2992 || GET_CODE (src) == LABEL_REF))))
2994 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2995 it will cause us to save the COMPARE with any constants
2996 substituted, which is what we want for later. */
2997 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2998 map->equiv_sets[map->num_sets++].dest = dest;
3001 return;
3003 default:
3004 break;
3007 format_ptr = GET_RTX_FORMAT (code);
3009 /* If the first operand is an expression, save its mode for later. */
3010 if (*format_ptr == 'e')
3011 op0_mode = GET_MODE (XEXP (x, 0));
3013 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3015 switch (*format_ptr++)
3017 case '0':
3018 break;
3020 case 'e':
3021 if (XEXP (x, i))
3022 subst_constants (&XEXP (x, i), insn, map);
3023 break;
3025 case 'u':
3026 case 'i':
3027 case 's':
3028 case 'w':
3029 break;
3031 case 'E':
3032 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
3034 int j;
3035 for (j = 0; j < XVECLEN (x, i); j++)
3036 subst_constants (&XVECEXP (x, i, j), insn, map);
3038 break;
3040 default:
3041 abort ();
3045 /* If this is a commutative operation, move a constant to the second
3046 operand unless the second operand is already a CONST_INT. */
3047 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
3048 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3050 rtx tem = XEXP (x, 0);
3051 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3052 validate_change (insn, &XEXP (x, 1), tem, 1);
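/* For illustration: (plus:SI (const_int 4) (reg:SI 60)) becomes
   (plus:SI (reg:SI 60) (const_int 4)), restoring the canonical operand
   order (constant second) that the simplify_*_operation calls below and
   later recognition expect.  */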
3055 /* Simplify the expression in case we put in some constants. */
3056 switch (GET_RTX_CLASS (code))
3058 case '1':
3059 new = simplify_unary_operation (code, GET_MODE (x),
3060 XEXP (x, 0), op0_mode);
3061 break;
3063 case '<':
3065 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
3066 if (op_mode == VOIDmode)
3067 op_mode = GET_MODE (XEXP (x, 1));
3068 new = simplify_relational_operation (code, op_mode,
3069 XEXP (x, 0), XEXP (x, 1));
3070 #ifdef FLOAT_STORE_FLAG_VALUE
3071 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3072 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3073 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
3074 GET_MODE (x)));
3075 #endif
3076 break;
3079 case '2':
3080 case 'c':
3081 new = simplify_binary_operation (code, GET_MODE (x),
3082 XEXP (x, 0), XEXP (x, 1));
3083 break;
3085 case 'b':
3086 case '3':
3087 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
3088 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
3089 break;
3092 if (new)
3093 validate_change (insn, loc, new, 1);
3096 /* Show that registers modified no longer contain known constants. We are
3097 called from note_stores with parts of the new insn. */
3099 void
3100 mark_stores (dest, x)
3101 rtx dest;
3102 rtx x ATTRIBUTE_UNUSED;
3104 int regno = -1;
3105 enum machine_mode mode;
3107 /* DEST is always the innermost thing set, except in the case of
3108 SUBREGs of hard registers. */
3110 if (GET_CODE (dest) == REG)
3111 regno = REGNO (dest), mode = GET_MODE (dest);
3112 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
3114 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
3115 mode = GET_MODE (SUBREG_REG (dest));
3118 if (regno >= 0)
3120 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
3121 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
3122 int i;
3124 /* Ignore virtual stack var or virtual arg register since those
3125 are handled separately. */
3126 if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
3127 && regno != VIRTUAL_STACK_VARS_REGNUM)
3128 for (i = regno; i <= last_reg; i++)
3129 if (i < global_const_equiv_map_size)
3130 global_const_equiv_map[i] = 0;
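/* Hypothetical example: on a 32-bit target, a store to a DImode value
   held in hard registers 2 and 3 invalidates global_const_equiv_map[2]
   and global_const_equiv_map[3], since HARD_REGNO_NREGS reports that
   the mode occupies two consecutive registers.  */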
3134 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
3135 pointed to by PX, they represent constants in the constant pool.
3136 Replace these with a new memory reference obtained from force_const_mem.
3137 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
3138 address of a constant pool entry. Replace them with the address of
3139 a new constant pool entry obtained from force_const_mem. */
3141 static void
3142 restore_constants (px)
3143 rtx *px;
3145 rtx x = *px;
3146 int i, j;
3147 char *fmt;
3149 if (x == 0)
3150 return;
3152 if (GET_CODE (x) == CONST_DOUBLE)
3154 /* We have to make a new CONST_DOUBLE to ensure that we account for
3155 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
3156 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3158 REAL_VALUE_TYPE d;
3160 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
3161 *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
3163 else
3164 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
3165 VOIDmode);
3168 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
3170 restore_constants (&XEXP (x, 0));
3171 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
3173 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
3175 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
3176 rtx new = XEXP (SUBREG_REG (x), 0);
3178 restore_constants (&new);
3179 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
3180 PUT_MODE (new, GET_MODE (x));
3181 *px = validize_mem (new);
3183 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
3185 rtx new = XEXP (force_const_mem (GET_MODE (XEXP (x, 0)),
3186 XEXP (XEXP (x, 0), 0)),
3187 0);
3189 #ifdef POINTERS_EXTEND_UNSIGNED
3190 if (GET_MODE (new) != GET_MODE (x))
3191 new = convert_memory_address (GET_MODE (x), new);
3192 #endif
3194 *px = new;
3196 else
3198 fmt = GET_RTX_FORMAT (GET_CODE (x));
3199 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
3201 switch (*fmt++)
3203 case 'E':
3204 for (j = 0; j < XVECLEN (x, i); j++)
3205 restore_constants (&XVECEXP (x, i, j));
3206 break;
3208 case 'e':
3209 restore_constants (&XEXP (x, i));
3210 break;
3216 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
3217 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
3218 that it points to the node itself, thus indicating that the node is its
3219 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
3220 the given node is NULL, recursively descend the decl/block tree which
3221 it is the root of, and for each other ..._DECL or BLOCK node contained
3222 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
3223 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
3224 values to point to themselves. */
3226 static void
3227 set_block_origin_self (stmt)
3228 register tree stmt;
3230 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
3232 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
3235 register tree local_decl;
3237 for (local_decl = BLOCK_VARS (stmt);
3238 local_decl != NULL_TREE;
3239 local_decl = TREE_CHAIN (local_decl))
3240 set_decl_origin_self (local_decl); /* Potential recursion. */
3244 register tree subblock;
3246 for (subblock = BLOCK_SUBBLOCKS (stmt);
3247 subblock != NULL_TREE;
3248 subblock = BLOCK_CHAIN (subblock))
3249 set_block_origin_self (subblock); /* Recurse. */
3254 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
3255 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
3256 node so that it points to the node itself, thus indicating that the
3257 node represents its own (abstract) origin. Additionally, if the
3258 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
3259 the decl/block tree of which the given node is the root, and for
3260 each other ..._DECL or BLOCK node contained therein whose
3261 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
3262 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
3263 point to themselves. */
3265 static void
3266 set_decl_origin_self (decl)
3267 register tree decl;
3269 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
3271 DECL_ABSTRACT_ORIGIN (decl) = decl;
3272 if (TREE_CODE (decl) == FUNCTION_DECL)
3274 register tree arg;
3276 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3277 DECL_ABSTRACT_ORIGIN (arg) = arg;
3278 if (DECL_INITIAL (decl) != NULL_TREE
3279 && DECL_INITIAL (decl) != error_mark_node)
3280 set_block_origin_self (DECL_INITIAL (decl));
3285 /* Given a pointer to some BLOCK node, and a boolean value to set the
3286 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
3287 the given block, and for all local decls and all local sub-blocks
3288 (recursively) which are contained therein. */
3290 static void
3291 set_block_abstract_flags (stmt, setting)
3292 register tree stmt;
3293 register int setting;
3295 register tree local_decl;
3296 register tree subblock;
3298 BLOCK_ABSTRACT (stmt) = setting;
3300 for (local_decl = BLOCK_VARS (stmt);
3301 local_decl != NULL_TREE;
3302 local_decl = TREE_CHAIN (local_decl))
3303 set_decl_abstract_flags (local_decl, setting);
3305 for (subblock = BLOCK_SUBBLOCKS (stmt);
3306 subblock != NULL_TREE;
3307 subblock = BLOCK_CHAIN (subblock))
3308 set_block_abstract_flags (subblock, setting);
3311 /* Given a pointer to some ..._DECL node, and a boolean value to set the
3312 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
3313 given decl, and (in the case where the decl is a FUNCTION_DECL) also
3314 set the abstract flags for all of the parameters, local vars, local
3315 blocks and sub-blocks (recursively) to the same setting. */
3317 void
3318 set_decl_abstract_flags (decl, setting)
3319 register tree decl;
3320 register int setting;
3322 DECL_ABSTRACT (decl) = setting;
3323 if (TREE_CODE (decl) == FUNCTION_DECL)
3325 register tree arg;
3327 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3328 DECL_ABSTRACT (arg) = setting;
3329 if (DECL_INITIAL (decl) != NULL_TREE
3330 && DECL_INITIAL (decl) != error_mark_node)
3331 set_block_abstract_flags (DECL_INITIAL (decl), setting);
3335 /* Output the assembly language code for the function FNDECL
3336 from its DECL_SAVED_INSNS. Used for inline functions that are output
3337 at end of compilation instead of where they came in the source. */
3339 void
3340 output_inline_function (fndecl)
3341 tree fndecl;
3343 rtx head;
3344 rtx last;
3346 /* Things we allocate from here on are part of this function, not
3347 permanent. */
3348 temporary_allocation ();
3350 head = DECL_SAVED_INSNS (fndecl);
3351 current_function_decl = fndecl;
3353 /* This call is only used to initialize global variables. */
3354 init_function_start (fndecl, "lossage", 1);
3356 /* Redo parameter determinations in case the FUNCTION_...
3357 macros took machine-specific actions that need to be redone. */
3358 assign_parms (fndecl, 1);
3360 /* Set stack frame size. */
3361 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
3363 /* The first is a bit of a lie (the array may be larger), but it doesn't
3364 matter too much, and it isn't worth saving the actual bound. */
3365 reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
3366 regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
3367 regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
3368 regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
3369 max_parm_reg = MAX_PARMREG (head);
3370 parm_reg_stack_loc = (rtx *) PARMREG_STACK_LOC (head);
3372 stack_slot_list = STACK_SLOT_LIST (head);
3373 forced_labels = FORCED_LABELS (head);
3375 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
3376 current_function_calls_alloca = 1;
3378 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
3379 current_function_calls_setjmp = 1;
3381 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
3382 current_function_calls_longjmp = 1;
3384 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
3385 current_function_returns_struct = 1;
3387 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
3388 current_function_returns_pcc_struct = 1;
3390 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
3391 current_function_needs_context = 1;
3393 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
3394 current_function_has_nonlocal_label = 1;
3396 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
3397 current_function_returns_pointer = 1;
3399 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
3400 current_function_uses_const_pool = 1;
3402 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
3403 current_function_uses_pic_offset_table = 1;
3405 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
3406 current_function_pops_args = POPS_ARGS (head);
3408 /* This is the only thing that the expand_function_end call that used to be
3409 here actually did, and that call can cause problems. */
3410 immediate_size_expand--;
3412 /* Find last insn and rebuild the constant pool. */
3413 for (last = FIRST_PARM_INSN (head);
3414 NEXT_INSN (last); last = NEXT_INSN (last))
3416 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
3418 restore_constants (&PATTERN (last));
3419 restore_constants (&REG_NOTES (last));
3423 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
3424 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
3426 /* We must have already output DWARF debugging information for the
3427 original (abstract) inline function declaration/definition, so
3428 we want to make sure that the debugging information we generate
3429 for this special instance of the inline function refers back to
3430 the information we already generated. To make sure that happens,
3431 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
3432 node (and for all of the local ..._DECL nodes which are its children)
3433 so that they all point to themselves. */
3435 set_decl_origin_self (fndecl);
3437 /* We're not deferring this any longer. */
3438 DECL_DEFER_OUTPUT (fndecl) = 0;
3440 /* We can't inline this anymore. */
3441 DECL_INLINE (fndecl) = 0;
3443 /* Compile this function all the way down to assembly code. */
3444 rest_of_compilation (fndecl);
3446 current_function_decl = 0;