1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 91, 93-98, 1999 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "insn-flags.h"
32 #include "expr.h"
33 #include "output.h"
34 #include "recog.h"
35 #include "integrate.h"
36 #include "real.h"
37 #include "except.h"
38 #include "function.h"
39 #include "toplev.h"
41 #include "obstack.h"
42 #define obstack_chunk_alloc xmalloc
43 #define obstack_chunk_free free
45 extern struct obstack *function_maybepermanent_obstack;
47 /* Round VALUE up to the next highest integer that meets the
48 alignment (ALIGN must be a power of two). */
49 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
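/* A worked example (illustrative arithmetic only), with the
   power-of-two ALIGN the mask trick requires:

       CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 20 & ~7 == 16

   and a VALUE already at the alignment is unchanged:
   CEIL_ROUND (16, 8) == 16.  */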
51 /* Default max number of insns a function can have and still be inline.
52 This is overridden on RISC machines. */
53 #ifndef INTEGRATE_THRESHOLD
54 /* Inlining small functions might save more space than not inlining at
55 all. Assume 1 instruction for the call and 1.5 insns per argument. */
56 #define INTEGRATE_THRESHOLD(DECL) \
57 (optimize_size \
58 ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL)) / 2)) \
59 : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
60 #endif
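/* Under this default, a two-argument function is for instance limited
   to 1 + (3 * 2 / 2) = 4 insns when optimizing for size, but to
   8 * (8 + 2) = 80 insns otherwise (illustrative arithmetic; RISC
   targets override INTEGRATE_THRESHOLD).  */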
62 static rtx initialize_for_inline PROTO((tree, int, int, int, int));
63 static void finish_inline PROTO((tree, rtx));
64 static void adjust_copied_decl_tree PROTO((tree));
65 static tree copy_decl_list PROTO((tree));
66 static tree copy_decl_tree PROTO((tree));
67 static void copy_decl_rtls PROTO((tree));
68 static void save_constants PROTO((rtx *));
69 static void note_modified_parmregs PROTO((rtx, rtx));
70 static rtx copy_for_inline PROTO((rtx));
71 static void integrate_parm_decls PROTO((tree, struct inline_remap *,
72 rtvec));
73 static void integrate_decl_tree PROTO((tree, int,
74 struct inline_remap *));
75 static void save_constants_in_decl_trees PROTO ((tree));
76 static void subst_constants PROTO((rtx *, rtx,
77 struct inline_remap *));
78 static void restore_constants PROTO((rtx *));
79 static void set_block_origin_self PROTO((tree));
80 static void set_decl_origin_self PROTO((tree));
81 static void set_block_abstract_flags PROTO((tree, int));
82 static void process_reg_param PROTO((struct inline_remap *, rtx,
83 rtx));
86 void set_decl_abstract_flags PROTO((tree, int));
87 static tree copy_and_set_decl_abstract_origin PROTO((tree));
89 /* Returns the Ith entry in the label_map contained in MAP. If the
90 Ith entry has not yet been set, return a fresh label. This function
91 performs a lazy initialization of label_map, thereby avoiding huge memory
92 explosions when the label_map gets very large. */
94 rtx
95 get_label_from_map (map, i)
96 struct inline_remap *map;
97 int i;
99 rtx x = map->label_map[i];
101 if (x == NULL_RTX)
102 x = map->label_map[i] = gen_label_rtx();
104 return x;
107 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
108 is safe and reasonable to integrate into other functions.
109 Nonzero means value is a warning message with a single %s
110 for the function's name. */
112 char *
113 function_cannot_inline_p (fndecl)
114 register tree fndecl;
116 register rtx insn;
117 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
118 int max_insns = INTEGRATE_THRESHOLD (fndecl);
119 register int ninsns = 0;
120 register tree parms;
121 rtx result;
123 /* No inlines with varargs. */
124 if ((last && TREE_VALUE (last) != void_type_node)
125 || current_function_varargs)
126 return "varargs function cannot be inline";
128 if (current_function_calls_alloca)
129 return "function using alloca cannot be inline";
131 if (current_function_contains_functions)
132 return "function with nested functions cannot be inline";
134 if (current_function_cannot_inline)
135 return current_function_cannot_inline;
137 /* If it's not even close, don't even look. */
138 if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
139 return "function too large to be inline";
141 #if 0
142 /* Don't inline functions which do not specify a function prototype and
143 have BLKmode argument or take the address of a parameter. */
144 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
146 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
147 TREE_ADDRESSABLE (parms) = 1;
148 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
149 return "no prototype, and parameter address used; cannot be inline";
151 #endif
153 /* We can't inline functions that return structures
154 the old-fashioned PCC way, copying into a static block. */
155 if (current_function_returns_pcc_struct)
156 return "inline functions not supported for this return value type";
158 /* We can't inline functions that return structures of varying size. */
159 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
160 return "function with varying-size return value cannot be inline";
162 /* Cannot inline a function with a varying size argument or one that
163 receives a transparent union. */
164 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
166 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
167 return "function with varying-size parameter cannot be inline";
168 else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
169 return "function with transparent unit parameter cannot be inline";
172 if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
174 for (ninsns = 0, insn = get_first_nonparm_insn ();
175 insn && ninsns < max_insns;
176 insn = NEXT_INSN (insn))
177 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
178 ninsns++;
180 if (ninsns >= max_insns)
181 return "function too large to be inline";
184 /* We cannot inline this function if forced_labels is non-zero. This
185 implies that a label in this function was used as an initializer.
186 Because labels cannot be duplicated, all labels in the function
187 will be renamed when it is inlined. However, there is no way to find
188 and fix all variables initialized with addresses of labels in this
189 function, hence inlining is impossible. */
191 if (forced_labels)
192 return "function with label addresses used in initializers cannot inline";
194 /* We cannot inline a nested function that jumps to a nonlocal label. */
195 if (current_function_has_nonlocal_goto)
196 return "function with nonlocal goto cannot be inline";
198 /* This is a hack, until the inliner is taught about eh regions at
199 the start of the function. */
200 for (insn = get_insns ();
201 insn
202 && ! (GET_CODE (insn) == NOTE
203 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
204 insn = NEXT_INSN (insn))
206 if (insn && GET_CODE (insn) == NOTE
207 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
208 return "function with complex parameters cannot be inline";
211 /* We can't inline functions that return a PARALLEL rtx. */
212 result = DECL_RTL (DECL_RESULT (fndecl));
213 if (result && GET_CODE (result) == PARALLEL)
214 return "inline functions not supported for this return value type";
216 return 0;
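/* A minimal sketch of how a caller consumes the result; the exact
   code in rest_of_compilation (toplev.c) may differ, and
   warning_with_decl is assumed here to substitute the function's
   name for the %s:

       char *lose = function_cannot_inline_p (fndecl);
       if (lose)
         warning_with_decl (fndecl, lose);
       else
         ... mark fndecl as inlinable and save its insns ...  */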
219 /* Variables used within save_for_inline. */
221 /* Mapping from old pseudo-register to new pseudo-registers.
222 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
223 It is allocated in `save_for_inline' and `expand_inline_function',
224 and deallocated on exit from each of those routines. */
225 static rtx *reg_map;
227 /* Mapping from old code-labels to new code-labels.
228 The first element of this map is label_map[min_labelno].
229 It is allocated in `save_for_inline' and `expand_inline_function',
230 and deallocated on exit from each of those routines. */
231 static rtx *label_map;
233 /* Mapping from old insn uid's to copied insns.
234 It is allocated in `save_for_inline' and `expand_inline_function',
235 and deallocated on exit from each of those routines. */
236 static rtx *insn_map;
238 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
239 Zero for a reg that isn't a parm's home.
240 Only reg numbers less than max_parm_reg are mapped here. */
241 static tree *parmdecl_map;
243 /* Keep track of first pseudo-register beyond those that are parms. */
244 extern int max_parm_reg;
245 extern rtx *parm_reg_stack_loc;
247 /* When an insn is being copied by copy_for_inline,
248 this is nonzero if we have copied an ASM_OPERANDS.
249 In that case, it is the original input-operand vector. */
250 static rtvec orig_asm_operands_vector;
252 /* When an insn is being copied by copy_for_inline,
253 this is nonzero if we have copied an ASM_OPERANDS.
254 In that case, it is the copied input-operand vector. */
255 static rtvec copy_asm_operands_vector;
257 /* Likewise, this is the copied constraints vector. */
258 static rtvec copy_asm_constraints_vector;
260 /* In save_for_inline, nonzero if past the parm-initialization insns. */
261 static int in_nonparm_insns;
263 /* Subroutines passed to duplicate_eh_handlers to map exception labels. */
265 static rtx
266 save_for_inline_eh_labelmap (label)
267 rtx label;
269 int index = CODE_LABEL_NUMBER (label);
270 return label_map[index];
273 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
274 needed to save FNDECL's insns and info for future inline expansion. */
276 static rtx
277 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
278 tree fndecl;
279 int min_labelno;
280 int max_labelno;
281 int max_reg;
282 int copy;
284 int function_flags, i;
285 rtvec arg_vector;
286 tree parms;
288 /* Compute the values of any flags we must restore when inlining this. */
290 function_flags
291 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
292 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
293 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
294 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
295 + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
296 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
297 + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
298 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
299 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
300 + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
302 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
303 bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
304 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
306 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
307 parms;
308 parms = TREE_CHAIN (parms), i++)
310 rtx p = DECL_RTL (parms);
311 int copied_incoming = 0;
313 /* If we have (mem (addressof (mem ...))), use the inner MEM, since
314 otherwise the copy_rtx call below will not unshare the MEM because
315 it shares ADDRESSOF. */
316 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
317 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
318 p = XEXP (XEXP (p, 0), 0);
320 if (GET_CODE (p) == MEM && copy)
322 /* Copy the rtl so that modifications of the addresses
323 later in compilation won't affect this arg_vector.
324 Virtual register instantiation can screw the address
325 of the rtl. */
326 rtx new = copy_rtx (p);
328 /* Don't leave the old copy anywhere in this decl. */
329 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
330 || (GET_CODE (DECL_RTL (parms)) == MEM
331 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
332 && (XEXP (DECL_RTL (parms), 0)
333 == XEXP (DECL_INCOMING_RTL (parms), 0))))
334 DECL_INCOMING_RTL (parms) = new, copied_incoming = 1;
336 DECL_RTL (parms) = new;
339 RTVEC_ELT (arg_vector, i) = p;
341 if (GET_CODE (p) == REG)
342 parmdecl_map[REGNO (p)] = parms;
343 else if (GET_CODE (p) == CONCAT)
345 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
346 rtx pimag = gen_imagpart (GET_MODE (preal), p);
348 if (GET_CODE (preal) == REG)
349 parmdecl_map[REGNO (preal)] = parms;
350 if (GET_CODE (pimag) == REG)
351 parmdecl_map[REGNO (pimag)] = parms;
354 /* This flag is cleared later
355 if the function ever modifies the value of the parm. */
356 TREE_READONLY (parms) = 1;
358 /* Copy DECL_INCOMING_RTL if not done already. This can
359 happen if DECL_RTL is a reg. */
360 if (copy && ! copied_incoming)
362 p = DECL_INCOMING_RTL (parms);
364 /* If we have (mem (addressof (mem ...))), use the inner MEM, since
365 otherwise the copy_rtx call below will not unshare the MEM because
366 it shares ADDRESSOF. */
367 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
368 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
369 p = XEXP (XEXP (p, 0), 0);
371 if (GET_CODE (p) == MEM)
372 DECL_INCOMING_RTL (parms) = copy_rtx (p);
376 /* Assume we start out in the insns that set up the parameters. */
377 in_nonparm_insns = 0;
379 /* The list of DECL_SAVED_INSNS starts off with a header which
380 contains the following information:
382 the first insn of the function (not including the insns that copy
383 parameters into registers).
384 the first parameter insn of the function,
385 the first label used by that function,
386 the last label used by that function,
387 the highest register number used for parameters,
388 the total number of registers used,
389 the size of the incoming stack area for parameters,
390 the number of bytes popped on return,
391 the stack slot list,
392 the labels that are forced to exist,
393 some flags that are used to restore compiler globals,
394 the value of current_function_outgoing_args_size,
395 the original argument vector,
396 the original DECL_INITIAL,
397 and pointers to the table of pseudo regs, pointer flags, and alignment. */
399 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
400 max_parm_reg, max_reg,
401 current_function_args_size,
402 current_function_pops_args,
403 stack_slot_list, forced_labels, function_flags,
404 current_function_outgoing_args_size,
405 arg_vector, (rtx) DECL_INITIAL (fndecl),
406 (rtvec) regno_reg_rtx, regno_pointer_flag,
407 regno_pointer_align,
408 (rtvec) parm_reg_stack_loc);
411 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
412 things that must be done to make FNDECL expandable as an inline function.
413 HEAD contains the chain of insns to which FNDECL will expand. */
415 static void
416 finish_inline (fndecl, head)
417 tree fndecl;
418 rtx head;
420 FIRST_FUNCTION_INSN (head) = get_first_nonparm_insn ();
421 FIRST_PARM_INSN (head) = get_insns ();
422 DECL_SAVED_INSNS (fndecl) = head;
423 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
426 /* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
427 they all point to the new (copied) rtxs. */
429 static void
430 adjust_copied_decl_tree (block)
431 register tree block;
433 register tree subblock;
434 register rtx original_end;
436 original_end = BLOCK_END_NOTE (block);
437 if (original_end)
439 BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
440 NOTE_SOURCE_FILE (original_end) = 0;
443 /* Process all subblocks. */
444 for (subblock = BLOCK_SUBBLOCKS (block);
445 subblock;
446 subblock = TREE_CHAIN (subblock))
447 adjust_copied_decl_tree (subblock);
450 /* Make the insns and PARM_DECLs of the current function permanent
451 and record other information in DECL_SAVED_INSNS to allow inlining
452 of this function in subsequent calls.
454 This function is called when we are going to immediately compile
455 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
456 modified by the compilation process, so we copy all of them to
457 new storage and consider the new insns to be the insn chain to be
458 compiled. Our caller (rest_of_compilation) saves the original
459 DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
461 /* ??? The nonlocal_label list should be adjusted also. However, since
462 a function that contains a nested function never gets inlined currently,
463 the nonlocal_label list will always be empty, so we don't worry about
464 it for now. */
466 void
467 save_for_inline_copying (fndecl)
468 tree fndecl;
470 rtx first_insn, last_insn, insn;
471 rtx head, copy;
472 int max_labelno, min_labelno, i, len;
473 int max_reg;
474 int max_uid;
475 rtx first_nonparm_insn;
476 char *new, *new1;
477 rtx *new_parm_reg_stack_loc;
478 rtx *new2;
480 /* Make and emit a return-label if we have not already done so.
481 Do this before recording the bounds on label numbers. */
483 if (return_label == 0)
485 return_label = gen_label_rtx ();
486 emit_label (return_label);
489 /* Get some bounds on the labels and registers used. */
491 max_labelno = max_label_num ();
492 min_labelno = get_first_label_num ();
493 max_reg = max_reg_num ();
495 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
496 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
497 Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values
498 for the parms, prior to elimination of virtual registers.
499 These values are needed for substituting parms properly. */
501 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
503 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
505 if (current_function_uses_const_pool)
507 /* Replace any constant pool references with the actual constant. We
508 will put the constants back in the copy made below. */
509 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
510 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
512 save_constants (&PATTERN (insn));
513 if (REG_NOTES (insn))
514 save_constants (&REG_NOTES (insn));
517 /* Also scan all decls, and replace any constant pool references with the
518 actual constant. */
519 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
521 /* Clear out the constant pool so that we can recreate it with the
522 copied constants below. */
523 init_const_rtx_hash_table ();
524 clear_const_double_mem ();
527 max_uid = INSN_UID (head);
529 /* We have now allocated all that needs to be allocated permanently
530 on the rtx obstack. Set our high-water mark, so that we
531 can free the rest of this when the time comes. */
533 preserve_data ();
535 /* Copy the chain insns of this function.
536 Install the copied chain as the insns of this function,
537 for continued compilation;
538 the original chain is recorded as the DECL_SAVED_INSNS
539 for inlining future calls. */
541 /* If there are insns that copy parms from the stack into pseudo registers,
542 those insns are not copied. `expand_inline_function' must
543 emit the correct code to handle such things. */
545 insn = get_insns ();
546 if (GET_CODE (insn) != NOTE)
547 abort ();
548 first_insn = rtx_alloc (NOTE);
549 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
550 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
551 INSN_UID (first_insn) = INSN_UID (insn);
552 PREV_INSN (first_insn) = NULL;
553 NEXT_INSN (first_insn) = NULL;
554 last_insn = first_insn;
556 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
557 Make these new rtx's now, and install them in regno_reg_rtx, so they
558 will be the official pseudo-reg rtx's for the rest of compilation. */
560 reg_map = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));
562 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
563 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
564 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
565 regno_reg_rtx[i], len);
567 regno_reg_rtx = reg_map;
569 /* Put copies of all the virtual register rtx into the new regno_reg_rtx. */
570 init_virtual_regs ();
572 /* Likewise each label rtx must have a unique rtx as its copy. */
574 /* We used to use alloca here, but the size of what it would try to
575 allocate would occasionally cause it to exceed the stack limit and
576 cause unpredictable core dumps. Some examples were > 2Mb in size. */
577 label_map = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
579 for (i = min_labelno; i < max_labelno; i++)
580 label_map[i] = gen_label_rtx ();
582 /* Likewise for parm_reg_stack_loc. */
583 new_parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
584 for (i = 0; i < max_parm_reg; i++)
585 new_parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
587 parm_reg_stack_loc = new_parm_reg_stack_loc;
589 /* Record the mapping of old insns to copied insns. */
591 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
592 bzero ((char *) insn_map, max_uid * sizeof (rtx));
594 /* Get the insn which signals the end of parameter setup code. */
595 first_nonparm_insn = get_first_nonparm_insn ();
597 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
598 (the former occurs when a variable has its address taken)
599 since these may be shared and can be changed by virtual
600 register instantiation. DECL_RTL values for our arguments
601 have already been copied by initialize_for_inline. */
602 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
603 if (GET_CODE (regno_reg_rtx[i]) == MEM)
604 XEXP (regno_reg_rtx[i], 0)
605 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
607 /* Copy the parm_reg_stack_loc array, and substitute for all of the rtx
608 contained in it. */
609 new2 = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
610 bcopy ((char *) parm_reg_stack_loc, (char *) new2,
611 max_parm_reg * sizeof (rtx));
612 parm_reg_stack_loc = new2;
613 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; ++i)
614 if (parm_reg_stack_loc[i])
615 parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
617 /* Copy the tree of subblocks of the function, and the decls in them.
618 We will use the copy for compiling this function, then restore the original
619 subblocks and decls for use when inlining this function.
621 Several parts of the compiler modify BLOCK trees. In particular,
622 instantiate_virtual_regs will instantiate any virtual regs
623 mentioned in the DECL_RTLs of the decls, and loop
624 unrolling will replicate any BLOCK trees inside an unrolled loop.
626 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
627 which we will use for inlining. The rtl might even contain pseudoregs
628 whose space has been freed. */
630 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
631 DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
633 /* Now copy each DECL_RTL which is a MEM,
634 so it is safe to modify their addresses. */
635 copy_decl_rtls (DECL_INITIAL (fndecl));
637 /* The fndecl node acts as its own progenitor, so mark it as such. */
638 DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
640 /* Now copy the chain of insns. Do this twice: the first time, copy the
641 insn itself and its body; the second time, copy the REG_NOTES. This is
642 because a REG_NOTE may have a forward pointer to another insn. */
644 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
646 orig_asm_operands_vector = 0;
648 if (insn == first_nonparm_insn)
649 in_nonparm_insns = 1;
651 switch (GET_CODE (insn))
653 case NOTE:
654 /* No need to keep these. */
655 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
656 continue;
658 copy = rtx_alloc (NOTE);
659 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
660 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
661 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
662 else
664 NOTE_SOURCE_FILE (insn) = (char *) copy;
665 NOTE_SOURCE_FILE (copy) = 0;
667 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
668 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
670 int new_region = CODE_LABEL_NUMBER
671 (label_map[NOTE_BLOCK_NUMBER (copy)]);
673 /* We have to duplicate the handlers for the original region. */
674 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
675 duplicate_eh_handlers (NOTE_BLOCK_NUMBER (copy), new_region,
676 save_for_inline_eh_labelmap);
678 /* We have to forward these both to match the new exception
679 region. */
680 NOTE_BLOCK_NUMBER (copy) = new_region;
683 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
684 break;
686 case INSN:
687 case JUMP_INSN:
688 case CALL_INSN:
689 copy = rtx_alloc (GET_CODE (insn));
691 if (GET_CODE (insn) == CALL_INSN)
692 CALL_INSN_FUNCTION_USAGE (copy)
693 = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));
695 PATTERN (copy) = copy_for_inline (PATTERN (insn));
696 INSN_CODE (copy) = -1;
697 LOG_LINKS (copy) = NULL_RTX;
698 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
699 break;
701 case CODE_LABEL:
702 copy = label_map[CODE_LABEL_NUMBER (insn)];
703 LABEL_NAME (copy) = LABEL_NAME (insn);
704 break;
706 case BARRIER:
707 copy = rtx_alloc (BARRIER);
708 break;
710 default:
711 abort ();
713 INSN_UID (copy) = INSN_UID (insn);
714 insn_map[INSN_UID (insn)] = copy;
715 NEXT_INSN (last_insn) = copy;
716 PREV_INSN (copy) = last_insn;
717 last_insn = copy;
720 adjust_copied_decl_tree (DECL_INITIAL (fndecl));
722 /* Now copy the REG_NOTES. */
723 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
724 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
725 && insn_map[INSN_UID(insn)])
726 REG_NOTES (insn_map[INSN_UID (insn)])
727 = copy_for_inline (REG_NOTES (insn));
729 NEXT_INSN (last_insn) = NULL;
731 finish_inline (fndecl, head);
733 /* Make new versions of the register tables. */
734 new = (char *) savealloc (regno_pointer_flag_length);
735 bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
736 new1 = (char *) savealloc (regno_pointer_flag_length);
737 bcopy (regno_pointer_align, new1, regno_pointer_flag_length);
739 regno_pointer_flag = new;
740 regno_pointer_align = new1;
742 set_new_first_and_last_insn (first_insn, last_insn);
744 if (label_map)
745 free (label_map);
748 /* Copy NODE (as with copy_node). NODE must be a DECL. Set the
749 DECL_ABSTRACT_ORIGIN for the new node accordingly. */
751 static tree
752 copy_and_set_decl_abstract_origin (node)
753 tree node;
755 tree copy = copy_node (node);
756 if (DECL_ABSTRACT_ORIGIN (copy) != NULL_TREE)
757 /* That means that NODE already had a DECL_ABSTRACT_ORIGIN. (This
758 situation occurs if we inline a function which itself made
759 calls to inline functions.) Since DECL_ABSTRACT_ORIGIN is the
760 most distant ancestor, we don't have to do anything here. */
762 else
763 /* The most distant ancestor must be NODE. */
764 DECL_ABSTRACT_ORIGIN (copy) = node;
766 return copy;
769 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
770 For example, this can copy a list made of TREE_LIST nodes. While copying,
771 set DECL_ABSTRACT_ORIGIN appropriately. */
773 static tree
774 copy_decl_list (list)
775 tree list;
777 tree head;
778 register tree prev, next;
780 if (list == 0)
781 return 0;
783 head = prev = copy_and_set_decl_abstract_origin (list);
784 next = TREE_CHAIN (list);
785 while (next)
787 register tree copy;
789 copy = copy_and_set_decl_abstract_origin (next);
790 TREE_CHAIN (prev) = copy;
791 prev = copy;
792 next = TREE_CHAIN (next);
794 return head;
797 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
799 static tree
800 copy_decl_tree (block)
801 tree block;
803 tree t, vars, subblocks;
805 vars = copy_decl_list (BLOCK_VARS (block));
806 subblocks = 0;
808 /* Process all subblocks. */
809 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
811 tree copy = copy_decl_tree (t);
812 TREE_CHAIN (copy) = subblocks;
813 subblocks = copy;
816 t = copy_node (block);
817 BLOCK_VARS (t) = vars;
818 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
819 /* If the BLOCK being cloned is already marked as having been instantiated
820 from something else, then leave that `origin' marking alone. Otherwise,
821 mark the clone as having originated from the BLOCK we are cloning. */
822 if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
823 BLOCK_ABSTRACT_ORIGIN (t) = block;
824 return t;
827 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
829 static void
830 copy_decl_rtls (block)
831 tree block;
833 tree t;
835 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
836 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
837 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
839 /* Process all subblocks. */
840 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
841 copy_decl_rtls (t);
844 /* Make the insns and PARM_DECLs of the current function permanent
845 and record other information in DECL_SAVED_INSNS to allow inlining
846 of this function in subsequent calls.
848 This routine need not copy any insns because we are not going
849 to immediately compile the insns in the insn chain. There
850 are two cases when we would compile the insns for FNDECL:
851 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
852 be output at the end of other compilation, because somebody took
853 its address. In the first case, the insns of FNDECL are copied
854 as it is expanded inline, so FNDECL's saved insns are not
855 modified. In the second case, FNDECL is used for the last time,
856 so modifying the rtl is not a problem.
858 We don't have to worry about FNDECL being inline expanded by
859 other functions which are written at the end of compilation
860 because flag_no_inline is turned on when we begin writing
861 functions at the end of compilation. */
863 void
864 save_for_inline_nocopy (fndecl)
865 tree fndecl;
867 rtx insn;
868 rtx head;
869 rtx first_nonparm_insn;
871 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
872 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
873 Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values
874 for the parms, prior to elimination of virtual registers.
875 These values are needed for substituting parms properly. */
877 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
879 /* Make and emit a return-label if we have not already done so. */
881 if (return_label == 0)
883 return_label = gen_label_rtx ();
884 emit_label (return_label);
887 head = initialize_for_inline (fndecl, get_first_label_num (),
888 max_label_num (), max_reg_num (), 0);
890 /* If there are insns that copy parms from the stack into pseudo registers,
891 those insns are not copied. `expand_inline_function' must
892 emit the correct code to handle such things. */
894 insn = get_insns ();
895 if (GET_CODE (insn) != NOTE)
896 abort ();
898 /* Get the insn which signals the end of parameter setup code. */
899 first_nonparm_insn = get_first_nonparm_insn ();
901 /* Now just scan the chain of insns to see what happens to our
902 PARM_DECLs. If a PARM_DECL is used but never modified, we
903 can substitute its rtl directly when expanding inline (and
904 perform constant folding when its incoming value is constant).
905 Otherwise, we have to copy its value into a new register and track
906 the new register's life. */
908 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
910 if (insn == first_nonparm_insn)
911 in_nonparm_insns = 1;
913 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
915 if (current_function_uses_const_pool)
917 /* Replace any constant pool references with the actual constant.
918 We will put the constant back if we need to write the
919 function out after all. */
920 save_constants (&PATTERN (insn));
921 if (REG_NOTES (insn))
922 save_constants (&REG_NOTES (insn));
925 /* Record what interesting things happen to our parameters. */
926 note_stores (PATTERN (insn), note_modified_parmregs);
930 /* Also scan all decls, and replace any constant pool references with the
931 actual constant. */
932 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
934 /* We have now allocated all that needs to be allocated permanently
935 on the rtx obstack. Set our high-water mark, so that we
936 can free the rest of this when the time comes. */
938 preserve_data ();
940 finish_inline (fndecl, head);
943 /* Given PX, a pointer into an insn, search for references to the constant
944 pool. Replace each with a CONST that has the mode of the original
945 constant, contains the constant, and has RTX_INTEGRATED_P set.
946 Similarly, constant pool addresses not enclosed in a MEM are replaced
947 with an ADDRESS and CONST rtx which also gives the constant, its
948 mode, the mode of the address, and has RTX_INTEGRATED_P set. */
950 static void
951 save_constants (px)
952 rtx *px;
954 rtx x;
955 int i, j;
957 again:
958 x = *px;
960 /* If this is a CONST_DOUBLE, don't try to fix things up in
961 CONST_DOUBLE_MEM, because that would recurse infinitely. */
962 if (GET_CODE (x) == CONST_DOUBLE)
963 return;
964 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
965 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
967 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
968 rtx new = gen_rtx_CONST (const_mode, get_pool_constant (XEXP (x, 0)));
969 RTX_INTEGRATED_P (new) = 1;
971 /* If the MEM was in a different mode than the constant (perhaps we
972 were only looking at the low-order part), surround it with a
973 SUBREG so we can save both modes. */
975 if (GET_MODE (x) != const_mode)
977 new = gen_rtx_SUBREG (GET_MODE (x), new, 0);
978 RTX_INTEGRATED_P (new) = 1;
981 *px = new;
982 save_constants (&XEXP (*px, 0));
984 else if (GET_CODE (x) == SYMBOL_REF
985 && CONSTANT_POOL_ADDRESS_P (x))
987 *px = gen_rtx_ADDRESS (GET_MODE (x),
988 gen_rtx_CONST (get_pool_mode (x),
989 get_pool_constant (x)));
990 save_constants (&XEXP (*px, 0));
991 RTX_INTEGRATED_P (*px) = 1;
994 else
996 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
997 int len = GET_RTX_LENGTH (GET_CODE (x));
999 for (i = len-1; i >= 0; i--)
1001 switch (fmt[i])
1003 case 'E':
1004 for (j = 0; j < XVECLEN (x, i); j++)
1005 save_constants (&XVECEXP (x, i, j));
1006 break;
1008 case 'e':
1009 if (XEXP (x, i) == 0)
1010 continue;
1011 if (i == 0)
1013 /* Hack tail-recursion here. */
1014 px = &XEXP (x, 0);
1015 goto again;
1017 save_constants (&XEXP (x, i));
1018 break;
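/* The shape of the transformation, using a hypothetical pool label
   ".LC0": a pool reference such as

       (mem:SF (symbol_ref:SI ".LC0"))

   becomes (const:SF <pool constant>) with RTX_INTEGRATED_P set,
   wrapped in a SUBREG when the MEM's mode differs from the pool
   entry's mode; a bare pool address (symbol_ref:SI ".LC0") becomes
   (address:SI (const:SF <pool constant>)), also RTX_INTEGRATED_P.  */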
1024 /* Note whether a parameter is modified or not. */
1026 static void
1027 note_modified_parmregs (reg, x)
1028 rtx reg;
1029 rtx x ATTRIBUTE_UNUSED;
1031 if (GET_CODE (reg) == REG && in_nonparm_insns
1032 && REGNO (reg) < max_parm_reg
1033 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1034 && parmdecl_map[REGNO (reg)] != 0)
1035 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
1038 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
1039 according to `reg_map' and `label_map'. The original rtl insns
1040 will be saved for inlining; this is used to make a copy
1041 which is used to finish compiling the inline function itself.
1043 If we find a "saved" constant pool entry, one which was replaced with
1044 the value of the constant, convert it back to a constant pool entry.
1045 Since the pool wasn't touched, this should simply restore the old
1046 address.
1048 All other kinds of rtx are copied except those that can never be
1049 changed during compilation. */
1051 static rtx
1052 copy_for_inline (orig)
1053 rtx orig;
1055 register rtx x = orig;
1056 register rtx new;
1057 register int i;
1058 register enum rtx_code code;
1059 register char *format_ptr;
1061 if (x == 0)
1062 return x;
1064 code = GET_CODE (x);
1066 /* These types may be freely shared. */
1068 switch (code)
1070 case QUEUED:
1071 case CONST_INT:
1072 case PC:
1073 case CC0:
1074 return x;
1076 case SYMBOL_REF:
1077 if (! SYMBOL_REF_NEED_ADJUST (x))
1078 return x;
1079 return rethrow_symbol_map (x, save_for_inline_eh_labelmap);
1081 case CONST_DOUBLE:
1082 /* We have to make a new CONST_DOUBLE to ensure that we account for
1083 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
1084 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
1086 REAL_VALUE_TYPE d;
1088 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1089 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
1091 else
1092 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
1093 VOIDmode);
1095 case CONST:
1096 /* Get constant pool entry for constant in the pool. */
1097 if (RTX_INTEGRATED_P (x))
1098 return validize_mem (force_const_mem (GET_MODE (x),
1099 copy_for_inline (XEXP (x, 0))));
1100 break;
1102 case SUBREG:
1103 /* Get constant pool entry, but access it in a different mode. */
1104 if (RTX_INTEGRATED_P (x))
1106 new = force_const_mem (GET_MODE (SUBREG_REG (x)),
1107 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
1109 PUT_MODE (new, GET_MODE (x));
1110 return validize_mem (new);
1112 break;
1114 case ADDRESS:
1115 /* If this is not the special constant-pool form, it is an error.
1116 Otherwise, get the constant pool address. */
1117 if (! RTX_INTEGRATED_P (x))
1118 abort ();
1120 new = force_const_mem (GET_MODE (XEXP (x, 0)),
1121 copy_for_inline (XEXP (XEXP (x, 0), 0)));
1122 new = XEXP (new, 0);
1124 #ifdef POINTERS_EXTEND_UNSIGNED
1125 if (GET_MODE (new) != GET_MODE (x))
1126 new = convert_memory_address (GET_MODE (x), new);
1127 #endif
1129 return new;
1131 case ASM_OPERANDS:
1132 /* If a single asm insn contains multiple output operands
1133 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1134 We must make sure that the copied insn continues to share it. */
1135 if (orig_asm_operands_vector == XVEC (orig, 3))
1137 x = rtx_alloc (ASM_OPERANDS);
1138 x->volatil = orig->volatil;
1139 XSTR (x, 0) = XSTR (orig, 0);
1140 XSTR (x, 1) = XSTR (orig, 1);
1141 XINT (x, 2) = XINT (orig, 2);
1142 XVEC (x, 3) = copy_asm_operands_vector;
1143 XVEC (x, 4) = copy_asm_constraints_vector;
1144 XSTR (x, 5) = XSTR (orig, 5);
1145 XINT (x, 6) = XINT (orig, 6);
1146 return x;
1148 break;
1150 case MEM:
1151 /* A MEM is usually allowed to be shared if its address is constant
1152 or is a constant plus one of the special registers.
1154 We do not allow sharing of addresses that are either a special
1155 register or the sum of a constant and a special register because
1156 it is possible for unshare_all_rtl to copy the address into memory
1157 that won't be saved. Although the MEM can safely be shared, and
1158 won't be copied there, the address itself cannot be shared, and may
1159 need to be copied.
1161 There are also two exceptions with constants: The first is if the
1162 constant is a LABEL_REF or the sum of the LABEL_REF
1163 and an integer. This case can happen if we have an inline
1164 function that supplies a constant operand to the call of another
1165 inline function that uses it in a switch statement. In this case,
1166 we will be replacing the LABEL_REF, so we have to replace this MEM
1167 as well.
1169 The second case is if we have a (const (plus (address ..) ...)).
1170 In that case we need to put back the address of the constant pool
1171 entry. */
1173 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
1174 && GET_CODE (XEXP (x, 0)) != LABEL_REF
1175 && ! (GET_CODE (XEXP (x, 0)) == CONST
1176 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
1177 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1178 == LABEL_REF)
1179 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1180 == ADDRESS)))))
1181 return x;
1182 break;
1184 case LABEL_REF:
1185 /* If this is a non-local label, just make a new LABEL_REF.
1186 Otherwise, use the new label as well. */
1187 x = gen_rtx_LABEL_REF (GET_MODE (orig),
1188 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1189 : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
1190 LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
1191 LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
1192 return x;
1194 case REG:
1195 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
1196 return reg_map [REGNO (x)];
1197 else
1198 return x;
1200 case SET:
1201 /* If a parm that gets modified lives in a pseudo-reg,
1202 clear its TREE_READONLY to prevent certain optimizations. */
1204 rtx dest = SET_DEST (x);
1206 while (GET_CODE (dest) == STRICT_LOW_PART
1207 || GET_CODE (dest) == ZERO_EXTRACT
1208 || GET_CODE (dest) == SUBREG)
1209 dest = XEXP (dest, 0);
1211 if (GET_CODE (dest) == REG
1212 && REGNO (dest) < max_parm_reg
1213 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
1214 && parmdecl_map[REGNO (dest)] != 0
1215 /* The insn to load an arg pseudo from a stack slot
1216 does not count as modifying it. */
1217 && in_nonparm_insns)
1218 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
1220 break;
1222 #if 0 /* This is a good idea, but here is the wrong place for it. */
1223 /* Arrange that CONST_INTs always appear as the second operand
1224 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
1225 always appear as the first. */
1226 case PLUS:
1227 if (GET_CODE (XEXP (x, 0)) == CONST_INT
1228 || (XEXP (x, 1) == frame_pointer_rtx
1229 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1230 && XEXP (x, 1) == arg_pointer_rtx)))
1232 rtx t = XEXP (x, 0);
1233 XEXP (x, 0) = XEXP (x, 1);
1234 XEXP (x, 1) = t;
1236 break;
1237 #endif
1238 default:
1239 break;
1242 /* Replace this rtx with a copy of itself. */
1244 x = rtx_alloc (code);
1245 bcopy ((char *) orig, (char *) x,
1246 (sizeof (*x) - sizeof (x->fld)
1247 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
1249 /* Now scan the subexpressions recursively.
1250 We can store any replaced subexpressions directly into X
1251 since we know X is not shared! Any vectors in X
1252 must be copied if X was copied. */
1254 format_ptr = GET_RTX_FORMAT (code);
1256 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1258 switch (*format_ptr++)
1260 case 'e':
1261 XEXP (x, i) = copy_for_inline (XEXP (x, i));
1262 break;
1264 case 'u':
1265 /* Change any references to old-insns to point to the
1266 corresponding copied insns. */
1267 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1268 break;
1270 case 'E':
1271 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1273 register int j;
1275 XVEC (x, i) = gen_rtvec_vv (XVECLEN (x, i), XVEC (x, i)->elem);
1276 for (j = 0; j < XVECLEN (x, i); j++)
1277 XVECEXP (x, i, j)
1278 = copy_for_inline (XVECEXP (x, i, j));
1280 break;
1284 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1286 orig_asm_operands_vector = XVEC (orig, 3);
1287 copy_asm_operands_vector = XVEC (x, 3);
1288 copy_asm_constraints_vector = XVEC (x, 4);
1291 return x;
1294 /* Unfortunately, we need a global copy of const_equiv map for communication
1295 with a function called from note_stores. Be *very* careful that this
1296 is used properly in the presence of recursion. */
1298 rtx *global_const_equiv_map;
1299 int global_const_equiv_map_size;
1301 #define FIXED_BASE_PLUS_P(X) \
1302 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1303 && GET_CODE (XEXP (X, 0)) == REG \
1304 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1305 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
1307 /* Called to set up a mapping for the case where a parameter is in a
1308 register. If it is read-only and our argument is a constant, set up the
1309 constant equivalence.
1311 If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
1312 if it is a register.
1314 Also, don't allow hard registers here; they might not be valid when
1315 substituted into insns. */
1316 static void
1317 process_reg_param (map, loc, copy)
1318 struct inline_remap *map;
1319 rtx loc, copy;
1321 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1322 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
1323 && ! REG_USERVAR_P (copy))
1324 || (GET_CODE (copy) == REG
1325 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
1327 rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
1328 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
1329 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1330 && REGNO (temp) < map->const_equiv_map_size)
1332 map->const_equiv_map[REGNO (temp)] = copy;
1333 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1335 copy = temp;
1337 map->reg_map[REGNO (loc)] = copy;
1340 /* Used by duplicate_eh_handlers to map labels for the exception table. */
1341 static struct inline_remap *eif_eh_map;
1343 static rtx
1344 expand_inline_function_eh_labelmap (label)
1345 rtx label;
1347 int index = CODE_LABEL_NUMBER (label);
1348 return get_label_from_map (eif_eh_map, index);
1351 /* Integrate the procedure defined by FNDECL. Note that this function
1352 may wind up calling itself. Since the static variables are not
1353 reentrant, we do not assign them until after the possibility
1354 of recursion is eliminated.
1356 If IGNORE is nonzero, do not produce a value.
1357 Otherwise store the value in TARGET if it is nonzero and that is convenient.
1359 Value is:
1360 (rtx)-1 if we could not substitute the function
1361 0 if we substituted it and it does not produce a value
1362 else an rtx for where the value is stored. */
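/* A minimal sketch of the corresponding test made by the caller in
   expand_call (calls.c); the exact code there may differ:

       temp = expand_inline_function (fndecl, actparms, target,
                                      ignore, type, structure_value_addr);
       if (temp == (rtx) (HOST_WIDE_INT) -1)
         ... could not inline: emit a normal CALL_INSN instead ...
       else
         ... temp is 0 (no value) or the rtx holding the value ...  */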
1365 expand_inline_function (fndecl, parms, target, ignore, type,
1366 structure_value_addr)
1367 tree fndecl, parms;
1368 rtx target;
1369 int ignore;
1370 tree type;
1371 rtx structure_value_addr;
1373 tree formal, actual, block;
1374 rtx header = DECL_SAVED_INSNS (fndecl);
1375 rtx insns = FIRST_FUNCTION_INSN (header);
1376 rtx parm_insns = FIRST_PARM_INSN (header);
1377 tree *arg_trees;
1378 rtx *arg_vals;
1379 rtx insn;
1380 int max_regno;
1381 register int i;
1382 int min_labelno = FIRST_LABELNO (header);
1383 int max_labelno = LAST_LABELNO (header);
1384 int nargs;
1385 rtx local_return_label = 0;
1386 rtx loc;
1387 rtx stack_save = 0;
1388 rtx temp;
1389 struct inline_remap *map;
1390 #ifdef HAVE_cc0
1391 rtx cc0_insn = 0;
1392 #endif
1393 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1394 rtx static_chain_value = 0;
1396 /* The pointer used to track the true location of the memory used
1397 for MAP->LABEL_MAP. */
1398 rtx *real_label_map = 0;
1400 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1401 max_regno = MAX_REGNUM (header) + 3;
1402 if (max_regno < FIRST_PSEUDO_REGISTER)
1403 abort ();
1405 nargs = list_length (DECL_ARGUMENTS (fndecl));
1407 /* Check that the parms' types match and that sufficient arguments were
1408 passed. Since the appropriate conversions or default promotions have
1409 already been applied, the machine modes should match exactly. */
1411 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
1412 formal;
1413 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
1415 tree arg;
1416 enum machine_mode mode;
1418 if (actual == 0)
1419 return (rtx) (HOST_WIDE_INT) -1;
1421 arg = TREE_VALUE (actual);
1422 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1424 if (mode != TYPE_MODE (TREE_TYPE (arg))
1425 /* If they are block mode, the types should match exactly.
1426 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1427 which could happen if the parameter has incomplete type. */
1428 || (mode == BLKmode
1429 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
1430 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
1431 return (rtx) (HOST_WIDE_INT) -1;
1434 /* Extra arguments are valid, but will be ignored below, so we must
1435 evaluate them here for side-effects. */
1436 for (; actual; actual = TREE_CHAIN (actual))
1437 expand_expr (TREE_VALUE (actual), const0_rtx,
1438 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
1440 /* Make a binding contour to keep inline cleanups called at
1441 outer function-scope level from looking like they are shadowing
1442 parameter declarations. */
1443 pushlevel (0);
1445 /* Expand the function arguments. Do this first so that any
1446 new registers get created before we allocate the maps. */
1448 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1449 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1451 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1452 formal;
1453 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1455 /* Actual parameter, converted to the type of the argument within the
1456 function. */
1457 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1458 /* Mode of the variable used within the function. */
1459 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1460 int invisiref = 0;
1462 arg_trees[i] = arg;
1463 loc = RTVEC_ELT (arg_vector, i);
1465 /* If this is an object passed by invisible reference, we copy the
1466 object into a stack slot and save its address. If this will go
1467 into memory, we do nothing now. Otherwise, we just expand the
1468 argument. */
1469 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1470 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1472 rtx stack_slot
1473 = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
1474 int_size_in_bytes (TREE_TYPE (arg)), 1);
1475 MEM_SET_IN_STRUCT_P (stack_slot,
1476 AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1478 store_expr (arg, stack_slot, 0);
1480 arg_vals[i] = XEXP (stack_slot, 0);
1481 invisiref = 1;
1483 else if (GET_CODE (loc) != MEM)
1485 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
1486 /* The mode of LOC and ARG can differ if LOC was a variable
1487 that had its mode promoted via PROMOTE_MODE. */
1488 arg_vals[i] = convert_modes (GET_MODE (loc),
1489 TYPE_MODE (TREE_TYPE (arg)),
1490 expand_expr (arg, NULL_RTX, mode,
1491 EXPAND_SUM),
1492 TREE_UNSIGNED (TREE_TYPE (formal)));
1493 else
1494 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
1496 else
1497 arg_vals[i] = 0;
1499 if (arg_vals[i] != 0
1500 && (! TREE_READONLY (formal)
1501 /* If the parameter is not read-only, copy our argument through
1502 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1503 TARGET in any way. In the inline function, they will likely
1504 be two different pseudos, and `safe_from_p' will make all
1505 sorts of smart assumptions about their not conflicting.
1506 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1507 wrong, so put ARG_VALS[I] into a fresh register.
1508 Don't worry about invisible references, since their stack
1509 temps will never overlap the target. */
1510 || (target != 0
1511 && ! invisiref
1512 && (GET_CODE (arg_vals[i]) == REG
1513 || GET_CODE (arg_vals[i]) == SUBREG
1514 || GET_CODE (arg_vals[i]) == MEM)
1515 && reg_overlap_mentioned_p (arg_vals[i], target))
1516 /* ??? We must always copy a SUBREG into a REG, because it might
1517 get substituted into an address, and not all ports correctly
1518 handle SUBREGs in addresses. */
1519 || (GET_CODE (arg_vals[i]) == SUBREG)))
1520 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
1522 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
1523 && POINTER_TYPE_P (TREE_TYPE (formal)))
1524 mark_reg_pointer (arg_vals[i],
1525 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
1526 / BITS_PER_UNIT));
1529 /* Allocate the structures we use to remap things. */
1531 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1532 map->fndecl = fndecl;
1534 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1535 bzero ((char *) map->reg_map, max_regno * sizeof (rtx));
1537 /* We used to use alloca here, but the size of what it would try to
1538 allocate would occasionally cause it to exceed the stack limit and
1539 cause unpredictable core dumps. */
1540 real_label_map
1541 = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
1542 map->label_map = real_label_map;
1544 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1545 bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
1546 map->min_insnno = 0;
1547 map->max_insnno = INSN_UID (header);
1549 map->integrating = 1;
1551 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1552 be large enough for all our pseudos. This is the number we are currently
1553 using plus the number in the called routine, plus 15 for each arg,
1554 five to compute the virtual frame pointer, and five for the return value.
1555 This should be enough for most cases. We do not reference entries
1556 outside the range of the map.
1558 ??? These numbers are quite arbitrary and were obtained by
1559 experimentation. At some point, we should try to allocate the
1560 table after all the parameters are set up so we can more accurately
1561 estimate the number of pseudos we will need. */
1563 map->const_equiv_map_size
1564 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
1566 map->const_equiv_map
1567 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1568 bzero ((char *) map->const_equiv_map,
1569 map->const_equiv_map_size * sizeof (rtx));
1571 map->const_age_map
1572 = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1573 bzero ((char *) map->const_age_map,
1574 map->const_equiv_map_size * sizeof (unsigned));
1575 map->const_age = 0;
1577 /* Record the current insn in case we have to set up pointers to frame
1578 and argument memory blocks. If there are no insns yet, add a dummy
1579 insn that can be used as an insertion point. */
1580 map->insns_at_start = get_last_insn ();
1581 if (map->insns_at_start == 0)
1582 map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
1584 map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
1585 map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);
1587 /* Update the outgoing argument size to allow for those in the inlined
1588 function. */
1589 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1590 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1592 /* If the inline function needs to make PIC references, that means
1593 that this function's PIC offset table must be used. */
1594 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1595 current_function_uses_pic_offset_table = 1;
1597 /* If this function needs a context, set it up. */
1598 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
1599 static_chain_value = lookup_static_chain (fndecl);
1601 if (GET_CODE (parm_insns) == NOTE
1602 && NOTE_LINE_NUMBER (parm_insns) > 0)
1604 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1605 NOTE_LINE_NUMBER (parm_insns));
1606 if (note)
1607 RTX_INTEGRATED_P (note) = 1;
1610 /* Process each argument. For each, set up things so that the function's
1611 reference to the argument will refer to the argument being passed.
1612 We only replace REG with REG here. Any simplifications are done
1613 via const_equiv_map.
1615 We make two passes: In the first, we deal with parameters that will
1616 be placed into registers, since we need to ensure that the allocated
1617 register number fits in const_equiv_map. Then we store all non-register
1618 parameters into their memory location. */
1620 /* Don't try to free temp stack slots here, because we may put one of the
1621 parameters into a temp stack slot. */
1623 for (i = 0; i < nargs; i++)
1625 rtx copy = arg_vals[i];
1627 loc = RTVEC_ELT (arg_vector, i);
1629 /* There are three cases, each handled separately. */
1630 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1631 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1633 /* This must be an object passed by invisible reference (it could
1634 also be a variable-sized object, but we forbid inlining functions
1635 with variable-sized arguments). COPY is the address of the
1636 actual value (this computation will cause it to be copied). We
1637 map that address for the register, noting the actual address as
1638 an equivalent in case it can be substituted into the insns. */
1640 if (GET_CODE (copy) != REG)
1642 temp = copy_addr_to_reg (copy);
1643 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1644 && REGNO (temp) < map->const_equiv_map_size)
1646 map->const_equiv_map[REGNO (temp)] = copy;
1647 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1649 copy = temp;
1651 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1653 else if (GET_CODE (loc) == MEM)
1655 /* This is the case of a parameter that lives in memory.
1656 It will live in the block we allocate in the called routine's
1657 frame that simulates the incoming argument area. Do nothing
1658 now; we will call store_expr later. */
1661 else if (GET_CODE (loc) == REG)
1662 process_reg_param (map, loc, copy);
1663 else if (GET_CODE (loc) == CONCAT)
1665 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
1666 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
1667 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
1668 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
1670 process_reg_param (map, locreal, copyreal);
1671 process_reg_param (map, locimag, copyimag);
1673 else
1674 abort ();
1677 /* Now do the parameters that will be placed in memory. */
1679 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1680 formal; formal = TREE_CHAIN (formal), i++)
1682 loc = RTVEC_ELT (arg_vector, i);
1684 if (GET_CODE (loc) == MEM
1685 /* Exclude case handled above. */
1686 && ! (GET_CODE (XEXP (loc, 0)) == REG
1687 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1689 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1690 DECL_SOURCE_LINE (formal));
1691 if (note)
1692 RTX_INTEGRATED_P (note) = 1;
1694 /* Compute the address in the area we reserved and store the
1695 value there. */
1696 temp = copy_rtx_and_substitute (loc, map);
1697 subst_constants (&temp, NULL_RTX, map);
1698 apply_change_group ();
1699 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1700 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1701 store_expr (arg_trees[i], temp, 0);
1705 /* Deal with the places that the function puts its result.
1706 We are driven by what is placed into DECL_RESULT.
1708 Initially, we assume that we don't need any special handling for
1709 REG_FUNCTION_VALUE_P. */
1711 map->inline_target = 0;
1712 loc = DECL_RTL (DECL_RESULT (fndecl));
1714 if (TYPE_MODE (type) == VOIDmode)
1715 /* There is no return value to worry about. */
1717 else if (GET_CODE (loc) == MEM)
1719 if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
1721 temp = copy_rtx_and_substitute (loc, map);
1722 subst_constants (&temp, NULL_RTX, map);
1723 apply_change_group ();
1724 target = temp;
1726 else
1728 if (! structure_value_addr
1729 || ! aggregate_value_p (DECL_RESULT (fndecl)))
1730 abort ();
1732 /* Pass the function the address in which to return a structure
1733 value. Note that a constructor can cause someone to call us
1734 with STRUCTURE_VALUE_ADDR, but the initialization takes place
1735 via the first parameter, rather than the struct return address.
1737 We have two cases: If the address is a simple register
1738 indirect, use the mapping mechanism to point that register to
1739 our structure return address. Otherwise, store the structure
1740 return value into the place that it will be referenced from. */
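      /* For example (pseudo numbers are illustrative): if LOC is
         (mem:BLK (reg 66)), we simply remap pseudo 66 to a new register
         holding STRUCTURE_VALUE_ADDR; if LOC is instead something like
         (mem:BLK (plus (reg <fp>) (const_int -8))), we copy the address
         expression and emit an explicit move of STRUCTURE_VALUE_ADDR
         into it.  */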
1742 if (GET_CODE (XEXP (loc, 0)) == REG)
1744 temp = force_operand (structure_value_addr, NULL_RTX);
1745 temp = force_reg (Pmode, temp);
1746 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1748 if ((CONSTANT_P (structure_value_addr)
1749 || GET_CODE (structure_value_addr) == ADDRESSOF
1750 || (GET_CODE (structure_value_addr) == PLUS
1751 && (XEXP (structure_value_addr, 0)
1752 == virtual_stack_vars_rtx)
1753 && (GET_CODE (XEXP (structure_value_addr, 1))
1754 == CONST_INT)))
1755 && REGNO (temp) < map->const_equiv_map_size)
1757 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1758 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1761 else
1763 temp = copy_rtx_and_substitute (loc, map);
1764 subst_constants (&temp, NULL_RTX, map);
1765 apply_change_group ();
1766 emit_move_insn (temp, structure_value_addr);
1770 else if (ignore)
1771 /* We will ignore the result value, so don't look at its structure.
1772 Note that preparations for an aggregate return value
1773 do need to be made (above) even if it will be ignored. */
1775 else if (GET_CODE (loc) == REG)
1777 /* The function returns an object in a register and we use the return
1778 value. Set up our target for remapping. */
1780 /* Machine mode function was declared to return. */
1781 enum machine_mode departing_mode = TYPE_MODE (type);
1782 /* (Possibly wider) machine mode it actually computes
1783 (for the sake of callers that fail to declare it right).
1784 We have to use the mode of the result's RTL, rather than
1785 its type, since expand_function_start may have promoted it. */
1786 enum machine_mode arriving_mode
1787 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1788 rtx reg_to_map;
1790 /* Don't use MEMs as direct targets because on some machines
1791 substituting a MEM for a REG makes invalid insns.
1792 Let the combiner substitute the MEM if that is valid. */
1793 if (target == 0 || GET_CODE (target) != REG
1794 || GET_MODE (target) != departing_mode)
1796 /* Don't make BLKmode registers. If this looks like
1797 a BLKmode object being returned in a register, get
1798 the mode from that, otherwise abort. */
1799 if (departing_mode == BLKmode)
1801 if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
1803 departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1804 arriving_mode = departing_mode;
1806 else
1807 abort ();
1810 target = gen_reg_rtx (departing_mode);
1813 /* If function's value was promoted before return,
1814 avoid machine mode mismatch when we substitute INLINE_TARGET.
1815 But TARGET is what we will return to the caller. */
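      /* For instance (an assumed promotion case): on a typical 32-bit
         target that promotes small return values, a function declared
         to return a QImode value may compute it in SImode.  Then
         DEPARTING_MODE is QImode, ARRIVING_MODE is SImode, and
         REG_TO_MAP below becomes an SImode paradoxical SUBREG of the
         QImode TARGET.  */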
1816 if (arriving_mode != departing_mode)
1818 /* Avoid creating a paradoxical subreg wider than
1819 BITS_PER_WORD, since that is illegal. */
1820 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1822 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1823 GET_MODE_BITSIZE (arriving_mode)))
1824 /* Maybe this could be handled by using convert_move ()? */
1825 abort ();
1826 reg_to_map = gen_reg_rtx (arriving_mode);
1827 target = gen_lowpart (departing_mode, reg_to_map);
1829 else
1830 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1832 else
1833 reg_to_map = target;
1835 /* Usually, the result value is the machine's return register.
1836 Sometimes it may be a pseudo. Handle both cases. */
1837 if (REG_FUNCTION_VALUE_P (loc))
1838 map->inline_target = reg_to_map;
1839 else
1840 map->reg_map[REGNO (loc)] = reg_to_map;
1842 else
1843 abort ();
1845 /* Make a fresh binding contour that we can easily remove. Do this after
1846 expanding our arguments so cleanups are properly scoped. */
1847 pushlevel (0);
1848 expand_start_bindings (0);
1850 /* Initialize label_map. get_label_from_map will actually make
1851 the labels. */
1852 bzero ((char *) &map->label_map [min_labelno],
1853 (max_labelno - min_labelno) * sizeof (rtx));
1855 /* Perform postincrements before actually calling the function. */
1856 emit_queue ();
1858 /* Clean up stack so that variables might have smaller offsets. */
1859 do_pending_stack_adjust ();
1861 /* Save a copy of the location of const_equiv_map for mark_stores, called
1862 via note_stores. */
1863 global_const_equiv_map = map->const_equiv_map;
1864 global_const_equiv_map_size = map->const_equiv_map_size;
1866 /* If the called function does an alloca, save and restore the
1867 stack pointer around the call. This saves stack space, but
1868 also is required if this inline is being done between two
1869 pushes. */
1870 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1871 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1873 /* Now copy the insns one by one. Do this in two passes, first the insns and
1874 then their REG_NOTES, just like save_for_inline. */
1876 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1878 for (insn = insns; insn; insn = NEXT_INSN (insn))
1880 rtx copy, pattern, set;
1882 map->orig_asm_operands_vector = 0;
1884 switch (GET_CODE (insn))
1886 case INSN:
1887 pattern = PATTERN (insn);
1888 set = single_set (insn);
1889 copy = 0;
1890 if (GET_CODE (pattern) == USE
1891 && GET_CODE (XEXP (pattern, 0)) == REG
1892 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1893 /* The (USE (REG n)) at return from the function should
1894 be ignored since we are changing (REG n) into
1895 inline_target. */
1896 break;
1898 /* If the inline fn needs eh context, make sure that
1899 the current fn has one. */
1900 if (GET_CODE (pattern) == USE
1901 && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
1902 get_eh_context ();
1904 /* Ignore setting a function value that we don't want to use. */
1905 if (map->inline_target == 0
1906 && set != 0
1907 && GET_CODE (SET_DEST (set)) == REG
1908 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1910 if (volatile_refs_p (SET_SRC (set)))
1912 rtx new_set;
1914 /* If we must not delete the source,
1915 load it into a new temporary. */
1916 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1918 new_set = single_set (copy);
1919 if (new_set == 0)
1920 abort ();
1922 SET_DEST (new_set)
1923 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1925 /* If the source and destination are the same and it
1926 has a note on it, keep the insn. */
1927 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1928 && REG_NOTES (insn) != 0)
1929 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1930 else
1931 break;
1934 /* If this is setting the static chain rtx, omit it. */
1935 else if (static_chain_value != 0
1936 && set != 0
1937 && GET_CODE (SET_DEST (set)) == REG
1938 && rtx_equal_p (SET_DEST (set),
1939 static_chain_incoming_rtx))
1940 break;
1942 /* If this is setting the static chain pseudo, set it from
1943 the value we want to give it instead. */
1944 else if (static_chain_value != 0
1945 && set != 0
1946 && rtx_equal_p (SET_SRC (set),
1947 static_chain_incoming_rtx))
1949 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1951 copy = emit_move_insn (newdest, static_chain_value);
1952 static_chain_value = 0;
1954 else
1955 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1956 /* REG_NOTES will be copied later. */
1958 #ifdef HAVE_cc0
1959 /* If this insn is setting CC0, it may need to look at
1960 the insn that uses CC0 to see what type of insn it is.
1961 In that case, the call to recog via validate_change will
1962 fail. So don't substitute constants here. Instead,
1963 do it when we emit the following insn.
1965 For example, see the pyr.md file. That machine has signed and
1966 unsigned compares. The compare patterns must check the
1967 following branch insn to see what kind of compare to
1968 emit.
1970 If the previous insn set CC0, substitute constants on it as
1971 well. */
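      /* Sketch of the two-insn shape being described (assumed forms
         for a cc0 target; operands are made up):

           (set (cc0) (compare (reg:SI 70) (const_int 0)))
           (set (pc) (if_then_else (lt (cc0) (const_int 0))
                                   (label_ref 23) (pc)))

         Substitution into the compare is deferred until the branch
         is copied, so both are validated together.  */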
1972 if (sets_cc0_p (PATTERN (copy)) != 0)
1973 cc0_insn = copy;
1974 else
1976 if (cc0_insn)
1977 try_constants (cc0_insn, map);
1978 cc0_insn = 0;
1979 try_constants (copy, map);
1981 #else
1982 try_constants (copy, map);
1983 #endif
1984 break;
1986 case JUMP_INSN:
1987 if (GET_CODE (PATTERN (insn)) == RETURN
1988 || (GET_CODE (PATTERN (insn)) == PARALLEL
1989 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
1991 if (local_return_label == 0)
1992 local_return_label = gen_label_rtx ();
1993 pattern = gen_jump (local_return_label);
1995 else
1996 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1998 copy = emit_jump_insn (pattern);
2000 #ifdef HAVE_cc0
2001 if (cc0_insn)
2002 try_constants (cc0_insn, map);
2003 cc0_insn = 0;
2004 #endif
2005 try_constants (copy, map);
2007 /* If this used to be a conditional jump insn whose branch
2008 direction is now known, we must do something special. */
2009 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
2011 #ifdef HAVE_cc0
2012 /* The previous insn set cc0 for us. So delete it. */
2013 delete_insn (PREV_INSN (copy));
2014 #endif
2016 /* If this is now a no-op, delete it. */
2017 if (map->last_pc_value == pc_rtx)
2019 delete_insn (copy);
2020 copy = 0;
2022 else
2023 /* Otherwise, this is an unconditional jump, so we must put a
2024 BARRIER after it. We could do some dead code elimination
2025 here, but jump.c will do it just as well. */
2026 emit_barrier ();
2028 break;
2030 case CALL_INSN:
2031 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
2032 copy = emit_call_insn (pattern);
2034 /* Because the USAGE information potentially contains objects other
2035 than hard registers, we need to copy it. */
2036 CALL_INSN_FUNCTION_USAGE (copy)
2037 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
2039 #ifdef HAVE_cc0
2040 if (cc0_insn)
2041 try_constants (cc0_insn, map);
2042 cc0_insn = 0;
2043 #endif
2044 try_constants (copy, map);
2046 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
2047 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2048 map->const_equiv_map[i] = 0;
2049 break;
2051 case CODE_LABEL:
2052 copy = emit_label (get_label_from_map (map,
2053 CODE_LABEL_NUMBER (insn)));
2054 LABEL_NAME (copy) = LABEL_NAME (insn);
2055 map->const_age++;
2056 break;
2058 case BARRIER:
2059 copy = emit_barrier ();
2060 break;
2062 case NOTE:
2063 /* It is important to discard function-end and function-beg notes,
2064 so we have only one of each in the current function.
2065 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
2066 deleted these in the copy used for continuing compilation,
2067 not the copy used for inlining). */
2068 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
2069 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
2070 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
2072 copy = emit_note (NOTE_SOURCE_FILE (insn),
2073 NOTE_LINE_NUMBER (insn));
2074 if (copy
2075 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
2076 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
2078 rtx label
2079 = get_label_from_map (map, NOTE_BLOCK_NUMBER (copy));
2081 /* We have to duplicate the handlers for the original. */
2082 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
2084 /* We need to duplicate the handlers for the EH region
2085 and we need to indicate where the label map is. */
2086 eif_eh_map = map;
2087 duplicate_eh_handlers (NOTE_BLOCK_NUMBER (copy),
2088 CODE_LABEL_NUMBER (label),
2089 expand_inline_function_eh_labelmap);
2092 /* We have to forward both of these to match the new exception
2093 region. */
2094 NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
2097 else
2098 copy = 0;
2099 break;
2101 default:
2102 abort ();
2103 break;
2106 if (copy)
2107 RTX_INTEGRATED_P (copy) = 1;
2109 map->insn_map[INSN_UID (insn)] = copy;
2112 /* Now copy the REG_NOTES. Increment const_age, so that only constants
2113 from parameters can be substituted in. These are the only ones that
2114 are valid across the entire function. */
2115 map->const_age++;
2116 for (insn = insns; insn; insn = NEXT_INSN (insn))
2117 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2118 && map->insn_map[INSN_UID (insn)]
2119 && REG_NOTES (insn))
2121 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
2122 /* We must also do subst_constants, in case one of our parameters
2123 has const type and constant value. */
2124 subst_constants (&tem, NULL_RTX, map);
2125 apply_change_group ();
2126 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
2129 if (local_return_label)
2130 emit_label (local_return_label);
2132 /* Restore the stack pointer if we saved it above. */
2133 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
2134 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
2136 /* Make copies of the decls of the symbols in the inline function, so that
2137 the copies of the variables get declared in the current function. Set
2138 up things so that lookup_static_chain knows to interpret registers
2139 in SAVE_EXPRs for TYPE_SIZEs as local. */
2141 inline_function_decl = fndecl;
2142 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
2143 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
2144 inline_function_decl = 0;
2146 /* End the scope containing the copied formal parameter variables
2147 and copied LABEL_DECLs. */
2149 expand_end_bindings (getdecls (), 1, 1);
2150 block = poplevel (1, 1, 0);
2151 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
2152 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
2153 poplevel (0, 0, 0);
2155 /* Must mark the line number note after inlined functions as a repeat, so
2156 that the test coverage code can avoid counting the call twice. This
2157 just tells the code to ignore the immediately following line note, since
2158 there already exists a copy of this note before the expanded inline call.
2159 This line number note is still needed for debugging though, so we can't
2160 delete it. */
2161 if (flag_test_coverage)
2162 emit_note (0, NOTE_REPEATED_LINE_NUMBER);
2164 emit_line_note (input_filename, lineno);
2166 /* If the function returns a BLKmode object in a register, copy it
2167 out of the temp register into a BLKmode memory object. */
2168 if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
2169 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
2170 target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));
2172 if (structure_value_addr)
2174 target = gen_rtx_MEM (TYPE_MODE (type),
2175 memory_address (TYPE_MODE (type),
2176 structure_value_addr));
2177 MEM_SET_IN_STRUCT_P (target, 1);
2180 /* Make sure we free the things we explicitly allocated with xmalloc. */
2181 if (real_label_map)
2182 free (real_label_map);
2184 return target;
2187 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
2188 push all of those decls and give each one the corresponding home. */
2190 static void
2191 integrate_parm_decls (args, map, arg_vector)
2192 tree args;
2193 struct inline_remap *map;
2194 rtvec arg_vector;
2196 register tree tail;
2197 register int i;
2199 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
2201 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
2202 TREE_TYPE (tail));
2203 rtx new_decl_rtl
2204 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
2206 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
2207 /* We really should be setting DECL_INCOMING_RTL to something reasonable
2208 here, but that's going to require some more work. */
2209 /* DECL_INCOMING_RTL (decl) = ?; */
2210 /* These args would always appear unused, if not for this. */
2211 TREE_USED (decl) = 1;
2212 /* Prevent warning for shadowing with these. */
2213 DECL_ABSTRACT_ORIGIN (decl) = DECL_ORIGIN (tail);
2214 pushdecl (decl);
2215 /* Fully instantiate the address with the equivalent form so that the
2216 debugging information contains the actual register, instead of the
2217 virtual register. Do this by not passing an insn to
2218 subst_constants. */
2219 subst_constants (&new_decl_rtl, NULL_RTX, map);
2220 apply_change_group ();
2221 DECL_RTL (decl) = new_decl_rtl;
2225 /* Given a BLOCK node LET, push decls and levels so as to construct in the
2226 current function a tree of contexts isomorphic to the one that is given.
2228 LEVEL indicates how far down into the BLOCK tree is the node we are
2229 currently traversing. It is always zero except for recursive calls.
2231 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
2232 registers used in the DECL_RTL field should be remapped. If it is zero,
2233 no mapping is necessary. */
2235 static void
2236 integrate_decl_tree (let, level, map)
2237 tree let;
2238 int level;
2239 struct inline_remap *map;
2241 tree t, node;
2243 if (level > 0)
2244 pushlevel (0);
2246 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2248 tree d;
2250 push_obstacks_nochange ();
2251 saveable_allocation ();
2252 d = copy_and_set_decl_abstract_origin (t);
2253 pop_obstacks ();
2255 if (DECL_RTL (t) != 0)
2257 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
2258 /* Fully instantiate the address with the equivalent form so that the
2259 debugging information contains the actual register, instead of the
2260 virtual register. Do this by not passing an insn to
2261 subst_constants. */
2262 subst_constants (&DECL_RTL (d), NULL_RTX, map);
2263 apply_change_group ();
2265 /* These args would always appear unused, if not for this. */
2266 TREE_USED (d) = 1;
2268 if (DECL_LANG_SPECIFIC (d))
2269 copy_lang_decl (d);
2271 pushdecl (d);
2274 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2275 integrate_decl_tree (t, level + 1, map);
2277 if (level > 0)
2279 node = poplevel (1, 0, 0);
2280 if (node)
2282 TREE_USED (node) = TREE_USED (let);
2283 BLOCK_ABSTRACT_ORIGIN (node) = let;
2288 /* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
2289 through save_constants. */
2291 static void
2292 save_constants_in_decl_trees (let)
2293 tree let;
2295 tree t;
2297 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2298 if (DECL_RTL (t) != 0)
2299 save_constants (&DECL_RTL (t));
2301 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2302 save_constants_in_decl_trees (t);
2305 /* Create a new copy of an rtx.
2306 Recursively copies the operands of the rtx,
2307 except for those few rtx codes that are sharable.
2309 We always return an rtx that is similar to that incoming rtx, with the
2310 exception of possibly changing a REG to a SUBREG or vice versa. No
2311 rtl is ever emitted.
2313 Handle constants that need to be placed in the constant pool by
2314 calling `force_const_mem'. */
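/* A minimal usage sketch (hypothetical pseudo numbers): copying
   (plus:SI (reg:SI 70) (const_int 4)) allocates a fresh PLUS rtx,
   maps pseudo 70 through `reg_map' (creating a new pseudo on its
   first appearance), and leaves the CONST_INT operand shared, since
   CONST_INTs are never copied.  */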
2316 rtx
2317 copy_rtx_and_substitute (orig, map)
2318 register rtx orig;
2319 struct inline_remap *map;
2321 register rtx copy, temp;
2322 register int i, j;
2323 register RTX_CODE code;
2324 register enum machine_mode mode;
2325 register char *format_ptr;
2326 int regno;
2328 if (orig == 0)
2329 return 0;
2331 code = GET_CODE (orig);
2332 mode = GET_MODE (orig);
2334 switch (code)
2336 case REG:
2337 /* If the stack pointer register shows up, it must be part of
2338 stack-adjustments (*not* because we eliminated the frame pointer!).
2339 Small hard registers are returned as-is. Pseudo-registers
2340 go through their `reg_map'. */
2341 regno = REGNO (orig);
2342 if (regno <= LAST_VIRTUAL_REGISTER)
2344 /* Some hard registers are also mapped,
2345 but others are not translated. */
2346 if (map->reg_map[regno] != 0)
2347 return map->reg_map[regno];
2349 /* If this is the virtual frame pointer, make space in current
2350 function's stack frame for the stack frame of the inline function.
2352 Copy the address of this area into a pseudo. Map
2353 virtual_stack_vars_rtx to this pseudo and set up a constant
2354 equivalence for it to be the address. This will substitute the
2355 address into insns where it can be substituted and use the new
2356 pseudo where it can't. */
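	  /* E.g. (an assumed layout): if the inline function's frame is
	     24 bytes, we allocate a 24-byte BLKmode temporary in the
	     current frame, load its address into a fresh pseudo, and
	     record that address as the pseudo's constant equivalent so
	     later insns can fold it back into address arithmetic.  */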
2357 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2359 rtx loc, seq;
2360 int size = DECL_FRAME_SIZE (map->fndecl);
2362 #ifdef FRAME_GROWS_DOWNWARD
2363 /* In this case, virtual_stack_vars_rtx points to one byte
2364 higher than the top of the frame area. So make sure we
2365 allocate a big enough chunk to keep the frame pointer
2366 aligned like a real one. */
2367 size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2368 #endif
2369 start_sequence ();
2370 loc = assign_stack_temp (BLKmode, size, 1);
2371 loc = XEXP (loc, 0);
2372 #ifdef FRAME_GROWS_DOWNWARD
2373 /* In this case, virtual_stack_vars_rtx points to one byte
2374 higher than the top of the frame area. So compute the offset
2375 to one byte higher than our substitute frame. */
2376 loc = plus_constant (loc, size);
2377 #endif
2378 map->reg_map[regno] = temp
2379 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2381 #ifdef STACK_BOUNDARY
2382 mark_reg_pointer (map->reg_map[regno],
2383 STACK_BOUNDARY / BITS_PER_UNIT);
2384 #endif
2386 if (REGNO (temp) < map->const_equiv_map_size)
2388 map->const_equiv_map[REGNO (temp)] = loc;
2389 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2392 seq = gen_sequence ();
2393 end_sequence ();
2394 emit_insn_after (seq, map->insns_at_start);
2395 return temp;
2397 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2399 /* Do the same for a block to contain any arguments referenced
2400 in memory. */
2401 rtx loc, seq;
2402 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2404 start_sequence ();
2405 loc = assign_stack_temp (BLKmode, size, 1);
2406 loc = XEXP (loc, 0);
2407 /* When arguments grow downward, the virtual incoming
2408 args pointer points to the top of the argument block,
2409 so the remapped location better do the same. */
2410 #ifdef ARGS_GROW_DOWNWARD
2411 loc = plus_constant (loc, size);
2412 #endif
2413 map->reg_map[regno] = temp
2414 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2416 #ifdef STACK_BOUNDARY
2417 mark_reg_pointer (map->reg_map[regno],
2418 STACK_BOUNDARY / BITS_PER_UNIT);
2419 #endif
2421 if (REGNO (temp) < map->const_equiv_map_size)
2423 map->const_equiv_map[REGNO (temp)] = loc;
2424 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2427 seq = gen_sequence ();
2428 end_sequence ();
2429 emit_insn_after (seq, map->insns_at_start);
2430 return temp;
2432 else if (REG_FUNCTION_VALUE_P (orig))
2434 /* This is a reference to the function return value. If
2435 the function doesn't have a return value, error. If the
2436 mode doesn't agree, and it isn't BLKmode, make a SUBREG. */
2437 if (map->inline_target == 0)
2438 /* Must be unrolling loops or replicating code if we
2439 reach here, so return the register unchanged. */
2440 return orig;
2441 else if (GET_MODE (map->inline_target) != BLKmode
2442 && mode != GET_MODE (map->inline_target))
2443 return gen_lowpart (mode, map->inline_target);
2444 else
2445 return map->inline_target;
2447 return orig;
2449 if (map->reg_map[regno] == NULL)
2451 map->reg_map[regno] = gen_reg_rtx (mode);
2452 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2453 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2454 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2455 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2457 if (map->regno_pointer_flag[regno])
2458 mark_reg_pointer (map->reg_map[regno],
2459 map->regno_pointer_align[regno]);
2461 return map->reg_map[regno];
2463 case SUBREG:
2464 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2465 /* SUBREG is ordinary, but don't make nested SUBREGs. */
2466 if (GET_CODE (copy) == SUBREG)
2467 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
2468 SUBREG_WORD (orig) + SUBREG_WORD (copy));
2469 else if (GET_CODE (copy) == CONCAT)
2471 rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);
2473 if (GET_MODE (retval) == GET_MODE (orig))
2474 return retval;
2475 else
2476 return gen_rtx_SUBREG (GET_MODE (orig), retval,
2477 (SUBREG_WORD (orig) %
2478 (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)))
2479 / (unsigned) UNITS_PER_WORD)));
2481 else
2482 return gen_rtx_SUBREG (GET_MODE (orig), copy,
2483 SUBREG_WORD (orig));
2485 case ADDRESSOF:
2486 copy = gen_rtx_ADDRESSOF (mode,
2487 copy_rtx_and_substitute (XEXP (orig, 0), map), 0);
2488 SET_ADDRESSOF_DECL (copy, ADDRESSOF_DECL (orig));
2489 regno = ADDRESSOF_REGNO (orig);
2490 if (map->reg_map[regno])
2491 regno = REGNO (map->reg_map[regno]);
2492 else if (regno > LAST_VIRTUAL_REGISTER)
2494 temp = XEXP (orig, 0);
2495 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2496 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2497 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2498 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2499 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2501 if (map->regno_pointer_flag[regno])
2502 mark_reg_pointer (map->reg_map[regno],
2503 map->regno_pointer_align[regno]);
2504 regno = REGNO (map->reg_map[regno]);
2506 ADDRESSOF_REGNO (copy) = regno;
2507 return copy;
2509 case USE:
2510 case CLOBBER:
2511 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2512 to (use foo) if the original insn didn't have a subreg.
2513 Removing the subreg distorts the VAX movstrhi pattern
2514 by changing the mode of an operand. */
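      /* Illustration (modes and pseudo numbers are assumed): if the
         copied operand of (use (reg:SI 70)) comes back as
         (subreg:SI (reg:DI 71) 0), we emit (use (reg:DI 71))
         instead of leaving the SUBREG inside the USE.  */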
2515 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2516 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2517 copy = SUBREG_REG (copy);
2518 return gen_rtx_fmt_e (code, VOIDmode, copy);
2520 case CODE_LABEL:
2521 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2522 = LABEL_PRESERVE_P (orig);
2523 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2525 case LABEL_REF:
2526 copy = gen_rtx_LABEL_REF (mode,
2527 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2528 : get_label_from_map (map,
2529 CODE_LABEL_NUMBER (XEXP (orig, 0))));
2530 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2532 /* The fact that this label was previously nonlocal does not mean
2533 it still is, so we must check if it is within the range of
2534 this function's labels. */
2535 LABEL_REF_NONLOCAL_P (copy)
2536 = (LABEL_REF_NONLOCAL_P (orig)
2537 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2538 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2540 /* If we have made a nonlocal label local, it means that this
2541 inlined call will be referring to our nonlocal goto handler.
2542 So make sure we create one for this block; we normally would
2543 not since this is not otherwise considered a "call". */
2544 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2545 function_call_count++;
2547 return copy;
2549 case PC:
2550 case CC0:
2551 case CONST_INT:
2552 return orig;
2554 case SYMBOL_REF:
2555 /* Symbols which represent the address of a label stored in the constant
2556 pool must be modified to point to a constant pool entry for the
2557 remapped label. Otherwise, symbols are returned unchanged. */
2558 if (CONSTANT_POOL_ADDRESS_P (orig))
2560 rtx constant = get_pool_constant (orig);
2561 if (GET_CODE (constant) == LABEL_REF)
2562 return XEXP (force_const_mem (GET_MODE (orig),
2563 copy_rtx_and_substitute (constant,
2564 map)),
2565 0);
2567 else
2568 if (SYMBOL_REF_NEED_ADJUST (orig))
2570 eif_eh_map = map;
2571 return rethrow_symbol_map (orig,
2572 expand_inline_function_eh_labelmap);
2575 return orig;
2577 case CONST_DOUBLE:
2578 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2579 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2580 duplicate of a CONST_DOUBLE we have already seen. */
2581 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2583 REAL_VALUE_TYPE d;
2585 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2586 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2588 else
2589 return immed_double_const (CONST_DOUBLE_LOW (orig),
2590 CONST_DOUBLE_HIGH (orig), VOIDmode);
2592 case CONST:
2593 /* Make new constant pool entry for a constant
2594 that was in the pool of the inline function. */
2595 if (RTX_INTEGRATED_P (orig))
2597 /* If this was an address of a constant pool entry that itself
2598 had to be placed in the constant pool, it might not be a
2599 valid address. So the recursive call below might turn it
2600 into a register. In that case, it isn't a constant any
2601 more, so return it. This has the potential of changing a
2602 MEM into a REG, but we'll assume that it is safe. */
2603 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2604 if (! CONSTANT_P (temp))
2605 return temp;
2606 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2608 break;
2610 case ADDRESS:
2611 /* If from constant pool address, make new constant pool entry and
2612 return its address. */
2613 if (! RTX_INTEGRATED_P (orig))
2614 abort ();
2616 temp
2617 = force_const_mem (GET_MODE (XEXP (orig, 0)),
2618 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2619 map));
2621 #if 0
2622 /* Legitimizing the address here is incorrect.
2624 The only ADDRESS rtx's that can reach here are ones created by
2625 save_constants. Hence the operand of the ADDRESS is always valid
2626 in this position of the instruction, since the original rtx without
2627 the ADDRESS was valid.
2629 The reason we don't legitimize the address here is that on the
2630 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2631 This code forces the operand of the address to a register, which
2632 fails because we cannot take the HIGH part of a register.
2634 Also, change_address may create new registers. These registers
2635 will not have valid reg_map entries. This can cause try_constants()
2636 to fail because it assumes that all registers in the rtx have valid
2637 reg_map entries, and it may end up replacing one of these new
2638 registers with junk. */
2640 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2641 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2642 #endif
2644 temp = XEXP (temp, 0);
2646 #ifdef POINTERS_EXTEND_UNSIGNED
2647 if (GET_MODE (temp) != GET_MODE (orig))
2648 temp = convert_memory_address (GET_MODE (orig), temp);
2649 #endif
2651 return temp;
2653 case ASM_OPERANDS:
2654 /* If a single asm insn contains multiple output operands
2655 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2656 We must make sure that the copied insn continues to share it. */
2657 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2659 copy = rtx_alloc (ASM_OPERANDS);
2660 copy->volatil = orig->volatil;
2661 XSTR (copy, 0) = XSTR (orig, 0);
2662 XSTR (copy, 1) = XSTR (orig, 1);
2663 XINT (copy, 2) = XINT (orig, 2);
2664 XVEC (copy, 3) = map->copy_asm_operands_vector;
2665 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2666 XSTR (copy, 5) = XSTR (orig, 5);
2667 XINT (copy, 6) = XINT (orig, 6);
2668 return copy;
2670 break;
2672 case CALL:
2673 /* This is given special treatment because the first
2674 operand of a CALL is a (MEM ...) which may get
2675 forced into a register for cse. This is undesirable
2676 if function-address cse isn't wanted or if we won't do cse. */
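      /* Illustrative shape (hypothetical callee): for
         (call (mem:QI (symbol_ref "f")) (const_int 16)) we rebuild
         the MEM explicitly around the copied address, so the address
         is not forced into a register behind our back.  */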
2677 #ifndef NO_FUNCTION_CSE
2678 if (! (optimize && ! flag_no_function_cse))
2679 #endif
2680 return gen_rtx_CALL (GET_MODE (orig),
2681 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2682 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2683 copy_rtx_and_substitute (XEXP (orig, 1), map));
2684 break;
2686 #if 0
2687 /* Must be ifdefed out for loop unrolling to work. */
2688 case RETURN:
2689 abort ();
2690 #endif
2692 case SET:
2693 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2694 Adjust the setting by the offset of the area we made.
2695 If the nonlocal goto is into the current function,
2696 this will result in unnecessarily bad code, but should work. */
2697 if (SET_DEST (orig) == virtual_stack_vars_rtx
2698 || SET_DEST (orig) == virtual_incoming_args_rtx)
2700 /* In case a translation hasn't occurred already, make one now. */
2701 rtx equiv_reg;
2702 rtx equiv_loc;
2703 HOST_WIDE_INT loc_offset;
2705 copy_rtx_and_substitute (SET_DEST (orig), map);
2706 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2707 equiv_loc = map->const_equiv_map[REGNO (equiv_reg)];
2708 loc_offset
2709 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2710 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2711 force_operand
2712 (plus_constant
2713 (copy_rtx_and_substitute (SET_SRC (orig), map),
2714 - loc_offset),
2715 NULL_RTX));
2717 break;
2719 case MEM:
2720 copy = rtx_alloc (MEM);
2721 PUT_MODE (copy, mode);
2722 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2723 MEM_COPY_ATTRIBUTES (copy, orig);
2724 MEM_ALIAS_SET (copy) = MEM_ALIAS_SET (orig);
2726 /* If doing function inlining, this MEM might not be const in the
2727 function that it is being inlined into, and thus may not be
2728 unchanging after function inlining. Constant pool references are
2729 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
2730 for them. */
2731 if (! map->integrating)
2732 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2734 return copy;
2736 default:
2737 break;
2740 copy = rtx_alloc (code);
2741 PUT_MODE (copy, mode);
2742 copy->in_struct = orig->in_struct;
2743 copy->volatil = orig->volatil;
2744 copy->unchanging = orig->unchanging;
2746 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2748 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2750 switch (*format_ptr++)
2752 case '0':
2753 XEXP (copy, i) = XEXP (orig, i);
2754 break;
2756 case 'e':
2757 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2758 break;
2760 case 'u':
2761 /* Change any references to old-insns to point to the
2762 corresponding copied insns. */
2763 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2764 break;
2766 case 'E':
2767 XVEC (copy, i) = XVEC (orig, i);
2768 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2770 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2771 for (j = 0; j < XVECLEN (copy, i); j++)
2772 XVECEXP (copy, i, j)
2773 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2775 break;
2777 case 'w':
2778 XWINT (copy, i) = XWINT (orig, i);
2779 break;
2781 case 'i':
2782 XINT (copy, i) = XINT (orig, i);
2783 break;
2785 case 's':
2786 XSTR (copy, i) = XSTR (orig, i);
2787 break;
2789 default:
2790 abort ();
2794 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2796 map->orig_asm_operands_vector = XVEC (orig, 3);
2797 map->copy_asm_operands_vector = XVEC (copy, 3);
2798 map->copy_asm_constraints_vector = XVEC (copy, 4);
2801 return copy;
2804 /* Substitute known constant values into INSN, if that is valid. */
2806 void
2807 try_constants (insn, map)
2808 rtx insn;
2809 struct inline_remap *map;
2811 int i;
2813 map->num_sets = 0;
2814 subst_constants (&PATTERN (insn), insn, map);
2816 /* Apply the changes if they are valid; otherwise discard them. */
2817 apply_change_group ();
2819 /* Show we don't know the value of anything stored or clobbered. */
2820 note_stores (PATTERN (insn), mark_stores);
2821 map->last_pc_value = 0;
2822 #ifdef HAVE_cc0
2823 map->last_cc0_value = 0;
2824 #endif
2826 /* Set up any constant equivalences made in this insn. */
2827 for (i = 0; i < map->num_sets; i++)
2829 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2831 int regno = REGNO (map->equiv_sets[i].dest);
2833 if (regno < map->const_equiv_map_size
2834 && (map->const_equiv_map[regno] == 0
2835 /* The following clause is a hack to make the case work where GNU C++
2836 reassigns a variable to make cse work right. */
2837 || ! rtx_equal_p (map->const_equiv_map[regno],
2838 map->equiv_sets[i].equiv)))
2840 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2841 map->const_age_map[regno] = map->const_age;
2844 else if (map->equiv_sets[i].dest == pc_rtx)
2845 map->last_pc_value = map->equiv_sets[i].equiv;
2846 #ifdef HAVE_cc0
2847 else if (map->equiv_sets[i].dest == cc0_rtx)
2848 map->last_cc0_value = map->equiv_sets[i].equiv;
2849 #endif
2853 /* Substitute known constants for pseudo regs in the contents of LOC,
2854 which are part of INSN.
2855 If INSN is zero, the substitution should always be done (this is used to
2856 update DECL_RTL).
2857 These changes are taken out by try_constants if the result is not valid.
2859 Note that we are more concerned with determining when the result of a SET
2860 is a constant, for further propagation, than actually inserting constants
2861 into insns; cse will do the latter task better.
2863 This function is also used to adjust address of items previously addressed
2864 via the virtual stack variable or virtual incoming arguments registers. */
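/* A hedged example of that second use (register numbers and offsets
   are made up): if pseudo 70 was given the constant equivalent
   (plus (reg <fp>) (const_int -16)) when the frame area was set up,
   then passing (mem (reg 70)) in here with INSN == 0 rewrites it
   unconditionally to (mem (plus (reg <fp>) (const_int -16))), which
   is the form the debugging output wants.  */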
2866 static void
2867 subst_constants (loc, insn, map)
2868 rtx *loc;
2869 rtx insn;
2870 struct inline_remap *map;
2872 rtx x = *loc;
2873 register int i;
2874 register enum rtx_code code;
2875 register char *format_ptr;
2876 int num_changes = num_validated_changes ();
2877 rtx new = 0;
2878 enum machine_mode op0_mode;
2880 code = GET_CODE (x);
2882 switch (code)
2884 case PC:
2885 case CONST_INT:
2886 case CONST_DOUBLE:
2887 case SYMBOL_REF:
2888 case CONST:
2889 case LABEL_REF:
2890 case ADDRESS:
2891 return;
2893 #ifdef HAVE_cc0
2894 case CC0:
2895 validate_change (insn, loc, map->last_cc0_value, 1);
2896 return;
2897 #endif
2899 case USE:
2900 case CLOBBER:
2901 /* The only thing we can do with a USE or CLOBBER is possibly do
2902 some substitutions in a MEM within it. */
2903 if (GET_CODE (XEXP (x, 0)) == MEM)
2904 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2905 return;
2907 case REG:
2908 /* Substitute for parms and known constants. Don't replace
2909 hard regs used as user variables with constants. */
2911 int regno = REGNO (x);
2913 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2914 && regno < map->const_equiv_map_size
2915 && map->const_equiv_map[regno] != 0
2916 && map->const_age_map[regno] >= map->const_age)
2917 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2918 return;
2921 case SUBREG:
2922 /* SUBREG applied to something other than a reg
2923 should be treated as ordinary, since that must
2924 be a special hack and we don't know how to treat it specially.
2925 Consider for example mulsidi3 in m68k.md.
2926 Ordinary SUBREG of a REG needs this special treatment. */
2927 if (GET_CODE (SUBREG_REG (x)) == REG)
2929 rtx inner = SUBREG_REG (x);
2930 rtx new = 0;
2932 /* We can't call subst_constants on &SUBREG_REG (x) because any
2933 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2934 see what is inside, try to form the new SUBREG and see if that is
2935 valid. We handle two cases: extracting a full word in an
2936 integral mode and extracting the low part. */
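	  /* For example (assuming 32-bit words): given
	     (subreg:SI (reg:DI 70) 1) with pseudo 70 known equivalent
	     to a CONST_DOUBLE, operand_subword can hand back the high
	     word as a CONST_INT; for word 0, gen_lowpart_common below
	     does the same for the low part.  */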
2937 subst_constants (&inner, NULL_RTX, map);
2939 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2940 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2941 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2942 new = operand_subword (inner, SUBREG_WORD (x), 0,
2943 GET_MODE (SUBREG_REG (x)));
2945 cancel_changes (num_changes);
2946 if (new == 0 && subreg_lowpart_p (x))
2947 new = gen_lowpart_common (GET_MODE (x), inner);
2949 if (new)
2950 validate_change (insn, loc, new, 1);
2952 return;
2954 break;
2956 case MEM:
2957 subst_constants (&XEXP (x, 0), insn, map);
2959 /* If a memory address got spoiled, change it back. */
2960 if (insn != 0 && num_validated_changes () != num_changes
2961 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2962 cancel_changes (num_changes);
2963 return;
2965 case SET:
2967 /* Substitute constants in our source, and in any arguments to a
2968 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2969 itself. */
2970 rtx *dest_loc = &SET_DEST (x);
2971 rtx dest = *dest_loc;
2972 rtx src, tem;
2974 subst_constants (&SET_SRC (x), insn, map);
2975 src = SET_SRC (x);
2977 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2978 || GET_CODE (*dest_loc) == SUBREG
2979 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2981 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2983 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2984 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2986 dest_loc = &XEXP (*dest_loc, 0);
2989 /* Do substitute in the address of a destination in memory. */
2990 if (GET_CODE (*dest_loc) == MEM)
2991 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2993 /* Check for the case where DEST is a SUBREG, both it and the underlying
2994 register are no wider than one word, and the SUBREG has the wider mode.
2995 In that case, we are really setting the underlying register to the
2996 source converted to the mode of DEST. So indicate that. */
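	/* E.g. (modes and values are illustrative): for
	   (set (subreg:SI (reg:HI 70) 0) (const_int 258)) we treat the
	   store as setting pseudo 70 itself, with the source narrowed
	   to HImode, and record that equivalence below.  */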
2997 if (GET_CODE (dest) == SUBREG
2998 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2999 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
3000 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
3001 <= GET_MODE_SIZE (GET_MODE (dest)))
3002 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
3003 src)))
3004 src = tem, dest = SUBREG_REG (dest);
3006 /* If storing a recognizable value save it for later recording. */
3007 if ((map->num_sets < MAX_RECOG_OPERANDS)
3008 && (CONSTANT_P (src)
3009 || (GET_CODE (src) == REG
3010 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
3011 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
3012 || (GET_CODE (src) == PLUS
3013 && GET_CODE (XEXP (src, 0)) == REG
3014 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
3015 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
3016 && CONSTANT_P (XEXP (src, 1)))
3017 || GET_CODE (src) == COMPARE
3018 #ifdef HAVE_cc0
3019 || dest == cc0_rtx
3020 #endif
3021 || (dest == pc_rtx
3022 && (src == pc_rtx || GET_CODE (src) == RETURN
3023 || GET_CODE (src) == LABEL_REF))))
3025 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
3026 it will cause us to save the COMPARE with any constants
3027 substituted, which is what we want for later. */
3028 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
3029 map->equiv_sets[map->num_sets++].dest = dest;
3032 return;
3034 default:
3035 break;
3038 format_ptr = GET_RTX_FORMAT (code);
3040 /* If the first operand is an expression, save its mode for later. */
3041 if (*format_ptr == 'e')
3042 op0_mode = GET_MODE (XEXP (x, 0));
3044 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3046 switch (*format_ptr++)
3048 case '0':
3049 break;
3051 case 'e':
3052 if (XEXP (x, i))
3053 subst_constants (&XEXP (x, i), insn, map);
3054 break;
3056 case 'u':
3057 case 'i':
3058 case 's':
3059 case 'w':
3060 break;
3062 case 'E':
3063 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
3065 int j;
3066 for (j = 0; j < XVECLEN (x, i); j++)
3067 subst_constants (&XVECEXP (x, i, j), insn, map);
3069 break;
3071 default:
3072 abort ();
3076 /* If this is a commutative operation, move a constant to the second
3077 operand unless the second operand is already a CONST_INT. */
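  /* E.g. (plus:SI (const_int 4) (reg:SI 70)) becomes
     (plus:SI (reg:SI 70) (const_int 4)), the canonical order.  */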
3078 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
3079 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3081 rtx tem = XEXP (x, 0);
3082 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3083 validate_change (insn, &XEXP (x, 1), tem, 1);
3086 /* Simplify the expression in case we put in some constants. */
3087 switch (GET_RTX_CLASS (code))
3089 case '1':
3090 new = simplify_unary_operation (code, GET_MODE (x),
3091 XEXP (x, 0), op0_mode);
3092 break;
3094 case '<':
3096 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
3097 if (op_mode == VOIDmode)
3098 op_mode = GET_MODE (XEXP (x, 1));
3099 new = simplify_relational_operation (code, op_mode,
3100 XEXP (x, 0), XEXP (x, 1));
3101 #ifdef FLOAT_STORE_FLAG_VALUE
3102 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3103 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3104 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
3105 GET_MODE (x)));
3106 #endif
3107 break;
3110 case '2':
3111 case 'c':
3112 new = simplify_binary_operation (code, GET_MODE (x),
3113 XEXP (x, 0), XEXP (x, 1));
3114 break;
3116 case 'b':
3117 case '3':
3118 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
3119 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
3120 break;
3123 if (new)
3124 validate_change (insn, loc, new, 1);
3127 /* Show that registers modified no longer contain known constants. We are
3128 called from note_stores with parts of the new insn. */
3130 void
3131 mark_stores (dest, x)
3132 rtx dest;
3133 rtx x ATTRIBUTE_UNUSED;
3135 int regno = -1;
3136 enum machine_mode mode;
3138 /* DEST is always the innermost thing set, except in the case of
3139 SUBREGs of hard registers. */
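  /* For example (hard regno and mode are assumed): a store through
     (subreg:SI (reg:DI 3) 1) is treated as starting at hard register
     4, and every hard register covered by the DImode store (4 and 5,
     if DImode needs two words here) is cleared from the global
     constant-equivalence table below.  */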
3141 if (GET_CODE (dest) == REG)
3142 regno = REGNO (dest), mode = GET_MODE (dest);
3143 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
3145 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
3146 mode = GET_MODE (SUBREG_REG (dest));
3149 if (regno >= 0)
3151 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
3152 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
3153 int i;
3155 /* Ignore virtual stack var or virtual arg register since those
3156 are handled separately. */
3157 if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
3158 && regno != VIRTUAL_STACK_VARS_REGNUM)
3159 for (i = regno; i <= last_reg; i++)
3160 if (i < global_const_equiv_map_size)
3161 global_const_equiv_map[i] = 0;
3165 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
3166 pointed to by PX, they represent constants in the constant pool.
3167 Replace these with a new memory reference obtained from force_const_mem.
3168 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
3169 address of a constant pool entry. Replace them with the address of
3170 a new constant pool entry obtained from force_const_mem. */
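/* Sketch (the pool contents are hypothetical): a flagged
   (const:SI (plus (symbol_ref "x") (const_int 4))) that stood for a
   constant pool entry in the saved rtl is replaced here by a fresh
   (mem:SI (symbol_ref ...)) referring to an entry in this function's
   own constant pool.  */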
3172 static void
3173 restore_constants (px)
3174 rtx *px;
3176 rtx x = *px;
3177 int i, j;
3178 char *fmt;
3180 if (x == 0)
3181 return;
3183 if (GET_CODE (x) == CONST_DOUBLE)
3185 /* We have to make a new CONST_DOUBLE to ensure that we account for
3186 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
3187 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3189 REAL_VALUE_TYPE d;
3191 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
3192 *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
3194 else
3195 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
3196 VOIDmode);
3199 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
3201 restore_constants (&XEXP (x, 0));
3202 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
3204 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
3206 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
3207 rtx new = XEXP (SUBREG_REG (x), 0);
3209 restore_constants (&new);
3210 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
3211 PUT_MODE (new, GET_MODE (x));
3212 *px = validize_mem (new);
3214 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
3216 rtx new = XEXP (force_const_mem (GET_MODE (XEXP (x, 0)),
3217 XEXP (XEXP (x, 0), 0)),
3218 0);
3220 #ifdef POINTERS_EXTEND_UNSIGNED
3221 if (GET_MODE (new) != GET_MODE (x))
3222 new = convert_memory_address (GET_MODE (x), new);
3223 #endif
3225 *px = new;
3227 else
3229 fmt = GET_RTX_FORMAT (GET_CODE (x));
3230 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
3232 switch (*fmt++)
3234 case 'E':
3235 for (j = 0; j < XVECLEN (x, i); j++)
3236 restore_constants (&XVECEXP (x, i, j));
3237 break;
3239 case 'e':
3240 restore_constants (&XEXP (x, i));
3241 break;
3247 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
3248 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
3249 that it points to the node itself, thus indicating that the node is its
3250 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
3251 the given node is NULL, recursively descend the decl/block tree which
3252 it is the root of, and for each other ..._DECL or BLOCK node contained
3253 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
3254 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
3255 values to point to themselves. */
3257 static void
3258 set_block_origin_self (stmt)
3259 register tree stmt;
3261 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
3263 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
3266 register tree local_decl;
3268 for (local_decl = BLOCK_VARS (stmt);
3269 local_decl != NULL_TREE;
3270 local_decl = TREE_CHAIN (local_decl))
3271 set_decl_origin_self (local_decl); /* Potential recursion. */
3275 register tree subblock;
3277 for (subblock = BLOCK_SUBBLOCKS (stmt);
3278 subblock != NULL_TREE;
3279 subblock = BLOCK_CHAIN (subblock))
3280 set_block_origin_self (subblock); /* Recurse. */
3285 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
3286 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
3287 node so that it points to the node itself, thus indicating that the
3288 node represents its own (abstract) origin. Additionally, if the
3289 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
3290 the decl/block tree of which the given node is the root, and for
3291 each other ..._DECL or BLOCK node contained therein whose
3292 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
3293 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
3294 point to themselves. */
3296 static void
3297 set_decl_origin_self (decl)
3298 register tree decl;
3300 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
3302 DECL_ABSTRACT_ORIGIN (decl) = decl;
3303 if (TREE_CODE (decl) == FUNCTION_DECL)
3305 register tree arg;
3307 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3308 DECL_ABSTRACT_ORIGIN (arg) = arg;
3309 if (DECL_INITIAL (decl) != NULL_TREE
3310 && DECL_INITIAL (decl) != error_mark_node)
3311 set_block_origin_self (DECL_INITIAL (decl));
3316 /* Given a pointer to some BLOCK node, and a boolean value to set the
3317 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
3318 the given block, and for all local decls and all local sub-blocks
3319 (recursively) which are contained therein. */
3321 static void
3322 set_block_abstract_flags (stmt, setting)
3323 register tree stmt;
3324 register int setting;
3326 register tree local_decl;
3327 register tree subblock;
3329 BLOCK_ABSTRACT (stmt) = setting;
3331 for (local_decl = BLOCK_VARS (stmt);
3332 local_decl != NULL_TREE;
3333 local_decl = TREE_CHAIN (local_decl))
3334 set_decl_abstract_flags (local_decl, setting);
3336 for (subblock = BLOCK_SUBBLOCKS (stmt);
3337 subblock != NULL_TREE;
3338 subblock = BLOCK_CHAIN (subblock))
3339 set_block_abstract_flags (subblock, setting);
3342 /* Given a pointer to some ..._DECL node, and a boolean value to set the
3343 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
3344 given decl, and (in the case where the decl is a FUNCTION_DECL) also
3345 set the abstract flags for all of the parameters, local vars, local
3346 blocks and sub-blocks (recursively) to the same setting. */
3348 void
3349 set_decl_abstract_flags (decl, setting)
3350 register tree decl;
3351 register int setting;
3353 DECL_ABSTRACT (decl) = setting;
3354 if (TREE_CODE (decl) == FUNCTION_DECL)
3356 register tree arg;
3358 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3359 DECL_ABSTRACT (arg) = setting;
3360 if (DECL_INITIAL (decl) != NULL_TREE
3361 && DECL_INITIAL (decl) != error_mark_node)
3362 set_block_abstract_flags (DECL_INITIAL (decl), setting);
3366 /* Output the assembly language code for the function FNDECL
3367 from its DECL_SAVED_INSNS. Used for inline functions that are output
3368 at end of compilation instead of where they came in the source. */
3370 void
3371 output_inline_function (fndecl)
3372 tree fndecl;
3374 rtx head;
3375 rtx last;
3377 /* Things we allocate from here on are part of this function, not
3378 permanent. */
3379 temporary_allocation ();
3381 head = DECL_SAVED_INSNS (fndecl);
3382 current_function_decl = fndecl;
3384 /* This call is only used to initialize global variables. */
3385 init_function_start (fndecl, "lossage", 1);
3387 /* Redo parameter determinations in case the FUNCTION_...
3388 macros took machine-specific actions that need to be redone. */
3389 assign_parms (fndecl, 1);
3391 /* Set stack frame size. */
3392 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
3394 /* The first is a bit of a lie (the array may be larger), but it doesn't
3395 matter too much and it isn't worth saving the actual bound. */
3396 reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
3397 regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
3398 regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
3399 regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
3400 max_parm_reg = MAX_PARMREG (head);
3401 parm_reg_stack_loc = (rtx *) PARMREG_STACK_LOC (head);
3403 stack_slot_list = STACK_SLOT_LIST (head);
3404 forced_labels = FORCED_LABELS (head);
3406 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
3407 current_function_calls_alloca = 1;
3409 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
3410 current_function_calls_setjmp = 1;
3412 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
3413 current_function_calls_longjmp = 1;
3415 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
3416 current_function_returns_struct = 1;
3418 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
3419 current_function_returns_pcc_struct = 1;
3421 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
3422 current_function_needs_context = 1;
3424 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
3425 current_function_has_nonlocal_label = 1;
3427 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
3428 current_function_returns_pointer = 1;
3430 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
3431 current_function_uses_const_pool = 1;
3433 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
3434 current_function_uses_pic_offset_table = 1;
3436 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
3437 current_function_pops_args = POPS_ARGS (head);
3439 /* This is the only thing the expand_function_end call that used to be here
3440 actually does, and that call can cause problems. */
3441 immediate_size_expand--;
3443 /* Find last insn and rebuild the constant pool. */
3444 for (last = FIRST_PARM_INSN (head);
3445 NEXT_INSN (last); last = NEXT_INSN (last))
3447 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
3449 restore_constants (&PATTERN (last));
3450 restore_constants (&REG_NOTES (last));
3454 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
3455 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
3457 /* We must have already output DWARF debugging information for the
3458 original (abstract) inline function declaration/definition, so
3459 we want to make sure that the debugging information we generate
3460 for this special instance of the inline function refers back to
3461 the information we already generated. To make sure that happens,
3462 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
3463 node (and for all of the local ..._DECL nodes which are its children)
3464 so that they all point to themselves. */
3466 set_decl_origin_self (fndecl);
3468 /* We're not deferring this any longer. */
3469 DECL_DEFER_OUTPUT (fndecl) = 0;
3471 /* We can't inline this anymore. */
3472 DECL_INLINE (fndecl) = 0;
3474 /* Compile this function all the way down to assembly code. */
3475 rest_of_compilation (fndecl);
3477 current_function_decl = 0;