/* Procedure integration for GNU CC.
   Copyright (C) 1988, 1991, 1993, 1994 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "insn-config.h"
#include "insn-flags.h"
#include "integrate.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
extern struct obstack *function_maybepermanent_obstack;

extern tree pushdecl ();
extern tree poplevel ();
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
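/* For instance, CEIL_ROUND (13, 8) is (13 + 7) & ~7, i.e. 16: VALUE is
   rounded up to the next multiple of the (power of two) ALIGN.  */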
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
#define INTEGRATE_THRESHOLD(DECL) \
  (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
#endif
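/* As a worked example, a function with two parameters gets a default
   threshold of 8 * (8 + 2) == 80 insns; anything much larger is
   rejected below as "function too large to be inline".  */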
static rtx initialize_for_inline PROTO((tree, int, int, int, int));
static void finish_inline PROTO((tree, rtx));
static void adjust_copied_decl_tree PROTO((tree));
static tree copy_decl_list PROTO((tree));
static tree copy_decl_tree PROTO((tree));
static void copy_decl_rtls PROTO((tree));
static void save_constants PROTO((rtx *));
static void note_modified_parmregs PROTO((rtx, rtx));
static rtx copy_for_inline PROTO((rtx));
static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
static void subst_constants PROTO((rtx *, rtx, struct inline_remap *));
static void restore_constants PROTO((rtx *));
static void set_block_origin_self PROTO((tree));
static void set_decl_origin_self PROTO((tree));
static void set_block_abstract_flags PROTO((tree, int));

void set_decl_abstract_flags PROTO((tree, int));
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning message with a single %s
   for the function's name.  */

char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  int max_insns = INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;
  /* No inlines with varargs.  `grokdeclarator' gives a warning
     message about that if `inline' is specified.  This code
     is put in to catch the volunteers.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return "varargs function cannot be inline";
  if (current_function_calls_alloca)
    return "function using alloca cannot be inline";

  if (current_function_contains_functions)
    return "function with nested functions cannot be inline";
  /* If it's not even close, don't even look.  */
  if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
    return "function too large to be inline";
#if 0
  /* Large stacks are OK now that inlined functions can share them.  */
  /* Don't inline functions with large stack usage,
     since they can make other recursive functions burn up stack.  */
  if (!DECL_INLINE (fndecl) && get_frame_size () > 100)
    return "function stack frame for inlining";
#endif
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
        TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
        return "no prototype, and parameter address used; cannot be inline";
    }
  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return "function with varying-size return value cannot be inline";
  /* Cannot inline a function with a varying size argument.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
      return "function with varying-size parameter cannot be inline";
  if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
           insn && ninsns < max_insns;
           insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          ninsns++;

      if (ninsns >= max_insns)
        return "function too large to be inline";
    }
  /* We cannot inline this function if forced_labels is non-zero.  This
     implies that a label in this function was used as an initializer.
     Because labels can not be duplicated, all labels in the function
     will be renamed when it is inlined.  However, there is no way to find
     and fix all variables initialized with addresses of labels in this
     function, hence inlining is impossible.  */

  if (forced_labels)
    return "function with label addresses used in initializers cannot inline";
  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return "function with nonlocal goto cannot be inline";

  return 0;
}
/* Variables used within save_for_inline.  */

/* Mapping from old pseudo-register to new pseudo-registers.
   The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *reg_map;
/* Mapping from old code-labels to new code-labels.
   The first element of this map is label_map[min_labelno].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *label_map;

/* Mapping from old insn uid's to copied insns.
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *insn_map;

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* Keep track of first pseudo-register beyond those that are parms.  */
static int max_parm_reg;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise, this is the copied constraints vector.  */
static rtvec copy_asm_constraints_vector;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
/* Subroutine for `save_for_inline{copying,nocopy}'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtx
initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
     tree fndecl;
     int min_labelno;
     int max_labelno;
     int max_reg;
     int copy;
{
  int function_flags, i;
  rtvec arg_vector;
  tree parms;
  /* Compute the values of any flags we must restore when inlining this.  */

  function_flags
    = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
       + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
       + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
       + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
       + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
       + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
       + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
       + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
       + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
       + current_function_uses_pic_offset_table
         * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero (parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      if (GET_CODE (p) == MEM && copy)
        {
          /* Copy the rtl so that modifications of the addresses
             later in compilation won't affect this arg_vector.
             Virtual register instantiation can screw the address
             of the rtl.  */
          rtx new = copy_rtx (p);

          /* Don't leave the old copy anywhere in this decl.  */
          if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
              || (GET_CODE (DECL_RTL (parms)) == MEM
                  && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
                  && (XEXP (DECL_RTL (parms), 0)
                      == XEXP (DECL_INCOMING_RTL (parms), 0))))
            DECL_INCOMING_RTL (parms) = new;
          DECL_RTL (parms) = new;
        }
      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
        parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
        {
          rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
          rtx pimag = gen_imagpart (GET_MODE (preal), p);

          if (GET_CODE (preal) == REG)
            parmdecl_map[REGNO (preal)] = parms;
          if (GET_CODE (pimag) == REG)
            parmdecl_map[REGNO (pimag)] = parms;
        }
      /* This flag is cleared later
         if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  /* Assume we start out in the insns that set up the parameters.  */
  in_nonparm_insns = 0;
  /* The list of DECL_SAVED_INSNS starts off with a header which
     contains the following information:

     the first insn of the function (not including the insns that copy
     parameters into registers).
     the first parameter insn of the function,
     the first label used by that function,
     the last label used by that function,
     the highest register number used for parameters,
     the total number of registers used,
     the size of the incoming stack area for parameters,
     the number of bytes popped on return,
     the stack slot list,
     some flags that are used to restore compiler globals,
     the value of current_function_outgoing_args_size,
     the original argument vector,
     and the original DECL_INITIAL.  */
  return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
                                max_parm_reg, max_reg,
                                current_function_args_size,
                                current_function_pops_args,
                                stack_slot_list, function_flags,
                                current_function_outgoing_args_size,
                                arg_vector, (rtx) DECL_INITIAL (fndecl));
}
/* Subroutine for `save_for_inline{copying,nocopy}'.  Finishes up the
   things that must be done to make FNDECL expandable as an inline function.
   HEAD contains the chain of insns to which FNDECL will expand.  */

static void
finish_inline (fndecl, head)
     tree fndecl;
     rtx head;
{
  NEXT_INSN (head) = get_first_nonparm_insn ();
  FIRST_PARM_INSN (head) = get_insns ();
  DECL_SAVED_INSNS (fndecl) = head;
  DECL_FRAME_SIZE (fndecl) = get_frame_size ();
  DECL_INLINE (fndecl) = 1;
}
/* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
   they all point to the new (copied) rtxs.  */

static void
adjust_copied_decl_tree (block)
     register tree block;
{
  register tree subblock;
  register rtx original_end;

  original_end = BLOCK_END_NOTE (block);
  if (original_end)
    {
      BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
      NOTE_SOURCE_FILE (original_end) = 0;
    }

  /* Process all subblocks.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = TREE_CHAIN (subblock))
    adjust_copied_decl_tree (subblock);
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This function is called when we are going to immediately compile
   the insns for FNDECL.  The insns in maybepermanent_obstack cannot be
   modified by the compilation process, so we copy all of them to
   new storage and consider the new insns to be the insn chain to be
   compiled.  Our caller (rest_of_compilation) saves the original
   DECL_INITIAL and DECL_ARGUMENTS; here we copy them.  */

void
save_for_inline_copying (fndecl)
     tree fndecl;
{
  rtx first_insn, last_insn, insn;
  rtx head, copy;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;
  rtx first_nonparm_insn;
  /* Make and emit a return-label if we have not already done so.
     Do this before recording the bounds on label numbers.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  /* Get some bounds on the labels and registers used.  */

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  max_reg = max_reg_num ();
  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  max_parm_reg = max_parm_reg_num ();
  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
  if (current_function_uses_const_pool)
    {
      /* Replace any constant pool references with the actual constant.  We
         will put the constants back in the copy made below.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          {
            save_constants (&PATTERN (insn));
            if (REG_NOTES (insn))
              save_constants (&REG_NOTES (insn));
          }

      /* Clear out the constant pool so that we can recreate it with the
         copied constants below.  */
      init_const_rtx_hash_table ();
      clear_const_double_mem ();
    }
  max_uid = INSN_UID (head);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();
  /* Copy the chain insns of this function.
     Install the copied chain as the insns of this function,
     for continued compilation;
     the original chain is recorded as the DECL_SAVED_INSNS
     for inlining future calls.  */

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
  first_insn = rtx_alloc (NOTE);
  NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
  NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
  INSN_UID (first_insn) = INSN_UID (insn);
  PREV_INSN (first_insn) = NULL;
  NEXT_INSN (first_insn) = NULL;
  last_insn = first_insn;
  /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
     Make these new rtx's now, and install them in regno_reg_rtx, so they
     will be the official pseudo-reg rtx's for the rest of compilation.  */

  reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx));

  len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
  for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
    reg_map[i] = (rtx) obstack_copy (function_maybepermanent_obstack,
                                     regno_reg_rtx[i], len);

  bcopy (reg_map + LAST_VIRTUAL_REGISTER + 1,
         regno_reg_rtx + LAST_VIRTUAL_REGISTER + 1,
         (max_reg - (LAST_VIRTUAL_REGISTER + 1)) * sizeof (rtx));
  /* Likewise each label rtx must have a unique rtx as its copy.  */

  label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
  label_map -= min_labelno;

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();

  /* Record the mapping of old insns to copied insns.  */

  insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
  bzero (insn_map, max_uid * sizeof (rtx));

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();
  /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
     (the former occurs when a variable has its address taken)
     since these may be shared and can be changed by virtual
     register instantiation.  DECL_RTL values for our arguments
     have already been copied by initialize_for_inline.  */
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
    if (GET_CODE (regno_reg_rtx[i]) == MEM)
      XEXP (regno_reg_rtx[i], 0)
        = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
  /* Copy the tree of subblocks of the function, and the decls in them.
     We will use the copy for compiling this function, then restore the original
     subblocks and decls for use when inlining this function.

     Several parts of the compiler modify BLOCK trees.  In particular,
     instantiate_virtual_regs will instantiate any virtual regs
     mentioned in the DECL_RTLs of the decls, and loop
     unrolling will replicate any BLOCK trees inside an unrolled loop.

     The modified subblocks or DECL_RTLs would be incorrect for the original rtl
     which we will use for inlining.  The rtl might even contain pseudoregs
     whose space has been freed.  */

  DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
  DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));

  /* Now copy each DECL_RTL which is a MEM,
     so it is safe to modify their addresses.  */
  copy_decl_rtls (DECL_INITIAL (fndecl));

  /* The fndecl node acts as its own progenitor, so mark it as such.  */
  DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
  /* Now copy the chain of insns.  Do this in two passes: the first pass
     copies each insn itself and its body; the second pass copies the
     REG_NOTES.  Two passes are needed because a REG_NOTE may have a
     forward pointer to another insn.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      orig_asm_operands_vector = 0;

      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      switch (GET_CODE (insn))
        {
        case NOTE:
          /* No need to keep these.  */
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
            continue;

          copy = rtx_alloc (NOTE);
          NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
          if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
            NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
          else
            {
              NOTE_SOURCE_FILE (insn) = (char *) copy;
              NOTE_SOURCE_FILE (copy) = 0;
            }
          break;
        case INSN:
        case JUMP_INSN:
        case CALL_INSN:
          copy = rtx_alloc (GET_CODE (insn));

          if (GET_CODE (insn) == CALL_INSN)
            CALL_INSN_FUNCTION_USAGE (copy) =
              copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));

          PATTERN (copy) = copy_for_inline (PATTERN (insn));
          INSN_CODE (copy) = -1;
          LOG_LINKS (copy) = NULL_RTX;
          RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
          break;

        case CODE_LABEL:
          copy = label_map[CODE_LABEL_NUMBER (insn)];
          LABEL_NAME (copy) = LABEL_NAME (insn);
          break;

        case BARRIER:
          copy = rtx_alloc (BARRIER);
          break;

        default:
          abort ();
        }
      INSN_UID (copy) = INSN_UID (insn);
      insn_map[INSN_UID (insn)] = copy;
      NEXT_INSN (last_insn) = copy;
      PREV_INSN (copy) = last_insn;
      last_insn = copy;
    }

  adjust_copied_decl_tree (DECL_INITIAL (fndecl));
  /* Now copy the REG_NOTES.  */
  for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
        && insn_map[INSN_UID (insn)])
      REG_NOTES (insn_map[INSN_UID (insn)])
        = copy_for_inline (REG_NOTES (insn));

  NEXT_INSN (last_insn) = NULL;

  finish_inline (fndecl, head);

  set_new_first_and_last_insn (first_insn, last_insn);
}
/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  While copying,
   for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
   set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
   point to the corresponding (abstract) original node.  */

static tree
copy_decl_list (list)
     tree list;
{
  tree head;
  register tree prev, next;

  if (list == 0)
    return 0;
  head = prev = copy_node (list);
  if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
    DECL_ABSTRACT_ORIGIN (head) = list;
  next = TREE_CHAIN (list);
  while (next)
    {
      register tree copy;

      copy = copy_node (next);
      if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
        DECL_ABSTRACT_ORIGIN (copy) = next;
      TREE_CHAIN (prev) = copy;
      prev = copy;
      next = TREE_CHAIN (next);
    }

  return head;
}
/* Make a copy of the entire tree of blocks BLOCK, and return it.  */

static tree
copy_decl_tree (block)
     tree block;
{
  tree t, vars, subblocks;

  vars = copy_decl_list (BLOCK_VARS (block));
  subblocks = 0;

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree copy = copy_decl_tree (t);
      TREE_CHAIN (copy) = subblocks;
      subblocks = copy;
    }
  t = copy_node (block);
  BLOCK_VARS (t) = vars;
  BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
  /* If the BLOCK being cloned is already marked as having been instantiated
     from something else, then leave that `origin' marking alone.  Elsewise,
     mark the clone as having originated from the BLOCK we are cloning.  */
  if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
    BLOCK_ABSTRACT_ORIGIN (t) = block;
  return t;
}
/* Copy DECL_RTLs in all decls in the given BLOCK node.  */

static void
copy_decl_rtls (block)
     tree block;
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
      DECL_RTL (t) = copy_for_inline (DECL_RTL (t));

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    copy_decl_rtls (t);
}
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   ??? Actually, we do not verify that FNDECL is not inline expanded
   by other functions which must also be written down at the end
   of compilation.  We could set flag_no_inline to nonzero when
   the time comes to write down such functions.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtx head;
  rtx first_nonparm_insn;
  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  max_parm_reg = max_parm_reg_num ();
  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  head = initialize_for_inline (fndecl, get_first_label_num (),
                                max_label_num (), max_reg_num (), 0);
  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();
  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          if (current_function_uses_const_pool)
            {
              /* Replace any constant pool references with the actual constant.
                 We will put the constant back if we need to write the
                 function out after all.  */
              save_constants (&PATTERN (insn));
              if (REG_NOTES (insn))
                save_constants (&REG_NOTES (insn));
            }

          /* Record what interesting things happen to our parameters.  */
          note_stores (PATTERN (insn), note_modified_parmregs);
        }
    }
  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  finish_inline (fndecl, head);
}
/* Given PX, a pointer into an insn, search for references to the constant
   pool.  Replace each with a CONST that has the mode of the original
   constant, contains the constant, and has RTX_INTEGRATED_P set.
   Similarly, constant pool addresses not enclosed in a MEM are replaced
   with an ADDRESS rtx which also gives the constant, mode, and has
   RTX_INTEGRATED_P set.  */

static void
save_constants (px)
     rtx *px;
{
  rtx x;
  int i, j;

 again:
  x = *px;

  /* If this is a CONST_DOUBLE, don't try to fix things up in
     CONST_DOUBLE_MEM, because this is an infinite recursion.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return;
  else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    {
      enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
      rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
      RTX_INTEGRATED_P (new) = 1;

      /* If the MEM was in a different mode than the constant (perhaps we
         were only looking at the low-order part), surround it with a
         SUBREG so we can save both modes.  */

      if (GET_MODE (x) != const_mode)
        {
          new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
          RTX_INTEGRATED_P (new) = 1;
        }

      *px = new;
      save_constants (&XEXP (*px, 0));
    }
  else if (GET_CODE (x) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (x))
    {
      *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
      save_constants (&XEXP (*px, 0));
      RTX_INTEGRATED_P (*px) = 1;
    }
  else
    {
      char *fmt = GET_RTX_FORMAT (GET_CODE (x));
      int len = GET_RTX_LENGTH (GET_CODE (x));

      for (i = len - 1; i >= 0; i--)
        {
          switch (fmt[i])
            {
            case 'E':
              for (j = 0; j < XVECLEN (x, i); j++)
                save_constants (&XVECEXP (x, i, j));
              break;

            case 'e':
              if (XEXP (x, i) == 0)
                continue;
              if (i == 0)
                {
                  /* Hack tail-recursion here.  */
                  px = &XEXP (x, 0);
                  goto again;
                }
              save_constants (&XEXP (x, i));
              break;
            }
        }
    }
}
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.  The original rtl insns
   will be saved for inlining; this is used to make a copy
   which is used to finish compiling the inline function itself.

   If we find a "saved" constant pool entry, one which was replaced with
   the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.

   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */

static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case SYMBOL_REF:
    case PC:
    case CC0:
      return x;
    case CONST_DOUBLE:
      /* We have to make a new CONST_DOUBLE to ensure that we account for
         it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, x);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
                                   GET_MODE (x));
    case CONST:
      /* Get constant pool entry for constant in the pool.  */
      if (RTX_INTEGRATED_P (x))
        return validize_mem (force_const_mem (GET_MODE (x),
                                              copy_for_inline (XEXP (x, 0))));
      break;
    case SUBREG:
      /* Get constant pool entry, but access in different mode.  */
      if (RTX_INTEGRATED_P (x))
        {
          rtx new
            = force_const_mem (GET_MODE (SUBREG_REG (x)),
                               copy_for_inline (XEXP (SUBREG_REG (x), 0)));

          PUT_MODE (new, GET_MODE (x));
          return validize_mem (new);
        }
      break;
    case ADDRESS:
      /* If this is not a reference into the constant pool, something
         is wrong.  Otherwise return the constant pool address.  */
      if (! RTX_INTEGRATED_P (x))
        abort ();

      return XEXP (force_const_mem (GET_MODE (x),
                                    copy_for_inline (XEXP (x, 0))), 0);
    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
         We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
        {
          x = rtx_alloc (ASM_OPERANDS);
          x->volatil = orig->volatil;
          XSTR (x, 0) = XSTR (orig, 0);
          XSTR (x, 1) = XSTR (orig, 1);
          XINT (x, 2) = XINT (orig, 2);
          XVEC (x, 3) = copy_asm_operands_vector;
          XVEC (x, 4) = copy_asm_constraints_vector;
          XSTR (x, 5) = XSTR (orig, 5);
          XINT (x, 6) = XINT (orig, 6);
          return x;
        }
      break;
    case MEM:
      /* A MEM is usually allowed to be shared if its address is constant
         or is a constant plus one of the special registers.

         We do not allow sharing of addresses that are either a special
         register or the sum of a constant and a special register because
         it is possible for unshare_all_rtl to copy the address, into memory
         that won't be saved.  Although the MEM can safely be shared, and
         won't be copied there, the address itself cannot be shared, and may
         need to be copied.

         There are also two exceptions with constants: The first is if the
         constant is a LABEL_REF or the sum of the LABEL_REF
         and an integer.  This case can happen if we have an inline
         function that supplies a constant operand to the call of another
         inline function that uses it in a switch statement.  In this case,
         we will be replacing the LABEL_REF, so we have to replace this MEM
         as well.

         The second case is if we have a (const (plus (address ..) ...)).
         In that case we need to put back the address of the constant pool
         entry.  */

      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 0)) != LABEL_REF
          && ! (GET_CODE (XEXP (x, 0)) == CONST
                && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
                    && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
                         == LABEL_REF)
                        || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
                            == ADDRESS)))))
        return x;
      break;
    case LABEL_REF:
      /* If this is a non-local label, just make a new LABEL_REF.
         Otherwise, use the new label as well.  */
      x = gen_rtx (LABEL_REF, GET_MODE (orig),
                   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
                   : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
      LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
      return x;

    case REG:
      if (REGNO (x) > LAST_VIRTUAL_REGISTER)
        return reg_map[REGNO (x)];
      else
        return x;
    case SET:
      /* If a parm that gets modified lives in a pseudo-reg,
         clear its TREE_READONLY to prevent certain optimizations.  */
      {
        rtx dest = SET_DEST (x);

        while (GET_CODE (dest) == STRICT_LOW_PART
               || GET_CODE (dest) == ZERO_EXTRACT
               || GET_CODE (dest) == SUBREG)
          dest = XEXP (dest, 0);

        if (GET_CODE (dest) == REG
            && REGNO (dest) < max_parm_reg
            && REGNO (dest) >= FIRST_PSEUDO_REGISTER
            && parmdecl_map[REGNO (dest)] != 0
            /* The insn to load an arg pseudo from a stack slot
               does not count as modifying it.  */
            && in_nonparm_insns)
          TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      }
      break;
#if 0 /* This is a good idea, but here is the wrong place for it.  */
      /* Arrange that CONST_INTs always appear as the second operand
         if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
         always appear as the first.  */
    case PLUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
          || (XEXP (x, 1) == frame_pointer_rtx
              || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
                  && XEXP (x, 1) == arg_pointer_rtx)))
        {
          rtx t = XEXP (x, 0);
          XEXP (x, 0) = XEXP (x, 1);
          XEXP (x, 1) = t;
        }
      break;
#endif
    }

  /* Replace this rtx with a copy of itself.  */

  x = rtx_alloc (code);
  bcopy (orig, x, (sizeof (*x) - sizeof (x->fld)
                   + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          XEXP (x, i) = copy_for_inline (XEXP (x, i));
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            {
              register int j;

              XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
              for (j = 0; j < XVECLEN (x, i); j++)
                XVECEXP (x, i, j)
                  = copy_for_inline (XVECEXP (x, i, j));
            }
          break;
        }
    }
  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

rtx *global_const_equiv_map;
int global_const_equiv_map_size;
#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
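/* So FIXED_BASE_PLUS_P matches an address of the form

     (plus (reg virtual-stack-vars) (const_int 8))

   i.e. a constant offset from one of the virtual base registers; such a
   value is as stable as a true constant for the const_equiv_map
   recording done below.  */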
/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
                        structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  tree formal, actual, block;
  rtx header = DECL_SAVED_INSNS (fndecl);
  rtx insns = FIRST_FUNCTION_INSN (header);
  rtx parm_insns = FIRST_PARM_INSN (header);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = FIRST_LABELNO (header);
  int max_labelno = LAST_LABELNO (header);
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx temp;
  struct inline_remap *map;
  rtx cc0_insn = 0;
  rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
  rtx static_chain_value = 0;
  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = MAX_REGNUM (header) + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));
  /* Check that the parm types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
        return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
          /* If they are block mode, the types should match exactly.
             They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
             which could happen if the parameter has incomplete type.  */
          || (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal)))
        return (rtx) (HOST_WIDE_INT) -1;
    }
  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
                 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);

  /* Make a fresh binding contour that we can easily remove.  */
  pushlevel (0);
  expand_start_bindings (0);
  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
                            NOTE_LINE_NUMBER (parm_insns));
      if (note)
        RTX_INTEGRATED_P (note) = 1;
    }

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
  arg_trees = (tree *) alloca (nargs * sizeof (tree));
  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
         function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      /* Make sure this formal has some correspondence in the user's code
         before emitting any line notes for it.  */
      if (DECL_SOURCE_LINE (formal))
        {
          rtx note = emit_note (DECL_SOURCE_FILE (formal),
                                DECL_SOURCE_LINE (formal));
          if (note)
            RTX_INTEGRATED_P (note) = 1;
        }

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);
      /* If this is an object passed by invisible reference, we copy the
         object into a stack slot and save its address.  If this will go
         into memory, we do nothing now.  Otherwise, we just expand the
         argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          rtx stack_slot
            = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
                                 int_size_in_bytes (TREE_TYPE (arg)), 1);

          store_expr (arg, stack_slot, 0);

          arg_vals[i] = XEXP (stack_slot, 0);
        }
      else if (GET_CODE (loc) != MEM)
        {
          if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
            /* The mode of LOC and ARG can differ if LOC was a variable
               that had its mode promoted via PROMOTED_MODE.  */
            arg_vals[i] = convert_modes (GET_MODE (loc),
                                         TYPE_MODE (TREE_TYPE (arg)),
                                         expand_expr (arg, NULL_RTX, mode,
                                                      EXPAND_SUM),
                                         TREE_UNSIGNED (TREE_TYPE (formal)));
          else
            arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
        }
      else
        arg_vals[i] = 0;
      if (arg_vals[i] != 0
          && (! TREE_READONLY (formal)
              /* If the parameter is not read-only, copy our argument through
                 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
                 TARGET in any way.  In the inline function, they will likely
                 be two different pseudos, and `safe_from_p' will make all
                 sorts of smart assumptions about their not conflicting.
                 But if ARG_VALS[I] overlaps TARGET, these assumptions are
                 wrong, so put ARG_VALS[I] into a fresh register.  */
              || (target != 0
                  && (GET_CODE (arg_vals[i]) == REG
                      || GET_CODE (arg_vals[i]) == SUBREG
                      || GET_CODE (arg_vals[i]) == MEM)
                  && reg_overlap_mentioned_p (arg_vals[i], target))
              /* ??? We must always copy a SUBREG into a REG, because it might
                 get substituted into an address, and not all ports correctly
                 handle SUBREGs in addresses.  */
              || (GET_CODE (arg_vals[i]) == SUBREG)))
        arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
    }
  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (map->reg_map, max_regno * sizeof (rtx));

  map->label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
  map->label_map -= min_labelno;

  map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
  bzero (map->insn_map, INSN_UID (header) * sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = INSN_UID (header);

  map->integrating = 1;
  /* const_equiv_map maps pseudos in our routine to constants, so it needs to
     be large enough for all our pseudos.  This is the number we are currently
     using plus the number in the called routine, plus 15 for each arg,
     five to compute the virtual frame pointer, and five for the return value.
     This should be enough for most cases.  We do not reference entries
     outside the range of the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  map->const_equiv_map_size
    = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;

  map->const_equiv_map
    = (rtx *) alloca (map->const_equiv_map_size * sizeof (rtx));
  bzero (map->const_equiv_map, map->const_equiv_map_size * sizeof (rtx));

  map->const_age_map
    = (unsigned *) alloca (map->const_equiv_map_size * sizeof (unsigned));
  bzero (map->const_age_map, map->const_equiv_map_size * sizeof (unsigned));
  map->const_age = 0;
  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  */
  map->insns_at_start = get_last_insn ();

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
    current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    static_chain_value = lookup_static_chain (fndecl);
  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes:  In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);
      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          /* This must be an object passed by invisible reference (it could
             also be a variable-sized object, but we forbid inlining functions
             with variable-sized arguments).  COPY is the address of the
             actual value (this computation will cause it to be copied).  We
             map that address for the register, noting the actual address as
             an equivalent in case it can be substituted into the insns.  */

          if (GET_CODE (copy) != REG)
            {
              temp = copy_addr_to_reg (copy);
              if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
                  && REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = copy;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }
              copy = temp;
            }
          map->reg_map[REGNO (XEXP (loc, 0))] = copy;
        }
      else if (GET_CODE (loc) == MEM)
        {
          /* This is the case of a parameter that lives in memory.
             It will live in the block we allocate in the called routine's
             frame that simulates the incoming argument area.  Do nothing
             now; we will call store_expr later.  */
          ;
        }
      else if (GET_CODE (loc) == REG)
        {
          /* This is the good case where the parameter is in a register.
             If it is read-only and our argument is a constant, set up the
             constant equivalence.

             If LOC is REG_USERVAR_P, the usual case, COPY must also have
             that flag set if it is a register.

             Also, don't allow hard registers here; they might not be valid
             when substituted into insns.  */

          if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
              || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
                  && ! REG_USERVAR_P (copy))
              || (GET_CODE (copy) == REG
                  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
            {
              temp = copy_to_mode_reg (GET_MODE (loc), copy);
              REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
              if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
                  && REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = copy;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }
              copy = temp;
            }
          map->reg_map[REGNO (loc)] = copy;
        }
      else if (GET_CODE (loc) == CONCAT)
        {
          /* This is the good case where the parameter is in a
             pair of separate pseudos.
             If it is read-only and our argument is a constant, set up the
             constant equivalence.

             If LOC is REG_USERVAR_P, the usual case, COPY must also have
             that flag set if it is a register.

             Also, don't allow hard registers here; they might not be valid
             when substituted into insns.  */
          rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
          rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

          if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
              || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
                  && ! REG_USERVAR_P (copyreal))
              || (GET_CODE (copyreal) == REG
                  && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
            {
              temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
              REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
              if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
                  && REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = copyreal;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }
              copyreal = temp;
            }
          map->reg_map[REGNO (locreal)] = copyreal;

          if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
              || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
                  && ! REG_USERVAR_P (copyimag))
              || (GET_CODE (copyimag) == REG
                  && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
            {
              temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
              REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
              if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
                  && REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = copyimag;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }
              copyimag = temp;
            }
          map->reg_map[REGNO (locimag)] = copyimag;
        }
      else
        abort ();
    }
  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);
      if (GET_CODE (loc) == MEM
          /* Exclude case handled above.  */
          && ! (GET_CODE (XEXP (loc, 0)) == REG
                && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
        {
          rtx note = emit_note (DECL_SOURCE_FILE (formal),
                                DECL_SOURCE_LINE (formal));
          if (note)
            RTX_INTEGRATED_P (note) = 1;

          /* Compute the address in the area we reserved and store the
             value there.  */
          temp = copy_rtx_and_substitute (loc, map);
          subst_constants (&temp, NULL_RTX, map);
          apply_change_group ();
          if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
            temp = change_address (temp, VOIDmode, XEXP (temp, 0));
          store_expr (arg_trees[i], temp, 0);
        }
    }
  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't have any special handling for
     REG_FUNCTION_RETURN_VALUE_P.  */

  map->inline_target = 0;
  loc = DECL_RTL (DECL_RESULT (fndecl));
  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
        abort ();

      /* Pass the function the address in which to return a structure value.
         Note that a constructor can cause someone to call us with
         STRUCTURE_VALUE_ADDR, but the initialization takes place
         via the first parameter, rather than the struct return address.

         We have two cases:  If the address is a simple register indirect,
         use the mapping mechanism to point that register to our structure
         return address.  Otherwise, store the structure return value into
         the place that it will be referenced from.  */
      if (GET_CODE (XEXP (loc, 0)) == REG)
        {
          temp = force_reg (Pmode, structure_value_addr);
          map->reg_map[REGNO (XEXP (loc, 0))] = temp;
          if ((CONSTANT_P (structure_value_addr)
               || (GET_CODE (structure_value_addr) == PLUS
                   && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
              && REGNO (temp) < map->const_equiv_map_size)
            {
              map->const_equiv_map[REGNO (temp)] = structure_value_addr;
              map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
            }
        }
      else
        {
          temp = copy_rtx_and_substitute (loc, map);
          subst_constants (&temp, NULL_RTX, map);
          apply_change_group ();
          emit_move_insn (temp, structure_value_addr);
        }
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
         value.  Set up our target for remapping.  */

      /* Machine mode the function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
         (for the sake of callers that fail to declare it right).  */
      enum machine_mode arriving_mode
        = TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
         substituting a MEM for a REG makes invalid insns.
         Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
          || GET_MODE (target) != departing_mode)
        target = gen_reg_rtx (departing_mode);

      /* If function's value was promoted before return,
         avoid machine mode mismatch when we substitute INLINE_TARGET.
         But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
        reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
      else
        reg_to_map = target;

      /* Usually, the result value is the machine's return register.
         Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
        map->inline_target = reg_to_map;
      else
        map->reg_map[REGNO (loc)] = reg_to_map;
    }
  /* Make new label equivalences for the labels in the called function.  */
  for (i = min_labelno; i < max_labelno; i++)
    map->label_map[i] = gen_label_rtx ();
  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_map for mark_stores, called
     via note_stores.  */
  global_const_equiv_map = map->const_equiv_map;
  global_const_equiv_map_size = map->const_equiv_map_size;
  /* Now copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES, just like save_for_inline.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
        {
        case INSN:
          pattern = PATTERN (insn);
          copy = 0;
          if (GET_CODE (pattern) == USE
              && GET_CODE (XEXP (pattern, 0)) == REG
              && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
            /* The (USE (REG n)) at return from the function should
               be ignored since we are changing (REG n) into
               inline_target.  */
            break;

          /* Ignore setting a function value that we don't want to use.  */
          if (map->inline_target == 0
              && GET_CODE (pattern) == SET
              && GET_CODE (SET_DEST (pattern)) == REG
              && REG_FUNCTION_VALUE_P (SET_DEST (pattern)))
            {
              if (volatile_refs_p (SET_SRC (pattern)))
                {
                  /* If we must not delete the source,
                     load it into a new temporary.  */
                  copy = emit_insn (copy_rtx_and_substitute (pattern, map));
                  SET_DEST (PATTERN (copy))
                    = gen_reg_rtx (GET_MODE (SET_DEST (PATTERN (copy))));
                }
              else
                break;
            }
          /* If this is setting the static chain pseudo, set it from
             the value we want to give it instead.  */
          else if (static_chain_value != 0
                   && GET_CODE (pattern) == SET
                   && rtx_equal_p (SET_SRC (pattern),
                                   static_chain_incoming_rtx))
            {
              rtx newdest = copy_rtx_and_substitute (SET_DEST (pattern), map);

              copy = emit_insn (gen_rtx (SET, VOIDmode, newdest,
                                         static_chain_value));

              static_chain_value = 0;
            }
          else
            copy = emit_insn (copy_rtx_and_substitute (pattern, map));
          /* REG_NOTES will be copied later.  */
#ifdef HAVE_cc0
          /* If this insn is setting CC0, it may need to look at
             the insn that uses CC0 to see what type of insn it is.
             In that case, the call to recog via validate_change will
             fail.  So don't substitute constants here.  Instead,
             do it when we emit the following insn.

             For example, see the pyr.md file.  That machine has signed and
             unsigned compares.  The compare patterns must check the
             following branch insn to see what kind of compare to
             emit.

             If the previous insn set CC0, substitute constants on it as
             well.  */
          if (sets_cc0_p (PATTERN (copy)) != 0)
            cc0_insn = copy;
          else
            {
              if (cc0_insn)
                try_constants (cc0_insn, map);
              cc0_insn = 0;
              try_constants (copy, map);
            }
#else
          try_constants (copy, map);
#endif
          break;
        case JUMP_INSN:
          if (GET_CODE (PATTERN (insn)) == RETURN)
            {
              if (local_return_label == 0)
                local_return_label = gen_label_rtx ();
              pattern = gen_jump (local_return_label);
            }
          else
            pattern = copy_rtx_and_substitute (PATTERN (insn), map);

          copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
          if (cc0_insn)
            try_constants (cc0_insn, map);
          cc0_insn = 0;
#endif
          try_constants (copy, map);
          /* If this used to be a conditional jump insn but whose branch
             direction is now known, we must do something special.  */
          if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
            {
#ifdef HAVE_cc0
              /* The previous insn set cc0 for us.  So delete it.  */
              delete_insn (PREV_INSN (copy));
#endif

              /* If this is now a no-op, delete it.  */
              if (map->last_pc_value == pc_rtx)
                {
                  delete_insn (copy);
                  copy = 0;
                }
              else
                /* Otherwise, this is unconditional jump so we must put a
                   BARRIER after it.  We could do some dead code elimination
                   here, but jump.c will do it just as well.  */
                emit_barrier ();
            }
          break;
        case CALL_INSN:
          pattern = copy_rtx_and_substitute (PATTERN (insn), map);
          copy = emit_call_insn (pattern);

          /* Because the USAGE information potentially contains objects other
             than hard registers, we need to copy it.  */
          CALL_INSN_FUNCTION_USAGE (copy) =
            copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);

#ifdef HAVE_cc0
          if (cc0_insn)
            try_constants (cc0_insn, map);
          cc0_insn = 0;
#endif
          try_constants (copy, map);

          /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            map->const_equiv_map[i] = 0;
          break;
        case CODE_LABEL:
          copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
          LABEL_NAME (copy) = LABEL_NAME (insn);
          break;

        case BARRIER:
          copy = emit_barrier ();
          break;
        case NOTE:
          /* It is important to discard function-end and function-beg notes,
             so we have only one of each in the current function.
             Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
             deleted these in the copy used for continuing compilation,
             not the copy used for inlining).  */
          if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
            copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
          else
            copy = 0;
          break;

        default:
          abort ();
        }

      if (copy)
        RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }
  /* Now copy the REG_NOTES.  Increment const_age, so that only constants
     from parameters can be substituted in.  These are the only ones that
     are valid across the entire function.  */
  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
        && map->insn_map[INSN_UID (insn)]
        && REG_NOTES (insn))
      {
        rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
        /* We must also do subst_constants, in case one of our parameters
           has const type and constant value.  */
        subst_constants (&tem, NULL_RTX, map);
        apply_change_group ();
        REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
      }

  if (local_return_label)
    emit_label (local_return_label);
  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */

  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
  inline_function_decl = 0;

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  */

  expand_end_bindings (getdecls (), 1, 1);
  block = poplevel (1, 1, 0);
  BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
                                   ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
  poplevel (0, 0, 0);
  emit_line_note (input_filename, lineno);

  if (structure_value_addr)
    {
      target = gen_rtx (MEM, TYPE_MODE (type),
                        memory_address (TYPE_MODE (type),
                                        structure_value_addr));
      MEM_IN_STRUCT_P (target) = 1;
    }
  return target;
}
/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */
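/* As a hypothetical illustration: a parameter declared "int n" whose
   copied home in ARG_VECTOR is a pseudo register gets a VAR_DECL named
   "n" whose DECL_RTL is that pseudo, so debuggers see the inlined
   parameter as an ordinary local variable of the caller.  */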
static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
                                       TREE_TYPE (tail));
      rtx new_decl_rtl
        = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);

      DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
      /* We really should be setting DECL_INCOMING_RTL to something reasonable
         here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */
      /* These args would always appear unused, if not for this.  */
      TREE_USED (decl) = 1;
      /* Prevent warning for shadowing with these.  */
      DECL_ABSTRACT_ORIGIN (decl) = tail;
      pushdecl (decl);
      /* Fully instantiate the address with the equivalent form so that the
         debugging information contains the actual register, instead of the
         virtual register.  Do this by not passing an insn to
         subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map);
      apply_change_group ();
      DECL_RTL (decl) = new_decl_rtl;
    }
}
/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   LEVEL indicates how far down into the BLOCK tree is the node we are
   currently traversing.  It is always zero except for recursive calls.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */
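/* Purely illustrative example: inlining a body of the form
   "{ int a; { int b; } }" builds a BLOCK for the outer scope holding a
   copy of `a' and a sub-BLOCK holding a copy of `b', mirroring the
   callee's scope structure inside the caller.  */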
static void
integrate_decl_tree (let, level, map)
     tree let;
     int level;
     struct inline_remap *map;
{
  tree t, node;

  if (level > 0)
    pushlevel (0);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      push_obstacks_nochange ();
      saveable_allocation ();
      d = copy_node (t);
      pop_obstacks ();

      if (DECL_RTL (t) != 0)
        {
          DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
          /* Fully instantiate the address with the equivalent form so that
             the debugging information contains the actual register, instead
             of the virtual register.  Do this by not passing an insn to
             subst_constants.  */
          subst_constants (&DECL_RTL (d), NULL_RTX, map);
          apply_change_group ();
        }
      /* These args would always appear unused, if not for this.  */
      TREE_USED (d) = 1;
      /* Prevent warning for shadowing with these.  */
      DECL_ABSTRACT_ORIGIN (d) = t;

      if (DECL_LANG_SPECIFIC (d))
        copy_lang_decl (d);

      pushdecl (d);
    }

  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    integrate_decl_tree (t, level + 1, map);

  if (level > 0)
    {
      node = poplevel (1, 0, 0);
      if (node)
        {
          TREE_USED (node) = TREE_USED (let);
          BLOCK_ABSTRACT_ORIGIN (node) = let;
        }
    }
}
/* Create a new copy of an rtx.
   Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (orig, map)
     register rtx orig;
     struct inline_remap *map;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
         stack-adjustments (*not* because we eliminated the frame pointer!).
         Small hard registers are returned as-is.  Pseudo-registers
         go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
        {
          /* Some hard registers are also mapped,
             but others are not translated.  */
          if (map->reg_map[regno] != 0)
            return map->reg_map[regno];

          /* If this is the virtual frame pointer, make space in current
             function's stack frame for the stack frame of the inline
             function.

             Copy the address of this area into a pseudo.  Map
             virtual_stack_vars_rtx to this pseudo and set up a constant
             equivalence for it to be the address.  This will substitute the
             address into insns where it can be substituted and use the new
             pseudo where it can't.  */
          if (regno == VIRTUAL_STACK_VARS_REGNUM)
            {
              rtx loc, seq;
              int size = DECL_FRAME_SIZE (map->fndecl);
              int rounded;

              start_sequence ();
              loc = assign_stack_temp (BLKmode, size, 1);
              loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
              /* In this case, virtual_stack_vars_rtx points to one byte
                 higher than the top of the frame area.  So compute the offset
                 to one byte higher than our substitute frame.
                 Keep the fake frame pointer aligned like a real one.  */
              rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
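              /* Worked example (illustrative): with size == 20 and
                 BIGGEST_ALIGNMENT / BITS_PER_UNIT == 8, CEIL_ROUND gives
                 (20 + 7) & ~7 == 24, so the fake frame pointer keeps the
                 frame's 8-byte alignment.  */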
              loc = plus_constant (loc, rounded);
#endif
              map->reg_map[regno] = temp
                = force_reg (Pmode, force_operand (loc, NULL_RTX));

              if (REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = loc;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }

              seq = gen_sequence ();
              end_sequence ();
              emit_insn_after (seq, map->insns_at_start);
              return temp;
            }
          else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
            {
              /* Do the same for a block to contain any arguments referenced
                 in memory.  */
              rtx loc, seq;
              int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));

              start_sequence ();
              loc = assign_stack_temp (BLKmode, size, 1);
              loc = XEXP (loc, 0);
              /* When arguments grow downward, the virtual incoming
                 args pointer points to the top of the argument block,
                 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
              loc = plus_constant (loc, size);
#endif
              map->reg_map[regno] = temp
                = force_reg (Pmode, force_operand (loc, NULL_RTX));

              if (REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = loc;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }

              seq = gen_sequence ();
              end_sequence ();
              emit_insn_after (seq, map->insns_at_start);
              return temp;
            }
          else if (REG_FUNCTION_VALUE_P (orig))
            {
              /* This is a reference to the function return value.  If
                 the function doesn't have a return value, error.  If the
                 mode doesn't agree, make a SUBREG.  */
              if (map->inline_target == 0)
                /* Must be unrolling loops or replicating code if we
                   reach here, so return the register unchanged.  */
                return orig;
              else if (mode != GET_MODE (map->inline_target))
                return gen_lowpart (mode, map->inline_target);
              else
                return map->inline_target;
            }
          return orig;
        }
      if (map->reg_map[regno] == NULL)
        {
          map->reg_map[regno] = gen_reg_rtx (mode);
          REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
          REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
          RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
          /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */
        }
      return map->reg_map[regno];
    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
      /* SUBREG is ordinary, but don't make nested SUBREGs.  */
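      /* Hypothetical example: if ORIG is (subreg:SI (reg:DI 70) 1) and
         (reg:DI 70) was remapped to (subreg:DI (reg:TI 90) 2), the two
         word offsets are summed and the result is (subreg:SI (reg:TI 90) 3)
         rather than a SUBREG of a SUBREG.  */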
      if (GET_CODE (copy) == SUBREG)
        return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
                        SUBREG_WORD (orig) + SUBREG_WORD (copy));
      else if (GET_CODE (copy) == CONCAT)
        return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
      else
        return gen_rtx (SUBREG, GET_MODE (orig), copy,
                        SUBREG_WORD (orig));
    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
         to (use foo) if the original insn didn't have a subreg.
         Removing the subreg distorts the VAX movstrhi pattern
         by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
        copy = SUBREG_REG (copy);
      return gen_rtx (code, VOIDmode, copy);
    case CODE_LABEL:
      LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
        = LABEL_PRESERVE_P (orig);
      return map->label_map[CODE_LABEL_NUMBER (orig)];
    case LABEL_REF:
      copy = gen_rtx (LABEL_REF, mode,
                      LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
                      : map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
         it still is, so we must check if it is within the range of
         this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
        = (LABEL_REF_NONLOCAL_P (orig)
           && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
                 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      /* If we have made a nonlocal label local, it means that this
         inlined call will be referring to our nonlocal goto handler.
         So make sure we create one for this block; we normally would
         not since this is not otherwise considered a "call".  */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
        function_call_count++;

      return copy;
    case PC:
    case CC0:
    case CONST_INT:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
         pool must be modified to point to a constant pool entry for the
         remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
        {
          rtx constant = get_pool_constant (orig);
          if (GET_CODE (constant) == LABEL_REF)
            return XEXP (force_const_mem (Pmode,
                                          copy_rtx_and_substitute (constant,
                                                                   map)),
                         0);
        }

      return orig;
    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't
         want to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
         duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (orig),
                                   CONST_DOUBLE_HIGH (orig), VOIDmode);
    case CONST:
      /* Make new constant pool entry for a constant
         that was in the pool of the inline function.  */
      if (RTX_INTEGRATED_P (orig))
        {
          /* If this was an address of a constant pool entry that itself
             had to be placed in the constant pool, it might not be a
             valid address.  So the recursive call below might turn it
             into a register.  In that case, it isn't a constant any
             more, so return it.  This has the potential of changing a
             MEM into a REG, but we'll assume that it is safe.  */
          temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
          if (! CONSTANT_P (temp))
            return temp;
          return validize_mem (force_const_mem (GET_MODE (orig), temp));
        }
      break;
    case ADDRESS:
      /* If from constant pool address, make new constant pool entry and
         return its address.  */
      if (! RTX_INTEGRATED_P (orig))
        break;

      temp = force_const_mem (GET_MODE (orig),
                              copy_rtx_and_substitute (XEXP (orig, 0), map));

#if 0
      /* Legitimizing the address here is incorrect.

         The only ADDRESS rtx's that can reach here are ones created by
         save_constants.  Hence the operand of the ADDRESS is always legal
         in this position of the instruction, since the original rtx without
         the ADDRESS was legal.

         The reason we don't legitimize the address here is that on the
         Sparc, the caller may have a (high ...) surrounding this ADDRESS.
         This code forces the operand of the address to a register, which
         fails because we can not take the HIGH part of a register.

         Also, change_address may create new registers.  These registers
         will not have valid reg_map entries.  This can cause try_constants()
         to fail because it assumes that all registers in the rtx have valid
         reg_map entries, and it may end up replacing one of these new
         registers with junk.  */

      if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
        temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
#endif

      return XEXP (temp, 0);
    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
         We must make sure that the copied insn continues to share it.  */
      if (map->orig_asm_operands_vector == XVEC (orig, 3))
        {
          copy = rtx_alloc (ASM_OPERANDS);
          copy->volatil = orig->volatil;
          XSTR (copy, 0) = XSTR (orig, 0);
          XSTR (copy, 1) = XSTR (orig, 1);
          XINT (copy, 2) = XINT (orig, 2);
          XVEC (copy, 3) = map->copy_asm_operands_vector;
          XVEC (copy, 4) = map->copy_asm_constraints_vector;
          XSTR (copy, 5) = XSTR (orig, 5);
          XINT (copy, 6) = XINT (orig, 6);
          return copy;
        }
      break;
    case CALL:
      /* This is given special treatment because the first
         operand of a CALL is a (MEM ...) which may get
         forced into a register for cse.  This is undesirable
         if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
        return gen_rtx (CALL, GET_MODE (orig),
                        gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
                                 copy_rtx_and_substitute (XEXP (XEXP (orig, 0),
                                                                0),
                                                          map)),
                        copy_rtx_and_substitute (XEXP (orig, 1), map));
      break;
#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
         If the nonlocal goto is into the current function,
         this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
          || SET_DEST (orig) == virtual_incoming_args_rtx)
        return gen_rtx (SET, VOIDmode, SET_DEST (orig),
                        copy_rtx_and_substitute (SET_SRC (orig), map));
      break;
    case MEM:
      copy = rtx_alloc (MEM);
      PUT_MODE (copy, mode);
      XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
      MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
      MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);

      /* If doing function inlining, this MEM might not be const in the
         function that it is being inlined into, and thus may not be
         unchanging after function inlining.  Constant pool references are
         handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
         for them.  */
      if (! map->integrating)
        RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);

      return copy;
    }
  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
          break;

        case 'e':
          XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
          break;

        case 'E':
          XVEC (copy, i) = XVEC (orig, i);
          if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
            {
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
              for (j = 0; j < XVECLEN (copy, i); j++)
                XVECEXP (copy, i, j)
                  = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
            }
          break;

        case 'w':
          XWINT (copy, i) = XWINT (orig, i);
          break;

        case 'i':
          XINT (copy, i) = XINT (orig, i);
          break;

        case 's':
          XSTR (copy, i) = XSTR (orig, i);
          break;

        default:
          abort ();
        }
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = XVEC (orig, 3);
      map->copy_asm_operands_vector = XVEC (copy, 3);
      map->copy_asm_constraints_vector = XVEC (copy, 4);
    }

  return copy;
}
/* Substitute known constant values into INSN, if that is valid.  */
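/* An illustrative case: if the inlined argument made (reg 66) equivalent
   to (const_int 4), an insn copied as
   (set (reg 67) (plus (reg 66) (reg 66))) can be validated as
   (set (reg 67) (const_int 8)), and the new equivalence for (reg 67)
   is then recorded below for use in later substitutions.  */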
void
try_constants (insn, map)
     rtx insn;
     struct inline_remap *map;
{
  int i;

  map->num_sets = 0;
  subst_constants (&PATTERN (insn), insn, map);

  /* Apply the changes if they are valid; otherwise discard them.  */
  apply_change_group ();

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (GET_CODE (map->equiv_sets[i].dest) == REG)
        {
          int regno = REGNO (map->equiv_sets[i].dest);

          if (regno < map->const_equiv_map_size
              && (map->const_equiv_map[regno] == 0
                  /* Following clause is a hack to make case work where GNU
                     C++ reassigns a variable to make cse work right.  */
                  || ! rtx_equal_p (map->const_equiv_map[regno],
                                    map->equiv_sets[i].equiv)))
            {
              map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
              map->const_age_map[regno] = map->const_age;
            }
        }
      else if (map->equiv_sets[i].dest == pc_rtx)
        map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
        map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust address of items previously addressed
   via the virtual stack variable or virtual incoming arguments registers.  */

static void
subst_constants (loc, insn, map)
     rtx *loc;
     rtx insn;
     struct inline_remap *map;
{
  rtx x = *loc;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
         some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
         hard regs used as user variables with constants.  */
      {
        int regno = REGNO (x);

        if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
            && regno < map->const_equiv_map_size
            && map->const_equiv_map[regno] != 0
            && map->const_age_map[regno] >= map->const_age)
          validate_change (insn, loc, map->const_equiv_map[regno], 1);
        return;
      }
    case SUBREG:
      /* SUBREG applied to something other than a reg
         should be treated as ordinary, since that must
         be a special hack and we don't know how to treat it specially.
         Consider for example mulsidi3 in m68k.md.
         Ordinary SUBREG of a REG needs this special treatment.  */
      if (GET_CODE (SUBREG_REG (x)) == REG)
        {
          rtx inner = SUBREG_REG (x);
          rtx new = 0;

          /* We can't call subst_constants on &SUBREG_REG (x) because any
             constant or SUBREG wouldn't be valid inside our SUBREG.
             Instead, see what is inside, try to form the new SUBREG and see
             if that is valid.  We handle two cases: extracting a full word
             in an integral mode and extracting the low part.  */
          subst_constants (&inner, NULL_RTX, map);

          if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
              && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
              && GET_MODE (SUBREG_REG (x)) != VOIDmode)
            new = operand_subword (inner, SUBREG_WORD (x), 0,
                                   GET_MODE (SUBREG_REG (x)));

          if (new == 0 && subreg_lowpart_p (x))
            new = gen_lowpart_common (GET_MODE (x), inner);

          if (new)
            validate_change (insn, loc, new, 1);

          return;
        }
      break;
    case MEM:
      subst_constants (&XEXP (x, 0), insn, map);

      /* If a memory address got spoiled, change it back.  */
      if (insn != 0 && num_validated_changes () != num_changes
          && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
        cancel_changes (num_changes);
      return;
    case SET:
      {
        /* Substitute constants in our source, and in any arguments to a
           complex (e.g., ZERO_EXTRACT) destination, but not in the
           destination itself.  */
        rtx *dest_loc = &SET_DEST (x);
        rtx dest = *dest_loc;
        rtx src, tem;

        subst_constants (&SET_SRC (x), insn, map);
        src = SET_SRC (x);

        while (GET_CODE (*dest_loc) == ZERO_EXTRACT
               /* By convention, we always use ZERO_EXTRACT in the dest.  */
               /* || GET_CODE (*dest_loc) == SIGN_EXTRACT */
               || GET_CODE (*dest_loc) == SUBREG
               || GET_CODE (*dest_loc) == STRICT_LOW_PART)
          {
            if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
              {
                subst_constants (&XEXP (*dest_loc, 1), insn, map);
                subst_constants (&XEXP (*dest_loc, 2), insn, map);
              }
            dest_loc = &XEXP (*dest_loc, 0);
          }

        /* Do substitute in the address of a destination in memory.  */
        if (GET_CODE (*dest_loc) == MEM)
          subst_constants (&XEXP (*dest_loc, 0), insn, map);

        /* Check for the case of DEST a SUBREG, both it and the underlying
           register are less than one word, and the SUBREG has the wider
           mode.  In this case, we are really setting the underlying register
           to the source converted to the mode of DEST.  So indicate that.  */
        if (GET_CODE (dest) == SUBREG
            && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
            && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
            && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                <= GET_MODE_SIZE (GET_MODE (dest)))
            && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
                                               src)))
          src = tem, dest = SUBREG_REG (dest);

        /* If storing a recognizable value save it for later recording.  */
        if ((map->num_sets < MAX_RECOG_OPERANDS)
            && (CONSTANT_P (src)
                || (GET_CODE (src) == PLUS
                    && GET_CODE (XEXP (src, 0)) == REG
                    && REGNO (XEXP (src, 0)) >= FIRST_VIRTUAL_REGISTER
                    && REGNO (XEXP (src, 0)) <= LAST_VIRTUAL_REGISTER
                    && CONSTANT_P (XEXP (src, 1)))
                || GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
                || dest == cc0_rtx
#endif
                || (dest == pc_rtx
                    && (src == pc_rtx || GET_CODE (src) == RETURN
                        || GET_CODE (src) == LABEL_REF))))
          {
            /* Normally, this copy won't do anything.  But, if SRC is a
               COMPARE it will cause us to save the COMPARE with any
               constants substituted, which is what we want for later.  */
            map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
            map->equiv_sets[map->num_sets++].dest = dest;
          }

        return;
      }
    }
  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
        case 'i':
        case 'w':
        case 's':
        case 'u':
          break;

        case 'e':
          if (XEXP (x, i))
            subst_constants (&XEXP (x, i), insn, map);
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            {
              int j;
              for (j = 0; j < XVECLEN (x, i); j++)
                subst_constants (&XVECEXP (x, i, j), insn, map);
            }
          break;

        default:
          abort ();
        }
    }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
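  /* E.g. (illustrative): (plus:SI (const_int 4) (reg:SI 65)) becomes
     (plus:SI (reg:SI 65) (const_int 4)), the canonical RTL ordering that
     the simplifiers and recognizers below expect.  */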
  if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Simplify the expression in case we put in some constants.  */
  switch (GET_RTX_CLASS (code))
    {
    case '1':
      new = simplify_unary_operation (code, GET_MODE (x),
                                      XEXP (x, 0), op0_mode);
      break;

    case '<':
      {
        enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
        if (op_mode == VOIDmode)
          op_mode = GET_MODE (XEXP (x, 1));
        new = simplify_relational_operation (code, op_mode,
                                             XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
        if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
          new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
                 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
                                                 GET_MODE (x)));
#endif
        break;
      }

    case '2':
    case 'c':
      new = simplify_binary_operation (code, GET_MODE (x),
                                       XEXP (x, 0), XEXP (x, 1));
      break;

    case 'b':
    case '3':
      new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
                                        XEXP (x, 0), XEXP (x, 1),
                                        XEXP (x, 2));
      break;
    }

  if (new)
    validate_change (insn, loc, new, 1);
}
/* Show that registers modified no longer contain known constants.  We are
   called from note_stores with parts of the new insn.  */
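/* Illustrative example: on a 32-bit target, a DImode store to hard
   register 0 occupies HARD_REGNO_NREGS (0, DImode) == 2 registers, so
   the equivalences for hard registers 0 and 1 are both discarded.  */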
void
mark_stores (dest, x)
     rtx dest;
     rtx x;
{
  int regno = -1;
  enum machine_mode mode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
    {
      regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
                      : regno + HARD_REGNO_NREGS (regno, mode) - 1);
      int i;

      for (i = regno; i <= last_reg; i++)
        if (i < global_const_equiv_map_size)
          global_const_equiv_map[i] = 0;
    }
}
/* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
   pointed to by PX, they represent constants in the constant pool.
   Replace these with a new memory reference obtained from force_const_mem.
   Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
   address of a constant pool entry.  Replace them with the address of
   a new constant pool entry obtained from force_const_mem.  */

static void
restore_constants (px)
     rtx *px;
{
  rtx x = *px;
  int i, j;
  char *fmt;

  if (x == 0)
    return;

  if (GET_CODE (x) == CONST_DOUBLE)
    {
      /* We have to make a new CONST_DOUBLE to ensure that we account for
         it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, x);
          *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
        }
      else
        *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
                                  VOIDmode);
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
    {
      restore_constants (&XEXP (x, 0));
      *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
    {
      /* This must be (subreg/i:M1 (const/i:M2 ...) 0).  */
      rtx new = XEXP (SUBREG_REG (x), 0);

      restore_constants (&new);
      new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
      PUT_MODE (new, GET_MODE (x));
      *px = validize_mem (new);
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
    {
      restore_constants (&XEXP (x, 0));
      *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);
    }
  else
    {
      fmt = GET_RTX_FORMAT (GET_CODE (x));
      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
        {
          switch (*fmt++)
            {
            case 'E':
              for (j = 0; j < XVECLEN (x, i); j++)
                restore_constants (&XVECEXP (x, i, j));
              break;

            case 'e':
              restore_constants (&XEXP (x, i));
              break;
            }
        }
    }
}
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (stmt)
     register tree stmt;
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
        register tree local_decl;

        for (local_decl = BLOCK_VARS (stmt);
             local_decl != NULL_TREE;
             local_decl = TREE_CHAIN (local_decl))
          set_decl_origin_self (local_decl);    /* Potential recursion.  */
      }

      {
        register tree subblock;

        for (subblock = BLOCK_SUBBLOCKS (stmt);
             subblock != NULL_TREE;
             subblock = BLOCK_CHAIN (subblock))
          set_block_origin_self (subblock);     /* Recurse.  */
      }
    }
}
/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

static void
set_decl_origin_self (decl)
     register tree decl;
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
        {
          register tree arg;

          for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
            DECL_ABSTRACT_ORIGIN (arg) = arg;
          if (DECL_INITIAL (decl) != NULL_TREE)
            set_block_origin_self (DECL_INITIAL (decl));
        }
    }
}
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  BLOCK_ABSTRACT (stmt) = setting;

  {
    register tree local_decl;

    for (local_decl = BLOCK_VARS (stmt);
         local_decl != NULL_TREE;
         local_decl = TREE_CHAIN (local_decl))
      set_decl_abstract_flags (local_decl, setting);
  }

  {
    register tree subblock;

    for (subblock = BLOCK_SUBBLOCKS (stmt);
         subblock != NULL_TREE;
         subblock = BLOCK_CHAIN (subblock))
      set_block_abstract_flags (subblock, setting);
  }
}
/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     register tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
        DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE)
        set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  rtx head;
  rtx last;

  if (output_bytecode)
    {
      warning ("`inline' ignored for bytecode output");
      return;
    }

  head = DECL_SAVED_INSNS (fndecl);
  current_function_decl = fndecl;

  /* This call is only used to initialize global variables.  */
  init_function_start (fndecl, "lossage", 1);

  /* Redo parameter determinations in case the FUNCTION_...
     macros took machine-specific actions that need to be redone.  */
  assign_parms (fndecl, 1);

  /* Set stack frame size.  */
  assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);

  restore_reg_data (FIRST_PARM_INSN (head));

  stack_slot_list = STACK_SLOT_LIST (head);

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
    current_function_calls_alloca = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
    current_function_calls_setjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
    current_function_calls_longjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
    current_function_returns_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
    current_function_returns_pcc_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    current_function_needs_context = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
    current_function_has_nonlocal_label = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
    current_function_returns_pointer = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
    current_function_uses_const_pool = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
  current_function_pops_args = POPS_ARGS (head);

  /* There is no need to output a return label again.  */
  return_label = 0;

  expand_function_end (DECL_SOURCE_FILE (fndecl), DECL_SOURCE_LINE (fndecl),
                       0);

  /* Find last insn and rebuild the constant pool.  */
  for (last = FIRST_PARM_INSN (head);
       NEXT_INSN (last); last = NEXT_INSN (last))
    {
      if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
        {
          restore_constants (&PATTERN (last));
          restore_constants (&REG_NOTES (last));
        }
    }

  set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
  set_new_first_and_last_label_num (FIRST_LABELNO (head),
                                    LAST_LABELNO (head));

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */

  set_decl_origin_self (fndecl);

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  current_function_decl = 0;
}