/* Procedure integration for GNU CC.
   Copyright (C) 1988, 91, 93, 94, 95, 1996 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "insn-config.h"
#include "insn-flags.h"
#include "integrate.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;

extern tree pushdecl ();
extern tree poplevel ();

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
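/* For example, CEIL_ROUND (13, 8) is (13 + 7) & ~7, i.e. 16, while a value
   that already meets the alignment, such as CEIL_ROUND (16, 8), comes back
   unchanged.  */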

/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
#define INTEGRATE_THRESHOLD(DECL) \
  (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
#endif
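/* As a worked example of the default: a function with two parameters gets
   a budget of 8 * (8 + 2) = 80 insns before it is considered too large.  */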

static rtx initialize_for_inline	PROTO((tree, int, int, int, int));
static void finish_inline		PROTO((tree, rtx));
static void adjust_copied_decl_tree	PROTO((tree));
static tree copy_decl_list		PROTO((tree));
static tree copy_decl_tree		PROTO((tree));
static void copy_decl_rtls		PROTO((tree));
static void save_constants		PROTO((rtx *));
static void note_modified_parmregs	PROTO((rtx, rtx));
static rtx copy_for_inline		PROTO((rtx));
static void integrate_parm_decls	PROTO((tree, struct inline_remap *, rtvec));
static void integrate_decl_tree	PROTO((tree, int, struct inline_remap *));
static void subst_constants		PROTO((rtx *, rtx, struct inline_remap *));
static void restore_constants		PROTO((rtx *));
static void set_block_origin_self	PROTO((tree));
static void set_decl_origin_self	PROTO((tree));
static void set_block_abstract_flags	PROTO((tree, int));

void set_decl_abstract_flags		PROTO((tree, int));

/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning message with a single %s
   for the function's name.  */
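/* Illustrative use of that convention (a sketch, not quoted from any
   caller; the variable name `lose' is an assumption):

     char *lose = function_cannot_inline_p (fndecl);
     if (lose != 0)
       warning_with_decl (fndecl, lose);

   where the message's %s is filled in with the function's name.  */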

char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  int max_insns = INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;

  /* No inlines with varargs.  `grokdeclarator' gives a warning
     message about that if `inline' is specified.  This code
     is put in to catch the volunteers.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return "varargs function cannot be inline";

  if (current_function_calls_alloca)
    return "function using alloca cannot be inline";

  if (current_function_contains_functions)
    return "function with nested functions cannot be inline";

  /* If it's not even close, don't even look.  */
  if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
    return "function too large to be inline";

  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return "no prototype, and parameter address used; cannot be inline";
    }

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return BLKmode structures in registers.  */
  if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return "function with varying-size return value cannot be inline";

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
      return "function with varying-size parameter cannot be inline";
    else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
      return "function with transparent union parameter cannot be inline";

  if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  ninsns++;

      if (ninsns >= max_insns)
	return "function too large to be inline";
    }

  /* We cannot inline this function if forced_labels is non-zero.  This
     implies that a label in this function was used as an initializer.
     Because labels can not be duplicated, all labels in the function
     will be renamed when it is inlined.  However, there is no way to find
     and fix all variables initialized with addresses of labels in this
     function, hence inlining is impossible.  */

  if (forced_labels)
    return "function with label addresses used in initializers cannot inline";

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return "function with nonlocal goto cannot be inline";

  return 0;
}

/* Variables used within save_for_inline.  */

/* Mapping from old pseudo-register to new pseudo-registers.
   The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *reg_map;

/* Mapping from old code-labels to new code-labels.
   The first element of this map is label_map[min_labelno].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *label_map;

/* Mapping from old insn uid's to copied insns.
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *insn_map;

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* Keep track of first pseudo-register beyond those that are parms.  */
static int max_parm_reg;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise, this is the copied constraints vector.  */
static rtvec copy_asm_constraints_vector;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;

/* Subroutine for `save_for_inline{copying,nocopy}'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtx
initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
     tree fndecl;
     int min_labelno;
     int max_labelno;
     int max_reg;
     int copy;
{
  int function_flags, i;
  rtvec arg_vector;
  tree parms;

  /* Compute the values of any flags we must restore when inlining this.  */

  function_flags
    = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
       + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
       + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
       + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
       + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
       + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
       + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
       + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
       + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
       + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      if (GET_CODE (p) == MEM && copy)
	{
	  /* Copy the rtl so that modifications of the addresses
	     later in compilation won't affect this arg_vector.
	     Virtual register instantiation can screw the address
	     of the rtl.  */
	  rtx new = copy_rtx (p);

	  /* Don't leave the old copy anywhere in this decl.  */
	  if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
	      || (GET_CODE (DECL_RTL (parms)) == MEM
		  && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
		  && (XEXP (DECL_RTL (parms), 0)
		      == XEXP (DECL_INCOMING_RTL (parms), 0))))
	    DECL_INCOMING_RTL (parms) = new;
	  DECL_RTL (parms) = new;
	}

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  /* Assume we start out in the insns that set up the parameters.  */
  in_nonparm_insns = 0;

  /* The list of DECL_SAVED_INSNS starts off with a header which
     contains the following information:

     the first insn of the function (not including the insns that copy
     parameters into registers).
     the first parameter insn of the function,
     the first label used by that function,
     the last label used by that function,
     the highest register number used for parameters,
     the total number of registers used,
     the size of the incoming stack area for parameters,
     the number of bytes popped on return,
     the stack slot list and the labels that are forced to exist,
     some flags that are used to restore compiler globals,
     the value of current_function_outgoing_args_size,
     the original argument vector,
     the original DECL_INITIAL,
     and pointers to the table of pseudo regs, pointer flags, and alignment.  */

  return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
				max_parm_reg, max_reg,
				current_function_args_size,
				current_function_pops_args,
				stack_slot_list, forced_labels, function_flags,
				current_function_outgoing_args_size,
				arg_vector, (rtx) DECL_INITIAL (fndecl),
				(rtvec) regno_reg_rtx, regno_pointer_flag,
				regno_pointer_align);
}

/* Subroutine for `save_for_inline{copying,nocopy}'.  Finishes up the
   things that must be done to make FNDECL expandable as an inline function.
   HEAD contains the chain of insns to which FNDECL will expand.  */

static void
finish_inline (fndecl, head)
     tree fndecl;
     rtx head;
{
  NEXT_INSN (head) = get_first_nonparm_insn ();
  FIRST_PARM_INSN (head) = get_insns ();
  DECL_SAVED_INSNS (fndecl) = head;
  DECL_FRAME_SIZE (fndecl) = get_frame_size ();
}

/* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
   they all point to the new (copied) rtxs.  */

static void
adjust_copied_decl_tree (block)
     register tree block;
{
  register tree subblock;
  register rtx original_end;

  original_end = BLOCK_END_NOTE (block);
  if (original_end)
    {
      BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
      NOTE_SOURCE_FILE (original_end) = 0;
    }

  /* Process all subblocks.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = TREE_CHAIN (subblock))
    adjust_copied_decl_tree (subblock);
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This function is called when we are going to immediately compile
   the insns for FNDECL.  The insns in maybepermanent_obstack cannot be
   modified by the compilation process, so we copy all of them to
   new storage and consider the new insns to be the insn chain to be
   compiled.  Our caller (rest_of_compilation) saves the original
   DECL_INITIAL and DECL_ARGUMENTS; here we copy them.  */

/* ??? The nonlocal_label list should be adjusted also.  However, since
   a function that contains a nested function never gets inlined currently,
   the nonlocal_label list will always be empty, so we don't worry about
   it for now.  */

void
save_for_inline_copying (fndecl)
     tree fndecl;
{
  rtx first_insn, last_insn, insn;
  rtx head, copy;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;
  rtx first_nonparm_insn;
  char *new, *new1;

  /* Make and emit a return-label if we have not already done so.
     Do this before recording the bounds on label numbers.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  /* Get some bounds on the labels and registers used.  */

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  max_reg = max_reg_num ();

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  max_parm_reg = max_parm_reg_num ();
  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);

  if (current_function_uses_const_pool)
    {
      /* Replace any constant pool references with the actual constant.  We
	 will put the constants back in the copy made below.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  {
	    save_constants (&PATTERN (insn));
	    if (REG_NOTES (insn))
	      save_constants (&REG_NOTES (insn));
	  }

      /* Clear out the constant pool so that we can recreate it with the
	 copied constants below.  */
      init_const_rtx_hash_table ();
      clear_const_double_mem ();
    }

  max_uid = INSN_UID (head);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  /* Copy the chain insns of this function.
     Install the copied chain as the insns of this function,
     for continued compilation;
     the original chain is recorded as the DECL_SAVED_INSNS
     for inlining future calls.  */

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
  first_insn = rtx_alloc (NOTE);
  NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
  NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
  INSN_UID (first_insn) = INSN_UID (insn);
  PREV_INSN (first_insn) = NULL;
  NEXT_INSN (first_insn) = NULL;
  last_insn = first_insn;

  /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
     Make these new rtx's now, and install them in regno_reg_rtx, so they
     will be the official pseudo-reg rtx's for the rest of compilation.  */

  reg_map = (rtx *) savealloc ((max_reg + 1) * sizeof (rtx));

  len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
  for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
    reg_map[i] = (rtx) obstack_copy (function_maybepermanent_obstack,
				     regno_reg_rtx[i], len);

  regno_reg_rtx = reg_map;

  /* Put copies of all the virtual register rtx into the new regno_reg_rtx.  */
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;

  /* Likewise each label rtx must have a unique rtx as its copy.  */

  label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
  label_map -= min_labelno;

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();

  /* Record the mapping of old insns to copied insns.  */

  insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
  bzero ((char *) insn_map, max_uid * sizeof (rtx));

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
     (the former occurs when a variable has its address taken)
     since these may be shared and can be changed by virtual
     register instantiation.  DECL_RTL values for our arguments
     have already been copied by initialize_for_inline.  */
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
    if (GET_CODE (regno_reg_rtx[i]) == MEM)
      XEXP (regno_reg_rtx[i], 0)
	= copy_for_inline (XEXP (regno_reg_rtx[i], 0));

  /* Copy the tree of subblocks of the function, and the decls in them.
     We will use the copy for compiling this function, then restore the original
     subblocks and decls for use when inlining this function.

     Several parts of the compiler modify BLOCK trees.  In particular,
     instantiate_virtual_regs will instantiate any virtual regs
     mentioned in the DECL_RTLs of the decls, and loop
     unrolling will replicate any BLOCK trees inside an unrolled loop.

     The modified subblocks or DECL_RTLs would be incorrect for the original rtl
     which we will use for inlining.  The rtl might even contain pseudoregs
     whose space has been freed.  */

  DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
  DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));

  /* Now copy each DECL_RTL which is a MEM,
     so it is safe to modify their addresses.  */
  copy_decl_rtls (DECL_INITIAL (fndecl));

  /* The fndecl node acts as its own progenitor, so mark it as such.  */
  DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;

  /* Now copy the chain of insns.  Do this twice.  The first time, copy the
     insn itself and its body; the second time, copy the REG_NOTES.  This is
     because a REG_NOTE may have a forward pointer to another insn.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      orig_asm_operands_vector = 0;

      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* No need to keep these.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
	    continue;

	  copy = rtx_alloc (NOTE);
	  NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
	    NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
	  else
	    {
	      NOTE_SOURCE_FILE (insn) = (char *) copy;
	      NOTE_SOURCE_FILE (copy) = 0;
	    }
	  RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
	  break;

	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	  copy = rtx_alloc (GET_CODE (insn));

	  if (GET_CODE (insn) == CALL_INSN)
	    CALL_INSN_FUNCTION_USAGE (copy) =
	      copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));

	  PATTERN (copy) = copy_for_inline (PATTERN (insn));
	  INSN_CODE (copy) = -1;
	  LOG_LINKS (copy) = NULL_RTX;
	  RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
	  break;

	case CODE_LABEL:
	  copy = label_map[CODE_LABEL_NUMBER (insn)];
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  break;

	case BARRIER:
	  copy = rtx_alloc (BARRIER);
	  break;

	default:
	  abort ();
	}
      INSN_UID (copy) = INSN_UID (insn);
      insn_map[INSN_UID (insn)] = copy;
      NEXT_INSN (last_insn) = copy;
      PREV_INSN (copy) = last_insn;
      last_insn = copy;
    }

  adjust_copied_decl_tree (DECL_INITIAL (fndecl));

  /* Now copy the REG_NOTES.  */
  for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	&& insn_map[INSN_UID (insn)])
      REG_NOTES (insn_map[INSN_UID (insn)])
	= copy_for_inline (REG_NOTES (insn));

  NEXT_INSN (last_insn) = NULL;

  finish_inline (fndecl, head);

  /* Make new versions of the register tables.  */
  new = (char *) savealloc (regno_pointer_flag_length);
  bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
  new1 = (char *) savealloc (regno_pointer_flag_length);
  bcopy (regno_pointer_align, new1, regno_pointer_flag_length);

  regno_pointer_flag = new;
  regno_pointer_align = new1;

  set_new_first_and_last_insn (first_insn, last_insn);
}

/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  While copying,
   for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
   set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
   point to the corresponding (abstract) original node.  */

static tree
copy_decl_list (list)
     tree list;
{
  tree head;
  register tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
    DECL_ABSTRACT_ORIGIN (head) = list;
  next = TREE_CHAIN (list);
  while (next)
    {
      register tree copy;

      copy = copy_node (next);
      if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
	DECL_ABSTRACT_ORIGIN (copy) = next;
      TREE_CHAIN (prev) = copy;
      prev = copy;
      next = TREE_CHAIN (next);
    }
  return head;
}

/* Make a copy of the entire tree of blocks BLOCK, and return it.  */

static tree
copy_decl_tree (block)
     tree block;
{
  tree t, vars, subblocks;

  vars = copy_decl_list (BLOCK_VARS (block));
  subblocks = 0;

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree copy = copy_decl_tree (t);
      TREE_CHAIN (copy) = subblocks;
      subblocks = copy;
    }

  t = copy_node (block);
  BLOCK_VARS (t) = vars;
  BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
  /* If the BLOCK being cloned is already marked as having been instantiated
     from something else, then leave that `origin' marking alone.  Otherwise,
     mark the clone as having originated from the BLOCK we are cloning.  */
  if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
    BLOCK_ABSTRACT_ORIGIN (t) = block;
  return t;
}

/* Copy DECL_RTLs in all decls in the given BLOCK node.  */

static void
copy_decl_rtls (block)
     tree block;
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
      DECL_RTL (t) = copy_for_inline (DECL_RTL (t));

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    copy_decl_rtls (t);
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtx head;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  max_parm_reg = max_parm_reg_num ();
  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  head = initialize_for_inline (fndecl, get_first_label_num (),
				max_label_num (), max_reg_num (), 0);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  if (current_function_uses_const_pool)
	    {
	      /* Replace any constant pool references with the actual constant.
		 We will put the constant back if we need to write the
		 function out after all.  */
	      save_constants (&PATTERN (insn));
	      if (REG_NOTES (insn))
		save_constants (&REG_NOTES (insn));
	    }

	  /* Record what interesting things happen to our parameters.  */
	  note_stores (PATTERN (insn), note_modified_parmregs);
	}
    }

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  finish_inline (fndecl, head);
}

/* Given PX, a pointer into an insn, search for references to the constant
   pool.  Replace each with a CONST that has the mode of the original
   constant, contains the constant, and has RTX_INTEGRATED_P set.
   Similarly, constant pool addresses not enclosed in a MEM are replaced
   with an ADDRESS rtx which also gives the constant, mode, and has
   RTX_INTEGRATED_P set.  */
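/* An illustrative before/after, with a made-up pool symbol: a reference
   such as (mem:SF (symbol_ref "*.LC2")) would become a (const:SF ...)
   holding the pool constant itself, marked RTX_INTEGRATED_P, and a bare
   (symbol_ref "*.LC2") pool address would become an (address:SF ...);
   copy_for_inline and restore_constants recognize the flag and rebuild
   real constant pool references later.  */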

static void
save_constants (px)
     rtx *px;
{
  rtx x;
  int i;

 again:
  x = *px;

  /* If this is a CONST_DOUBLE, don't try to fix things up in
     CONST_DOUBLE_MEM, because this is an infinite recursion.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return;
  else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	   && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    {
      enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
      rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
      RTX_INTEGRATED_P (new) = 1;

      /* If the MEM was in a different mode than the constant (perhaps we
	 were only looking at the low-order part), surround it with a
	 SUBREG so we can save both modes.  */

      if (GET_MODE (x) != const_mode)
	{
	  new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
	  RTX_INTEGRATED_P (new) = 1;
	}

      *px = new;
      save_constants (&XEXP (*px, 0));
    }
  else if (GET_CODE (x) == SYMBOL_REF
	   && CONSTANT_POOL_ADDRESS_P (x))
    {
      *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
      save_constants (&XEXP (*px, 0));
      RTX_INTEGRATED_P (*px) = 1;
    }
  else
    {
      char *fmt = GET_RTX_FORMAT (GET_CODE (x));
      int len = GET_RTX_LENGTH (GET_CODE (x));

      for (i = len - 1; i >= 0; i--)
	{
	  switch (fmt[i])
	    {
	    case 'E':
	      {
		register int j;

		for (j = 0; j < XVECLEN (x, i); j++)
		  save_constants (&XVECEXP (x, i, j));
	      }
	      break;

	    case 'e':
	      if (XEXP (x, i) == 0)
		continue;
	      if (i == 0)
		{
		  /* Hack tail-recursion here.  */
		  px = &XEXP (x, 0);
		  goto again;
		}
	      save_constants (&XEXP (x, i));
	      break;
	    }
	}
    }
}

/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.  The original rtl insns
   will be saved for inlining; this is used to make a copy
   which is used to finish compiling the inline function itself.

   If we find a "saved" constant pool entry, one which was replaced with
   the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.

   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */

static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case SYMBOL_REF:
    case PC:
    case CC0:
      return x;

    case CONST_DOUBLE:
      /* We have to make a new CONST_DOUBLE to ensure that we account for
	 it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
				   VOIDmode);

    case CONST:
      /* Get constant pool entry for constant in the pool.  */
      if (RTX_INTEGRATED_P (x))
	return validize_mem (force_const_mem (GET_MODE (x),
					      copy_for_inline (XEXP (x, 0))));
      break;

    case SUBREG:
      /* Get constant pool entry, but access in different mode.  */
      if (RTX_INTEGRATED_P (x))
	{
	  rtx new
	    = force_const_mem (GET_MODE (SUBREG_REG (x)),
			       copy_for_inline (XEXP (SUBREG_REG (x), 0)));

	  PUT_MODE (new, GET_MODE (x));
	  return validize_mem (new);
	}
      break;

    case ADDRESS:
      /* If not special for constant pool, error.  Else get constant pool
	 address.  */
      if (! RTX_INTEGRATED_P (x))
	abort ();

      return XEXP (force_const_mem (GET_MODE (x),
				    copy_for_inline (XEXP (x, 0))), 0);

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
	 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
	 We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
	{
	  x = rtx_alloc (ASM_OPERANDS);
	  x->volatil = orig->volatil;
	  XSTR (x, 0) = XSTR (orig, 0);
	  XSTR (x, 1) = XSTR (orig, 1);
	  XINT (x, 2) = XINT (orig, 2);
	  XVEC (x, 3) = copy_asm_operands_vector;
	  XVEC (x, 4) = copy_asm_constraints_vector;
	  XSTR (x, 5) = XSTR (orig, 5);
	  XINT (x, 6) = XINT (orig, 6);
	  return x;
	}
      break;

    case MEM:
      /* A MEM is usually allowed to be shared if its address is constant
	 or is a constant plus one of the special registers.

	 We do not allow sharing of addresses that are either a special
	 register or the sum of a constant and a special register because
	 it is possible for unshare_all_rtl to copy the address, into memory
	 that won't be saved.  Although the MEM can safely be shared, and
	 won't be copied there, the address itself cannot be shared, and may
	 need to be copied.

	 There are also two exceptions with constants: The first is if the
	 constant is a LABEL_REF or the sum of the LABEL_REF
	 and an integer.  This case can happen if we have an inline
	 function that supplies a constant operand to the call of another
	 inline function that uses it in a switch statement.  In this case,
	 we will be replacing the LABEL_REF, so we have to replace this MEM
	 as well.

	 The second case is if we have a (const (plus (address ..) ...)).
	 In that case we need to put back the address of the constant pool
	 entry.  */

      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  && GET_CODE (XEXP (x, 0)) != LABEL_REF
	  && ! (GET_CODE (XEXP (x, 0)) == CONST
		&& (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
		    && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
			 == LABEL_REF)
			|| (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
			    == ADDRESS)))))
	return x;
      break;

    case LABEL_REF:
      /* If this is a non-local label, just make a new LABEL_REF.
	 Otherwise, use the new label as well.  */
      x = gen_rtx (LABEL_REF, GET_MODE (orig),
		   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
		   : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
      LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
      return x;

    case REG:
      if (REGNO (x) > LAST_VIRTUAL_REGISTER)
	return reg_map[REGNO (x)];
      else
	return x;

    case SET:
      /* If a parm that gets modified lives in a pseudo-reg,
	 clear its TREE_READONLY to prevent certain optimizations.  */
      {
	rtx dest = SET_DEST (x);

	while (GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == ZERO_EXTRACT
	       || GET_CODE (dest) == SUBREG)
	  dest = XEXP (dest, 0);

	if (GET_CODE (dest) == REG
	    && REGNO (dest) < max_parm_reg
	    && REGNO (dest) >= FIRST_PSEUDO_REGISTER
	    && parmdecl_map[REGNO (dest)] != 0
	    /* The insn to load an arg pseudo from a stack slot
	       does not count as modifying it.  */
	    && in_nonparm_insns)
	  TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      }
      break;

#if 0 /* This is a good idea, but here is the wrong place for it.  */
      /* Arrange that CONST_INTs always appear as the second operand
	 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
	 always appear as the first.  */
    case PLUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
	  || (XEXP (x, 1) == frame_pointer_rtx
	      || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
		  && XEXP (x, 1) == arg_pointer_rtx)))
	{
	  rtx t = XEXP (x, 0);
	  XEXP (x, 0) = XEXP (x, 1);
	  XEXP (x, 1) = t;
	}
      break;
#endif
    }

  /* Replace this rtx with a copy of itself.  */

  x = rtx_alloc (code);
  bcopy ((char *) orig, (char *) x,
	 (sizeof (*x) - sizeof (x->fld)
	  + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  XEXP (x, i) = copy_for_inline (XEXP (x, i));
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    {
	      register int j;

	      XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
	      for (j = 0; j < XVECLEN (x, i); j++)
		XVECEXP (x, i, j)
		  = copy_for_inline (XVECEXP (x, i, j));
	    }
	  break;
	}
    }

  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

rtx *global_const_equiv_map;
int global_const_equiv_map_size;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT	\
   && GET_CODE (XEXP (X, 0)) == REG				\
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER		\
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
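/* For instance, FIXED_BASE_PLUS_P accepts an address of the form
   (plus (reg VIRTUAL_STACK_VARS_REGNUM) (const_int 4)), since the base
   register number falls in the virtual range, but rejects a sum whose
   base is an ordinary pseudo register.  */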

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */
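/* A caller is expected to test the value along these lines (an
   illustrative sketch, not quoted from calls.c):

     temp = expand_inline_function (fndecl, actparms, target, ignore,
				    TREE_TYPE (exp), structure_value_addr);
     if (temp != (rtx) (HOST_WIDE_INT) -1)
       return temp;

   falling back to emitting a real call when (rtx)-1 comes back.  */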

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  tree formal, actual, block;
  rtx header = DECL_SAVED_INSNS (fndecl);
  rtx insns = FIRST_FUNCTION_INSN (header);
  rtx parm_insns = FIRST_PARM_INSN (header);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = FIRST_LABELNO (header);
  int max_labelno = LAST_LABELNO (header);
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map;
  rtx cc0_insn = 0;
  rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
  rtx static_chain_value = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = MAX_REGNUM (header) + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  /* Check that the parm types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal)))
	return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);

  /* Make a fresh binding contour that we can easily remove.  */
  pushlevel (0);
  expand_start_bindings (0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
  arg_trees = (tree *) alloca (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot
	    = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
				 int_size_in_bytes (TREE_TYPE (arg)), 1);
	  MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));

	  store_expr (arg, stack_slot, 0);

	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
) != MEM
)
1279 if (GET_MODE (loc
) != TYPE_MODE (TREE_TYPE (arg
)))
1280 /* The mode if LOC and ARG can differ if LOC was a variable
1281 that had its mode promoted via PROMOTED_MODE. */
1282 arg_vals
[i
] = convert_modes (GET_MODE (loc
),
1283 TYPE_MODE (TREE_TYPE (arg
)),
1284 expand_expr (arg
, NULL_RTX
, mode
,
1286 TREE_UNSIGNED (TREE_TYPE (formal
)));
1288 arg_vals
[i
] = expand_expr (arg
, NULL_RTX
, mode
, EXPAND_SUM
);
1293 if (arg_vals
[i
] != 0
1294 && (! TREE_READONLY (formal
)
1295 /* If the parameter is not read-only, copy our argument through
1296 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1297 TARGET in any way. In the inline function, they will likely
1298 be two different pseudos, and `safe_from_p' will make all
1299 sorts of smart assumptions about their not conflicting.
1300 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1301 wrong, so put ARG_VALS[I] into a fresh register.
1302 Don't worry about invisible references, since their stack
1303 temps will never overlap the target. */
1306 && (GET_CODE (arg_vals
[i
]) == REG
1307 || GET_CODE (arg_vals
[i
]) == SUBREG
1308 || GET_CODE (arg_vals
[i
]) == MEM
)
1309 && reg_overlap_mentioned_p (arg_vals
[i
], target
))
1310 /* ??? We must always copy a SUBREG into a REG, because it might
1311 get substituted into an address, and not all ports correctly
1312 handle SUBREGs in addresses. */
1313 || (GET_CODE (arg_vals
[i
]) == SUBREG
)))
1314 arg_vals
[i
] = copy_to_mode_reg (GET_MODE (loc
), arg_vals
[i
]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && TREE_CODE (TREE_TYPE (formal)) == POINTER_TYPE)
	mark_reg_pointer (arg_vals[i],
			  (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
			   / BITS_PER_UNIT));
    }

  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) map->reg_map, max_regno * sizeof (rtx));

  map->label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
  map->label_map -= min_labelno;

  map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
  bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = INSN_UID (header);

  map->integrating = 1;

  /* const_equiv_map maps pseudos in our routine to constants, so it needs to
     be large enough for all our pseudos.  This is the number we are currently
     using plus the number in the called routine, plus 15 for each arg,
     five to compute the virtual frame pointer, and five for the return value.
     This should be enough for most cases.  We do not reference entries
     outside the range of the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  map->const_equiv_map_size
    = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;

  map->const_equiv_map
    = (rtx *) alloca (map->const_equiv_map_size * sizeof (rtx));
  bzero ((char *) map->const_equiv_map,
	 map->const_equiv_map_size * sizeof (rtx));

  map->const_age_map
    = (unsigned *) alloca (map->const_equiv_map_size * sizeof (unsigned));
  bzero ((char *) map->const_age_map,
	 map->const_equiv_map_size * sizeof (unsigned));
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  */
  map->insns_at_start = get_last_insn ();

  map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
  map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
    current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes:  In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copy;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.
	     It will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     now; we will call store_expr later.  */
	  ;
	}
      else if (GET_CODE (loc) == REG)
	{
	  /* This is the good case where the parameter is in a register.
	     If it is read-only and our argument is a constant, set up the
	     constant equivalence.

	     If LOC is REG_USERVAR_P, the usual case, COPY must also have
	     that flag set if it is a register.

	     Also, don't allow hard registers here; they might not be valid
	     when substituted into insns.  */

	  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
	      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
		  && ! REG_USERVAR_P (copy))
	      || (GET_CODE (copy) == REG
		  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
	    {
	      temp = copy_to_mode_reg (GET_MODE (loc), copy);
	      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
	      if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copy;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copy = temp;
	    }
	  map->reg_map[REGNO (loc)] = copy;
	}
      else if (GET_CODE (loc) == CONCAT)
	{
	  /* This is the good case where the parameter is in a
	     pair of separate pseudos.
	     If it is read-only and our argument is a constant, set up the
	     constant equivalence.

	     If LOC is REG_USERVAR_P, the usual case, COPY must also have
	     that flag set if it is a register.

	     Also, don't allow hard registers here; they might not be valid
	     when substituted into insns.  */
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
	      || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
		  && ! REG_USERVAR_P (copyreal))
	      || (GET_CODE (copyreal) == REG
		  && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
	    {
	      temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
	      REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
	      if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copyreal;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copyreal = temp;
	    }
	  map->reg_map[REGNO (locreal)] = copyreal;

	  if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
	      || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
		  && ! REG_USERVAR_P (copyimag))
	      || (GET_CODE (copyimag) == REG
		  && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
	    {
	      temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
	      REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
	      if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
		  && REGNO (temp) < map->const_equiv_map_size)
		{
		  map->const_equiv_map[REGNO (temp)] = copyimag;
		  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
		}
	      copyimag = temp;
	    }
	  map->reg_map[REGNO (locimag)] = copyimag;
	}
      else
	abort ();
    }

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map);
	  subst_constants (&temp, NULL_RTX, map);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }

  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't have any special handling for
     REG_FUNCTION_RETURN_VALUE_P.  */

  map->inline_target = 0;
1570 loc
= DECL_RTL (DECL_RESULT (fndecl
));
1571 if (TYPE_MODE (type
) == VOIDmode
)
1572 /* There is no return value to worry about. */
1574 else if (GET_CODE (loc
) == MEM
)
1576 if (! structure_value_addr
|| ! aggregate_value_p (DECL_RESULT (fndecl
)))
1579 /* Pass the function the address in which to return a structure value.
1580 Note that a constructor can cause someone to call us with
1581 STRUCTURE_VALUE_ADDR, but the initialization takes place
1582 via the first parameter, rather than the struct return address.
1584 We have two cases: If the address is a simple register indirect,
1585 use the mapping mechanism to point that register to our structure
1586 return address. Otherwise, store the structure return value into
1587 the place that it will be referenced from. */

      if (GET_CODE (XEXP (loc, 0)) == REG)
	{
	  temp = force_reg (Pmode, structure_value_addr);
	  map->reg_map[REGNO (XEXP (loc, 0))] = temp;
	  if ((CONSTANT_P (structure_value_addr)
	       || (GET_CODE (structure_value_addr) == PLUS
		   && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
	      && REGNO (temp) < map->const_equiv_map_size)
	    {
	      map->const_equiv_map[REGNO (temp)] = structure_value_addr;
	      map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
	    }
	}
      else
	{
	  temp = copy_rtx_and_substitute (loc, map);
	  subst_constants (&temp, NULL_RTX, map);
	  apply_change_group ();
	  emit_move_insn (temp, structure_value_addr);
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).  */
      enum machine_mode arriving_mode
	= TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	target = gen_reg_rtx (departing_mode);

      /* If function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
      else
	reg_to_map = target;

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }

  /* Make new label equivalences for the labels in the called function.  */
  for (i = min_labelno; i < max_labelno; i++)
    map->label_map[i] = gen_label_rtx ();

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_map for mark_stores, called
     via note_stores.  */
  global_const_equiv_map = map->const_equiv_map;
  global_const_equiv_map_size = map->const_equiv_map_size;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Now copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES, just like save_for_inline.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      /* If the source and destination are the same and it
		 has a note on it, keep the insn.  */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map));
	      else
		break;
	    }

	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_DEST (set),
				   static_chain_incoming_rtx))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && rtx_equal_p (SET_SRC (set),
				   static_chain_incoming_rtx))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);

	      copy = emit_move_insn (newdest, static_chain_value);
	      static_chain_value = 0;
	    }
	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map));
	  /* REG_NOTES will be copied later.  */
1754 /* If this insn is setting CC0, it may need to look at
1755 the insn that uses CC0 to see what type of insn it is.
1756 In that case, the call to recog via validate_change will
1757 fail. So don't substitute constants here. Instead,
1758 do it when we emit the following insn.
1760 For example, see the pyr.md file. That machine has signed and
1761 unsigned compares. The compare patterns must check the
1762 following branch insn to see which what kind of compare to
1765 If the previous insn set CC0, substitute constants on it as
1767 if (sets_cc0_p (PATTERN (copy
)) != 0)
1772 try_constants (cc0_insn
, map
);
1774 try_constants (copy
, map
);
1777 try_constants (copy
, map
);
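          /* Taken together, the tests above mean: stores into the
             return-value register are dropped when the value is unused
             (unless the source is volatile), and the single insn that
             loads the static chain register is replaced by a store of
             the chain value supplied for this inlined call.  */
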
        case JUMP_INSN:
          if (GET_CODE (PATTERN (insn)) == RETURN)
            {
              if (local_return_label == 0)
                local_return_label = gen_label_rtx ();
              pattern = gen_jump (local_return_label);
            }
          else
            pattern = copy_rtx_and_substitute (PATTERN (insn), map);

          copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
          if (cc0_insn)
            try_constants (cc0_insn, map);
          cc0_insn = 0;
#endif
          try_constants (copy, map);

          /* If this used to be a conditional jump insn but whose branch
             direction is now known, we must do something special.  */
          if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
            {
#ifdef HAVE_cc0
              /* The previous insn set cc0 for us.  So delete it.  */
              delete_insn (PREV_INSN (copy));
#endif

              /* If this is now a no-op, delete it.  */
              if (map->last_pc_value == pc_rtx)
                {
                  delete_insn (copy);
                  copy = 0;
                }
              else
                /* Otherwise, this is unconditional jump so we must put a
                   BARRIER after it.  We could do some dead code elimination
                   here, but jump.c will do it just as well.  */
                emit_barrier ();
            }
          break;

        case CALL_INSN:
          pattern = copy_rtx_and_substitute (PATTERN (insn), map);
          copy = emit_call_insn (pattern);

          /* Because the USAGE information potentially contains objects other
             than hard registers, we need to copy it.  */
          CALL_INSN_FUNCTION_USAGE (copy)
            = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);

#ifdef HAVE_cc0
          if (cc0_insn)
            try_constants (cc0_insn, map);
          cc0_insn = 0;
#endif
          try_constants (copy, map);

          /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            map->const_equiv_map[i] = 0;
          break;

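          /* Only the first FIRST_PSEUDO_REGISTER entries are cleared, so a
             call invalidates every hard-register equivalence while the
             equivalences recorded for pseudos survive.  */
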
        case CODE_LABEL:
          copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
          LABEL_NAME (copy) = LABEL_NAME (insn);
          map->const_age++;
          break;

        case BARRIER:
          copy = emit_barrier ();
          break;

        case NOTE:
          /* It is important to discard function-end and function-beg notes,
             so we have only one of each in the current function.
             Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
             deleted these in the copy used for continuing compilation,
             not the copy used for inlining).  */
          if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
            copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
          else
            copy = 0;
          break;

        default:
          abort ();
        }

      if (copy)
        RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }

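  /* Note that map->insn_map may record a null copy, e.g. for a discarded
     note or for a conditional jump that folded to a no-op and was deleted,
     so consumers of the map must tolerate zero entries.  */
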
  /* Now copy the REG_NOTES.  Increment const_age, so that only constants
     from parameters can be substituted in.  These are the only ones that
     are valid across the entire function.  */
  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
        && map->insn_map[INSN_UID (insn)]
        && REG_NOTES (insn))
      {
        rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
        /* We must also do subst_constants, in case one of our parameters
           has const type and constant value.  */
        subst_constants (&tem, NULL_RTX, map);
        apply_change_group ();
        REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
      }

  if (local_return_label)
    emit_label (local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */
  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
  inline_function_decl = 0;

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  */

  expand_end_bindings (getdecls (), 1, 1);
  block = poplevel (1, 1, 0);
  BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
                                   ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));

  emit_line_note (input_filename, lineno);

  if (structure_value_addr)
    {
      target = gen_rtx (MEM, TYPE_MODE (type),
                        memory_address (TYPE_MODE (type),
                                        structure_value_addr));
      MEM_IN_STRUCT_P (target) = 1;
    }

  return target;
}

/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
                                       TREE_TYPE (tail));
      rtx new_decl_rtl
        = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);

      DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
      /* We really should be setting DECL_INCOMING_RTL to something reasonable
         here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */
      /* These args would always appear unused, if not for this.  */
      TREE_USED (decl) = 1;
      /* Prevent warning for shadowing with these.  */
      DECL_ABSTRACT_ORIGIN (decl) = tail;
      pushdecl (decl);
      /* Fully instantiate the address with the equivalent form so that the
         debugging information contains the actual register, instead of the
         virtual register.  Do this by not passing an insn to
         subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map);
      apply_change_group ();
      DECL_RTL (decl) = new_decl_rtl;
    }
}

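/* The subst_constants/apply_change_group pair above means that, for
   instance, a home that was expressed in terms of the virtual frame
   pointer ends up as a concrete register-plus-offset address in
   DECL_RTL, so the debugger is shown a real location rather than a
   virtual one.  */
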
/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   LEVEL indicates how far down into the BLOCK tree is the node we are
   currently traversing.  It is always zero except for recursive calls.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */

static void
integrate_decl_tree (let, level, map)
     tree let;
     int level;
     struct inline_remap *map;
{
  tree t, node;

  if (level > 0)
    pushlevel (0);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      push_obstacks_nochange ();
      saveable_allocation ();
      d = copy_node (t);
      pop_obstacks ();

      if (DECL_RTL (t) != 0)
        {
          DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
          /* Fully instantiate the address with the equivalent form so that
             the debugging information contains the actual register, instead
             of the virtual register.  Do this by not passing an insn to
             subst_constants.  */
          subst_constants (&DECL_RTL (d), NULL_RTX, map);
          apply_change_group ();
        }
      /* These args would always appear unused, if not for this.  */
      TREE_USED (d) = 1;
      /* Prevent warning for shadowing with these.  */
      DECL_ABSTRACT_ORIGIN (d) = t;

      if (DECL_LANG_SPECIFIC (d))
        copy_lang_decl (d);

      pushdecl (d);
    }

  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    integrate_decl_tree (t, level + 1, map);

  if (level > 0)
    {
      node = poplevel (1, 0, 0);
      if (node)
        {
          TREE_USED (node) = TREE_USED (let);
          BLOCK_ABSTRACT_ORIGIN (node) = let;
        }
    }
}

/* Create a new copy of an rtx.
   Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (orig, map)
     register rtx orig;
     struct inline_remap *map;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
         stack-adjustments (*not* because we eliminated the frame pointer!).
         Small hard registers are returned as-is.  Pseudo-registers
         go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
        {
          /* Some hard registers are also mapped,
             but others are not translated.  */
          if (map->reg_map[regno] != 0)
            return map->reg_map[regno];

          /* If this is the virtual frame pointer, make space in current
             function's stack frame for the stack frame of the inline
             function.

             Copy the address of this area into a pseudo.  Map
             virtual_stack_vars_rtx to this pseudo and set up a constant
             equivalence for it to be the address.  This will substitute the
             address into insns where it can be substituted and use the new
             pseudo where it can't.  */
          if (regno == VIRTUAL_STACK_VARS_REGNUM)
            {
              rtx loc, seq;
              int size = DECL_FRAME_SIZE (map->fndecl);
              int rounded;

              start_sequence ();
              loc = assign_stack_temp (BLKmode, size, 1);
              loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
              /* In this case, virtual_stack_vars_rtx points to one byte
                 higher than the top of the frame area.  So compute the offset
                 to one byte higher than our substitute frame.
                 Keep the fake frame pointer aligned like a real one.  */
              rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
              loc = plus_constant (loc, rounded);
#endif
              map->reg_map[regno] = temp
                = force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
              mark_reg_pointer (map->reg_map[regno],
                                STACK_BOUNDARY / BITS_PER_UNIT);
#endif

              if (REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = loc;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }

              seq = gen_sequence ();
              end_sequence ();
              emit_insn_after (seq, map->insns_at_start);
              return temp;
            }

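          /* So, for instance, inlining a function with a 16-byte frame
             reserves a 16-byte BLKmode temporary in the caller's frame;
             uses of the virtual frame pointer in the copied body then
             resolve either to the constant address recorded in
             const_equiv_map or, where that is not valid, to TEMP.  */
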
          else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
            {
              /* Do the same for a block to contain any arguments referenced
                 in memory.  */
              rtx loc, seq;
              int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));

              start_sequence ();
              loc = assign_stack_temp (BLKmode, size, 1);
              loc = XEXP (loc, 0);
              /* When arguments grow downward, the virtual incoming
                 args pointer points to the top of the argument block,
                 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
              loc = plus_constant (loc, size);
#endif
              map->reg_map[regno] = temp
                = force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
              mark_reg_pointer (map->reg_map[regno],
                                STACK_BOUNDARY / BITS_PER_UNIT);
#endif

              if (REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = loc;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }

              seq = gen_sequence ();
              end_sequence ();
              emit_insn_after (seq, map->insns_at_start);
              return temp;
            }
          else if (REG_FUNCTION_VALUE_P (orig))
            {
              /* This is a reference to the function return value.  If
                 the function doesn't have a return value, error.  If the
                 mode doesn't agree, make a SUBREG.  */
              if (map->inline_target == 0)
                /* Must be unrolling loops or replicating code if we
                   reach here, so return the register unchanged.  */
                return orig;
              else if (mode != GET_MODE (map->inline_target))
                return gen_lowpart (mode, map->inline_target);
              else
                return map->inline_target;
            }
          return orig;
        }

      if (map->reg_map[regno] == NULL)
        {
          map->reg_map[regno] = gen_reg_rtx (mode);
          REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
          REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
          RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
          /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

          if (map->regno_pointer_flag[regno])
            mark_reg_pointer (map->reg_map[regno],
                              map->regno_pointer_align[regno]);
        }
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
      /* SUBREG is ordinary, but don't make nested SUBREGs.  */
      if (GET_CODE (copy) == SUBREG)
        return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
                        SUBREG_WORD (orig) + SUBREG_WORD (copy));
      else if (GET_CODE (copy) == CONCAT)
        return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
      else
        return gen_rtx (SUBREG, GET_MODE (orig), copy,
                        SUBREG_WORD (orig));

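      /* When the copy of the inner expression is itself a SUBREG, the word
         offsets are summed instead of nesting, so e.g. word 1 of a copy
         that begins at word 2 becomes word 3 of the underlying register.  */
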
    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
         to (use foo) if the original insn didn't have a subreg.
         Removing the subreg distorts the VAX movstrhi pattern
         by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
        copy = SUBREG_REG (copy);
      return gen_rtx (code, VOIDmode, copy);

    case CODE_LABEL:
      LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
        = LABEL_PRESERVE_P (orig);
      return map->label_map[CODE_LABEL_NUMBER (orig)];

    case LABEL_REF:
      copy = gen_rtx (LABEL_REF, mode,
                      LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
                      : map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
         it still is, so we must check if it is within the range of
         this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
        = (LABEL_REF_NONLOCAL_P (orig)
           && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
                 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      /* If we have made a nonlocal label local, it means that this
         inlined call will be referring to our nonlocal goto handler.
         So make sure we create one for this block; we normally would
         not since this is not otherwise considered a "call".  */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
        function_call_count++;

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
         pool must be modified to point to a constant pool entry for the
         remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
        {
          rtx constant = get_pool_constant (orig);
          if (GET_CODE (constant) == LABEL_REF)
            return XEXP (force_const_mem (Pmode,
                                          copy_rtx_and_substitute (constant,
                                                                   map)),
                         0);
        }

      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't
         want to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
         duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (orig),
                                   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      /* Make new constant pool entry for a constant
         that was in the pool of the inline function.  */
      if (RTX_INTEGRATED_P (orig))
        {
          /* If this was an address of a constant pool entry that itself
             had to be placed in the constant pool, it might not be a
             valid address.  So the recursive call below might turn it
             into a register.  In that case, it isn't a constant any
             more, so return it.  This has the potential of changing a
             MEM into a REG, but we'll assume that it is safe.  */
          temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
          if (! CONSTANT_P (temp))
            return temp;
          return validize_mem (force_const_mem (GET_MODE (orig), temp));
        }
      break;

    case ADDRESS:
      /* If from constant pool address, make new constant pool entry and
         return its address.  */
      if (! RTX_INTEGRATED_P (orig))
        abort ();

      temp = force_const_mem (GET_MODE (orig),
                              copy_rtx_and_substitute (XEXP (orig, 0), map));

#if 0
      /* Legitimizing the address here is incorrect.

         The only ADDRESS rtx's that can reach here are ones created by
         save_constants.  Hence the operand of the ADDRESS is always valid
         in this position of the instruction, since the original rtx without
         the ADDRESS was valid.

         The reason we don't legitimize the address here is that on the
         Sparc, the caller may have a (high ...) surrounding this ADDRESS.
         This code forces the operand of the address to a register, which
         fails because we can not take the HIGH part of a register.

         Also, change_address may create new registers.  These registers
         will not have valid reg_map entries.  This can cause try_constants()
         to fail because it assumes that all registers in the rtx have valid
         reg_map entries, and it may end up replacing one of these new
         registers with junk.  */

      if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
        temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
#endif

      return XEXP (temp, 0);

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
         We must make sure that the copied insn continues to share it.  */
      if (map->orig_asm_operands_vector == XVEC (orig, 3))
        {
          copy = rtx_alloc (ASM_OPERANDS);
          copy->volatil = orig->volatil;
          XSTR (copy, 0) = XSTR (orig, 0);
          XSTR (copy, 1) = XSTR (orig, 1);
          XINT (copy, 2) = XINT (orig, 2);
          XVEC (copy, 3) = map->copy_asm_operands_vector;
          XVEC (copy, 4) = map->copy_asm_constraints_vector;
          XSTR (copy, 5) = XSTR (orig, 5);
          XINT (copy, 6) = XINT (orig, 6);
          return copy;
        }
      break;

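      /* Only the first ASM_OPERANDS of an insn reaches the generic copying
         code below, which records the freshly copied operand and constraint
         vectors in MAP; subsequent ASM_OPERANDS from the same insn are
         rebuilt here so that they share those copies.  */
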
    case CALL:
      /* This is given special treatment because the first
         operand of a CALL is a (MEM ...) which may get
         forced into a register for cse.  This is undesirable
         if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
        return gen_rtx (CALL, GET_MODE (orig),
                        gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
                                 copy_rtx_and_substitute (XEXP (XEXP (orig, 0),
                                                                0),
                                                          map)),
                        copy_rtx_and_substitute (XEXP (orig, 1), map));
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
         If the nonlocal goto is into the current function,
         this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
          || SET_DEST (orig) == virtual_incoming_args_rtx)
        return gen_rtx (SET, VOIDmode, SET_DEST (orig),
                        copy_rtx_and_substitute (SET_SRC (orig), map));
      break;

    case MEM:
      copy = rtx_alloc (MEM);
      PUT_MODE (copy, mode);
      XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
      MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
      MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);

      /* If doing function inlining, this MEM might not be const in the
         function that it is being inlined into, and thus may not be
         unchanging after function inlining.  Constant pool references are
         handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
         for them.  */
      if (! map->integrating)
        RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);

      return copy;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
          break;

        case 'e':
          XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
          break;

        case 'E':
          XVEC (copy, i) = XVEC (orig, i);
          if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
            {
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
              for (j = 0; j < XVECLEN (copy, i); j++)
                XVECEXP (copy, i, j)
                  = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
            }
          break;

        case 'w':
          XWINT (copy, i) = XWINT (orig, i);
          break;

        case 'i':
          XINT (copy, i) = XINT (orig, i);
          break;

        case 's':
          XSTR (copy, i) = XSTR (orig, i);
          break;

        default:
          abort ();
        }
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = XVEC (orig, 3);
      map->copy_asm_operands_vector = XVEC (copy, 3);
      map->copy_asm_constraints_vector = XVEC (copy, 4);
    }

  return copy;
}

/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (insn, map)
     rtx insn;
     struct inline_remap *map;
{
  int i;

  map->num_sets = 0;
  subst_constants (&PATTERN (insn), insn, map);

  /* Apply the changes if they are valid; otherwise discard them.  */
  apply_change_group ();

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (GET_CODE (map->equiv_sets[i].dest) == REG)
        {
          int regno = REGNO (map->equiv_sets[i].dest);

          if (regno < map->const_equiv_map_size
              && (map->const_equiv_map[regno] == 0
                  /* Following clause is a hack to make the case work where
                     GNU C++ reassigns a variable to make cse work right.  */
                  || ! rtx_equal_p (map->const_equiv_map[regno],
                                    map->equiv_sets[i].equiv)))
            {
              map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
              map->const_age_map[regno] = map->const_age;
            }
        }
      else if (map->equiv_sets[i].dest == pc_rtx)
        map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
        map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}

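/* Note the interplay with const_age: subst_constants only believes an
   equivalence while const_age_map[regno] >= map->const_age, so bumping
   map->const_age retires the equivalences recorded here without clearing
   the maps, while entries recorded with CONST_AGE_PARM are meant to stay
   valid across the whole inlined body.  */
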
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used
   to update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust addresses of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.  */

static void
subst_constants (loc, insn, map)
     rtx *loc;
     rtx insn;
     struct inline_remap *map;
{
  rtx x = *loc;
  register int i, j;
  register enum rtx_code code;
  register char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
         some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
         hard regs used as user variables with constants.  */
      {
        int regno = REGNO (x);

        if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
            && regno < map->const_equiv_map_size
            && map->const_equiv_map[regno] != 0
            && map->const_age_map[regno] >= map->const_age)
          validate_change (insn, loc, map->const_equiv_map[regno], 1);
        return;
      }

    case SUBREG:
      /* SUBREG applied to something other than a reg
         should be treated as ordinary, since that must
         be a special hack and we don't know how to treat it specially.
         Consider for example mulsidi3 in m68k.md.
         Ordinary SUBREG of a REG needs this special treatment.  */
      if (GET_CODE (SUBREG_REG (x)) == REG)
        {
          rtx inner = SUBREG_REG (x);

          /* We can't call subst_constants on &SUBREG_REG (x) because any
             constant or SUBREG wouldn't be valid inside our SUBREG.
             Instead, see what is inside, try to form the new SUBREG and
             see if that is valid.  We handle two cases: extracting a full
             word in an integral mode and extracting the low part.  */
          subst_constants (&inner, NULL_RTX, map);

          if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
              && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
              && GET_MODE (SUBREG_REG (x)) != VOIDmode)
            new = operand_subword (inner, SUBREG_WORD (x), 0,
                                   GET_MODE (SUBREG_REG (x)));

          if (new == 0 && subreg_lowpart_p (x))
            new = gen_lowpart_common (GET_MODE (x), inner);

          if (new)
            validate_change (insn, loc, new, 1);

          return;
        }
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map);

      /* If a memory address got spoiled, change it back.  */
      if (insn != 0 && num_validated_changes () != num_changes
          && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
        cancel_changes (num_changes);
      return;

    case SET:
      {
        /* Substitute constants in our source, and in any arguments to a
           complex (e.g., ZERO_EXTRACT) destination, but not in the
           destination itself.  */
        rtx *dest_loc = &SET_DEST (x);
        rtx dest = *dest_loc;
        rtx src, tem;

        subst_constants (&SET_SRC (x), insn, map);
        src = SET_SRC (x);

        while (GET_CODE (*dest_loc) == ZERO_EXTRACT
               || GET_CODE (*dest_loc) == SUBREG
               || GET_CODE (*dest_loc) == STRICT_LOW_PART)
          {
            if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
              {
                subst_constants (&XEXP (*dest_loc, 1), insn, map);
                subst_constants (&XEXP (*dest_loc, 2), insn, map);
              }
            dest_loc = &XEXP (*dest_loc, 0);
          }

        /* Do substitute in the address of a destination in memory.  */
        if (GET_CODE (*dest_loc) == MEM)
          subst_constants (&XEXP (*dest_loc, 0), insn, map);

        /* Check for the case of DEST a SUBREG, both it and the underlying
           register are less than one word, and the SUBREG has the wider
           mode.  In that case, we are really setting the underlying register
           to the source converted to the mode of DEST.  So indicate that.  */
        if (GET_CODE (dest) == SUBREG
            && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
            && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
            && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                <= GET_MODE_SIZE (GET_MODE (dest)))
            && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
                                               src)))
          src = tem, dest = SUBREG_REG (dest);

        /* If storing a recognizable value save it for later recording.  */
        if ((map->num_sets < MAX_RECOG_OPERANDS)
            && (CONSTANT_P (src)
                || (GET_CODE (src) == REG
                    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
                || (GET_CODE (src) == PLUS
                    && GET_CODE (XEXP (src, 0)) == REG
                    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
                    && CONSTANT_P (XEXP (src, 1)))
                || GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
                || dest == cc0_rtx
#endif
                || (dest == pc_rtx
                    && (src == pc_rtx || GET_CODE (src) == RETURN
                        || GET_CODE (src) == LABEL_REF))))
          {
            /* Normally, this copy won't do anything.  But, if SRC is a
               COMPARE it will cause us to save the COMPARE with any
               constants substituted, which is what we want for later.  */
            map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
            map->equiv_sets[map->num_sets++].dest = dest;
          }

        return;
      }

    default:
      break;
    }

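  /* Recording pc_rtx destinations above is what lets the caller see a
     branch whose condition folded to a constant: after try_constants,
     map->last_pc_value is either pc_rtx (the branch falls through) or a
     label (the branch is unconditional), and the insn-copying loop in
     expand_inline_function deletes or barriers the copied jump
     accordingly.  */
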
  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
          break;

        case 'e':
          subst_constants (&XEXP (x, i), insn, map);
          break;

        case 'u':
        case 'i':
        case 's':
        case 'w':
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            {
              for (j = 0; j < XVECLEN (x, i); j++)
                subst_constants (&XVECEXP (x, i, j), insn, map);
            }
          break;

        default:
          abort ();
        }
    }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Simplify the expression in case we put in some constants.  */
  switch (GET_RTX_CLASS (code))
    {
    case '1':
      new = simplify_unary_operation (code, GET_MODE (x),
                                      XEXP (x, 0), op0_mode);
      break;

    case '<':
      {
        enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
        if (op_mode == VOIDmode)
          op_mode = GET_MODE (XEXP (x, 1));
        new = simplify_relational_operation (code, op_mode,
                                             XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
        if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
          new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
                 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
                                                 GET_MODE (x)));
#endif
        break;
      }

    case '2':
    case 'c':
      new = simplify_binary_operation (code, GET_MODE (x),
                                       XEXP (x, 0), XEXP (x, 1));
      break;

    case 'b':
    case '3':
      new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
                                        XEXP (x, 0), XEXP (x, 1),
                                        XEXP (x, 2));
      break;
    }

  if (new)
    validate_change (insn, loc, new, 1);
}

/* Show that the registers modified no longer contain known constants.  We
   are called from note_stores with parts of the new insn.  */

void
mark_stores (dest, x)
     rtx dest;
     rtx x;
{
  int regno = -1;
  enum machine_mode mode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
    {
      regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
                      : regno + HARD_REGNO_NREGS (regno, mode) - 1);
      register int i;

      for (i = regno; i <= last_reg; i++)
        if (i < global_const_equiv_map_size)
          global_const_equiv_map[i] = 0;
    }
}

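/* mark_stores has to work through global variables because note_stores
   passes no extra argument to its callback; expand_inline_function points
   global_const_equiv_map at the current map's table just before the
   copying loop begins.  */
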
/* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
   pointed to by PX, they represent constants in the constant pool.
   Replace these with a new memory reference obtained from force_const_mem.
   Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
   address of a constant pool entry.  Replace them with the address of
   a new constant pool entry obtained from force_const_mem.  */

static void
restore_constants (px)
     rtx *px;
{
  rtx x = *px;
  register int i, j;
  char *fmt;

  if (x == 0)
    return;

  if (GET_CODE (x) == CONST_DOUBLE)
    {
      /* We have to make a new CONST_DOUBLE to ensure that we account for
         it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, x);
          *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
        }
      else
        *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
                                  VOIDmode);
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
    {
      restore_constants (&XEXP (x, 0));
      *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
    {
      /* This must be (subreg/i:M1 (const/i:M2 ...) 0).  */
      rtx new = XEXP (SUBREG_REG (x), 0);

      restore_constants (&new);
      new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
      PUT_MODE (new, GET_MODE (x));
      *px = validize_mem (new);
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
    {
      restore_constants (&XEXP (x, 0));
      *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);
    }
  else
    {
      fmt = GET_RTX_FORMAT (GET_CODE (x));
      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
        {
          switch (*fmt++)
            {
            case 'E':
              for (j = 0; j < XVECLEN (x, i); j++)
                restore_constants (&XVECEXP (x, i, j));
              break;

            case 'e':
              restore_constants (&XEXP (x, i));
              break;
            }
        }
    }
}

/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (stmt)
     register tree stmt;
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
        register tree local_decl;

        for (local_decl = BLOCK_VARS (stmt);
             local_decl != NULL_TREE;
             local_decl = TREE_CHAIN (local_decl))
          set_decl_origin_self (local_decl);    /* Potential recursion.  */
      }

      {
        register tree subblock;

        for (subblock = BLOCK_SUBBLOCKS (stmt);
             subblock != NULL_TREE;
             subblock = BLOCK_CHAIN (subblock))
          set_block_origin_self (subblock);     /* Recurse.  */
      }
    }
}

/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

static void
set_decl_origin_self (decl)
     register tree decl;
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
        {
          register tree arg;

          for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
            DECL_ABSTRACT_ORIGIN (arg) = arg;
          if (DECL_INITIAL (decl) != NULL_TREE
              && DECL_INITIAL (decl) != error_mark_node)
            set_block_origin_self (DECL_INITIAL (decl));
        }
    }
}

/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  register tree local_decl;
  register tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     register tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
        DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
          && DECL_INITIAL (decl) != error_mark_node)
        set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}

/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  rtx head;
  rtx last;
  int save_flag_no_inline = flag_no_inline;

  if (output_bytecode)
    {
      warning ("`inline' ignored for bytecode output");
      return;
    }

  /* Things we allocate from here on are part of this function, not
     permanent.  */
  temporary_allocation ();

  head = DECL_SAVED_INSNS (fndecl);
  current_function_decl = fndecl;

  /* This call is only used to initialize global variables.  */
  init_function_start (fndecl, "lossage", 1);

  /* Redo parameter determinations in case the FUNCTION_...
     macros took machine-specific actions that need to be redone.  */
  assign_parms (fndecl, 1);

  /* Set stack frame size.  */
  assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);

  /* The first is a bit of a lie (the array may be larger), but doesn't
     matter too much and it isn't worth saving the actual bound.  */
  reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
  regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
  regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
  regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);

  stack_slot_list = STACK_SLOT_LIST (head);
  forced_labels = FORCED_LABELS (head);

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
    current_function_calls_alloca = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
    current_function_calls_setjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
    current_function_calls_longjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
    current_function_returns_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
    current_function_returns_pcc_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    current_function_needs_context = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
    current_function_has_nonlocal_label = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
    current_function_returns_pointer = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
    current_function_uses_const_pool = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
  current_function_pops_args = POPS_ARGS (head);

  /* This is the only thing the expand_function_end call that used to be
     here actually does and that call can cause problems.  */
  immediate_size_expand--;

  /* Find last insn and rebuild the constant pool.  */
  for (last = FIRST_PARM_INSN (head);
       NEXT_INSN (last); last = NEXT_INSN (last))
    {
      if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
        {
          restore_constants (&PATTERN (last));
          restore_constants (&REG_NOTES (last));
        }
    }

  set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
  set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */

  set_decl_origin_self (fndecl);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* Integrating function calls isn't safe anymore, so turn on
     flag_no_inline.  */
  flag_no_inline = 1;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  /* Reset flag_no_inline to its original value.  */
  flag_no_inline = save_flag_no_inline;

  current_function_decl = 0;
}