/* Procedure integration for GCC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "tm_p.h"
31 #include "regs.h"
32 #include "flags.h"
33 #include "debug.h"
34 #include "insn-config.h"
35 #include "expr.h"
36 #include "output.h"
37 #include "recog.h"
38 #include "integrate.h"
39 #include "real.h"
40 #include "except.h"
41 #include "function.h"
42 #include "toplev.h"
43 #include "intl.h"
44 #include "loop.h"
45 #include "params.h"
46 #include "ggc.h"
47 #include "target.h"
48 #include "langhooks.h"
/* Round VALUE up to the next integer that satisfies the ALIGN alignment
   (ALIGN must be a power of two).  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
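/* For instance, assuming ALIGN is a power of two as required above:
   CEIL_ROUND (13, 8) is (13 + 7) & ~7, i.e. 16, and CEIL_ROUND (16, 8)
   stays 16.  The mask trick does not work for other alignments.  */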
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
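/* Worked example (not part of the original source): for a two-argument
   function, list_length (DECL_ARGUMENTS (DECL)) is 2, so the default
   threshold is 1 + (3 * 2) / 2 = 4 insns when optimizing for size, and
   8 * (8 + 2) = 80 insns otherwise.  */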
/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;
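/* The GTY ((length ("%h.num_entries"))) marker tells the garbage
   collector that ENTRIES points to an array of NUM_ENTRIES elements,
   so the HARD_REG and PSEUDO rtxs of each live pair get marked.  */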
static void setup_initial_hard_reg_value_integration PARAMS ((struct function *, struct inline_remap *));

static rtvec initialize_for_inline PARAMS ((tree));
static void note_modified_parmregs PARAMS ((rtx, rtx, void *));
static void integrate_parm_decls PARAMS ((tree, struct inline_remap *,
					  rtvec));
static tree integrate_decl_tree PARAMS ((tree,
					 struct inline_remap *));
static void subst_constants PARAMS ((rtx *, rtx,
				     struct inline_remap *, int));
static void set_block_origin_self PARAMS ((tree));
static void set_block_abstract_flags PARAMS ((tree, int));
static void process_reg_param PARAMS ((struct inline_remap *, rtx,
				       rtx));
static void mark_stores PARAMS ((rtx, rtx, void *));
static void save_parm_insns PARAMS ((rtx, rtx));
static void copy_insn_list PARAMS ((rtx, struct inline_remap *,
				    rtx));
static void copy_insn_notes PARAMS ((rtx, struct inline_remap *,
				     int));
static int compare_blocks PARAMS ((const void *, const void *));
static int find_block PARAMS ((const void *, const void *));
/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable points to the struct function for the function being
   inlined.  */
static struct function *inlining = 0;
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
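/* Note that this lazy scheme only works because callers zero the relevant
   slice of label_map up front; expand_inline_function below does exactly
   that with a memset before any label is requested.  */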
/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
bool
function_attribute_inlinable_p (fndecl)
     tree fndecl;
{
  if (targetm.attribute_table)
    {
      tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
	{
	  tree name = TREE_PURPOSE (a);
	  int i;

	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
	    if (is_attribute_p (targetm.attribute_table[i].name, name))
	      return (*targetm.function_attribute_inlinable_p) (fndecl);
	}
    }

  return true;
}
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (fndecl)
     tree fndecl;
{
  rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline increase the maximum size to
     MAX_INLINE_INSNS_RTL (--param max-inline-insns-rtl=<n>).  For
     regular functions use the limit given by INTEGRATE_THRESHOLD.
     Note that the RTL inliner is not used by the languages that use
     the tree inliner (C, C++).  */

  int max_insns = (DECL_INLINE (fndecl))
		  ? (MAX_INLINE_INSNS_RTL
		     + 8 * list_length (DECL_ARGUMENTS (fndecl)))
		  : INTEGRATE_THRESHOLD (fndecl);

  int ninsns = 0;
  tree parms;
  if (DECL_UNINLINABLE (fndecl))
    return N_("function cannot be inline");

  /* No inlines with varargs.  */
  if (last && TREE_VALUE (last) != void_type_node)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_calls_eh_return)
    return N_("function uses __builtin_eh_return");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");
#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return N_("no prototype, and parameter address used; cannot be inline");
    }
#endif
  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
      && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return N_("function with varying-size parameter cannot be inline");
      else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
	       && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return N_("function with transparent union parameter cannot be inline");
    }
  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  ninsns++;

      if (ninsns >= max_insns)
	return N_("function too large to be inline");
    }
  /* We will not inline a function which uses computed goto.  The addresses
     of its local labels, which may be tucked into global storage, are of
     course not constant across instantiations, which causes unexpected
     behavior.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* We can't inline functions that return a PARALLEL rtx.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      rtx result = DECL_RTL (DECL_RESULT (fndecl));
      if (GET_CODE (result) == PARALLEL)
	return N_("inline functions not supported for this return value type");
    }

  /* If the function has a target specific attribute attached to it,
     then we assume that we should not inline it.  This can be overridden
     by the target if it defines TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P.  */
  if (!function_attribute_inlinable_p (fndecl))
    return N_("function with target specific attribute(s) cannot be inlined");

  return NULL;
}
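/* For reference (an assumption about the caller, not something defined in
   this file): rest_of_compilation typically reports the msgid returned
   above via something like warning_with_decl (decl, lose), which supplies
   the function's name for the %s.  */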
/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
/* Subroutine for `save_for_inline'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
	 otherwise the copy_rtx call below will not unshare the MEM since
	 it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	  && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}
/* Copy NODE (which must be a DECL, but not a PARM_DECL).  The DECL
   originally was in the FROM_FN, but now it will be in the
   TO_FN.  */

tree
copy_decl_for_inlining (decl, from_fn, to_fn)
     tree decl;
     tree from_fn;
     tree to_fn;
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      tree type;
      int invisiref = 0;

      /* See if the frontend wants to pass this by invisible reference.  */
      if (TREE_CODE (decl) == PARM_DECL
	  && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
	  && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
	  && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
	{
	  invisiref = 1;
	  type = DECL_ARG_TYPE (decl);
	}
      else
	type = TREE_TYPE (decl);

      /* For a parameter, we must make an equivalent VAR_DECL, not a
	 new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      if (!invisiref)
	{
	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
	  TREE_READONLY (copy) = TREE_READONLY (decl);
	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
	}
      else
	{
	  TREE_ADDRESSABLE (copy) = 0;
	  TREE_READONLY (copy) = 1;
	  TREE_THIS_VOLATILE (copy) = 0;
	}
    }
  else
    {
      copy = copy_node (decl);
      (*lang_hooks.dup_lang_specific_decl) (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	TREE_ADDRESSABLE (copy) = 0;
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL yet.  */
  if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
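/* Illustrative example (not from the original source): a C++ front end may
   pass a class object T by invisible reference, in which case
   DECL_ARG_TYPE (decl) is `T *' while TREE_TYPE (decl) is `T'.  The test
   above detects exactly that shape, and the copy becomes a read-only
   VAR_DECL of the pointer type.  */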
/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */
  if (! flag_no_inline)
    parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  if (! flag_no_inline)
    argvec = initialize_for_inline (fndecl);
  else
    argvec = NULL;

  /* Delete the basic block notes created by an early run of
     find_basic_blocks.  If they were left in, a later run of
     find_basic_blocks would try to reuse the memory for basic_block
     structures on an already freed obstack.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE
	&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)
      delete_related_insns (insn);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  if (! flag_no_inline)
    {
      /* Get the insn which signals the end of parameter setup code.  */
      first_nonparm_insn = get_first_nonparm_insn ();

      /* Now just scan the chain of insns to see what happens to our
	 PARM_DECLs.  If a PARM_DECL is used but never modified, we
	 can substitute its rtl directly when expanding inline (and
	 perform constant folding when its incoming value is
	 constant).  Otherwise, we have to copy its value into a new
	 register and track the new register's life.  */
      in_nonparm_insns = 0;
      save_parm_insns (insn, first_nonparm_insn);

      cfun->inl_max_label_num = max_label_num ();
      cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
      cfun->original_arg_vector = argvec;
    }
  cfun->original_decl_initial = DECL_INITIAL (fndecl);
  cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
  DECL_SAVED_INSNS (fndecl) = cfun;

  /* Clean up.  */
  if (! flag_no_inline)
    free (parmdecl_map);
}
/* Scan the chain of insns to see what happens to our PARM_DECLs.  If a
   PARM_DECL is used but never modified, we can substitute its rtl directly
   when expanding inline (and perform constant folding when its incoming
   value is constant).  Otherwise, we have to copy its value into a new
   register and track the new register's life.  */

static void
save_parm_insns (insn, first_nonparm_insn)
     rtx insn;
     rtx first_nonparm_insn;
{
  if (insn == NULL_RTX)
    return;

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (INSN_P (insn))
	{
	  /* Record what interesting things happen to our parameters.  */
	  note_stores (PATTERN (insn), note_modified_parmregs, NULL);

	  /* If this is a CALL_PLACEHOLDER insn then we need to look into the
	     three attached sequences: normal call, sibling call and tail
	     recursion.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      int i;

	      for (i = 0; i < 3; i++)
		save_parm_insns (XEXP (PATTERN (insn), i),
				 first_nonparm_insn);
	    }
	}
    }
}
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x, data)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT	\
   && GET_CODE (XEXP (X, 0)) == REG				\
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER		\
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
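/* As an illustration (not from the original source), FIXED_BASE_PLUS_P
   matches an address such as

	(plus (reg:SI virtual-stack-vars) (const_int 4))

   i.e. a constant offset from one of the fixed virtual registers, which
   is stable for the lifetime of the frame and so is safe to record as a
   constant equivalence.  */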
/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */
static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
	  && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
	  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
	SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}
/* Compare two BLOCKs for qsort.  The key we sort on is the
   BLOCK_ABSTRACT_ORIGIN of the blocks.  We cannot just subtract the
   two pointers, because the difference may not fit in an int.  */

static int
compare_blocks (v1, v2)
     const void *v1;
     const void *v2;
{
  tree b1 = *((const tree *) v1);
  tree b2 = *((const tree *) v2);
  char *p1 = (char *) BLOCK_ABSTRACT_ORIGIN (b1);
  char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);

  if (p1 == p2)
    return 0;
  return p1 < p2 ? -1 : 1;
}
/* Compare two BLOCKs for bsearch.  The first pointer corresponds to
   an original block; the second to a remapped equivalent.  */

static int
find_block (v1, v2)
     const void *v1;
     const void *v2;
{
  const union tree_node *b1 = (const union tree_node *) v1;
  tree b2 = *((const tree *) v2);
  char *p1 = (char *) b1;
  char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);

  if (p1 == p2)
    return 0;
  return p1 < p2 ? -1 : 1;
}
/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */
rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
	       ? NEXT_INSN (inl_f->inl_last_parm_insn)
	       : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  int max_regno;
  int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
  rtvec arg_vector = inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;
  int eh_region_offset;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  /* Pull out the decl for the function definition; fndecl may be a
     local declaration, which would break DECL_ABSTRACT_ORIGIN.  */
  fndecl = inl_f->decl;

  nargs = list_length (DECL_ARGUMENTS (fndecl));
  if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
    cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;

  /* Check that the parms' types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (size_t) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (arg == error_mark_node
	  || mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (size_t) -1;
    }
  /* If there is a TARGET which is a readonly BLKmode MEM and DECL_RESULT
     is also a mem, we are going to lose the readonly on the stores, so don't
     inline.  */
  if (target != 0 && GET_CODE (target) == MEM && GET_MODE (target) == BLKmode
      && RTX_UNCHANGING_P (target) && DECL_RTL_SET_P (DECL_RESULT (fndecl))
      && GET_CODE (DECL_RTL (DECL_RESULT (fndecl))) == MEM)
    return (rtx) (size_t) -1;

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
  arg_trees = (tree *) xmalloc (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);

	  store_expr (arg, stack_slot, 0);
	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    {
	      int unsignedp = TREE_UNSIGNED (TREE_TYPE (formal));
	      enum machine_mode pmode = TYPE_MODE (TREE_TYPE (formal));

	      pmode = promote_mode (TREE_TYPE (formal), pmode,
				    &unsignedp, 0);

	      if (GET_MODE (loc) != pmode)
		abort ();

	      /* The modes of LOC and ARG can differ if LOC was a variable
		 that had its mode promoted via PROMOTED_MODE.  */
	      arg_vals[i] = convert_modes (pmode,
					   TYPE_MODE (TREE_TYPE (arg)),
					   expand_expr (arg, NULL_RTX, mode,
							EXPAND_SUM),
					   unsignedp);
	    }
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      /* If the formal type was const but the actual was not, we might
	 end up here with an rtx wrongly tagged unchanging in the caller's
	 context.  Fix that.  */
      if (arg_vals[i] != 0
	  && (GET_CODE (arg_vals[i]) == REG || GET_CODE (arg_vals[i]) == MEM)
	  && ! TREE_READONLY (TREE_VALUE (actual)))
	RTX_UNCHANGING_P (arg_vals[i]) = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && POINTER_TYPE_P (TREE_TYPE (formal)))
	mark_reg_pointer (arg_vals[i],
			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
    }
  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) xcalloc (1, sizeof (struct inline_remap));
  map->fndecl = fndecl;

  VARRAY_TREE_INIT (map->block_map, 10, "block_map");
  map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;
  map->local_return_label = NULL_RTX;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;
  map->compare_src = NULL_RTX;
  map->compare_mode = VOIDmode;

  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
			   (max_reg_num ()
			    + (max_regno - FIRST_PSEUDO_REGISTER)
			    + 15 * nargs
			    + 10),
			   "expand_inline_function");
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NOTE_INSN_DELETED);
  map->regno_pointer_align = inl_f->emit->regno_pointer_align;
  map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);

  /* If the inlined function calls __builtin_constant_p, then we'll
     need to call purge_builtin_constant_p on this function.  */
  if (inl_f->calls_constant_p)
    current_function_calls_constant_p = 1;

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note_copy (parm_insns);

      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }
  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes: In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */
946 for (i = 0; i < nargs; i++)
948 rtx copy = arg_vals[i];
950 loc = RTVEC_ELT (arg_vector, i);
952 /* There are three cases, each handled separately. */
953 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
954 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
956 /* This must be an object passed by invisible reference (it could
957 also be a variable-sized object, but we forbid inlining functions
958 with variable-sized arguments). COPY is the address of the
959 actual value (this computation will cause it to be copied). We
960 map that address for the register, noting the actual address as
961 an equivalent in case it can be substituted into the insns. */
963 if (GET_CODE (copy) != REG)
965 temp = copy_addr_to_reg (copy);
966 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
967 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
968 copy = temp;
970 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
972 else if (GET_CODE (loc) == MEM)
974 /* This is the case of a parameter that lives in memory. It
975 will live in the block we allocate in the called routine's
976 frame that simulates the incoming argument area. Do nothing
977 with the parameter now; we will call store_expr later. In
978 this case, however, we must ensure that the virtual stack and
979 incoming arg rtx values are expanded now so that we can be
980 sure we have enough slots in the const equiv map since the
981 store_expr call can easily blow the size estimate. */
982 if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
983 copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
985 else if (GET_CODE (loc) == REG)
986 process_reg_param (map, loc, copy);
987 else if (GET_CODE (loc) == CONCAT)
989 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
990 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
991 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
992 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
994 process_reg_param (map, locreal, copyreal);
995 process_reg_param (map, locimag, copyimag);
997 else
998 abort ();
  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_line_note (DECL_SOURCE_FILE (formal),
				     DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }
  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't have any special handling for
     REG_FUNCTION_RETURN_VALUE_P.  */

  map->inline_target = 0;
  loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
	 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
	{
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  target = temp;
	}
      else
	{
	  if (! structure_value_addr
	      || ! aggregate_value_p (DECL_RESULT (fndecl)))
	    abort ();

	  /* Pass the function the address in which to return a structure
	     value.  Note that a constructor can cause someone to call us
	     with STRUCTURE_VALUE_ADDR, but the initialization takes place
	     via the first parameter, rather than the struct return address.

	     We have two cases: If the address is a simple register
	     indirect, use the mapping mechanism to point that register to
	     our structure return address.  Otherwise, store the structure
	     return value into the place that it will be referenced from.  */

	  if (GET_CODE (XEXP (loc, 0)) == REG)
	    {
	      temp = force_operand (structure_value_addr, NULL_RTX);
	      temp = force_reg (Pmode, temp);
	      /* A virtual register might be invalid in an insn, because
		 it can cause trouble in reload.  Since we don't have access
		 to the expanders at map translation time, make sure we have
		 a proper register now.
		 If a virtual register is actually valid, cse or combine
		 can put it into the mapped insns.  */
	      if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
		  && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
		temp = copy_to_mode_reg (Pmode, temp);
	      map->reg_map[REGNO (XEXP (loc, 0))] = temp;

	      if (CONSTANT_P (structure_value_addr)
		  || GET_CODE (structure_value_addr) == ADDRESSOF
		  || (GET_CODE (structure_value_addr) == PLUS
		      && (XEXP (structure_value_addr, 0)
			  == virtual_stack_vars_rtx)
		      && (GET_CODE (XEXP (structure_value_addr, 1))
			  == CONST_INT)))
		{
		  SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
					CONST_AGE_PARM);
		}
	    }
	  else
	    {
	      temp = copy_rtx_and_substitute (loc, map, 1);
	      subst_constants (&temp, NULL_RTX, map, 0);
	      apply_change_group ();
	      emit_move_insn (temp, structure_value_addr);
	    }
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode the function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).
	 We have to use the mode of the result's RTL, rather than
	 its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	{
	  /* Don't make BLKmode registers.  If this looks like
	     a BLKmode object being returned in a register, get
	     the mode from that, otherwise abort.  */
	  if (departing_mode == BLKmode)
	    {
	      if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
		{
		  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
		  arriving_mode = departing_mode;
		}
	      else
		abort ();
	    }

	  target = gen_reg_rtx (departing_mode);
	}

      /* If function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	{
	  /* Avoid creating a paradoxical subreg wider than
	     BITS_PER_WORD, since that is illegal.  */
	  if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
	    {
	      if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
					  GET_MODE_BITSIZE (arriving_mode)))
		/* Maybe could be handled by using convert_move () ?  */
		abort ();
	      reg_to_map = gen_reg_rtx (arriving_mode);
	      target = gen_lowpart (departing_mode, reg_to_map);
	    }
	  else
	    reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
	}
      else
	reg_to_map = target;

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else if (GET_CODE (loc) == CONCAT)
    {
      enum machine_mode departing_mode = TYPE_MODE (type);
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));

      if (departing_mode != arriving_mode)
	abort ();
      if (GET_CODE (XEXP (loc, 0)) != REG
	  || GET_CODE (XEXP (loc, 1)) != REG)
	abort ();

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	target = gen_reg_rtx (departing_mode);

      if (GET_CODE (target) != CONCAT)
	abort ();

      map->reg_map[REGNO (XEXP (loc, 0))] = XEXP (target, 0);
      map->reg_map[REGNO (XEXP (loc, 1))] = XEXP (target, 1);
    }
  else
    abort ();
  /* Remap the exception handler data pointer from one to the other.  */
  temp = get_exception_pointer (inl_f);
  if (temp)
    map->reg_map[REGNO (temp)] = get_exception_pointer (cfun);

  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  memset ((char *) &map->label_map[min_labelno], 0,
	  (max_labelno - min_labelno) * sizeof (rtx));

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */
  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  block = integrate_decl_tree (inl_f->original_decl_initial, map);
  BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
  inline_function_decl = 0;

  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  expand_start_bindings_and_block (0, block);

  /* Sort the block-map so that it will be easy to find remapped
     blocks later.  */
  qsort (&VARRAY_TREE (map->block_map, 0),
	 map->block_map->elements_used,
	 sizeof (tree),
	 compare_blocks);
  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Map pseudos used for initial hard reg values.  */
  setup_initial_hard_reg_value_integration (inl_f, map);

  /* Now copy the insns one by one.  */
  copy_insn_list (insns, map, static_chain_value);

  /* Duplicate the EH regions.  This will create an offset from the
     region numbers in the function we're inlining to the region
     numbers in the calling function.  This must wait until after
     copy_insn_list, as we need the insn map to be complete.  */
  eh_region_offset = duplicate_eh_regions (inl_f, map);

  /* Now copy the REG_NOTES for those insns.  */
  copy_insn_notes (insns, map, eh_region_offset);

  /* If the insn sequence required one, emit the return label.  */
  if (map->local_return_label)
    emit_label (map->local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
  if (! cfun->x_whole_function_mode_p)
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode, since
       the superblocks have not been created yet.  */
    (*lang_hooks.decls.insert_block) (block);
  else
    {
      BLOCK_CHAIN (block)
	= BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
      BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
    }

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined.  */
  expand_end_bindings (NULL_TREE, 1, 1);
  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, input_line);

  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (target
      && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      set_mem_attributes (target, type, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}
/* Make copies of each insn in the given list using the mapping
   computed in expand_inline_function.  This function may call itself for
   insns containing sequences.

   Copying is done in two passes, first the insns and then their REG_NOTES.

   If static_chain_value is nonzero, it represents the context-pointer
   register for the function.  */
static void
copy_insn_list (insns, map, static_chain_value)
     rtx insns;
     struct inline_remap *map;
     rtx static_chain_value;
{
  int i;
  rtx insn;
  rtx temp;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtx static_chain_mem = 0;

  /* Copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      /* If the source and destination are the same and it
		 has a note on it, keep the insn.  */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	      else
		break;
	    }
	  /* Similarly if an ignored return value is clobbered.  */
	  else if (map->inline_target == 0
		   && GET_CODE (pattern) == CLOBBER
		   && GET_CODE (XEXP (pattern, 0)) == REG
		   && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    break;

	  /* Look for the address of the static chain slot.  The
	     rtx_equal_p comparisons against the
	     static_chain_incoming_rtx below may fail if the static
	     chain is in memory and the address specified is not
	     "legitimate".  This happens on Xtensa where the static
	     chain is at a negative offset from argp and where only
	     positive offsets are legitimate.  When the RTL is
	     generated, the address is "legitimized" by copying it
	     into a register, causing the rtx_equal_p comparisons to
	     fail.  This workaround looks for code that sets a
	     register to the address of the static chain.  Subsequent
	     memory references via that register can then be
	     identified as static chain references.  We assume that
	     the register is only assigned once, and that the static
	     chain address is only live in one register at a time.  */

	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (static_chain_incoming_rtx) == MEM
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_SRC (set),
				   XEXP (static_chain_incoming_rtx, 0)))
	    {
	      static_chain_mem =
		gen_rtx_MEM (GET_MODE (static_chain_incoming_rtx),
			     SET_DEST (set));

	      /* Emit the instruction in case it is used for something
		 other than setting the static chain; if it's not used,
		 it can always be removed as dead code.  */
	      copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	    }

	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && (rtx_equal_p (SET_DEST (set),
				    static_chain_incoming_rtx)
		       || (static_chain_mem
			   && rtx_equal_p (SET_DEST (set), static_chain_mem))))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && (rtx_equal_p (SET_SRC (set),
				    static_chain_incoming_rtx)
		       || (static_chain_mem
			   && rtx_equal_p (SET_SRC (set), static_chain_mem))))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);

	      copy = emit_move_insn (newdest, static_chain_value);
	      if (GET_CODE (static_chain_incoming_rtx) != MEM)
		static_chain_value = 0;
	    }
	  /* If this is setting the virtual stack vars register, this must
	     be the code at the handler for a builtin longjmp.  The value
	     saved in the setjmp buffer will be the address of the frame
	     we've made for this inlined instance within our frame.  But we
	     know the offset of that value so we can use it to reconstruct
	     our virtual stack vars register from that value.  If we are
	     copying it from the stack pointer, leave it unchanged.  */
	  else if (set != 0
		   && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
	    {
	      HOST_WIDE_INT offset;
	      temp = map->reg_map[REGNO (SET_DEST (set))];
	      temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					 REGNO (temp)).rtx;

	      if (rtx_equal_p (temp, virtual_stack_vars_rtx))
		offset = 0;
	      else if (GET_CODE (temp) == PLUS
		       && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
		       && GET_CODE (XEXP (temp, 1)) == CONST_INT)
		offset = INTVAL (XEXP (temp, 1));
	      else
		abort ();

	      if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
		temp = SET_SRC (set);
	      else
		temp = force_operand (plus_constant (SET_SRC (set),
						     - offset),
				      NULL_RTX);

	      copy = emit_move_insn (virtual_stack_vars_rtx, temp);
	    }
	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	  /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to
	     emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
	  break;
	case JUMP_INSN:
	  if (map->integrating && returnjump_p (insn))
	    {
	      if (map->local_return_label == 0)
		map->local_return_label = gen_label_rtx ();
	      pattern = gen_jump (map->local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);

	  copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);
	  INSN_LOCATOR (copy) = INSN_LOCATOR (insn);

	  /* If this used to be a conditional jump insn whose branch
	     direction is now known, we must do something special.  */
	  if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* If the previous insn set cc0 for us, delete it.  */
	      if (only_sets_cc0_p (PREV_INSN (copy)))
		delete_related_insns (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_related_insns (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is unconditional jump so we must put a
		   BARRIER after it.  We could do some dead code elimination
		   here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;
	case CALL_INSN:
	  /* If this is a CALL_PLACEHOLDER insn then we need to copy the
	     three attached sequences: normal call, sibling call and tail
	     recursion.  */
	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      rtx sequence[3];
	      rtx tail_label;

	      for (i = 0; i < 3; i++)
		{
		  rtx seq;

		  sequence[i] = NULL_RTX;
		  seq = XEXP (PATTERN (insn), i);
		  if (seq)
		    {
		      start_sequence ();
		      copy_insn_list (seq, map, static_chain_value);
		      sequence[i] = get_insns ();
		      end_sequence ();
		    }
		}

	      /* Find the new tail recursion label.
		 It will already be substituted into sequence[2].  */
	      tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
						    map, 0);

	      copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
							       sequence[0],
							       sequence[1],
							       sequence[2],
							       tail_label));
	      break;
	    }

	  pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
	  copy = emit_call_insn (pattern);

	  SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
	  CONST_OR_PURE_CALL_P (copy) = CONST_OR_PURE_CALL_P (insn);
	  INSN_LOCATOR (copy) = INSN_LOCATOR (insn);

	  /* Because the USAGE information potentially contains objects other
	     than hard registers, we need to copy it.  */

	  CALL_INSN_FUNCTION_USAGE (copy)
	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
				       map, 0);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
	  break;
	case CODE_LABEL:
	  copy = emit_label (get_label_from_map (map,
						 CODE_LABEL_NUMBER (insn)));
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  map->const_age++;
	  break;

	case BARRIER:
	  copy = emit_barrier ();
	  break;
1669 case NOTE:
1670 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)
1672 copy = emit_label (get_label_from_map (map,
1673 CODE_LABEL_NUMBER (insn)));
1674 LABEL_NAME (copy) = NOTE_SOURCE_FILE (insn);
1675 map->const_age++;
1676 break;
1679 /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
1680 discarded because it is important to have only one of
1681 each in the current function.
1683 NOTE_INSN_DELETED notes aren't useful. */
1685 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1686 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1687 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1689 copy = emit_note_copy (insn);
1690 if (!copy)
1691 /* Copied a line note, but line numbering is off. */;
1692 else if ((NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
1693 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
1694 && NOTE_BLOCK (insn))
1696 tree *mapped_block_p;
1698 mapped_block_p
1699 = (tree *) bsearch (NOTE_BLOCK (insn),
1700 &VARRAY_TREE (map->block_map, 0),
1701 map->block_map->elements_used,
1702 sizeof (tree),
1703 find_block);
1705 if (!mapped_block_p)
1706 abort ();
1707 else
1708 NOTE_BLOCK (copy) = *mapped_block_p;
1710 else if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
1711 NOTE_EXPECTED_VALUE (copy)
1712 = copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn),
1713 map, 0);
1715 else
1716 copy = 0;
1717 break;
1719 default:
1720 abort ();
1723 if (copy)
1724 RTX_INTEGRATED_P (copy) = 1;
1726 map->insn_map[INSN_UID (insn)] = copy;
1730 /* Copy the REG_NOTES. Increment const_age, so that only constants
1731 from parameters can be substituted in. These are the only ones
1732 that are valid across the entire function. */
1734 static void
1735 copy_insn_notes (insns, map, eh_region_offset)
1736 rtx insns;
1737 struct inline_remap *map;
1738 int eh_region_offset;
1740 rtx insn, new_insn;
1742 map->const_age++;
1743 for (insn = insns; insn; insn = NEXT_INSN (insn))
1745 if (! INSN_P (insn))
1746 continue;
1748 new_insn = map->insn_map[INSN_UID (insn)];
1749 if (! new_insn)
1750 continue;
1752 if (REG_NOTES (insn))
1754 rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);
1756 /* We must also do subst_constants, in case one of our parameters
1757 has const type and constant value. */
1758 subst_constants (&note, NULL_RTX, map, 0);
1759 apply_change_group ();
1760 REG_NOTES (new_insn) = note;
1762 /* Delete any REG_LABEL notes from the chain. Remap any
1763 REG_EH_REGION notes. */
1764 for (; note; note = next)
1766 next = XEXP (note, 1);
1767 if (REG_NOTE_KIND (note) == REG_LABEL)
1768 remove_note (new_insn, note);
1769 else if (REG_NOTE_KIND (note) == REG_EH_REGION
1770 && INTVAL (XEXP (note, 0)) > 0)
1771 XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
1772 + eh_region_offset);
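/* For example, with eh_region_offset == 5, a copied insn whose note is
(REG_EH_REGION (const_int 2)) ends up with (REG_EH_REGION (const_int 7));
notes with values <= 0 are deliberately left alone by the test above.
(The numbers are illustrative.) */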
1776 if (GET_CODE (insn) == CALL_INSN
1777 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1779 int i;
1780 for (i = 0; i < 3; i++)
1781 copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
1784 if (GET_CODE (insn) == JUMP_INSN
1785 && GET_CODE (PATTERN (insn)) == RESX)
1786 XINT (PATTERN (new_insn), 0) += eh_region_offset;
1790 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1791 push all of those decls and give each one the corresponding home. */
1793 static void
1794 integrate_parm_decls (args, map, arg_vector)
1795 tree args;
1796 struct inline_remap *map;
1797 rtvec arg_vector;
1799 tree tail;
1800 int i;
1802 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1804 tree decl = copy_decl_for_inlining (tail, map->fndecl,
1805 current_function_decl);
1806 rtx new_decl_rtl
1807 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);
1809 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1810 here, but that's going to require some more work. */
1811 /* DECL_INCOMING_RTL (decl) = ?; */
1812 /* Fully instantiate the address with the equivalent form so that the
1813 debugging information contains the actual register, instead of the
1814 virtual register. Do this by not passing an insn to
1815 subst_constants. */
1816 subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
1817 apply_change_group ();
1818 SET_DECL_RTL (decl, new_decl_rtl);
1822 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1823 current function a tree of contexts isomorphic to the one that is given.
1825 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1826 registers used in the DECL_RTL field should be remapped. If it is zero,
1827 no mapping is necessary. */
1829 static tree
1830 integrate_decl_tree (let, map)
1831 tree let;
1832 struct inline_remap *map;
1834 tree t;
1835 tree new_block;
1836 tree *next;
1838 new_block = make_node (BLOCK);
1839 VARRAY_PUSH_TREE (map->block_map, new_block);
1840 next = &BLOCK_VARS (new_block);
1842 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1844 tree d;
1846 d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
1848 if (DECL_RTL_SET_P (t))
1850 rtx r;
1852 SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));
1854 /* Fully instantiate the address with the equivalent form so that the
1855 debugging information contains the actual register, instead of the
1856 virtual register. Do this by not passing an insn to
1857 subst_constants. */
1858 r = DECL_RTL (d);
1859 subst_constants (&r, NULL_RTX, map, 1);
1860 SET_DECL_RTL (d, r);
1862 apply_change_group ();
1865 /* Add this declaration to the list of variables in the new
1866 block. */
1867 *next = d;
1868 next = &TREE_CHAIN (d);
1871 next = &BLOCK_SUBBLOCKS (new_block);
1872 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1874 *next = integrate_decl_tree (t, map);
1875 BLOCK_SUPERCONTEXT (*next) = new_block;
1876 next = &BLOCK_CHAIN (*next);
1879 TREE_USED (new_block) = TREE_USED (let);
1880 BLOCK_ABSTRACT_ORIGIN (new_block) = let;
1882 return new_block;
1885 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1886 except for those few rtx codes that are sharable.
1888 We always return an rtx that is similar to that incoming rtx, with the
1889 exception of possibly changing a REG to a SUBREG or vice versa. No
1890 rtl is ever emitted.
1892 If FOR_LHS is nonzero, it means we are processing something that will
1893 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
1894 inlining since we need to be conservative in how it is set for
1895 such cases.
1897 Handle constants that need to be placed in the constant pool by
1898 calling `force_const_mem'. */
1900 rtx
1901 copy_rtx_and_substitute (orig, map, for_lhs)
1902 rtx orig;
1903 struct inline_remap *map;
1904 int for_lhs;
1906 rtx copy, temp;
1907 int i, j;
1908 RTX_CODE code;
1909 enum machine_mode mode;
1910 const char *format_ptr;
1911 int regno;
1913 if (orig == 0)
1914 return 0;
1916 code = GET_CODE (orig);
1917 mode = GET_MODE (orig);
1919 switch (code)
1921 case REG:
1922 /* If the stack pointer register shows up, it must be part of
1923 stack-adjustments (*not* because we eliminated the frame pointer!).
1924 Small hard registers are returned as-is. Pseudo-registers
1925 go through their `reg_map'. */
1926 regno = REGNO (orig);
1927 if (regno <= LAST_VIRTUAL_REGISTER
1928 || (map->integrating
1929 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1931 /* Some hard registers are also mapped,
1932 but others are not translated. */
1933 if (map->reg_map[regno] != 0)
1934 return map->reg_map[regno];
1936 /* If this is the virtual frame pointer, make space in current
1937 function's stack frame for the stack frame of the inline function.
1939 Copy the address of this area into a pseudo. Map
1940 virtual_stack_vars_rtx to this pseudo and set up a constant
1941 equivalence for it to be the address. This will substitute the
1942 address into insns where it can be substituted and use the new
1943 pseudo where it can't. */
1944 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1946 rtx loc, seq;
1947 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1948 #ifdef FRAME_GROWS_DOWNWARD
1949 int alignment
1950 = (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1951 / BITS_PER_UNIT);
1953 /* In this case, virtual_stack_vars_rtx points to one byte
1954 higher than the top of the frame area. So make sure we
1955 allocate a big enough chunk to keep the frame pointer
1956 aligned like a real one. */
1957 if (alignment)
1958 size = CEIL_ROUND (size, alignment);
1959 #endif
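/* For example, a 37-byte frame with stack_alignment_needed of 128 bits
gives alignment == 16, and CEIL_ROUND (37, 16) == ((37 + 15) & ~15) == 48,
keeping the substitute frame as aligned as a real one. (The sizes are
illustrative.) */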
1960 start_sequence ();
1961 loc = assign_stack_temp (BLKmode, size, 1);
1962 loc = XEXP (loc, 0);
1963 #ifdef FRAME_GROWS_DOWNWARD
1964 /* In this case, virtual_stack_vars_rtx points to one byte
1965 higher than the top of the frame area. So compute the offset
1966 to one byte higher than our substitute frame. */
1967 loc = plus_constant (loc, size);
1968 #endif
1969 map->reg_map[regno] = temp
1970 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1972 #ifdef STACK_BOUNDARY
1973 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1974 #endif
1976 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1978 seq = get_insns ();
1979 end_sequence ();
1980 emit_insn_after (seq, map->insns_at_start);
1981 return temp;
1983 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1984 || (map->integrating
1985 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1986 == orig)))
1988 /* Do the same for a block to contain any arguments referenced
1989 in memory. */
1990 rtx loc, seq;
1991 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1993 start_sequence ();
1994 loc = assign_stack_temp (BLKmode, size, 1);
1995 loc = XEXP (loc, 0);
1996 /* When arguments grow downward, the virtual incoming
1997 args pointer points to the top of the argument block,
1998 so the remapped location better do the same. */
1999 #ifdef ARGS_GROW_DOWNWARD
2000 loc = plus_constant (loc, size);
2001 #endif
2002 map->reg_map[regno] = temp
2003 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2005 #ifdef STACK_BOUNDARY
2006 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
2007 #endif
2009 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
2011 seq = get_insns ();
2012 end_sequence ();
2013 emit_insn_after (seq, map->insns_at_start);
2014 return temp;
2016 else if (REG_FUNCTION_VALUE_P (orig))
2018 /* This is a reference to the function return value. If
2019 the function doesn't have a return value, error. If the
2020 mode doesn't agree, and it ain't BLKmode, make a SUBREG. */
2021 if (map->inline_target == 0)
2023 if (rtx_equal_function_value_matters)
2024 /* This is an ignored return value. We must not
2025 leave it in with REG_FUNCTION_VALUE_P set, since
2026 that would confuse subsequent inlining of the
2027 current function into a later function. */
2028 return gen_rtx_REG (GET_MODE (orig), regno);
2029 else
2030 /* Must be unrolling loops or replicating code if we
2031 reach here, so return the register unchanged. */
2032 return orig;
2034 else if (GET_MODE (map->inline_target) != BLKmode
2035 && mode != GET_MODE (map->inline_target))
2036 return gen_lowpart (mode, map->inline_target);
2037 else
2038 return map->inline_target;
2040 #if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
2041 /* If leaf_renumber_regs_insn() might remap this register to
2042 some other number, make sure we don't share it with the
2043 inlined function, otherwise delayed optimization of the
2044 inlined function may change it in place, breaking our
2045 reference to it. We may still share it within the
2046 function, so create an entry for this register in the
2047 reg_map. */
2048 if (map->integrating && regno < FIRST_PSEUDO_REGISTER
2049 && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
2051 if (!map->leaf_reg_map[regno][mode])
2052 map->leaf_reg_map[regno][mode] = gen_rtx_REG (mode, regno);
2053 return map->leaf_reg_map[regno][mode];
2055 #endif
2056 else
2057 return orig;
2059 abort ();
2061 if (map->reg_map[regno] == NULL)
2063 map->reg_map[regno] = gen_reg_rtx (mode);
2064 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2065 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2066 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2067 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2069 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
2070 mark_reg_pointer (map->reg_map[regno],
2071 map->regno_pointer_align[regno]);
2073 return map->reg_map[regno];
2075 case SUBREG:
2076 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
2077 return simplify_gen_subreg (GET_MODE (orig), copy,
2078 GET_MODE (SUBREG_REG (orig)),
2079 SUBREG_BYTE (orig));
2081 case ADDRESSOF:
2082 copy = gen_rtx_ADDRESSOF (mode,
2083 copy_rtx_and_substitute (XEXP (orig, 0),
2084 map, for_lhs),
2085 0, ADDRESSOF_DECL (orig));
2086 regno = ADDRESSOF_REGNO (orig);
2087 if (map->reg_map[regno])
2088 regno = REGNO (map->reg_map[regno]);
2089 else if (regno > LAST_VIRTUAL_REGISTER)
2091 temp = XEXP (orig, 0);
2092 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2093 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2094 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2095 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2096 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2098 /* Objects may initially be represented as registers, but
2099 may later be turned into a MEM if their address is taken by
2100 put_var_into_stack. Therefore, the register table may have
2101 entries which are MEMs.
2103 We briefly tried to clear such entries, but that ended up
2104 cascading into many changes due to the optimizers not being
2105 prepared for empty entries in the register table. So we've
2106 decided to allow the MEMs in the register table for now. */
2107 if (REG_P (map->x_regno_reg_rtx[regno])
2108 && REG_POINTER (map->x_regno_reg_rtx[regno]))
2109 mark_reg_pointer (map->reg_map[regno],
2110 map->regno_pointer_align[regno]);
2111 regno = REGNO (map->reg_map[regno]);
2113 ADDRESSOF_REGNO (copy) = regno;
2114 return copy;
2116 case USE:
2117 case CLOBBER:
2118 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2119 to (use foo) if the original insn didn't have a subreg.
2120 Removing the subreg distorts the VAX movstrhi pattern
2121 by changing the mode of an operand. */
2122 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
2123 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2124 copy = SUBREG_REG (copy);
2125 return gen_rtx_fmt_e (code, VOIDmode, copy);
2127 /* We need to handle "deleted" labels that appear in the DECL_RTL
2128 of a LABEL_DECL. */
2129 case NOTE:
2130 if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
2131 break;
2133 /* ... FALLTHRU ... */
2134 case CODE_LABEL:
2135 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2136 = LABEL_PRESERVE_P (orig);
2137 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2139 case LABEL_REF:
2140 copy
2141 = gen_rtx_LABEL_REF
2142 (mode,
2143 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2144 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
2146 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2148 /* The fact that this label was previously nonlocal does not mean
2149 it still is, so we must check if it is within the range of
2150 this function's labels. */
2151 LABEL_REF_NONLOCAL_P (copy)
2152 = (LABEL_REF_NONLOCAL_P (orig)
2153 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2154 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2156 /* If we have made a nonlocal label local, it means that this
2157 inlined call will be referring to our nonlocal goto handler.
2158 So make sure we create one for this block; we normally would
2159 not since this is not otherwise considered a "call". */
2160 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2161 function_call_count++;
2163 return copy;
2165 case PC:
2166 case CC0:
2167 case CONST_INT:
2168 case CONST_VECTOR:
2169 return orig;
2171 case SYMBOL_REF:
2172 /* Symbols which represent the address of a label stored in the constant
2173 pool must be modified to point to a constant pool entry for the
2174 remapped label. Otherwise, symbols are returned unchanged. */
2175 if (CONSTANT_POOL_ADDRESS_P (orig))
2177 struct function *f = inlining ? inlining : cfun;
2178 rtx constant = get_pool_constant_for_function (f, orig);
2179 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2180 if (inlining)
2182 rtx temp = force_const_mem (const_mode,
2183 copy_rtx_and_substitute (constant,
2184 map, 0));
2186 #if 0
2187 /* Legitimizing the address here is incorrect.
2189 Since we had a SYMBOL_REF before, we can assume it is valid
2190 to have one in this position in the insn.
2192 Also, change_address may create new registers. These
2193 registers will not have valid reg_map entries. This can
2194 cause try_constants() to fail because it assumes that all
2195 registers in the rtx have valid reg_map entries, and it may
2196 end up replacing one of these new registers with junk. */
2198 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2199 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2200 #endif
2202 temp = XEXP (temp, 0);
2204 #ifdef POINTERS_EXTEND_UNSIGNED
2205 if (GET_MODE (temp) != GET_MODE (orig))
2206 temp = convert_memory_address (GET_MODE (orig), temp);
2207 #endif
2208 return temp;
2210 else if (GET_CODE (constant) == LABEL_REF)
2211 return XEXP (force_const_mem
2212 (GET_MODE (orig),
2213 copy_rtx_and_substitute (constant, map, for_lhs)),
2214 0);
2216 else if (TREE_CONSTANT_POOL_ADDRESS_P (orig) && inlining)
2217 notice_rtl_inlining_of_deferred_constant ();
2219 return orig;
2221 case CONST_DOUBLE:
2222 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2223 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2224 duplicate of a CONST_DOUBLE we have already seen. */
2225 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2227 REAL_VALUE_TYPE d;
2229 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2230 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2232 else
2233 return immed_double_const (CONST_DOUBLE_LOW (orig),
2234 CONST_DOUBLE_HIGH (orig), VOIDmode);
2236 case CONST:
2237 /* Make new constant pool entry for a constant
2238 that was in the pool of the inline function. */
2239 if (RTX_INTEGRATED_P (orig))
2240 abort ();
2241 break;
2243 case ASM_OPERANDS:
2244 /* If a single asm insn contains multiple output operands then
2245 it contains multiple ASM_OPERANDS rtx's that share the input
2246 and constraint vecs. We must make sure that the copied insn
2247 continues to share them. */
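/* E.g. for asm ("..." : "=r" (a), "=r" (b) : "r" (c)); the insn is a
PARALLEL of two SETs whose sources are distinct ASM_OPERANDS sharing one
input vector and one constraint vector; the copy must keep that sharing,
which is what the orig/copy vector fields in MAP record. */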
2248 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2250 copy = rtx_alloc (ASM_OPERANDS);
2251 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2252 PUT_MODE (copy, GET_MODE (orig));
2253 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2254 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2255 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2256 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2257 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2258 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2259 = map->copy_asm_constraints_vector;
2260 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2261 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2262 return copy;
2264 break;
2266 case CALL:
2267 /* This is given special treatment because the first
2268 operand of a CALL is a (MEM ...) which may get
2269 forced into a register for cse. This is undesirable
2270 if function-address cse isn't wanted or if we won't do cse. */
2271 #ifndef NO_FUNCTION_CSE
2272 if (! (optimize && ! flag_no_function_cse))
2273 #endif
2275 rtx copy
2276 = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2277 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2278 map, 0));
2280 MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));
2282 return
2283 gen_rtx_CALL (GET_MODE (orig), copy,
2284 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2286 break;
2288 #if 0
2289 /* Must be ifdefed out for loop unrolling to work. */
2290 case RETURN:
2291 abort ();
2292 #endif
2294 case SET:
2295 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2296 Adjust the setting by the offset of the area we made.
2297 If the nonlocal goto is into the current function,
2298 this will result in unnecessarily bad code, but should work. */
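/* Sketch of the adjustment below (names and values illustrative): if the
substitute frame area was materialized at (plus (reg FP_EQUIV)
(const_int 16)), then loc_offset is 16 and a nonlocal goto's
(set (reg fp) SRC) is rewritten as (set (reg fp) (SRC - 16)). */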
2299 if (SET_DEST (orig) == virtual_stack_vars_rtx
2300 || SET_DEST (orig) == virtual_incoming_args_rtx)
2302 /* In case a translation hasn't occurred already, make one now. */
2303 rtx equiv_reg;
2304 rtx equiv_loc;
2305 HOST_WIDE_INT loc_offset;
2307 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2308 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2309 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2310 REGNO (equiv_reg)).rtx;
2311 loc_offset
2312 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2314 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2315 force_operand
2316 (plus_constant
2317 (copy_rtx_and_substitute (SET_SRC (orig),
2318 map, 0),
2319 - loc_offset),
2320 NULL_RTX));
2322 else
2323 return gen_rtx_SET (VOIDmode,
2324 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2325 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2326 break;
2328 case MEM:
2329 if (inlining
2330 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2331 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2333 enum machine_mode const_mode
2334 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2335 rtx constant
2336 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2338 constant = copy_rtx_and_substitute (constant, map, 0);
2340 /* If this was an address of a constant pool entry that itself
2341 had to be placed in the constant pool, it might not be a
2342 valid address. So the recursive call might have turned it
2343 into a register. In that case, it isn't a constant any
2344 more, so return it. This has the potential of changing a
2345 MEM into a REG, but we'll assume that it is safe. */
2346 if (! CONSTANT_P (constant))
2347 return constant;
2349 return validize_mem (force_const_mem (const_mode, constant));
2352 copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
2353 map, 0));
2354 MEM_COPY_ATTRIBUTES (copy, orig);
2356 /* If inlining and this is not for the LHS, turn off RTX_UNCHANGING_P
2357 since this may be an indirect reference to a parameter and the
2358 actual may not be readonly. */
2359 if (inlining && !for_lhs)
2360 RTX_UNCHANGING_P (copy) = 0;
2362 /* If inlining, squish aliasing data that references the subroutine's
2363 parameter list, since that's no longer applicable. */
2364 if (inlining && MEM_EXPR (copy)
2365 && TREE_CODE (MEM_EXPR (copy)) == INDIRECT_REF
2366 && TREE_CODE (TREE_OPERAND (MEM_EXPR (copy), 0)) == PARM_DECL)
2367 set_mem_expr (copy, NULL_TREE);
2369 return copy;
2371 default:
2372 break;
2375 copy = rtx_alloc (code);
2376 PUT_MODE (copy, mode);
2377 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2378 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2379 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2381 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2383 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2385 switch (*format_ptr++)
2387 case '0':
2388 /* Copy this through the wide int field; that's safest. */
2389 X0WINT (copy, i) = X0WINT (orig, i);
2390 break;
2392 case 'e':
2393 XEXP (copy, i)
2394 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2395 break;
2397 case 'u':
2398 /* Change any references to old-insns to point to the
2399 corresponding copied insns. */
2400 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2401 break;
2403 case 'E':
2404 XVEC (copy, i) = XVEC (orig, i);
2405 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2407 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2408 for (j = 0; j < XVECLEN (copy, i); j++)
2409 XVECEXP (copy, i, j)
2410 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2411 map, for_lhs);
2413 break;
2415 case 'w':
2416 XWINT (copy, i) = XWINT (orig, i);
2417 break;
2419 case 'i':
2420 XINT (copy, i) = XINT (orig, i);
2421 break;
2423 case 's':
2424 XSTR (copy, i) = XSTR (orig, i);
2425 break;
2427 case 't':
2428 XTREE (copy, i) = XTREE (orig, i);
2429 break;
2431 default:
2432 abort ();
2436 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2438 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2439 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2440 map->copy_asm_constraints_vector
2441 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2444 return copy;
2447 /* Substitute known constant values into INSN, if that is valid. */
2449 void
2450 try_constants (insn, map)
2451 rtx insn;
2452 struct inline_remap *map;
2454 int i;
2456 map->num_sets = 0;
2458 /* First try just updating addresses, then other things. This is
2459 important when we have something like the store of a constant
2460 into memory and we can update the memory address but the machine
2461 does not support a constant source. */
2462 subst_constants (&PATTERN (insn), insn, map, 1);
2463 apply_change_group ();
2464 subst_constants (&PATTERN (insn), insn, map, 0);
2465 apply_change_group ();
2467 /* Enforce consistency between the addresses in the regular insn flow
2468 and the ones in CALL_INSN_FUNCTION_USAGE lists, if any. */
2469 if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
2471 subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
2472 apply_change_group ();
2475 /* Show we don't know the value of anything stored or clobbered. */
2476 note_stores (PATTERN (insn), mark_stores, NULL);
2477 map->last_pc_value = 0;
2478 #ifdef HAVE_cc0
2479 map->last_cc0_value = 0;
2480 #endif
2482 /* Set up any constant equivalences made in this insn. */
2483 for (i = 0; i < map->num_sets; i++)
2485 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2487 int regno = REGNO (map->equiv_sets[i].dest);
2489 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2490 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2491 /* Following clause is a hack to make case work where GNU C++
2492 reassigns a variable to make cse work right. */
2493 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2494 regno).rtx,
2495 map->equiv_sets[i].equiv))
2496 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2497 map->equiv_sets[i].equiv, map->const_age);
2499 else if (map->equiv_sets[i].dest == pc_rtx)
2500 map->last_pc_value = map->equiv_sets[i].equiv;
2501 #ifdef HAVE_cc0
2502 else if (map->equiv_sets[i].dest == cc0_rtx)
2503 map->last_cc0_value = map->equiv_sets[i].equiv;
2504 #endif
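/* Example of why addresses are tried first (register numbers illustrative):
given (set (mem (reg 70)) (reg 71)) where reg 70 is equivalent to a frame
address and reg 71 to (const_int 0), the first pass can still rewrite the
address even when the second pass's substitution of the constant source
must be undone because the machine lacks a store of constants to memory. */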
2508 /* Substitute known constants for pseudo regs in the contents of LOC,
2509 which are part of INSN.
2510 If INSN is zero, the substitution should always be done (this is used to
2511 update DECL_RTL).
2512 These changes are taken out by try_constants if the result is not valid.
2514 Note that we are more concerned with determining when the result of a SET
2515 is a constant, for further propagation, than actually inserting constants
2516 into insns; cse will do the latter task better.
2518 This function is also used to adjust address of items previously addressed
2519 via the virtual stack variable or virtual incoming arguments registers.
2521 If MEMONLY is nonzero, only make changes inside a MEM. */
2523 static void
2524 subst_constants (loc, insn, map, memonly)
2525 rtx *loc;
2526 rtx insn;
2527 struct inline_remap *map;
2528 int memonly;
2530 rtx x = *loc;
2531 int i, j;
2532 enum rtx_code code;
2533 const char *format_ptr;
2534 int num_changes = num_validated_changes ();
2535 rtx new = 0;
2536 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2538 code = GET_CODE (x);
2540 switch (code)
2542 case PC:
2543 case CONST_INT:
2544 case CONST_DOUBLE:
2545 case CONST_VECTOR:
2546 case SYMBOL_REF:
2547 case CONST:
2548 case LABEL_REF:
2549 case ADDRESS:
2550 return;
2552 #ifdef HAVE_cc0
2553 case CC0:
2554 if (! memonly)
2555 validate_change (insn, loc, map->last_cc0_value, 1);
2556 return;
2557 #endif
2559 case USE:
2560 case CLOBBER:
2561 /* The only thing we can do with a USE or CLOBBER is possibly do
2562 some substitutions in a MEM within it. */
2563 if (GET_CODE (XEXP (x, 0)) == MEM)
2564 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2565 return;
2567 case REG:
2568 /* Substitute for parms and known constants. Don't replace
2569 hard regs used as user variables with constants. */
2570 if (! memonly)
2572 int regno = REGNO (x);
2573 struct const_equiv_data *p;
2575 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2576 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2577 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2578 p->rtx != 0)
2579 && p->age >= map->const_age)
2580 validate_change (insn, loc, p->rtx, 1);
2582 return;
2584 case SUBREG:
2585 /* SUBREG applied to something other than a reg
2586 should be treated as ordinary, since that must
2587 be a special hack and we don't know how to treat it specially.
2588 Consider for example mulsidi3 in m68k.md.
2589 Ordinary SUBREG of a REG needs this special treatment. */
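/* For example, if (reg:SI 60) is known equivalent to
(const_int 0x12345678), then for (subreg:HI (reg:SI 60) 0) the code below
asks simplify_gen_subreg for the HImode piece, giving (const_int 0x5678)
on a little-endian target. (Register number and value are illustrative.) */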
2590 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2592 rtx inner = SUBREG_REG (x);
2593 rtx new = 0;
2595 /* We can't call subst_constants on &SUBREG_REG (x) because any
2596 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2597 see what is inside, try to form the new SUBREG and see if that is
2598 valid. We handle two cases: extracting a full word in an
2599 integral mode and extracting the low part. */
2600 subst_constants (&inner, NULL_RTX, map, 0);
2601 new = simplify_gen_subreg (GET_MODE (x), inner,
2602 GET_MODE (SUBREG_REG (x)),
2603 SUBREG_BYTE (x));
2605 if (new)
2606 validate_change (insn, loc, new, 1);
2607 else
2608 cancel_changes (num_changes);
2610 return;
2612 break;
2614 case MEM:
2615 subst_constants (&XEXP (x, 0), insn, map, 0);
2617 /* If a memory address got spoiled, change it back. */
2618 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2619 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2620 cancel_changes (num_changes);
2621 return;
2623 case SET:
2625 /* Substitute constants in our source, and in any arguments to a
2626 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2627 itself. */
2628 rtx *dest_loc = &SET_DEST (x);
2629 rtx dest = *dest_loc;
2630 rtx src, tem;
2631 enum machine_mode compare_mode = VOIDmode;
2633 /* If SET_SRC is a COMPARE which subst_constants would turn into
2634 COMPARE of 2 VOIDmode constants, note the mode in which comparison
2635 is to be done. */
2636 if (GET_CODE (SET_SRC (x)) == COMPARE)
2638 src = SET_SRC (x);
2639 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2640 || CC0_P (dest))
2642 compare_mode = GET_MODE (XEXP (src, 0));
2643 if (compare_mode == VOIDmode)
2644 compare_mode = GET_MODE (XEXP (src, 1));
2648 subst_constants (&SET_SRC (x), insn, map, memonly);
2649 src = SET_SRC (x);
2651 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2652 || GET_CODE (*dest_loc) == SUBREG
2653 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2655 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2657 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2658 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2660 dest_loc = &XEXP (*dest_loc, 0);
2663 /* Do substitute in the address of a destination in memory. */
2664 if (GET_CODE (*dest_loc) == MEM)
2665 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2667 /* Check for the case where DEST is a SUBREG, both it and the underlying
2668 register are no larger than one word, and the SUBREG is at least as wide.
2669 In that case, we are really setting the underlying register to the
2670 source converted to the mode of DEST. So indicate that. */
2671 if (GET_CODE (dest) == SUBREG
2672 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2673 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2674 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2675 <= GET_MODE_SIZE (GET_MODE (dest)))
2676 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2677 src)))
2678 src = tem, dest = SUBREG_REG (dest);
2680 /* If storing a recognizable value, save it for later recording. */
2681 if ((map->num_sets < MAX_RECOG_OPERANDS)
2682 && (CONSTANT_P (src)
2683 || (GET_CODE (src) == REG
2684 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2685 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2686 || (GET_CODE (src) == PLUS
2687 && GET_CODE (XEXP (src, 0)) == REG
2688 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2689 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2690 && CONSTANT_P (XEXP (src, 1)))
2691 || GET_CODE (src) == COMPARE
2692 || CC0_P (dest)
2693 || (dest == pc_rtx
2694 && (src == pc_rtx || GET_CODE (src) == RETURN
2695 || GET_CODE (src) == LABEL_REF))))
2697 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2698 it will cause us to save the COMPARE with any constants
2699 substituted, which is what we want for later. */
2700 rtx src_copy = copy_rtx (src);
2701 map->equiv_sets[map->num_sets].equiv = src_copy;
2702 map->equiv_sets[map->num_sets++].dest = dest;
2703 if (compare_mode != VOIDmode
2704 && GET_CODE (src) == COMPARE
2705 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2706 || CC0_P (dest))
2707 && GET_MODE (XEXP (src, 0)) == VOIDmode
2708 && GET_MODE (XEXP (src, 1)) == VOIDmode)
2710 map->compare_src = src_copy;
2711 map->compare_mode = compare_mode;
2715 return;
2717 default:
2718 break;
2721 format_ptr = GET_RTX_FORMAT (code);
2723 /* If the first operand is an expression, save its mode for later. */
2724 if (*format_ptr == 'e')
2725 op0_mode = GET_MODE (XEXP (x, 0));
2727 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2729 switch (*format_ptr++)
2731 case '0':
2732 break;
2734 case 'e':
2735 if (XEXP (x, i))
2736 subst_constants (&XEXP (x, i), insn, map, memonly);
2737 break;
2739 case 'u':
2740 case 'i':
2741 case 's':
2742 case 'w':
2743 case 'n':
2744 case 't':
2745 case 'B':
2746 break;
2748 case 'E':
2749 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2750 for (j = 0; j < XVECLEN (x, i); j++)
2751 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2753 break;
2755 default:
2756 abort ();
2760 /* If this is a commutative operation, move a constant to the second
2761 operand unless the second operand is already a CONST_INT. */
2762 if (! memonly
2763 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2764 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2766 rtx tem = XEXP (x, 0);
2767 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2768 validate_change (insn, &XEXP (x, 1), tem, 1);
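/* E.g. (plus:SI (const_int 4) (reg:SI 60)) is turned into the canonical
(plus:SI (reg:SI 60) (const_int 4)) here, so the simplifiers below see
constants in the expected position. (Register number illustrative.) */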
2771 /* Simplify the expression in case we put in some constants. */
2772 if (! memonly)
2773 switch (GET_RTX_CLASS (code))
2775 case '1':
2776 if (op0_mode == MAX_MACHINE_MODE)
2777 abort ();
2778 new = simplify_unary_operation (code, GET_MODE (x),
2779 XEXP (x, 0), op0_mode);
2780 break;
2782 case '<':
2784 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2786 if (op_mode == VOIDmode)
2787 op_mode = GET_MODE (XEXP (x, 1));
2788 new = simplify_relational_operation (code, op_mode,
2789 XEXP (x, 0), XEXP (x, 1));
2790 #ifdef FLOAT_STORE_FLAG_VALUE
2791 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2793 enum machine_mode mode = GET_MODE (x);
2794 if (new == const0_rtx)
2795 new = CONST0_RTX (mode);
2796 else
2798 REAL_VALUE_TYPE val;
2800 /* Avoid automatic aggregate initialization. */
2801 val = FLOAT_STORE_FLAG_VALUE (mode);
2802 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2805 #endif
2806 break;
2809 case '2':
2810 case 'c':
2811 new = simplify_binary_operation (code, GET_MODE (x),
2812 XEXP (x, 0), XEXP (x, 1));
2813 break;
2815 case 'b':
2816 case '3':
2817 if (op0_mode == MAX_MACHINE_MODE)
2818 abort ();
2820 if (code == IF_THEN_ELSE)
2822 rtx op0 = XEXP (x, 0);
2824 if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2825 && GET_MODE (op0) == VOIDmode
2826 && ! side_effects_p (op0)
2827 && XEXP (op0, 0) == map->compare_src
2828 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
2830 /* We have a compare of two VOIDmode constants for which
2831 we recorded the comparison mode. */
2832 rtx temp =
2833 simplify_relational_operation (GET_CODE (op0),
2834 map->compare_mode,
2835 XEXP (op0, 0),
2836 XEXP (op0, 1));
2838 if (temp == const0_rtx)
2839 new = XEXP (x, 2);
2840 else if (temp == const1_rtx)
2841 new = XEXP (x, 1);
2844 if (!new)
2845 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2846 XEXP (x, 0), XEXP (x, 1),
2847 XEXP (x, 2));
2848 break;
2851 if (new)
2852 validate_change (insn, loc, new, 1);
2855 /* Show that registers modified no longer contain known constants. We are
2856 called from note_stores with parts of the new insn. */
2858 static void
2859 mark_stores (dest, x, data)
2860 rtx dest;
2861 rtx x ATTRIBUTE_UNUSED;
2862 void *data ATTRIBUTE_UNUSED;
2864 int regno = -1;
2865 enum machine_mode mode = VOIDmode;
2867 /* DEST is always the innermost thing set, except in the case of
2868 SUBREGs of hard registers. */
2870 if (GET_CODE (dest) == REG)
2871 regno = REGNO (dest), mode = GET_MODE (dest);
2872 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2874 regno = REGNO (SUBREG_REG (dest));
2875 if (regno < FIRST_PSEUDO_REGISTER)
2876 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
2877 GET_MODE (SUBREG_REG (dest)),
2878 SUBREG_BYTE (dest),
2879 GET_MODE (dest));
2880 mode = GET_MODE (SUBREG_REG (dest));
2883 if (regno >= 0)
2885 unsigned int uregno = regno;
2886 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2887 : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2888 unsigned int i;
2890 /* Ignore virtual stack var or virtual arg register since those
2891 are handled separately. */
2892 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2893 && uregno != VIRTUAL_STACK_VARS_REGNUM)
2894 for (i = uregno; i <= last_reg; i++)
2895 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2896 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
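/* For instance, on a 32-bit target where HARD_REGNO_NREGS (2, DImode) == 2,
a store to (reg:DI 2) wipes the recorded equivalences for hard registers 2
and 3, while a store to a pseudo wipes only that pseudo's own entry.
(Register numbers illustrative.) */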
2900 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2901 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2902 that it points to the node itself, thus indicating that the node is its
2903 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2904 the given node is NULL, recursively descend the decl/block tree which
2905 it is the root of, and for each other ..._DECL or BLOCK node contained
2906 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2907 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2908 values to point to themselves. */
2910 static void
2911 set_block_origin_self (stmt)
2912 tree stmt;
2914 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2916 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2919 tree local_decl;
2921 for (local_decl = BLOCK_VARS (stmt);
2922 local_decl != NULL_TREE;
2923 local_decl = TREE_CHAIN (local_decl))
2924 set_decl_origin_self (local_decl); /* Potential recursion. */
2928 tree subblock;
2930 for (subblock = BLOCK_SUBBLOCKS (stmt);
2931 subblock != NULL_TREE;
2932 subblock = BLOCK_CHAIN (subblock))
2933 set_block_origin_self (subblock); /* Recurse. */
2938 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2939 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2940 node so that it points to the node itself, thus indicating that the
2941 node represents its own (abstract) origin. Additionally, if the
2942 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2943 the decl/block tree of which the given node is the root, and for
2944 each other ..._DECL or BLOCK node contained therein whose
2945 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2946 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2947 point to themselves. */
2949 void
2950 set_decl_origin_self (decl)
2951 tree decl;
2953 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2955 DECL_ABSTRACT_ORIGIN (decl) = decl;
2956 if (TREE_CODE (decl) == FUNCTION_DECL)
2958 tree arg;
2960 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2961 DECL_ABSTRACT_ORIGIN (arg) = arg;
2962 if (DECL_INITIAL (decl) != NULL_TREE
2963 && DECL_INITIAL (decl) != error_mark_node)
2964 set_block_origin_self (DECL_INITIAL (decl));
2969 /* Given a pointer to some BLOCK node, and a boolean value to set the
2970 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2971 the given block, and for all local decls and all local sub-blocks
2972 (recursively) which are contained therein. */
2974 static void
2975 set_block_abstract_flags (stmt, setting)
2976 tree stmt;
2977 int setting;
2979 tree local_decl;
2980 tree subblock;
2982 BLOCK_ABSTRACT (stmt) = setting;
2984 for (local_decl = BLOCK_VARS (stmt);
2985 local_decl != NULL_TREE;
2986 local_decl = TREE_CHAIN (local_decl))
2987 set_decl_abstract_flags (local_decl, setting);
2989 for (subblock = BLOCK_SUBBLOCKS (stmt);
2990 subblock != NULL_TREE;
2991 subblock = BLOCK_CHAIN (subblock))
2992 set_block_abstract_flags (subblock, setting);
2995 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2996 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2997 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2998 set the abstract flags for all of the parameters, local vars, local
2999 blocks and sub-blocks (recursively) to the same setting. */
3001 void
3002 set_decl_abstract_flags (decl, setting)
3003 tree decl;
3004 int setting;
3006 DECL_ABSTRACT (decl) = setting;
3007 if (TREE_CODE (decl) == FUNCTION_DECL)
3009 tree arg;
3011 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3012 DECL_ABSTRACT (arg) = setting;
3013 if (DECL_INITIAL (decl) != NULL_TREE
3014 && DECL_INITIAL (decl) != error_mark_node)
3015 set_block_abstract_flags (DECL_INITIAL (decl), setting);
3019 /* Output the assembly language code for the function FNDECL
3020 from its DECL_SAVED_INSNS. Used for inline functions that are output
3021 at end of compilation instead of where they came in the source. */
3023 static GTY(()) struct function *old_cfun;
3025 void
3026 output_inline_function (fndecl)
3027 tree fndecl;
3029 enum debug_info_type old_write_symbols = write_symbols;
3030 const struct gcc_debug_hooks *const old_debug_hooks = debug_hooks;
3031 struct function *f = DECL_SAVED_INSNS (fndecl);
3033 old_cfun = cfun;
3034 cfun = f;
3035 current_function_decl = fndecl;
3037 set_new_last_label_num (f->inl_max_label_num);
3039 /* We're not deferring this any longer. */
3040 DECL_DEFER_OUTPUT (fndecl) = 0;
3042 /* If requested, suppress debugging information. */
3043 if (f->no_debugging_symbols)
3045 write_symbols = NO_DEBUG;
3046 debug_hooks = &do_nothing_debug_hooks;
3049 /* Make sure warnings emitted by the optimizers (e.g. control reaches
3050 end of non-void function) are not wildly incorrect. */
3051 input_location = DECL_SOURCE_LOCATION (fndecl);
3053 /* Compile this function all the way down to assembly code. As a
3054 side effect this destroys the saved RTL representation, but
3055 that's okay, because we don't need to inline this anymore. */
3056 rest_of_compilation (fndecl);
3057 DECL_INLINE (fndecl) = 0;
3059 cfun = old_cfun;
3060 current_function_decl = old_cfun ? old_cfun->decl : 0;
3061 write_symbols = old_write_symbols;
3062 debug_hooks = old_debug_hooks;
3066 /* Functions to keep track of the values hard regs had at the start of
3067 the function. */
3069 rtx
3070 get_hard_reg_initial_reg (fun, reg)
3071 struct function *fun;
3072 rtx reg;
3074 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3075 int i;
3077 if (ivs == 0)
3078 return NULL_RTX;
3080 for (i = 0; i < ivs->num_entries; i++)
3081 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
3082 return ivs->entries[i].hard_reg;
3084 return NULL_RTX;
3087 rtx
3088 has_func_hard_reg_initial_val (fun, reg)
3089 struct function *fun;
3090 rtx reg;
3092 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3093 int i;
3095 if (ivs == 0)
3096 return NULL_RTX;
3098 for (i = 0; i < ivs->num_entries; i++)
3099 if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
3100 return ivs->entries[i].pseudo;
3102 return NULL_RTX;
3105 rtx
3106 get_func_hard_reg_initial_val (fun, reg)
3107 struct function *fun;
3108 rtx reg;
3110 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3111 rtx rv = has_func_hard_reg_initial_val (fun, reg);
3113 if (rv)
3114 return rv;
3116 if (ivs == 0)
3118 fun->hard_reg_initial_vals = (void *) ggc_alloc (sizeof (initial_value_struct));
3119 ivs = fun->hard_reg_initial_vals;
3120 ivs->num_entries = 0;
3121 ivs->max_entries = 5;
3122 ivs->entries = (initial_value_pair *) ggc_alloc (5 * sizeof (initial_value_pair));
3125 if (ivs->num_entries >= ivs->max_entries)
3127 ivs->max_entries += 5;
3128 ivs->entries =
3129 (initial_value_pair *) ggc_realloc (ivs->entries,
3130 ivs->max_entries
3131 * sizeof (initial_value_pair));
3134 ivs->entries[ivs->num_entries].hard_reg = reg;
3135 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
3137 return ivs->entries[ivs->num_entries++].pseudo;
3140 rtx
3141 get_hard_reg_initial_val (mode, regno)
3142 enum machine_mode mode;
3143 int regno;
3145 return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
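/* Hedged usage sketch (LINK_REGNUM is hypothetical): a port that wants the
value a link register had on entry can write

rtx entry_lr = get_hard_reg_initial_val (Pmode, LINK_REGNUM);

and rely on emit_initial_value_sets, below, to emit the copy from the hard
register at the start of the function. */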
3148 rtx
3149 has_hard_reg_initial_val (mode, regno)
3150 enum machine_mode mode;
3151 int regno;
3153 return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3156 static void
3157 setup_initial_hard_reg_value_integration (inl_f, remap)
3158 struct function *inl_f;
3159 struct inline_remap *remap;
3161 struct initial_value_struct *ivs = inl_f->hard_reg_initial_vals;
3162 int i;
3164 if (ivs == 0)
3165 return;
3167 for (i = 0; i < ivs->num_entries; i ++)
3168 remap->reg_map[REGNO (ivs->entries[i].pseudo)]
3169 = get_func_hard_reg_initial_val (cfun, ivs->entries[i].hard_reg);
3173 void
3174 emit_initial_value_sets ()
3176 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3177 int i;
3178 rtx seq;
3180 if (ivs == 0)
3181 return;
3183 start_sequence ();
3184 for (i = 0; i < ivs->num_entries; i++)
3185 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
3186 seq = get_insns ();
3187 end_sequence ();
3189 emit_insn_after (seq, get_insns ());
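/* The sequence emitted just above is one move per tracked register, e.g.
(set (reg:SI 71) (reg:SI 14)) if pseudo 71 tracks hard register 14
(numbers illustrative), inserted right after the function's first insn. */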
3192 /* If the backend knows where to allocate pseudos for hard
3193 register initial values, register these allocations now. */
3194 void
3195 allocate_initial_values (reg_equiv_memory_loc)
3196 rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED;
3198 #ifdef ALLOCATE_INITIAL_VALUE
3199 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3200 int i;
3202 if (ivs == 0)
3203 return;
3205 for (i = 0; i < ivs->num_entries; i++)
3207 int regno = REGNO (ivs->entries[i].pseudo);
3208 rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);
3210 if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
3211 ; /* Do nothing. */
3212 else if (GET_CODE (x) == MEM)
3213 reg_equiv_memory_loc[regno] = x;
3214 else if (GET_CODE (x) == REG)
3216 reg_renumber[regno] = REGNO (x);
3217 /* Poke the regno right into regno_reg_rtx
3218 so that even fixed regs are accepted. */
3219 REGNO (ivs->entries[i].pseudo) = REGNO (x);
3221 else abort ();
3223 #endif
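/* A target's ALLOCATE_INITIAL_VALUE is expected to return NULL_RTX when it
has no preference, a MEM naming the stack slot holding the entry value, or
a REG to pin the pseudo to; the branches above handle exactly those cases,
falling back to doing nothing when the pseudo is set more than once. */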
3226 #include "gt-integrate.h"