HAMMER VFS - REDO implementation base code part 3/many (addendum)
[dragonfly.git] / contrib / gcc-4.4 / gcc / function.c
blob 8a38a6f775a51731cece0850706ed72e0216d7e6
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "output.h"
53 #include "basic-block.h"
54 #include "toplev.h"
55 #include "hashtab.h"
56 #include "ggc.h"
57 #include "tm_p.h"
58 #include "integrate.h"
59 #include "langhooks.h"
60 #include "target.h"
61 #include "cfglayout.h"
62 #include "gimple.h"
63 #include "tree-pass.h"
64 #include "predict.h"
65 #include "df.h"
66 #include "timevar.h"
67 #include "vecprim.h"
69 /* So we can assign to cfun in this file. */
70 #undef cfun
72 #ifndef STACK_ALIGNMENT_NEEDED
73 #define STACK_ALIGNMENT_NEEDED 1
74 #endif
76 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
 78 /* Some systems use __main in a way incompatible with its use in gcc; in these
79 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
80 give the same symbol without quotes for an alternative entry point. You
81 must define both, or neither. */
82 #ifndef NAME__MAIN
83 #define NAME__MAIN "__main"
84 #endif
 86 /* Round a value down to the largest multiple of the required alignment
 87    that does not exceed it.  Avoid using division in case the value is
88 negative. Assume the alignment is a power of two. */
89 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
 91 /* Similar, but round up to the smallest multiple of the alignment that
 92    is not less than the value.  */
93 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
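/* A worked example of the two rounding macros above, assuming ALIGN is the
   power of two 8 (the values are illustrative only):

     FLOOR_ROUND (13, 8)   == (13 & ~7)         ==   8
     FLOOR_ROUND (-13, 8)  == (-13 & ~7)        == -16
     CEIL_ROUND (13, 8)    == ((13 + 7) & ~7)   ==  16
     CEIL_ROUND (-13, 8)   == ((-13 + 7) & ~7)  ==  -8

   Both macros reach a multiple of ALIGN purely by masking, which stays well
   defined for the negative frame offsets used when FRAME_GROWS_DOWNWARD.  */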
95 /* Nonzero if function being compiled doesn't contain any calls
96 (ignoring the prologue and epilogue). This is set prior to
97 local register allocation and is valid for the remaining
98 compiler passes. */
99 int current_function_is_leaf;
101 /* Nonzero if function being compiled doesn't modify the stack pointer
102 (ignoring the prologue and epilogue). This is only valid after
103 pass_stack_ptr_mod has run. */
104 int current_function_sp_is_unchanging;
106 /* Nonzero if the function being compiled is a leaf function which only
107 uses leaf registers. This is valid after reload (specifically after
108 sched2) and is useful only if the port defines LEAF_REGISTERS. */
109 int current_function_uses_only_leaf_regs;
111 /* Nonzero once virtual register instantiation has been done.
112 assign_stack_local uses frame_pointer_rtx when this is nonzero.
113 calls.c:emit_library_call_value_1 uses it to set up
114 post-instantiation libcalls. */
115 int virtuals_instantiated;
117 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
118 static GTY(()) int funcdef_no;
120 /* These variables hold pointers to functions to create and destroy
121 target specific, per-function data structures. */
122 struct machine_function * (*init_machine_status) (void);
124 /* The currently compiled function. */
125 struct function *cfun = 0;
127 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
128 static VEC(int,heap) *prologue;
129 static VEC(int,heap) *epilogue;
131 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
132 in this function. */
133 static VEC(int,heap) *sibcall_epilogue;
136 htab_t types_used_by_vars_hash = NULL;
137 tree types_used_by_cur_var_decl = NULL;
139 /* Forward declarations. */
141 static struct temp_slot *find_temp_slot_from_address (rtx);
142 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
143 static void pad_below (struct args_size *, enum machine_mode, tree);
144 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
145 static int all_blocks (tree, tree *);
146 static tree *get_block_vector (tree, int *);
147 extern tree debug_find_var_in_block_tree (tree, tree);
148 /* We always define `record_insns' even if it's not used so that we
149 can always export `prologue_epilogue_contains'. */
150 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
151 static int contains (const_rtx, VEC(int,heap) **);
152 #ifdef HAVE_return
153 static void emit_return_into_block (basic_block);
154 #endif
155 static void prepare_function_start (void);
156 static void do_clobber_return_reg (rtx, void *);
157 static void do_use_return_reg (rtx, void *);
158 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
160 /* Stack of nested functions. */
161 /* Keep track of the cfun stack. */
163 typedef struct function *function_p;
165 DEF_VEC_P(function_p);
166 DEF_VEC_ALLOC_P(function_p,heap);
167 static VEC(function_p,heap) *function_context_stack;
169 /* Save the current context for compilation of a nested function.
170 This is called from language-specific code. */
172 void
173 push_function_context (void)
175 if (cfun == 0)
176 allocate_struct_function (NULL, false);
178 VEC_safe_push (function_p, heap, function_context_stack, cfun);
179 set_cfun (NULL);
182 /* Restore the last saved context, at the end of a nested function.
183 This function is called from language-specific code. */
185 void
186 pop_function_context (void)
188 struct function *p = VEC_pop (function_p, function_context_stack);
189 set_cfun (p);
190 current_function_decl = p->decl;
192 /* Reset variables that have known state during rtx generation. */
193 virtuals_instantiated = 0;
194 generating_concat_p = 1;
197 /* Clear out all parts of the state in F that can safely be discarded
198 after the function has been parsed, but not compiled, to let
199 garbage collection reclaim the memory. */
201 void
202 free_after_parsing (struct function *f)
204 f->language = 0;
207 /* Clear out all parts of the state in F that can safely be discarded
208 after the function has been compiled, to let garbage collection
209 reclaim the memory. */
211 void
212 free_after_compilation (struct function *f)
214 VEC_free (int, heap, prologue);
215 VEC_free (int, heap, epilogue);
216 VEC_free (int, heap, sibcall_epilogue);
217 if (crtl->emit.regno_pointer_align)
218 free (crtl->emit.regno_pointer_align);
220 memset (crtl, 0, sizeof (struct rtl_data));
221 f->eh = NULL;
222 f->machine = NULL;
223 f->cfg = NULL;
225 regno_reg_rtx = NULL;
226 insn_locators_free ();
229 /* Return size needed for stack frame based on slots so far allocated.
230 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
231 the caller may have to do that. */
233 HOST_WIDE_INT
234 get_frame_size (void)
236 if (FRAME_GROWS_DOWNWARD)
237 return -frame_offset;
238 else
239 return frame_offset;
242 /* Issue an error message and return TRUE if frame OFFSET overflows in
 243    the signed target pointer arithmetic for function FUNC.  Otherwise
244 return FALSE. */
246 bool
247 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
249 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
251 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
252 /* Leave room for the fixed part of the frame. */
253 - 64 * UNITS_PER_WORD)
255 error ("%Jtotal size of local objects too large", func);
256 return TRUE;
259 return FALSE;
262 /* Return stack slot alignment in bits for TYPE and MODE. */
264 static unsigned int
265 get_stack_local_alignment (tree type, enum machine_mode mode)
267 unsigned int alignment;
269 if (mode == BLKmode)
270 alignment = BIGGEST_ALIGNMENT;
271 else
272 alignment = GET_MODE_ALIGNMENT (mode);
 274   /* Allow the front-end to (possibly) increase the alignment of this
275 stack slot. */
276 if (! type)
277 type = lang_hooks.types.type_for_mode (mode, 0);
279 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
282 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
283 with machine mode MODE.
285 ALIGN controls the amount of alignment for the address of the slot:
286 0 means according to MODE,
287 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
288 -2 means use BITS_PER_UNIT,
289 positive specifies alignment boundary in bits.
291 If REDUCE_ALIGNMENT_OK is true, it is OK to reduce alignment.
293 We do not round to stack_boundary here. */
296 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
297 int align,
298 bool reduce_alignment_ok ATTRIBUTE_UNUSED)
300 rtx x, addr;
301 int bigend_correction = 0;
302 unsigned int alignment, alignment_in_bits;
303 int frame_off, frame_alignment, frame_phase;
305 if (align == 0)
307 alignment = get_stack_local_alignment (NULL, mode);
308 alignment /= BITS_PER_UNIT;
310 else if (align == -1)
312 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
313 size = CEIL_ROUND (size, alignment);
315 else if (align == -2)
316 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
317 else
318 alignment = align / BITS_PER_UNIT;
320 alignment_in_bits = alignment * BITS_PER_UNIT;
322 if (FRAME_GROWS_DOWNWARD)
323 frame_offset -= size;
325 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
326 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
328 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
329 alignment = alignment_in_bits / BITS_PER_UNIT;
332 if (SUPPORTS_STACK_ALIGNMENT)
334 if (crtl->stack_alignment_estimated < alignment_in_bits)
336 if (!crtl->stack_realign_processed)
337 crtl->stack_alignment_estimated = alignment_in_bits;
338 else
340 /* If stack is realigned and stack alignment value
341 hasn't been finalized, it is OK not to increase
342 stack_alignment_estimated. The bigger alignment
343 requirement is recorded in stack_alignment_needed
344 below. */
345 gcc_assert (!crtl->stack_realign_finalized);
346 if (!crtl->stack_realign_needed)
348 /* It is OK to reduce the alignment as long as the
349 requested size is 0 or the estimated stack
350 alignment >= mode alignment. */
351 gcc_assert (reduce_alignment_ok
352 || size == 0
353 || (crtl->stack_alignment_estimated
354 >= GET_MODE_ALIGNMENT (mode)));
355 alignment_in_bits = crtl->stack_alignment_estimated;
356 alignment = alignment_in_bits / BITS_PER_UNIT;
362 if (crtl->stack_alignment_needed < alignment_in_bits)
363 crtl->stack_alignment_needed = alignment_in_bits;
364 if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
365 crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
367 /* Calculate how many bytes the start of local variables is off from
368 stack alignment. */
369 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
370 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
371 frame_phase = frame_off ? frame_alignment - frame_off : 0;
373 /* Round the frame offset to the specified alignment. The default is
374 to always honor requests to align the stack but a port may choose to
375 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
376 if (STACK_ALIGNMENT_NEEDED
377 || mode != BLKmode
378 || size != 0)
380 /* We must be careful here, since FRAME_OFFSET might be negative and
381 division with a negative dividend isn't as well defined as we might
382 like. So we instead assume that ALIGNMENT is a power of two and
383 use logical operations which are unambiguous. */
384 if (FRAME_GROWS_DOWNWARD)
385 frame_offset
386 = (FLOOR_ROUND (frame_offset - frame_phase,
387 (unsigned HOST_WIDE_INT) alignment)
388 + frame_phase);
389 else
390 frame_offset
391 = (CEIL_ROUND (frame_offset - frame_phase,
392 (unsigned HOST_WIDE_INT) alignment)
393 + frame_phase);
396 /* On a big-endian machine, if we are allocating more space than we will use,
397 use the least significant bytes of those that are allocated. */
398 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
399 bigend_correction = size - GET_MODE_SIZE (mode);
401 /* If we have already instantiated virtual registers, return the actual
402 address relative to the frame pointer. */
403 if (virtuals_instantiated)
404 addr = plus_constant (frame_pointer_rtx,
405 trunc_int_for_mode
406 (frame_offset + bigend_correction
407 + STARTING_FRAME_OFFSET, Pmode));
408 else
409 addr = plus_constant (virtual_stack_vars_rtx,
410 trunc_int_for_mode
411 (frame_offset + bigend_correction,
412 Pmode));
414 if (!FRAME_GROWS_DOWNWARD)
415 frame_offset += size;
417 x = gen_rtx_MEM (mode, addr);
418 set_mem_align (x, alignment_in_bits);
419 MEM_NOTRAP_P (x) = 1;
421 stack_slot_list
422 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
424 if (frame_offset_overflow (frame_offset, current_function_decl))
425 frame_offset = 0;
427 return x;
430 /* Wrap up assign_stack_local_1 with last parameter as false. */
433 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
435 return assign_stack_local_1 (mode, size, align, false);
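/* A minimal usage sketch (illustrative, not taken from this file): back ends
   and expansion code typically request a slot sized and aligned for a
   particular machine mode, e.g.

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   where ALIGN == 0 lets assign_stack_local_1 derive the alignment from the
   mode via get_stack_local_alignment.  */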
439 /* In order to evaluate some expressions, such as function calls returning
440 structures in memory, we need to temporarily allocate stack locations.
441 We record each allocated temporary in the following structure.
443 Associated with each temporary slot is a nesting level. When we pop up
444 one level, all temporaries associated with the previous level are freed.
445 Normally, all temporaries are freed after the execution of the statement
446 in which they were created. However, if we are inside a ({...}) grouping,
447 the result may be in a temporary and hence must be preserved. If the
448 result could be in a temporary, we preserve it if we can determine which
449 one it is in. If we cannot determine which temporary may contain the
450 result, all temporaries are preserved. A temporary is preserved by
451 pretending it was allocated at the previous nesting level.
453 Automatic variables are also assigned temporary slots, at the nesting
 454    level where they are defined.  They are marked as "kept" so that
455 free_temp_slots will not free them. */
457 struct temp_slot GTY(())
459 /* Points to next temporary slot. */
460 struct temp_slot *next;
461 /* Points to previous temporary slot. */
462 struct temp_slot *prev;
 463   /* The rtx used to reference the slot.  */
464 rtx slot;
465 /* The size, in units, of the slot. */
466 HOST_WIDE_INT size;
467 /* The type of the object in the slot, or zero if it doesn't correspond
468 to a type. We use this to determine whether a slot can be reused.
469 It can be reused if objects of the type of the new slot will always
470 conflict with objects of the type of the old slot. */
471 tree type;
472 /* The alignment (in bits) of the slot. */
473 unsigned int align;
474 /* Nonzero if this temporary is currently in use. */
475 char in_use;
476 /* Nonzero if this temporary has its address taken. */
477 char addr_taken;
478 /* Nesting level at which this slot is being used. */
479 int level;
480 /* Nonzero if this should survive a call to free_temp_slots. */
481 int keep;
482 /* The offset of the slot from the frame_pointer, including extra space
483 for alignment. This info is for combine_temp_slots. */
484 HOST_WIDE_INT base_offset;
485 /* The size of the slot, including extra space for alignment. This
486 info is for combine_temp_slots. */
487 HOST_WIDE_INT full_size;
490 /* A table of addresses that represent a stack slot. The table is a mapping
491 from address RTXen to a temp slot. */
492 static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
494 /* Entry for the above hash table. */
495 struct temp_slot_address_entry GTY(())
497 hashval_t hash;
498 rtx address;
499 struct temp_slot *temp_slot;
502 /* Removes temporary slot TEMP from LIST. */
504 static void
505 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
507 if (temp->next)
508 temp->next->prev = temp->prev;
509 if (temp->prev)
510 temp->prev->next = temp->next;
511 else
512 *list = temp->next;
514 temp->prev = temp->next = NULL;
517 /* Inserts temporary slot TEMP to LIST. */
519 static void
520 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
522 temp->next = *list;
523 if (*list)
524 (*list)->prev = temp;
525 temp->prev = NULL;
526 *list = temp;
529 /* Returns the list of used temp slots at LEVEL. */
531 static struct temp_slot **
532 temp_slots_at_level (int level)
534 if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
535 VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);
537 return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
540 /* Returns the maximal temporary slot level. */
542 static int
543 max_slot_level (void)
545 if (!used_temp_slots)
546 return -1;
548 return VEC_length (temp_slot_p, used_temp_slots) - 1;
551 /* Moves temporary slot TEMP to LEVEL. */
553 static void
554 move_slot_to_level (struct temp_slot *temp, int level)
556 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
557 insert_slot_to_list (temp, temp_slots_at_level (level));
558 temp->level = level;
561 /* Make temporary slot TEMP available. */
563 static void
564 make_slot_available (struct temp_slot *temp)
566 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
567 insert_slot_to_list (temp, &avail_temp_slots);
568 temp->in_use = 0;
569 temp->level = -1;
572 /* Compute the hash value for an address -> temp slot mapping.
573 The value is cached on the mapping entry. */
574 static hashval_t
575 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
577 int do_not_record = 0;
578 return hash_rtx (t->address, GET_MODE (t->address),
579 &do_not_record, NULL, false);
582 /* Return the hash value for an address -> temp slot mapping. */
583 static hashval_t
584 temp_slot_address_hash (const void *p)
586 const struct temp_slot_address_entry *t;
587 t = (const struct temp_slot_address_entry *) p;
588 return t->hash;
591 /* Compare two address -> temp slot mapping entries. */
592 static int
593 temp_slot_address_eq (const void *p1, const void *p2)
595 const struct temp_slot_address_entry *t1, *t2;
596 t1 = (const struct temp_slot_address_entry *) p1;
597 t2 = (const struct temp_slot_address_entry *) p2;
598 return exp_equiv_p (t1->address, t2->address, 0, true);
 601 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
602 static void
603 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
605 void **slot;
606 struct temp_slot_address_entry *t = GGC_NEW (struct temp_slot_address_entry);
607 t->address = address;
608 t->temp_slot = temp_slot;
609 t->hash = temp_slot_address_compute_hash (t);
610 slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
611 *slot = t;
614 /* Remove an address -> temp slot mapping entry if the temp slot is
615 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
616 static int
617 remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
619 const struct temp_slot_address_entry *t;
620 t = (const struct temp_slot_address_entry *) *slot;
621 if (! t->temp_slot->in_use)
622 *slot = NULL;
623 return 1;
626 /* Remove all mappings of addresses to unused temp slots. */
627 static void
628 remove_unused_temp_slot_addresses (void)
630 htab_traverse (temp_slot_address_table,
631 remove_unused_temp_slot_addresses_1,
632 NULL);
635 /* Find the temp slot corresponding to the object at address X. */
637 static struct temp_slot *
638 find_temp_slot_from_address (rtx x)
640 struct temp_slot *p;
641 struct temp_slot_address_entry tmp, *t;
643 /* First try the easy way:
644 See if X exists in the address -> temp slot mapping. */
645 tmp.address = x;
646 tmp.temp_slot = NULL;
647 tmp.hash = temp_slot_address_compute_hash (&tmp);
648 t = (struct temp_slot_address_entry *)
649 htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
650 if (t)
651 return t->temp_slot;
653 /* If we have a sum involving a register, see if it points to a temp
654 slot. */
655 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
656 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
657 return p;
658 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
659 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
660 return p;
662 /* Last resort: Address is a virtual stack var address. */
663 if (GET_CODE (x) == PLUS
664 && XEXP (x, 0) == virtual_stack_vars_rtx
665 && GET_CODE (XEXP (x, 1)) == CONST_INT)
667 int i;
668 for (i = max_slot_level (); i >= 0; i--)
669 for (p = *temp_slots_at_level (i); p; p = p->next)
671 if (INTVAL (XEXP (x, 1)) >= p->base_offset
672 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
673 return p;
677 return NULL;
680 /* Allocate a temporary stack slot and record it for possible later
681 reuse.
683 MODE is the machine mode to be given to the returned rtx.
685 SIZE is the size in units of the space required. We do no rounding here
686 since assign_stack_local will do any required rounding.
688 KEEP is 1 if this slot is to be retained after a call to
689 free_temp_slots. Automatic variables for a block are allocated
690 with this flag. KEEP values of 2 or 3 were needed respectively
691 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
692 or for SAVE_EXPRs, but they are now unused.
694 TYPE is the type that will be used for the stack slot. */
697 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
698 int keep, tree type)
700 unsigned int align;
701 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
702 rtx slot;
704 /* If SIZE is -1 it means that somebody tried to allocate a temporary
705 of a variable size. */
706 gcc_assert (size != -1);
708 /* These are now unused. */
709 gcc_assert (keep <= 1);
711 align = get_stack_local_alignment (type, mode);
713 /* Try to find an available, already-allocated temporary of the proper
714 mode which meets the size and alignment requirements. Choose the
715 smallest one with the closest alignment.
717 If assign_stack_temp is called outside of the tree->rtl expansion,
718 we cannot reuse the stack slots (that may still refer to
719 VIRTUAL_STACK_VARS_REGNUM). */
720 if (!virtuals_instantiated)
722 for (p = avail_temp_slots; p; p = p->next)
724 if (p->align >= align && p->size >= size
725 && GET_MODE (p->slot) == mode
726 && objects_must_conflict_p (p->type, type)
727 && (best_p == 0 || best_p->size > p->size
728 || (best_p->size == p->size && best_p->align > p->align)))
730 if (p->align == align && p->size == size)
732 selected = p;
733 cut_slot_from_list (selected, &avail_temp_slots);
734 best_p = 0;
735 break;
737 best_p = p;
742 /* Make our best, if any, the one to use. */
743 if (best_p)
745 selected = best_p;
746 cut_slot_from_list (selected, &avail_temp_slots);
748 /* If there are enough aligned bytes left over, make them into a new
749 temp_slot so that the extra bytes don't get wasted. Do this only
750 for BLKmode slots, so that we can be sure of the alignment. */
751 if (GET_MODE (best_p->slot) == BLKmode)
753 int alignment = best_p->align / BITS_PER_UNIT;
754 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
756 if (best_p->size - rounded_size >= alignment)
758 p = GGC_NEW (struct temp_slot);
759 p->in_use = p->addr_taken = 0;
760 p->size = best_p->size - rounded_size;
761 p->base_offset = best_p->base_offset + rounded_size;
762 p->full_size = best_p->full_size - rounded_size;
763 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
764 p->align = best_p->align;
765 p->type = best_p->type;
766 insert_slot_to_list (p, &avail_temp_slots);
768 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
769 stack_slot_list);
771 best_p->size = rounded_size;
772 best_p->full_size = rounded_size;
777 /* If we still didn't find one, make a new temporary. */
778 if (selected == 0)
780 HOST_WIDE_INT frame_offset_old = frame_offset;
782 p = GGC_NEW (struct temp_slot);
784 /* We are passing an explicit alignment request to assign_stack_local.
785 One side effect of that is assign_stack_local will not round SIZE
786 to ensure the frame offset remains suitably aligned.
788 So for requests which depended on the rounding of SIZE, we go ahead
789 and round it now. We also make sure ALIGNMENT is at least
790 BIGGEST_ALIGNMENT. */
791 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
792 p->slot = assign_stack_local (mode,
793 (mode == BLKmode
794 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
795 : size),
796 align);
798 p->align = align;
800 /* The following slot size computation is necessary because we don't
801 know the actual size of the temporary slot until assign_stack_local
802 has performed all the frame alignment and size rounding for the
803 requested temporary. Note that extra space added for alignment
804 can be either above or below this stack slot depending on which
805 way the frame grows. We include the extra space if and only if it
806 is above this slot. */
807 if (FRAME_GROWS_DOWNWARD)
808 p->size = frame_offset_old - frame_offset;
809 else
810 p->size = size;
812 /* Now define the fields used by combine_temp_slots. */
813 if (FRAME_GROWS_DOWNWARD)
815 p->base_offset = frame_offset;
816 p->full_size = frame_offset_old - frame_offset;
818 else
820 p->base_offset = frame_offset_old;
821 p->full_size = frame_offset - frame_offset_old;
824 selected = p;
827 p = selected;
828 p->in_use = 1;
829 p->addr_taken = 0;
830 p->type = type;
831 p->level = temp_slot_level;
832 p->keep = keep;
834 pp = temp_slots_at_level (p->level);
835 insert_slot_to_list (p, pp);
836 insert_temp_slot_address (XEXP (p->slot, 0), p);
838 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
839 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
840 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
842 /* If we know the alias set for the memory that will be used, use
843 it. If there's no TYPE, then we don't know anything about the
844 alias set for the memory. */
845 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
846 set_mem_align (slot, align);
848 /* If a type is specified, set the relevant flags. */
849 if (type != 0)
851 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
852 MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
853 || TREE_CODE (type) == COMPLEX_TYPE));
855 MEM_NOTRAP_P (slot) = 1;
857 return slot;
860 /* Allocate a temporary stack slot and record it for possible later
861 reuse. First three arguments are same as in preceding function. */
864 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
866 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
869 /* Assign a temporary.
870 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
871 and so that should be used in error messages. In either case, we
 872    allocate a temporary of the given type.
873 KEEP is as for assign_stack_temp.
874 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
875 it is 0 if a register is OK.
876 DONT_PROMOTE is 1 if we should not promote values in register
877 to wider modes. */
880 assign_temp (tree type_or_decl, int keep, int memory_required,
881 int dont_promote ATTRIBUTE_UNUSED)
883 tree type, decl;
884 enum machine_mode mode;
885 #ifdef PROMOTE_MODE
886 int unsignedp;
887 #endif
889 if (DECL_P (type_or_decl))
890 decl = type_or_decl, type = TREE_TYPE (decl);
891 else
892 decl = NULL, type = type_or_decl;
894 mode = TYPE_MODE (type);
895 #ifdef PROMOTE_MODE
896 unsignedp = TYPE_UNSIGNED (type);
897 #endif
899 if (mode == BLKmode || memory_required)
901 HOST_WIDE_INT size = int_size_in_bytes (type);
902 rtx tmp;
 904       /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
905 problems with allocating the stack space. */
906 if (size == 0)
907 size = 1;
909 /* Unfortunately, we don't yet know how to allocate variable-sized
910 temporaries. However, sometimes we can find a fixed upper limit on
911 the size, so try that instead. */
912 else if (size == -1)
913 size = max_int_size_in_bytes (type);
915 /* The size of the temporary may be too large to fit into an integer. */
916 /* ??? Not sure this should happen except for user silliness, so limit
917 this to things that aren't compiler-generated temporaries. The
918 rest of the time we'll die in assign_stack_temp_for_type. */
919 if (decl && size == -1
920 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
922 error ("size of variable %q+D is too large", decl);
923 size = 1;
926 tmp = assign_stack_temp_for_type (mode, size, keep, type);
927 return tmp;
930 #ifdef PROMOTE_MODE
931 if (! dont_promote)
932 mode = promote_mode (type, mode, &unsignedp, 0);
933 #endif
935 return gen_reg_rtx (mode);
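/* A minimal usage sketch (illustrative, not taken from this file): expansion
   code that needs an addressable scratch location for a value of tree type
   TYPE would call

     rtx tmp = assign_temp (type, 0, 1, 0);

   i.e. KEEP == 0 (freed by the next free_temp_slots), MEMORY_REQUIRED == 1
   (force a stack slot rather than a pseudo), and DONT_PROMOTE == 0.  */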
938 /* Combine temporary stack slots which are adjacent on the stack.
940 This allows for better use of already allocated stack space. This is only
941 done for BLKmode slots because we can be sure that we won't have alignment
942 problems in this case. */
944 static void
945 combine_temp_slots (void)
947 struct temp_slot *p, *q, *next, *next_q;
948 int num_slots;
950 /* We can't combine slots, because the information about which slot
951 is in which alias set will be lost. */
952 if (flag_strict_aliasing)
953 return;
955 /* If there are a lot of temp slots, don't do anything unless
 956      high levels of optimization are enabled.  */
957 if (! flag_expensive_optimizations)
958 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
959 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
960 return;
962 for (p = avail_temp_slots; p; p = next)
964 int delete_p = 0;
966 next = p->next;
968 if (GET_MODE (p->slot) != BLKmode)
969 continue;
971 for (q = p->next; q; q = next_q)
973 int delete_q = 0;
975 next_q = q->next;
977 if (GET_MODE (q->slot) != BLKmode)
978 continue;
980 if (p->base_offset + p->full_size == q->base_offset)
982 /* Q comes after P; combine Q into P. */
983 p->size += q->size;
984 p->full_size += q->full_size;
985 delete_q = 1;
987 else if (q->base_offset + q->full_size == p->base_offset)
989 /* P comes after Q; combine P into Q. */
990 q->size += p->size;
991 q->full_size += p->full_size;
992 delete_p = 1;
993 break;
995 if (delete_q)
996 cut_slot_from_list (q, &avail_temp_slots);
999 /* Either delete P or advance past it. */
1000 if (delete_p)
1001 cut_slot_from_list (p, &avail_temp_slots);
1005 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1006 slot that previously was known by OLD_RTX. */
1008 void
1009 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1011 struct temp_slot *p;
1013 if (rtx_equal_p (old_rtx, new_rtx))
1014 return;
1016 p = find_temp_slot_from_address (old_rtx);
 1018   /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
1019 NEW_RTX is a register, see if one operand of the PLUS is a
1020 temporary location. If so, NEW_RTX points into it. Otherwise,
1021 if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
1022 in common between them. If so, try a recursive call on those
1023 values. */
1024 if (p == 0)
1026 if (GET_CODE (old_rtx) != PLUS)
1027 return;
1029 if (REG_P (new_rtx))
1031 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1032 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1033 return;
1035 else if (GET_CODE (new_rtx) != PLUS)
1036 return;
1038 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1039 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1040 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1041 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1042 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1043 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1044 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1045 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1047 return;
1050 /* Otherwise add an alias for the temp's address. */
1051 insert_temp_slot_address (new_rtx, p);
1054 /* If X could be a reference to a temporary slot, mark the fact that its
1055 address was taken. */
1057 void
1058 mark_temp_addr_taken (rtx x)
1060 struct temp_slot *p;
1062 if (x == 0)
1063 return;
1065 /* If X is not in memory or is at a constant address, it cannot be in
1066 a temporary slot. */
1067 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1068 return;
1070 p = find_temp_slot_from_address (XEXP (x, 0));
1071 if (p != 0)
1072 p->addr_taken = 1;
1075 /* If X could be a reference to a temporary slot, mark that slot as
 1076    belonging to the level one higher than the current level.  If X
1077 matched one of our slots, just mark that one. Otherwise, we can't
1078 easily predict which it is, so upgrade all of them. Kept slots
1079 need not be touched.
1081 This is called when an ({...}) construct occurs and a statement
1082 returns a value in memory. */
1084 void
1085 preserve_temp_slots (rtx x)
1087 struct temp_slot *p = 0, *next;
 1089   /* If there is no result, we still might have some objects whose addresses
1090 were taken, so we need to make sure they stay around. */
1091 if (x == 0)
1093 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1095 next = p->next;
1097 if (p->addr_taken)
1098 move_slot_to_level (p, temp_slot_level - 1);
1101 return;
1104 /* If X is a register that is being used as a pointer, see if we have
1105 a temporary slot we know it points to. To be consistent with
1106 the code below, we really should preserve all non-kept slots
1107 if we can't find a match, but that seems to be much too costly. */
1108 if (REG_P (x) && REG_POINTER (x))
1109 p = find_temp_slot_from_address (x);
1111 /* If X is not in memory or is at a constant address, it cannot be in
1112 a temporary slot, but it can contain something whose address was
1113 taken. */
1114 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1116 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1118 next = p->next;
1120 if (p->addr_taken)
1121 move_slot_to_level (p, temp_slot_level - 1);
1124 return;
1127 /* First see if we can find a match. */
1128 if (p == 0)
1129 p = find_temp_slot_from_address (XEXP (x, 0));
1131 if (p != 0)
1133 /* Move everything at our level whose address was taken to our new
1134 level in case we used its address. */
1135 struct temp_slot *q;
1137 if (p->level == temp_slot_level)
1139 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1141 next = q->next;
1143 if (p != q && q->addr_taken)
1144 move_slot_to_level (q, temp_slot_level - 1);
1147 move_slot_to_level (p, temp_slot_level - 1);
1148 p->addr_taken = 0;
1150 return;
1153 /* Otherwise, preserve all non-kept slots at this level. */
1154 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1156 next = p->next;
1158 if (!p->keep)
1159 move_slot_to_level (p, temp_slot_level - 1);
1163 /* Free all temporaries used so far. This is normally called at the
1164 end of generating code for a statement. */
1166 void
1167 free_temp_slots (void)
1169 struct temp_slot *p, *next;
1171 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1173 next = p->next;
1175 if (!p->keep)
1176 make_slot_available (p);
1179 remove_unused_temp_slot_addresses ();
1180 combine_temp_slots ();
1183 /* Push deeper into the nesting level for stack temporaries. */
1185 void
1186 push_temp_slots (void)
1188 temp_slot_level++;
1191 /* Pop a temporary nesting level. All slots in use in the current level
1192 are freed. */
1194 void
1195 pop_temp_slots (void)
1197 struct temp_slot *p, *next;
1199 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1201 next = p->next;
1202 make_slot_available (p);
1205 remove_unused_temp_slot_addresses ();
1206 combine_temp_slots ();
1208 temp_slot_level--;
1211 /* Initialize temporary slots. */
1213 void
1214 init_temp_slots (void)
1216 /* We have not allocated any temporaries yet. */
1217 avail_temp_slots = 0;
1218 used_temp_slots = 0;
1219 temp_slot_level = 0;
1221 /* Set up the table to map addresses to temp slots. */
1222 if (! temp_slot_address_table)
1223 temp_slot_address_table = htab_create_ggc (32,
1224 temp_slot_address_hash,
1225 temp_slot_address_eq,
1226 NULL);
1227 else
1228 htab_empty (temp_slot_address_table);
1231 /* These routines are responsible for converting virtual register references
1232 to the actual hard register references once RTL generation is complete.
 1234    The following five variables are used for communication between the
1235 routines. They contain the offsets of the virtual registers from their
1236 respective hard registers. */
1238 static int in_arg_offset;
1239 static int var_offset;
1240 static int dynamic_offset;
1241 static int out_arg_offset;
1242 static int cfa_offset;
1244 /* In most machines, the stack pointer register is equivalent to the bottom
1245 of the stack. */
1247 #ifndef STACK_POINTER_OFFSET
1248 #define STACK_POINTER_OFFSET 0
1249 #endif
1251 /* If not defined, pick an appropriate default for the offset of dynamically
1252 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1253 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1255 #ifndef STACK_DYNAMIC_OFFSET
1257 /* The bottom of the stack points to the actual arguments. If
1258 REG_PARM_STACK_SPACE is defined, this includes the space for the register
 1259    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1260 stack space for register parameters is not pushed by the caller, but
1261 rather part of the fixed stack areas and hence not included in
1262 `crtl->outgoing_args_size'. Nevertheless, we must allow
1263 for it when allocating stack dynamic objects. */
1265 #if defined(REG_PARM_STACK_SPACE)
1266 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1267 ((ACCUMULATE_OUTGOING_ARGS \
1268 ? (crtl->outgoing_args_size \
1269 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1270 : REG_PARM_STACK_SPACE (FNDECL))) \
1271 : 0) + (STACK_POINTER_OFFSET))
1272 #else
1273 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1274 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1275 + (STACK_POINTER_OFFSET))
1276 #endif
1277 #endif
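/* For example, on a target that leaves both REG_PARM_STACK_SPACE and
   STACK_DYNAMIC_OFFSET undefined, the default above reduces to

     crtl->outgoing_args_size + STACK_POINTER_OFFSET

   when outgoing arguments are accumulated, and to STACK_POINTER_OFFSET alone
   when they are pushed.  */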
1280 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1281 is a virtual register, return the equivalent hard register and set the
1282 offset indirectly through the pointer. Otherwise, return 0. */
1284 static rtx
1285 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1287 rtx new_rtx;
1288 HOST_WIDE_INT offset;
1290 if (x == virtual_incoming_args_rtx)
1292 if (stack_realign_drap)
1294 /* Replace virtual_incoming_args_rtx with internal arg
1295 pointer if DRAP is used to realign stack. */
1296 new_rtx = crtl->args.internal_arg_pointer;
1297 offset = 0;
1299 else
1300 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1302 else if (x == virtual_stack_vars_rtx)
1303 new_rtx = frame_pointer_rtx, offset = var_offset;
1304 else if (x == virtual_stack_dynamic_rtx)
1305 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1306 else if (x == virtual_outgoing_args_rtx)
1307 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1308 else if (x == virtual_cfa_rtx)
1310 #ifdef FRAME_POINTER_CFA_OFFSET
1311 new_rtx = frame_pointer_rtx;
1312 #else
1313 new_rtx = arg_pointer_rtx;
1314 #endif
1315 offset = cfa_offset;
1317 else
1318 return NULL_RTX;
1320 *poffset = offset;
1321 return new_rtx;
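/* Summarizing the mapping performed above (the offsets are the static
   variables computed in instantiate_virtual_regs):

     virtual_incoming_args_rtx  -> arg_pointer_rtx   + in_arg_offset
     virtual_stack_vars_rtx     -> frame_pointer_rtx + var_offset
     virtual_stack_dynamic_rtx  -> stack_pointer_rtx + dynamic_offset
     virtual_outgoing_args_rtx  -> stack_pointer_rtx + out_arg_offset
     virtual_cfa_rtx            -> frame or arg pointer + cfa_offset

   with the DRAP case for incoming args handled specially, as noted in the
   code.  */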
1324 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1325 Instantiate any virtual registers present inside of *LOC. The expression
1326 is simplified, as much as possible, but is not to be considered "valid"
1327 in any sense implied by the target. If any change is made, set CHANGED
1328 to true. */
1330 static int
1331 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1333 HOST_WIDE_INT offset;
1334 bool *changed = (bool *) data;
1335 rtx x, new_rtx;
1337 x = *loc;
1338 if (x == 0)
1339 return 0;
1341 switch (GET_CODE (x))
1343 case REG:
1344 new_rtx = instantiate_new_reg (x, &offset);
1345 if (new_rtx)
1347 *loc = plus_constant (new_rtx, offset);
1348 if (changed)
1349 *changed = true;
1351 return -1;
1353 case PLUS:
1354 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1355 if (new_rtx)
1357 new_rtx = plus_constant (new_rtx, offset);
1358 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
1359 if (changed)
1360 *changed = true;
1361 return -1;
1364 /* FIXME -- from old code */
1365 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1366 we can commute the PLUS and SUBREG because pointers into the
1367 frame are well-behaved. */
1368 break;
1370 default:
1371 break;
1374 return 0;
1377 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1378 matches the predicate for insn CODE operand OPERAND. */
1380 static int
1381 safe_insn_predicate (int code, int operand, rtx x)
1383 const struct insn_operand_data *op_data;
1385 if (code < 0)
1386 return true;
1388 op_data = &insn_data[code].operand[operand];
1389 if (op_data->predicate == NULL)
1390 return true;
1392 return op_data->predicate (x, op_data->mode);
1395 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1396 registers present inside of insn. The result will be a valid insn. */
1398 static void
1399 instantiate_virtual_regs_in_insn (rtx insn)
1401 HOST_WIDE_INT offset;
1402 int insn_code, i;
1403 bool any_change = false;
1404 rtx set, new_rtx, x, seq;
1406 /* There are some special cases to be handled first. */
1407 set = single_set (insn);
1408 if (set)
1410 /* We're allowed to assign to a virtual register. This is interpreted
1411 to mean that the underlying register gets assigned the inverse
1412 transformation. This is used, for example, in the handling of
1413 non-local gotos. */
1414 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1415 if (new_rtx)
1417 start_sequence ();
1419 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1420 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1421 GEN_INT (-offset));
1422 x = force_operand (x, new_rtx);
1423 if (x != new_rtx)
1424 emit_move_insn (new_rtx, x);
1426 seq = get_insns ();
1427 end_sequence ();
1429 emit_insn_before (seq, insn);
1430 delete_insn (insn);
1431 return;
1434 /* Handle a straight copy from a virtual register by generating a
1435 new add insn. The difference between this and falling through
1436 to the generic case is avoiding a new pseudo and eliminating a
1437 move insn in the initial rtl stream. */
1438 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1439 if (new_rtx && offset != 0
1440 && REG_P (SET_DEST (set))
1441 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1443 start_sequence ();
1445 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1446 new_rtx, GEN_INT (offset), SET_DEST (set),
1447 1, OPTAB_LIB_WIDEN);
1448 if (x != SET_DEST (set))
1449 emit_move_insn (SET_DEST (set), x);
1451 seq = get_insns ();
1452 end_sequence ();
1454 emit_insn_before (seq, insn);
1455 delete_insn (insn);
1456 return;
1459 extract_insn (insn);
1460 insn_code = INSN_CODE (insn);
1462 /* Handle a plus involving a virtual register by determining if the
1463 operands remain valid if they're modified in place. */
1464 if (GET_CODE (SET_SRC (set)) == PLUS
1465 && recog_data.n_operands >= 3
1466 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1467 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1468 && GET_CODE (recog_data.operand[2]) == CONST_INT
1469 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1471 offset += INTVAL (recog_data.operand[2]);
1473 /* If the sum is zero, then replace with a plain move. */
1474 if (offset == 0
1475 && REG_P (SET_DEST (set))
1476 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1478 start_sequence ();
1479 emit_move_insn (SET_DEST (set), new_rtx);
1480 seq = get_insns ();
1481 end_sequence ();
1483 emit_insn_before (seq, insn);
1484 delete_insn (insn);
1485 return;
1488 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1490 /* Using validate_change and apply_change_group here leaves
1491 recog_data in an invalid state. Since we know exactly what
1492 we want to check, do those two by hand. */
1493 if (safe_insn_predicate (insn_code, 1, new_rtx)
1494 && safe_insn_predicate (insn_code, 2, x))
1496 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1497 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1498 any_change = true;
1500 /* Fall through into the regular operand fixup loop in
1501 order to take care of operands other than 1 and 2. */
1505 else
1507 extract_insn (insn);
1508 insn_code = INSN_CODE (insn);
1511 /* In the general case, we expect virtual registers to appear only in
1512 operands, and then only as either bare registers or inside memories. */
1513 for (i = 0; i < recog_data.n_operands; ++i)
1515 x = recog_data.operand[i];
1516 switch (GET_CODE (x))
1518 case MEM:
1520 rtx addr = XEXP (x, 0);
1521 bool changed = false;
1523 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1524 if (!changed)
1525 continue;
1527 start_sequence ();
1528 x = replace_equiv_address (x, addr);
1529 /* It may happen that the address with the virtual reg
1530 was valid (e.g. based on the virtual stack reg, which might
1531 be acceptable to the predicates with all offsets), whereas
1532 the address now isn't anymore, for instance when the address
 1533            is still offset, but the base reg isn't virtual-stack-reg
1534 anymore. Below we would do a force_reg on the whole operand,
1535 but this insn might actually only accept memory. Hence,
1536 before doing that last resort, try to reload the address into
1537 a register, so this operand stays a MEM. */
1538 if (!safe_insn_predicate (insn_code, i, x))
1540 addr = force_reg (GET_MODE (addr), addr);
1541 x = replace_equiv_address (x, addr);
1543 seq = get_insns ();
1544 end_sequence ();
1545 if (seq)
1546 emit_insn_before (seq, insn);
1548 break;
1550 case REG:
1551 new_rtx = instantiate_new_reg (x, &offset);
1552 if (new_rtx == NULL)
1553 continue;
1554 if (offset == 0)
1555 x = new_rtx;
1556 else
1558 start_sequence ();
1560 /* Careful, special mode predicates may have stuff in
1561 insn_data[insn_code].operand[i].mode that isn't useful
1562 to us for computing a new value. */
1563 /* ??? Recognize address_operand and/or "p" constraints
 1564            to see if (plus new offset) is a valid address before we put
1565 this through expand_simple_binop. */
1566 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1567 GEN_INT (offset), NULL_RTX,
1568 1, OPTAB_LIB_WIDEN);
1569 seq = get_insns ();
1570 end_sequence ();
1571 emit_insn_before (seq, insn);
1573 break;
1575 case SUBREG:
1576 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1577 if (new_rtx == NULL)
1578 continue;
1579 if (offset != 0)
1581 start_sequence ();
1582 new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
1583 GEN_INT (offset), NULL_RTX,
1584 1, OPTAB_LIB_WIDEN);
1585 seq = get_insns ();
1586 end_sequence ();
1587 emit_insn_before (seq, insn);
1589 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1590 GET_MODE (new_rtx), SUBREG_BYTE (x));
1591 gcc_assert (x);
1592 break;
1594 default:
1595 continue;
1598 /* At this point, X contains the new value for the operand.
1599 Validate the new value vs the insn predicate. Note that
1600 asm insns will have insn_code -1 here. */
1601 if (!safe_insn_predicate (insn_code, i, x))
1603 start_sequence ();
1604 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1605 seq = get_insns ();
1606 end_sequence ();
1607 if (seq)
1608 emit_insn_before (seq, insn);
1611 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1612 any_change = true;
1615 if (any_change)
1617 /* Propagate operand changes into the duplicates. */
1618 for (i = 0; i < recog_data.n_dups; ++i)
1619 *recog_data.dup_loc[i]
1620 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1622 /* Force re-recognition of the instruction for validation. */
1623 INSN_CODE (insn) = -1;
1626 if (asm_noperands (PATTERN (insn)) >= 0)
1628 if (!check_asm_operands (PATTERN (insn)))
1630 error_for_asm (insn, "impossible constraint in %<asm%>");
1631 delete_insn (insn);
1634 else
1636 if (recog_memoized (insn) < 0)
1637 fatal_insn_not_found (insn);
1641 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1642 do any instantiation required. */
1644 void
1645 instantiate_decl_rtl (rtx x)
1647 rtx addr;
1649 if (x == 0)
1650 return;
1652 /* If this is a CONCAT, recurse for the pieces. */
1653 if (GET_CODE (x) == CONCAT)
1655 instantiate_decl_rtl (XEXP (x, 0));
1656 instantiate_decl_rtl (XEXP (x, 1));
1657 return;
1660 /* If this is not a MEM, no need to do anything. Similarly if the
1661 address is a constant or a register that is not a virtual register. */
1662 if (!MEM_P (x))
1663 return;
1665 addr = XEXP (x, 0);
1666 if (CONSTANT_P (addr)
1667 || (REG_P (addr)
1668 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1669 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1670 return;
1672 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1675 /* Helper for instantiate_decls called via walk_tree: Process all decls
1676 in the given DECL_VALUE_EXPR. */
1678 static tree
1679 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1681 tree t = *tp;
1682 if (! EXPR_P (t))
1684 *walk_subtrees = 0;
1685 if (DECL_P (t) && DECL_RTL_SET_P (t))
1686 instantiate_decl_rtl (DECL_RTL (t));
1688 return NULL;
1691 /* Subroutine of instantiate_decls: Process all decls in the given
1692 BLOCK node and all its subblocks. */
1694 static void
1695 instantiate_decls_1 (tree let)
1697 tree t;
1699 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1701 if (DECL_RTL_SET_P (t))
1702 instantiate_decl_rtl (DECL_RTL (t));
1703 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1705 tree v = DECL_VALUE_EXPR (t);
1706 walk_tree (&v, instantiate_expr, NULL, NULL);
1710 /* Process all subblocks. */
1711 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1712 instantiate_decls_1 (t);
1715 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1716 all virtual registers in their DECL_RTL's. */
1718 static void
1719 instantiate_decls (tree fndecl)
1721 tree decl, t, next;
1723 /* Process all parameters of the function. */
1724 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1726 instantiate_decl_rtl (DECL_RTL (decl));
1727 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1728 if (DECL_HAS_VALUE_EXPR_P (decl))
1730 tree v = DECL_VALUE_EXPR (decl);
1731 walk_tree (&v, instantiate_expr, NULL, NULL);
1735 /* Now process all variables defined in the function or its subblocks. */
1736 instantiate_decls_1 (DECL_INITIAL (fndecl));
1738 t = cfun->local_decls;
1739 cfun->local_decls = NULL_TREE;
1740 for (; t; t = next)
1742 next = TREE_CHAIN (t);
1743 decl = TREE_VALUE (t);
1744 if (DECL_RTL_SET_P (decl))
1745 instantiate_decl_rtl (DECL_RTL (decl));
1746 ggc_free (t);
1750 /* Pass through the INSNS of function FNDECL and convert virtual register
1751 references to hard register references. */
1753 static unsigned int
1754 instantiate_virtual_regs (void)
1756 rtx insn;
1758 /* Compute the offsets to use for this function. */
1759 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1760 var_offset = STARTING_FRAME_OFFSET;
1761 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1762 out_arg_offset = STACK_POINTER_OFFSET;
1763 #ifdef FRAME_POINTER_CFA_OFFSET
1764 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1765 #else
1766 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1767 #endif
1769 /* Initialize recognition, indicating that volatile is OK. */
1770 init_recog ();
1772 /* Scan through all the insns, instantiating every virtual register still
1773 present. */
1774 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1775 if (INSN_P (insn))
1777 /* These patterns in the instruction stream can never be recognized.
1778 Fortunately, they shouldn't contain virtual registers either. */
1779 if (GET_CODE (PATTERN (insn)) == USE
1780 || GET_CODE (PATTERN (insn)) == CLOBBER
1781 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1782 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1783 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1784 continue;
1786 instantiate_virtual_regs_in_insn (insn);
1788 if (INSN_DELETED_P (insn))
1789 continue;
1791 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1793 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1794 if (GET_CODE (insn) == CALL_INSN)
1795 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1796 instantiate_virtual_regs_in_rtx, NULL);
1799 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1800 instantiate_decls (current_function_decl);
1802 targetm.instantiate_decls ();
1804 /* Indicate that, from now on, assign_stack_local should use
1805 frame_pointer_rtx. */
1806 virtuals_instantiated = 1;
1807 return 0;
1810 struct rtl_opt_pass pass_instantiate_virtual_regs =
1813 RTL_PASS,
1814 "vregs", /* name */
1815 NULL, /* gate */
1816 instantiate_virtual_regs, /* execute */
1817 NULL, /* sub */
1818 NULL, /* next */
1819 0, /* static_pass_number */
1820 0, /* tv_id */
1821 0, /* properties_required */
1822 0, /* properties_provided */
1823 0, /* properties_destroyed */
1824 0, /* todo_flags_start */
1825 TODO_dump_func /* todo_flags_finish */
1830 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1831 This means a type for which function calls must pass an address to the
1832 function or get an address back from the function.
1833 EXP may be a type node or an expression (whose type is tested). */
1836 aggregate_value_p (const_tree exp, const_tree fntype)
1838 int i, regno, nregs;
1839 rtx reg;
1841 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1843 /* DECL node associated with FNTYPE when relevant, which we might need to
1844 check for by-invisible-reference returns, typically for CALL_EXPR input
1845 EXPressions. */
1846 const_tree fndecl = NULL_TREE;
1848 if (fntype)
1849 switch (TREE_CODE (fntype))
1851 case CALL_EXPR:
1852 fndecl = get_callee_fndecl (fntype);
1853 fntype = (fndecl
1854 ? TREE_TYPE (fndecl)
1855 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
1856 break;
1857 case FUNCTION_DECL:
1858 fndecl = fntype;
1859 fntype = TREE_TYPE (fndecl);
1860 break;
1861 case FUNCTION_TYPE:
1862 case METHOD_TYPE:
1863 break;
1864 case IDENTIFIER_NODE:
1865 fntype = 0;
1866 break;
1867 default:
 1868        /* We don't expect other tree codes here.  */
1869 gcc_unreachable ();
1872 if (TREE_CODE (type) == VOID_TYPE)
1873 return 0;
1875 /* If the front end has decided that this needs to be passed by
1876 reference, do so. */
1877 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1878 && DECL_BY_REFERENCE (exp))
1879 return 1;
1881 /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
1882 called function RESULT_DECL, meaning the function returns in memory by
1883 invisible reference. This check lets front-ends not set TREE_ADDRESSABLE
1884 on the function type, which used to be the way to request such a return
1885 mechanism but might now be causing troubles at gimplification time if
1886 temporaries with the function type need to be created. */
1887 if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
1888 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
1889 return 1;
1891 if (targetm.calls.return_in_memory (type, fntype))
1892 return 1;
1893 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1894 and thus can't be returned in registers. */
1895 if (TREE_ADDRESSABLE (type))
1896 return 1;
1897 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1898 return 1;
1899 /* Make sure we have suitable call-clobbered regs to return
1900 the value in; if not, we must return it in memory. */
1901 reg = hard_function_value (type, 0, fntype, 0);
1903 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1904 it is OK. */
1905 if (!REG_P (reg))
1906 return 0;
1908 regno = REGNO (reg);
1909 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1910 for (i = 0; i < nregs; i++)
1911 if (! call_used_regs[regno + i])
1912 return 1;
1913 return 0;
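/* An illustrative use (sketch, not part of the original code): this
   predicate is what decides whether a hidden return-slot pointer is
   needed.  For instance, assign_parms_augmented_arg_list below asks

       aggregate_value_p (DECL_RESULT (fndecl), fndecl)

   and, when that is nonzero and the target supplies no struct_value_rtx,
   it prepends an artificial PARM_DECL carrying the return slot's
   address.  */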
1916 /* Return true if we should assign DECL a pseudo register; false if it
1917 should live on the local stack. */
1919 bool
1920 use_register_for_decl (const_tree decl)
1922 if (!targetm.calls.allocate_stack_slots_for_args())
1923 return true;
1925 /* Honor volatile. */
1926 if (TREE_SIDE_EFFECTS (decl))
1927 return false;
1929 /* Honor addressability. */
1930 if (TREE_ADDRESSABLE (decl))
1931 return false;
1933 /* Only register-like things go in registers. */
1934 if (DECL_MODE (decl) == BLKmode)
1935 return false;
1937 /* If -ffloat-store specified, don't put explicit float variables
1938 into registers. */
1939 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1940 propagates values across these stores, and it probably shouldn't. */
1941 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1942 return false;
1944 /* If we're not interested in tracking debugging information for
1945 this decl, then we can certainly put it in a register. */
1946 if (DECL_IGNORED_P (decl))
1947 return true;
1949 if (optimize)
1950 return true;
1952 if (!DECL_REGISTER (decl))
1953 return false;
1955 switch (TREE_CODE (TREE_TYPE (decl)))
1957 case RECORD_TYPE:
1958 case UNION_TYPE:
1959 case QUAL_UNION_TYPE:
1960 /* When not optimizing, disregard register keyword for variables with
1961 types containing methods, otherwise the methods won't be callable
1962 from the debugger. */
1963 if (TYPE_METHODS (TREE_TYPE (decl)))
1964 return false;
1965 break;
1966 default:
1967 break;
1970 return true;
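/* Two illustrative (hypothetical) decls at -O0, where the "optimize"
   early-return above does not apply: "register int i;" has DECL_REGISTER
   set and a scalar type, so it falls through the switch and gets a
   pseudo; "register struct S s;" where S has member functions hits the
   TYPE_METHODS check and stays on the stack so the debugger can still
   call those methods.  */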
1973 /* Return true if TYPE should be passed by invisible reference. */
1975 bool
1976 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1977 tree type, bool named_arg)
1979 if (type)
1981 /* If this type contains non-trivial constructors, then it is
1982 forbidden for the middle-end to create any new copies. */
1983 if (TREE_ADDRESSABLE (type))
1984 return true;
1986 /* GCC post 3.4 passes *all* variable sized types by reference. */
1987 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1988 return true;
1991 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
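/* Illustrative cases (hypothetical types): a C++ class with a non-trivial
   copy constructor is marked TREE_ADDRESSABLE by its front end, so the
   first test above forces it by reference; a variably modified type whose
   TYPE_SIZE is not an INTEGER_CST hits the second test; anything else is
   left entirely to targetm.calls.pass_by_reference.  */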
1994 /* Return true if TYPE, which is passed by reference, should be callee
1995 copied instead of caller copied. */
1997 bool
1998 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1999 tree type, bool named_arg)
2001 if (type && TREE_ADDRESSABLE (type))
2002 return false;
2003 return targetm.calls.callee_copies (ca, mode, type, named_arg);
2006 /* Structures to communicate between the subroutines of assign_parms.
2007 The first holds data persistent across all parameters, the second
2008 is cleared out for each parameter. */
2010 struct assign_parm_data_all
2012 CUMULATIVE_ARGS args_so_far;
2013 struct args_size stack_args_size;
2014 tree function_result_decl;
2015 tree orig_fnargs;
2016 rtx first_conversion_insn;
2017 rtx last_conversion_insn;
2018 HOST_WIDE_INT pretend_args_size;
2019 HOST_WIDE_INT extra_pretend_bytes;
2020 int reg_parm_stack_space;
2023 struct assign_parm_data_one
2025 tree nominal_type;
2026 tree passed_type;
2027 rtx entry_parm;
2028 rtx stack_parm;
2029 enum machine_mode nominal_mode;
2030 enum machine_mode passed_mode;
2031 enum machine_mode promoted_mode;
2032 struct locate_and_pad_arg_data locate;
2033 int partial;
2034 BOOL_BITFIELD named_arg : 1;
2035 BOOL_BITFIELD passed_pointer : 1;
2036 BOOL_BITFIELD on_stack : 1;
2037 BOOL_BITFIELD loaded_in_reg : 1;
2040 /* A subroutine of assign_parms. Initialize ALL. */
2042 static void
2043 assign_parms_initialize_all (struct assign_parm_data_all *all)
2045 tree fntype;
2047 memset (all, 0, sizeof (*all));
2049 fntype = TREE_TYPE (current_function_decl);
2051 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2052 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
2053 #else
2054 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
2055 current_function_decl, -1);
2056 #endif
2058 #ifdef REG_PARM_STACK_SPACE
2059 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2060 #endif
2063 /* If ARGS contains entries with complex types, split the entry into two
2064 entries of the component type. Return a new list if substitutions are
2065 needed, else the old list. */
2067 static tree
2068 split_complex_args (tree args)
2070 tree p;
2072 /* Before allocating memory, check for the common case of no complex. */
2073 for (p = args; p; p = TREE_CHAIN (p))
2075 tree type = TREE_TYPE (p);
2076 if (TREE_CODE (type) == COMPLEX_TYPE
2077 && targetm.calls.split_complex_arg (type))
2078 goto found;
2080 return args;
2082 found:
2083 args = copy_list (args);
2085 for (p = args; p; p = TREE_CHAIN (p))
2087 tree type = TREE_TYPE (p);
2088 if (TREE_CODE (type) == COMPLEX_TYPE
2089 && targetm.calls.split_complex_arg (type))
2091 tree decl;
2092 tree subtype = TREE_TYPE (type);
2093 bool addressable = TREE_ADDRESSABLE (p);
2095 /* Rewrite the PARM_DECL's type with its component. */
2096 TREE_TYPE (p) = subtype;
2097 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2098 DECL_MODE (p) = VOIDmode;
2099 DECL_SIZE (p) = NULL;
2100 DECL_SIZE_UNIT (p) = NULL;
2101 /* If this arg must go in memory, put it in a pseudo here.
2102 We can't allow it to go in memory as per normal parms,
2103 because the usual place might not have the imag part
2104 adjacent to the real part. */
2105 DECL_ARTIFICIAL (p) = addressable;
2106 DECL_IGNORED_P (p) = addressable;
2107 TREE_ADDRESSABLE (p) = 0;
2108 layout_decl (p, 0);
2110 /* Build a second synthetic decl. */
2111 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
2112 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2113 DECL_ARTIFICIAL (decl) = addressable;
2114 DECL_IGNORED_P (decl) = addressable;
2115 layout_decl (decl, 0);
2117 /* Splice it in; skip the new decl. */
2118 TREE_CHAIN (decl) = TREE_CHAIN (p);
2119 TREE_CHAIN (p) = decl;
2120 p = decl;
2124 return args;
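/* For illustration: if the target's split_complex_arg hook accepts the
   type, a "_Complex double z" parameter is rewritten here into two
   "double" PARM_DECLs -- the original decl narrowed to the component
   type plus the synthetic decl built above -- chained in place of the
   single complex parameter.  */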
2127 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2128 the hidden struct return argument, and (abi willing) complex args.
2129 Return the new parameter list. */
2131 static tree
2132 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2134 tree fndecl = current_function_decl;
2135 tree fntype = TREE_TYPE (fndecl);
2136 tree fnargs = DECL_ARGUMENTS (fndecl);
2138 /* If struct value address is treated as the first argument, make it so. */
2139 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2140 && ! cfun->returns_pcc_struct
2141 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2143 tree type = build_pointer_type (TREE_TYPE (fntype));
2144 tree decl;
2146 decl = build_decl (PARM_DECL, NULL_TREE, type);
2147 DECL_ARG_TYPE (decl) = type;
2148 DECL_ARTIFICIAL (decl) = 1;
2149 DECL_IGNORED_P (decl) = 1;
2151 TREE_CHAIN (decl) = fnargs;
2152 fnargs = decl;
2153 all->function_result_decl = decl;
2156 all->orig_fnargs = fnargs;
2158 /* If the target wants to split complex arguments into scalars, do so. */
2159 if (targetm.calls.split_complex_arg)
2160 fnargs = split_complex_args (fnargs);
2162 return fnargs;
2165 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2166 data for the parameter. Incorporate ABI specifics such as pass-by-
2167 reference and type promotion. */
2169 static void
2170 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2171 struct assign_parm_data_one *data)
2173 tree nominal_type, passed_type;
2174 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2176 memset (data, 0, sizeof (*data));
2178 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2179 if (!cfun->stdarg)
2180 data->named_arg = 1; /* No variadic parms. */
2181 else if (TREE_CHAIN (parm))
2182 data->named_arg = 1; /* Not the last non-variadic parm. */
2183 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2184 data->named_arg = 1; /* Only variadic ones are unnamed. */
2185 else
2186 data->named_arg = 0; /* Treat as variadic. */
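/* As an example (hypothetical signature "int f (int a, int b, ...)"):
   cfun->stdarg is set, so "a", which still has a TREE_CHAIN, is named;
   "b", the last declared parm, is named only if the target's
   strict_argument_naming hook says so, and is otherwise treated as
   variadic.  */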
2188 nominal_type = TREE_TYPE (parm);
2189 passed_type = DECL_ARG_TYPE (parm);
2191 /* Look out for errors propagating this far. Also, if the parameter's
2192 type is void then its value doesn't matter. */
2193 if (TREE_TYPE (parm) == error_mark_node
2194 /* This can happen after weird syntax errors
2195 or if an enum type is defined among the parms. */
2196 || TREE_CODE (parm) != PARM_DECL
2197 || passed_type == NULL
2198 || VOID_TYPE_P (nominal_type))
2200 nominal_type = passed_type = void_type_node;
2201 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2202 goto egress;
2205 /* Find mode of arg as it is passed, and mode of arg as it should be
2206 during execution of this function. */
2207 passed_mode = TYPE_MODE (passed_type);
2208 nominal_mode = TYPE_MODE (nominal_type);
2210 /* If the parm is to be passed as a transparent union, use the type of
2211 the first field for the tests below. We have already verified that
2212 the modes are the same. */
2213 if (TREE_CODE (passed_type) == UNION_TYPE
2214 && TYPE_TRANSPARENT_UNION (passed_type))
2215 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
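/* Illustration (hypothetical type): for a parameter whose type is a union
   of "int *" and "const int *" declared with
   __attribute__((__transparent_union__)), the tests below see "int *",
   the type of its first field, rather than the union itself.  */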
2217 /* See if this arg was passed by invisible reference. */
2218 if (pass_by_reference (&all->args_so_far, passed_mode,
2219 passed_type, data->named_arg))
2221 passed_type = nominal_type = build_pointer_type (passed_type);
2222 data->passed_pointer = true;
2223 passed_mode = nominal_mode = Pmode;
2226 /* Find mode as it is passed by the ABI. */
2227 promoted_mode = passed_mode;
2228 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2230 int unsignedp = TYPE_UNSIGNED (passed_type);
2231 promoted_mode = promote_mode (passed_type, promoted_mode,
2232 &unsignedp, 1);
2235 egress:
2236 data->nominal_type = nominal_type;
2237 data->passed_type = passed_type;
2238 data->nominal_mode = nominal_mode;
2239 data->passed_mode = passed_mode;
2240 data->promoted_mode = promoted_mode;
2243 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2245 static void
2246 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2247 struct assign_parm_data_one *data, bool no_rtl)
2249 int varargs_pretend_bytes = 0;
2251 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2252 data->promoted_mode,
2253 data->passed_type,
2254 &varargs_pretend_bytes, no_rtl);
2256 /* If the back-end has requested extra stack space, record how much is
2257 needed. Do not change pretend_args_size otherwise since it may be
2258 nonzero from an earlier partial argument. */
2259 if (varargs_pretend_bytes > 0)
2260 all->pretend_args_size = varargs_pretend_bytes;
2263 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2264 the incoming location of the current parameter. */
2266 static void
2267 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2268 struct assign_parm_data_one *data)
2270 HOST_WIDE_INT pretend_bytes = 0;
2271 rtx entry_parm;
2272 bool in_regs;
2274 if (data->promoted_mode == VOIDmode)
2276 data->entry_parm = data->stack_parm = const0_rtx;
2277 return;
2280 #ifdef FUNCTION_INCOMING_ARG
2281 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2282 data->passed_type, data->named_arg);
2283 #else
2284 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2285 data->passed_type, data->named_arg);
2286 #endif
2288 if (entry_parm == 0)
2289 data->promoted_mode = data->passed_mode;
2291 /* Determine parm's home in the stack, in case it arrives in the stack
2292 or we should pretend it did. Compute the stack position and rtx where
2293 the argument arrives and its size.
2295 There is one complexity here: If this was a parameter that would
2296 have been passed in registers, but wasn't only because it is
2297 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2298 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2299 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2300 as it was the previous time. */
2301 in_regs = entry_parm != 0;
2302 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2303 in_regs = true;
2304 #endif
2305 if (!in_regs && !data->named_arg)
2307 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2309 rtx tem;
2310 #ifdef FUNCTION_INCOMING_ARG
2311 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2312 data->passed_type, true);
2313 #else
2314 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2315 data->passed_type, true);
2316 #endif
2317 in_regs = tem != NULL;
2321 /* If this parameter was passed both in registers and in the stack, use
2322 the copy on the stack. */
2323 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2324 data->passed_type))
2325 entry_parm = 0;
2327 if (entry_parm)
2329 int partial;
2331 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2332 data->promoted_mode,
2333 data->passed_type,
2334 data->named_arg);
2335 data->partial = partial;
2337 /* The caller might already have allocated stack space for the
2338 register parameters. */
2339 if (partial != 0 && all->reg_parm_stack_space == 0)
2341 /* Part of this argument is passed in registers and part
2342 is passed on the stack. Ask the prologue code to extend
2343 the stack part so that we can recreate the full value.
2345 PRETEND_BYTES is the size of the registers we need to store.
2346 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2347 stack space that the prologue should allocate.
2349 Internally, gcc assumes that the argument pointer is aligned
2350 to STACK_BOUNDARY bits. This is used both for alignment
2351 optimizations (see init_emit) and to locate arguments that are
2352 aligned to more than PARM_BOUNDARY bits. We must preserve this
2353 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2354 a stack boundary. */
2356 /* We assume at most one partial arg, and it must be the first
2357 argument on the stack. */
2358 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2360 pretend_bytes = partial;
2361 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
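/* Worked example with made-up numbers: if 12 bytes of the argument
   arrived in registers and STACK_BYTES is 16, pretend_args_size becomes
   CEIL_ROUND (12, 16) == 16.  */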
2363 /* We want to align relative to the actual stack pointer, so
2364 don't include this in the stack size until later. */
2365 all->extra_pretend_bytes = all->pretend_args_size;
2369 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2370 entry_parm ? data->partial : 0, current_function_decl,
2371 &all->stack_args_size, &data->locate);
2373 /* Update parm_stack_boundary if this parameter is passed in the
2374 stack. */
2375 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2376 crtl->parm_stack_boundary = data->locate.boundary;
2378 /* Adjust offsets to include the pretend args. */
2379 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2380 data->locate.slot_offset.constant += pretend_bytes;
2381 data->locate.offset.constant += pretend_bytes;
2383 data->entry_parm = entry_parm;
2386 /* A subroutine of assign_parms. If there is actually space on the stack
2387 for this parm, count it in stack_args_size and return true. */
2389 static bool
2390 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2391 struct assign_parm_data_one *data)
2393 /* Trivially true if we've no incoming register. */
2394 if (data->entry_parm == NULL)
2396 /* Also true if we're partially in registers and partially not,
2397 since we've arranged to drop the entire argument on the stack. */
2398 else if (data->partial != 0)
2400 /* Also true if the target says that it's passed in both registers
2401 and on the stack. */
2402 else if (GET_CODE (data->entry_parm) == PARALLEL
2403 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2405 /* Also true if the target says that there's stack allocated for
2406 all register parameters. */
2407 else if (all->reg_parm_stack_space > 0)
2409 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2410 else
2411 return false;
2413 all->stack_args_size.constant += data->locate.size.constant;
2414 if (data->locate.size.var)
2415 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2417 return true;
2420 /* A subroutine of assign_parms. Given that this parameter is allocated
2421 stack space by the ABI, find it. */
2423 static void
2424 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2426 rtx offset_rtx, stack_parm;
2427 unsigned int align, boundary;
2429 /* If we're passing this arg using a reg, make its stack home the
2430 aligned stack slot. */
2431 if (data->entry_parm)
2432 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2433 else
2434 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2436 stack_parm = crtl->args.internal_arg_pointer;
2437 if (offset_rtx != const0_rtx)
2438 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2439 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2441 set_mem_attributes (stack_parm, parm, 1);
2442 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2443 while promoted mode's size is needed. */
2444 if (data->promoted_mode != BLKmode
2445 && data->promoted_mode != DECL_MODE (parm))
2447 set_mem_size (stack_parm, GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
2448 if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
2450 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2451 data->promoted_mode);
2452 if (offset)
2453 set_mem_offset (stack_parm,
2454 plus_constant (MEM_OFFSET (stack_parm), -offset));
2458 boundary = data->locate.boundary;
2459 align = BITS_PER_UNIT;
2461 /* If we're padding upward, we know that the alignment of the slot
2462 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2463 intentionally forcing upward padding. Otherwise we have to come
2464 up with a guess at the alignment based on OFFSET_RTX. */
2465 if (data->locate.where_pad != downward || data->entry_parm)
2466 align = boundary;
2467 else if (GET_CODE (offset_rtx) == CONST_INT)
2469 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2470 align = align & -align;
2472 set_mem_align (stack_parm, align);
2474 if (data->entry_parm)
2475 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2477 data->stack_parm = stack_parm;
2480 /* A subroutine of assign_parms. Adjust DATA->ENTRY_PARM such that it's
2481 always valid and contiguous. */
2483 static void
2484 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2486 rtx entry_parm = data->entry_parm;
2487 rtx stack_parm = data->stack_parm;
2489 /* If this parm was passed part in regs and part in memory, pretend it
2490 arrived entirely in memory by pushing the register-part onto the stack.
2491 In the special case of a DImode or DFmode that is split, we could put
2492 it together in a pseudoreg directly, but for now that's not worth
2493 bothering with. */
2494 if (data->partial != 0)
2496 /* Handle calls that pass values in multiple non-contiguous
2497 locations. The Irix 6 ABI has examples of this. */
2498 if (GET_CODE (entry_parm) == PARALLEL)
2499 emit_group_store (validize_mem (stack_parm), entry_parm,
2500 data->passed_type,
2501 int_size_in_bytes (data->passed_type));
2502 else
2504 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2505 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2506 data->partial / UNITS_PER_WORD);
2509 entry_parm = stack_parm;
2512 /* If we didn't decide this parm came in a register, by default it came
2513 on the stack. */
2514 else if (entry_parm == NULL)
2515 entry_parm = stack_parm;
2517 /* When an argument is passed in multiple locations, we can't make use
2518 of this information, but we can save some copying if the whole argument
2519 is passed in a single register. */
2520 else if (GET_CODE (entry_parm) == PARALLEL
2521 && data->nominal_mode != BLKmode
2522 && data->passed_mode != BLKmode)
2524 size_t i, len = XVECLEN (entry_parm, 0);
2526 for (i = 0; i < len; i++)
2527 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2528 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2529 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2530 == data->passed_mode)
2531 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2533 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2534 break;
2538 data->entry_parm = entry_parm;
2541 /* A subroutine of assign_parms. Reconstitute any values which were
2542 passed in multiple registers and would fit in a single register. */
2544 static void
2545 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2547 rtx entry_parm = data->entry_parm;
2549 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2550 This can be done with register operations rather than on the
2551 stack, even if we will store the reconstituted parameter on the
2552 stack later. */
2553 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2555 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2556 emit_group_store (parmreg, entry_parm, data->passed_type,
2557 GET_MODE_SIZE (GET_MODE (entry_parm)));
2558 entry_parm = parmreg;
2561 data->entry_parm = entry_parm;
2564 /* A subroutine of assign_parms. Adjust DATA->STACK_PARM such that it's
2565 always valid and properly aligned. */
2567 static void
2568 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2570 rtx stack_parm = data->stack_parm;
2572 /* If we can't trust the parm stack slot to be aligned enough for its
2573 ultimate type, don't use that slot after entry. We'll make another
2574 stack slot, if we need one. */
2575 if (stack_parm
2576 && ((STRICT_ALIGNMENT
2577 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2578 || (data->nominal_type
2579 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2580 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2581 stack_parm = NULL;
2583 /* If parm was passed in memory, and we need to convert it on entry,
2584 don't store it back in that same slot. */
2585 else if (data->entry_parm == stack_parm
2586 && data->nominal_mode != BLKmode
2587 && data->nominal_mode != data->passed_mode)
2588 stack_parm = NULL;
2590 /* If stack protection is in effect for this function, don't leave any
2591 pointers in their passed stack slots. */
2592 else if (crtl->stack_protect_guard
2593 && (flag_stack_protect == 2
2594 || data->passed_pointer
2595 || POINTER_TYPE_P (data->nominal_type)))
2596 stack_parm = NULL;
2598 data->stack_parm = stack_parm;
2601 /* A subroutine of assign_parms. Return true if the current parameter
2602 should be stored as a BLKmode in the current frame. */
2604 static bool
2605 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2607 if (data->nominal_mode == BLKmode)
2608 return true;
2609 if (GET_MODE (data->entry_parm) == BLKmode)
2610 return true;
2612 #ifdef BLOCK_REG_PADDING
2613 /* Only assign_parm_setup_block knows how to deal with register arguments
2614 that are padded at the least significant end. */
2615 if (REG_P (data->entry_parm)
2616 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2617 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2618 == (BYTES_BIG_ENDIAN ? upward : downward)))
2619 return true;
2620 #endif
2622 return false;
2625 /* A subroutine of assign_parms. Arrange for the parameter to be
2626 present and valid in DATA->STACK_PARM. */
2628 static void
2629 assign_parm_setup_block (struct assign_parm_data_all *all,
2630 tree parm, struct assign_parm_data_one *data)
2632 rtx entry_parm = data->entry_parm;
2633 rtx stack_parm = data->stack_parm;
2634 HOST_WIDE_INT size;
2635 HOST_WIDE_INT size_stored;
2637 if (GET_CODE (entry_parm) == PARALLEL)
2638 entry_parm = emit_group_move_into_temps (entry_parm);
2640 size = int_size_in_bytes (data->passed_type);
2641 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2642 if (stack_parm == 0)
2644 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2645 stack_parm = assign_stack_local (BLKmode, size_stored,
2646 DECL_ALIGN (parm));
2647 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2648 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2649 set_mem_attributes (stack_parm, parm, 1);
2652 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2653 calls that pass values in multiple non-contiguous locations. */
2654 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2656 rtx mem;
2658 /* Note that we will be storing an integral number of words.
2659 So we have to be careful to ensure that we allocate an
2660 integral number of words. We do this above when we call
2661 assign_stack_local if space was not allocated in the argument
2662 list. If it was, this will not work if PARM_BOUNDARY is not
2663 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2664 if it becomes a problem. Exception is when BLKmode arrives
2665 with arguments not conforming to word_mode. */
2667 if (data->stack_parm == 0)
2669 else if (GET_CODE (entry_parm) == PARALLEL)
2671 else
2672 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2674 mem = validize_mem (stack_parm);
2676 /* Handle values in multiple non-contiguous locations. */
2677 if (GET_CODE (entry_parm) == PARALLEL)
2679 push_to_sequence2 (all->first_conversion_insn,
2680 all->last_conversion_insn);
2681 emit_group_store (mem, entry_parm, data->passed_type, size);
2682 all->first_conversion_insn = get_insns ();
2683 all->last_conversion_insn = get_last_insn ();
2684 end_sequence ();
2687 else if (size == 0)
2690 /* If SIZE is that of a mode no bigger than a word, just use
2691 that mode's store operation. */
2692 else if (size <= UNITS_PER_WORD)
2694 enum machine_mode mode
2695 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2697 if (mode != BLKmode
2698 #ifdef BLOCK_REG_PADDING
2699 && (size == UNITS_PER_WORD
2700 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2701 != (BYTES_BIG_ENDIAN ? upward : downward)))
2702 #endif
2705 rtx reg;
2707 /* We are really truncating a word_mode value containing
2708 SIZE bytes into a value of mode MODE. If such an
2709 operation requires no actual instructions, we can refer
2710 to the value directly in mode MODE, otherwise we must
2711 start with the register in word_mode and explicitly
2712 convert it. */
2713 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2714 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2715 else
2717 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2718 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2720 emit_move_insn (change_address (mem, mode, 0), reg);
2723 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2724 machine must be aligned to the left before storing
2725 to memory. Note that the previous test doesn't
2726 handle all cases (e.g. SIZE == 3). */
2727 else if (size != UNITS_PER_WORD
2728 #ifdef BLOCK_REG_PADDING
2729 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2730 == downward)
2731 #else
2732 && BYTES_BIG_ENDIAN
2733 #endif
2736 rtx tem, x;
2737 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2738 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2740 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2741 build_int_cst (NULL_TREE, by),
2742 NULL_RTX, 1);
2743 tem = change_address (mem, word_mode, 0);
2744 emit_move_insn (tem, x);
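/* Illustration with hypothetical values: for UNITS_PER_WORD == 4 and
   SIZE == 3, BY is (4 - 3) * BITS_PER_UNIT == 8, so the value is shifted
   left one byte and its three significant bytes land in the
   lowest-addressed (most significant) bytes of the big-endian word that
   is then stored.  */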
2746 else
2747 move_block_from_reg (REGNO (entry_parm), mem,
2748 size_stored / UNITS_PER_WORD);
2750 else
2751 move_block_from_reg (REGNO (entry_parm), mem,
2752 size_stored / UNITS_PER_WORD);
2754 else if (data->stack_parm == 0)
2756 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2757 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2758 BLOCK_OP_NORMAL);
2759 all->first_conversion_insn = get_insns ();
2760 all->last_conversion_insn = get_last_insn ();
2761 end_sequence ();
2764 data->stack_parm = stack_parm;
2765 SET_DECL_RTL (parm, stack_parm);
2768 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2769 parameter. Get it there. Perform all ABI specified conversions. */
2771 static void
2772 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2773 struct assign_parm_data_one *data)
2775 rtx parmreg;
2776 enum machine_mode promoted_nominal_mode;
2777 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2778 bool did_conversion = false;
2780 /* Store the parm in a pseudoregister during the function, but we may
2781 need to do it in a wider mode. */
2783 /* This is not really promoting for a call. However we need to be
2784 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2785 promoted_nominal_mode
2786 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2788 parmreg = gen_reg_rtx (promoted_nominal_mode);
2790 if (!DECL_ARTIFICIAL (parm))
2791 mark_user_reg (parmreg);
2793 /* If this was an item that we received a pointer to,
2794 set DECL_RTL appropriately. */
2795 if (data->passed_pointer)
2797 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2798 set_mem_attributes (x, parm, 1);
2799 SET_DECL_RTL (parm, x);
2801 else
2802 SET_DECL_RTL (parm, parmreg);
2804 assign_parm_remove_parallels (data);
2806 /* Copy the value into the register. */
2807 if (data->nominal_mode != data->passed_mode
2808 || promoted_nominal_mode != data->promoted_mode)
2810 int save_tree_used;
2812 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2813 mode, by the caller. We now have to convert it to
2814 NOMINAL_MODE, if different. However, PARMREG may be in
2815 a different mode than NOMINAL_MODE if it is being stored
2816 promoted.
2818 If ENTRY_PARM is a hard register, it might be in a register
2819 not valid for operating in its mode (e.g., an odd-numbered
2820 register for a DFmode). In that case, moves are the only
2821 thing valid, so we can't do a convert from there. This
2822 occurs when the calling sequence allows such misaligned
2823 usages.
2825 In addition, the conversion may involve a call, which could
2826 clobber parameters which haven't been copied to pseudo
2827 registers yet. Therefore, we must first copy the parm to
2828 a pseudo reg here, and save the conversion until after all
2829 parameters have been moved. */
2831 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2833 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2835 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2836 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2838 if (GET_CODE (tempreg) == SUBREG
2839 && GET_MODE (tempreg) == data->nominal_mode
2840 && REG_P (SUBREG_REG (tempreg))
2841 && data->nominal_mode == data->passed_mode
2842 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2843 && GET_MODE_SIZE (GET_MODE (tempreg))
2844 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2846 /* The argument is already sign/zero extended, so note it
2847 into the subreg. */
2848 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2849 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2852 /* TREE_USED gets set erroneously during expand_assignment. */
2853 save_tree_used = TREE_USED (parm);
2854 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
2855 TREE_USED (parm) = save_tree_used;
2856 all->first_conversion_insn = get_insns ();
2857 all->last_conversion_insn = get_last_insn ();
2858 end_sequence ();
2860 did_conversion = true;
2862 else
2863 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2865 /* If we were passed a pointer but the actual value can safely live
2866 in a register, put it in one. */
2867 if (data->passed_pointer
2868 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2869 /* If by-reference argument was promoted, demote it. */
2870 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2871 || use_register_for_decl (parm)))
2873 /* We can't use nominal_mode, because it will have been set to
2874 Pmode above. We must use the actual mode of the parm. */
2875 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2876 mark_user_reg (parmreg);
2878 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2880 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2881 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2883 push_to_sequence2 (all->first_conversion_insn,
2884 all->last_conversion_insn);
2885 emit_move_insn (tempreg, DECL_RTL (parm));
2886 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2887 emit_move_insn (parmreg, tempreg);
2888 all->first_conversion_insn = get_insns ();
2889 all->last_conversion_insn = get_last_insn ();
2890 end_sequence ();
2892 did_conversion = true;
2894 else
2895 emit_move_insn (parmreg, DECL_RTL (parm));
2897 SET_DECL_RTL (parm, parmreg);
2899 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2900 now the parm. */
2901 data->stack_parm = NULL;
2904 /* Mark the register as eliminable if we did no conversion and it was
2905 copied from memory at a fixed offset, and the arg pointer was not
2906 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2907 offset formed an invalid address, such memory-equivalences as we
2908 make here would screw up life analysis for it. */
2909 if (data->nominal_mode == data->passed_mode
2910 && !did_conversion
2911 && data->stack_parm != 0
2912 && MEM_P (data->stack_parm)
2913 && data->locate.offset.var == 0
2914 && reg_mentioned_p (virtual_incoming_args_rtx,
2915 XEXP (data->stack_parm, 0)))
2917 rtx linsn = get_last_insn ();
2918 rtx sinsn, set;
2920 /* Mark complex types separately. */
2921 if (GET_CODE (parmreg) == CONCAT)
2923 enum machine_mode submode
2924 = GET_MODE_INNER (GET_MODE (parmreg));
2925 int regnor = REGNO (XEXP (parmreg, 0));
2926 int regnoi = REGNO (XEXP (parmreg, 1));
2927 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2928 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2929 GET_MODE_SIZE (submode));
2931 /* Scan backwards for the set of the real and
2932 imaginary parts. */
2933 for (sinsn = linsn; sinsn != 0;
2934 sinsn = prev_nonnote_insn (sinsn))
2936 set = single_set (sinsn);
2937 if (set == 0)
2938 continue;
2940 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2941 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
2942 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2943 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
2946 else if ((set = single_set (linsn)) != 0
2947 && SET_DEST (set) == parmreg)
2948 set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
2951 /* For pointer data type, suggest pointer register. */
2952 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2953 mark_reg_pointer (parmreg,
2954 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2957 /* A subroutine of assign_parms. Allocate stack space to hold the current
2958 parameter. Get it there. Perform all ABI specified conversions. */
2960 static void
2961 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2962 struct assign_parm_data_one *data)
2964 /* Value must be stored in the stack slot STACK_PARM during function
2965 execution. */
2966 bool to_conversion = false;
2968 assign_parm_remove_parallels (data);
2970 if (data->promoted_mode != data->nominal_mode)
2972 /* Conversion is required. */
2973 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2975 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2977 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2978 to_conversion = true;
2980 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2981 TYPE_UNSIGNED (TREE_TYPE (parm)));
2983 if (data->stack_parm)
2985 int offset = subreg_lowpart_offset (data->nominal_mode,
2986 GET_MODE (data->stack_parm));
2987 /* ??? This may need a big-endian conversion on sparc64. */
2988 data->stack_parm
2989 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2990 if (offset && MEM_OFFSET (data->stack_parm))
2991 set_mem_offset (data->stack_parm,
2992 plus_constant (MEM_OFFSET (data->stack_parm),
2993 offset));
2997 if (data->entry_parm != data->stack_parm)
2999 rtx src, dest;
3001 if (data->stack_parm == 0)
3003 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3004 GET_MODE (data->entry_parm),
3005 TYPE_ALIGN (data->passed_type));
3006 data->stack_parm
3007 = assign_stack_local (GET_MODE (data->entry_parm),
3008 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3009 align);
3010 set_mem_attributes (data->stack_parm, parm, 1);
3013 dest = validize_mem (data->stack_parm);
3014 src = validize_mem (data->entry_parm);
3016 if (MEM_P (src))
3018 /* Use a block move to handle potentially misaligned entry_parm. */
3019 if (!to_conversion)
3020 push_to_sequence2 (all->first_conversion_insn,
3021 all->last_conversion_insn);
3022 to_conversion = true;
3024 emit_block_move (dest, src,
3025 GEN_INT (int_size_in_bytes (data->passed_type)),
3026 BLOCK_OP_NORMAL);
3028 else
3029 emit_move_insn (dest, src);
3032 if (to_conversion)
3034 all->first_conversion_insn = get_insns ();
3035 all->last_conversion_insn = get_last_insn ();
3036 end_sequence ();
3039 SET_DECL_RTL (parm, data->stack_parm);
3042 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3043 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3045 static void
3046 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
3048 tree parm;
3049 tree orig_fnargs = all->orig_fnargs;
3051 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
3053 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3054 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3056 rtx tmp, real, imag;
3057 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3059 real = DECL_RTL (fnargs);
3060 imag = DECL_RTL (TREE_CHAIN (fnargs));
3061 if (inner != GET_MODE (real))
3063 real = gen_lowpart_SUBREG (inner, real);
3064 imag = gen_lowpart_SUBREG (inner, imag);
3067 if (TREE_ADDRESSABLE (parm))
3069 rtx rmem, imem;
3070 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3071 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3072 DECL_MODE (parm),
3073 TYPE_ALIGN (TREE_TYPE (parm)));
3075 /* split_complex_arg put the real and imag parts in
3076 pseudos. Move them to memory. */
3077 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3078 set_mem_attributes (tmp, parm, 1);
3079 rmem = adjust_address_nv (tmp, inner, 0);
3080 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3081 push_to_sequence2 (all->first_conversion_insn,
3082 all->last_conversion_insn);
3083 emit_move_insn (rmem, real);
3084 emit_move_insn (imem, imag);
3085 all->first_conversion_insn = get_insns ();
3086 all->last_conversion_insn = get_last_insn ();
3087 end_sequence ();
3089 else
3090 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3091 SET_DECL_RTL (parm, tmp);
3093 real = DECL_INCOMING_RTL (fnargs);
3094 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
3095 if (inner != GET_MODE (real))
3097 real = gen_lowpart_SUBREG (inner, real);
3098 imag = gen_lowpart_SUBREG (inner, imag);
3100 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3101 set_decl_incoming_rtl (parm, tmp, false);
3102 fnargs = TREE_CHAIN (fnargs);
3104 else
3106 SET_DECL_RTL (parm, DECL_RTL (fnargs));
3107 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs), false);
3109 /* Set MEM_EXPR to the original decl, i.e. to PARM,
3110 instead of the copy of decl, i.e. FNARGS. */
3111 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
3112 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
3115 fnargs = TREE_CHAIN (fnargs);
3119 /* Assign RTL expressions to the function's parameters. This may involve
3120 copying them into registers and using those registers as the DECL_RTL. */
3122 static void
3123 assign_parms (tree fndecl)
3125 struct assign_parm_data_all all;
3126 tree fnargs, parm;
3128 crtl->args.internal_arg_pointer
3129 = targetm.calls.internal_arg_pointer ();
3131 assign_parms_initialize_all (&all);
3132 fnargs = assign_parms_augmented_arg_list (&all);
3134 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3136 struct assign_parm_data_one data;
3138 /* Extract the type of PARM; adjust it according to ABI. */
3139 assign_parm_find_data_types (&all, parm, &data);
3141 /* Early out for errors and void parameters. */
3142 if (data.passed_mode == VOIDmode)
3144 SET_DECL_RTL (parm, const0_rtx);
3145 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3146 continue;
3149 /* Estimate stack alignment from parameter alignment. */
3150 if (SUPPORTS_STACK_ALIGNMENT)
3152 unsigned int align = FUNCTION_ARG_BOUNDARY (data.promoted_mode,
3153 data.passed_type);
3154 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3155 align);
3156 if (TYPE_ALIGN (data.nominal_type) > align)
3157 align = MINIMUM_ALIGNMENT (data.nominal_type,
3158 TYPE_MODE (data.nominal_type),
3159 TYPE_ALIGN (data.nominal_type));
3160 if (crtl->stack_alignment_estimated < align)
3162 gcc_assert (!crtl->stack_realign_processed);
3163 crtl->stack_alignment_estimated = align;
3167 if (cfun->stdarg && !TREE_CHAIN (parm))
3168 assign_parms_setup_varargs (&all, &data, false);
3170 /* Find out where the parameter arrives in this function. */
3171 assign_parm_find_entry_rtl (&all, &data);
3173 /* Find out where stack space for this parameter might be. */
3174 if (assign_parm_is_stack_parm (&all, &data))
3176 assign_parm_find_stack_rtl (parm, &data);
3177 assign_parm_adjust_entry_rtl (&data);
3180 /* Record permanently how this parm was passed. */
3181 set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
3183 /* Update info on where next arg arrives in registers. */
3184 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3185 data.passed_type, data.named_arg);
3187 assign_parm_adjust_stack_rtl (&data);
3189 if (assign_parm_setup_block_p (&data))
3190 assign_parm_setup_block (&all, parm, &data);
3191 else if (data.passed_pointer || use_register_for_decl (parm))
3192 assign_parm_setup_reg (&all, parm, &data);
3193 else
3194 assign_parm_setup_stack (&all, parm, &data);
3197 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3198 assign_parms_unsplit_complex (&all, fnargs);
3200 /* Output all parameter conversion instructions (possibly including calls)
3201 now that all parameters have been copied out of hard registers. */
3202 emit_insn (all.first_conversion_insn);
3204 /* Estimate reload stack alignment from scalar return mode. */
3205 if (SUPPORTS_STACK_ALIGNMENT)
3207 if (DECL_RESULT (fndecl))
3209 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3210 enum machine_mode mode = TYPE_MODE (type);
3212 if (mode != BLKmode
3213 && mode != VOIDmode
3214 && !AGGREGATE_TYPE_P (type))
3216 unsigned int align = GET_MODE_ALIGNMENT (mode);
3217 if (crtl->stack_alignment_estimated < align)
3219 gcc_assert (!crtl->stack_realign_processed);
3220 crtl->stack_alignment_estimated = align;
3226 /* If we are receiving a struct value address as the first argument, set up
3227 the RTL for the function result. As this might require code to convert
3228 the transmitted address to Pmode, we do this here to ensure that possible
3229 preliminary conversions of the address have been emitted already. */
3230 if (all.function_result_decl)
3232 tree result = DECL_RESULT (current_function_decl);
3233 rtx addr = DECL_RTL (all.function_result_decl);
3234 rtx x;
3236 if (DECL_BY_REFERENCE (result))
3237 x = addr;
3238 else
3240 addr = convert_memory_address (Pmode, addr);
3241 x = gen_rtx_MEM (DECL_MODE (result), addr);
3242 set_mem_attributes (x, result, 1);
3244 SET_DECL_RTL (result, x);
3247 /* We have aligned all the args, so add space for the pretend args. */
3248 crtl->args.pretend_args_size = all.pretend_args_size;
3249 all.stack_args_size.constant += all.extra_pretend_bytes;
3250 crtl->args.size = all.stack_args_size.constant;
3252 /* Adjust function incoming argument size for alignment and
3253 minimum length. */
3255 #ifdef REG_PARM_STACK_SPACE
3256 crtl->args.size = MAX (crtl->args.size,
3257 REG_PARM_STACK_SPACE (fndecl));
3258 #endif
3260 crtl->args.size = CEIL_ROUND (crtl->args.size,
3261 PARM_BOUNDARY / BITS_PER_UNIT);
3263 #ifdef ARGS_GROW_DOWNWARD
3264 crtl->args.arg_offset_rtx
3265 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3266 : expand_expr (size_diffop (all.stack_args_size.var,
3267 size_int (-all.stack_args_size.constant)),
3268 NULL_RTX, VOIDmode, 0));
3269 #else
3270 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3271 #endif
3273 /* See how many bytes, if any, of its args a function should try to pop
3274 on return. */
3276 crtl->args.pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3277 crtl->args.size);
3279 /* For a stdarg.h function, save info about the
3280 regs and stack space used by the named args. */
3282 crtl->args.info = all.args_so_far;
3284 /* Set the rtx used for the function return value. Put this in its
3285 own variable so any optimizers that need this information don't have
3286 to include tree.h. Do this here so it gets done when an inlined
3287 function gets output. */
3289 crtl->return_rtx
3290 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3291 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3293 /* If scalar return value was computed in a pseudo-reg, or was a named
3294 return value that got dumped to the stack, copy that to the hard
3295 return register. */
3296 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3298 tree decl_result = DECL_RESULT (fndecl);
3299 rtx decl_rtl = DECL_RTL (decl_result);
3301 if (REG_P (decl_rtl)
3302 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3303 : DECL_REGISTER (decl_result))
3305 rtx real_decl_rtl;
3307 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3308 fndecl, true);
3309 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3310 /* The delay slot scheduler assumes that crtl->return_rtx
3311 holds the hard register containing the return value, not a
3312 temporary pseudo. */
3313 crtl->return_rtx = real_decl_rtl;
3318 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3319 For all seen types, gimplify their sizes. */
3321 static tree
3322 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3324 tree t = *tp;
3326 *walk_subtrees = 0;
3327 if (TYPE_P (t))
3329 if (POINTER_TYPE_P (t))
3330 *walk_subtrees = 1;
3331 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3332 && !TYPE_SIZES_GIMPLIFIED (t))
3334 gimplify_type_sizes (t, (gimple_seq *) data);
3335 *walk_subtrees = 1;
3339 return NULL;
3342 /* Gimplify the parameter list for current_function_decl. This involves
3343 evaluating SAVE_EXPRs of variable sized parameters and generating code
3344 to implement callee-copies reference parameters. Returns a sequence of
3345 statements to add to the beginning of the function. */
3347 gimple_seq
3348 gimplify_parameters (void)
3350 struct assign_parm_data_all all;
3351 tree fnargs, parm;
3352 gimple_seq stmts = NULL;
3354 assign_parms_initialize_all (&all);
3355 fnargs = assign_parms_augmented_arg_list (&all);
3357 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3359 struct assign_parm_data_one data;
3361 /* Extract the type of PARM; adjust it according to ABI. */
3362 assign_parm_find_data_types (&all, parm, &data);
3364 /* Early out for errors and void parameters. */
3365 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3366 continue;
3368 /* Update info on where next arg arrives in registers. */
3369 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3370 data.passed_type, data.named_arg);
3372 /* ??? Once upon a time variable_size stuffed parameter list
3373 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3374 turned out to be less than manageable in the gimple world.
3375 Now we have to hunt them down ourselves. */
3376 walk_tree_without_duplicates (&data.passed_type,
3377 gimplify_parm_type, &stmts);
3379 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3381 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3382 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3385 if (data.passed_pointer)
3387 tree type = TREE_TYPE (data.passed_type);
3388 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3389 type, data.named_arg))
3391 tree local, t;
3393 /* For constant-sized objects, this is trivial; for
3394 variable-sized objects, we have to play games. */
3395 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3396 && !(flag_stack_check == GENERIC_STACK_CHECK
3397 && compare_tree_int (DECL_SIZE_UNIT (parm),
3398 STACK_CHECK_MAX_VAR_SIZE) > 0))
3400 local = create_tmp_var (type, get_name (parm));
3401 DECL_IGNORED_P (local) = 0;
3402 /* If PARM was addressable, move that flag over
3403 to the local copy, as its address will be taken,
3404 not the PARMs. */
3405 if (TREE_ADDRESSABLE (parm))
3407 TREE_ADDRESSABLE (parm) = 0;
3408 TREE_ADDRESSABLE (local) = 1;
3411 else
3413 tree ptr_type, addr;
3415 ptr_type = build_pointer_type (type);
3416 addr = create_tmp_var (ptr_type, get_name (parm));
3417 DECL_IGNORED_P (addr) = 0;
3418 local = build_fold_indirect_ref (addr);
3420 t = built_in_decls[BUILT_IN_ALLOCA];
3421 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3422 t = fold_convert (ptr_type, t);
3423 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3424 gimplify_and_add (t, &stmts);
3427 gimplify_assign (local, parm, &stmts);
3429 SET_DECL_VALUE_EXPR (parm, local);
3430 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3435 return stmts;
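/* Roughly, for a callee-copied variable-sized parameter the sequence
   built above amounts to (a sketch, not literal GIMPLE):

       addr = __builtin_alloca (DECL_SIZE_UNIT (parm));
       *addr = parm;

   after which PARM's DECL_VALUE_EXPR refers to the local copy, so later
   uses of the parameter read the copy rather than the caller's object.  */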
3438 /* Compute the size and offset from the start of the stacked arguments for a
3439 parm passed in mode PASSED_MODE and with type TYPE.
3441 INITIAL_OFFSET_PTR points to the current offset into the stacked
3442 arguments.
3444 The starting offset and size for this parm are returned in
3445 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3446 nonzero, the offset is that of stack slot, which is returned in
3447 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3448 padding required from the initial offset ptr to the stack slot.
3450 IN_REGS is nonzero if the argument will be passed in registers. It will
3451 never be set if REG_PARM_STACK_SPACE is not defined.
3453 FNDECL is the function in which the argument was defined.
3455 There are two types of rounding that are done. The first, controlled by
3456 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3457 list to be aligned to the specific boundary (in bits). This rounding
3458 affects the initial and starting offsets, but not the argument size.
3460 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3461 optionally rounds the size of the parm to PARM_BOUNDARY. The
3462 initial offset is not affected by this rounding, while the size always
3463 is and the starting offset may be. */
3465 /* LOCATE->OFFSET will be negative for the ARGS_GROW_DOWNWARD case;
3466 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3467 callers pass in the total size of args so far as
3468 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3470 void
3471 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3472 int partial, tree fndecl ATTRIBUTE_UNUSED,
3473 struct args_size *initial_offset_ptr,
3474 struct locate_and_pad_arg_data *locate)
3476 tree sizetree;
3477 enum direction where_pad;
3478 unsigned int boundary;
3479 int reg_parm_stack_space = 0;
3480 int part_size_in_regs;
3482 #ifdef REG_PARM_STACK_SPACE
3483 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3485 /* If we have found a stack parm before we reach the end of the
3486 area reserved for registers, skip that area. */
3487 if (! in_regs)
3489 if (reg_parm_stack_space > 0)
3491 if (initial_offset_ptr->var)
3493 initial_offset_ptr->var
3494 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3495 ssize_int (reg_parm_stack_space));
3496 initial_offset_ptr->constant = 0;
3498 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3499 initial_offset_ptr->constant = reg_parm_stack_space;
3502 #endif /* REG_PARM_STACK_SPACE */
3504 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3506 sizetree
3507 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3508 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3509 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3510 locate->where_pad = where_pad;
3512 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3513 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3514 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3516 locate->boundary = boundary;
3518 if (SUPPORTS_STACK_ALIGNMENT)
3520 /* stack_alignment_estimated can't change after stack has been
3521 realigned. */
3522 if (crtl->stack_alignment_estimated < boundary)
3524 if (!crtl->stack_realign_processed)
3525 crtl->stack_alignment_estimated = boundary;
3526 else
3528 /* If stack is realigned and stack alignment value
3529 hasn't been finalized, it is OK not to increase
3530 stack_alignment_estimated. The bigger alignment
3531 requirement is recorded in stack_alignment_needed
3532 below. */
3533 gcc_assert (!crtl->stack_realign_finalized
3534 && crtl->stack_realign_needed);
3539 /* Remember if the outgoing parameter requires extra alignment on the
3540 calling function side. */
3541 if (crtl->stack_alignment_needed < boundary)
3542 crtl->stack_alignment_needed = boundary;
3543 if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
3544 crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
3545 if (crtl->preferred_stack_boundary < boundary)
3546 crtl->preferred_stack_boundary = boundary;
3548 #ifdef ARGS_GROW_DOWNWARD
3549 locate->slot_offset.constant = -initial_offset_ptr->constant;
3550 if (initial_offset_ptr->var)
3551 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3552 initial_offset_ptr->var);
3555 tree s2 = sizetree;
3556 if (where_pad != none
3557 && (!host_integerp (sizetree, 1)
3558 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3559 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3560 SUB_PARM_SIZE (locate->slot_offset, s2);
3563 locate->slot_offset.constant += part_size_in_regs;
3565 if (!in_regs
3566 #ifdef REG_PARM_STACK_SPACE
3567 || REG_PARM_STACK_SPACE (fndecl) > 0
3568 #endif
3570 pad_to_arg_alignment (&locate->slot_offset, boundary,
3571 &locate->alignment_pad);
3573 locate->size.constant = (-initial_offset_ptr->constant
3574 - locate->slot_offset.constant);
3575 if (initial_offset_ptr->var)
3576 locate->size.var = size_binop (MINUS_EXPR,
3577 size_binop (MINUS_EXPR,
3578 ssize_int (0),
3579 initial_offset_ptr->var),
3580 locate->slot_offset.var);
3582 /* Pad_below needs the pre-rounded size to know how much to pad
3583 below. */
3584 locate->offset = locate->slot_offset;
3585 if (where_pad == downward)
3586 pad_below (&locate->offset, passed_mode, sizetree);
3588 #else /* !ARGS_GROW_DOWNWARD */
3589 if (!in_regs
3590 #ifdef REG_PARM_STACK_SPACE
3591 || REG_PARM_STACK_SPACE (fndecl) > 0
3592 #endif
3594 pad_to_arg_alignment (initial_offset_ptr, boundary,
3595 &locate->alignment_pad);
3596 locate->slot_offset = *initial_offset_ptr;
3598 #ifdef PUSH_ROUNDING
3599 if (passed_mode != BLKmode)
3600 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3601 #endif
3603 /* Pad_below needs the pre-rounded size to know how much to pad below
3604 so this must be done before rounding up. */
3605 locate->offset = locate->slot_offset;
3606 if (where_pad == downward)
3607 pad_below (&locate->offset, passed_mode, sizetree);
3609 if (where_pad != none
3610 && (!host_integerp (sizetree, 1)
3611 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3612 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3614 ADD_PARM_SIZE (locate->size, sizetree);
3616 locate->size.constant -= part_size_in_regs;
3617 #endif /* ARGS_GROW_DOWNWARD */
3619 #ifdef FUNCTION_ARG_OFFSET
3620 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3621 #endif
3624 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3625 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3627 static void
3628 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3629 struct args_size *alignment_pad)
3631 tree save_var = NULL_TREE;
3632 HOST_WIDE_INT save_constant = 0;
3633 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3634 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3636 #ifdef SPARC_STACK_BOUNDARY_HACK
3637 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3638 the real alignment of %sp. However, when it does this, the
3639 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3640 if (SPARC_STACK_BOUNDARY_HACK)
3641 sp_offset = 0;
3642 #endif
3644 if (boundary > PARM_BOUNDARY)
3646 save_var = offset_ptr->var;
3647 save_constant = offset_ptr->constant;
3650 alignment_pad->var = NULL_TREE;
3651 alignment_pad->constant = 0;
3653 if (boundary > BITS_PER_UNIT)
3655 if (offset_ptr->var)
3657 tree sp_offset_tree = ssize_int (sp_offset);
3658 tree offset = size_binop (PLUS_EXPR,
3659 ARGS_SIZE_TREE (*offset_ptr),
3660 sp_offset_tree);
3661 #ifdef ARGS_GROW_DOWNWARD
3662 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3663 #else
3664 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3665 #endif
3667 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3668 /* ARGS_SIZE_TREE includes constant term. */
3669 offset_ptr->constant = 0;
3670 if (boundary > PARM_BOUNDARY)
3671 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3672 save_var);
3674 else
3676 offset_ptr->constant = -sp_offset +
3677 #ifdef ARGS_GROW_DOWNWARD
3678 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3679 #else
3680 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3681 #endif
3682 if (boundary > PARM_BOUNDARY)
3683 alignment_pad->constant = offset_ptr->constant - save_constant;
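/* Illustrative example (added, not in the original sources): with
   boundary_in_bytes == 8, sp_offset == 0 and an incoming
   offset_ptr->constant of 20, the upward-growing case computes
   CEIL_ROUND (20, 8) == 24, and when boundary > PARM_BOUNDARY the
   4-byte pad (24 - 20) is recorded in alignment_pad->constant.  */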
3688 static void
3689 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3691 if (passed_mode != BLKmode)
3693 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3694 offset_ptr->constant
3695 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3696 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3697 - GET_MODE_SIZE (passed_mode));
3699 else
3701 if (TREE_CODE (sizetree) != INTEGER_CST
3702 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3704 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3705 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3706 /* Add it in. */
3707 ADD_PARM_SIZE (*offset_ptr, s2);
3708 SUB_PARM_SIZE (*offset_ptr, sizetree);
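/* Illustrative example (added, not in the original sources): for a
   non-BLKmode argument of HImode (16 bits, 2 bytes) with
   PARM_BOUNDARY == 32, the slot is rounded up to 4 bytes, so the
   offset is advanced by 4 - 2 == 2 bytes of padding below the value.  */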
3714 /* True if register REGNO was alive at a place where `setjmp' was
3715 called and was set more than once or is an argument. Such regs may
3716 be clobbered by `longjmp'. */
3718 static bool
3719 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3721 /* There appear to be cases where some local vars never reach the
3722 backend but have bogus regnos. */
3723 if (regno >= max_reg_num ())
3724 return false;
3726 return ((REG_N_SETS (regno) > 1
3727 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3728 && REGNO_REG_SET_P (setjmp_crosses, regno));
3731 /* Walk the tree of blocks describing the binding levels within a
3732 function and warn about variables that might be killed by setjmp or
3733 vfork. This is done after flow analysis and before register
3734 allocation, since register allocation will replace the pseudo-regs
3735 with hard regs. */
3737 static void
3738 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3740 tree decl, sub;
3742 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3744 if (TREE_CODE (decl) == VAR_DECL
3745 && DECL_RTL_SET_P (decl)
3746 && REG_P (DECL_RTL (decl))
3747 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3748 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3749 " %<longjmp%> or %<vfork%>", decl);
3752 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3753 setjmp_vars_warning (setjmp_crosses, sub);
3756 /* Do the appropriate part of setjmp_vars_warning
3757 but for arguments instead of local variables. */
3759 static void
3760 setjmp_args_warning (bitmap setjmp_crosses)
3762 tree decl;
3763 for (decl = DECL_ARGUMENTS (current_function_decl);
3764 decl; decl = TREE_CHAIN (decl))
3765 if (DECL_RTL (decl) != 0
3766 && REG_P (DECL_RTL (decl))
3767 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3768 warning (OPT_Wclobbered,
3769 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3770 decl);
3773 /* Generate warning messages for variables live across setjmp. */
3775 void
3776 generate_setjmp_warnings (void)
3778 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3780 if (n_basic_blocks == NUM_FIXED_BLOCKS
3781 || bitmap_empty_p (setjmp_crosses))
3782 return;
3784 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3785 setjmp_args_warning (setjmp_crosses);
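/* Illustrative example (added, not in the original sources) of the
   situation these warnings catch:

       jmp_buf buf;
       int f (void)
       {
         int i = 0;
         if (setjmp (buf) == 0)
           {
             i = 1;
             longjmp (buf, 1);
           }
         return i;   <- indeterminate if i lived in a register

   If I is modified between the setjmp and the longjmp and ends up in
   a pseudo that is live across the setjmp, -Wclobbered reports
   "variable 'i' might be clobbered by 'longjmp' or 'vfork'".  */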
3789 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3790 and create duplicate blocks. */
3791 /* ??? Need an option to either create block fragments or to create
3792 abstract origin duplicates of a source block. It really depends
3793 on what optimization has been performed. */
3795 void
3796 reorder_blocks (void)
3798 tree block = DECL_INITIAL (current_function_decl);
3799 VEC(tree,heap) *block_stack;
3801 if (block == NULL_TREE)
3802 return;
3804 block_stack = VEC_alloc (tree, heap, 10);
3806 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3807 clear_block_marks (block);
3809 /* Prune the old trees away, so that they don't get in the way. */
3810 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3811 BLOCK_CHAIN (block) = NULL_TREE;
3813 /* Recreate the block tree from the note nesting. */
3814 reorder_blocks_1 (get_insns (), block, &block_stack);
3815 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3817 VEC_free (tree, heap, block_stack);
3820 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3822 void
3823 clear_block_marks (tree block)
3825 while (block)
3827 TREE_ASM_WRITTEN (block) = 0;
3828 clear_block_marks (BLOCK_SUBBLOCKS (block));
3829 block = BLOCK_CHAIN (block);
3833 static void
3834 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3836 rtx insn;
3838 for (insn = insns; insn; insn = NEXT_INSN (insn))
3840 if (NOTE_P (insn))
3842 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
3844 tree block = NOTE_BLOCK (insn);
3845 tree origin;
3847 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3848 ? BLOCK_FRAGMENT_ORIGIN (block)
3849 : block);
3851 /* If we have seen this block before, that means it now
3852 spans multiple address regions. Create a new fragment. */
3853 if (TREE_ASM_WRITTEN (block))
3855 tree new_block = copy_node (block);
3857 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3858 BLOCK_FRAGMENT_CHAIN (new_block)
3859 = BLOCK_FRAGMENT_CHAIN (origin);
3860 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3862 NOTE_BLOCK (insn) = new_block;
3863 block = new_block;
3866 BLOCK_SUBBLOCKS (block) = 0;
3867 TREE_ASM_WRITTEN (block) = 1;
3868 /* When there's only one block for the entire function,
3869 current_block == block and we mustn't do this; it
3870 would cause infinite recursion. */
3871 if (block != current_block)
3873 if (block != origin)
3874 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3876 BLOCK_SUPERCONTEXT (block) = current_block;
3877 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3878 BLOCK_SUBBLOCKS (current_block) = block;
3879 current_block = origin;
3881 VEC_safe_push (tree, heap, *p_block_stack, block);
3883 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
3885 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3886 BLOCK_SUBBLOCKS (current_block)
3887 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3888 current_block = BLOCK_SUPERCONTEXT (current_block);
3894 /* Reverse the order of elements in the chain T of blocks,
3895 and return the new head of the chain (old last element). */
3897 tree
3898 blocks_nreverse (tree t)
3900 tree prev = 0, decl, next;
3901 for (decl = t; decl; decl = next)
3903 next = BLOCK_CHAIN (decl);
3904 BLOCK_CHAIN (decl) = prev;
3905 prev = decl;
3907 return prev;
3910 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3911 non-NULL, list them all into VECTOR, in a depth-first preorder
3912 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3913 blocks. */
3915 static int
3916 all_blocks (tree block, tree *vector)
3918 int n_blocks = 0;
3920 while (block)
3922 TREE_ASM_WRITTEN (block) = 0;
3924 /* Record this block. */
3925 if (vector)
3926 vector[n_blocks] = block;
3928 ++n_blocks;
3930 /* Record the subblocks, and their subblocks... */
3931 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3932 vector ? vector + n_blocks : 0);
3933 block = BLOCK_CHAIN (block);
3936 return n_blocks;
3939 /* Return a vector containing all the blocks rooted at BLOCK. The
3940 number of elements in the vector is stored in N_BLOCKS_P. The
3941 vector is dynamically allocated; it is the caller's responsibility
3942 to call `free' on the pointer returned. */
3944 static tree *
3945 get_block_vector (tree block, int *n_blocks_p)
3947 tree *block_vector;
3949 *n_blocks_p = all_blocks (block, NULL);
3950 block_vector = XNEWVEC (tree, *n_blocks_p);
3951 all_blocks (block, block_vector);
3953 return block_vector;
3956 static GTY(()) int next_block_index = 2;
3958 /* Set BLOCK_NUMBER for all the blocks in FN. */
3960 void
3961 number_blocks (tree fn)
3963 int i;
3964 int n_blocks;
3965 tree *block_vector;
3967 /* For SDB and XCOFF debugging output, we start numbering the blocks
3968 from 1 within each function, rather than keeping a running
3969 count. */
3970 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3971 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3972 next_block_index = 1;
3973 #endif
3975 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3977 /* The top-level BLOCK isn't numbered at all. */
3978 for (i = 1; i < n_blocks; ++i)
3979 /* We number the blocks from two. */
3980 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3982 free (block_vector);
3984 return;
3987 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3989 tree
3990 debug_find_var_in_block_tree (tree var, tree block)
3992 tree t;
3994 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3995 if (t == var)
3996 return block;
3998 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4000 tree ret = debug_find_var_in_block_tree (var, t);
4001 if (ret)
4002 return ret;
4005 return NULL_TREE;
4008 /* Keep track of whether we're in a dummy function context. If we are,
4009 we don't want to invoke the set_current_function hook, because we'll
4010 get into trouble if the hook calls target_reinit () recursively or
4011 when the initial initialization is not yet complete. */
4013 static bool in_dummy_function;
4015 /* Invoke the target hook when setting cfun. Update the optimization options
4016 if the function uses different options than the default. */
4018 static void
4019 invoke_set_current_function_hook (tree fndecl)
4021 if (!in_dummy_function)
4023 tree opts = ((fndecl)
4024 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4025 : optimization_default_node);
4027 if (!opts)
4028 opts = optimization_default_node;
4030 /* Change optimization options if needed. */
4031 if (optimization_current_node != opts)
4033 optimization_current_node = opts;
4034 cl_optimization_restore (TREE_OPTIMIZATION (opts));
4037 targetm.set_current_function (fndecl);
4041 /* cfun should never be set directly; use this function. */
4043 void
4044 set_cfun (struct function *new_cfun)
4046 if (cfun != new_cfun)
4048 cfun = new_cfun;
4049 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4053 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4055 static VEC(function_p,heap) *cfun_stack;
4057 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
4059 void
4060 push_cfun (struct function *new_cfun)
4062 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4063 set_cfun (new_cfun);
4066 /* Pop cfun from the stack. */
4068 void
4069 pop_cfun (void)
4071 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
4072 set_cfun (new_cfun);
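/* Typical usage of the pair above (added note, not in the original
   sources): code that needs cfun/crtl to refer to another function
   temporarily switches context with

       push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
       ... examine or emit RTL for other_fndecl ...
       pop_cfun ();

   which restores the previous cfun afterwards.  (other_fndecl is just
   a placeholder for whatever FUNCTION_DECL is of interest.)  */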
4075 /* Return the current value of funcdef_no and increment it. */
4076 int
4077 get_next_funcdef_no (void)
4079 return funcdef_no++;
4082 /* Allocate a function structure for FNDECL and set its contents
4083 to the defaults. Set cfun to the newly-allocated object.
4084 Some of the helper functions invoked during initialization assume
4085 that cfun has already been set. Therefore, assign the new object
4086 directly into cfun and invoke the back end hook explicitly at the
4087 very end, rather than initializing a temporary and calling set_cfun
4088 on it.
4090 ABSTRACT_P is true if this is a function that will never be seen by
4091 the middle-end. Such functions are front-end concepts (like C++
4092 function templates) that do not correspond directly to functions
4093 placed in object files. */
4095 void
4096 allocate_struct_function (tree fndecl, bool abstract_p)
4098 tree result;
4099 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4101 cfun = GGC_CNEW (struct function);
4103 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
4105 init_eh_for_function ();
4107 if (init_machine_status)
4108 cfun->machine = (*init_machine_status) ();
4110 #ifdef OVERRIDE_ABI_FORMAT
4111 OVERRIDE_ABI_FORMAT (fndecl);
4112 #endif
4114 invoke_set_current_function_hook (fndecl);
4116 if (fndecl != NULL_TREE)
4118 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4119 cfun->decl = fndecl;
4120 current_function_funcdef_no = get_next_funcdef_no ();
4122 result = DECL_RESULT (fndecl);
4123 if (!abstract_p && aggregate_value_p (result, fndecl))
4125 #ifdef PCC_STATIC_STRUCT_RETURN
4126 cfun->returns_pcc_struct = 1;
4127 #endif
4128 cfun->returns_struct = 1;
4131 cfun->stdarg
4132 = (fntype
4133 && TYPE_ARG_TYPES (fntype) != 0
4134 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4135 != void_type_node));
4137 /* Assume all registers in stdarg functions need to be saved. */
4138 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4139 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
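/* Added note on the stdarg test above (not in the original sources):
   in GCC's tree representation a prototyped parameter list is
   terminated by void_type_node, while a list ending in "..." is not,
   so checking the last TREE_VALUE distinguishes "int f (int, ...)"
   from "int f (int)".  An unprototyped declaration has
   TYPE_ARG_TYPES == NULL and is therefore treated as non-stdarg here.  */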
4143 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4144 instead of just setting it. */
4146 void
4147 push_struct_function (tree fndecl)
4149 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4150 allocate_struct_function (fndecl, false);
4153 /* Reset cfun and other non-struct-function variables to defaults as
4154 appropriate for emitting rtl at the start of a function. */
4156 static void
4157 prepare_function_start (void)
4159 gcc_assert (!crtl->emit.x_last_insn);
4160 init_temp_slots ();
4161 init_emit ();
4162 init_varasm_status ();
4163 init_expr ();
4164 default_rtl_profile ();
4166 cse_not_expected = ! optimize;
4168 /* Caller save not needed yet. */
4169 caller_save_needed = 0;
4171 /* We haven't done register allocation yet. */
4172 reg_renumber = 0;
4174 /* Indicate that we have not instantiated virtual registers yet. */
4175 virtuals_instantiated = 0;
4177 /* Indicate that we want CONCATs now. */
4178 generating_concat_p = 1;
4180 /* Indicate we have no need of a frame pointer yet. */
4181 frame_pointer_needed = 0;
4184 /* Initialize the rtl expansion mechanism so that we can do simple things
4185 like generate sequences. This is used to provide a context during global
4186 initialization of some passes. You must call expand_dummy_function_end
4187 to exit this context. */
4189 void
4190 init_dummy_function_start (void)
4192 gcc_assert (!in_dummy_function);
4193 in_dummy_function = true;
4194 push_struct_function (NULL_TREE);
4195 prepare_function_start ();
4198 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4199 and initialize static variables for generating RTL for the statements
4200 of the function. */
4202 void
4203 init_function_start (tree subr)
4205 if (subr && DECL_STRUCT_FUNCTION (subr))
4206 set_cfun (DECL_STRUCT_FUNCTION (subr));
4207 else
4208 allocate_struct_function (subr, false);
4209 prepare_function_start ();
4211 /* Warn if this value is an aggregate type,
4212 regardless of which calling convention we are using for it. */
4213 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4214 warning (OPT_Waggregate_return, "function returns an aggregate");
4217 /* Make sure all values used by the optimization passes have sane
4218 defaults. */
4219 unsigned int
4220 init_function_for_compilation (void)
4222 reg_renumber = 0;
4224 /* No prologue/epilogue insns yet. Make sure that these vectors are
4225 empty. */
4226 gcc_assert (VEC_length (int, prologue) == 0);
4227 gcc_assert (VEC_length (int, epilogue) == 0);
4228 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
4229 return 0;
4232 struct rtl_opt_pass pass_init_function =
4235 RTL_PASS,
4236 NULL, /* name */
4237 NULL, /* gate */
4238 init_function_for_compilation, /* execute */
4239 NULL, /* sub */
4240 NULL, /* next */
4241 0, /* static_pass_number */
4242 0, /* tv_id */
4243 0, /* properties_required */
4244 0, /* properties_provided */
4245 0, /* properties_destroyed */
4246 0, /* todo_flags_start */
4247 0 /* todo_flags_finish */
4252 void
4253 expand_main_function (void)
4255 #if (defined(INVOKE__main) \
4256 || (!defined(HAS_INIT_SECTION) \
4257 && !defined(INIT_SECTION_ASM_OP) \
4258 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4259 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4260 #endif
4263 /* Expand code to initialize the stack_protect_guard. This is invoked at
4264 the beginning of a function to be protected. */
4266 #ifndef HAVE_stack_protect_set
4267 # define HAVE_stack_protect_set 0
4268 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4269 #endif
4271 void
4272 stack_protect_prologue (void)
4274 tree guard_decl = targetm.stack_protect_guard ();
4275 rtx x, y;
4277 /* Avoid expand_expr here, because we don't want guard_decl pulled
4278 into registers unless absolutely necessary. And we know that
4279 crtl->stack_protect_guard is a local stack slot, so this skips
4280 all the fluff. */
4281 x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
4282 y = validize_mem (DECL_RTL (guard_decl));
4284 /* Allow the target to copy from Y to X without leaking Y into a
4285 register. */
4286 if (HAVE_stack_protect_set)
4288 rtx insn = gen_stack_protect_set (x, y);
4289 if (insn)
4291 emit_insn (insn);
4292 return;
4296 /* Otherwise do a straight move. */
4297 emit_move_insn (x, y);
4300 /* Expand code to verify the stack_protect_guard. This is invoked at
4301 the end of a function to be protected. */
4303 #ifndef HAVE_stack_protect_test
4304 # define HAVE_stack_protect_test 0
4305 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4306 #endif
4308 void
4309 stack_protect_epilogue (void)
4311 tree guard_decl = targetm.stack_protect_guard ();
4312 rtx label = gen_label_rtx ();
4313 rtx x, y, tmp;
4315 /* Avoid expand_expr here, because we don't want guard_decl pulled
4316 into registers unless absolutely necessary. And we know that
4317 crtl->stack_protect_guard is a local stack slot, so this skips
4318 all the fluff. */
4319 x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
4320 y = validize_mem (DECL_RTL (guard_decl));
4322 /* Allow the target to compare Y with X without leaking either into
4323 a register. */
4324 switch (HAVE_stack_protect_test != 0)
4326 case 1:
4327 tmp = gen_stack_protect_test (x, y, label);
4328 if (tmp)
4330 emit_insn (tmp);
4331 break;
4333 /* FALLTHRU */
4335 default:
4336 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4337 break;
4340 /* The noreturn predictor has been moved to the tree level. The rtl-level
4341 predictors estimate this branch at about 20%, which isn't enough to get
4342 things moved out of line. Since this is the only extant case of adding
4343 a noreturn function at the rtl level, it doesn't seem worth doing anything
4344 except adding the prediction by hand. */
4345 tmp = get_last_insn ();
4346 if (JUMP_P (tmp))
4347 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4349 expand_expr_stmt (targetm.stack_protect_fail ());
4350 emit_label (label);
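/* Rough sketch of the net effect of the prologue/epilogue pair above
   (added, not a literal transcription of the expansion):

       canary_slot = guard_value;          at function entry
       ...
       if (canary_slot != guard_value)     before returning
         targetm.stack_protect_fail ();    typically __stack_chk_fail

   where guard_value is whatever targetm.stack_protect_guard denotes
   (a global canary or a TLS slot, depending on the target).  */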
4353 /* Start the RTL for a new function, and set variables used for
4354 emitting RTL.
4355 SUBR is the FUNCTION_DECL node. */
4359 void
4360 expand_function_start (tree subr)
4362 /* Make sure volatile mem refs aren't considered
4363 valid operands of arithmetic insns. */
4364 init_recog_no_volatile ();
4366 crtl->profile
4367 = (profile_flag
4368 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4370 crtl->limit_stack
4371 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4373 /* Make the label for return statements to jump to. Do not special
4374 case machines with special return instructions -- they will be
4375 handled later during jump, ifcvt, or epilogue creation. */
4376 return_label = gen_label_rtx ();
4378 /* Initialize rtx used to return the value. */
4379 /* Do this before assign_parms so that we copy the struct value address
4380 before any library calls that assign parms might generate. */
4382 /* Decide whether to return the value in memory or in a register. */
4383 if (aggregate_value_p (DECL_RESULT (subr), subr))
4385 /* Returning something that won't go in a register. */
4386 rtx value_address = 0;
4388 #ifdef PCC_STATIC_STRUCT_RETURN
4389 if (cfun->returns_pcc_struct)
4391 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4392 value_address = assemble_static_space (size);
4394 else
4395 #endif
4397 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4398 /* Expect to be passed the address of a place to store the value.
4399 If it is passed as an argument, assign_parms will take care of
4400 it. */
4401 if (sv)
4403 value_address = gen_reg_rtx (Pmode);
4404 emit_move_insn (value_address, sv);
4407 if (value_address)
4409 rtx x = value_address;
4410 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4412 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4413 set_mem_attributes (x, DECL_RESULT (subr), 1);
4415 SET_DECL_RTL (DECL_RESULT (subr), x);
4418 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4419 /* If return mode is void, this decl rtl should not be used. */
4420 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4421 else
4423 /* Compute the return values into a pseudo reg, which we will copy
4424 into the true return register after the cleanups are done. */
4425 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4426 if (TYPE_MODE (return_type) != BLKmode
4427 && targetm.calls.return_in_msb (return_type))
4428 /* expand_function_end will insert the appropriate padding in
4429 this case. Use the return value's natural (unpadded) mode
4430 within the function proper. */
4431 SET_DECL_RTL (DECL_RESULT (subr),
4432 gen_reg_rtx (TYPE_MODE (return_type)));
4433 else
4435 /* In order to figure out what mode to use for the pseudo, we
4436 figure out what the mode of the eventual return register will
4437 actually be, and use that. */
4438 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4440 /* Structures that are returned in registers are not
4441 aggregate_value_p, so we may see a PARALLEL or a REG. */
4442 if (REG_P (hard_reg))
4443 SET_DECL_RTL (DECL_RESULT (subr),
4444 gen_reg_rtx (GET_MODE (hard_reg)));
4445 else
4447 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4448 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4452 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4453 result to the real return register(s). */
4454 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4457 /* Initialize rtx for parameters and local variables.
4458 In some cases this requires emitting insns. */
4459 assign_parms (subr);
4461 /* If function gets a static chain arg, store it. */
4462 if (cfun->static_chain_decl)
4464 tree parm = cfun->static_chain_decl;
4465 rtx local = gen_reg_rtx (Pmode);
4467 set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false);
4468 SET_DECL_RTL (parm, local);
4469 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4471 emit_move_insn (local, static_chain_incoming_rtx);
4474 /* If the function receives a non-local goto, then store the
4475 bits we need to restore the frame pointer. */
4476 if (cfun->nonlocal_goto_save_area)
4478 tree t_save;
4479 rtx r_save;
4481 /* ??? We need to do this save early. Unfortunately, this point is
4482 before the frame variable gets declared. Help out... */
4483 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4484 if (!DECL_RTL_SET_P (var))
4485 expand_decl (var);
4487 t_save = build4 (ARRAY_REF, ptr_type_node,
4488 cfun->nonlocal_goto_save_area,
4489 integer_zero_node, NULL_TREE, NULL_TREE);
4490 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4491 r_save = convert_memory_address (Pmode, r_save);
4493 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4494 update_nonlocal_goto_save_area ();
4497 /* The following was moved from init_function_start.
4498 The move is supposed to make sdb output more accurate. */
4499 /* Indicate the beginning of the function body,
4500 as opposed to parm setup. */
4501 emit_note (NOTE_INSN_FUNCTION_BEG);
4503 gcc_assert (NOTE_P (get_last_insn ()));
4505 parm_birth_insn = get_last_insn ();
4507 if (crtl->profile)
4509 #ifdef PROFILE_HOOK
4510 PROFILE_HOOK (current_function_funcdef_no);
4511 #endif
4514 /* After the display initializations is where the stack checking
4515 probe should go. */
4516 if (flag_stack_check)
4517 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4519 /* Make sure there is a line number after the function entry setup code. */
4520 force_next_line_note ();
4523 /* Undo the effects of init_dummy_function_start. */
4524 void
4525 expand_dummy_function_end (void)
4527 gcc_assert (in_dummy_function);
4529 /* End any sequences that failed to be closed due to syntax errors. */
4530 while (in_sequence_p ())
4531 end_sequence ();
4533 /* Outside function body, can't compute type's actual size
4534 until next function's body starts. */
4536 free_after_parsing (cfun);
4537 free_after_compilation (cfun);
4538 pop_cfun ();
4539 in_dummy_function = false;
4542 /* Call DOIT for each hard register used as a return value from
4543 the current function. */
4545 void
4546 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4548 rtx outgoing = crtl->return_rtx;
4550 if (! outgoing)
4551 return;
4553 if (REG_P (outgoing))
4554 (*doit) (outgoing, arg);
4555 else if (GET_CODE (outgoing) == PARALLEL)
4557 int i;
4559 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4561 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4563 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4564 (*doit) (x, arg);
4569 static void
4570 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4572 emit_clobber (reg);
4575 void
4576 clobber_return_register (void)
4578 diddle_return_value (do_clobber_return_reg, NULL);
4580 /* In case we do use a pseudo to return the value, clobber it too. */
4581 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4583 tree decl_result = DECL_RESULT (current_function_decl);
4584 rtx decl_rtl = DECL_RTL (decl_result);
4585 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4587 do_clobber_return_reg (decl_rtl, NULL);
4592 static void
4593 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4595 emit_use (reg);
4598 static void
4599 use_return_register (void)
4601 diddle_return_value (do_use_return_reg, NULL);
4604 /* Possibly warn about unused parameters. */
4605 void
4606 do_warn_unused_parameter (tree fn)
4608 tree decl;
4610 for (decl = DECL_ARGUMENTS (fn);
4611 decl; decl = TREE_CHAIN (decl))
4612 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4613 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4614 && !TREE_NO_WARNING (decl))
4615 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4618 static GTY(()) rtx initial_trampoline;
4620 /* Generate RTL for the end of the current function. */
4622 void
4623 expand_function_end (void)
4625 rtx clobber_after;
4627 /* If arg_pointer_save_area was referenced only from a nested
4628 function, we will not have initialized it yet. Do that now. */
4629 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4630 get_arg_pointer_save_area ();
4632 /* If we are doing generic stack checking and this function makes calls,
4633 do a stack probe at the start of the function to ensure we have enough
4634 space for another stack frame. */
4635 if (flag_stack_check == GENERIC_STACK_CHECK)
4637 rtx insn, seq;
4639 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4640 if (CALL_P (insn))
4642 start_sequence ();
4643 probe_stack_range (STACK_OLD_CHECK_PROTECT,
4644 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4645 seq = get_insns ();
4646 end_sequence ();
4647 emit_insn_before (seq, stack_check_probe_note);
4648 break;
4652 /* End any sequences that failed to be closed due to syntax errors. */
4653 while (in_sequence_p ())
4654 end_sequence ();
4656 clear_pending_stack_adjust ();
4657 do_pending_stack_adjust ();
4659 /* Output a line number for the end of the function.
4660 SDB depends on this. */
4661 force_next_line_note ();
4662 set_curr_insn_source_location (input_location);
4664 /* Before the return label (if any), clobber the return
4665 registers so that they are not propagated live to the rest of
4666 the function. This can only happen with functions that drop
4667 through; if there had been a return statement, there would
4668 have either been a return rtx, or a jump to the return label.
4670 We delay actual code generation until after the current_function_value_rtx
4671 is computed. */
4672 clobber_after = get_last_insn ();
4674 /* Output the label for the actual return from the function. */
4675 emit_label (return_label);
4677 if (USING_SJLJ_EXCEPTIONS)
4679 /* Let except.c know where it should emit the call to unregister
4680 the function context for sjlj exceptions. */
4681 if (flag_exceptions)
4682 sjlj_emit_function_exit_after (get_last_insn ());
4684 else
4686 /* We want to ensure that instructions that may trap are not
4687 moved into the epilogue by scheduling, because we don't
4688 always emit unwind information for the epilogue. */
4689 if (flag_non_call_exceptions)
4690 emit_insn (gen_blockage ());
4693 /* If this is an implementation of throw, do what's necessary to
4694 communicate between __builtin_eh_return and the epilogue. */
4695 expand_eh_return ();
4697 /* If scalar return value was computed in a pseudo-reg, or was a named
4698 return value that got dumped to the stack, copy that to the hard
4699 return register. */
4700 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4702 tree decl_result = DECL_RESULT (current_function_decl);
4703 rtx decl_rtl = DECL_RTL (decl_result);
4705 if (REG_P (decl_rtl)
4706 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4707 : DECL_REGISTER (decl_result))
4709 rtx real_decl_rtl = crtl->return_rtx;
4711 /* This should be set in assign_parms. */
4712 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4714 /* If this is a BLKmode structure being returned in registers,
4715 then use the mode computed in expand_return. Note that if
4716 decl_rtl is memory, then its mode may have been changed,
4717 but that crtl->return_rtx has not. */
4718 if (GET_MODE (real_decl_rtl) == BLKmode)
4719 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4721 /* If a non-BLKmode return value should be padded at the least
4722 significant end of the register, shift it left by the appropriate
4723 amount. BLKmode results are handled using the group load/store
4724 machinery. */
4725 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4726 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4728 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4729 REGNO (real_decl_rtl)),
4730 decl_rtl);
4731 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4733 /* If a named return value dumped decl_return to memory, then
4734 we may need to re-do the PROMOTE_MODE signed/unsigned
4735 extension. */
4736 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4738 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4740 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4741 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4742 &unsignedp, 1);
4744 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4746 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4748 /* If expand_function_start has created a PARALLEL for decl_rtl,
4749 move the result to the real return registers. Otherwise, do
4750 a group load from decl_rtl for a named return. */
4751 if (GET_CODE (decl_rtl) == PARALLEL)
4752 emit_group_move (real_decl_rtl, decl_rtl);
4753 else
4754 emit_group_load (real_decl_rtl, decl_rtl,
4755 TREE_TYPE (decl_result),
4756 int_size_in_bytes (TREE_TYPE (decl_result)));
4758 /* In the case of complex integer modes smaller than a word, we'll
4759 need to generate some non-trivial bitfield insertions. Do that
4760 on a pseudo and not the hard register. */
4761 else if (GET_CODE (decl_rtl) == CONCAT
4762 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4763 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4765 int old_generating_concat_p;
4766 rtx tmp;
4768 old_generating_concat_p = generating_concat_p;
4769 generating_concat_p = 0;
4770 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4771 generating_concat_p = old_generating_concat_p;
4773 emit_move_insn (tmp, decl_rtl);
4774 emit_move_insn (real_decl_rtl, tmp);
4776 else
4777 emit_move_insn (real_decl_rtl, decl_rtl);
4781 /* If returning a structure, arrange to return the address of the value
4782 in a place where debuggers expect to find it.
4784 If returning a structure PCC style,
4785 the caller also depends on this value.
4786 And cfun->returns_pcc_struct is not necessarily set. */
4787 if (cfun->returns_struct
4788 || cfun->returns_pcc_struct)
4790 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4791 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4792 rtx outgoing;
4794 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4795 type = TREE_TYPE (type);
4796 else
4797 value_address = XEXP (value_address, 0);
4799 outgoing = targetm.calls.function_value (build_pointer_type (type),
4800 current_function_decl, true);
4802 /* Mark this as a function return value so integrate will delete the
4803 assignment and USE below when inlining this function. */
4804 REG_FUNCTION_VALUE_P (outgoing) = 1;
4806 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4807 value_address = convert_memory_address (GET_MODE (outgoing),
4808 value_address);
4810 emit_move_insn (outgoing, value_address);
4812 /* Show return register used to hold result (in this case the address
4813 of the result). */
4814 crtl->return_rtx = outgoing;
4817 /* Emit the actual code to clobber return register. */
4819 rtx seq;
4821 start_sequence ();
4822 clobber_return_register ();
4823 expand_naked_return ();
4824 seq = get_insns ();
4825 end_sequence ();
4827 emit_insn_after (seq, clobber_after);
4830 /* Output the label for the naked return from the function. */
4831 emit_label (naked_return_label);
4833 /* @@@ This is a kludge. We want to ensure that instructions that
4834 may trap are not moved into the epilogue by scheduling, because
4835 we don't always emit unwind information for the epilogue. */
4836 if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
4837 emit_insn (gen_blockage ());
4839 /* If stack protection is enabled for this function, check the guard. */
4840 if (crtl->stack_protect_guard)
4841 stack_protect_epilogue ();
4843 /* If we had calls to alloca, and this machine needs
4844 an accurate stack pointer to exit the function,
4845 insert some code to save and restore the stack pointer. */
4846 if (! EXIT_IGNORE_STACK
4847 && cfun->calls_alloca)
4849 rtx tem = 0;
4851 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4852 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4855 /* ??? This should no longer be necessary since stupid is no longer with
4856 us, but there are some parts of the compiler (e.g. reload_combine, and
4857 sh mach_dep_reorg) that still try to compute their own lifetime info
4858 instead of using the general framework. */
4859 use_return_register ();
4862 rtx
4863 get_arg_pointer_save_area (void)
4865 rtx ret = arg_pointer_save_area;
4867 if (! ret)
4869 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4870 arg_pointer_save_area = ret;
4873 if (! crtl->arg_pointer_save_area_init)
4875 rtx seq;
4877 /* Save the arg pointer at the beginning of the function. The
4878 generated stack slot may not be a valid memory address, so we
4879 have to check it and fix it if necessary. */
4880 start_sequence ();
4881 emit_move_insn (validize_mem (ret),
4882 crtl->args.internal_arg_pointer);
4883 seq = get_insns ();
4884 end_sequence ();
4886 push_topmost_sequence ();
4887 emit_insn_after (seq, entry_of_function ());
4888 pop_topmost_sequence ();
4891 return ret;
4894 /* Extend a vector that records the INSN_UIDs of INSNS
4895 (a list of one or more insns). */
4897 static void
4898 record_insns (rtx insns, VEC(int,heap) **vecp)
4900 rtx tmp;
4902 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4903 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4906 /* Set the locator of the insn chain starting at INSN to LOC. */
4907 static void
4908 set_insn_locators (rtx insn, int loc)
4910 while (insn != NULL_RTX)
4912 if (INSN_P (insn))
4913 INSN_LOCATOR (insn) = loc;
4914 insn = NEXT_INSN (insn);
4918 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4919 be running after reorg, SEQUENCE rtl is possible. */
4921 static int
4922 contains (const_rtx insn, VEC(int,heap) **vec)
4924 int i, j;
4926 if (NONJUMP_INSN_P (insn)
4927 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4929 int count = 0;
4930 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4931 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4932 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4933 == VEC_index (int, *vec, j))
4934 count++;
4935 return count;
4937 else
4939 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4940 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4941 return 1;
4943 return 0;
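/* Added note (not in the original sources): after delayed-branch
   scheduling an insn may be a SEQUENCE wrapping a branch together
   with its delay-slot insns, which is why the loop above checks every
   element of the SEQUENCE instead of only the outer insn.  */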
4946 int
4947 prologue_epilogue_contains (const_rtx insn)
4949 if (contains (insn, &prologue))
4950 return 1;
4951 if (contains (insn, &epilogue))
4952 return 1;
4953 return 0;
4956 int
4957 sibcall_epilogue_contains (const_rtx insn)
4959 if (sibcall_epilogue)
4960 return contains (insn, &sibcall_epilogue);
4961 return 0;
4964 #ifdef HAVE_return
4965 /* Insert gen_return at the end of block BB. This also means updating
4966 block_for_insn appropriately. */
4968 static void
4969 emit_return_into_block (basic_block bb)
4971 emit_jump_insn_after (gen_return (), BB_END (bb));
4973 #endif /* HAVE_return */
4975 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4976 this into place with notes indicating where the prologue ends and where
4977 the epilogue begins. Update the basic block information when possible. */
4979 static void
4980 thread_prologue_and_epilogue_insns (void)
4982 int inserted = 0;
4983 edge e;
4984 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
4985 rtx seq;
4986 #endif
4987 #if defined (HAVE_epilogue) || defined(HAVE_return)
4988 rtx epilogue_end = NULL_RTX;
4989 #endif
4990 edge_iterator ei;
4992 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4993 #ifdef HAVE_prologue
4994 if (HAVE_prologue)
4996 start_sequence ();
4997 seq = gen_prologue ();
4998 emit_insn (seq);
5000 /* Insert an explicit USE for the frame pointer
5001 if the profiling is on and the frame pointer is required. */
5002 if (crtl->profile && frame_pointer_needed)
5003 emit_use (hard_frame_pointer_rtx);
5005 /* Retain a map of the prologue insns. */
5006 record_insns (seq, &prologue);
5007 emit_note (NOTE_INSN_PROLOGUE_END);
5009 #ifndef PROFILE_BEFORE_PROLOGUE
5010 /* Ensure that instructions are not moved into the prologue when
5011 profiling is on. The call to the profiling routine can be
5012 emitted within the live range of a call-clobbered register. */
5013 if (crtl->profile)
5014 emit_insn (gen_blockage ());
5015 #endif
5017 seq = get_insns ();
5018 end_sequence ();
5019 set_insn_locators (seq, prologue_locator);
5021 /* Can't deal with multiple successors of the entry block
5022 at the moment. Function should always have at least one
5023 entry point. */
5024 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5026 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5027 inserted = 1;
5029 #endif
5031 /* If the exit block has no non-fake predecessors, we don't need
5032 an epilogue. */
5033 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5034 if ((e->flags & EDGE_FAKE) == 0)
5035 break;
5036 if (e == NULL)
5037 goto epilogue_done;
5039 rtl_profile_for_bb (EXIT_BLOCK_PTR);
5040 #ifdef HAVE_return
5041 if (optimize && HAVE_return)
5043 /* If we're allowed to generate a simple return instruction,
5044 then by definition we don't need a full epilogue. Examine
5045 the block that falls through to EXIT. If it does not
5046 contain any code, examine its predecessors and try to
5047 emit (conditional) return instructions. */
5049 basic_block last;
5050 rtx label;
5052 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5053 if (e->flags & EDGE_FALLTHRU)
5054 break;
5055 if (e == NULL)
5056 goto epilogue_done;
5057 last = e->src;
5059 /* Verify that there are no active instructions in the last block. */
5060 label = BB_END (last);
5061 while (label && !LABEL_P (label))
5063 if (active_insn_p (label))
5064 break;
5065 label = PREV_INSN (label);
5068 if (BB_HEAD (last) == label && LABEL_P (label))
5070 edge_iterator ei2;
5072 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5074 basic_block bb = e->src;
5075 rtx jump;
5077 if (bb == ENTRY_BLOCK_PTR)
5079 ei_next (&ei2);
5080 continue;
5083 jump = BB_END (bb);
5084 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5086 ei_next (&ei2);
5087 continue;
5090 /* If we have an unconditional jump, we can replace that
5091 with a simple return instruction. */
5092 if (simplejump_p (jump))
5094 emit_return_into_block (bb);
5095 delete_insn (jump);
5098 /* If we have a conditional jump, we can try to replace
5099 that with a conditional return instruction. */
5100 else if (condjump_p (jump))
5102 if (! redirect_jump (jump, 0, 0))
5104 ei_next (&ei2);
5105 continue;
5108 /* If this block has only one successor, it both jumps
5109 and falls through to the fallthru block, so we can't
5110 delete the edge. */
5111 if (single_succ_p (bb))
5113 ei_next (&ei2);
5114 continue;
5117 else
5119 ei_next (&ei2);
5120 continue;
5123 /* Fix up the CFG for the successful change we just made. */
5124 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5127 /* Emit a return insn for the exit fallthru block. Whether
5128 this is still reachable will be determined later. */
5130 emit_barrier_after (BB_END (last));
5131 emit_return_into_block (last);
5132 epilogue_end = BB_END (last);
5133 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5134 goto epilogue_done;
5137 #endif
5138 /* Find the edge that falls through to EXIT. Other edges may exist
5139 due to RETURN instructions, but those don't need epilogues.
5140 There really shouldn't be a mixture -- either all should have
5141 been converted or none, however... */
5143 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5144 if (e->flags & EDGE_FALLTHRU)
5145 break;
5146 if (e == NULL)
5147 goto epilogue_done;
5149 #ifdef HAVE_epilogue
5150 if (HAVE_epilogue)
5152 start_sequence ();
5153 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5154 seq = gen_epilogue ();
5155 emit_jump_insn (seq);
5157 /* Retain a map of the epilogue insns. */
5158 record_insns (seq, &epilogue);
5159 set_insn_locators (seq, epilogue_locator);
5161 seq = get_insns ();
5162 end_sequence ();
5164 insert_insn_on_edge (seq, e);
5165 inserted = 1;
5167 else
5168 #endif
5170 basic_block cur_bb;
5172 if (! next_active_insn (BB_END (e->src)))
5173 goto epilogue_done;
5174 /* We have a fall-through edge to the exit block, the source is not
5175 at the end of the function, and there will be an assembler epilogue
5176 at the end of the function.
5177 We can't use force_nonfallthru here, because that would try to
5178 use return. Inserting a jump 'by hand' is extremely messy, so
5179 we take advantage of cfg_layout_finalize using
5180 fixup_fallthru_exit_predecessor. */
5181 cfg_layout_initialize (0);
5182 FOR_EACH_BB (cur_bb)
5183 if (cur_bb->index >= NUM_FIXED_BLOCKS
5184 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5185 cur_bb->aux = cur_bb->next_bb;
5186 cfg_layout_finalize ();
5188 epilogue_done:
5189 default_rtl_profile ();
5191 if (inserted)
5193 commit_edge_insertions ();
5195 /* The epilogue insns we inserted may cause the exit edge to no longer
5196 be fallthru. */
5197 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5199 if (((e->flags & EDGE_FALLTHRU) != 0)
5200 && returnjump_p (BB_END (e->src)))
5201 e->flags &= ~EDGE_FALLTHRU;
5205 #ifdef HAVE_sibcall_epilogue
5206 /* Emit sibling epilogues before any sibling call sites. */
5207 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5209 basic_block bb = e->src;
5210 rtx insn = BB_END (bb);
5212 if (!CALL_P (insn)
5213 || ! SIBLING_CALL_P (insn))
5215 ei_next (&ei);
5216 continue;
5219 start_sequence ();
5220 emit_insn (gen_sibcall_epilogue ());
5221 seq = get_insns ();
5222 end_sequence ();
5224 /* Retain a map of the epilogue insns. Used in life analysis to
5225 avoid getting rid of sibcall epilogue insns. Do this before we
5226 actually emit the sequence. */
5227 record_insns (seq, &sibcall_epilogue);
5228 set_insn_locators (seq, epilogue_locator);
5230 emit_insn_before (seq, insn);
5231 ei_next (&ei);
5233 #endif
5235 #ifdef HAVE_epilogue
5236 if (epilogue_end)
5238 rtx insn, next;
5240 /* Similarly, move any line notes that appear after the epilogue.
5241 There is no need, however, to be quite so anal about the existence
5242 of such a note. Also possibly move
5243 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5244 info generation. */
5245 for (insn = epilogue_end; insn; insn = next)
5247 next = NEXT_INSN (insn);
5248 if (NOTE_P (insn)
5249 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
5250 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5253 #endif
5255 /* Threading the prologue and epilogue changes the artificial refs
5256 in the entry and exit blocks. */
5257 epilogue_completed = 1;
5258 df_update_entry_exit_and_calls ();
5261 /* Reposition the prologue-end and epilogue-begin notes after instruction
5262 scheduling and delayed branch scheduling. */
5264 void
5265 reposition_prologue_and_epilogue_notes (void)
5267 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5268 rtx insn, last, note;
5269 int len;
5271 if ((len = VEC_length (int, prologue)) > 0)
5273 last = 0, note = 0;
5275 /* Scan from the beginning until we reach the last prologue insn.
5276 We apparently can't depend on basic_block_{head,end} after
5277 reorg has run. */
5278 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5280 if (NOTE_P (insn))
5282 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
5283 note = insn;
5285 else if (contains (insn, &prologue))
5287 last = insn;
5288 if (--len == 0)
5289 break;
5293 if (last)
5295 /* Find the prologue-end note if we haven't already, and
5296 move it to just after the last prologue insn. */
5297 if (note == 0)
5299 for (note = last; (note = NEXT_INSN (note));)
5300 if (NOTE_P (note)
5301 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
5302 break;
5305 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5306 if (LABEL_P (last))
5307 last = NEXT_INSN (last);
5308 reorder_insns (note, note, last);
5312 if ((len = VEC_length (int, epilogue)) > 0)
5314 last = 0, note = 0;
5316 /* Scan from the end until we reach the first epilogue insn.
5317 We apparently can't depend on basic_block_{head,end} after
5318 reorg has run. */
5319 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5321 if (NOTE_P (insn))
5323 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
5324 note = insn;
5326 else if (contains (insn, &epilogue))
5328 last = insn;
5329 if (--len == 0)
5330 break;
5334 if (last)
5336 /* Find the epilogue-begin note if we haven't already, and
5337 move it to just before the first epilogue insn. */
5338 if (note == 0)
5340 for (note = insn; (note = PREV_INSN (note));)
5341 if (NOTE_P (note)
5342 && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
5343 break;
5346 if (PREV_INSN (last) != note)
5347 reorder_insns (note, note, PREV_INSN (last));
5350 #endif /* HAVE_prologue or HAVE_epilogue */
5353 /* Returns the name of the current function. */
5354 const char *
5355 current_function_name (void)
5357 return lang_hooks.decl_printable_name (cfun->decl, 2);
5360 /* Returns the raw (mangled) name of the current function. */
5361 const char *
5362 current_function_assembler_name (void)
5364 return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
5368 static unsigned int
5369 rest_of_handle_check_leaf_regs (void)
5371 #ifdef LEAF_REGISTERS
5372 current_function_uses_only_leaf_regs
5373 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5374 #endif
5375 return 0;
5378 /* Insert a TYPE into the used types hash table of CFUN. */
5380 static void
5381 used_types_insert_helper (tree type, struct function *func)
5383 if (type != NULL && func != NULL)
5385 void **slot;
5387 if (func->used_types_hash == NULL)
5388 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5389 htab_eq_pointer, NULL);
5390 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5391 if (*slot == NULL)
5392 *slot = type;
5396 /* Given a type, insert it into the used types hash table in cfun. */
5397 void
5398 used_types_insert (tree t)
5400 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5401 t = TREE_TYPE (t);
5402 t = TYPE_MAIN_VARIANT (t);
5403 if (debug_info_level > DINFO_LEVEL_NONE)
5405 if (cfun)
5406 used_types_insert_helper (t, cfun);
5407 else
5408 /* So this might be a type referenced by a global variable.
5409 Record that type so that we can later decide to emit its debug
5410 information. */
5411 types_used_by_cur_var_decl =
5412 tree_cons (t, NULL, types_used_by_cur_var_decl);
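/* Illustrative example (added, not in the original sources): for a
   variable declared as "const struct S *a[10]", the loop above strips
   the ARRAY_TYPE and POINTER_TYPE wrappers and TYPE_MAIN_VARIANT
   drops the qualifiers, so the type recorded for later debug-info
   decisions is plain "struct S".  */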
5417 /* Helper to hash a struct types_used_by_vars_entry. */
5419 static hashval_t
5420 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
5422 gcc_assert (entry && entry->var_decl && entry->type);
5424 return iterative_hash_object (entry->type,
5425 iterative_hash_object (entry->var_decl, 0));
5428 /* Hash function of the types_used_by_vars_entry hash table. */
5430 hashval_t
5431 types_used_by_vars_do_hash (const void *x)
5433 const struct types_used_by_vars_entry *entry =
5434 (const struct types_used_by_vars_entry *) x;
5436 return hash_types_used_by_vars_entry (entry);
5439 /* Equality function of the types_used_by_vars_entry hash table. */
5441 int
5442 types_used_by_vars_eq (const void *x1, const void *x2)
5444 const struct types_used_by_vars_entry *e1 =
5445 (const struct types_used_by_vars_entry *) x1;
5446 const struct types_used_by_vars_entry *e2 =
5447 (const struct types_used_by_vars_entry *)x2;
5449 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
5452 /* Inserts an entry into the types_used_by_vars_hash hash table. */
5454 void
5455 types_used_by_var_decl_insert (tree type, tree var_decl)
5457 if (type != NULL && var_decl != NULL)
5459 void **slot;
5460 struct types_used_by_vars_entry e;
5461 e.var_decl = var_decl;
5462 e.type = type;
5463 if (types_used_by_vars_hash == NULL)
5464 types_used_by_vars_hash =
5465 htab_create_ggc (37, types_used_by_vars_do_hash,
5466 types_used_by_vars_eq, NULL);
5467 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
5468 hash_types_used_by_vars_entry (&e), INSERT);
5469 if (*slot == NULL)
5471 struct types_used_by_vars_entry *entry;
5472 entry = (struct types_used_by_vars_entry*) ggc_alloc
5473 (sizeof (struct types_used_by_vars_entry));
5474 entry->type = type;
5475 entry->var_decl = var_decl;
5476 *slot = entry;
5481 struct rtl_opt_pass pass_leaf_regs =
5484 RTL_PASS,
5485 NULL, /* name */
5486 NULL, /* gate */
5487 rest_of_handle_check_leaf_regs, /* execute */
5488 NULL, /* sub */
5489 NULL, /* next */
5490 0, /* static_pass_number */
5491 0, /* tv_id */
5492 0, /* properties_required */
5493 0, /* properties_provided */
5494 0, /* properties_destroyed */
5495 0, /* todo_flags_start */
5496 0 /* todo_flags_finish */
5500 static unsigned int
5501 rest_of_handle_thread_prologue_and_epilogue (void)
5503 if (optimize)
5504 cleanup_cfg (CLEANUP_EXPENSIVE);
5505 /* On some machines, the prologue and epilogue code, or parts thereof,
5506 can be represented as RTL. Doing so lets us schedule insns between
5507 it and the rest of the code and also allows delayed branch
5508 scheduling to operate in the epilogue. */
5510 thread_prologue_and_epilogue_insns ();
5511 return 0;
5514 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
5517 RTL_PASS,
5518 "pro_and_epilogue", /* name */
5519 NULL, /* gate */
5520 rest_of_handle_thread_prologue_and_epilogue, /* execute */
5521 NULL, /* sub */
5522 NULL, /* next */
5523 0, /* static_pass_number */
5524 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
5525 0, /* properties_required */
5526 0, /* properties_provided */
5527 0, /* properties_destroyed */
5528 TODO_verify_flow, /* todo_flags_start */
5529 TODO_dump_func |
5530 TODO_df_verify |
5531 TODO_df_finish | TODO_verify_rtl_sharing |
5532 TODO_ggc_collect /* todo_flags_finish */
5537 /* This mini-pass fixes fall-out from SSA in asm statements that have
5538 in-out constraints. Say you start with
5540 orig = inout;
5541 asm ("": "+mr" (inout));
5542 use (orig);
5544 which is transformed very early to use explicit output and match operands:
5546 orig = inout;
5547 asm ("": "=mr" (inout) : "0" (inout));
5548 use (orig);
5550 Or, after SSA and copyprop,
5552 asm ("": "=mr" (inout_2) : "0" (inout_1));
5553 use (inout_1);
5555 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
5556 they represent two separate values, so they will get different pseudo
5557 registers during expansion. Then, since the two operands need to match
5558 per the constraints, but use different pseudo registers, reload can
5559 only register a reload for these operands. But reloads can only be
5560 satisfied by hardregs, not by memory, so we need a register for this
5561 reload, just because we are presented with non-matching operands.
5562 So, even though we allow memory for this operand, no memory can be
5563 used for it, just because the two operands don't match. This can
5564 cause reload failures on register-starved targets.
5566 So it's a symptom of reload not being able to use memory for reloads
5567 or, alternatively it's also a symptom of both operands not coming into
5568 reload as matching (in which case the pseudo could go to memory just
5569 fine, as the alternative allows it, and no reload would be necessary).
5570 We fix the latter problem here, by transforming
5572 asm ("": "=mr" (inout_2) : "0" (inout_1));
5574 back to
5576 inout_2 = inout_1;
5577 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
5579 static void
5580 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
5582 int i;
5583 bool changed = false;
5584 rtx op = SET_SRC (p_sets[0]);
5585 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
5586 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
5587 bool *output_matched = XALLOCAVEC (bool, noutputs);
5589 memset (output_matched, 0, noutputs * sizeof (bool));
5590 for (i = 0; i < ninputs; i++)
5592 rtx input, output, insns;
5593 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
5594 char *end;
5595 int match, j;
5597 if (*constraint == '%')
5598 constraint++;
5600 match = strtoul (constraint, &end, 10);
5601 if (end == constraint)
5602 continue;
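      /* Added note (not in the original sources): a matching
	 constraint is a plain decimal operand number such as the "0"
	 in the examples in the comment above; for letter constraints
	 like "r" or "m", strtoul consumes nothing and the input is
	 skipped by the test above.  */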
5604 gcc_assert (match < noutputs);
5605 output = SET_DEST (p_sets[match]);
5606 input = RTVEC_ELT (inputs, i);
5607 /* Only do the transformation for pseudos. */
5608 if (! REG_P (output)
5609 || rtx_equal_p (output, input)
5610 || (GET_MODE (input) != VOIDmode
5611 && GET_MODE (input) != GET_MODE (output)))
5612 continue;
5614 /* We can't do anything if the output is also used as input,
5615 as we're going to overwrite it. */
5616 for (j = 0; j < ninputs; j++)
5617 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
5618 break;
5619 if (j != ninputs)
5620 continue;
5622 /* Avoid changing the same input several times. For
5623 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
5624 only change in once (to out1), rather than changing it
5625 first to out1 and afterwards to out2. */
5626 if (i > 0)
5628 for (j = 0; j < noutputs; j++)
5629 if (output_matched[j] && input == SET_DEST (p_sets[j]))
5630 break;
5631 if (j != noutputs)
5632 continue;
5634 output_matched[match] = true;
5636 start_sequence ();
5637 emit_move_insn (output, input);
5638 insns = get_insns ();
5639 end_sequence ();
5640 emit_insn_before (insns, insn);
5642 /* Now replace all mentions of the input with output. We can't
5643 just replace the occurrence in inputs[i], as the register might
5644 also be used in some other input (or even in an address of an
5645 output), which would mean possibly increasing the number of
5646 inputs by one (namely 'output' in addition), which might pose
5647 a too complicated problem for reload to solve. E.g. this situation:
5649 asm ("" : "=r" (output), "=m" (input) : "0" (input))
5651 Here 'input' is used in two occurrences as input (once for the
5652 input operand, once for the address in the second output operand).
5653 If we would replace only the occurrence of the input operand (to
5654 make the matching) we would be left with this:
5656 output = input
5657 asm ("" : "=r" (output), "=m" (input) : "0" (output))
5659 Now we suddenly have two different input values (containing the same
5660 value, but different pseudos) where we formerly had only one.
5661 With more complicated asms this might lead to reload failures
5662 which wouldn't have happened without this pass. So, iterate over
5663 all operands and replace all occurrences of the register used. */
5664 for (j = 0; j < noutputs; j++)
5665 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
5666 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
5667 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
5668 input, output);
5669 for (j = 0; j < ninputs; j++)
5670 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
5671 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
5672 input, output);
5674 changed = true;
5677 if (changed)
5678 df_insn_rescan (insn);
5681 static unsigned
5682 rest_of_match_asm_constraints (void)
5684 basic_block bb;
5685 rtx insn, pat, *p_sets;
5686 int noutputs;
5688 if (!crtl->has_asm_statement)
5689 return 0;
5691 df_set_flags (DF_DEFER_INSN_RESCAN);
5692 FOR_EACH_BB (bb)
5694 FOR_BB_INSNS (bb, insn)
5696 if (!INSN_P (insn))
5697 continue;
5699 pat = PATTERN (insn);
5700 if (GET_CODE (pat) == PARALLEL)
5701 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
5702 else if (GET_CODE (pat) == SET)
5703 p_sets = &PATTERN (insn), noutputs = 1;
5704 else
5705 continue;
5707 if (GET_CODE (*p_sets) == SET
5708 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
5709 match_asm_constraints_1 (insn, p_sets, noutputs);
5713 return TODO_df_finish;
5716 struct rtl_opt_pass pass_match_asm_constraints =
5719 RTL_PASS,
5720 "asmcons", /* name */
5721 NULL, /* gate */
5722 rest_of_match_asm_constraints, /* execute */
5723 NULL, /* sub */
5724 NULL, /* next */
5725 0, /* static_pass_number */
5726 0, /* tv_id */
5727 0, /* properties_required */
5728 0, /* properties_provided */
5729 0, /* properties_destroyed */
5730 0, /* todo_flags_start */
5731 TODO_dump_func /* todo_flags_finish */
5736 #include "gt-function.h"