gcc/function.c (official-gcc.git)
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010, 2011 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "rtl-error.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "output.h"
53 #include "basic-block.h"
54 #include "hashtab.h"
55 #include "ggc.h"
56 #include "tm_p.h"
57 #include "integrate.h"
58 #include "langhooks.h"
59 #include "target.h"
60 #include "cfglayout.h"
61 #include "gimple.h"
62 #include "tree-pass.h"
63 #include "predict.h"
64 #include "df.h"
65 #include "timevar.h"
66 #include "vecprim.h"
68 /* So we can assign to cfun in this file. */
69 #undef cfun
71 #ifndef STACK_ALIGNMENT_NEEDED
72 #define STACK_ALIGNMENT_NEEDED 1
73 #endif
75 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
 77 /* Some systems use __main in a way incompatible with its use in gcc; in these
78 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
79 give the same symbol without quotes for an alternative entry point. You
80 must define both, or neither. */
81 #ifndef NAME__MAIN
82 #define NAME__MAIN "__main"
83 #endif
 85 /* Round a value down to the nearest multiple of the required
 86    alignment.  Avoid using division in case the value is
87 negative. Assume the alignment is a power of two. */
88 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
 90 /* Similar, but round the value up to the nearest multiple of the
 91    alignment.  */
92 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
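/* Worked example (illustrative), with ALIGN == 8:
     CEIL_ROUND (13, 8)   == (13 + 7) & ~7 ==  16
     FLOOR_ROUND (13, 8)  ==       13 & ~7 ==   8
     FLOOR_ROUND (-13, 8) ==      -13 & ~7 == -16
   The masking form stays well defined for negative VALUEs, where
   division-based rounding would depend on the implementation's
   truncation direction.  */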
94 /* Nonzero if function being compiled doesn't contain any calls
95 (ignoring the prologue and epilogue). This is set prior to
96 local register allocation and is valid for the remaining
97 compiler passes. */
98 int current_function_is_leaf;
100 /* Nonzero if function being compiled doesn't modify the stack pointer
101 (ignoring the prologue and epilogue). This is only valid after
102 pass_stack_ptr_mod has run. */
103 int current_function_sp_is_unchanging;
105 /* Nonzero if the function being compiled is a leaf function which only
106 uses leaf registers. This is valid after reload (specifically after
107 sched2) and is useful only if the port defines LEAF_REGISTERS. */
108 int current_function_uses_only_leaf_regs;
110 /* Nonzero once virtual register instantiation has been done.
111 assign_stack_local uses frame_pointer_rtx when this is nonzero.
112 calls.c:emit_library_call_value_1 uses it to set up
113 post-instantiation libcalls. */
114 int virtuals_instantiated;
116 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
117 static GTY(()) int funcdef_no;
119 /* These variables hold pointers to functions to create and destroy
120 target specific, per-function data structures. */
121 struct machine_function * (*init_machine_status) (void);
123 /* The currently compiled function. */
124 struct function *cfun = 0;
126 /* These hashes record the prologue and epilogue insns. */
127 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
128 htab_t prologue_insn_hash;
129 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
130 htab_t epilogue_insn_hash;
133 htab_t types_used_by_vars_hash = NULL;
134 VEC(tree,gc) *types_used_by_cur_var_decl;
136 /* Forward declarations. */
138 static struct temp_slot *find_temp_slot_from_address (rtx);
139 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
140 static void pad_below (struct args_size *, enum machine_mode, tree);
141 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
142 static int all_blocks (tree, tree *);
143 static tree *get_block_vector (tree, int *);
144 extern tree debug_find_var_in_block_tree (tree, tree);
145 /* We always define `record_insns' even if it's not used so that we
146 can always export `prologue_epilogue_contains'. */
147 static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
148 static bool contains (const_rtx, htab_t);
149 #ifdef HAVE_return
150 static void emit_return_into_block (basic_block);
151 #endif
152 static void prepare_function_start (void);
153 static void do_clobber_return_reg (rtx, void *);
154 static void do_use_return_reg (rtx, void *);
155 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
 157 /* Stack of nested functions: keeps track of the cfun stack.  */
160 typedef struct function *function_p;
162 DEF_VEC_P(function_p);
163 DEF_VEC_ALLOC_P(function_p,heap);
164 static VEC(function_p,heap) *function_context_stack;
166 /* Save the current context for compilation of a nested function.
167 This is called from language-specific code. */
169 void
170 push_function_context (void)
172 if (cfun == 0)
173 allocate_struct_function (NULL, false);
175 VEC_safe_push (function_p, heap, function_context_stack, cfun);
176 set_cfun (NULL);
179 /* Restore the last saved context, at the end of a nested function.
180 This function is called from language-specific code. */
182 void
183 pop_function_context (void)
185 struct function *p = VEC_pop (function_p, function_context_stack);
186 set_cfun (p);
187 current_function_decl = p->decl;
189 /* Reset variables that have known state during rtx generation. */
190 virtuals_instantiated = 0;
191 generating_concat_p = 1;
194 /* Clear out all parts of the state in F that can safely be discarded
195 after the function has been parsed, but not compiled, to let
196 garbage collection reclaim the memory. */
198 void
199 free_after_parsing (struct function *f)
201 f->language = 0;
204 /* Clear out all parts of the state in F that can safely be discarded
205 after the function has been compiled, to let garbage collection
206 reclaim the memory. */
208 void
209 free_after_compilation (struct function *f)
211 prologue_insn_hash = NULL;
212 epilogue_insn_hash = NULL;
214 free (crtl->emit.regno_pointer_align);
216 memset (crtl, 0, sizeof (struct rtl_data));
217 f->eh = NULL;
218 f->machine = NULL;
219 f->cfg = NULL;
221 regno_reg_rtx = NULL;
222 insn_locators_free ();
225 /* Return size needed for stack frame based on slots so far allocated.
226 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
227 the caller may have to do that. */
229 HOST_WIDE_INT
230 get_frame_size (void)
232 if (FRAME_GROWS_DOWNWARD)
233 return -frame_offset;
234 else
235 return frame_offset;
238 /* Issue an error message and return TRUE if frame OFFSET overflows in
 239    the signed target pointer arithmetic for function FUNC.  Otherwise
240 return FALSE. */
242 bool
243 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
245 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
247 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
248 /* Leave room for the fixed part of the frame. */
249 - 64 * UNITS_PER_WORD)
251 error_at (DECL_SOURCE_LOCATION (func),
252 "total size of local objects too large");
253 return TRUE;
256 return FALSE;
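/* As a concrete illustration (assuming a 32-bit Pmode and a 4-byte
   UNITS_PER_WORD), the check above rejects any frame larger than
   2^31 - 64*4 = 2147483392 bytes, keeping 256 bytes of headroom
   for the fixed part of the frame.  */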
259 /* Return stack slot alignment in bits for TYPE and MODE. */
261 static unsigned int
262 get_stack_local_alignment (tree type, enum machine_mode mode)
264 unsigned int alignment;
266 if (mode == BLKmode)
267 alignment = BIGGEST_ALIGNMENT;
268 else
269 alignment = GET_MODE_ALIGNMENT (mode);
 271   /* Allow the front-end to (possibly) increase the alignment of this
272 stack slot. */
273 if (! type)
274 type = lang_hooks.types.type_for_mode (mode, 0);
276 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
279 /* Determine whether it is possible to fit a stack slot of size SIZE and
280 alignment ALIGNMENT into an area in the stack frame that starts at
281 frame offset START and has a length of LENGTH. If so, store the frame
282 offset to be used for the stack slot in *POFFSET and return true;
283 return false otherwise. This function will extend the frame size when
284 given a start/length pair that lies at the end of the frame. */
286 static bool
287 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
288 HOST_WIDE_INT size, unsigned int alignment,
289 HOST_WIDE_INT *poffset)
291 HOST_WIDE_INT this_frame_offset;
292 int frame_off, frame_alignment, frame_phase;
294 /* Calculate how many bytes the start of local variables is off from
295 stack alignment. */
296 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
297 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
298 frame_phase = frame_off ? frame_alignment - frame_off : 0;
300 /* Round the frame offset to the specified alignment. */
302 /* We must be careful here, since FRAME_OFFSET might be negative and
303 division with a negative dividend isn't as well defined as we might
304 like. So we instead assume that ALIGNMENT is a power of two and
305 use logical operations which are unambiguous. */
306 if (FRAME_GROWS_DOWNWARD)
307 this_frame_offset
308 = (FLOOR_ROUND (start + length - size - frame_phase,
309 (unsigned HOST_WIDE_INT) alignment)
310 + frame_phase);
311 else
312 this_frame_offset
313 = (CEIL_ROUND (start - frame_phase,
314 (unsigned HOST_WIDE_INT) alignment)
315 + frame_phase);
317 /* See if it fits. If this space is at the edge of the frame,
318 consider extending the frame to make it fit. Our caller relies on
319 this when allocating a new slot. */
320 if (frame_offset == start && this_frame_offset < frame_offset)
321 frame_offset = this_frame_offset;
322 else if (this_frame_offset < start)
323 return false;
324 else if (start + length == frame_offset
325 && this_frame_offset + size > start + length)
326 frame_offset = this_frame_offset + size;
327 else if (this_frame_offset + size > start + length)
328 return false;
330 *poffset = this_frame_offset;
331 return true;
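/* Worked example (illustrative, FRAME_GROWS_DOWNWARD with a zero
   frame_phase): for a recorded free region with START == -32 and
   LENGTH == 32, a request with SIZE == 12 and ALIGNMENT == 8 yields
     FLOOR_ROUND (-32 + 32 - 12, 8) == -16,
   which lies within [-32, 0), so *POFFSET becomes -16 and the
   caller can return [-32, -16) and [-4, 0) to the free list.  */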
334 /* Create a new frame_space structure describing free space in the stack
335 frame beginning at START and ending at END, and chain it into the
336 function's frame_space_list. */
338 static void
339 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
341 struct frame_space *space = ggc_alloc_frame_space ();
342 space->next = crtl->frame_space_list;
343 crtl->frame_space_list = space;
344 space->start = start;
345 space->length = end - start;
348 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
349 with machine mode MODE.
351 ALIGN controls the amount of alignment for the address of the slot:
352 0 means according to MODE,
353 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
354 -2 means use BITS_PER_UNIT,
355 positive specifies alignment boundary in bits.
357 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
358 alignment and ASLK_RECORD_PAD bit set if we should remember
359 extra space we allocated for alignment purposes. When we are
360 called from assign_stack_temp_for_type, it is not set so we don't
361 track the same stack slot in two independent lists.
363 We do not round to stack_boundary here. */
 365 rtx
 366 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
367 int align, int kind)
369 rtx x, addr;
370 int bigend_correction = 0;
371 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
372 unsigned int alignment, alignment_in_bits;
374 if (align == 0)
376 alignment = get_stack_local_alignment (NULL, mode);
377 alignment /= BITS_PER_UNIT;
379 else if (align == -1)
381 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
382 size = CEIL_ROUND (size, alignment);
384 else if (align == -2)
385 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
386 else
387 alignment = align / BITS_PER_UNIT;
389 alignment_in_bits = alignment * BITS_PER_UNIT;
391 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
392 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
394 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
395 alignment = alignment_in_bits / BITS_PER_UNIT;
398 if (SUPPORTS_STACK_ALIGNMENT)
400 if (crtl->stack_alignment_estimated < alignment_in_bits)
402 if (!crtl->stack_realign_processed)
403 crtl->stack_alignment_estimated = alignment_in_bits;
404 else
406 /* If stack is realigned and stack alignment value
407 hasn't been finalized, it is OK not to increase
408 stack_alignment_estimated. The bigger alignment
409 requirement is recorded in stack_alignment_needed
410 below. */
411 gcc_assert (!crtl->stack_realign_finalized);
412 if (!crtl->stack_realign_needed)
414 /* It is OK to reduce the alignment as long as the
415 requested size is 0 or the estimated stack
416 alignment >= mode alignment. */
417 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
418 || size == 0
419 || (crtl->stack_alignment_estimated
420 >= GET_MODE_ALIGNMENT (mode)));
421 alignment_in_bits = crtl->stack_alignment_estimated;
422 alignment = alignment_in_bits / BITS_PER_UNIT;
428 if (crtl->stack_alignment_needed < alignment_in_bits)
429 crtl->stack_alignment_needed = alignment_in_bits;
430 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
431 crtl->max_used_stack_slot_alignment = alignment_in_bits;
433 if (mode != BLKmode || size != 0)
435 if (kind & ASLK_RECORD_PAD)
437 struct frame_space **psp;
439 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
441 struct frame_space *space = *psp;
442 if (!try_fit_stack_local (space->start, space->length, size,
443 alignment, &slot_offset))
444 continue;
445 *psp = space->next;
446 if (slot_offset > space->start)
447 add_frame_space (space->start, slot_offset);
448 if (slot_offset + size < space->start + space->length)
449 add_frame_space (slot_offset + size,
450 space->start + space->length);
451 goto found_space;
455 else if (!STACK_ALIGNMENT_NEEDED)
457 slot_offset = frame_offset;
458 goto found_space;
461 old_frame_offset = frame_offset;
463 if (FRAME_GROWS_DOWNWARD)
465 frame_offset -= size;
466 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
468 if (kind & ASLK_RECORD_PAD)
470 if (slot_offset > frame_offset)
471 add_frame_space (frame_offset, slot_offset);
472 if (slot_offset + size < old_frame_offset)
473 add_frame_space (slot_offset + size, old_frame_offset);
476 else
478 frame_offset += size;
479 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
481 if (kind & ASLK_RECORD_PAD)
483 if (slot_offset > old_frame_offset)
484 add_frame_space (old_frame_offset, slot_offset);
485 if (slot_offset + size < frame_offset)
486 add_frame_space (slot_offset + size, frame_offset);
490 found_space:
491 /* On a big-endian machine, if we are allocating more space than we will use,
492 use the least significant bytes of those that are allocated. */
493 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
494 bigend_correction = size - GET_MODE_SIZE (mode);
496 /* If we have already instantiated virtual registers, return the actual
497 address relative to the frame pointer. */
498 if (virtuals_instantiated)
499 addr = plus_constant (frame_pointer_rtx,
500 trunc_int_for_mode
501 (slot_offset + bigend_correction
502 + STARTING_FRAME_OFFSET, Pmode));
503 else
504 addr = plus_constant (virtual_stack_vars_rtx,
505 trunc_int_for_mode
506 (slot_offset + bigend_correction,
507 Pmode));
509 x = gen_rtx_MEM (mode, addr);
510 set_mem_align (x, alignment_in_bits);
511 MEM_NOTRAP_P (x) = 1;
513 stack_slot_list
514 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
516 if (frame_offset_overflow (frame_offset, current_function_decl))
517 frame_offset = 0;
519 return x;
 522 /* Wrap up assign_stack_local_1 with KIND fixed at ASLK_RECORD_PAD.  */
 524 rtx
 525 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
527 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
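/* Typical requests, for illustration of the ALIGN convention
   (variable names are hypothetical):
     assign_stack_local (SImode, 4, 0);      aligned as SImode requires
     assign_stack_local (BLKmode, len, -1);  BIGGEST_ALIGNMENT, LEN
                                             rounded up to a multiple
     assign_stack_local (BLKmode, len, -2);  byte alignment, no padding  */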
531 /* In order to evaluate some expressions, such as function calls returning
532 structures in memory, we need to temporarily allocate stack locations.
533 We record each allocated temporary in the following structure.
535 Associated with each temporary slot is a nesting level. When we pop up
536 one level, all temporaries associated with the previous level are freed.
537 Normally, all temporaries are freed after the execution of the statement
538 in which they were created. However, if we are inside a ({...}) grouping,
539 the result may be in a temporary and hence must be preserved. If the
540 result could be in a temporary, we preserve it if we can determine which
541 one it is in. If we cannot determine which temporary may contain the
542 result, all temporaries are preserved. A temporary is preserved by
543 pretending it was allocated at the previous nesting level.
545 Automatic variables are also assigned temporary slots, at the nesting
 546    level where they are defined.  They are marked as "kept" so that
547 free_temp_slots will not free them. */
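/* A sketch of the intended discipline (illustrative only; SLOT and
   RESULT are hypothetical):

     push_temp_slots ();
     slot = assign_stack_temp (DImode, 8, 0);
     ... expand one statement using SLOT ...
     preserve_temp_slots (result);   - if RESULT may live in a temp
     pop_temp_slots ();              - frees the rest of this level

   preserve_temp_slots moves a slot to the enclosing level, so the
   following pop_temp_slots leaves it alive.  */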
549 struct GTY(()) temp_slot {
550 /* Points to next temporary slot. */
551 struct temp_slot *next;
552 /* Points to previous temporary slot. */
553 struct temp_slot *prev;
 554   /* The rtx used to reference the slot.  */
555 rtx slot;
556 /* The size, in units, of the slot. */
557 HOST_WIDE_INT size;
558 /* The type of the object in the slot, or zero if it doesn't correspond
559 to a type. We use this to determine whether a slot can be reused.
560 It can be reused if objects of the type of the new slot will always
561 conflict with objects of the type of the old slot. */
562 tree type;
563 /* The alignment (in bits) of the slot. */
564 unsigned int align;
565 /* Nonzero if this temporary is currently in use. */
566 char in_use;
567 /* Nonzero if this temporary has its address taken. */
568 char addr_taken;
569 /* Nesting level at which this slot is being used. */
570 int level;
571 /* Nonzero if this should survive a call to free_temp_slots. */
572 int keep;
573 /* The offset of the slot from the frame_pointer, including extra space
574 for alignment. This info is for combine_temp_slots. */
575 HOST_WIDE_INT base_offset;
576 /* The size of the slot, including extra space for alignment. This
577 info is for combine_temp_slots. */
578 HOST_WIDE_INT full_size;
581 /* A table of addresses that represent a stack slot. The table is a mapping
582 from address RTXen to a temp slot. */
583 static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
585 /* Entry for the above hash table. */
586 struct GTY(()) temp_slot_address_entry {
587 hashval_t hash;
588 rtx address;
589 struct temp_slot *temp_slot;
592 /* Removes temporary slot TEMP from LIST. */
594 static void
595 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
597 if (temp->next)
598 temp->next->prev = temp->prev;
599 if (temp->prev)
600 temp->prev->next = temp->next;
601 else
602 *list = temp->next;
604 temp->prev = temp->next = NULL;
607 /* Inserts temporary slot TEMP to LIST. */
609 static void
610 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
612 temp->next = *list;
613 if (*list)
614 (*list)->prev = temp;
615 temp->prev = NULL;
616 *list = temp;
619 /* Returns the list of used temp slots at LEVEL. */
621 static struct temp_slot **
622 temp_slots_at_level (int level)
624 if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
625 VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);
627 return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
630 /* Returns the maximal temporary slot level. */
632 static int
633 max_slot_level (void)
635 if (!used_temp_slots)
636 return -1;
638 return VEC_length (temp_slot_p, used_temp_slots) - 1;
641 /* Moves temporary slot TEMP to LEVEL. */
643 static void
644 move_slot_to_level (struct temp_slot *temp, int level)
646 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
647 insert_slot_to_list (temp, temp_slots_at_level (level));
648 temp->level = level;
651 /* Make temporary slot TEMP available. */
653 static void
654 make_slot_available (struct temp_slot *temp)
656 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
657 insert_slot_to_list (temp, &avail_temp_slots);
658 temp->in_use = 0;
659 temp->level = -1;
662 /* Compute the hash value for an address -> temp slot mapping.
663 The value is cached on the mapping entry. */
664 static hashval_t
665 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
667 int do_not_record = 0;
668 return hash_rtx (t->address, GET_MODE (t->address),
669 &do_not_record, NULL, false);
672 /* Return the hash value for an address -> temp slot mapping. */
673 static hashval_t
674 temp_slot_address_hash (const void *p)
676 const struct temp_slot_address_entry *t;
677 t = (const struct temp_slot_address_entry *) p;
678 return t->hash;
681 /* Compare two address -> temp slot mapping entries. */
682 static int
683 temp_slot_address_eq (const void *p1, const void *p2)
685 const struct temp_slot_address_entry *t1, *t2;
686 t1 = (const struct temp_slot_address_entry *) p1;
687 t2 = (const struct temp_slot_address_entry *) p2;
688 return exp_equiv_p (t1->address, t2->address, 0, true);
 691 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
692 static void
693 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
695 void **slot;
696 struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
697 t->address = address;
698 t->temp_slot = temp_slot;
699 t->hash = temp_slot_address_compute_hash (t);
700 slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
701 *slot = t;
704 /* Remove an address -> temp slot mapping entry if the temp slot is
705 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
706 static int
707 remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
709 const struct temp_slot_address_entry *t;
710 t = (const struct temp_slot_address_entry *) *slot;
711 if (! t->temp_slot->in_use)
712 *slot = NULL;
713 return 1;
716 /* Remove all mappings of addresses to unused temp slots. */
717 static void
718 remove_unused_temp_slot_addresses (void)
720 htab_traverse (temp_slot_address_table,
721 remove_unused_temp_slot_addresses_1,
722 NULL);
725 /* Find the temp slot corresponding to the object at address X. */
727 static struct temp_slot *
728 find_temp_slot_from_address (rtx x)
730 struct temp_slot *p;
731 struct temp_slot_address_entry tmp, *t;
733 /* First try the easy way:
734 See if X exists in the address -> temp slot mapping. */
735 tmp.address = x;
736 tmp.temp_slot = NULL;
737 tmp.hash = temp_slot_address_compute_hash (&tmp);
738 t = (struct temp_slot_address_entry *)
739 htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
740 if (t)
741 return t->temp_slot;
743 /* If we have a sum involving a register, see if it points to a temp
744 slot. */
745 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
746 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
747 return p;
748 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
749 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
750 return p;
752 /* Last resort: Address is a virtual stack var address. */
753 if (GET_CODE (x) == PLUS
754 && XEXP (x, 0) == virtual_stack_vars_rtx
755 && CONST_INT_P (XEXP (x, 1)))
757 int i;
758 for (i = max_slot_level (); i >= 0; i--)
759 for (p = *temp_slots_at_level (i); p; p = p->next)
761 if (INTVAL (XEXP (x, 1)) >= p->base_offset
762 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
763 return p;
767 return NULL;
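/* For instance (illustrative), a slot recorded at
   (plus virtual_stack_vars_rtx (const_int -16)) with full_size 16
   is also found for the derived address
   (plus virtual_stack_vars_rtx (const_int -8)), since -8 falls in
   [base_offset, base_offset + full_size) == [-16, 0).  */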
770 /* Allocate a temporary stack slot and record it for possible later
771 reuse.
773 MODE is the machine mode to be given to the returned rtx.
775 SIZE is the size in units of the space required. We do no rounding here
776 since assign_stack_local will do any required rounding.
778 KEEP is 1 if this slot is to be retained after a call to
779 free_temp_slots. Automatic variables for a block are allocated
780 with this flag. KEEP values of 2 or 3 were needed respectively
781 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
782 or for SAVE_EXPRs, but they are now unused.
784 TYPE is the type that will be used for the stack slot. */
 786 rtx
 787 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
788 int keep, tree type)
790 unsigned int align;
791 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
792 rtx slot;
794 /* If SIZE is -1 it means that somebody tried to allocate a temporary
795 of a variable size. */
796 gcc_assert (size != -1);
798 /* These are now unused. */
799 gcc_assert (keep <= 1);
801 align = get_stack_local_alignment (type, mode);
803 /* Try to find an available, already-allocated temporary of the proper
804 mode which meets the size and alignment requirements. Choose the
805 smallest one with the closest alignment.
807 If assign_stack_temp is called outside of the tree->rtl expansion,
808 we cannot reuse the stack slots (that may still refer to
809 VIRTUAL_STACK_VARS_REGNUM). */
810 if (!virtuals_instantiated)
812 for (p = avail_temp_slots; p; p = p->next)
814 if (p->align >= align && p->size >= size
815 && GET_MODE (p->slot) == mode
816 && objects_must_conflict_p (p->type, type)
817 && (best_p == 0 || best_p->size > p->size
818 || (best_p->size == p->size && best_p->align > p->align)))
820 if (p->align == align && p->size == size)
822 selected = p;
823 cut_slot_from_list (selected, &avail_temp_slots);
824 best_p = 0;
825 break;
827 best_p = p;
832 /* Make our best, if any, the one to use. */
833 if (best_p)
835 selected = best_p;
836 cut_slot_from_list (selected, &avail_temp_slots);
838 /* If there are enough aligned bytes left over, make them into a new
839 temp_slot so that the extra bytes don't get wasted. Do this only
840 for BLKmode slots, so that we can be sure of the alignment. */
841 if (GET_MODE (best_p->slot) == BLKmode)
843 int alignment = best_p->align / BITS_PER_UNIT;
844 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
846 if (best_p->size - rounded_size >= alignment)
848 p = ggc_alloc_temp_slot ();
849 p->in_use = p->addr_taken = 0;
850 p->size = best_p->size - rounded_size;
851 p->base_offset = best_p->base_offset + rounded_size;
852 p->full_size = best_p->full_size - rounded_size;
853 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
854 p->align = best_p->align;
855 p->type = best_p->type;
856 insert_slot_to_list (p, &avail_temp_slots);
858 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
859 stack_slot_list);
861 best_p->size = rounded_size;
862 best_p->full_size = rounded_size;
867 /* If we still didn't find one, make a new temporary. */
868 if (selected == 0)
870 HOST_WIDE_INT frame_offset_old = frame_offset;
872 p = ggc_alloc_temp_slot ();
874 /* We are passing an explicit alignment request to assign_stack_local.
875 One side effect of that is assign_stack_local will not round SIZE
876 to ensure the frame offset remains suitably aligned.
878 So for requests which depended on the rounding of SIZE, we go ahead
879 and round it now. We also make sure ALIGNMENT is at least
880 BIGGEST_ALIGNMENT. */
881 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
882 p->slot = assign_stack_local_1 (mode,
883 (mode == BLKmode
884 ? CEIL_ROUND (size,
885 (int) align
886 / BITS_PER_UNIT)
887 : size),
888 align, 0);
890 p->align = align;
892 /* The following slot size computation is necessary because we don't
893 know the actual size of the temporary slot until assign_stack_local
894 has performed all the frame alignment and size rounding for the
895 requested temporary. Note that extra space added for alignment
896 can be either above or below this stack slot depending on which
897 way the frame grows. We include the extra space if and only if it
898 is above this slot. */
899 if (FRAME_GROWS_DOWNWARD)
900 p->size = frame_offset_old - frame_offset;
901 else
902 p->size = size;
904 /* Now define the fields used by combine_temp_slots. */
905 if (FRAME_GROWS_DOWNWARD)
907 p->base_offset = frame_offset;
908 p->full_size = frame_offset_old - frame_offset;
910 else
912 p->base_offset = frame_offset_old;
913 p->full_size = frame_offset - frame_offset_old;
916 selected = p;
919 p = selected;
920 p->in_use = 1;
921 p->addr_taken = 0;
922 p->type = type;
923 p->level = temp_slot_level;
924 p->keep = keep;
926 pp = temp_slots_at_level (p->level);
927 insert_slot_to_list (p, pp);
928 insert_temp_slot_address (XEXP (p->slot, 0), p);
930 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
931 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
932 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
934 /* If we know the alias set for the memory that will be used, use
935 it. If there's no TYPE, then we don't know anything about the
936 alias set for the memory. */
937 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
938 set_mem_align (slot, align);
940 /* If a type is specified, set the relevant flags. */
941 if (type != 0)
943 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
944 gcc_checking_assert (!MEM_SCALAR_P (slot) && !MEM_IN_STRUCT_P (slot));
945 if (AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE)
946 MEM_IN_STRUCT_P (slot) = 1;
947 else
948 MEM_SCALAR_P (slot) = 1;
950 MEM_NOTRAP_P (slot) = 1;
952 return slot;
955 /* Allocate a temporary stack slot and record it for possible later
956 reuse. First three arguments are same as in preceding function. */
 958 rtx
 959 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
961 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
964 /* Assign a temporary.
965 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
 966    and so that should be used in error messages.  In either case, we
 967    allocate a temporary of the given type.
968 KEEP is as for assign_stack_temp.
969 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
970 it is 0 if a register is OK.
971 DONT_PROMOTE is 1 if we should not promote values in register
972 to wider modes. */
 974 rtx
 975 assign_temp (tree type_or_decl, int keep, int memory_required,
976 int dont_promote ATTRIBUTE_UNUSED)
978 tree type, decl;
979 enum machine_mode mode;
980 #ifdef PROMOTE_MODE
981 int unsignedp;
982 #endif
984 if (DECL_P (type_or_decl))
985 decl = type_or_decl, type = TREE_TYPE (decl);
986 else
987 decl = NULL, type = type_or_decl;
989 mode = TYPE_MODE (type);
990 #ifdef PROMOTE_MODE
991 unsignedp = TYPE_UNSIGNED (type);
992 #endif
994 if (mode == BLKmode || memory_required)
996 HOST_WIDE_INT size = int_size_in_bytes (type);
997 rtx tmp;
 999       /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
1000 problems with allocating the stack space. */
1001 if (size == 0)
1002 size = 1;
1004 /* Unfortunately, we don't yet know how to allocate variable-sized
1005 temporaries. However, sometimes we can find a fixed upper limit on
1006 the size, so try that instead. */
1007 else if (size == -1)
1008 size = max_int_size_in_bytes (type);
1010 /* The size of the temporary may be too large to fit into an integer. */
1011 /* ??? Not sure this should happen except for user silliness, so limit
1012 this to things that aren't compiler-generated temporaries. The
1013 rest of the time we'll die in assign_stack_temp_for_type. */
1014 if (decl && size == -1
1015 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
1017 error ("size of variable %q+D is too large", decl);
1018 size = 1;
1021 tmp = assign_stack_temp_for_type (mode, size, keep, type);
1022 return tmp;
1025 #ifdef PROMOTE_MODE
1026 if (! dont_promote)
1027 mode = promote_mode (type, mode, &unsignedp);
1028 #endif
1030 return gen_reg_rtx (mode);
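/* For example (illustrative), expanding a call that returns a
   structure in memory typically does
     target = assign_temp (rettype, 0, 1, 1);
   (TARGET and RETTYPE are hypothetical names), where
   MEMORY_REQUIRED == 1 forces a stack slot even when the type's
   mode would otherwise fit in a register.  */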
1033 /* Combine temporary stack slots which are adjacent on the stack.
1035 This allows for better use of already allocated stack space. This is only
1036 done for BLKmode slots because we can be sure that we won't have alignment
1037 problems in this case. */
1039 static void
1040 combine_temp_slots (void)
1042 struct temp_slot *p, *q, *next, *next_q;
1043 int num_slots;
1045 /* We can't combine slots, because the information about which slot
1046 is in which alias set will be lost. */
1047 if (flag_strict_aliasing)
1048 return;
1050 /* If there are a lot of temp slots, don't do anything unless
1051 high levels of optimization. */
1052 if (! flag_expensive_optimizations)
1053 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1054 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1055 return;
1057 for (p = avail_temp_slots; p; p = next)
1059 int delete_p = 0;
1061 next = p->next;
1063 if (GET_MODE (p->slot) != BLKmode)
1064 continue;
1066 for (q = p->next; q; q = next_q)
1068 int delete_q = 0;
1070 next_q = q->next;
1072 if (GET_MODE (q->slot) != BLKmode)
1073 continue;
1075 if (p->base_offset + p->full_size == q->base_offset)
1077 /* Q comes after P; combine Q into P. */
1078 p->size += q->size;
1079 p->full_size += q->full_size;
1080 delete_q = 1;
1082 else if (q->base_offset + q->full_size == p->base_offset)
1084 /* P comes after Q; combine P into Q. */
1085 q->size += p->size;
1086 q->full_size += p->full_size;
1087 delete_p = 1;
1088 break;
1090 if (delete_q)
1091 cut_slot_from_list (q, &avail_temp_slots);
1094 /* Either delete P or advance past it. */
1095 if (delete_p)
1096 cut_slot_from_list (p, &avail_temp_slots);
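/* Worked example (illustrative): two free BLKmode slots with
   (base_offset, full_size) pairs (-32, 16) and (-16, 16) satisfy
   p->base_offset + p->full_size == q->base_offset, so they merge
   into a single 32-byte slot at -32 that can satisfy a later,
   larger request.  */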
1100 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1101 slot that previously was known by OLD_RTX. */
1103 void
1104 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1106 struct temp_slot *p;
1108 if (rtx_equal_p (old_rtx, new_rtx))
1109 return;
1111 p = find_temp_slot_from_address (old_rtx);
 1113   /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
1114 NEW_RTX is a register, see if one operand of the PLUS is a
1115 temporary location. If so, NEW_RTX points into it. Otherwise,
1116 if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
1117 in common between them. If so, try a recursive call on those
1118 values. */
1119 if (p == 0)
1121 if (GET_CODE (old_rtx) != PLUS)
1122 return;
1124 if (REG_P (new_rtx))
1126 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1127 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1128 return;
1130 else if (GET_CODE (new_rtx) != PLUS)
1131 return;
1133 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1134 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1135 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1136 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1137 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1138 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1139 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1140 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1142 return;
1145 /* Otherwise add an alias for the temp's address. */
1146 insert_temp_slot_address (new_rtx, p);
1149 /* If X could be a reference to a temporary slot, mark the fact that its
1150 address was taken. */
1152 void
1153 mark_temp_addr_taken (rtx x)
1155 struct temp_slot *p;
1157 if (x == 0)
1158 return;
1160 /* If X is not in memory or is at a constant address, it cannot be in
1161 a temporary slot. */
1162 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1163 return;
1165 p = find_temp_slot_from_address (XEXP (x, 0));
1166 if (p != 0)
1167 p->addr_taken = 1;
1170 /* If X could be a reference to a temporary slot, mark that slot as
 1171    belonging to the level one higher than the current level.  If X
1172 matched one of our slots, just mark that one. Otherwise, we can't
1173 easily predict which it is, so upgrade all of them. Kept slots
1174 need not be touched.
1176 This is called when an ({...}) construct occurs and a statement
1177 returns a value in memory. */
1179 void
1180 preserve_temp_slots (rtx x)
1182 struct temp_slot *p = 0, *next;
1184 /* If there is no result, we still might have some objects whose address
1185 were taken, so we need to make sure they stay around. */
1186 if (x == 0)
1188 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1190 next = p->next;
1192 if (p->addr_taken)
1193 move_slot_to_level (p, temp_slot_level - 1);
1196 return;
1199 /* If X is a register that is being used as a pointer, see if we have
1200 a temporary slot we know it points to. To be consistent with
1201 the code below, we really should preserve all non-kept slots
1202 if we can't find a match, but that seems to be much too costly. */
1203 if (REG_P (x) && REG_POINTER (x))
1204 p = find_temp_slot_from_address (x);
1206 /* If X is not in memory or is at a constant address, it cannot be in
1207 a temporary slot, but it can contain something whose address was
1208 taken. */
1209 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1211 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1213 next = p->next;
1215 if (p->addr_taken)
1216 move_slot_to_level (p, temp_slot_level - 1);
1219 return;
1222 /* First see if we can find a match. */
1223 if (p == 0)
1224 p = find_temp_slot_from_address (XEXP (x, 0));
1226 if (p != 0)
1228 /* Move everything at our level whose address was taken to our new
1229 level in case we used its address. */
1230 struct temp_slot *q;
1232 if (p->level == temp_slot_level)
1234 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1236 next = q->next;
1238 if (p != q && q->addr_taken)
1239 move_slot_to_level (q, temp_slot_level - 1);
1242 move_slot_to_level (p, temp_slot_level - 1);
1243 p->addr_taken = 0;
1245 return;
1248 /* Otherwise, preserve all non-kept slots at this level. */
1249 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1251 next = p->next;
1253 if (!p->keep)
1254 move_slot_to_level (p, temp_slot_level - 1);
1258 /* Free all temporaries used so far. This is normally called at the
1259 end of generating code for a statement. */
1261 void
1262 free_temp_slots (void)
1264 struct temp_slot *p, *next;
1265 bool some_available = false;
1267 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1269 next = p->next;
1271 if (!p->keep)
1273 make_slot_available (p);
1274 some_available = true;
1278 if (some_available)
1280 remove_unused_temp_slot_addresses ();
1281 combine_temp_slots ();
1285 /* Push deeper into the nesting level for stack temporaries. */
1287 void
1288 push_temp_slots (void)
1290 temp_slot_level++;
1293 /* Pop a temporary nesting level. All slots in use in the current level
1294 are freed. */
1296 void
1297 pop_temp_slots (void)
1299 struct temp_slot *p, *next;
1300 bool some_available = false;
1302 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1304 next = p->next;
1305 make_slot_available (p);
1306 some_available = true;
1309 if (some_available)
1311 remove_unused_temp_slot_addresses ();
1312 combine_temp_slots ();
1315 temp_slot_level--;
1318 /* Initialize temporary slots. */
1320 void
1321 init_temp_slots (void)
1323 /* We have not allocated any temporaries yet. */
1324 avail_temp_slots = 0;
1325 used_temp_slots = 0;
1326 temp_slot_level = 0;
1328 /* Set up the table to map addresses to temp slots. */
1329 if (! temp_slot_address_table)
1330 temp_slot_address_table = htab_create_ggc (32,
1331 temp_slot_address_hash,
1332 temp_slot_address_eq,
1333 NULL);
1334 else
1335 htab_empty (temp_slot_address_table);
1338 /* These routines are responsible for converting virtual register references
1339 to the actual hard register references once RTL generation is complete.
1341 The following four variables are used for communication between the
1342 routines. They contain the offsets of the virtual registers from their
1343 respective hard registers. */
1345 static int in_arg_offset;
1346 static int var_offset;
1347 static int dynamic_offset;
1348 static int out_arg_offset;
1349 static int cfa_offset;
1351 /* In most machines, the stack pointer register is equivalent to the bottom
1352 of the stack. */
1354 #ifndef STACK_POINTER_OFFSET
1355 #define STACK_POINTER_OFFSET 0
1356 #endif
1358 /* If not defined, pick an appropriate default for the offset of dynamically
1359 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1360 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1362 #ifndef STACK_DYNAMIC_OFFSET
1364 /* The bottom of the stack points to the actual arguments. If
1365 REG_PARM_STACK_SPACE is defined, this includes the space for the register
 1366    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1367 stack space for register parameters is not pushed by the caller, but
1368 rather part of the fixed stack areas and hence not included in
1369 `crtl->outgoing_args_size'. Nevertheless, we must allow
1370 for it when allocating stack dynamic objects. */
1372 #if defined(REG_PARM_STACK_SPACE)
1373 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1374 ((ACCUMULATE_OUTGOING_ARGS \
1375 ? (crtl->outgoing_args_size \
1376 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1377 : REG_PARM_STACK_SPACE (FNDECL))) \
1378 : 0) + (STACK_POINTER_OFFSET))
1379 #else
1380 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1381 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1382 + (STACK_POINTER_OFFSET))
1383 #endif
1384 #endif
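/* Illustration: on a target that accumulates outgoing arguments,
   with crtl->outgoing_args_size == 48 and STACK_POINTER_OFFSET == 0,
   the macro evaluates to 48, so dynamic allocations start just
   past the outgoing-argument block.  */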
1387 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1388 is a virtual register, return the equivalent hard register and set the
1389 offset indirectly through the pointer. Otherwise, return 0. */
1391 static rtx
1392 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1394 rtx new_rtx;
1395 HOST_WIDE_INT offset;
1397 if (x == virtual_incoming_args_rtx)
1399 if (stack_realign_drap)
1401 /* Replace virtual_incoming_args_rtx with internal arg
1402 pointer if DRAP is used to realign stack. */
1403 new_rtx = crtl->args.internal_arg_pointer;
1404 offset = 0;
1406 else
1407 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1409 else if (x == virtual_stack_vars_rtx)
1410 new_rtx = frame_pointer_rtx, offset = var_offset;
1411 else if (x == virtual_stack_dynamic_rtx)
1412 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1413 else if (x == virtual_outgoing_args_rtx)
1414 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1415 else if (x == virtual_cfa_rtx)
1417 #ifdef FRAME_POINTER_CFA_OFFSET
1418 new_rtx = frame_pointer_rtx;
1419 #else
1420 new_rtx = arg_pointer_rtx;
1421 #endif
1422 offset = cfa_offset;
1424 else if (x == virtual_preferred_stack_boundary_rtx)
1426 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1427 offset = 0;
1429 else
1430 return NULL_RTX;
1432 *poffset = offset;
1433 return new_rtx;
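/* Example (illustrative): asked about virtual_stack_vars_rtx, this
   returns frame_pointer_rtx and sets *POFFSET to var_offset, i.e.
   STARTING_FRAME_OFFSET, so a reference such as
     (plus virtual_stack_vars_rtx (const_int -4))
   will be rewritten relative to frame_pointer_rtx.  */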
1436 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1437 Instantiate any virtual registers present inside of *LOC. The expression
1438 is simplified, as much as possible, but is not to be considered "valid"
1439 in any sense implied by the target. If any change is made, set CHANGED
1440 to true. */
1442 static int
1443 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1445 HOST_WIDE_INT offset;
1446 bool *changed = (bool *) data;
1447 rtx x, new_rtx;
1449 x = *loc;
1450 if (x == 0)
1451 return 0;
1453 switch (GET_CODE (x))
1455 case REG:
1456 new_rtx = instantiate_new_reg (x, &offset);
1457 if (new_rtx)
1459 *loc = plus_constant (new_rtx, offset);
1460 if (changed)
1461 *changed = true;
1463 return -1;
1465 case PLUS:
1466 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1467 if (new_rtx)
1469 new_rtx = plus_constant (new_rtx, offset);
1470 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
1471 if (changed)
1472 *changed = true;
1473 return -1;
1476 /* FIXME -- from old code */
1477 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1478 we can commute the PLUS and SUBREG because pointers into the
1479 frame are well-behaved. */
1480 break;
1482 default:
1483 break;
1486 return 0;
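/* Concretely (illustrative, assuming var_offset == 0): the PLUS
   case above rewrites
     (mem (plus (reg virtual-stack-vars) (const_int -4)))
   into
     (mem (plus (reg frame-pointer) (const_int -4))),
   with simplify_gen_binary folding the two constants together.  */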
1489 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1490 matches the predicate for insn CODE operand OPERAND. */
1492 static int
1493 safe_insn_predicate (int code, int operand, rtx x)
1495 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1498 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1499 registers present inside of insn. The result will be a valid insn. */
1501 static void
1502 instantiate_virtual_regs_in_insn (rtx insn)
1504 HOST_WIDE_INT offset;
1505 int insn_code, i;
1506 bool any_change = false;
1507 rtx set, new_rtx, x, seq;
1509 /* There are some special cases to be handled first. */
1510 set = single_set (insn);
1511 if (set)
1513 /* We're allowed to assign to a virtual register. This is interpreted
1514 to mean that the underlying register gets assigned the inverse
1515 transformation. This is used, for example, in the handling of
1516 non-local gotos. */
1517 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1518 if (new_rtx)
1520 start_sequence ();
1522 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1523 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1524 GEN_INT (-offset));
1525 x = force_operand (x, new_rtx);
1526 if (x != new_rtx)
1527 emit_move_insn (new_rtx, x);
1529 seq = get_insns ();
1530 end_sequence ();
1532 emit_insn_before (seq, insn);
1533 delete_insn (insn);
1534 return;
1537 /* Handle a straight copy from a virtual register by generating a
1538 new add insn. The difference between this and falling through
1539 to the generic case is avoiding a new pseudo and eliminating a
1540 move insn in the initial rtl stream. */
1541 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1542 if (new_rtx && offset != 0
1543 && REG_P (SET_DEST (set))
1544 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1546 start_sequence ();
1548 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1549 new_rtx, GEN_INT (offset), SET_DEST (set),
1550 1, OPTAB_LIB_WIDEN);
1551 if (x != SET_DEST (set))
1552 emit_move_insn (SET_DEST (set), x);
1554 seq = get_insns ();
1555 end_sequence ();
1557 emit_insn_before (seq, insn);
1558 delete_insn (insn);
1559 return;
1562 extract_insn (insn);
1563 insn_code = INSN_CODE (insn);
1565 /* Handle a plus involving a virtual register by determining if the
1566 operands remain valid if they're modified in place. */
1567 if (GET_CODE (SET_SRC (set)) == PLUS
1568 && recog_data.n_operands >= 3
1569 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1570 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1571 && CONST_INT_P (recog_data.operand[2])
1572 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1574 offset += INTVAL (recog_data.operand[2]);
1576 /* If the sum is zero, then replace with a plain move. */
1577 if (offset == 0
1578 && REG_P (SET_DEST (set))
1579 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1581 start_sequence ();
1582 emit_move_insn (SET_DEST (set), new_rtx);
1583 seq = get_insns ();
1584 end_sequence ();
1586 emit_insn_before (seq, insn);
1587 delete_insn (insn);
1588 return;
1591 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1593 /* Using validate_change and apply_change_group here leaves
1594 recog_data in an invalid state. Since we know exactly what
1595 we want to check, do those two by hand. */
1596 if (safe_insn_predicate (insn_code, 1, new_rtx)
1597 && safe_insn_predicate (insn_code, 2, x))
1599 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1600 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1601 any_change = true;
1603 /* Fall through into the regular operand fixup loop in
1604 order to take care of operands other than 1 and 2. */
1608 else
1610 extract_insn (insn);
1611 insn_code = INSN_CODE (insn);
1614 /* In the general case, we expect virtual registers to appear only in
1615 operands, and then only as either bare registers or inside memories. */
1616 for (i = 0; i < recog_data.n_operands; ++i)
1618 x = recog_data.operand[i];
1619 switch (GET_CODE (x))
1621 case MEM:
1623 rtx addr = XEXP (x, 0);
1624 bool changed = false;
1626 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1627 if (!changed)
1628 continue;
1630 start_sequence ();
1631 x = replace_equiv_address (x, addr);
1632 /* It may happen that the address with the virtual reg
1633 was valid (e.g. based on the virtual stack reg, which might
1634 be acceptable to the predicates with all offsets), whereas
 1635          the address now isn't valid anymore, for instance when the
 1636          address still has an offset but the base reg is no longer
 1637          the virtual stack reg.  Below we would do a force_reg on the whole operand,
1638 but this insn might actually only accept memory. Hence,
1639 before doing that last resort, try to reload the address into
1640 a register, so this operand stays a MEM. */
1641 if (!safe_insn_predicate (insn_code, i, x))
1643 addr = force_reg (GET_MODE (addr), addr);
1644 x = replace_equiv_address (x, addr);
1646 seq = get_insns ();
1647 end_sequence ();
1648 if (seq)
1649 emit_insn_before (seq, insn);
1651 break;
1653 case REG:
1654 new_rtx = instantiate_new_reg (x, &offset);
1655 if (new_rtx == NULL)
1656 continue;
1657 if (offset == 0)
1658 x = new_rtx;
1659 else
1661 start_sequence ();
1663 /* Careful, special mode predicates may have stuff in
1664 insn_data[insn_code].operand[i].mode that isn't useful
1665 to us for computing a new value. */
1666 /* ??? Recognize address_operand and/or "p" constraints
 1667              to see if (plus new offset) is valid before we put
1668 this through expand_simple_binop. */
1669 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1670 GEN_INT (offset), NULL_RTX,
1671 1, OPTAB_LIB_WIDEN);
1672 seq = get_insns ();
1673 end_sequence ();
1674 emit_insn_before (seq, insn);
1676 break;
1678 case SUBREG:
1679 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1680 if (new_rtx == NULL)
1681 continue;
1682 if (offset != 0)
1684 start_sequence ();
1685 new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
1686 GEN_INT (offset), NULL_RTX,
1687 1, OPTAB_LIB_WIDEN);
1688 seq = get_insns ();
1689 end_sequence ();
1690 emit_insn_before (seq, insn);
1692 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1693 GET_MODE (new_rtx), SUBREG_BYTE (x));
1694 gcc_assert (x);
1695 break;
1697 default:
1698 continue;
1701 /* At this point, X contains the new value for the operand.
1702 Validate the new value vs the insn predicate. Note that
1703 asm insns will have insn_code -1 here. */
1704 if (!safe_insn_predicate (insn_code, i, x))
1706 start_sequence ();
1707 if (REG_P (x))
1709 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1710 x = copy_to_reg (x);
1712 else
1713 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1714 seq = get_insns ();
1715 end_sequence ();
1716 if (seq)
1717 emit_insn_before (seq, insn);
1720 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1721 any_change = true;
1724 if (any_change)
1726 /* Propagate operand changes into the duplicates. */
1727 for (i = 0; i < recog_data.n_dups; ++i)
1728 *recog_data.dup_loc[i]
1729 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1731 /* Force re-recognition of the instruction for validation. */
1732 INSN_CODE (insn) = -1;
1735 if (asm_noperands (PATTERN (insn)) >= 0)
1737 if (!check_asm_operands (PATTERN (insn)))
1739 error_for_asm (insn, "impossible constraint in %<asm%>");
1740 delete_insn (insn);
1743 else
1745 if (recog_memoized (insn) < 0)
1746 fatal_insn_not_found (insn);
1750 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1751 do any instantiation required. */
1753 void
1754 instantiate_decl_rtl (rtx x)
1756 rtx addr;
1758 if (x == 0)
1759 return;
1761 /* If this is a CONCAT, recurse for the pieces. */
1762 if (GET_CODE (x) == CONCAT)
1764 instantiate_decl_rtl (XEXP (x, 0));
1765 instantiate_decl_rtl (XEXP (x, 1));
1766 return;
1769 /* If this is not a MEM, no need to do anything. Similarly if the
1770 address is a constant or a register that is not a virtual register. */
1771 if (!MEM_P (x))
1772 return;
1774 addr = XEXP (x, 0);
1775 if (CONSTANT_P (addr)
1776 || (REG_P (addr)
1777 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1778 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1779 return;
1781 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1784 /* Helper for instantiate_decls called via walk_tree: Process all decls
1785 in the given DECL_VALUE_EXPR. */
1787 static tree
1788 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1790 tree t = *tp;
1791 if (! EXPR_P (t))
1793 *walk_subtrees = 0;
1794 if (DECL_P (t))
1796 if (DECL_RTL_SET_P (t))
1797 instantiate_decl_rtl (DECL_RTL (t));
1798 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1799 && DECL_INCOMING_RTL (t))
1800 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1801 if ((TREE_CODE (t) == VAR_DECL
1802 || TREE_CODE (t) == RESULT_DECL)
1803 && DECL_HAS_VALUE_EXPR_P (t))
1805 tree v = DECL_VALUE_EXPR (t);
1806 walk_tree (&v, instantiate_expr, NULL, NULL);
1810 return NULL;
1813 /* Subroutine of instantiate_decls: Process all decls in the given
1814 BLOCK node and all its subblocks. */
1816 static void
1817 instantiate_decls_1 (tree let)
1819 tree t;
1821 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1823 if (DECL_RTL_SET_P (t))
1824 instantiate_decl_rtl (DECL_RTL (t));
1825 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1827 tree v = DECL_VALUE_EXPR (t);
1828 walk_tree (&v, instantiate_expr, NULL, NULL);
1832 /* Process all subblocks. */
1833 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1834 instantiate_decls_1 (t);
1837 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1838 all virtual registers in their DECL_RTL's. */
1840 static void
1841 instantiate_decls (tree fndecl)
1843 tree decl;
1844 unsigned ix;
1846 /* Process all parameters of the function. */
1847 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1849 instantiate_decl_rtl (DECL_RTL (decl));
1850 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1851 if (DECL_HAS_VALUE_EXPR_P (decl))
1853 tree v = DECL_VALUE_EXPR (decl);
1854 walk_tree (&v, instantiate_expr, NULL, NULL);
1858 if ((decl = DECL_RESULT (fndecl))
1859 && TREE_CODE (decl) == RESULT_DECL)
1861 if (DECL_RTL_SET_P (decl))
1862 instantiate_decl_rtl (DECL_RTL (decl));
1863 if (DECL_HAS_VALUE_EXPR_P (decl))
1865 tree v = DECL_VALUE_EXPR (decl);
1866 walk_tree (&v, instantiate_expr, NULL, NULL);
1870 /* Now process all variables defined in the function or its subblocks. */
1871 instantiate_decls_1 (DECL_INITIAL (fndecl));
1873 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1874 if (DECL_RTL_SET_P (decl))
1875 instantiate_decl_rtl (DECL_RTL (decl));
1876 VEC_free (tree, gc, cfun->local_decls);
1879 /* Pass through the INSNS of function FNDECL and convert virtual register
1880 references to hard register references. */
1882 static unsigned int
1883 instantiate_virtual_regs (void)
1885 rtx insn;
1887 /* Compute the offsets to use for this function. */
1888 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1889 var_offset = STARTING_FRAME_OFFSET;
1890 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1891 out_arg_offset = STACK_POINTER_OFFSET;
1892 #ifdef FRAME_POINTER_CFA_OFFSET
1893 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1894 #else
1895 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1896 #endif
1898 /* Initialize recognition, indicating that volatile is OK. */
1899 init_recog ();
1901 /* Scan through all the insns, instantiating every virtual register still
1902 present. */
1903 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1904 if (INSN_P (insn))
1906 /* These patterns in the instruction stream can never be recognized.
1907 Fortunately, they shouldn't contain virtual registers either. */
1908 if (GET_CODE (PATTERN (insn)) == USE
1909 || GET_CODE (PATTERN (insn)) == CLOBBER
1910 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1911 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1912 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1913 continue;
1914 else if (DEBUG_INSN_P (insn))
1915 for_each_rtx (&INSN_VAR_LOCATION (insn),
1916 instantiate_virtual_regs_in_rtx, NULL);
1917 else
1918 instantiate_virtual_regs_in_insn (insn);
1920 if (INSN_DELETED_P (insn))
1921 continue;
1923 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1925 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1926 if (CALL_P (insn))
1927 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1928 instantiate_virtual_regs_in_rtx, NULL);
1931 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1932 instantiate_decls (current_function_decl);
1934 targetm.instantiate_decls ();
1936 /* Indicate that, from now on, assign_stack_local should use
1937 frame_pointer_rtx. */
1938 virtuals_instantiated = 1;
1940 /* See allocate_dynamic_stack_space for the rationale. */
1941 #ifdef SETJMP_VIA_SAVE_AREA
1942 if (flag_stack_usage_info && cfun->calls_setjmp)
1944 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
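/* The statement below uses the usual round-up-to-a-multiple idiom:
   with align == 16, a dynamic_offset of 20 becomes 32, while a value
   already on the boundary is left unchanged. */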
1945 dynamic_offset = (dynamic_offset + align - 1) / align * align;
1946 current_function_dynamic_stack_size
1947 += current_function_dynamic_alloc_count * dynamic_offset;
1949 #endif
1951 return 0;
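/* Editorial sketch (not part of the source): the instantiation performed
   by this pass rewrites virtual-register addresses into hard-register
   form using the offsets computed above. Assuming for illustration that
   var_offset == -16, an address such as
       (plus:SI (reg:SI virtual-stack-vars) (const_int 8))
   becomes
       (plus:SI (reg:SI frame-pointer) (const_int -8))
   The actual base register and offset are target-dependent. */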
1954 struct rtl_opt_pass pass_instantiate_virtual_regs =
1957 RTL_PASS,
1958 "vregs", /* name */
1959 NULL, /* gate */
1960 instantiate_virtual_regs, /* execute */
1961 NULL, /* sub */
1962 NULL, /* next */
1963 0, /* static_pass_number */
1964 TV_NONE, /* tv_id */
1965 0, /* properties_required */
1966 0, /* properties_provided */
1967 0, /* properties_destroyed */
1968 0, /* todo_flags_start */
1969 TODO_dump_func /* todo_flags_finish */
1974 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1975 This means a type for which function calls must pass an address to the
1976 function or get an address back from the function.
1977 EXP may be a type node or an expression (whose type is tested). */
1979 int
1980 aggregate_value_p (const_tree exp, const_tree fntype)
1982 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1983 int i, regno, nregs;
1984 rtx reg;
1986 if (fntype)
1987 switch (TREE_CODE (fntype))
1989 case CALL_EXPR:
1991 tree fndecl = get_callee_fndecl (fntype);
1992 fntype = (fndecl
1993 ? TREE_TYPE (fndecl)
1994 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
1996 break;
1997 case FUNCTION_DECL:
1998 fntype = TREE_TYPE (fntype);
1999 break;
2000 case FUNCTION_TYPE:
2001 case METHOD_TYPE:
2002 break;
2003 case IDENTIFIER_NODE:
2004 fntype = NULL_TREE;
2005 break;
2006 default:
2007 /* We don't expect other tree types here. */
2008 gcc_unreachable ();
2011 if (VOID_TYPE_P (type))
2012 return 0;
2014 /* If a record should be passed the same as its first (and only) member,
2015 don't pass it as an aggregate. */
2016 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2017 return aggregate_value_p (first_field (type), fntype);
2019 /* If the front end has decided that this needs to be passed by
2020 reference, do so. */
2021 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2022 && DECL_BY_REFERENCE (exp))
2023 return 1;
2025 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2026 if (fntype && TREE_ADDRESSABLE (fntype))
2027 return 1;
2029 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2030 and thus can't be returned in registers. */
2031 if (TREE_ADDRESSABLE (type))
2032 return 1;
2034 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2035 return 1;
2037 if (targetm.calls.return_in_memory (type, fntype))
2038 return 1;
2040 /* Make sure we have suitable call-clobbered regs to return
2041 the value in; if not, we must return it in memory. */
2042 reg = hard_function_value (type, 0, fntype, 0);
2044 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2045 it is OK. */
2046 if (!REG_P (reg))
2047 return 0;
2049 regno = REGNO (reg);
2050 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2051 for (i = 0; i < nregs; i++)
2052 if (! call_used_regs[regno + i])
2053 return 1;
2055 return 0;
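/* Illustrative cases for the checks above (front-end and target
   dependent): a C++ class with a non-trivial copy constructor is
   TREE_ADDRESSABLE and thus always returned in memory, while a small
   POD struct such as
       struct point { int x, y; };
   is typically returned in registers, provided return_in_memory and
   the call-used-register scan both permit it. */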
2058 /* Return true if we should assign DECL a pseudo register; false if it
2059 should live on the local stack. */
2061 bool
2062 use_register_for_decl (const_tree decl)
2064 if (!targetm.calls.allocate_stack_slots_for_args ())
2065 return true;
2067 /* Honor volatile. */
2068 if (TREE_SIDE_EFFECTS (decl))
2069 return false;
2071 /* Honor addressability. */
2072 if (TREE_ADDRESSABLE (decl))
2073 return false;
2075 /* Only register-like things go in registers. */
2076 if (DECL_MODE (decl) == BLKmode)
2077 return false;
2079 /* If -ffloat-store is specified, don't put explicit float variables
2080 into registers. */
2081 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2082 propagates values across these stores, and it probably shouldn't. */
2083 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2084 return false;
2086 /* If we're not interested in tracking debugging information for
2087 this decl, then we can certainly put it in a register. */
2088 if (DECL_IGNORED_P (decl))
2089 return true;
2091 if (optimize)
2092 return true;
2094 if (!DECL_REGISTER (decl))
2095 return false;
2097 switch (TREE_CODE (TREE_TYPE (decl)))
2099 case RECORD_TYPE:
2100 case UNION_TYPE:
2101 case QUAL_UNION_TYPE:
2102 /* When not optimizing, disregard register keyword for variables with
2103 types containing methods, otherwise the methods won't be callable
2104 from the debugger. */
2105 if (TYPE_METHODS (TREE_TYPE (decl)))
2106 return false;
2107 break;
2108 default:
2109 break;
2112 return true;
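/* Illustrative consequence of the checks above: at -O0 an ordinary
   local declared "int i;" fails the DECL_REGISTER test and lives on
   the stack, where the debugger can always find it; declaring it
   "register int i;" instead requests a pseudo register. */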
2115 /* Return true if TYPE should be passed by invisible reference. */
2117 bool
2118 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2119 tree type, bool named_arg)
2121 if (type)
2123 /* If this type contains non-trivial constructors, then it is
2124 forbidden for the middle-end to create any new copies. */
2125 if (TREE_ADDRESSABLE (type))
2126 return true;
2128 /* GCC post 3.4 passes *all* variable-sized types by reference. */
2129 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2130 return true;
2132 /* If a record type should be passed the same as its first (and only)
2133 member, use the type and mode of that member. */
2134 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2136 type = TREE_TYPE (first_field (type));
2137 mode = TYPE_MODE (type);
2141 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
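/* Illustrative cases (front-end dependent): a C++ class with a
   non-trivial copy constructor is TREE_ADDRESSABLE, so it is passed by
   invisible reference unconditionally, as is any type whose size is
   not a compile-time constant; a fixed-size POD struct falls through
   to the target hook. */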
2144 /* Return true if TYPE, which is passed by reference, should be callee
2145 copied instead of caller copied. */
2147 bool
2148 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2149 tree type, bool named_arg)
2151 if (type && TREE_ADDRESSABLE (type))
2152 return false;
2153 return targetm.calls.callee_copies (ca, mode, type, named_arg);
2156 /* Structures to communicate between the subroutines of assign_parms.
2157 The first holds data persistent across all parameters, the second
2158 is cleared out for each parameter. */
2160 struct assign_parm_data_all
2162 CUMULATIVE_ARGS args_so_far;
2163 struct args_size stack_args_size;
2164 tree function_result_decl;
2165 tree orig_fnargs;
2166 rtx first_conversion_insn;
2167 rtx last_conversion_insn;
2168 HOST_WIDE_INT pretend_args_size;
2169 HOST_WIDE_INT extra_pretend_bytes;
2170 int reg_parm_stack_space;
2173 struct assign_parm_data_one
2175 tree nominal_type;
2176 tree passed_type;
2177 rtx entry_parm;
2178 rtx stack_parm;
2179 enum machine_mode nominal_mode;
2180 enum machine_mode passed_mode;
2181 enum machine_mode promoted_mode;
2182 struct locate_and_pad_arg_data locate;
2183 int partial;
2184 BOOL_BITFIELD named_arg : 1;
2185 BOOL_BITFIELD passed_pointer : 1;
2186 BOOL_BITFIELD on_stack : 1;
2187 BOOL_BITFIELD loaded_in_reg : 1;
2190 /* A subroutine of assign_parms. Initialize ALL. */
2192 static void
2193 assign_parms_initialize_all (struct assign_parm_data_all *all)
2195 tree fntype ATTRIBUTE_UNUSED;
2197 memset (all, 0, sizeof (*all));
2199 fntype = TREE_TYPE (current_function_decl);
2201 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2202 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
2203 #else
2204 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
2205 current_function_decl, -1);
2206 #endif
2208 #ifdef REG_PARM_STACK_SPACE
2209 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2210 #endif
2213 /* If ARGS contains entries with complex types, split each such entry
2214 into two entries of the component type, modifying the vector *ARGS
2215 in place. */
2217 static void
2218 split_complex_args (VEC(tree, heap) **args)
2220 unsigned i;
2221 tree p;
2223 FOR_EACH_VEC_ELT (tree, *args, i, p)
2225 tree type = TREE_TYPE (p);
2226 if (TREE_CODE (type) == COMPLEX_TYPE
2227 && targetm.calls.split_complex_arg (type))
2229 tree decl;
2230 tree subtype = TREE_TYPE (type);
2231 bool addressable = TREE_ADDRESSABLE (p);
2233 /* Rewrite the PARM_DECL's type with its component. */
2234 p = copy_node (p);
2235 TREE_TYPE (p) = subtype;
2236 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2237 DECL_MODE (p) = VOIDmode;
2238 DECL_SIZE (p) = NULL;
2239 DECL_SIZE_UNIT (p) = NULL;
2240 /* If this arg must go in memory, put it in a pseudo here.
2241 We can't allow it to go in memory as per normal parms,
2242 because the usual place might not have the imag part
2243 adjacent to the real part. */
2244 DECL_ARTIFICIAL (p) = addressable;
2245 DECL_IGNORED_P (p) = addressable;
2246 TREE_ADDRESSABLE (p) = 0;
2247 layout_decl (p, 0);
2248 VEC_replace (tree, *args, i, p);
2250 /* Build a second synthetic decl. */
2251 decl = build_decl (EXPR_LOCATION (p),
2252 PARM_DECL, NULL_TREE, subtype);
2253 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2254 DECL_ARTIFICIAL (decl) = addressable;
2255 DECL_IGNORED_P (decl) = addressable;
2256 layout_decl (decl, 0);
2257 VEC_safe_insert (tree, heap, *args, ++i, decl);
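/* Sketch of the transformation above: when the target splits complex
   arguments, a parameter declared
       void f (_Complex double z);
   is rewritten into two adjacent double PARM_DECLs holding the real
   and imaginary parts; assign_parms_unsplit_complex recombines them
   later. */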
2262 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2263 the hidden struct return argument, and (ABI willing) complex args.
2264 Return the new parameter list. */
2266 static VEC(tree, heap) *
2267 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2269 tree fndecl = current_function_decl;
2270 tree fntype = TREE_TYPE (fndecl);
2271 VEC(tree, heap) *fnargs = NULL;
2272 tree arg;
2274 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2275 VEC_safe_push (tree, heap, fnargs, arg);
2277 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2279 /* If struct value address is treated as the first argument, make it so. */
2280 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2281 && ! cfun->returns_pcc_struct
2282 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2284 tree type = build_pointer_type (TREE_TYPE (fntype));
2285 tree decl;
2287 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2288 PARM_DECL, get_identifier (".result_ptr"), type);
2289 DECL_ARG_TYPE (decl) = type;
2290 DECL_ARTIFICIAL (decl) = 1;
2291 DECL_NAMELESS (decl) = 1;
2292 TREE_CONSTANT (decl) = 1;
2294 DECL_CHAIN (decl) = all->orig_fnargs;
2295 all->orig_fnargs = decl;
2296 VEC_safe_insert (tree, heap, fnargs, 0, decl);
2298 all->function_result_decl = decl;
2301 /* If the target wants to split complex arguments into scalars, do so. */
2302 if (targetm.calls.split_complex_arg)
2303 split_complex_args (&fnargs);
2305 return fnargs;
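/* Illustration of the hidden-argument case above: given
       struct big { char buf[256]; };
       struct big f (int x);
   a target that returns large aggregates in memory and has no struct
   value register sees the parameter list rewritten as if it were
       f (struct big *.result_ptr, int x)
   where ".result_ptr" is the artificial PARM_DECL built here. */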
2308 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2309 data for the parameter. Incorporate ABI specifics such as pass-by-
2310 reference and type promotion. */
2312 static void
2313 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2314 struct assign_parm_data_one *data)
2316 tree nominal_type, passed_type;
2317 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2318 int unsignedp;
2320 memset (data, 0, sizeof (*data));
2322 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2323 if (!cfun->stdarg)
2324 data->named_arg = 1; /* No variadic parms. */
2325 else if (DECL_CHAIN (parm))
2326 data->named_arg = 1; /* Not the last non-variadic parm. */
2327 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2328 data->named_arg = 1; /* Only variadic ones are unnamed. */
2329 else
2330 data->named_arg = 0; /* Treat as variadic. */
2332 nominal_type = TREE_TYPE (parm);
2333 passed_type = DECL_ARG_TYPE (parm);
2335 /* Look out for errors propagating this far. Also, if the parameter's
2336 type is void then its value doesn't matter. */
2337 if (TREE_TYPE (parm) == error_mark_node
2338 /* This can happen after weird syntax errors
2339 or if an enum type is defined among the parms. */
2340 || TREE_CODE (parm) != PARM_DECL
2341 || passed_type == NULL
2342 || VOID_TYPE_P (nominal_type))
2344 nominal_type = passed_type = void_type_node;
2345 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2346 goto egress;
2349 /* Find mode of arg as it is passed, and mode of arg as it should be
2350 during execution of this function. */
2351 passed_mode = TYPE_MODE (passed_type);
2352 nominal_mode = TYPE_MODE (nominal_type);
2354 /* If the parm is to be passed as a transparent union or record, use the
2355 type of the first field for the tests below. We have already verified
2356 that the modes are the same. */
2357 if ((TREE_CODE (passed_type) == UNION_TYPE
2358 || TREE_CODE (passed_type) == RECORD_TYPE)
2359 && TYPE_TRANSPARENT_AGGR (passed_type))
2360 passed_type = TREE_TYPE (first_field (passed_type));
2362 /* See if this arg was passed by invisible reference. */
2363 if (pass_by_reference (&all->args_so_far, passed_mode,
2364 passed_type, data->named_arg))
2366 passed_type = nominal_type = build_pointer_type (passed_type);
2367 data->passed_pointer = true;
2368 passed_mode = nominal_mode = Pmode;
2371 /* Find mode as it is passed by the ABI. */
2372 unsignedp = TYPE_UNSIGNED (passed_type);
2373 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2374 TREE_TYPE (current_function_decl), 0);
2376 egress:
2377 data->nominal_type = nominal_type;
2378 data->passed_type = passed_type;
2379 data->nominal_mode = nominal_mode;
2380 data->passed_mode = passed_mode;
2381 data->promoted_mode = promoted_mode;
2384 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2386 static void
2387 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2388 struct assign_parm_data_one *data, bool no_rtl)
2390 int varargs_pretend_bytes = 0;
2392 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2393 data->promoted_mode,
2394 data->passed_type,
2395 &varargs_pretend_bytes, no_rtl);
2397 /* If the back-end has requested extra stack space, record how much is
2398 needed. Do not change pretend_args_size otherwise, since it may be
2399 nonzero from an earlier partial argument. */
2400 if (varargs_pretend_bytes > 0)
2401 all->pretend_args_size = varargs_pretend_bytes;
2404 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2405 the incoming location of the current parameter. */
2407 static void
2408 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2409 struct assign_parm_data_one *data)
2411 HOST_WIDE_INT pretend_bytes = 0;
2412 rtx entry_parm;
2413 bool in_regs;
2415 if (data->promoted_mode == VOIDmode)
2417 data->entry_parm = data->stack_parm = const0_rtx;
2418 return;
2421 entry_parm = targetm.calls.function_incoming_arg (&all->args_so_far,
2422 data->promoted_mode,
2423 data->passed_type,
2424 data->named_arg);
2426 if (entry_parm == 0)
2427 data->promoted_mode = data->passed_mode;
2429 /* Determine parm's home in the stack, in case it arrives in the stack
2430 or we should pretend it did. Compute the stack position and rtx where
2431 the argument arrives and its size.
2433 There is one complexity here: If this was a parameter that would
2434 have been passed in registers, but wasn't only because it is
2435 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2436 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2437 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of the 0
2438 used the previous time. */
2439 in_regs = entry_parm != 0;
2440 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2441 in_regs = true;
2442 #endif
2443 if (!in_regs && !data->named_arg)
2445 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2447 rtx tem;
2448 tem = targetm.calls.function_incoming_arg (&all->args_so_far,
2449 data->promoted_mode,
2450 data->passed_type, true);
2451 in_regs = tem != NULL;
2455 /* If this parameter was passed both in registers and in the stack, use
2456 the copy on the stack. */
2457 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2458 data->passed_type))
2459 entry_parm = 0;
2461 if (entry_parm)
2463 int partial;
2465 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2466 data->promoted_mode,
2467 data->passed_type,
2468 data->named_arg);
2469 data->partial = partial;
2471 /* The caller might already have allocated stack space for the
2472 register parameters. */
2473 if (partial != 0 && all->reg_parm_stack_space == 0)
2475 /* Part of this argument is passed in registers and part
2476 is passed on the stack. Ask the prologue code to extend
2477 the stack part so that we can recreate the full value.
2479 PRETEND_BYTES is the size of the registers we need to store.
2480 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2481 stack space that the prologue should allocate.
2483 Internally, gcc assumes that the argument pointer is aligned
2484 to STACK_BOUNDARY bits. This is used both for alignment
2485 optimizations (see init_emit) and to locate arguments that are
2486 aligned to more than PARM_BOUNDARY bits. We must preserve this
2487 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2488 a stack boundary. */
2490 /* We assume at most one partial arg, and it must be the first
2491 argument on the stack. */
2492 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2494 pretend_bytes = partial;
2495 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2497 /* We want to align relative to the actual stack pointer, so
2498 don't include this in the stack size until later. */
2499 all->extra_pretend_bytes = all->pretend_args_size;
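/* Worked example for a hypothetical target with STACK_BYTES == 16: a
   12-byte argument whose first 8 bytes arrive in registers has
   partial == 8, so pretend_args_size == CEIL_ROUND (8, 16) == 16
   bytes of extra stack for the prologue to allocate. */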
2503 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2504 entry_parm ? data->partial : 0, current_function_decl,
2505 &all->stack_args_size, &data->locate);
2507 /* Update parm_stack_boundary if this parameter is passed in the
2508 stack. */
2509 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2510 crtl->parm_stack_boundary = data->locate.boundary;
2512 /* Adjust offsets to include the pretend args. */
2513 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2514 data->locate.slot_offset.constant += pretend_bytes;
2515 data->locate.offset.constant += pretend_bytes;
2517 data->entry_parm = entry_parm;
2520 /* A subroutine of assign_parms. If there is actually space on the stack
2521 for this parm, count it in stack_args_size and return true. */
2523 static bool
2524 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2525 struct assign_parm_data_one *data)
2527 /* Trivially true if we've no incoming register. */
2528 if (data->entry_parm == NULL)
2530 /* Also true if we're partially in registers and partially not,
2531 since we've arranged to drop the entire argument on the stack. */
2532 else if (data->partial != 0)
2534 /* Also true if the target says that it's passed in both registers
2535 and on the stack. */
2536 else if (GET_CODE (data->entry_parm) == PARALLEL
2537 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2539 /* Also true if the target says that there's stack allocated for
2540 all register parameters. */
2541 else if (all->reg_parm_stack_space > 0)
2543 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2544 else
2545 return false;
2547 all->stack_args_size.constant += data->locate.size.constant;
2548 if (data->locate.size.var)
2549 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2551 return true;
2554 /* A subroutine of assign_parms. Given that this parameter is allocated
2555 stack space by the ABI, find it. */
2557 static void
2558 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2560 rtx offset_rtx, stack_parm;
2561 unsigned int align, boundary;
2563 /* If we're passing this arg using a reg, make its stack home the
2564 aligned stack slot. */
2565 if (data->entry_parm)
2566 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2567 else
2568 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2570 stack_parm = crtl->args.internal_arg_pointer;
2571 if (offset_rtx != const0_rtx)
2572 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2573 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2575 if (!data->passed_pointer)
2577 set_mem_attributes (stack_parm, parm, 1);
2578 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2579 while promoted mode's size is needed. */
2580 if (data->promoted_mode != BLKmode
2581 && data->promoted_mode != DECL_MODE (parm))
2583 set_mem_size (stack_parm,
2584 GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
2585 if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
2587 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2588 data->promoted_mode);
2589 if (offset)
2590 set_mem_offset (stack_parm,
2591 plus_constant (MEM_OFFSET (stack_parm),
2592 -offset));
2597 boundary = data->locate.boundary;
2598 align = BITS_PER_UNIT;
2600 /* If we're padding upward, we know that the alignment of the slot
2601 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2602 intentionally forcing upward padding. Otherwise we have to come
2603 up with a guess at the alignment based on OFFSET_RTX. */
2604 if (data->locate.where_pad != downward || data->entry_parm)
2605 align = boundary;
2606 else if (CONST_INT_P (offset_rtx))
2608 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2609 align = align & -align;
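/* The two statements above isolate the lowest set bit, i.e. the
   largest power of two dividing both the byte offset and the boundary.
   E.g. an offset of 24 bytes with a 64-bit boundary gives
   192 | 64 == 192, and 192 & -192 == 64, so the slot is known to be
   64-bit aligned. */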
2611 set_mem_align (stack_parm, align);
2613 if (data->entry_parm)
2614 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2616 data->stack_parm = stack_parm;
2619 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2620 always valid and contiguous. */
2622 static void
2623 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2625 rtx entry_parm = data->entry_parm;
2626 rtx stack_parm = data->stack_parm;
2628 /* If this parm was passed part in regs and part in memory, pretend it
2629 arrived entirely in memory by pushing the register-part onto the stack.
2630 In the special case of a DImode or DFmode that is split, we could put
2631 it together in a pseudoreg directly, but for now that's not worth
2632 bothering with. */
2633 if (data->partial != 0)
2635 /* Handle calls that pass values in multiple non-contiguous
2636 locations. The Irix 6 ABI has examples of this. */
2637 if (GET_CODE (entry_parm) == PARALLEL)
2638 emit_group_store (validize_mem (stack_parm), entry_parm,
2639 data->passed_type,
2640 int_size_in_bytes (data->passed_type));
2641 else
2643 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2644 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2645 data->partial / UNITS_PER_WORD);
2648 entry_parm = stack_parm;
2651 /* If we didn't decide this parm came in a register, by default it came
2652 on the stack. */
2653 else if (entry_parm == NULL)
2654 entry_parm = stack_parm;
2656 /* When an argument is passed in multiple locations, we can't make use
2657 of this information, but we can save some copying if the whole argument
2658 is passed in a single register. */
2659 else if (GET_CODE (entry_parm) == PARALLEL
2660 && data->nominal_mode != BLKmode
2661 && data->passed_mode != BLKmode)
2663 size_t i, len = XVECLEN (entry_parm, 0);
2665 for (i = 0; i < len; i++)
2666 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2667 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2668 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2669 == data->passed_mode)
2670 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2672 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2673 break;
2677 data->entry_parm = entry_parm;
2680 /* A subroutine of assign_parms. Reconstitute any values which were
2681 passed in multiple registers and would fit in a single register. */
2683 static void
2684 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2686 rtx entry_parm = data->entry_parm;
2688 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2689 This can be done with register operations rather than on the
2690 stack, even if we will store the reconstituted parameter on the
2691 stack later. */
2692 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2694 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2695 emit_group_store (parmreg, entry_parm, data->passed_type,
2696 GET_MODE_SIZE (GET_MODE (entry_parm)));
2697 entry_parm = parmreg;
2700 data->entry_parm = entry_parm;
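/* E.g. (illustrative; the exact registers are target-dependent): a
   TImode argument arriving as
       (parallel:TI [(expr_list (reg:DI 0) (const_int 0))
                     (expr_list (reg:DI 1) (const_int 8))])
   is reassembled above into a single TImode pseudo with ordinary
   register moves, so later code can treat it as one value. */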
2703 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2704 always valid and properly aligned. */
2706 static void
2707 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2709 rtx stack_parm = data->stack_parm;
2711 /* If we can't trust the parm stack slot to be aligned enough for its
2712 ultimate type, don't use that slot after entry. We'll make another
2713 stack slot, if we need one. */
2714 if (stack_parm
2715 && ((STRICT_ALIGNMENT
2716 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2717 || (data->nominal_type
2718 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2719 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2720 stack_parm = NULL;
2722 /* If parm was passed in memory, and we need to convert it on entry,
2723 don't store it back in that same slot. */
2724 else if (data->entry_parm == stack_parm
2725 && data->nominal_mode != BLKmode
2726 && data->nominal_mode != data->passed_mode)
2727 stack_parm = NULL;
2729 /* If stack protection is in effect for this function, don't leave any
2730 pointers in their passed stack slots. */
2731 else if (crtl->stack_protect_guard
2732 && (flag_stack_protect == 2
2733 || data->passed_pointer
2734 || POINTER_TYPE_P (data->nominal_type)))
2735 stack_parm = NULL;
2737 data->stack_parm = stack_parm;
2740 /* A subroutine of assign_parms. Return true if the current parameter
2741 should be stored as a BLKmode in the current frame. */
2743 static bool
2744 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2746 if (data->nominal_mode == BLKmode)
2747 return true;
2748 if (GET_MODE (data->entry_parm) == BLKmode)
2749 return true;
2751 #ifdef BLOCK_REG_PADDING
2752 /* Only assign_parm_setup_block knows how to deal with register arguments
2753 that are padded at the least significant end. */
2754 if (REG_P (data->entry_parm)
2755 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2756 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2757 == (BYTES_BIG_ENDIAN ? upward : downward)))
2758 return true;
2759 #endif
2761 return false;
2764 /* A subroutine of assign_parms. Arrange for the parameter to be
2765 present and valid in DATA->STACK_RTL. */
2767 static void
2768 assign_parm_setup_block (struct assign_parm_data_all *all,
2769 tree parm, struct assign_parm_data_one *data)
2771 rtx entry_parm = data->entry_parm;
2772 rtx stack_parm = data->stack_parm;
2773 HOST_WIDE_INT size;
2774 HOST_WIDE_INT size_stored;
2776 if (GET_CODE (entry_parm) == PARALLEL)
2777 entry_parm = emit_group_move_into_temps (entry_parm);
2779 size = int_size_in_bytes (data->passed_type);
2780 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2781 if (stack_parm == 0)
2783 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2784 stack_parm = assign_stack_local (BLKmode, size_stored,
2785 DECL_ALIGN (parm));
2786 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2787 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2788 set_mem_attributes (stack_parm, parm, 1);
2791 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2792 calls that pass values in multiple non-contiguous locations. */
2793 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2795 rtx mem;
2797 /* Note that we will be storing an integral number of words.
2798 So we have to be careful to ensure that we allocate an
2799 integral number of words. We do this above when we call
2800 assign_stack_local if space was not allocated in the argument
2801 list. If it was, this will not work if PARM_BOUNDARY is not
2802 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2803 if it becomes a problem. The exception is when a BLKmode argument
2804 arrives with a size not conforming to word_mode. */
2806 if (data->stack_parm == 0)
2807 ;
2808 else if (GET_CODE (entry_parm) == PARALLEL)
2809 ;
2810 else
2811 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2813 mem = validize_mem (stack_parm);
2815 /* Handle values in multiple non-contiguous locations. */
2816 if (GET_CODE (entry_parm) == PARALLEL)
2818 push_to_sequence2 (all->first_conversion_insn,
2819 all->last_conversion_insn);
2820 emit_group_store (mem, entry_parm, data->passed_type, size);
2821 all->first_conversion_insn = get_insns ();
2822 all->last_conversion_insn = get_last_insn ();
2823 end_sequence ();
2826 else if (size == 0)
2827 ;
2829 /* If SIZE is that of a mode no bigger than a word, just use
2830 that mode's store operation. */
2831 else if (size <= UNITS_PER_WORD)
2833 enum machine_mode mode
2834 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2836 if (mode != BLKmode
2837 #ifdef BLOCK_REG_PADDING
2838 && (size == UNITS_PER_WORD
2839 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2840 != (BYTES_BIG_ENDIAN ? upward : downward)))
2841 #endif
2844 rtx reg;
2846 /* We are really truncating a word_mode value containing
2847 SIZE bytes into a value of mode MODE. If such an
2848 operation requires no actual instructions, we can refer
2849 to the value directly in mode MODE, otherwise we must
2850 start with the register in word_mode and explicitly
2851 convert it. */
2852 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2853 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2854 else
2856 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2857 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2859 emit_move_insn (change_address (mem, mode, 0), reg);
2862 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2863 machine must be aligned to the left before storing
2864 to memory. Note that the previous test doesn't
2865 handle all cases (e.g. SIZE == 3). */
2866 else if (size != UNITS_PER_WORD
2867 #ifdef BLOCK_REG_PADDING
2868 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2869 == downward)
2870 #else
2871 && BYTES_BIG_ENDIAN
2872 #endif
2875 rtx tem, x;
2876 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2877 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2879 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
2880 tem = change_address (mem, word_mode, 0);
2881 emit_move_insn (tem, x);
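/* Illustration of the shift above: storing a 3-byte value on a 32-bit
   big-endian target gives by == (4 - 3) * 8 == 8, so the value is
   shifted left one byte to sit at the most significant end of the
   word before the word-sized store. */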
2883 else
2884 move_block_from_reg (REGNO (entry_parm), mem,
2885 size_stored / UNITS_PER_WORD);
2887 else
2888 move_block_from_reg (REGNO (entry_parm), mem,
2889 size_stored / UNITS_PER_WORD);
2891 else if (data->stack_parm == 0)
2893 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2894 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2895 BLOCK_OP_NORMAL);
2896 all->first_conversion_insn = get_insns ();
2897 all->last_conversion_insn = get_last_insn ();
2898 end_sequence ();
2901 data->stack_parm = stack_parm;
2902 SET_DECL_RTL (parm, stack_parm);
2905 /* A subroutine of assign_parm_setup_reg, called through note_stores.
2906 This collects sets and clobbers of hard registers in a HARD_REG_SET,
2907 which is pointed to by DATA. */
2908 static void
2909 record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
2911 HARD_REG_SET *pset = (HARD_REG_SET *)data;
2912 if (REG_P (x) && HARD_REGISTER_P (x))
2913 add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
2916 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2917 parameter. Get it there. Perform all ABI specified conversions. */
2919 static void
2920 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2921 struct assign_parm_data_one *data)
2923 rtx parmreg, validated_mem;
2924 rtx equiv_stack_parm;
2925 enum machine_mode promoted_nominal_mode;
2926 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2927 bool did_conversion = false;
2928 bool need_conversion, moved;
2930 /* Store the parm in a pseudoregister during the function, but we may
2931 need to do it in a wider mode. Using 2 here makes the result
2932 consistent with promote_decl_mode and thus expand_expr_real_1. */
2933 promoted_nominal_mode
2934 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
2935 TREE_TYPE (current_function_decl), 2);
2937 parmreg = gen_reg_rtx (promoted_nominal_mode);
2939 if (!DECL_ARTIFICIAL (parm))
2940 mark_user_reg (parmreg);
2942 /* If this was an item that we received a pointer to,
2943 set DECL_RTL appropriately. */
2944 if (data->passed_pointer)
2946 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2947 set_mem_attributes (x, parm, 1);
2948 SET_DECL_RTL (parm, x);
2950 else
2951 SET_DECL_RTL (parm, parmreg);
2953 assign_parm_remove_parallels (data);
2955 /* Copy the value into the register, thus bridging between
2956 assign_parm_find_data_types and expand_expr_real_1. */
2958 equiv_stack_parm = data->stack_parm;
2959 validated_mem = validize_mem (data->entry_parm);
2961 need_conversion = (data->nominal_mode != data->passed_mode
2962 || promoted_nominal_mode != data->promoted_mode);
2963 moved = false;
2965 if (need_conversion
2966 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2967 && data->nominal_mode == data->passed_mode
2968 && data->nominal_mode == GET_MODE (data->entry_parm))
2970 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2971 mode, by the caller. We now have to convert it to
2972 NOMINAL_MODE, if different. However, PARMREG may be in
2973 a different mode than NOMINAL_MODE if it is being stored
2974 promoted.
2976 If ENTRY_PARM is a hard register, it might be in a register
2977 not valid for operating in its mode (e.g., an odd-numbered
2978 register for a DFmode). In that case, moves are the only
2979 thing valid, so we can't do a convert from there. This
2980 occurs when the calling sequence allows such misaligned
2981 usages.
2983 In addition, the conversion may involve a call, which could
2984 clobber parameters which haven't been copied to pseudo
2985 registers yet.
2987 First, we try to emit an insn which performs the necessary
2988 conversion. We verify that this insn does not clobber any
2989 hard registers. */
2991 enum insn_code icode;
2992 rtx op0, op1;
2994 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
2995 unsignedp);
2997 op0 = parmreg;
2998 op1 = validated_mem;
2999 if (icode != CODE_FOR_nothing
3000 && insn_operand_matches (icode, 0, op0)
3001 && insn_operand_matches (icode, 1, op1))
3003 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3004 rtx insn, insns;
3005 HARD_REG_SET hardregs;
3007 start_sequence ();
3008 insn = gen_extend_insn (op0, op1, promoted_nominal_mode,
3009 data->passed_mode, unsignedp);
3010 emit_insn (insn);
3011 insns = get_insns ();
3013 moved = true;
3014 CLEAR_HARD_REG_SET (hardregs);
3015 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3017 if (INSN_P (insn))
3018 note_stores (PATTERN (insn), record_hard_reg_sets,
3019 &hardregs);
3020 if (!hard_reg_set_empty_p (hardregs))
3021 moved = false;
3024 end_sequence ();
3026 if (moved)
3028 emit_insn (insns);
3029 if (equiv_stack_parm != NULL_RTX)
3030 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3031 equiv_stack_parm);
3036 if (moved)
3037 /* Nothing to do. */
3038 ;
3039 else if (need_conversion)
3041 /* We did not have an insn to convert directly, or the sequence
3042 generated appeared unsafe. We must first copy the parm to a
3043 pseudo reg, and save the conversion until after all
3044 parameters have been moved. */
3046 int save_tree_used;
3047 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3049 emit_move_insn (tempreg, validated_mem);
3051 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3052 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3054 if (GET_CODE (tempreg) == SUBREG
3055 && GET_MODE (tempreg) == data->nominal_mode
3056 && REG_P (SUBREG_REG (tempreg))
3057 && data->nominal_mode == data->passed_mode
3058 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3059 && GET_MODE_SIZE (GET_MODE (tempreg))
3060 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3062 /* The argument is already sign/zero extended, so note it
3063 into the subreg. */
3064 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3065 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
3068 /* TREE_USED gets set erroneously during expand_assignment. */
3069 save_tree_used = TREE_USED (parm);
3070 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3071 TREE_USED (parm) = save_tree_used;
3072 all->first_conversion_insn = get_insns ();
3073 all->last_conversion_insn = get_last_insn ();
3074 end_sequence ();
3076 did_conversion = true;
3078 else
3079 emit_move_insn (parmreg, validated_mem);
3081 /* If we were passed a pointer but the actual value can safely live
3082 in a register, put it in one. */
3083 if (data->passed_pointer
3084 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3085 /* If by-reference argument was promoted, demote it. */
3086 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
3087 || use_register_for_decl (parm)))
3089 /* We can't use nominal_mode, because it will have been set to
3090 Pmode above. We must use the actual mode of the parm. */
3091 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3092 mark_user_reg (parmreg);
3094 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3096 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3097 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3099 push_to_sequence2 (all->first_conversion_insn,
3100 all->last_conversion_insn);
3101 emit_move_insn (tempreg, DECL_RTL (parm));
3102 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3103 emit_move_insn (parmreg, tempreg);
3104 all->first_conversion_insn = get_insns ();
3105 all->last_conversion_insn = get_last_insn ();
3106 end_sequence ();
3108 did_conversion = true;
3110 else
3111 emit_move_insn (parmreg, DECL_RTL (parm));
3113 SET_DECL_RTL (parm, parmreg);
3115 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3116 now the parm. */
3117 data->stack_parm = NULL;
3120 /* Mark the register as eliminable if we did no conversion and it was
3121 copied from memory at a fixed offset, and the arg pointer was not
3122 copied to a pseudo-reg. If the arg pointer is a pseudo-reg or the
3123 offset formed an invalid address, such memory-equivalences as we
3124 make here would screw up life analysis for it. */
3125 if (data->nominal_mode == data->passed_mode
3126 && !did_conversion
3127 && data->stack_parm != 0
3128 && MEM_P (data->stack_parm)
3129 && data->locate.offset.var == 0
3130 && reg_mentioned_p (virtual_incoming_args_rtx,
3131 XEXP (data->stack_parm, 0)))
3133 rtx linsn = get_last_insn ();
3134 rtx sinsn, set;
3136 /* Mark complex types separately. */
3137 if (GET_CODE (parmreg) == CONCAT)
3139 enum machine_mode submode
3140 = GET_MODE_INNER (GET_MODE (parmreg));
3141 int regnor = REGNO (XEXP (parmreg, 0));
3142 int regnoi = REGNO (XEXP (parmreg, 1));
3143 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3144 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3145 GET_MODE_SIZE (submode));
3147 /* Scan backwards for the set of the real and
3148 imaginary parts. */
3149 for (sinsn = linsn; sinsn != 0;
3150 sinsn = prev_nonnote_insn (sinsn))
3152 set = single_set (sinsn);
3153 if (set == 0)
3154 continue;
3156 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3157 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3158 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3159 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3162 else if ((set = single_set (linsn)) != 0
3163 && SET_DEST (set) == parmreg)
3164 set_unique_reg_note (linsn, REG_EQUIV, equiv_stack_parm);
3167 /* For pointer data type, suggest pointer register. */
3168 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3169 mark_reg_pointer (parmreg,
3170 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3173 /* A subroutine of assign_parms. Allocate stack space to hold the current
3174 parameter. Get it there. Perform all ABI specified conversions. */
3176 static void
3177 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3178 struct assign_parm_data_one *data)
3180 /* Value must be stored in the stack slot STACK_PARM during function
3181 execution. */
3182 bool to_conversion = false;
3184 assign_parm_remove_parallels (data);
3186 if (data->promoted_mode != data->nominal_mode)
3188 /* Conversion is required. */
3189 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3191 emit_move_insn (tempreg, validize_mem (data->entry_parm));
3193 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3194 to_conversion = true;
3196 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3197 TYPE_UNSIGNED (TREE_TYPE (parm)));
3199 if (data->stack_parm)
3201 int offset = subreg_lowpart_offset (data->nominal_mode,
3202 GET_MODE (data->stack_parm));
3203 /* ??? This may need a big-endian conversion on sparc64. */
3204 data->stack_parm
3205 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3206 if (offset && MEM_OFFSET (data->stack_parm))
3207 set_mem_offset (data->stack_parm,
3208 plus_constant (MEM_OFFSET (data->stack_parm),
3209 offset));
3213 if (data->entry_parm != data->stack_parm)
3215 rtx src, dest;
3217 if (data->stack_parm == 0)
3219 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3220 GET_MODE (data->entry_parm),
3221 TYPE_ALIGN (data->passed_type));
3222 data->stack_parm
3223 = assign_stack_local (GET_MODE (data->entry_parm),
3224 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3225 align);
3226 set_mem_attributes (data->stack_parm, parm, 1);
3229 dest = validize_mem (data->stack_parm);
3230 src = validize_mem (data->entry_parm);
3232 if (MEM_P (src))
3234 /* Use a block move to handle potentially misaligned entry_parm. */
3235 if (!to_conversion)
3236 push_to_sequence2 (all->first_conversion_insn,
3237 all->last_conversion_insn);
3238 to_conversion = true;
3240 emit_block_move (dest, src,
3241 GEN_INT (int_size_in_bytes (data->passed_type)),
3242 BLOCK_OP_NORMAL);
3244 else
3245 emit_move_insn (dest, src);
3248 if (to_conversion)
3250 all->first_conversion_insn = get_insns ();
3251 all->last_conversion_insn = get_last_insn ();
3252 end_sequence ();
3255 SET_DECL_RTL (parm, data->stack_parm);
3258 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3259 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3261 static void
3262 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3263 VEC(tree, heap) *fnargs)
3265 tree parm;
3266 tree orig_fnargs = all->orig_fnargs;
3267 unsigned i = 0;
3269 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3271 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3272 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3274 rtx tmp, real, imag;
3275 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3277 real = DECL_RTL (VEC_index (tree, fnargs, i));
3278 imag = DECL_RTL (VEC_index (tree, fnargs, i + 1));
3279 if (inner != GET_MODE (real))
3281 real = gen_lowpart_SUBREG (inner, real);
3282 imag = gen_lowpart_SUBREG (inner, imag);
3285 if (TREE_ADDRESSABLE (parm))
3287 rtx rmem, imem;
3288 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3289 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3290 DECL_MODE (parm),
3291 TYPE_ALIGN (TREE_TYPE (parm)));
3293 /* split_complex_arg put the real and imag parts in
3294 pseudos. Move them to memory. */
3295 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3296 set_mem_attributes (tmp, parm, 1);
3297 rmem = adjust_address_nv (tmp, inner, 0);
3298 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3299 push_to_sequence2 (all->first_conversion_insn,
3300 all->last_conversion_insn);
3301 emit_move_insn (rmem, real);
3302 emit_move_insn (imem, imag);
3303 all->first_conversion_insn = get_insns ();
3304 all->last_conversion_insn = get_last_insn ();
3305 end_sequence ();
3307 else
3308 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3309 SET_DECL_RTL (parm, tmp);
3311 real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i));
3312 imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1));
3313 if (inner != GET_MODE (real))
3315 real = gen_lowpart_SUBREG (inner, real);
3316 imag = gen_lowpart_SUBREG (inner, imag);
3318 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3319 set_decl_incoming_rtl (parm, tmp, false);
3320 i++;
3325 /* Assign RTL expressions to the function's parameters. This may involve
3326 copying them into registers and using those registers as the DECL_RTL. */
3328 static void
3329 assign_parms (tree fndecl)
3331 struct assign_parm_data_all all;
3332 tree parm;
3333 VEC(tree, heap) *fnargs;
3334 unsigned i;
3336 crtl->args.internal_arg_pointer
3337 = targetm.calls.internal_arg_pointer ();
3339 assign_parms_initialize_all (&all);
3340 fnargs = assign_parms_augmented_arg_list (&all);
3342 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
3344 struct assign_parm_data_one data;
3346 /* Extract the type of PARM; adjust it according to ABI. */
3347 assign_parm_find_data_types (&all, parm, &data);
3349 /* Early out for errors and void parameters. */
3350 if (data.passed_mode == VOIDmode)
3352 SET_DECL_RTL (parm, const0_rtx);
3353 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3354 continue;
3357 /* Estimate stack alignment from parameter alignment. */
3358 if (SUPPORTS_STACK_ALIGNMENT)
3360 unsigned int align
3361 = targetm.calls.function_arg_boundary (data.promoted_mode,
3362 data.passed_type);
3363 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3364 align);
3365 if (TYPE_ALIGN (data.nominal_type) > align)
3366 align = MINIMUM_ALIGNMENT (data.nominal_type,
3367 TYPE_MODE (data.nominal_type),
3368 TYPE_ALIGN (data.nominal_type));
3369 if (crtl->stack_alignment_estimated < align)
3371 gcc_assert (!crtl->stack_realign_processed);
3372 crtl->stack_alignment_estimated = align;
3376 if (cfun->stdarg && !DECL_CHAIN (parm))
3377 assign_parms_setup_varargs (&all, &data, false);
3379 /* Find out where the parameter arrives in this function. */
3380 assign_parm_find_entry_rtl (&all, &data);
3382 /* Find out where stack space for this parameter might be. */
3383 if (assign_parm_is_stack_parm (&all, &data))
3385 assign_parm_find_stack_rtl (parm, &data);
3386 assign_parm_adjust_entry_rtl (&data);
3389 /* Record permanently how this parm was passed. */
3390 if (data.passed_pointer)
3392 rtx incoming_rtl
3393 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3394 data.entry_parm);
3395 set_decl_incoming_rtl (parm, incoming_rtl, true);
3397 else
3398 set_decl_incoming_rtl (parm, data.entry_parm, false);
3400 /* Update info on where next arg arrives in registers. */
3401 targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
3402 data.passed_type, data.named_arg);
3404 assign_parm_adjust_stack_rtl (&data);
3406 if (assign_parm_setup_block_p (&data))
3407 assign_parm_setup_block (&all, parm, &data);
3408 else if (data.passed_pointer || use_register_for_decl (parm))
3409 assign_parm_setup_reg (&all, parm, &data);
3410 else
3411 assign_parm_setup_stack (&all, parm, &data);
3414 if (targetm.calls.split_complex_arg)
3415 assign_parms_unsplit_complex (&all, fnargs);
3417 VEC_free (tree, heap, fnargs);
3419 /* Output all parameter conversion instructions (possibly including calls)
3420 now that all parameters have been copied out of hard registers. */
3421 emit_insn (all.first_conversion_insn);
3423 /* Estimate reload stack alignment from scalar return mode. */
3424 if (SUPPORTS_STACK_ALIGNMENT)
3426 if (DECL_RESULT (fndecl))
3428 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3429 enum machine_mode mode = TYPE_MODE (type);
3431 if (mode != BLKmode
3432 && mode != VOIDmode
3433 && !AGGREGATE_TYPE_P (type))
3435 unsigned int align = GET_MODE_ALIGNMENT (mode);
3436 if (crtl->stack_alignment_estimated < align)
3438 gcc_assert (!crtl->stack_realign_processed);
3439 crtl->stack_alignment_estimated = align;
3445 /* If we are receiving a struct value address as the first argument, set up
3446 the RTL for the function result. As this might require code to convert
3447 the transmitted address to Pmode, we do this here to ensure that possible
3448 preliminary conversions of the address have been emitted already. */
3449 if (all.function_result_decl)
3451 tree result = DECL_RESULT (current_function_decl);
3452 rtx addr = DECL_RTL (all.function_result_decl);
3453 rtx x;
3455 if (DECL_BY_REFERENCE (result))
3457 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3458 x = addr;
3460 else
3462 SET_DECL_VALUE_EXPR (result,
3463 build1 (INDIRECT_REF, TREE_TYPE (result),
3464 all.function_result_decl));
3465 addr = convert_memory_address (Pmode, addr);
3466 x = gen_rtx_MEM (DECL_MODE (result), addr);
3467 set_mem_attributes (x, result, 1);
3470 DECL_HAS_VALUE_EXPR_P (result) = 1;
3472 SET_DECL_RTL (result, x);
3475 /* We have aligned all the args, so add space for the pretend args. */
3476 crtl->args.pretend_args_size = all.pretend_args_size;
3477 all.stack_args_size.constant += all.extra_pretend_bytes;
3478 crtl->args.size = all.stack_args_size.constant;
3480 /* Adjust function incoming argument size for alignment and
3481 minimum length. */
3483 #ifdef REG_PARM_STACK_SPACE
3484 crtl->args.size = MAX (crtl->args.size,
3485 REG_PARM_STACK_SPACE (fndecl));
3486 #endif
3488 crtl->args.size = CEIL_ROUND (crtl->args.size,
3489 PARM_BOUNDARY / BITS_PER_UNIT);
3491 #ifdef ARGS_GROW_DOWNWARD
3492 crtl->args.arg_offset_rtx
3493 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3494 : expand_expr (size_diffop (all.stack_args_size.var,
3495 size_int (-all.stack_args_size.constant)),
3496 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3497 #else
3498 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3499 #endif
3501 /* See how many bytes, if any, of its args a function should try to pop
3502 on return. */
3504 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3505 TREE_TYPE (fndecl),
3506 crtl->args.size);
3508 /* For a stdarg.h function, save info about
3509 regs and stack space used by the named args. */
3511 crtl->args.info = all.args_so_far;
3513 /* Set the rtx used for the function return value. Put this in its
3514 own variable so any optimizers that need this information don't have
3515 to include tree.h. Do this here so it gets done when an inlined
3516 function gets output. */
3518 crtl->return_rtx
3519 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3520 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3522 /* If the scalar return value was computed in a pseudo-reg, or was a named
3523 return value that got dumped to the stack, copy that to the hard
3524 return register. */
3525 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3527 tree decl_result = DECL_RESULT (fndecl);
3528 rtx decl_rtl = DECL_RTL (decl_result);
3530 if (REG_P (decl_rtl)
3531 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3532 : DECL_REGISTER (decl_result))
3534 rtx real_decl_rtl;
3536 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3537 fndecl, true);
3538 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3539 /* The delay slot scheduler assumes that crtl->return_rtx
3540 holds the hard register containing the return value, not a
3541 temporary pseudo. */
3542 crtl->return_rtx = real_decl_rtl;
3547 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3548 For all seen types, gimplify their sizes. */
3550 static tree
3551 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3553 tree t = *tp;
3555 *walk_subtrees = 0;
3556 if (TYPE_P (t))
3558 if (POINTER_TYPE_P (t))
3559 *walk_subtrees = 1;
3560 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3561 && !TYPE_SIZES_GIMPLIFIED (t))
3563 gimplify_type_sizes (t, (gimple_seq *) data);
3564 *walk_subtrees = 1;
3568 return NULL;
3571 /* Gimplify the parameter list for current_function_decl. This involves
3572 evaluating SAVE_EXPRs of variable-sized parameters and generating code
3573 to implement callee-copied reference parameters. Returns a sequence of
3574 statements to add to the beginning of the function. */
3576 gimple_seq
3577 gimplify_parameters (void)
3579 struct assign_parm_data_all all;
3580 tree parm;
3581 gimple_seq stmts = NULL;
3582 VEC(tree, heap) *fnargs;
3583 unsigned i;
3585 assign_parms_initialize_all (&all);
3586 fnargs = assign_parms_augmented_arg_list (&all);
3588 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
3590 struct assign_parm_data_one data;
3592 /* Extract the type of PARM; adjust it according to ABI. */
3593 assign_parm_find_data_types (&all, parm, &data);
3595 /* Early out for errors and void parameters. */
3596 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3597 continue;
3599 /* Update info on where next arg arrives in registers. */
3600 targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
3601 data.passed_type, data.named_arg);
3603 /* ??? Once upon a time variable_size stuffed parameter list
3604 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3605 turned out to be less than manageable in the gimple world.
3606 Now we have to hunt them down ourselves. */
3607 walk_tree_without_duplicates (&data.passed_type,
3608 gimplify_parm_type, &stmts);
3610 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3612 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3613 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3616 if (data.passed_pointer)
3618 tree type = TREE_TYPE (data.passed_type);
3619 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3620 type, data.named_arg))
3622 tree local, t;
3624 /* For constant-sized objects, this is trivial; for
3625 variable-sized objects, we have to play games. */
3626 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3627 && !(flag_stack_check == GENERIC_STACK_CHECK
3628 && compare_tree_int (DECL_SIZE_UNIT (parm),
3629 STACK_CHECK_MAX_VAR_SIZE) > 0))
3631 local = create_tmp_reg (type, get_name (parm));
3632 DECL_IGNORED_P (local) = 0;
3633 /* If PARM was addressable, move that flag over
3634 to the local copy, as its address will be taken,
3635 not the PARM's. Keep the parm's address taken,
3636 as we'll query that flag during gimplification. */
3637 if (TREE_ADDRESSABLE (parm))
3638 TREE_ADDRESSABLE (local) = 1;
3640 else
3642 tree ptr_type, addr;
3644 ptr_type = build_pointer_type (type);
3645 addr = create_tmp_reg (ptr_type, get_name (parm));
3646 DECL_IGNORED_P (addr) = 0;
3647 local = build_fold_indirect_ref (addr);
3649 t = built_in_decls[BUILT_IN_ALLOCA];
3650 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3651 /* The call has been built for a variable-sized object. */
3652 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3653 t = fold_convert (ptr_type, t);
3654 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3655 gimplify_and_add (t, &stmts);
3658 gimplify_assign (local, parm, &stmts);
3660 SET_DECL_VALUE_EXPR (parm, local);
3661 DECL_HAS_VALUE_EXPR_P (parm) = 1;
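/* Net effect of the callee-copy path above, sketched in C terms: a
   parameter of type T that arrives as a hidden pointer P behaves as
   if the function began with "T local = *P;", where LOCAL is either a
   temporary (fixed-size T) or space obtained through the alloca call
   built above (variable-sized T); the parm's DECL_VALUE_EXPR then
   refers to LOCAL. */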
3666 VEC_free (tree, heap, fnargs);
3668 return stmts;
3671 /* Compute the size and offset from the start of the stacked arguments for a
3672 parm passed in mode PASSED_MODE and with type TYPE.
3674 INITIAL_OFFSET_PTR points to the current offset into the stacked
3675 arguments.
3677 The starting offset and size for this parm are returned in
3678 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3679 nonzero, the offset is that of the stack slot, which is returned in
3680 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3681 padding required from the initial offset ptr to the stack slot.
3683 IN_REGS is nonzero if the argument will be passed in registers. It will
3684 never be set if REG_PARM_STACK_SPACE is not defined.
3686 FNDECL is the function in which the argument was defined.
3688 There are two types of rounding that are done. The first, controlled by
3689 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3690 argument list to be aligned to the specified boundary (in bits). This
3691 rounding affects the initial and starting offsets, but not the argument
3692 size.
3694 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3695 optionally rounds the size of the parm to PARM_BOUNDARY. The
3696 initial offset is not affected by this rounding, while the size always
3697 is and the starting offset may be. */
3699 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3700 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3701 callers pass in the total size of args so far as
3702 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
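/* Worked example of the two roundings (illustrative numbers only,
   disabled code): with a 32-bit PARM_BOUNDARY (4 bytes), a 6-byte
   argument at initial offset 13 has its starting offset and its size
   rounded independently, using the macros defined at the top of this
   file.  */
#if 0
  int start = CEIL_ROUND (13, 4);  /* 16: slot offset after alignment */
  int pad   = start - 13;          /* 3:  LOCATE->ALIGNMENT_PAD       */
  int size  = CEIL_ROUND (6, 4);   /* 8:  rounded parm size           */
#endif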
3704 void
3705 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3706 int partial, tree fndecl ATTRIBUTE_UNUSED,
3707 struct args_size *initial_offset_ptr,
3708 struct locate_and_pad_arg_data *locate)
3710 tree sizetree;
3711 enum direction where_pad;
3712 unsigned int boundary;
3713 int reg_parm_stack_space = 0;
3714 int part_size_in_regs;
3716 #ifdef REG_PARM_STACK_SPACE
3717 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3719 /* If we have found a stack parm before we reach the end of the
3720 area reserved for registers, skip that area. */
3721 if (! in_regs)
3723 if (reg_parm_stack_space > 0)
3725 if (initial_offset_ptr->var)
3727 initial_offset_ptr->var
3728 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3729 ssize_int (reg_parm_stack_space));
3730 initial_offset_ptr->constant = 0;
3732 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3733 initial_offset_ptr->constant = reg_parm_stack_space;
3736 #endif /* REG_PARM_STACK_SPACE */
3738 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3740 sizetree
3741 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3742 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3743 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
3744 locate->where_pad = where_pad;
3746 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3747 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3748 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3750 locate->boundary = boundary;
3752 if (SUPPORTS_STACK_ALIGNMENT)
3754 /* stack_alignment_estimated can't change after stack has been
3755 realigned. */
3756 if (crtl->stack_alignment_estimated < boundary)
3758 if (!crtl->stack_realign_processed)
3759 crtl->stack_alignment_estimated = boundary;
3760 else
3762 /* If stack is realigned and stack alignment value
3763 hasn't been finalized, it is OK not to increase
3764 stack_alignment_estimated. The bigger alignment
3765 requirement is recorded in stack_alignment_needed
3766 below. */
3767 gcc_assert (!crtl->stack_realign_finalized
3768 && crtl->stack_realign_needed);
3773 /* Remember if the outgoing parameter requires extra alignment on the
3774 calling function side. */
3775 if (crtl->stack_alignment_needed < boundary)
3776 crtl->stack_alignment_needed = boundary;
3777 if (crtl->preferred_stack_boundary < boundary)
3778 crtl->preferred_stack_boundary = boundary;
3780 #ifdef ARGS_GROW_DOWNWARD
3781 locate->slot_offset.constant = -initial_offset_ptr->constant;
3782 if (initial_offset_ptr->var)
3783 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3784 initial_offset_ptr->var);
3787 tree s2 = sizetree;
3788 if (where_pad != none
3789 && (!host_integerp (sizetree, 1)
3790 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3791 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3792 SUB_PARM_SIZE (locate->slot_offset, s2);
3795 locate->slot_offset.constant += part_size_in_regs;
3797 if (!in_regs
3798 #ifdef REG_PARM_STACK_SPACE
3799 || REG_PARM_STACK_SPACE (fndecl) > 0
3800 #endif
3802 pad_to_arg_alignment (&locate->slot_offset, boundary,
3803 &locate->alignment_pad);
3805 locate->size.constant = (-initial_offset_ptr->constant
3806 - locate->slot_offset.constant);
3807 if (initial_offset_ptr->var)
3808 locate->size.var = size_binop (MINUS_EXPR,
3809 size_binop (MINUS_EXPR,
3810 ssize_int (0),
3811 initial_offset_ptr->var),
3812 locate->slot_offset.var);
3814 /* Pad_below needs the pre-rounded size to know how much to pad
3815 below. */
3816 locate->offset = locate->slot_offset;
3817 if (where_pad == downward)
3818 pad_below (&locate->offset, passed_mode, sizetree);
3820 #else /* !ARGS_GROW_DOWNWARD */
3821 if (!in_regs
3822 #ifdef REG_PARM_STACK_SPACE
3823 || REG_PARM_STACK_SPACE (fndecl) > 0
3824 #endif
3826 pad_to_arg_alignment (initial_offset_ptr, boundary,
3827 &locate->alignment_pad);
3828 locate->slot_offset = *initial_offset_ptr;
3830 #ifdef PUSH_ROUNDING
3831 if (passed_mode != BLKmode)
3832 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3833 #endif
3835 /* Pad_below needs the pre-rounded size to know how much to pad below
3836 so this must be done before rounding up. */
3837 locate->offset = locate->slot_offset;
3838 if (where_pad == downward)
3839 pad_below (&locate->offset, passed_mode, sizetree);
3841 if (where_pad != none
3842 && (!host_integerp (sizetree, 1)
3843 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3844 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3846 ADD_PARM_SIZE (locate->size, sizetree);
3848 locate->size.constant -= part_size_in_regs;
3849 #endif /* ARGS_GROW_DOWNWARD */
3851 #ifdef FUNCTION_ARG_OFFSET
3852 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3853 #endif
3856 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3857 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3859 static void
3860 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3861 struct args_size *alignment_pad)
3863 tree save_var = NULL_TREE;
3864 HOST_WIDE_INT save_constant = 0;
3865 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3866 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3868 #ifdef SPARC_STACK_BOUNDARY_HACK
3869 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3870 the real alignment of %sp. However, when it does this, the
3871 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3872 if (SPARC_STACK_BOUNDARY_HACK)
3873 sp_offset = 0;
3874 #endif
3876 if (boundary > PARM_BOUNDARY)
3878 save_var = offset_ptr->var;
3879 save_constant = offset_ptr->constant;
3882 alignment_pad->var = NULL_TREE;
3883 alignment_pad->constant = 0;
3885 if (boundary > BITS_PER_UNIT)
3887 if (offset_ptr->var)
3889 tree sp_offset_tree = ssize_int (sp_offset);
3890 tree offset = size_binop (PLUS_EXPR,
3891 ARGS_SIZE_TREE (*offset_ptr),
3892 sp_offset_tree);
3893 #ifdef ARGS_GROW_DOWNWARD
3894 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3895 #else
3896 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3897 #endif
3899 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3900 /* ARGS_SIZE_TREE includes constant term. */
3901 offset_ptr->constant = 0;
3902 if (boundary > PARM_BOUNDARY)
3903 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3904 save_var);
3906 else
3908 offset_ptr->constant = -sp_offset +
3909 #ifdef ARGS_GROW_DOWNWARD
3910 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3911 #else
3912 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3913 #endif
3914 if (boundary > PARM_BOUNDARY)
3915 alignment_pad->constant = offset_ptr->constant - save_constant;
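/* Worked example of the constant branch above (illustrative numbers,
   disabled code): with sp_offset == 4 and a 64-bit boundary (8 bytes),
   a constant offset of 18 is biased to 22, rounded up to 24, then
   unbiased again.  */
#if 0
  HOST_WIDE_INT off = -4 + CEIL_ROUND (18 + 4, 8);  /* -4 + 24 == 20 */
#endif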
3920 static void
3921 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3923 if (passed_mode != BLKmode)
3925 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3926 offset_ptr->constant
3927 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3928 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3929 - GET_MODE_SIZE (passed_mode));
3931 else
3933 if (TREE_CODE (sizetree) != INTEGER_CST
3934 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3936 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3937 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3938 /* Add it in. */
3939 ADD_PARM_SIZE (*offset_ptr, s2);
3940 SUB_PARM_SIZE (*offset_ptr, sizetree);
3946 /* True if register REGNO was alive at a place where `setjmp' was
3947 called and was set more than once or is an argument. Such regs may
3948 be clobbered by `longjmp'. */
3950 static bool
3951 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3953 /* There appear to be cases where some local vars never reach the
3954 backend but have bogus regnos. */
3955 if (regno >= max_reg_num ())
3956 return false;
3958 return ((REG_N_SETS (regno) > 1
3959 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3960 && REGNO_REG_SET_P (setjmp_crosses, regno));
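/* User-level illustration of the hazard diagnosed below (a sketch of
   code that -Wclobbered flags; setjmp/longjmp come from <setjmp.h>,
   not from GCC itself; disabled code):  */
#if 0
static int
setjmp_clobber_example (void)
{
  jmp_buf env;
  int v = 1;              /* may be promoted to a call-saved register */
  if (setjmp (env) == 0)
    {
      v = 2;              /* second set: REG_N_SETS (regno) > 1 */
      longjmp (env, 1);
    }
  return v;               /* may read back as either 1 or 2 */
}
#endif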
3963 /* Walk the tree of blocks describing the binding levels within a
3964 function and warn about variables that might be killed by setjmp or
3965 vfork. This is done after flow analysis and before register
3966 allocation, since register allocation will replace the pseudo-regs
3967 with hard regs. */
3969 static void
3970 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3972 tree decl, sub;
3974 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
3976 if (TREE_CODE (decl) == VAR_DECL
3977 && DECL_RTL_SET_P (decl)
3978 && REG_P (DECL_RTL (decl))
3979 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3980 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3981 " %<longjmp%> or %<vfork%>", decl);
3984 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3985 setjmp_vars_warning (setjmp_crosses, sub);
3988 /* Do the appropriate part of setjmp_vars_warning
3989 but for arguments instead of local variables. */
3991 static void
3992 setjmp_args_warning (bitmap setjmp_crosses)
3994 tree decl;
3995 for (decl = DECL_ARGUMENTS (current_function_decl);
3996 decl; decl = DECL_CHAIN (decl))
3997 if (DECL_RTL (decl) != 0
3998 && REG_P (DECL_RTL (decl))
3999 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4000 warning (OPT_Wclobbered,
4001 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4002 decl);
4005 /* Generate warning messages for variables live across setjmp. */
4007 void
4008 generate_setjmp_warnings (void)
4010 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4012 if (n_basic_blocks == NUM_FIXED_BLOCKS
4013 || bitmap_empty_p (setjmp_crosses))
4014 return;
4016 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4017 setjmp_args_warning (setjmp_crosses);
4021 /* Reverse the order of elements in the fragment chain T of blocks,
4022 and return the new head of the chain (old last element). */
4024 static tree
4025 block_fragments_nreverse (tree t)
4027 tree prev = 0, block, next;
4028 for (block = t; block; block = next)
4030 next = BLOCK_FRAGMENT_CHAIN (block);
4031 BLOCK_FRAGMENT_CHAIN (block) = prev;
4032 prev = block;
4034 return prev;
4037 /* Reverse the order of elements in the chain T of blocks,
4038 and return the new head of the chain (old last element).
4039 Also do the same on subblocks and reverse the order of elements
4040 in BLOCK_FRAGMENT_CHAIN as well. */
4042 static tree
4043 blocks_nreverse_all (tree t)
4045 tree prev = 0, block, next;
4046 for (block = t; block; block = next)
4048 next = BLOCK_CHAIN (block);
4049 BLOCK_CHAIN (block) = prev;
4050 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4051 if (BLOCK_FRAGMENT_CHAIN (block)
4052 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4053 BLOCK_FRAGMENT_CHAIN (block)
4054 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4055 prev = block;
4057 return prev;
4061 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4062 and create duplicate blocks. */
4063 /* ??? Need an option to either create block fragments or to create
4064 abstract origin duplicates of a source block. It really depends
4065 on what optimization has been performed. */
4067 void
4068 reorder_blocks (void)
4070 tree block = DECL_INITIAL (current_function_decl);
4071 VEC(tree,heap) *block_stack;
4073 if (block == NULL_TREE)
4074 return;
4076 block_stack = VEC_alloc (tree, heap, 10);
4078 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4079 clear_block_marks (block);
4081 /* Prune the old trees away, so that they don't get in the way. */
4082 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4083 BLOCK_CHAIN (block) = NULL_TREE;
4085 /* Recreate the block tree from the note nesting. */
4086 reorder_blocks_1 (get_insns (), block, &block_stack);
4087 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4089 VEC_free (tree, heap, block_stack);
4092 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4094 void
4095 clear_block_marks (tree block)
4097 while (block)
4099 TREE_ASM_WRITTEN (block) = 0;
4100 clear_block_marks (BLOCK_SUBBLOCKS (block));
4101 block = BLOCK_CHAIN (block);
4105 static void
4106 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
4108 rtx insn;
4110 for (insn = insns; insn; insn = NEXT_INSN (insn))
4112 if (NOTE_P (insn))
4114 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4116 tree block = NOTE_BLOCK (insn);
4117 tree origin;
4119 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4120 origin = block;
4122 /* If we have seen this block before, that means it now
4123 spans multiple address regions. Create a new fragment. */
4124 if (TREE_ASM_WRITTEN (block))
4126 tree new_block = copy_node (block);
4128 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4129 BLOCK_FRAGMENT_CHAIN (new_block)
4130 = BLOCK_FRAGMENT_CHAIN (origin);
4131 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4133 NOTE_BLOCK (insn) = new_block;
4134 block = new_block;
4137 BLOCK_SUBBLOCKS (block) = 0;
4138 TREE_ASM_WRITTEN (block) = 1;
4139 /* When there's only one block for the entire function,
4140 current_block == block and we mustn't do this; it
4141 would cause infinite recursion. */
4142 if (block != current_block)
4144 if (block != origin)
4145 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
4147 BLOCK_SUPERCONTEXT (block) = current_block;
4148 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4149 BLOCK_SUBBLOCKS (current_block) = block;
4150 current_block = origin;
4152 VEC_safe_push (tree, heap, *p_block_stack, block);
4154 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4156 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
4157 current_block = BLOCK_SUPERCONTEXT (current_block);
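/* Example of the fragment mechanism above: after basic-block
   reordering, the notes for one scope B can appear twice,
     BLOCK_BEG B ... BLOCK_END B ... BLOCK_BEG B ... BLOCK_END B
   The second occurrence gets a copy_node fragment F with
   BLOCK_FRAGMENT_ORIGIN (F) == B, chained onto B's
   BLOCK_FRAGMENT_CHAIN, so the debug info can describe a scope that
   occupies two disjoint address ranges.  */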
4163 /* Reverse the order of elements in the chain T of blocks,
4164 and return the new head of the chain (old last element). */
4166 tree
4167 blocks_nreverse (tree t)
4169 tree prev = 0, block, next;
4170 for (block = t; block; block = next)
4172 next = BLOCK_CHAIN (block);
4173 BLOCK_CHAIN (block) = prev;
4174 prev = block;
4176 return prev;
4179 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4180 by modifying the last node in chain 1 to point to chain 2. */
4182 tree
4183 block_chainon (tree op1, tree op2)
4185 tree t1;
4187 if (!op1)
4188 return op2;
4189 if (!op2)
4190 return op1;
4192 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4193 continue;
4194 BLOCK_CHAIN (t1) = op2;
4196 #ifdef ENABLE_TREE_CHECKING
4198 tree t2;
4199 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4200 gcc_assert (t2 != t1);
4202 #endif
4204 return op1;
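/* Usage sketch (hypothetical BLOCK chains b1 and b2; disabled code): */
#if 0
  tree merged = block_chainon (b1, b2);
  /* The last BLOCK_CHAIN of b1 now points to b2.  Passing a chain
     that already reaches back into b1 would create a cycle, which
     the ENABLE_TREE_CHECKING loop above asserts against.  */
#endif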
4207 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4208 non-NULL, list them all into VECTOR, in a depth-first preorder
4209 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4210 blocks. */
4212 static int
4213 all_blocks (tree block, tree *vector)
4215 int n_blocks = 0;
4217 while (block)
4219 TREE_ASM_WRITTEN (block) = 0;
4221 /* Record this block. */
4222 if (vector)
4223 vector[n_blocks] = block;
4225 ++n_blocks;
4227 /* Record the subblocks, and their subblocks... */
4228 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4229 vector ? vector + n_blocks : 0);
4230 block = BLOCK_CHAIN (block);
4233 return n_blocks;
4236 /* Return a vector containing all the blocks rooted at BLOCK. The
4237 number of elements in the vector is stored in N_BLOCKS_P. The
4238 vector is dynamically allocated; it is the caller's responsibility
4239 to call `free' on the pointer returned. */
4241 static tree *
4242 get_block_vector (tree block, int *n_blocks_p)
4244 tree *block_vector;
4246 *n_blocks_p = all_blocks (block, NULL);
4247 block_vector = XNEWVEC (tree, *n_blocks_p);
4248 all_blocks (block, block_vector);
4250 return block_vector;
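/* The classic two-pass idiom used above, in miniature (illustrative
   sketch; count_items and item are hypothetical names; disabled code): */
#if 0
  int n = count_items (root, NULL);   /* pass 1: count only */
  item **v = XNEWVEC (item *, n);     /* allocate the exact size */
  count_items (root, v);              /* pass 2: fill the vector */
#endif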
4253 static GTY(()) int next_block_index = 2;
4255 /* Set BLOCK_NUMBER for all the blocks in FN. */
4257 void
4258 number_blocks (tree fn)
4260 int i;
4261 int n_blocks;
4262 tree *block_vector;
4264 /* For SDB and XCOFF debugging output, we start numbering the blocks
4265 from 1 within each function, rather than keeping a running
4266 count. */
4267 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4268 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4269 next_block_index = 1;
4270 #endif
4272 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4274 /* The top-level BLOCK isn't numbered at all. */
4275 for (i = 1; i < n_blocks; ++i)
4276 /* We number the blocks from two. */
4277 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4279 free (block_vector);
4281 return;
4284 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4286 DEBUG_FUNCTION tree
4287 debug_find_var_in_block_tree (tree var, tree block)
4289 tree t;
4291 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4292 if (t == var)
4293 return block;
4295 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4297 tree ret = debug_find_var_in_block_tree (var, t);
4298 if (ret)
4299 return ret;
4302 return NULL_TREE;
4305 /* Keep track of whether we're in a dummy function context. If we are,
4306 we don't want to invoke the set_current_function hook, because we'll
4307 get into trouble if the hook calls target_reinit () recursively or
4308 when initialization is not yet complete. */
4310 static bool in_dummy_function;
4312 /* Invoke the target hook when setting cfun. Update the optimization options
4313 if the function uses different options than the default. */
4315 static void
4316 invoke_set_current_function_hook (tree fndecl)
4318 if (!in_dummy_function)
4320 tree opts = ((fndecl)
4321 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4322 : optimization_default_node);
4324 if (!opts)
4325 opts = optimization_default_node;
4327 /* Change optimization options if needed. */
4328 if (optimization_current_node != opts)
4330 optimization_current_node = opts;
4331 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4334 targetm.set_current_function (fndecl);
4338 /* cfun should never be set directly; use this function. */
4340 void
4341 set_cfun (struct function *new_cfun)
4343 if (cfun != new_cfun)
4345 cfun = new_cfun;
4346 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4350 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4352 static VEC(function_p,heap) *cfun_stack;
4354 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
4356 void
4357 push_cfun (struct function *new_cfun)
4359 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4360 set_cfun (new_cfun);
4363 /* Pop cfun from the stack. */
4365 void
4366 pop_cfun (void)
4368 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
4369 set_cfun (new_cfun);
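/* Typical save/restore pattern for temporarily entering another
   function's context (sketch; disabled code): */
#if 0
  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  /* ... examine or emit IL for fndecl with cfun/crtl set up ... */
  pop_cfun ();
#endif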
4372 /* Return the current value of funcdef_no and increment it. */
4373 int
4374 get_next_funcdef_no (void)
4376 return funcdef_no++;
4379 /* Return the current value of funcdef_no. */
4380 int
4381 get_last_funcdef_no (void)
4383 return funcdef_no;
4386 /* Allocate a function structure for FNDECL and set its contents
4387 to the defaults. Set cfun to the newly-allocated object.
4388 Some of the helper functions invoked during initialization assume
4389 that cfun has already been set. Therefore, assign the new object
4390 directly into cfun and invoke the back end hook explicitly at the
4391 very end, rather than initializing a temporary and calling set_cfun
4392 on it.
4394 ABSTRACT_P is true if this is a function that will never be seen by
4395 the middle-end. Such functions are front-end concepts (like C++
4396 function templates) that do not correspond directly to functions
4397 placed in object files. */
4399 void
4400 allocate_struct_function (tree fndecl, bool abstract_p)
4402 tree result;
4403 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4405 cfun = ggc_alloc_cleared_function ();
4407 init_eh_for_function ();
4409 if (init_machine_status)
4410 cfun->machine = (*init_machine_status) ();
4412 #ifdef OVERRIDE_ABI_FORMAT
4413 OVERRIDE_ABI_FORMAT (fndecl);
4414 #endif
4416 invoke_set_current_function_hook (fndecl);
4418 if (fndecl != NULL_TREE)
4420 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4421 cfun->decl = fndecl;
4422 current_function_funcdef_no = get_next_funcdef_no ();
4424 result = DECL_RESULT (fndecl);
4425 if (!abstract_p && aggregate_value_p (result, fndecl))
4427 #ifdef PCC_STATIC_STRUCT_RETURN
4428 cfun->returns_pcc_struct = 1;
4429 #endif
4430 cfun->returns_struct = 1;
4433 cfun->stdarg = stdarg_p (fntype);
4435 /* Assume all registers in stdarg functions need to be saved. */
4436 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4437 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4439 /* ??? This could be set on a per-function basis by the front-end
4440 but is this worth the hassle? */
4441 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4445 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4446 instead of just setting it. */
4448 void
4449 push_struct_function (tree fndecl)
4451 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4452 allocate_struct_function (fndecl, false);
4455 /* Reset crtl and other non-struct-function variables to defaults as
4456 appropriate for emitting rtl at the start of a function. */
4458 static void
4459 prepare_function_start (void)
4461 gcc_assert (!crtl->emit.x_last_insn);
4462 init_temp_slots ();
4463 init_emit ();
4464 init_varasm_status ();
4465 init_expr ();
4466 default_rtl_profile ();
4468 if (flag_stack_usage_info)
4470 cfun->su = ggc_alloc_cleared_stack_usage ();
4471 cfun->su->static_stack_size = -1;
4474 cse_not_expected = ! optimize;
4476 /* Caller save not needed yet. */
4477 caller_save_needed = 0;
4479 /* We haven't done register allocation yet. */
4480 reg_renumber = 0;
4482 /* Indicate that we have not instantiated virtual registers yet. */
4483 virtuals_instantiated = 0;
4485 /* Indicate that we want CONCATs now. */
4486 generating_concat_p = 1;
4488 /* Indicate we have no need of a frame pointer yet. */
4489 frame_pointer_needed = 0;
4492 /* Initialize the rtl expansion mechanism so that we can do simple things
4493 like generate sequences. This is used to provide a context during global
4494 initialization of some passes. You must call expand_dummy_function_end
4495 to exit this context. */
4497 void
4498 init_dummy_function_start (void)
4500 gcc_assert (!in_dummy_function);
4501 in_dummy_function = true;
4502 push_struct_function (NULL_TREE);
4503 prepare_function_start ();
4506 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4507 and initialize static variables for generating RTL for the statements
4508 of the function. */
4510 void
4511 init_function_start (tree subr)
4513 if (subr && DECL_STRUCT_FUNCTION (subr))
4514 set_cfun (DECL_STRUCT_FUNCTION (subr));
4515 else
4516 allocate_struct_function (subr, false);
4517 prepare_function_start ();
4518 decide_function_section (subr);
4520 /* Warn if this value is an aggregate type,
4521 regardless of which calling convention we are using for it. */
4522 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4523 warning (OPT_Waggregate_return, "function returns an aggregate");
4526 /* Make sure all values used by the optimization passes have sane defaults. */
4527 unsigned int
4528 init_function_for_compilation (void)
4530 reg_renumber = 0;
4531 return 0;
4534 struct rtl_opt_pass pass_init_function =
4537 RTL_PASS,
4538 "*init_function", /* name */
4539 NULL, /* gate */
4540 init_function_for_compilation, /* execute */
4541 NULL, /* sub */
4542 NULL, /* next */
4543 0, /* static_pass_number */
4544 TV_NONE, /* tv_id */
4545 0, /* properties_required */
4546 0, /* properties_provided */
4547 0, /* properties_destroyed */
4548 0, /* todo_flags_start */
4549 0 /* todo_flags_finish */
4554 void
4555 expand_main_function (void)
4557 #if (defined(INVOKE__main) \
4558 || (!defined(HAS_INIT_SECTION) \
4559 && !defined(INIT_SECTION_ASM_OP) \
4560 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4561 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4562 #endif
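/* Effect (sketch): on targets without an init section, every main
   effectively begins with
     __main ();
   and libgcc's __main runs the program's static constructors before
   any user code executes.  */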
4565 /* Expand code to initialize the stack_protect_guard. This is invoked at
4566 the beginning of a function to be protected. */
4568 #ifndef HAVE_stack_protect_set
4569 # define HAVE_stack_protect_set 0
4570 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4571 #endif
4573 void
4574 stack_protect_prologue (void)
4576 tree guard_decl = targetm.stack_protect_guard ();
4577 rtx x, y;
4579 x = expand_normal (crtl->stack_protect_guard);
4580 y = expand_normal (guard_decl);
4582 /* Allow the target to copy from Y to X without leaking Y into a
4583 register. */
4584 if (HAVE_stack_protect_set)
4586 rtx insn = gen_stack_protect_set (x, y);
4587 if (insn)
4589 emit_insn (insn);
4590 return;
4594 /* Otherwise do a straight move. */
4595 emit_move_insn (x, y);
4598 /* Expand code to verify the stack_protect_guard. This is invoked at
4599 the end of a function to be protected. */
4601 #ifndef HAVE_stack_protect_test
4602 # define HAVE_stack_protect_test 0
4603 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4604 #endif
4606 void
4607 stack_protect_epilogue (void)
4609 tree guard_decl = targetm.stack_protect_guard ();
4610 rtx label = gen_label_rtx ();
4611 rtx x, y, tmp;
4613 x = expand_normal (crtl->stack_protect_guard);
4614 y = expand_normal (guard_decl);
4616 /* Allow the target to compare Y with X without leaking either into
4617 a register. */
4618 switch (HAVE_stack_protect_test != 0)
4620 case 1:
4621 tmp = gen_stack_protect_test (x, y, label);
4622 if (tmp)
4624 emit_insn (tmp);
4625 break;
4627 /* FALLTHRU */
4629 default:
4630 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4631 break;
4634 /* The noreturn predictor has been moved to the tree level. The rtl-level
4635 predictors estimate this branch about 20%, which isn't enough to get
4636 things moved out of line. Since this is the only extant case of adding
4637 a noreturn function at the rtl level, it doesn't seem worth doing
4638 anything except adding the prediction by hand. */
4639 tmp = get_last_insn ();
4640 if (JUMP_P (tmp))
4641 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4643 expand_expr_stmt (targetm.stack_protect_fail ());
4644 emit_label (label);
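/* What the prologue/epilogue pair emits, as C-level pseudo-code
   (illustrative; __stack_chk_guard and __stack_chk_fail are the usual
   ELF/glibc defaults, not mandated by this file; disabled code):  */
#if 0
  void *canary = __stack_chk_guard;   /* stack_protect_prologue */
  /* ... function body; an overrun of a local buffer would have to
     walk over the canary slot to reach the return address ... */
  if (canary != __stack_chk_guard)    /* stack_protect_epilogue */
    __stack_chk_fail ();              /* noreturn failure hook */
#endif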
4647 /* Start the RTL for a new function, and set variables used for
4648 emitting RTL.
4649 SUBR is the FUNCTION_DECL node. */
4653 void
4654 expand_function_start (tree subr)
4656 /* Make sure volatile mem refs aren't considered
4657 valid operands of arithmetic insns. */
4658 init_recog_no_volatile ();
4660 crtl->profile
4661 = (profile_flag
4662 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4664 crtl->limit_stack
4665 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4667 /* Make the label for return statements to jump to. Do not special
4668 case machines with special return instructions -- they will be
4669 handled later during jump, ifcvt, or epilogue creation. */
4670 return_label = gen_label_rtx ();
4672 /* Initialize rtx used to return the value. */
4673 /* Do this before assign_parms so that we copy the struct value address
4674 before any library calls that assign parms might generate. */
4676 /* Decide whether to return the value in memory or in a register. */
4677 if (aggregate_value_p (DECL_RESULT (subr), subr))
4679 /* Returning something that won't go in a register. */
4680 rtx value_address = 0;
4682 #ifdef PCC_STATIC_STRUCT_RETURN
4683 if (cfun->returns_pcc_struct)
4685 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4686 value_address = assemble_static_space (size);
4688 else
4689 #endif
4691 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4692 /* Expect to be passed the address of a place to store the value.
4693 If it is passed as an argument, assign_parms will take care of
4694 it. */
4695 if (sv)
4697 value_address = gen_reg_rtx (Pmode);
4698 emit_move_insn (value_address, sv);
4701 if (value_address)
4703 rtx x = value_address;
4704 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4706 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4707 set_mem_attributes (x, DECL_RESULT (subr), 1);
4709 SET_DECL_RTL (DECL_RESULT (subr), x);
4712 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4713 /* If return mode is void, this decl rtl should not be used. */
4714 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4715 else
4717 /* Compute the return value into a pseudo reg, which we will copy
4718 into the true return register after the cleanups are done. */
4719 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4720 if (TYPE_MODE (return_type) != BLKmode
4721 && targetm.calls.return_in_msb (return_type))
4722 /* expand_function_end will insert the appropriate padding in
4723 this case. Use the return value's natural (unpadded) mode
4724 within the function proper. */
4725 SET_DECL_RTL (DECL_RESULT (subr),
4726 gen_reg_rtx (TYPE_MODE (return_type)));
4727 else
4729 /* In order to figure out what mode to use for the pseudo, we
4730 figure out what the mode of the eventual return register will
4731 actually be, and use that. */
4732 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4734 /* Structures that are returned in registers are not
4735 aggregate_value_p, so we may see a PARALLEL or a REG. */
4736 if (REG_P (hard_reg))
4737 SET_DECL_RTL (DECL_RESULT (subr),
4738 gen_reg_rtx (GET_MODE (hard_reg)));
4739 else
4741 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4742 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4746 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4747 result to the real return register(s). */
4748 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4751 /* Initialize rtx for parameters and local variables.
4752 In some cases this requires emitting insns. */
4753 assign_parms (subr);
4755 /* If function gets a static chain arg, store it. */
4756 if (cfun->static_chain_decl)
4758 tree parm = cfun->static_chain_decl;
4759 rtx local, chain, insn;
4761 local = gen_reg_rtx (Pmode);
4762 chain = targetm.calls.static_chain (current_function_decl, true);
4764 set_decl_incoming_rtl (parm, chain, false);
4765 SET_DECL_RTL (parm, local);
4766 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4768 insn = emit_move_insn (local, chain);
4770 /* Mark the register as eliminable, similar to parameters. */
4771 if (MEM_P (chain)
4772 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
4773 set_unique_reg_note (insn, REG_EQUIV, chain);
4776 /* If the function receives a non-local goto, then store the
4777 bits we need to restore the frame pointer. */
4778 if (cfun->nonlocal_goto_save_area)
4780 tree t_save;
4781 rtx r_save;
4783 /* ??? We need to do this save early. Unfortunately, this point is
4784 reached before the frame variable gets declared. Help out... */
4785 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4786 if (!DECL_RTL_SET_P (var))
4787 expand_decl (var);
4789 t_save = build4 (ARRAY_REF, ptr_type_node,
4790 cfun->nonlocal_goto_save_area,
4791 integer_zero_node, NULL_TREE, NULL_TREE);
4792 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4793 r_save = convert_memory_address (Pmode, r_save);
4795 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4796 update_nonlocal_goto_save_area ();
4799 /* The following was moved from init_function_start.
4800 The move is supposed to make sdb output more accurate. */
4801 /* Indicate the beginning of the function body,
4802 as opposed to parm setup. */
4803 emit_note (NOTE_INSN_FUNCTION_BEG);
4805 gcc_assert (NOTE_P (get_last_insn ()));
4807 parm_birth_insn = get_last_insn ();
4809 if (crtl->profile)
4811 #ifdef PROFILE_HOOK
4812 PROFILE_HOOK (current_function_funcdef_no);
4813 #endif
4816 /* If we are doing generic stack checking, the probe should go here. */
4817 if (flag_stack_check == GENERIC_STACK_CHECK)
4818 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4820 /* Make sure there is a line number after the function entry setup code. */
4821 force_next_line_note ();
4824 /* Undo the effects of init_dummy_function_start. */
4825 void
4826 expand_dummy_function_end (void)
4828 gcc_assert (in_dummy_function);
4830 /* End any sequences that failed to be closed due to syntax errors. */
4831 while (in_sequence_p ())
4832 end_sequence ();
4834 /* Outside function body, can't compute type's actual size
4835 until next function's body starts. */
4837 free_after_parsing (cfun);
4838 free_after_compilation (cfun);
4839 pop_cfun ();
4840 in_dummy_function = false;
4843 /* Call DOIT for each hard register used as a return value from
4844 the current function. */
4846 void
4847 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4849 rtx outgoing = crtl->return_rtx;
4851 if (! outgoing)
4852 return;
4854 if (REG_P (outgoing))
4855 (*doit) (outgoing, arg);
4856 else if (GET_CODE (outgoing) == PARALLEL)
4858 int i;
4860 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4862 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4864 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4865 (*doit) (x, arg);
4870 static void
4871 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4873 emit_clobber (reg);
4876 void
4877 clobber_return_register (void)
4879 diddle_return_value (do_clobber_return_reg, NULL);
4881 /* In case we use a pseudo to return the value, clobber it too. */
4882 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4884 tree decl_result = DECL_RESULT (current_function_decl);
4885 rtx decl_rtl = DECL_RTL (decl_result);
4886 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4888 do_clobber_return_reg (decl_rtl, NULL);
4893 static void
4894 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4896 emit_use (reg);
4899 static void
4900 use_return_register (void)
4902 diddle_return_value (do_use_return_reg, NULL);
4905 /* Possibly warn about unused parameters. */
4906 void
4907 do_warn_unused_parameter (tree fn)
4909 tree decl;
4911 for (decl = DECL_ARGUMENTS (fn);
4912 decl; decl = DECL_CHAIN (decl))
4913 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4914 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4915 && !TREE_NO_WARNING (decl))
4916 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
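/* Example of what the loop above diagnoses (user code sketch):
     int f (int used, int unused) { return used; }
   With -Wunused-parameter this warns: unused parameter 'unused'.  */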
4919 static GTY(()) rtx initial_trampoline;
4921 /* Generate RTL for the end of the current function. */
4923 void
4924 expand_function_end (void)
4926 rtx clobber_after;
4928 /* If arg_pointer_save_area was referenced only from a nested
4929 function, we will not have initialized it yet. Do that now. */
4930 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4931 get_arg_pointer_save_area ();
4933 /* If we are doing generic stack checking and this function makes calls,
4934 do a stack probe at the start of the function to ensure we have enough
4935 space for another stack frame. */
4936 if (flag_stack_check == GENERIC_STACK_CHECK)
4938 rtx insn, seq;
4940 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4941 if (CALL_P (insn))
4943 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
4944 start_sequence ();
4945 if (STACK_CHECK_MOVING_SP)
4946 anti_adjust_stack_and_probe (max_frame_size, true);
4947 else
4948 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
4949 seq = get_insns ();
4950 end_sequence ();
4951 set_insn_locators (seq, prologue_locator);
4952 emit_insn_before (seq, stack_check_probe_note);
4953 break;
4957 /* End any sequences that failed to be closed due to syntax errors. */
4958 while (in_sequence_p ())
4959 end_sequence ();
4961 clear_pending_stack_adjust ();
4962 do_pending_stack_adjust ();
4964 /* Output a line number for the end of the function.
4965 SDB depends on this. */
4966 force_next_line_note ();
4967 set_curr_insn_source_location (input_location);
4969 /* Before the return label (if any), clobber the return
4970 registers so that they are not propagated live to the rest of
4971 the function. This can only happen with functions that drop
4972 through; if there had been a return statement, there would
4973 have either been a return rtx, or a jump to the return label.
4975 We delay actual code generation until after the current_function_value_rtx
4976 is computed. */
4977 clobber_after = get_last_insn ();
4979 /* Output the label for the actual return from the function. */
4980 emit_label (return_label);
4982 if (targetm.except_unwind_info (&global_options) == UI_SJLJ)
4984 /* Let except.c know where it should emit the call to unregister
4985 the function context for sjlj exceptions. */
4986 if (flag_exceptions)
4987 sjlj_emit_function_exit_after (get_last_insn ());
4989 else
4991 /* We want to ensure that instructions that may trap are not
4992 moved into the epilogue by scheduling, because we don't
4993 always emit unwind information for the epilogue. */
4994 if (cfun->can_throw_non_call_exceptions)
4995 emit_insn (gen_blockage ());
4998 /* If this is an implementation of throw, do what's necessary to
4999 communicate between __builtin_eh_return and the epilogue. */
5000 expand_eh_return ();
5002 /* If scalar return value was computed in a pseudo-reg, or was a named
5003 return value that got dumped to the stack, copy that to the hard
5004 return register. */
5005 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5007 tree decl_result = DECL_RESULT (current_function_decl);
5008 rtx decl_rtl = DECL_RTL (decl_result);
5010 if (REG_P (decl_rtl)
5011 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5012 : DECL_REGISTER (decl_result))
5014 rtx real_decl_rtl = crtl->return_rtx;
5016 /* This should be set in assign_parms. */
5017 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5019 /* If this is a BLKmode structure being returned in registers,
5020 then use the mode computed in expand_return. Note that if
5021 decl_rtl is memory, then its mode may have been changed,
5022 but that crtl->return_rtx has not. */
5023 if (GET_MODE (real_decl_rtl) == BLKmode)
5024 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5026 /* If a non-BLKmode return value should be padded at the least
5027 significant end of the register, shift it left by the appropriate
5028 amount. BLKmode results are handled using the group load/store
5029 machinery. */
5030 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5031 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5033 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5034 REGNO (real_decl_rtl)),
5035 decl_rtl);
5036 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5038 /* If a named return value dumped decl_return to memory, then
5039 we may need to re-do the PROMOTE_MODE signed/unsigned
5040 extension. */
5041 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5043 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5044 promote_function_mode (TREE_TYPE (decl_result),
5045 GET_MODE (decl_rtl), &unsignedp,
5046 TREE_TYPE (current_function_decl), 1);
5048 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5050 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5052 /* If expand_function_start has created a PARALLEL for decl_rtl,
5053 move the result to the real return registers. Otherwise, do
5054 a group load from decl_rtl for a named return. */
5055 if (GET_CODE (decl_rtl) == PARALLEL)
5056 emit_group_move (real_decl_rtl, decl_rtl);
5057 else
5058 emit_group_load (real_decl_rtl, decl_rtl,
5059 TREE_TYPE (decl_result),
5060 int_size_in_bytes (TREE_TYPE (decl_result)));
5062 /* In the case of complex integer modes smaller than a word, we'll
5063 need to generate some non-trivial bitfield insertions. Do that
5064 on a pseudo and not the hard register. */
5065 else if (GET_CODE (decl_rtl) == CONCAT
5066 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5067 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5069 int old_generating_concat_p;
5070 rtx tmp;
5072 old_generating_concat_p = generating_concat_p;
5073 generating_concat_p = 0;
5074 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5075 generating_concat_p = old_generating_concat_p;
5077 emit_move_insn (tmp, decl_rtl);
5078 emit_move_insn (real_decl_rtl, tmp);
5080 else
5081 emit_move_insn (real_decl_rtl, decl_rtl);
5085 /* If returning a structure, arrange to return the address of the value
5086 in a place where debuggers expect to find it.
5088 If returning a structure PCC style,
5089 the caller also depends on this value.
5090 And cfun->returns_pcc_struct is not necessarily set. */
5091 if (cfun->returns_struct
5092 || cfun->returns_pcc_struct)
5094 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5095 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5096 rtx outgoing;
5098 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5099 type = TREE_TYPE (type);
5100 else
5101 value_address = XEXP (value_address, 0);
5103 outgoing = targetm.calls.function_value (build_pointer_type (type),
5104 current_function_decl, true);
5106 /* Mark this as a function return value so integrate will delete the
5107 assignment and USE below when inlining this function. */
5108 REG_FUNCTION_VALUE_P (outgoing) = 1;
5110 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5111 value_address = convert_memory_address (GET_MODE (outgoing),
5112 value_address);
5114 emit_move_insn (outgoing, value_address);
5116 /* Show return register used to hold result (in this case the address
5117 of the result). */
5118 crtl->return_rtx = outgoing;
5121 /* Emit the actual code to clobber return register. */
5123 rtx seq;
5125 start_sequence ();
5126 clobber_return_register ();
5127 seq = get_insns ();
5128 end_sequence ();
5130 emit_insn_after (seq, clobber_after);
5133 /* Output the label for the naked return from the function. */
5134 if (naked_return_label)
5135 emit_label (naked_return_label);
5137 /* @@@ This is a kludge. We want to ensure that instructions that
5138 may trap are not moved into the epilogue by scheduling, because
5139 we don't always emit unwind information for the epilogue. */
5140 if (cfun->can_throw_non_call_exceptions
5141 && targetm.except_unwind_info (&global_options) != UI_SJLJ)
5142 emit_insn (gen_blockage ());
5144 /* If stack protection is enabled for this function, check the guard. */
5145 if (crtl->stack_protect_guard)
5146 stack_protect_epilogue ();
5148 /* If we had calls to alloca, and this machine needs
5149 an accurate stack pointer to exit the function,
5150 insert some code to save and restore the stack pointer. */
5151 if (! EXIT_IGNORE_STACK
5152 && cfun->calls_alloca)
5154 rtx tem = 0, seq;
5156 start_sequence ();
5157 emit_stack_save (SAVE_FUNCTION, &tem);
5158 seq = get_insns ();
5159 end_sequence ();
5160 emit_insn_before (seq, parm_birth_insn);
5162 emit_stack_restore (SAVE_FUNCTION, tem);
5165 /* ??? This should no longer be necessary since stupid is no longer with
5166 us, but there are some parts of the compiler (e.g. reload_combine, and
5167 sh mach_dep_reorg) that still try to compute their own lifetime info
5168 instead of using the general framework. */
5169 use_return_register ();
5172 rtx
5173 get_arg_pointer_save_area (void)
5175 rtx ret = arg_pointer_save_area;
5177 if (! ret)
5179 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5180 arg_pointer_save_area = ret;
5183 if (! crtl->arg_pointer_save_area_init)
5185 rtx seq;
5187 /* Save the arg pointer at the beginning of the function. The
5188 generated stack slot may not be a valid memory address, so we
5189 have to check it and fix it if necessary. */
5190 start_sequence ();
5191 emit_move_insn (validize_mem (ret),
5192 crtl->args.internal_arg_pointer);
5193 seq = get_insns ();
5194 end_sequence ();
5196 push_topmost_sequence ();
5197 emit_insn_after (seq, entry_of_function ());
5198 pop_topmost_sequence ();
5200 crtl->arg_pointer_save_area_init = true;
5203 return ret;
5206 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5207 for the first time. */
5209 static void
5210 record_insns (rtx insns, rtx end, htab_t *hashp)
5212 rtx tmp;
5213 htab_t hash = *hashp;
5215 if (hash == NULL)
5216 *hashp = hash
5217 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5219 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5221 void **slot = htab_find_slot (hash, tmp, INSERT);
5222 gcc_assert (*slot == NULL);
5223 *slot = tmp;
5227 /* INSN has been duplicated, or replaced by COPY, perhaps by duplicating a
5228 basic block, by splitting, or by peepholes. If INSN is a prologue or epilogue
5229 insn, then record COPY as well. */
5231 void
5232 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5234 htab_t hash;
5235 void **slot;
5237 hash = epilogue_insn_hash;
5238 if (!hash || !htab_find (hash, insn))
5240 hash = prologue_insn_hash;
5241 if (!hash || !htab_find (hash, insn))
5242 return;
5245 slot = htab_find_slot (hash, copy, INSERT);
5246 gcc_assert (*slot == NULL);
5247 *slot = copy;
5250 /* Set the locator of the insn chain starting at INSN to LOC. */
5251 static void
5252 set_insn_locators (rtx insn, int loc)
5254 while (insn != NULL_RTX)
5256 if (INSN_P (insn))
5257 INSN_LOCATOR (insn) = loc;
5258 insn = NEXT_INSN (insn);
5262 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5263 we can be running after reorg, SEQUENCE rtl is possible. */
5265 static bool
5266 contains (const_rtx insn, htab_t hash)
5268 if (hash == NULL)
5269 return false;
5271 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5273 int i;
5274 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5275 if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
5276 return true;
5277 return false;
5280 return htab_find (hash, insn) != NULL;
5283 int
5284 prologue_epilogue_contains (const_rtx insn)
5286 if (contains (insn, prologue_insn_hash))
5287 return 1;
5288 if (contains (insn, epilogue_insn_hash))
5289 return 1;
5290 return 0;
5293 #ifdef HAVE_return
5294 /* Insert use of return register before the end of BB. */
5296 static void
5297 emit_use_return_register_into_block (basic_block bb)
5299 rtx seq;
5300 start_sequence ();
5301 use_return_register ();
5302 seq = get_insns ();
5303 end_sequence ();
5304 emit_insn_before (seq, BB_END (bb));
5307 /* Insert gen_return at the end of block BB. This also means updating
5308 block_for_insn appropriately. */
5310 static void
5311 emit_return_into_block (basic_block bb)
5313 emit_jump_insn_after (gen_return (), BB_END (bb));
5315 #endif /* HAVE_return */
5317 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5318 this into place with notes indicating where the prologue ends and where
5319 the epilogue begins. Update the basic block information when possible. */
5321 static void
5322 thread_prologue_and_epilogue_insns (void)
5324 bool inserted;
5325 rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
5326 edge entry_edge, e;
5327 edge_iterator ei;
5329 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
5331 inserted = false;
5332 seq = NULL_RTX;
5333 epilogue_end = NULL_RTX;
5335 /* Can't deal with multiple successors of the entry block at the
5336 moment. Function should always have at least one entry
5337 point. */
5338 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5339 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
5341 if (flag_split_stack
5342 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5343 == NULL))
5345 #ifndef HAVE_split_stack_prologue
5346 gcc_unreachable ();
5347 #else
5348 gcc_assert (HAVE_split_stack_prologue);
5350 start_sequence ();
5351 emit_insn (gen_split_stack_prologue ());
5352 seq = get_insns ();
5353 end_sequence ();
5355 record_insns (seq, NULL, &prologue_insn_hash);
5356 set_insn_locators (seq, prologue_locator);
5358 insert_insn_on_edge (seq, entry_edge);
5359 inserted = true;
5360 #endif
5363 #ifdef HAVE_prologue
5364 if (HAVE_prologue)
5366 start_sequence ();
5367 seq = gen_prologue ();
5368 emit_insn (seq);
5370 /* Insert an explicit USE for the frame pointer
5371 if the profiling is on and the frame pointer is required. */
5372 if (crtl->profile && frame_pointer_needed)
5373 emit_use (hard_frame_pointer_rtx);
5375 /* Retain a map of the prologue insns. */
5376 record_insns (seq, NULL, &prologue_insn_hash);
5377 emit_note (NOTE_INSN_PROLOGUE_END);
5379 /* Ensure that instructions are not moved into the prologue when
5380 profiling is on. The call to the profiling routine can be
5381 emitted within the live range of a call-clobbered register. */
5382 if (!targetm.profile_before_prologue () && crtl->profile)
5383 emit_insn (gen_blockage ());
5385 seq = get_insns ();
5386 end_sequence ();
5387 set_insn_locators (seq, prologue_locator);
5389 insert_insn_on_edge (seq, entry_edge);
5390 inserted = true;
5392 #endif
5394 /* If the exit block has no non-fake predecessors, we don't need
5395 an epilogue. */
5396 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5397 if ((e->flags & EDGE_FAKE) == 0)
5398 break;
5399 if (e == NULL)
5400 goto epilogue_done;
5402 rtl_profile_for_bb (EXIT_BLOCK_PTR);
5403 #ifdef HAVE_return
5404 if (optimize && HAVE_return)
5406 /* If we're allowed to generate a simple return instruction,
5407 then by definition we don't need a full epilogue. Examine
5408 the block that falls through to EXIT. If it does not
5409 contain any code, examine its predecessors and try to
5410 emit (conditional) return instructions. */
5412 basic_block last;
5413 rtx label;
5415 e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
5416 if (e == NULL)
5417 goto epilogue_done;
5418 last = e->src;
5420 /* Verify that there are no active instructions in the last block. */
5421 label = BB_END (last);
5422 while (label && !LABEL_P (label))
5424 if (active_insn_p (label))
5425 break;
5426 label = PREV_INSN (label);
5429 if (BB_HEAD (last) == label && LABEL_P (label))
5431 edge_iterator ei2;
5433 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5435 basic_block bb = e->src;
5436 rtx jump;
5438 if (bb == ENTRY_BLOCK_PTR)
5440 ei_next (&ei2);
5441 continue;
5444 jump = BB_END (bb);
5445 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5447 ei_next (&ei2);
5448 continue;
5451 /* If we have an unconditional jump, we can replace that
5452 with a simple return instruction. */
5453 if (simplejump_p (jump))
5455 /* The use of the return register might be present in the exit
5456 fallthru block. Either:
5457 - removing the use is safe, and we should remove the use in
5458 the exit fallthru block, or
5459 - removing the use is not safe, and we should add it here.
5460 For now, we conservatively choose the latter. Either of the
5461 2 helps in crossjumping. */
5462 emit_use_return_register_into_block (bb);
5464 emit_return_into_block (bb);
5465 delete_insn (jump);
5468 /* If we have a conditional jump, we can try to replace
5469 that with a conditional return instruction. */
5470 else if (condjump_p (jump))
5472 if (! redirect_jump (jump, 0, 0))
5474 ei_next (&ei2);
5475 continue;
5478 /* See comment in the simplejump_p case above. */
5479 emit_use_return_register_into_block (bb);
5481 /* If this block has only one successor, it both jumps
5482 and falls through to the fallthru block, so we can't
5483 delete the edge. */
5484 if (single_succ_p (bb))
5486 ei_next (&ei2);
5487 continue;
5490 else
5492 ei_next (&ei2);
5493 continue;
5496 /* Fix up the CFG for the successful change we just made. */
5497 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5500 /* Emit a return insn for the exit fallthru block. Whether
5501 this is still reachable will be determined later. */
5503 emit_barrier_after (BB_END (last));
5504 emit_return_into_block (last);
5505 epilogue_end = BB_END (last);
5506 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5507 goto epilogue_done;
5510 #endif
5512 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5513 this marker for the splits of EH_RETURN patterns, and nothing else
5514 uses the flag in the meantime. */
5515 epilogue_completed = 1;
5517 #ifdef HAVE_eh_return
5518 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5519 some targets, these get split to a special version of the epilogue
5520 code. In order to be able to properly annotate these with unwind
5521 info, try to split them now. If we get a valid split, drop an
5522 EPILOGUE_BEG note and mark the insns as epilogue insns. */
5523 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5525 rtx prev, last, trial;
5527 if (e->flags & EDGE_FALLTHRU)
5528 continue;
5529 last = BB_END (e->src);
5530 if (!eh_returnjump_p (last))
5531 continue;
5533 prev = PREV_INSN (last);
5534 trial = try_split (PATTERN (last), last, 1);
5535 if (trial == last)
5536 continue;
5538 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
5539 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
5541 #endif
5543 /* Find the edge that falls through to EXIT. Other edges may exist
5544 due to RETURN instructions, but those don't need epilogues.
5545 There really shouldn't be a mixture -- either all should have
5546 been converted or none, however... */
5548 e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
5549 if (e == NULL)
5550 goto epilogue_done;
5552 #ifdef HAVE_epilogue
5553 if (HAVE_epilogue)
5555 start_sequence ();
5556 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5557 seq = gen_epilogue ();
5558 if (seq)
5559 emit_jump_insn (seq);
5561 /* Retain a map of the epilogue insns. */
5562 record_insns (seq, NULL, &epilogue_insn_hash);
5563 set_insn_locators (seq, epilogue_locator);
5565 seq = get_insns ();
5566 end_sequence ();
5568 insert_insn_on_edge (seq, e);
5569 inserted = true;
5571 else
5572 #endif
5574 basic_block cur_bb;
5576 if (! next_active_insn (BB_END (e->src)))
5577 goto epilogue_done;
5578 /* We have a fall-through edge to the exit block, the source is not
5579 at the end of the function, and there will be an assembler epilogue
5580 at the end of the function.
5581 We can't use force_nonfallthru here, because that would try to
5582 use return. Inserting a jump 'by hand' is extremely messy, so
5583 we take advantage of cfg_layout_finalize using
5584 fixup_fallthru_exit_predecessor. */
5585 cfg_layout_initialize (0);
5586 FOR_EACH_BB (cur_bb)
5587 if (cur_bb->index >= NUM_FIXED_BLOCKS
5588 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5589 cur_bb->aux = cur_bb->next_bb;
5590 cfg_layout_finalize ();
5593 epilogue_done:
5594 default_rtl_profile ();
5596 if (inserted)
5598 sbitmap blocks;
5600 commit_edge_insertions ();
5602 /* Look for basic blocks within the prologue insns. */
5603 blocks = sbitmap_alloc (last_basic_block);
5604 sbitmap_zero (blocks);
5605 SET_BIT (blocks, entry_edge->dest->index);
5606 find_many_sub_basic_blocks (blocks);
5607 sbitmap_free (blocks);
5609 /* The epilogue insns we inserted may cause the exit edge to no longer
5610 be fallthru. */
5611 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5613 if (((e->flags & EDGE_FALLTHRU) != 0)
5614 && returnjump_p (BB_END (e->src)))
5615 e->flags &= ~EDGE_FALLTHRU;
5619 #ifdef HAVE_sibcall_epilogue
5620 /* Emit sibling epilogues before any sibling call sites. */
5621 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5622 {
5623 basic_block bb = e->src;
5624 rtx insn = BB_END (bb);
5626 if (!CALL_P (insn)
5627 || ! SIBLING_CALL_P (insn))
5628 {
5629 ei_next (&ei);
5630 continue;
5631 }
5633 start_sequence ();
5634 emit_note (NOTE_INSN_EPILOGUE_BEG);
5635 emit_insn (gen_sibcall_epilogue ());
5636 seq = get_insns ();
5637 end_sequence ();
5639 /* Retain a map of the epilogue insns. Used in life analysis to
5640 avoid getting rid of sibcall epilogue insns. Do this before we
5641 actually emit the sequence. */
5642 record_insns (seq, NULL, &epilogue_insn_hash);
5643 set_insn_locators (seq, epilogue_locator);
5645 emit_insn_before (seq, insn);
5646 ei_next (&ei);
5647 }
5648 #endif
5650 #ifdef HAVE_epilogue
5651 if (epilogue_end)
5652 {
5653 rtx insn, next;
5655 /* Similarly, move any line notes that appear after the epilogue.
5656 There is no need, however, to be quite so strict about the
5657 existence of such a note. Also possibly move
5658 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5659 info generation. */
5660 for (insn = epilogue_end; insn; insn = next)
5661 {
5662 next = NEXT_INSN (insn);
5663 if (NOTE_P (insn)
5664 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
5665 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5666 }
5667 }
5668 #endif
5670 /* Threading the prologue and epilogue changes the artificial refs
5671 in the entry and exit blocks. */
5672 epilogue_completed = 1;
5673 df_update_entry_exit_and_calls ();
5674 }
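/* Illustrative sketch, not part of function.c: the pass above keeps
   pointer-keyed hash tables of prologue/epilogue insns and queries them
   later (cf. record_insns and contains, defined earlier in this file).
   A minimal self-contained version of that idiom, using only the
   libiberty hashtab API this file already includes; the sketch_* names
   are hypothetical. */

static htab_t sketch_insn_hash;

/* Record every insn in the half-open range [BEG, END), keyed on the
   insn's address. */
static void
sketch_record_insns (rtx beg, rtx end)
{
  rtx insn;
  if (sketch_insn_hash == NULL)
    sketch_insn_hash = htab_create (17, htab_hash_pointer,
                                    htab_eq_pointer, NULL);
  for (insn = beg; insn != end; insn = NEXT_INSN (insn))
    *htab_find_slot (sketch_insn_hash, insn, INSERT) = insn;
}

/* Return nonzero if INSN was recorded by sketch_record_insns. */
static int
sketch_contains (rtx insn)
{
  return sketch_insn_hash != NULL
         && htab_find (sketch_insn_hash, insn) != NULL;
}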
5676 /* Reposition the prologue-end and epilogue-begin notes after
5677 instruction scheduling. */
5679 void
5680 reposition_prologue_and_epilogue_notes (void)
5681 {
5682 #if defined (HAVE_prologue) || defined (HAVE_epilogue) \
5683 || defined (HAVE_sibcall_epilogue)
5684 /* Since the hash table is created on demand, the fact that it is
5685 non-null is a signal that it is non-empty. */
5686 if (prologue_insn_hash != NULL)
5687 {
5688 size_t len = htab_elements (prologue_insn_hash);
5689 rtx insn, last = NULL, note = NULL;
5691 /* Scan from the beginning until we reach the last prologue insn. */
5692 /* ??? While we do have the CFG intact, there are two problems:
5693 (1) The prologue can contain loops (typically probing the stack),
5694 which means that the end of the prologue isn't in the first bb.
5695 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
5696 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5697 {
5698 if (NOTE_P (insn))
5699 {
5700 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
5701 note = insn;
5702 }
5703 else if (contains (insn, prologue_insn_hash))
5704 {
5705 last = insn;
5706 if (--len == 0)
5707 break;
5708 }
5709 }
5711 if (last)
5712 {
5713 if (note == NULL)
5714 {
5715 /* Scan forward looking for the PROLOGUE_END note. It should
5716 be right at the beginning of the block, possibly with other
5717 insn notes that got moved there. */
5718 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
5719 {
5720 if (NOTE_P (note)
5721 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
5722 break;
5723 }
5724 }
5726 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5727 if (LABEL_P (last))
5728 last = NEXT_INSN (last);
5729 reorder_insns (note, note, last);
5730 }
5731 }
5733 if (epilogue_insn_hash != NULL)
5734 {
5735 edge_iterator ei;
5736 edge e;
5738 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5739 {
5740 rtx insn, first = NULL, note = NULL;
5741 basic_block bb = e->src;
5743 /* Scan from the beginning until we reach the first epilogue insn. */
5744 FOR_BB_INSNS (bb, insn)
5745 {
5746 if (NOTE_P (insn))
5747 {
5748 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
5749 {
5750 note = insn;
5751 if (first != NULL)
5752 break;
5753 }
5754 }
5755 else if (first == NULL && contains (insn, epilogue_insn_hash))
5756 {
5757 first = insn;
5758 if (note != NULL)
5759 break;
5760 }
5761 }
5763 if (note)
5764 {
5765 /* If the function has a single basic block, and no real
5766 epilogue insns (e.g. sibcall with no cleanup), the
5767 epilogue note can get scheduled before the prologue
5768 note. If we have frame related prologue insns, having
5769 them scanned during the epilogue will result in a crash.
5770 In this case re-order the epilogue note to just before
5771 the last insn in the block. */
5772 if (first == NULL)
5773 first = BB_END (bb);
5775 if (PREV_INSN (first) != note)
5776 reorder_insns (note, note, PREV_INSN (first));
5777 }
5778 }
5779 }
5780 #endif /* HAVE_prologue or HAVE_epilogue */
5781 }
5783 /* Returns the name of the current function. */
5784 const char *
5785 current_function_name (void)
5786 {
5787 if (cfun == NULL)
5788 return "<none>";
5789 return lang_hooks.decl_printable_name (cfun->decl, 2);
5790 }
5793 static unsigned int
5794 rest_of_handle_check_leaf_regs (void)
5795 {
5796 #ifdef LEAF_REGISTERS
5797 current_function_uses_only_leaf_regs
5798 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5799 #endif
5800 return 0;
5801 }
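/* Illustrative note: current_function_name is meant for diagnostics and
   dump output, e.g. (hypothetical call site):

     fprintf (dump_file, ";; compiling %s\n", current_function_name ());

   and, per the code above, it is safe to call with no current function,
   in which case it returns "<none>". */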
5803 /* Insert a TYPE into the used types hash table of CFUN. */
5805 static void
5806 used_types_insert_helper (tree type, struct function *func)
5807 {
5808 if (type != NULL && func != NULL)
5809 {
5810 void **slot;
5812 if (func->used_types_hash == NULL)
5813 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5814 htab_eq_pointer, NULL);
5815 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5816 if (*slot == NULL)
5817 *slot = type;
5818 }
5819 }
5821 /* Given a type, insert it into the used hash table in cfun. */
5822 void
5823 used_types_insert (tree t)
5824 {
5825 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5826 if (TYPE_NAME (t))
5827 break;
5828 else
5829 t = TREE_TYPE (t);
5830 if (TREE_CODE (t) == ERROR_MARK)
5831 return;
5832 if (TYPE_NAME (t) == NULL_TREE
5833 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
5834 t = TYPE_MAIN_VARIANT (t);
5835 if (debug_info_level > DINFO_LEVEL_NONE)
5836 {
5837 if (cfun)
5838 used_types_insert_helper (t, cfun);
5839 else
5840 /* So this might be a type referenced by a global variable.
5841 Record that type so that we can later decide to emit its debug
5842 information. */
5843 VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
5844 }
5845 }
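/* Illustrative example, assuming a hypothetical declaration: for

     struct foo **a[10];

   the while loop above peels the unnamed ARRAY_TYPE and POINTER_TYPE
   wrappers and stops at the named RECORD_TYPE for struct foo, whose
   main variant is what gets recorded (in cfun->used_types_hash when
   there is a current function, otherwise in
   types_used_by_cur_var_decl). */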
5847 /* Helper to hash a struct types_used_by_vars_entry. */
5849 static hashval_t
5850 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
5851 {
5852 gcc_assert (entry && entry->var_decl && entry->type);
5854 return iterative_hash_object (entry->type,
5855 iterative_hash_object (entry->var_decl, 0));
5856 }
5858 /* Hash function of the types_used_by_vars_entry hash table. */
5860 hashval_t
5861 types_used_by_vars_do_hash (const void *x)
5862 {
5863 const struct types_used_by_vars_entry *entry =
5864 (const struct types_used_by_vars_entry *) x;
5866 return hash_types_used_by_vars_entry (entry);
5867 }
5869 /* Equality function of the types_used_by_vars_entry hash table. */
5871 int
5872 types_used_by_vars_eq (const void *x1, const void *x2)
5873 {
5874 const struct types_used_by_vars_entry *e1 =
5875 (const struct types_used_by_vars_entry *) x1;
5876 const struct types_used_by_vars_entry *e2 =
5877 (const struct types_used_by_vars_entry *) x2;
5879 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
5880 }
5882 /* Inserts an entry into the types_used_by_vars_hash hash table. */
5884 void
5885 types_used_by_var_decl_insert (tree type, tree var_decl)
5886 {
5887 if (type != NULL && var_decl != NULL)
5888 {
5889 void **slot;
5890 struct types_used_by_vars_entry e;
5891 e.var_decl = var_decl;
5892 e.type = type;
5893 if (types_used_by_vars_hash == NULL)
5894 types_used_by_vars_hash =
5895 htab_create_ggc (37, types_used_by_vars_do_hash,
5896 types_used_by_vars_eq, NULL);
5897 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
5898 hash_types_used_by_vars_entry (&e), INSERT);
5899 if (*slot == NULL)
5900 {
5901 struct types_used_by_vars_entry *entry;
5902 entry = ggc_alloc_types_used_by_vars_entry ();
5903 entry->type = type;
5904 entry->var_decl = var_decl;
5905 *slot = entry;
5906 }
5907 }
5908 }
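/* Illustrative call, with a hypothetical global G_DECL of some type:

     types_used_by_var_decl_insert (TREE_TYPE (g_decl), g_decl);

   Duplicate (type, var_decl) pairs hash and compare equal under
   types_used_by_vars_do_hash/_eq above, so repeated insertions simply
   find the existing slot and change nothing. */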
5910 struct rtl_opt_pass pass_leaf_regs =
5911 {
5912 {
5913 RTL_PASS,
5914 "*leaf_regs", /* name */
5915 NULL, /* gate */
5916 rest_of_handle_check_leaf_regs, /* execute */
5917 NULL, /* sub */
5918 NULL, /* next */
5919 0, /* static_pass_number */
5920 TV_NONE, /* tv_id */
5921 0, /* properties_required */
5922 0, /* properties_provided */
5923 0, /* properties_destroyed */
5924 0, /* todo_flags_start */
5925 0 /* todo_flags_finish */
5926 }
5927 };
5929 static unsigned int
5930 rest_of_handle_thread_prologue_and_epilogue (void)
5931 {
5932 if (optimize)
5933 cleanup_cfg (CLEANUP_EXPENSIVE);
5935 /* On some machines, the prologue and epilogue code, or parts thereof,
5936 can be represented as RTL. Doing so lets us schedule insns between
5937 it and the rest of the code and also allows delayed branch
5938 scheduling to operate in the epilogue. */
5939 thread_prologue_and_epilogue_insns ();
5941 /* The stack usage info is finalized during prologue expansion. */
5942 if (flag_stack_usage_info)
5943 output_stack_usage ();
5945 return 0;
5946 }
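/* For context: output_stack_usage emits the per-function report behind
   -fstack-usage; it can only run at this point because, as the comment
   above notes, the stack usage info is finalized while the prologue is
   expanded. */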
5948 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
5949 {
5950 {
5951 RTL_PASS,
5952 "pro_and_epilogue", /* name */
5953 NULL, /* gate */
5954 rest_of_handle_thread_prologue_and_epilogue, /* execute */
5955 NULL, /* sub */
5956 NULL, /* next */
5957 0, /* static_pass_number */
5958 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
5959 0, /* properties_required */
5960 0, /* properties_provided */
5961 0, /* properties_destroyed */
5962 TODO_verify_flow, /* todo_flags_start */
5963 TODO_dump_func |
5964 TODO_df_verify |
5965 TODO_df_finish | TODO_verify_rtl_sharing |
5966 TODO_ggc_collect /* todo_flags_finish */
5967 }
5968 };
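/* For context (true of this vintage of the tree, to the best of the
   editor's knowledge): the pass above is scheduled in passes.c
   (init_optimization_passes) via
   NEXT_PASS (pass_thread_prologue_and_epilogue), after register
   allocation and reload have assigned hard registers. */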
5971 /* This mini-pass fixes fall-out from SSA in asm statements that have
5972 in-out constraints. Say you start with
5974 orig = inout;
5975 asm ("": "+mr" (inout));
5976 use (orig);
5978 which is transformed very early to use explicit output and match operands:
5980 orig = inout;
5981 asm ("": "=mr" (inout) : "0" (inout));
5982 use (orig);
5984 Or, after SSA and copyprop,
5986 asm ("": "=mr" (inout_2) : "0" (inout_1));
5987 use (inout_1);
5989 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
5990 they represent two separate values, so they will get different pseudo
5991 registers during expansion. Then, since the two operands need to match
5992 per the constraints, but use different pseudo registers, reload can
5993 only register a reload for these operands. But reloads can only be
5994 satisfied by hardregs, not by memory, so we need a register for this
5995 reload, just because we are presented with non-matching operands.
5996 So, even though we allow memory for this operand, no memory can be
5997 used for it, just because the two operands don't match. This can
5998 cause reload failures on register-starved targets.
6000 So it's a symptom of reload not being able to use memory for reloads,
6001 or, alternatively, it's also a symptom of both operands not coming into
6002 reload as matching (in which case the pseudo could go to memory just
6003 fine, as the alternative allows it, and no reload would be necessary).
6004 We fix the latter problem here, by transforming
6006 asm ("": "=mr" (inout_2) : "0" (inout_1));
6008 back to
6010 inout_2 = inout_1;
6011 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
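/* A minimal user-level test case (hypothetical) that reaches the
   transformation described above:

     int
     f (int inout)
     {
       int orig = inout;
       asm ("" : "+mr" (inout));
       return orig + inout;
     }

   After SSA and copy propagation, the "+mr" operand becomes the
   non-matching output/input pair that match_asm_constraints_1 below
   repairs with an explicit move. */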
6013 static void
6014 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
6015 {
6016 int i;
6017 bool changed = false;
6018 rtx op = SET_SRC (p_sets[0]);
6019 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6020 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6021 bool *output_matched = XALLOCAVEC (bool, noutputs);
6023 memset (output_matched, 0, noutputs * sizeof (bool));
6024 for (i = 0; i < ninputs; i++)
6025 {
6026 rtx input, output, insns;
6027 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6028 char *end;
6029 int match, j;
6031 if (*constraint == '%')
6032 constraint++;
6034 match = strtoul (constraint, &end, 10);
6035 if (end == constraint)
6036 continue;
6038 gcc_assert (match < noutputs);
6039 output = SET_DEST (p_sets[match]);
6040 input = RTVEC_ELT (inputs, i);
6041 /* Only do the transformation for pseudos. */
6042 if (! REG_P (output)
6043 || rtx_equal_p (output, input)
6044 || (GET_MODE (input) != VOIDmode
6045 && GET_MODE (input) != GET_MODE (output)))
6046 continue;
6048 /* We can't do anything if the output is also used as input,
6049 as we're going to overwrite it. */
6050 for (j = 0; j < ninputs; j++)
6051 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6052 break;
6053 if (j != ninputs)
6054 continue;
6056 /* Avoid changing the same input several times. For
6057 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6058 only change 'in' once (to out1), rather than changing it
6059 first to out1 and afterwards to out2. */
6060 if (i > 0)
6061 {
6062 for (j = 0; j < noutputs; j++)
6063 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6064 break;
6065 if (j != noutputs)
6066 continue;
6067 }
6068 output_matched[match] = true;
6070 start_sequence ();
6071 emit_move_insn (output, input);
6072 insns = get_insns ();
6073 end_sequence ();
6074 emit_insn_before (insns, insn);
6076 /* Now replace all mentions of the input with output. We can't
6077 just replace the occurrence in inputs[i], as the register might
6078 also be used in some other input (or even in an address of an
6079 output), which would mean possibly increasing the number of
6080 inputs by one (namely 'output' in addition), which might pose
6081 a too complicated problem for reload to solve. E.g. this situation:
6083 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6085 Here 'input' is used in two occurrences as input (once for the
6086 input operand, once for the address in the second output operand).
6087 If we replaced only the occurrence of the input operand (to
6088 make the matching), we would be left with this:
6090 output = input
6091 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6093 Now we suddenly have two different input values (containing the same
6094 value, but different pseudos) where we formerly had only one.
6095 With more complicated asms this might lead to reload failures
6096 which wouldn't have happened without this pass. So, iterate over
6097 all operands and replace all occurrences of the register used. */
6098 for (j = 0; j < noutputs; j++)
6099 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6100 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6101 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6102 input, output);
6103 for (j = 0; j < ninputs; j++)
6104 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6105 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6106 input, output);
6108 changed = true;
6109 }
6111 if (changed)
6112 df_insn_rescan (insn);
6113 }
6115 static unsigned
6116 rest_of_match_asm_constraints (void)
6117 {
6118 basic_block bb;
6119 rtx insn, pat, *p_sets;
6120 int noutputs;
6122 if (!crtl->has_asm_statement)
6123 return 0;
6125 df_set_flags (DF_DEFER_INSN_RESCAN);
6126 FOR_EACH_BB (bb)
6127 {
6128 FOR_BB_INSNS (bb, insn)
6129 {
6130 if (!INSN_P (insn))
6131 continue;
6133 pat = PATTERN (insn);
6134 if (GET_CODE (pat) == PARALLEL)
6135 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6136 else if (GET_CODE (pat) == SET)
6137 p_sets = &PATTERN (insn), noutputs = 1;
6138 else
6139 continue;
6141 if (GET_CODE (*p_sets) == SET
6142 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6143 match_asm_constraints_1 (insn, p_sets, noutputs);
6144 }
6145 }
6147 return TODO_df_finish;
6148 }
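/* Note: DF_DEFER_INSN_RESCAN, set above, queues the df_insn_rescan
   calls made from match_asm_constraints_1; returning TODO_df_finish
   lets the pass manager flush that queue once, rather than rescanning
   after each rewritten asm. */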
6150 struct rtl_opt_pass pass_match_asm_constraints =
6151 {
6152 {
6153 RTL_PASS,
6154 "asmcons", /* name */
6155 NULL, /* gate */
6156 rest_of_match_asm_constraints, /* execute */
6157 NULL, /* sub */
6158 NULL, /* next */
6159 0, /* static_pass_number */
6160 TV_NONE, /* tv_id */
6161 0, /* properties_required */
6162 0, /* properties_provided */
6163 0, /* properties_destroyed */
6164 0, /* todo_flags_start */
6165 TODO_dump_func /* todo_flags_finish */
6166 }
6167 };
6170 #include "gt-function.h"