/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "cfglayout.h"
#include "gimple.h"
#include "tree-pass.h"
#include "predict.h"
#include "df.h"
#include "timevar.h"
#include "vecprim.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

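/* Illustrative examples (not part of the original source): with a
   power-of-two ALIGN, these macros reduce to bit masking.  For
   ALIGN == 8:

     FLOOR_ROUND (13, 8)  == 13 & ~7 == 8
     FLOOR_ROUND (-13, 8) == -13 & ~7 == -16   (rounds toward -infinity)
     CEIL_ROUND (13, 8)   == (13 + 7) & ~7 == 16
     CEIL_ROUND (16, 8)   == (16 + 7) & ~7 == 16

   A division-based FLOOR would truncate -13/8 toward zero instead,
   which is why the masking form is used for possibly negative frame
   offsets.  */
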
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;

htab_t types_used_by_vars_hash = NULL;
VEC(tree,gc) *types_used_by_cur_var_decl;

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
#ifdef HAVE_return
static void emit_return_into_block (basic_block);
#endif
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;

/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);
static VEC(function_p,heap) *function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  VEC_safe_push (function_p, heap, function_context_stack, cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = VEC_pop (function_p, function_context_stack);
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
  insn_locators_free ();
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

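/* Example of the sign convention (illustrative only): on a target where
   FRAME_GROWS_DOWNWARD is true and frame_offset starts at 0, allocating
   a 16-byte slot moves frame_offset to -16, and get_frame_size () then
   reports 16.  On an upward-growing frame the offset itself is the
   size.  */
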
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	     /* Leave room for the fixed part of the frame.  */
	     - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
		"total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}

/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}

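/* For instance (illustrative, target-dependent numbers): on a target
   where BIGGEST_ALIGNMENT is 128, a BLKmode slot starts from 128-bit
   alignment, while a DImode slot starts from GET_MODE_ALIGNMENT
   (DImode), e.g. 64 bits; STACK_SLOT_ALIGNMENT and the front end's
   type may then raise either value.  */
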
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
		     HOST_WIDE_INT size, unsigned int alignment,
		     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
		      (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
		     (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
	   && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}

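/* Worked example (illustrative only): assume an upward-growing frame,
   frame_phase == 0, and a free area with START == 4 and LENGTH == 12.
   A request for SIZE == 8 with ALIGNMENT == 8 computes

     this_frame_offset = CEIL_ROUND (4, 8) == 8

   and 8 + 8 <= 4 + 12, so the slot fits; *POFFSET is set to 8 and the
   bytes at offsets 4..7 become padding the caller may record with
   add_frame_space.  With LENGTH == 8 the same request would fail,
   unless the area lies at the edge of the frame, in which case the
   frame is extended instead.  */
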
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc_frame_space ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
		      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = alignment_in_bits;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized);
	      if (!crtl->stack_realign_needed)
		{
		  /* It is OK to reduce the alignment as long as the
		     requested size is 0 or the estimated stack
		     alignment >= mode alignment.  */
		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
			      || size == 0
			      || (crtl->stack_alignment_estimated
				  >= GET_MODE_ALIGNMENT (mode)));
		  alignment_in_bits = crtl->stack_alignment_estimated;
		  alignment = alignment_in_bits / BITS_PER_UNIT;
		}
	    }
	}
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
	{
	  struct frame_space **psp;

	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
	    {
	      struct frame_space *space = *psp;
	      if (!try_fit_stack_local (space->start, space->length, size,
					alignment, &slot_offset))
		continue;
	      *psp = space->next;
	      if (slot_offset > space->start)
		add_frame_space (space->start, slot_offset);
	      if (slot_offset + size < space->start + space->length)
		add_frame_space (slot_offset + size,
				 space->start + space->length);
	      goto found_space;
	    }
	}
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > frame_offset)
	    add_frame_space (frame_offset, slot_offset);
	  if (slot_offset + size < old_frame_offset)
	    add_frame_space (slot_offset + size, old_frame_offset);
	}
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > old_frame_offset)
	    add_frame_space (old_frame_offset, slot_offset);
	  if (slot_offset + size < frame_offset)
	    add_frame_space (slot_offset + size, frame_offset);
	}
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction,
			   Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}

/* Wrap up assign_stack_local_1 with last parameter as ASLK_RECORD_PAD.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}

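/* Typical uses (illustrative sketches, not from this file):

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   allocates a DImode-sized slot aligned according to its mode, while

     rtx buf = assign_stack_local (BLKmode, 64, -1);

   allocates a 64-byte block rounded and aligned to BIGGEST_ALIGNMENT.
   Both return a MEM whose address is based on virtual_stack_vars_rtx
   until the vregs pass replaces it with the frame pointer.  */
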
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;

/* Entry for the above hash table.  */
struct GTY(()) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}

/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
		   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}

/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    *slot = NULL;
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  htab_traverse (temp_slot_address_table,
		 remove_unused_temp_slot_addresses_1,
		 NULL);
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
	for (p = *temp_slots_at_level (i); p; p = p->next)
	  {
	    if (INTVAL (XEXP (x, 1)) >= p->base_offset
		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
	      return p;
	  }
    }

  return NULL;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
			    int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = ggc_alloc_temp_slot ();
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc_temp_slot ();

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
				      (mode == BLKmode
				       ? CEIL_ROUND (size,
						     (int) align
						     / BITS_PER_UNIT)
				       : size),
				      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      gcc_checking_assert (!MEM_SCALAR_P (slot) && !MEM_IN_STRUCT_P (slot));
      if (AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE)
	MEM_IN_STRUCT_P (slot) = 1;
      else
	MEM_SCALAR_P (slot) = 1;
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}

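/* Usage sketch (illustrative): expanding a call that returns a structure
   in memory typically does something like

     rtx tmp = assign_stack_temp (TYPE_MODE (type),
				  int_size_in_bytes (type), 0);

   and then frees the slot with free_temp_slots () once the statement
   has been expanded, making it available for reuse.  */
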
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}

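/* Usage sketch (illustrative): given a tree TYPE,

     rtx t = assign_temp (type, 0, 1, 1);

   yields addressable stack memory for TYPE (because MEMORY_REQUIRED
   is 1), whereas with MEMORY_REQUIRED == 0 a scalar TYPE would come
   back as a fresh pseudo register from gen_reg_rtx.  */
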
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}

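/* Example of the merge arithmetic (illustrative): two free BLKmode
   slots with P->base_offset == 0, P->full_size == 16 and
   Q->base_offset == 16, Q->full_size == 8 satisfy
   P->base_offset + P->full_size == Q->base_offset, so Q is folded
   into P, leaving one free slot covering offsets 0..23.  */
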
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
     in common between them; if so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
	return;

      if (REG_P (new_rtx))
	{
	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
	  return;
	}
      else if (GET_CODE (new_rtx) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;

	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;

	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
	    {
	      next = q->next;

	      if (p != q && q->addr_taken)
		move_slot_to_level (q, temp_slot_level - 1);
	    }

	  move_slot_to_level (p, temp_slot_level - 1);
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	{
	  make_slot_available (p);
	  some_available = true;
	}
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }

  temp_slot_level--;
}

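/* Usage sketch (illustrative, not from the original source): callers
   bracket the expansion of a nested construct so that its temporaries
   die with it:

     push_temp_slots ();
     ...expand the subexpression, allocating temporaries...
     preserve_temp_slots (result);
     pop_temp_slots ();

   where RESULT is the rtx that may live in one of the temporaries and
   must survive the pop.  */
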
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
					       temp_slot_address_hash,
					       temp_slot_address_eq,
					       NULL);
  else
    htab_empty (temp_slot_address_table);
}

/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET    0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
	: REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif

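/* Worked example (illustrative, target-dependent): with
   ACCUMULATE_OUTGOING_ARGS enabled, REG_PARM_STACK_SPACE undefined,
   STACK_POINTER_OFFSET of 0, and crtl->outgoing_args_size == 32,
   STACK_DYNAMIC_OFFSET (fndecl) evaluates to 32, placing dynamically
   allocated memory just above the outgoing argument area.  Without
   ACCUMULATE_OUTGOING_ARGS it reduces to STACK_POINTER_OFFSET
   alone.  */
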
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
	{
	  /* Replace virtual_incoming_args_rtx with internal arg
	     pointer if DRAP is used to realign stack.  */
	  new_rtx = crtl->args.internal_arg_pointer;
	  offset = 0;
	}
      else
	new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}

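/* For example (illustrative): if STARTING_FRAME_OFFSET is -16 on the
   target, var_offset is -16 and a use of virtual_stack_vars_rtx is
   rewritten as frame_pointer_rtx with *POFFSET == -16, so
   (plus (reg virtual-stack-vars) (const_int 8)) ultimately becomes
   (plus (reg fp) (const_int -8)).  */
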
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
	{
	  *loc = plus_constant (new_rtx, offset);
	  if (changed)
	    *changed = true;
	}
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
	{
	  new_rtx = plus_constant (new_rtx, offset);
	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
	  if (changed)
	    *changed = true;
	  return -1;
	}

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
	 we can commute the PLUS and SUBREG because pointers into the
	 frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}

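/* Illustrative transformation: given in_arg_offset == 16, the REG case
   rewrites (reg virtual-incoming-args) in place as
   (plus (reg ap) (const_int 16)), and the PLUS case folds the offset
   into an existing constant term via simplify_gen_binary, so
   (plus (reg virtual-incoming-args) (const_int 4)) becomes
   (plus (reg ap) (const_int 20)).  */
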
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}

/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
	{
	  start_sequence ();

	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
				   GEN_INT (-offset));
	  x = force_operand (x, new_rtx);
	  if (x != new_rtx)
	    emit_move_insn (new_rtx, x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
				   new_rtx, GEN_INT (offset), SET_DEST (set),
				   1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && CONST_INT_P (recog_data.operand[2])
	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  offset += INTVAL (recog_data.operand[2]);

	  /* If the sum is zero, then replace with a plain move.  */
	  if (offset == 0
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new_rtx);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new_rtx)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);
	    bool changed = false;

	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
	    if (!changed)
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't anymore, for instance when the address
	       is still offsetted, but the base reg isn't virtual-stack-reg
	       anymore.  Below we would do a force_reg on the whole operand,
	       but this insn might actually only accept memory.  Hence,
	       before doing that last resort, try to reload the address into
	       a register, so this operand stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;

	case REG:
	  new_rtx = instantiate_new_reg (x, &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset == 0)
	    x = new_rtx;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is valid before we put
		 this through expand_simple_binop.  */
	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
				       GEN_INT (offset), NULL_RTX,
				       1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  break;

	case SUBREG:
	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset != 0)
	    {
	      start_sequence ();
	      new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
					     GEN_INT (offset), NULL_RTX,
					     1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
				   GET_MODE (new_rtx), SUBREG_BYTE (x));
	  gcc_assert (x);
	  break;

	default:
	  continue;
	}

      /* At this point, X contains the new value for the operand.
	 Validate the new value vs the insn predicate.  Note that
	 asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
	{
	  start_sequence ();
	  if (REG_P (x))
	    {
	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
	      x = copy_to_reg (x);
	    }
	  else
	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    emit_insn_before (seq, insn);
	}

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
	{
	  error_for_asm (insn, "impossible constraint in %<asm%>");
	  delete_insn (insn);
	}
    }
  else
    {
      if (recog_memoized (insn) < 0)
	fatal_insn_not_found (insn);
    }
}

/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}

/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
	{
	  if (DECL_RTL_SET_P (t))
	    instantiate_decl_rtl (DECL_RTL (t));
	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
	      && DECL_INCOMING_RTL (t))
	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
	  if ((TREE_CODE (t) == VAR_DECL
	       || TREE_CODE (t) == RESULT_DECL)
	      && DECL_HAS_VALUE_EXPR_P (t))
	    {
	      tree v = DECL_VALUE_EXPR (t);
	      walk_tree (&v, instantiate_expr, NULL, NULL);
	    }
	}
    }
  return NULL;
}

/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
	{
	  tree v = DECL_VALUE_EXPR (t);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}

/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
	instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  VEC_free (tree, gc, cfun->local_decls);
}

1880 /* Pass through the INSNS of function FNDECL and convert virtual register
1881 references to hard register references. */
1883 static unsigned int
1884 instantiate_virtual_regs (void)
1886 rtx insn;
1888 /* Compute the offsets to use for this function. */
1889 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1890 var_offset = STARTING_FRAME_OFFSET;
1891 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1892 out_arg_offset = STACK_POINTER_OFFSET;
1893 #ifdef FRAME_POINTER_CFA_OFFSET
1894 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1895 #else
1896 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1897 #endif
1899 /* Initialize recognition, indicating that volatile is OK. */
1900 init_recog ();
1902 /* Scan through all the insns, instantiating every virtual register still
1903 present. */
1904 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1905 if (INSN_P (insn))
1907 /* These patterns in the instruction stream can never be recognized.
1908 Fortunately, they shouldn't contain virtual registers either. */
1909 if (GET_CODE (PATTERN (insn)) == USE
1910 || GET_CODE (PATTERN (insn)) == CLOBBER
1911 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1912 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1913 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1914 continue;
1915 else if (DEBUG_INSN_P (insn))
1916 for_each_rtx (&INSN_VAR_LOCATION (insn),
1917 instantiate_virtual_regs_in_rtx, NULL);
1918 else
1919 instantiate_virtual_regs_in_insn (insn);
1921 if (INSN_DELETED_P (insn))
1922 continue;
1924 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1926 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1927 if (CALL_P (insn))
1928 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1929 instantiate_virtual_regs_in_rtx, NULL);
1932 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1933 instantiate_decls (current_function_decl);
1935 targetm.instantiate_decls ();
1937 /* Indicate that, from now on, assign_stack_local should use
1938 frame_pointer_rtx. */
1939 virtuals_instantiated = 1;
1941 return 0;
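/* Editorial aside, not part of GCC: the offsets computed above feed the
   actual replacement step (instantiate_new_reg, defined earlier in this
   file), which conceptually rewrites each virtual register as a hard
   register plus a constant:

     virtual_incoming_args_rtx  ->  arg_pointer_rtx   + in_arg_offset
     virtual_stack_vars_rtx     ->  frame_pointer_rtx + var_offset
     virtual_stack_dynamic_rtx  ->  stack_pointer_rtx + dynamic_offset
     virtual_outgoing_args_rtx  ->  stack_pointer_rtx + out_arg_offset

   This summary is illustrative; see instantiate_new_reg for the
   authoritative mapping.  */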
1944 struct rtl_opt_pass pass_instantiate_virtual_regs =
1947 RTL_PASS,
1948 "vregs", /* name */
1949 NULL, /* gate */
1950 instantiate_virtual_regs, /* execute */
1951 NULL, /* sub */
1952 NULL, /* next */
1953 0, /* static_pass_number */
1954 TV_NONE, /* tv_id */
1955 0, /* properties_required */
1956 0, /* properties_provided */
1957 0, /* properties_destroyed */
1958 0, /* todo_flags_start */
1959 0 /* todo_flags_finish */
1964 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1965 This means a type for which function calls must pass an address to the
1966 function or get an address back from the function.
1967 EXP may be a type node or an expression (whose type is tested). */
1969 int
1970 aggregate_value_p (const_tree exp, const_tree fntype)
1972 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1973 int i, regno, nregs;
1974 rtx reg;
1976 if (fntype)
1977 switch (TREE_CODE (fntype))
1979 case CALL_EXPR:
1981 tree fndecl = get_callee_fndecl (fntype);
1982 fntype = (fndecl
1983 ? TREE_TYPE (fndecl)
1984 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
1986 break;
1987 case FUNCTION_DECL:
1988 fntype = TREE_TYPE (fntype);
1989 break;
1990 case FUNCTION_TYPE:
1991 case METHOD_TYPE:
1992 break;
1993 case IDENTIFIER_NODE:
1994 fntype = NULL_TREE;
1995 break;
1996 default:
1997 /* We don't expect other tree types here. */
1998 gcc_unreachable ();
2001 if (VOID_TYPE_P (type))
2002 return 0;
2004 /* If a record should be passed the same as its first (and only) member,
2005 don't pass it as an aggregate. */
2006 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2007 return aggregate_value_p (first_field (type), fntype);
2009 /* If the front end has decided that this needs to be passed by
2010 reference, do so. */
2011 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2012 && DECL_BY_REFERENCE (exp))
2013 return 1;
2015 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2016 if (fntype && TREE_ADDRESSABLE (fntype))
2017 return 1;
2019 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2020 and thus can't be returned in registers. */
2021 if (TREE_ADDRESSABLE (type))
2022 return 1;
2024 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2025 return 1;
2027 if (targetm.calls.return_in_memory (type, fntype))
2028 return 1;
2030 /* Make sure we have suitable call-clobbered regs to return
2031 the value in; if not, we must return it in memory. */
2032 reg = hard_function_value (type, 0, fntype, 0);
2034 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2035 it is OK. */
2036 if (!REG_P (reg))
2037 return 0;
2039 regno = REGNO (reg);
2040 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2041 for (i = 0; i < nregs; i++)
2042 if (! call_used_regs[regno + i])
2043 return 1;
2045 return 0;
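/* Editorial aside, not part of GCC: the loop above in miniature.
   Suppose hard_function_value says the value would come back in hard
   register 3 and TYPE_MODE needs two registers; the value may stay in
   registers only if call_used_regs[3] and call_used_regs[4] both hold.
   A single call-saved register in that range forces a memory return,
   since the callee may not clobber a call-saved register with the
   result.  */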
2048 /* Return true if we should assign DECL a pseudo register; false if it
2049 should live on the local stack. */
2051 bool
2052 use_register_for_decl (const_tree decl)
2054 if (!targetm.calls.allocate_stack_slots_for_args())
2055 return true;
2057 /* Honor volatile. */
2058 if (TREE_SIDE_EFFECTS (decl))
2059 return false;
2061 /* Honor addressability. */
2062 if (TREE_ADDRESSABLE (decl))
2063 return false;
2065 /* Only register-like things go in registers. */
2066 if (DECL_MODE (decl) == BLKmode)
2067 return false;
2069 /* If -ffloat-store specified, don't put explicit float variables
2070 into registers. */
2071 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2072 propagates values across these stores, and it probably shouldn't. */
2073 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2074 return false;
2076 /* If we're not interested in tracking debugging information for
2077 this decl, then we can certainly put it in a register. */
2078 if (DECL_IGNORED_P (decl))
2079 return true;
2081 if (optimize)
2082 return true;
2084 if (!DECL_REGISTER (decl))
2085 return false;
2087 switch (TREE_CODE (TREE_TYPE (decl)))
2089 case RECORD_TYPE:
2090 case UNION_TYPE:
2091 case QUAL_UNION_TYPE:
2092 /* When not optimizing, disregard register keyword for variables with
2093 types containing methods, otherwise the methods won't be callable
2094 from the debugger. */
2095 if (TYPE_METHODS (TREE_TYPE (decl)))
2096 return false;
2097 break;
2098 default:
2099 break;
2102 return true;
2105 /* Return true if TYPE should be passed by invisible reference. */
2107 bool
2108 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2109 tree type, bool named_arg)
2111 if (type)
2113 /* If this type contains non-trivial constructors, then it is
2114 forbidden for the middle-end to create any new copies. */
2115 if (TREE_ADDRESSABLE (type))
2116 return true;
2118 /* GCC post 3.4 passes *all* variable sized types by reference. */
2119 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2120 return true;
2122 /* If a record type should be passed the same as its first (and only)
2123 member, use the type and mode of that member. */
2124 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2126 type = TREE_TYPE (first_field (type));
2127 mode = TYPE_MODE (type);
2131 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2132 type, named_arg);
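/* Editorial aside, not part of GCC: an illustrative case for the
   TREE_ADDRESSABLE branch above.  In C++, a class type with a
   non-trivial copy constructor or destructor, e.g.

     struct S { S (const S &); };
     void f (S s);

   is marked TREE_ADDRESSABLE, so S is passed by invisible reference
   and the middle end never introduces extra copies of it.  */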
2135 /* Return true if TYPE, which is passed by reference, should be callee
2136 copied instead of caller copied. */
2138 bool
2139 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2140 tree type, bool named_arg)
2142 if (type && TREE_ADDRESSABLE (type))
2143 return false;
2144 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2145 named_arg);
2148 /* Structures to communicate between the subroutines of assign_parms.
2149 The first holds data persistent across all parameters, the second
2150 is cleared out for each parameter. */
2152 struct assign_parm_data_all
2154 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2155 should become a job of the target or otherwise be encapsulated. */
2156 CUMULATIVE_ARGS args_so_far_v;
2157 cumulative_args_t args_so_far;
2158 struct args_size stack_args_size;
2159 tree function_result_decl;
2160 tree orig_fnargs;
2161 rtx first_conversion_insn;
2162 rtx last_conversion_insn;
2163 HOST_WIDE_INT pretend_args_size;
2164 HOST_WIDE_INT extra_pretend_bytes;
2165 int reg_parm_stack_space;
2168 struct assign_parm_data_one
2170 tree nominal_type;
2171 tree passed_type;
2172 rtx entry_parm;
2173 rtx stack_parm;
2174 enum machine_mode nominal_mode;
2175 enum machine_mode passed_mode;
2176 enum machine_mode promoted_mode;
2177 struct locate_and_pad_arg_data locate;
2178 int partial;
2179 BOOL_BITFIELD named_arg : 1;
2180 BOOL_BITFIELD passed_pointer : 1;
2181 BOOL_BITFIELD on_stack : 1;
2182 BOOL_BITFIELD loaded_in_reg : 1;
2185 /* A subroutine of assign_parms. Initialize ALL. */
2187 static void
2188 assign_parms_initialize_all (struct assign_parm_data_all *all)
2190 tree fntype ATTRIBUTE_UNUSED;
2192 memset (all, 0, sizeof (*all));
2194 fntype = TREE_TYPE (current_function_decl);
2196 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2197 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2198 #else
2199 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2200 current_function_decl, -1);
2201 #endif
2202 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2204 #ifdef REG_PARM_STACK_SPACE
2205 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2206 #endif
2209 /* If ARGS contains entries with complex types, split each entry into two
2210 entries of the component type. The vector *ARGS is updated in place
2211 when substitutions are needed. */
2213 static void
2214 split_complex_args (VEC(tree, heap) **args)
2216 unsigned i;
2217 tree p;
2219 FOR_EACH_VEC_ELT (tree, *args, i, p)
2221 tree type = TREE_TYPE (p);
2222 if (TREE_CODE (type) == COMPLEX_TYPE
2223 && targetm.calls.split_complex_arg (type))
2225 tree decl;
2226 tree subtype = TREE_TYPE (type);
2227 bool addressable = TREE_ADDRESSABLE (p);
2229 /* Rewrite the PARM_DECL's type with its component. */
2230 p = copy_node (p);
2231 TREE_TYPE (p) = subtype;
2232 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2233 DECL_MODE (p) = VOIDmode;
2234 DECL_SIZE (p) = NULL;
2235 DECL_SIZE_UNIT (p) = NULL;
2236 /* If this arg must go in memory, put it in a pseudo here.
2237 We can't allow it to go in memory as per normal parms,
2238 because the usual place might not have the imag part
2239 adjacent to the real part. */
2240 DECL_ARTIFICIAL (p) = addressable;
2241 DECL_IGNORED_P (p) = addressable;
2242 TREE_ADDRESSABLE (p) = 0;
2243 layout_decl (p, 0);
2244 VEC_replace (tree, *args, i, p);
2246 /* Build a second synthetic decl. */
2247 decl = build_decl (EXPR_LOCATION (p),
2248 PARM_DECL, NULL_TREE, subtype);
2249 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2250 DECL_ARTIFICIAL (decl) = addressable;
2251 DECL_IGNORED_P (decl) = addressable;
2252 layout_decl (decl, 0);
2253 VEC_safe_insert (tree, heap, *args, ++i, decl);
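/* Editorial aside, not part of GCC: a source-level sketch of the
   transformation above, assuming the target's split_complex_arg hook
   accepts _Complex double (this is target-dependent):

     void f (_Complex double z);

   is handled for argument passing as if it were

     void f (double z_real, double z_imag);

   with two synthetic PARM_DECLs of the component type; the names
   shown are invented for illustration.  */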
2258 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2259 the hidden struct return argument, and (ABI willing) complex args.
2260 Return the new parameter list. */
2262 static VEC(tree, heap) *
2263 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2265 tree fndecl = current_function_decl;
2266 tree fntype = TREE_TYPE (fndecl);
2267 VEC(tree, heap) *fnargs = NULL;
2268 tree arg;
2270 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2271 VEC_safe_push (tree, heap, fnargs, arg);
2273 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2275 /* If struct value address is treated as the first argument, make it so. */
2276 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2277 && ! cfun->returns_pcc_struct
2278 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2280 tree type = build_pointer_type (TREE_TYPE (fntype));
2281 tree decl;
2283 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2284 PARM_DECL, get_identifier (".result_ptr"), type);
2285 DECL_ARG_TYPE (decl) = type;
2286 DECL_ARTIFICIAL (decl) = 1;
2287 DECL_NAMELESS (decl) = 1;
2288 TREE_CONSTANT (decl) = 1;
2290 DECL_CHAIN (decl) = all->orig_fnargs;
2291 all->orig_fnargs = decl;
2292 VEC_safe_insert (tree, heap, fnargs, 0, decl);
2294 all->function_result_decl = decl;
2297 /* If the target wants to split complex arguments into scalars, do so. */
2298 if (targetm.calls.split_complex_arg)
2299 split_complex_args (&fnargs);
2301 return fnargs;
2304 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2305 data for the parameter. Incorporate ABI specifics such as pass-by-
2306 reference and type promotion. */
2308 static void
2309 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2310 struct assign_parm_data_one *data)
2312 tree nominal_type, passed_type;
2313 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2314 int unsignedp;
2316 memset (data, 0, sizeof (*data));
2318 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2319 if (!cfun->stdarg)
2320 data->named_arg = 1; /* No variadic parms. */
2321 else if (DECL_CHAIN (parm))
2322 data->named_arg = 1; /* Not the last non-variadic parm. */
2323 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2324 data->named_arg = 1; /* Only variadic ones are unnamed. */
2325 else
2326 data->named_arg = 0; /* Treat as variadic. */
2328 nominal_type = TREE_TYPE (parm);
2329 passed_type = DECL_ARG_TYPE (parm);
2331 /* Look out for errors propagating this far. Also, if the parameter's
2332 type is void then its value doesn't matter. */
2333 if (TREE_TYPE (parm) == error_mark_node
2334 /* This can happen after weird syntax errors
2335 or if an enum type is defined among the parms. */
2336 || TREE_CODE (parm) != PARM_DECL
2337 || passed_type == NULL
2338 || VOID_TYPE_P (nominal_type))
2340 nominal_type = passed_type = void_type_node;
2341 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2342 goto egress;
2345 /* Find mode of arg as it is passed, and mode of arg as it should be
2346 during execution of this function. */
2347 passed_mode = TYPE_MODE (passed_type);
2348 nominal_mode = TYPE_MODE (nominal_type);
2350 /* If the parm is to be passed as a transparent union or record, use the
2351 type of the first field for the tests below. We have already verified
2352 that the modes are the same. */
2353 if ((TREE_CODE (passed_type) == UNION_TYPE
2354 || TREE_CODE (passed_type) == RECORD_TYPE)
2355 && TYPE_TRANSPARENT_AGGR (passed_type))
2356 passed_type = TREE_TYPE (first_field (passed_type));
2358 /* See if this arg was passed by invisible reference. */
2359 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2360 passed_type, data->named_arg))
2362 passed_type = nominal_type = build_pointer_type (passed_type);
2363 data->passed_pointer = true;
2364 passed_mode = nominal_mode = Pmode;
2367 /* Find mode as it is passed by the ABI. */
2368 unsignedp = TYPE_UNSIGNED (passed_type);
2369 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2370 TREE_TYPE (current_function_decl), 0);
2372 egress:
2373 data->nominal_type = nominal_type;
2374 data->passed_type = passed_type;
2375 data->nominal_mode = nominal_mode;
2376 data->passed_mode = passed_mode;
2377 data->promoted_mode = promoted_mode;
2380 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2382 static void
2383 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2384 struct assign_parm_data_one *data, bool no_rtl)
2386 int varargs_pretend_bytes = 0;
2388 targetm.calls.setup_incoming_varargs (all->args_so_far,
2389 data->promoted_mode,
2390 data->passed_type,
2391 &varargs_pretend_bytes, no_rtl);
2393 /* If the back-end has requested extra stack space, record how much is
2394 needed. Do not change pretend_args_size otherwise since it may be
2395 nonzero from an earlier partial argument. */
2396 if (varargs_pretend_bytes > 0)
2397 all->pretend_args_size = varargs_pretend_bytes;
2400 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2401 the incoming location of the current parameter. */
2403 static void
2404 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2405 struct assign_parm_data_one *data)
2407 HOST_WIDE_INT pretend_bytes = 0;
2408 rtx entry_parm;
2409 bool in_regs;
2411 if (data->promoted_mode == VOIDmode)
2413 data->entry_parm = data->stack_parm = const0_rtx;
2414 return;
2417 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2418 data->promoted_mode,
2419 data->passed_type,
2420 data->named_arg);
2422 if (entry_parm == 0)
2423 data->promoted_mode = data->passed_mode;
2425 /* Determine parm's home in the stack, in case it arrives in the stack
2426 or we should pretend it did. Compute the stack position and rtx where
2427 the argument arrives and its size.
2429 There is one complexity here: If this was a parameter that would
2430 have been passed in registers, but wasn't only because it is
2431 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2432 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2433 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2434 as it was the previous time. */
2435 in_regs = entry_parm != 0;
2436 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2437 in_regs = true;
2438 #endif
2439 if (!in_regs && !data->named_arg)
2441 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2443 rtx tem;
2444 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2445 data->promoted_mode,
2446 data->passed_type, true);
2447 in_regs = tem != NULL;
2451 /* If this parameter was passed both in registers and in the stack, use
2452 the copy on the stack. */
2453 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2454 data->passed_type))
2455 entry_parm = 0;
2457 if (entry_parm)
2459 int partial;
2461 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2462 data->promoted_mode,
2463 data->passed_type,
2464 data->named_arg);
2465 data->partial = partial;
2467 /* The caller might already have allocated stack space for the
2468 register parameters. */
2469 if (partial != 0 && all->reg_parm_stack_space == 0)
2471 /* Part of this argument is passed in registers and part
2472 is passed on the stack. Ask the prologue code to extend
2473 the stack part so that we can recreate the full value.
2475 PRETEND_BYTES is the size of the registers we need to store.
2476 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2477 stack space that the prologue should allocate.
2479 Internally, gcc assumes that the argument pointer is aligned
2480 to STACK_BOUNDARY bits. This is used both for alignment
2481 optimizations (see init_emit) and to locate arguments that are
2482 aligned to more than PARM_BOUNDARY bits. We must preserve this
2483 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2484 a stack boundary. */
2486 /* We assume at most one partial arg, and it must be the first
2487 argument on the stack. */
2488 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2490 pretend_bytes = partial;
2491 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2493 /* We want to align relative to the actual stack pointer, so
2494 don't include this in the stack size until later. */
2495 all->extra_pretend_bytes = all->pretend_args_size;
2499 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2500 entry_parm ? data->partial : 0, current_function_decl,
2501 &all->stack_args_size, &data->locate);
2503 /* Update parm_stack_boundary if this parameter is passed in the
2504 stack. */
2505 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2506 crtl->parm_stack_boundary = data->locate.boundary;
2508 /* Adjust offsets to include the pretend args. */
2509 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2510 data->locate.slot_offset.constant += pretend_bytes;
2511 data->locate.offset.constant += pretend_bytes;
2513 data->entry_parm = entry_parm;
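/* Editorial aside, not part of GCC: a self-contained sketch of the
   CEIL_ROUND padding above, using a hypothetical 16-byte STACK_BYTES.
   EX_CEIL_ROUND mirrors CEIL_ROUND from the top of this file.  */
#if 0
#include <assert.h>
#define EX_CEIL_ROUND(v, a) (((v) + (a) - 1) & ~((a) - 1))
static void
pretend_args_rounding_example (void)
{
  /* 12 bytes of partial-argument registers to store: the prologue is
     asked for one full 16-byte slot, preserving STACK_BOUNDARY.  */
  assert (EX_CEIL_ROUND (12, 16) == 16);
  /* Already-aligned sizes are left unchanged.  */
  assert (EX_CEIL_ROUND (16, 16) == 16);
}
#endif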
2516 /* A subroutine of assign_parms. If there is actually space on the stack
2517 for this parm, count it in stack_args_size and return true. */
2519 static bool
2520 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2521 struct assign_parm_data_one *data)
2523 /* Trivially true if we've no incoming register. */
2524 if (data->entry_parm == NULL)
2526 /* Also true if we're partially in registers and partially not,
2527 since we've arranged to drop the entire argument on the stack. */
2528 else if (data->partial != 0)
2530 /* Also true if the target says that it's passed in both registers
2531 and on the stack. */
2532 else if (GET_CODE (data->entry_parm) == PARALLEL
2533 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2535 /* Also true if the target says that there's stack allocated for
2536 all register parameters. */
2537 else if (all->reg_parm_stack_space > 0)
2539 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2540 else
2541 return false;
2543 all->stack_args_size.constant += data->locate.size.constant;
2544 if (data->locate.size.var)
2545 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2547 return true;
2550 /* A subroutine of assign_parms. Given that this parameter is allocated
2551 stack space by the ABI, find it. */
2553 static void
2554 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2556 rtx offset_rtx, stack_parm;
2557 unsigned int align, boundary;
2559 /* If we're passing this arg using a reg, make its stack home the
2560 aligned stack slot. */
2561 if (data->entry_parm)
2562 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2563 else
2564 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2566 stack_parm = crtl->args.internal_arg_pointer;
2567 if (offset_rtx != const0_rtx)
2568 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2569 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2571 if (!data->passed_pointer)
2573 set_mem_attributes (stack_parm, parm, 1);
2574 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2575 while promoted mode's size is needed. */
2576 if (data->promoted_mode != BLKmode
2577 && data->promoted_mode != DECL_MODE (parm))
2579 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2580 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2582 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2583 data->promoted_mode);
2584 if (offset)
2585 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2590 boundary = data->locate.boundary;
2591 align = BITS_PER_UNIT;
2593 /* If we're padding upward, we know that the alignment of the slot
2594 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2595 intentionally forcing upward padding. Otherwise we have to come
2596 up with a guess at the alignment based on OFFSET_RTX. */
2597 if (data->locate.where_pad != downward || data->entry_parm)
2598 align = boundary;
2599 else if (CONST_INT_P (offset_rtx))
2601 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2602 align = align & -align;
2604 set_mem_align (stack_parm, align);
2606 if (data->entry_parm)
2607 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2609 data->stack_parm = stack_parm;
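/* Editorial aside, not part of GCC: worked numbers for the
   `align & -align' guess above, using a hypothetical 4-byte slot
   offset and a 64-bit boundary.  */
#if 0
#include <assert.h>
static void
align_guess_example (void)
{
  int align = 4 * 8 | 64;   /* 96: offset in bits OR'd with boundary.  */
  align = align & -align;   /* 32: lowest set bit, i.e. the largest
                               power of two dividing both.  */
  assert (align == 32);
}
#endif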
2612 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2613 always valid and contiguous. */
2615 static void
2616 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2618 rtx entry_parm = data->entry_parm;
2619 rtx stack_parm = data->stack_parm;
2621 /* If this parm was passed part in regs and part in memory, pretend it
2622 arrived entirely in memory by pushing the register-part onto the stack.
2623 In the special case of a DImode or DFmode that is split, we could put
2624 it together in a pseudoreg directly, but for now that's not worth
2625 bothering with. */
2626 if (data->partial != 0)
2628 /* Handle calls that pass values in multiple non-contiguous
2629 locations. The Irix 6 ABI has examples of this. */
2630 if (GET_CODE (entry_parm) == PARALLEL)
2631 emit_group_store (validize_mem (stack_parm), entry_parm,
2632 data->passed_type,
2633 int_size_in_bytes (data->passed_type));
2634 else
2636 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2637 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2638 data->partial / UNITS_PER_WORD);
2641 entry_parm = stack_parm;
2644 /* If we didn't decide this parm came in a register, by default it came
2645 on the stack. */
2646 else if (entry_parm == NULL)
2647 entry_parm = stack_parm;
2649 /* When an argument is passed in multiple locations, we can't make use
2650 of this information, but we can save some copying if the whole argument
2651 is passed in a single register. */
2652 else if (GET_CODE (entry_parm) == PARALLEL
2653 && data->nominal_mode != BLKmode
2654 && data->passed_mode != BLKmode)
2656 size_t i, len = XVECLEN (entry_parm, 0);
2658 for (i = 0; i < len; i++)
2659 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2660 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2661 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2662 == data->passed_mode)
2663 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2665 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2666 break;
2670 data->entry_parm = entry_parm;
2673 /* A subroutine of assign_parms. Reconstitute any values which were
2674 passed in multiple registers and would fit in a single register. */
2676 static void
2677 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2679 rtx entry_parm = data->entry_parm;
2681 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2682 This can be done with register operations rather than on the
2683 stack, even if we will store the reconstituted parameter on the
2684 stack later. */
2685 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2687 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2688 emit_group_store (parmreg, entry_parm, data->passed_type,
2689 GET_MODE_SIZE (GET_MODE (entry_parm)));
2690 entry_parm = parmreg;
2693 data->entry_parm = entry_parm;
2696 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2697 always valid and properly aligned. */
2699 static void
2700 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2702 rtx stack_parm = data->stack_parm;
2704 /* If we can't trust the parm stack slot to be aligned enough for its
2705 ultimate type, don't use that slot after entry. We'll make another
2706 stack slot, if we need one. */
2707 if (stack_parm
2708 && ((STRICT_ALIGNMENT
2709 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2710 || (data->nominal_type
2711 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2712 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2713 stack_parm = NULL;
2715 /* If parm was passed in memory, and we need to convert it on entry,
2716 don't store it back in that same slot. */
2717 else if (data->entry_parm == stack_parm
2718 && data->nominal_mode != BLKmode
2719 && data->nominal_mode != data->passed_mode)
2720 stack_parm = NULL;
2722 /* If stack protection is in effect for this function, don't leave any
2723 pointers in their passed stack slots. */
2724 else if (crtl->stack_protect_guard
2725 && (flag_stack_protect == 2
2726 || data->passed_pointer
2727 || POINTER_TYPE_P (data->nominal_type)))
2728 stack_parm = NULL;
2730 data->stack_parm = stack_parm;
2733 /* A subroutine of assign_parms. Return true if the current parameter
2734 should be stored as a BLKmode in the current frame. */
2736 static bool
2737 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2739 if (data->nominal_mode == BLKmode)
2740 return true;
2741 if (GET_MODE (data->entry_parm) == BLKmode)
2742 return true;
2744 #ifdef BLOCK_REG_PADDING
2745 /* Only assign_parm_setup_block knows how to deal with register arguments
2746 that are padded at the least significant end. */
2747 if (REG_P (data->entry_parm)
2748 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2749 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2750 == (BYTES_BIG_ENDIAN ? upward : downward)))
2751 return true;
2752 #endif
2754 return false;
2757 /* A subroutine of assign_parms. Arrange for the parameter to be
2758 present and valid in DATA->STACK_RTL. */
2760 static void
2761 assign_parm_setup_block (struct assign_parm_data_all *all,
2762 tree parm, struct assign_parm_data_one *data)
2764 rtx entry_parm = data->entry_parm;
2765 rtx stack_parm = data->stack_parm;
2766 HOST_WIDE_INT size;
2767 HOST_WIDE_INT size_stored;
2769 if (GET_CODE (entry_parm) == PARALLEL)
2770 entry_parm = emit_group_move_into_temps (entry_parm);
2772 size = int_size_in_bytes (data->passed_type);
2773 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2774 if (stack_parm == 0)
2776 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2777 stack_parm = assign_stack_local (BLKmode, size_stored,
2778 DECL_ALIGN (parm));
2779 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2780 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2781 set_mem_attributes (stack_parm, parm, 1);
2784 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2785 calls that pass values in multiple non-contiguous locations. */
2786 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2788 rtx mem;
2790 /* Note that we will be storing an integral number of words.
2791 So we have to be careful to ensure that we allocate an
2792 integral number of words. We do this above when we call
2793 assign_stack_local if space was not allocated in the argument
2794 list. If it was, this will not work if PARM_BOUNDARY is not
2795 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2796 if it becomes a problem. The exception is when BLKmode arrives
2797 with arguments not conforming to word_mode. */
2799 if (data->stack_parm == 0)
2801 else if (GET_CODE (entry_parm) == PARALLEL)
2803 else
2804 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2806 mem = validize_mem (stack_parm);
2808 /* Handle values in multiple non-contiguous locations. */
2809 if (GET_CODE (entry_parm) == PARALLEL)
2811 push_to_sequence2 (all->first_conversion_insn,
2812 all->last_conversion_insn);
2813 emit_group_store (mem, entry_parm, data->passed_type, size);
2814 all->first_conversion_insn = get_insns ();
2815 all->last_conversion_insn = get_last_insn ();
2816 end_sequence ();
2819 else if (size == 0)
2822 /* If SIZE is that of a mode no bigger than a word, just use
2823 that mode's store operation. */
2824 else if (size <= UNITS_PER_WORD)
2826 enum machine_mode mode
2827 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2829 if (mode != BLKmode
2830 #ifdef BLOCK_REG_PADDING
2831 && (size == UNITS_PER_WORD
2832 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2833 != (BYTES_BIG_ENDIAN ? upward : downward)))
2834 #endif
2837 rtx reg;
2839 /* We are really truncating a word_mode value containing
2840 SIZE bytes into a value of mode MODE. If such an
2841 operation requires no actual instructions, we can refer
2842 to the value directly in mode MODE, otherwise we must
2843 start with the register in word_mode and explicitly
2844 convert it. */
2845 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2846 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2847 else
2849 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2850 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2852 emit_move_insn (change_address (mem, mode, 0), reg);
2855 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2856 machine must be aligned to the left before storing
2857 to memory. Note that the previous test doesn't
2858 handle all cases (e.g. SIZE == 3). */
2859 else if (size != UNITS_PER_WORD
2860 #ifdef BLOCK_REG_PADDING
2861 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2862 == downward)
2863 #else
2864 && BYTES_BIG_ENDIAN
2865 #endif
2868 rtx tem, x;
2869 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2870 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2872 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
2873 tem = change_address (mem, word_mode, 0);
2874 emit_move_insn (tem, x);
2876 else
2877 move_block_from_reg (REGNO (entry_parm), mem,
2878 size_stored / UNITS_PER_WORD);
2880 else
2881 move_block_from_reg (REGNO (entry_parm), mem,
2882 size_stored / UNITS_PER_WORD);
2884 else if (data->stack_parm == 0)
2886 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2887 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2888 BLOCK_OP_NORMAL);
2889 all->first_conversion_insn = get_insns ();
2890 all->last_conversion_insn = get_last_insn ();
2891 end_sequence ();
2894 data->stack_parm = stack_parm;
2895 SET_DECL_RTL (parm, stack_parm);
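/* Editorial aside, not part of GCC: illustrative numbers for the
   left-justifying shift above.  On a hypothetical big-endian target
   with UNITS_PER_WORD == 4 and a 3-byte argument:

     by = (4 - 3) * BITS_PER_UNIT = 8

   Shifting the register left by 8 bits moves the three significant
   bytes to the most significant end, so the subsequent word_mode
   store places them in the first three bytes of the slot.  */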
2898 /* A subroutine of assign_parm_setup_reg, called through note_stores.
2899 This collects sets and clobbers of hard registers in a HARD_REG_SET,
2900 which is pointed to by DATA. */
2901 static void
2902 record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
2904 HARD_REG_SET *pset = (HARD_REG_SET *)data;
2905 if (REG_P (x) && HARD_REGISTER_P (x))
2906 add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
2909 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2910 parameter. Get it there. Perform all ABI specified conversions. */
2912 static void
2913 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2914 struct assign_parm_data_one *data)
2916 rtx parmreg, validated_mem;
2917 rtx equiv_stack_parm;
2918 enum machine_mode promoted_nominal_mode;
2919 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2920 bool did_conversion = false;
2921 bool need_conversion, moved;
2923 /* Store the parm in a pseudoregister during the function, but we may
2924 need to do it in a wider mode. Using 2 here makes the result
2925 consistent with promote_decl_mode and thus expand_expr_real_1. */
2926 promoted_nominal_mode
2927 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
2928 TREE_TYPE (current_function_decl), 2);
2930 parmreg = gen_reg_rtx (promoted_nominal_mode);
2932 if (!DECL_ARTIFICIAL (parm))
2933 mark_user_reg (parmreg);
2935 /* If this was an item that we received a pointer to,
2936 set DECL_RTL appropriately. */
2937 if (data->passed_pointer)
2939 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2940 set_mem_attributes (x, parm, 1);
2941 SET_DECL_RTL (parm, x);
2943 else
2944 SET_DECL_RTL (parm, parmreg);
2946 assign_parm_remove_parallels (data);
2948 /* Copy the value into the register, thus bridging between
2949 assign_parm_find_data_types and expand_expr_real_1. */
2951 equiv_stack_parm = data->stack_parm;
2952 validated_mem = validize_mem (data->entry_parm);
2954 need_conversion = (data->nominal_mode != data->passed_mode
2955 || promoted_nominal_mode != data->promoted_mode);
2956 moved = false;
2958 if (need_conversion
2959 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2960 && data->nominal_mode == data->passed_mode
2961 && data->nominal_mode == GET_MODE (data->entry_parm))
2963 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2964 mode, by the caller. We now have to convert it to
2965 NOMINAL_MODE, if different. However, PARMREG may be in
2966 a different mode than NOMINAL_MODE if it is being stored
2967 promoted.
2969 If ENTRY_PARM is a hard register, it might be in a register
2970 not valid for operating in its mode (e.g., an odd-numbered
2971 register for a DFmode). In that case, moves are the only
2972 thing valid, so we can't do a convert from there. This
2973 occurs when the calling sequence allows such misaligned
2974 usages.
2976 In addition, the conversion may involve a call, which could
2977 clobber parameters which haven't been copied to pseudo
2978 registers yet.
2980 First, we try to emit an insn which performs the necessary
2981 conversion. We verify that this insn does not clobber any
2982 hard registers. */
2984 enum insn_code icode;
2985 rtx op0, op1;
2987 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
2988 unsignedp);
2990 op0 = parmreg;
2991 op1 = validated_mem;
2992 if (icode != CODE_FOR_nothing
2993 && insn_operand_matches (icode, 0, op0)
2994 && insn_operand_matches (icode, 1, op1))
2996 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
2997 rtx insn, insns;
2998 HARD_REG_SET hardregs;
3000 start_sequence ();
3001 insn = gen_extend_insn (op0, op1, promoted_nominal_mode,
3002 data->passed_mode, unsignedp);
3003 emit_insn (insn);
3004 insns = get_insns ();
3006 moved = true;
3007 CLEAR_HARD_REG_SET (hardregs);
3008 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3010 if (INSN_P (insn))
3011 note_stores (PATTERN (insn), record_hard_reg_sets,
3012 &hardregs);
3013 if (!hard_reg_set_empty_p (hardregs))
3014 moved = false;
3017 end_sequence ();
3019 if (moved)
3021 emit_insn (insns);
3022 if (equiv_stack_parm != NULL_RTX)
3023 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3024 equiv_stack_parm);
3029 if (moved)
3030 /* Nothing to do. */
3032 else if (need_conversion)
3034 /* We did not have an insn to convert directly, or the sequence
3035 generated appeared unsafe. We must first copy the parm to a
3036 pseudo reg, and save the conversion until after all
3037 parameters have been moved. */
3039 int save_tree_used;
3040 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3042 emit_move_insn (tempreg, validated_mem);
3044 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3045 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3047 if (GET_CODE (tempreg) == SUBREG
3048 && GET_MODE (tempreg) == data->nominal_mode
3049 && REG_P (SUBREG_REG (tempreg))
3050 && data->nominal_mode == data->passed_mode
3051 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3052 && GET_MODE_SIZE (GET_MODE (tempreg))
3053 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3055 /* The argument is already sign/zero extended, so note it
3056 into the subreg. */
3057 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3058 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
3061 /* TREE_USED gets set erroneously during expand_assignment. */
3062 save_tree_used = TREE_USED (parm);
3063 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3064 TREE_USED (parm) = save_tree_used;
3065 all->first_conversion_insn = get_insns ();
3066 all->last_conversion_insn = get_last_insn ();
3067 end_sequence ();
3069 did_conversion = true;
3071 else
3072 emit_move_insn (parmreg, validated_mem);
3074 /* If we were passed a pointer but the actual value can safely live
3075 in a register, put it in one. */
3076 if (data->passed_pointer
3077 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3078 /* If by-reference argument was promoted, demote it. */
3079 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
3080 || use_register_for_decl (parm)))
3082 /* We can't use nominal_mode, because it will have been set to
3083 Pmode above. We must use the actual mode of the parm. */
3084 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3085 mark_user_reg (parmreg);
3087 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3089 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3090 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3092 push_to_sequence2 (all->first_conversion_insn,
3093 all->last_conversion_insn);
3094 emit_move_insn (tempreg, DECL_RTL (parm));
3095 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3096 emit_move_insn (parmreg, tempreg);
3097 all->first_conversion_insn = get_insns ();
3098 all->last_conversion_insn = get_last_insn ();
3099 end_sequence ();
3101 did_conversion = true;
3103 else
3104 emit_move_insn (parmreg, DECL_RTL (parm));
3106 SET_DECL_RTL (parm, parmreg);
3108 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3109 now the parm. */
3110 data->stack_parm = NULL;
3113 /* Mark the register as eliminable if we did no conversion and it was
3114 copied from memory at a fixed offset, and the arg pointer was not
3115 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3116 offset formed an invalid address, such memory-equivalences as we
3117 make here would screw up life analysis for it. */
3118 if (data->nominal_mode == data->passed_mode
3119 && !did_conversion
3120 && data->stack_parm != 0
3121 && MEM_P (data->stack_parm)
3122 && data->locate.offset.var == 0
3123 && reg_mentioned_p (virtual_incoming_args_rtx,
3124 XEXP (data->stack_parm, 0)))
3126 rtx linsn = get_last_insn ();
3127 rtx sinsn, set;
3129 /* Mark complex types separately. */
3130 if (GET_CODE (parmreg) == CONCAT)
3132 enum machine_mode submode
3133 = GET_MODE_INNER (GET_MODE (parmreg));
3134 int regnor = REGNO (XEXP (parmreg, 0));
3135 int regnoi = REGNO (XEXP (parmreg, 1));
3136 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3137 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3138 GET_MODE_SIZE (submode));
3140 /* Scan backwards for the set of the real and
3141 imaginary parts. */
3142 for (sinsn = linsn; sinsn != 0;
3143 sinsn = prev_nonnote_insn (sinsn))
3145 set = single_set (sinsn);
3146 if (set == 0)
3147 continue;
3149 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3150 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3151 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3152 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3155 else if ((set = single_set (linsn)) != 0
3156 && SET_DEST (set) == parmreg)
3157 set_unique_reg_note (linsn, REG_EQUIV, equiv_stack_parm);
3160 /* For pointer data type, suggest pointer register. */
3161 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3162 mark_reg_pointer (parmreg,
3163 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3166 /* A subroutine of assign_parms. Allocate stack space to hold the current
3167 parameter. Get it there. Perform all ABI specified conversions. */
3169 static void
3170 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3171 struct assign_parm_data_one *data)
3173 /* Value must be stored in the stack slot STACK_PARM during function
3174 execution. */
3175 bool to_conversion = false;
3177 assign_parm_remove_parallels (data);
3179 if (data->promoted_mode != data->nominal_mode)
3181 /* Conversion is required. */
3182 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3184 emit_move_insn (tempreg, validize_mem (data->entry_parm));
3186 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3187 to_conversion = true;
3189 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3190 TYPE_UNSIGNED (TREE_TYPE (parm)));
3192 if (data->stack_parm)
3194 int offset = subreg_lowpart_offset (data->nominal_mode,
3195 GET_MODE (data->stack_parm));
3196 /* ??? This may need a big-endian conversion on sparc64. */
3197 data->stack_parm
3198 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3199 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3200 set_mem_offset (data->stack_parm,
3201 MEM_OFFSET (data->stack_parm) + offset);
3205 if (data->entry_parm != data->stack_parm)
3207 rtx src, dest;
3209 if (data->stack_parm == 0)
3211 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3212 GET_MODE (data->entry_parm),
3213 TYPE_ALIGN (data->passed_type));
3214 data->stack_parm
3215 = assign_stack_local (GET_MODE (data->entry_parm),
3216 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3217 align);
3218 set_mem_attributes (data->stack_parm, parm, 1);
3221 dest = validize_mem (data->stack_parm);
3222 src = validize_mem (data->entry_parm);
3224 if (MEM_P (src))
3226 /* Use a block move to handle potentially misaligned entry_parm. */
3227 if (!to_conversion)
3228 push_to_sequence2 (all->first_conversion_insn,
3229 all->last_conversion_insn);
3230 to_conversion = true;
3232 emit_block_move (dest, src,
3233 GEN_INT (int_size_in_bytes (data->passed_type)),
3234 BLOCK_OP_NORMAL);
3236 else
3237 emit_move_insn (dest, src);
3240 if (to_conversion)
3242 all->first_conversion_insn = get_insns ();
3243 all->last_conversion_insn = get_last_insn ();
3244 end_sequence ();
3247 SET_DECL_RTL (parm, data->stack_parm);
3250 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3251 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3253 static void
3254 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3255 VEC(tree, heap) *fnargs)
3257 tree parm;
3258 tree orig_fnargs = all->orig_fnargs;
3259 unsigned i = 0;
3261 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3263 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3264 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3266 rtx tmp, real, imag;
3267 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3269 real = DECL_RTL (VEC_index (tree, fnargs, i));
3270 imag = DECL_RTL (VEC_index (tree, fnargs, i + 1));
3271 if (inner != GET_MODE (real))
3273 real = gen_lowpart_SUBREG (inner, real);
3274 imag = gen_lowpart_SUBREG (inner, imag);
3277 if (TREE_ADDRESSABLE (parm))
3279 rtx rmem, imem;
3280 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3281 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3282 DECL_MODE (parm),
3283 TYPE_ALIGN (TREE_TYPE (parm)));
3285 /* split_complex_arg put the real and imag parts in
3286 pseudos. Move them to memory. */
3287 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3288 set_mem_attributes (tmp, parm, 1);
3289 rmem = adjust_address_nv (tmp, inner, 0);
3290 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3291 push_to_sequence2 (all->first_conversion_insn,
3292 all->last_conversion_insn);
3293 emit_move_insn (rmem, real);
3294 emit_move_insn (imem, imag);
3295 all->first_conversion_insn = get_insns ();
3296 all->last_conversion_insn = get_last_insn ();
3297 end_sequence ();
3299 else
3300 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3301 SET_DECL_RTL (parm, tmp);
3303 real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i));
3304 imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1));
3305 if (inner != GET_MODE (real))
3307 real = gen_lowpart_SUBREG (inner, real);
3308 imag = gen_lowpart_SUBREG (inner, imag);
3310 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3311 set_decl_incoming_rtl (parm, tmp, false);
3312 i++;
3317 /* Assign RTL expressions to the function's parameters. This may involve
3318 copying them into registers and using those registers as the DECL_RTL. */
3320 static void
3321 assign_parms (tree fndecl)
3323 struct assign_parm_data_all all;
3324 tree parm;
3325 VEC(tree, heap) *fnargs;
3326 unsigned i;
3328 crtl->args.internal_arg_pointer
3329 = targetm.calls.internal_arg_pointer ();
3331 assign_parms_initialize_all (&all);
3332 fnargs = assign_parms_augmented_arg_list (&all);
3334 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
3336 struct assign_parm_data_one data;
3338 /* Extract the type of PARM; adjust it according to ABI. */
3339 assign_parm_find_data_types (&all, parm, &data);
3341 /* Early out for errors and void parameters. */
3342 if (data.passed_mode == VOIDmode)
3344 SET_DECL_RTL (parm, const0_rtx);
3345 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3346 continue;
3349 /* Estimate stack alignment from parameter alignment. */
3350 if (SUPPORTS_STACK_ALIGNMENT)
3352 unsigned int align
3353 = targetm.calls.function_arg_boundary (data.promoted_mode,
3354 data.passed_type);
3355 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3356 align);
3357 if (TYPE_ALIGN (data.nominal_type) > align)
3358 align = MINIMUM_ALIGNMENT (data.nominal_type,
3359 TYPE_MODE (data.nominal_type),
3360 TYPE_ALIGN (data.nominal_type));
3361 if (crtl->stack_alignment_estimated < align)
3363 gcc_assert (!crtl->stack_realign_processed);
3364 crtl->stack_alignment_estimated = align;
3368 if (cfun->stdarg && !DECL_CHAIN (parm))
3369 assign_parms_setup_varargs (&all, &data, false);
3371 /* Find out where the parameter arrives in this function. */
3372 assign_parm_find_entry_rtl (&all, &data);
3374 /* Find out where stack space for this parameter might be. */
3375 if (assign_parm_is_stack_parm (&all, &data))
3377 assign_parm_find_stack_rtl (parm, &data);
3378 assign_parm_adjust_entry_rtl (&data);
3381 /* Record permanently how this parm was passed. */
3382 if (data.passed_pointer)
3384 rtx incoming_rtl
3385 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3386 data.entry_parm);
3387 set_decl_incoming_rtl (parm, incoming_rtl, true);
3389 else
3390 set_decl_incoming_rtl (parm, data.entry_parm, false);
3392 /* Update info on where next arg arrives in registers. */
3393 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3394 data.passed_type, data.named_arg);
3396 assign_parm_adjust_stack_rtl (&data);
3398 if (assign_parm_setup_block_p (&data))
3399 assign_parm_setup_block (&all, parm, &data);
3400 else if (data.passed_pointer || use_register_for_decl (parm))
3401 assign_parm_setup_reg (&all, parm, &data);
3402 else
3403 assign_parm_setup_stack (&all, parm, &data);
3406 if (targetm.calls.split_complex_arg)
3407 assign_parms_unsplit_complex (&all, fnargs);
3409 VEC_free (tree, heap, fnargs);
3411 /* Output all parameter conversion instructions (possibly including calls)
3412 now that all parameters have been copied out of hard registers. */
3413 emit_insn (all.first_conversion_insn);
3415 /* Estimate reload stack alignment from scalar return mode. */
3416 if (SUPPORTS_STACK_ALIGNMENT)
3418 if (DECL_RESULT (fndecl))
3420 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3421 enum machine_mode mode = TYPE_MODE (type);
3423 if (mode != BLKmode
3424 && mode != VOIDmode
3425 && !AGGREGATE_TYPE_P (type))
3427 unsigned int align = GET_MODE_ALIGNMENT (mode);
3428 if (crtl->stack_alignment_estimated < align)
3430 gcc_assert (!crtl->stack_realign_processed);
3431 crtl->stack_alignment_estimated = align;
3437 /* If we are receiving a struct value address as the first argument, set up
3438 the RTL for the function result. As this might require code to convert
3439 the transmitted address to Pmode, we do this here to ensure that possible
3440 preliminary conversions of the address have been emitted already. */
3441 if (all.function_result_decl)
3443 tree result = DECL_RESULT (current_function_decl);
3444 rtx addr = DECL_RTL (all.function_result_decl);
3445 rtx x;
3447 if (DECL_BY_REFERENCE (result))
3449 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3450 x = addr;
3452 else
3454 SET_DECL_VALUE_EXPR (result,
3455 build1 (INDIRECT_REF, TREE_TYPE (result),
3456 all.function_result_decl));
3457 addr = convert_memory_address (Pmode, addr);
3458 x = gen_rtx_MEM (DECL_MODE (result), addr);
3459 set_mem_attributes (x, result, 1);
3462 DECL_HAS_VALUE_EXPR_P (result) = 1;
3464 SET_DECL_RTL (result, x);
3467 /* We have aligned all the args, so add space for the pretend args. */
3468 crtl->args.pretend_args_size = all.pretend_args_size;
3469 all.stack_args_size.constant += all.extra_pretend_bytes;
3470 crtl->args.size = all.stack_args_size.constant;
3472 /* Adjust function incoming argument size for alignment and
3473 minimum length. */
3475 #ifdef REG_PARM_STACK_SPACE
3476 crtl->args.size = MAX (crtl->args.size,
3477 REG_PARM_STACK_SPACE (fndecl));
3478 #endif
3480 crtl->args.size = CEIL_ROUND (crtl->args.size,
3481 PARM_BOUNDARY / BITS_PER_UNIT);
3483 #ifdef ARGS_GROW_DOWNWARD
3484 crtl->args.arg_offset_rtx
3485 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3486 : expand_expr (size_diffop (all.stack_args_size.var,
3487 size_int (-all.stack_args_size.constant)),
3488 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3489 #else
3490 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3491 #endif
3493 /* See how many bytes, if any, of its args a function should try to pop
3494 on return. */
3496 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3497 TREE_TYPE (fndecl),
3498 crtl->args.size);
3500 /* For a stdarg.h function, save info about
3501 regs and stack space used by the named args. */
3503 crtl->args.info = all.args_so_far_v;
3505 /* Set the rtx used for the function return value. Put this in its
3506 own variable so any optimizers that need this information don't have
3507 to include tree.h. Do this here so it gets done when an inlined
3508 function gets output. */
3510 crtl->return_rtx
3511 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3512 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3514 /* If scalar return value was computed in a pseudo-reg, or was a named
3515 return value that got dumped to the stack, copy that to the hard
3516 return register. */
3517 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3519 tree decl_result = DECL_RESULT (fndecl);
3520 rtx decl_rtl = DECL_RTL (decl_result);
3522 if (REG_P (decl_rtl)
3523 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3524 : DECL_REGISTER (decl_result))
3526 rtx real_decl_rtl;
3528 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3529 fndecl, true);
3530 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3531 /* The delay slot scheduler assumes that crtl->return_rtx
3532 holds the hard register containing the return value, not a
3533 temporary pseudo. */
3534 crtl->return_rtx = real_decl_rtl;
3539 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3540 For all seen types, gimplify their sizes. */
3542 static tree
3543 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3545 tree t = *tp;
3547 *walk_subtrees = 0;
3548 if (TYPE_P (t))
3550 if (POINTER_TYPE_P (t))
3551 *walk_subtrees = 1;
3552 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3553 && !TYPE_SIZES_GIMPLIFIED (t))
3555 gimplify_type_sizes (t, (gimple_seq *) data);
3556 *walk_subtrees = 1;
3560 return NULL;
3563 /* Gimplify the parameter list for current_function_decl. This involves
3564 evaluating SAVE_EXPRs of variable sized parameters and generating code
3565 to implement callee-copied reference parameters. Returns a sequence of
3566 statements to add to the beginning of the function. */
3568 gimple_seq
3569 gimplify_parameters (void)
3571 struct assign_parm_data_all all;
3572 tree parm;
3573 gimple_seq stmts = NULL;
3574 VEC(tree, heap) *fnargs;
3575 unsigned i;
3577 assign_parms_initialize_all (&all);
3578 fnargs = assign_parms_augmented_arg_list (&all);
3580 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
3582 struct assign_parm_data_one data;
3584 /* Extract the type of PARM; adjust it according to ABI. */
3585 assign_parm_find_data_types (&all, parm, &data);
3587 /* Early out for errors and void parameters. */
3588 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3589 continue;
3591 /* Update info on where next arg arrives in registers. */
3592 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3593 data.passed_type, data.named_arg);
3595 /* ??? Once upon a time variable_size stuffed parameter list
3596 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3597 turned out to be less than manageable in the gimple world.
3598 Now we have to hunt them down ourselves. */
3599 walk_tree_without_duplicates (&data.passed_type,
3600 gimplify_parm_type, &stmts);
3602 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3604 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3605 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3608 if (data.passed_pointer)
3610 tree type = TREE_TYPE (data.passed_type);
3611 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
3612 type, data.named_arg))
3614 tree local, t;
3616 /* For constant-sized objects, this is trivial; for
3617 variable-sized objects, we have to play games. */
3618 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3619 && !(flag_stack_check == GENERIC_STACK_CHECK
3620 && compare_tree_int (DECL_SIZE_UNIT (parm),
3621 STACK_CHECK_MAX_VAR_SIZE) > 0))
3623 local = create_tmp_reg (type, get_name (parm));
3624 DECL_IGNORED_P (local) = 0;
3625 /* If PARM was addressable, move that flag over
3626 to the local copy, as its address will be taken,
3627 not the PARM's. Keep the parm's address taken
3628 as we'll query that flag during gimplification. */
3629 if (TREE_ADDRESSABLE (parm))
3630 TREE_ADDRESSABLE (local) = 1;
3632 else
3634 tree ptr_type, addr;
3636 ptr_type = build_pointer_type (type);
3637 addr = create_tmp_reg (ptr_type, get_name (parm));
3638 DECL_IGNORED_P (addr) = 0;
3639 local = build_fold_indirect_ref (addr);
3641 t = built_in_decls[BUILT_IN_ALLOCA];
3642 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3643 /* The call has been built for a variable-sized object. */
3644 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3645 t = fold_convert (ptr_type, t);
3646 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3647 gimplify_and_add (t, &stmts);
3650 gimplify_assign (local, parm, &stmts);
3652 SET_DECL_VALUE_EXPR (parm, local);
3653 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3658 VEC_free (tree, heap, fnargs);
3660 return stmts;
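/* Editorial aside, not part of GCC: a pseudo-gimple sketch of the
   callee-copy sequence built above for a variable-sized parameter
   (temporary names invented for illustration):

     addr.0 = __builtin_alloca (DECL_SIZE_UNIT (parm));
     *addr.0 = parm;                  <-- gimplify_assign (local, parm)
     DECL_VALUE_EXPR (parm) = *addr.0;

   For a constant-sized parameter the copy target is instead a plain
   local temporary created by create_tmp_reg.  */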
3663 /* Compute the size and offset from the start of the stacked arguments for a
3664 parm passed in mode PASSED_MODE and with type TYPE.
3666 INITIAL_OFFSET_PTR points to the current offset into the stacked
3667 arguments.
3669 The starting offset and size for this parm are returned in
3670 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3671 nonzero, the offset is that of the stack slot, which is returned in
3672 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3673 padding required from the initial offset ptr to the stack slot.
3675 IN_REGS is nonzero if the argument will be passed in registers. It will
3676 never be set if REG_PARM_STACK_SPACE is not defined.
3678 FNDECL is the function in which the argument was defined.
3680 There are two types of rounding that are done. The first, controlled by
3681 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3682 argument list to be aligned to the specific boundary (in bits). This
3683 rounding affects the initial and starting offsets, but not the argument
3684 size.
3686 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3687 optionally rounds the size of the parm to PARM_BOUNDARY. The
3688 initial offset is not affected by this rounding, while the size always
3689 is and the starting offset may be. */
3691 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3692 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3693 callers pass in the total size of args so far as
3694 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
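/* Editorial worked example (assumed values, not from the original
   source): let PARM_BOUNDARY be 32 bits and the argument's boundary
   from TARGET_FUNCTION_ARG_BOUNDARY be 64 bits, with 4 bytes of
   arguments already laid out.  The first rounding moves the starting
   offset from 4 up to 8, leaving the size alone.  For a 5-byte BLKmode
   argument padded upward, the second rounding instead grows the size
   from 5 to 8 bytes while the starting offset stays put.  */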
3696 void
3697 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3698 int partial, tree fndecl ATTRIBUTE_UNUSED,
3699 struct args_size *initial_offset_ptr,
3700 struct locate_and_pad_arg_data *locate)
3702 tree sizetree;
3703 enum direction where_pad;
3704 unsigned int boundary, round_boundary;
3705 int reg_parm_stack_space = 0;
3706 int part_size_in_regs;
3708 #ifdef REG_PARM_STACK_SPACE
3709 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3711 /* If we have found a stack parm before we reach the end of the
3712 area reserved for registers, skip that area. */
3713 if (! in_regs)
3715 if (reg_parm_stack_space > 0)
3717 if (initial_offset_ptr->var)
3719 initial_offset_ptr->var
3720 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3721 ssize_int (reg_parm_stack_space));
3722 initial_offset_ptr->constant = 0;
3724 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3725 initial_offset_ptr->constant = reg_parm_stack_space;
3728 #endif /* REG_PARM_STACK_SPACE */
3730 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3732 sizetree
3733 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3734 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3735 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
3736 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3737 type);
3738 locate->where_pad = where_pad;
3740 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3741 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3742 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3744 locate->boundary = boundary;
3746 if (SUPPORTS_STACK_ALIGNMENT)
3748 /* stack_alignment_estimated can't change after stack has been
3749 realigned. */
3750 if (crtl->stack_alignment_estimated < boundary)
3752 if (!crtl->stack_realign_processed)
3753 crtl->stack_alignment_estimated = boundary;
3754 else
3756 /* If stack is realigned and stack alignment value
3757 hasn't been finalized, it is OK not to increase
3758 stack_alignment_estimated. The bigger alignment
3759 requirement is recorded in stack_alignment_needed
3760 below. */
3761 gcc_assert (!crtl->stack_realign_finalized
3762 && crtl->stack_realign_needed);
3767 /* Remember if the outgoing parameter requires extra alignment on the
3768 calling function side. */
3769 if (crtl->stack_alignment_needed < boundary)
3770 crtl->stack_alignment_needed = boundary;
3771 if (crtl->preferred_stack_boundary < boundary)
3772 crtl->preferred_stack_boundary = boundary;
3774 #ifdef ARGS_GROW_DOWNWARD
3775 locate->slot_offset.constant = -initial_offset_ptr->constant;
3776 if (initial_offset_ptr->var)
3777 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3778 initial_offset_ptr->var);
3781 tree s2 = sizetree;
3782 if (where_pad != none
3783 && (!host_integerp (sizetree, 1)
3784 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3785 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
3786 SUB_PARM_SIZE (locate->slot_offset, s2);
3789 locate->slot_offset.constant += part_size_in_regs;
3791 if (!in_regs
3792 #ifdef REG_PARM_STACK_SPACE
3793 || REG_PARM_STACK_SPACE (fndecl) > 0
3794 #endif
3796 pad_to_arg_alignment (&locate->slot_offset, boundary,
3797 &locate->alignment_pad);
3799 locate->size.constant = (-initial_offset_ptr->constant
3800 - locate->slot_offset.constant);
3801 if (initial_offset_ptr->var)
3802 locate->size.var = size_binop (MINUS_EXPR,
3803 size_binop (MINUS_EXPR,
3804 ssize_int (0),
3805 initial_offset_ptr->var),
3806 locate->slot_offset.var);
3808 /* Pad_below needs the pre-rounded size to know how much to pad
3809 below. */
3810 locate->offset = locate->slot_offset;
3811 if (where_pad == downward)
3812 pad_below (&locate->offset, passed_mode, sizetree);
3814 #else /* !ARGS_GROW_DOWNWARD */
3815 if (!in_regs
3816 #ifdef REG_PARM_STACK_SPACE
3817 || REG_PARM_STACK_SPACE (fndecl) > 0
3818 #endif
3820 pad_to_arg_alignment (initial_offset_ptr, boundary,
3821 &locate->alignment_pad);
3822 locate->slot_offset = *initial_offset_ptr;
3824 #ifdef PUSH_ROUNDING
3825 if (passed_mode != BLKmode)
3826 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3827 #endif
3829 /* Pad_below needs the pre-rounded size to know how much to pad below
3830 so this must be done before rounding up. */
3831 locate->offset = locate->slot_offset;
3832 if (where_pad == downward)
3833 pad_below (&locate->offset, passed_mode, sizetree);
3835 if (where_pad != none
3836 && (!host_integerp (sizetree, 1)
3837 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3838 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
3840 ADD_PARM_SIZE (locate->size, sizetree);
3842 locate->size.constant -= part_size_in_regs;
3843 #endif /* ARGS_GROW_DOWNWARD */
3845 #ifdef FUNCTION_ARG_OFFSET
3846 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3847 #endif
3850 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3851 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
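/* Editorial worked example (assumed numbers): with BOUNDARY = 64 bits
   (boundary_in_bytes = 8), offset_ptr->constant = 18 and
   STACK_POINTER_OFFSET = 4, the constant branch below computes

       -4 + CEIL_ROUND (18 + 4, 8) = -4 + 24 = 20

   so the offset advances from 18 to 20, and (assuming 64 exceeds
   PARM_BOUNDARY) alignment_pad->constant becomes 20 - 18 = 2.  */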
3853 static void
3854 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3855 struct args_size *alignment_pad)
3857 tree save_var = NULL_TREE;
3858 HOST_WIDE_INT save_constant = 0;
3859 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3860 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3862 #ifdef SPARC_STACK_BOUNDARY_HACK
3863 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3864 the real alignment of %sp. However, when it does this, the
3865 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3866 if (SPARC_STACK_BOUNDARY_HACK)
3867 sp_offset = 0;
3868 #endif
3870 if (boundary > PARM_BOUNDARY)
3872 save_var = offset_ptr->var;
3873 save_constant = offset_ptr->constant;
3876 alignment_pad->var = NULL_TREE;
3877 alignment_pad->constant = 0;
3879 if (boundary > BITS_PER_UNIT)
3881 if (offset_ptr->var)
3883 tree sp_offset_tree = ssize_int (sp_offset);
3884 tree offset = size_binop (PLUS_EXPR,
3885 ARGS_SIZE_TREE (*offset_ptr),
3886 sp_offset_tree);
3887 #ifdef ARGS_GROW_DOWNWARD
3888 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3889 #else
3890 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3891 #endif
3893 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3894 /* ARGS_SIZE_TREE includes constant term. */
3895 offset_ptr->constant = 0;
3896 if (boundary > PARM_BOUNDARY)
3897 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3898 save_var);
3900 else
3902 offset_ptr->constant = -sp_offset +
3903 #ifdef ARGS_GROW_DOWNWARD
3904 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3905 #else
3906 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3907 #endif
3908 if (boundary > PARM_BOUNDARY)
3909 alignment_pad->constant = offset_ptr->constant - save_constant;
3914 static void
3915 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3917 if (passed_mode != BLKmode)
3919 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3920 offset_ptr->constant
3921 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3922 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3923 - GET_MODE_SIZE (passed_mode));
3925 else
3927 if (TREE_CODE (sizetree) != INTEGER_CST
3928 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3930 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3931 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3932 /* Add it in. */
3933 ADD_PARM_SIZE (*offset_ptr, s2);
3934 SUB_PARM_SIZE (*offset_ptr, sizetree);
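/* Editorial worked example (assumed numbers): for passed_mode = HImode
   (16 bits, 2 bytes) with PARM_BOUNDARY = 32, the non-BLKmode branch
   above adds

       (16 + 31) / 32 * 32 / 8 - 2 = 4 - 2 = 2

   bytes to the offset: the 2-byte value sits below 2 bytes of padding
   inside its 4-byte slot.  */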
3940 /* True if register REGNO was alive at a place where `setjmp' was
3941 called and was set more than once or is an argument. Such regs may
3942 be clobbered by `longjmp'. */
3944 static bool
3945 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3947 /* There appear to be cases where some local vars never reach the
3948 backend but have bogus regnos. */
3949 if (regno >= max_reg_num ())
3950 return false;
3952 return ((REG_N_SETS (regno) > 1
3953 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3954 && REGNO_REG_SET_P (setjmp_crosses, regno));
3957 /* Walk the tree of blocks describing the binding levels within a
3958 function and warn about variables that might be killed by setjmp or
3959 vfork. This is done after flow analysis but before register
3960 allocation, since register allocation will map the pseudo-regs to hard
3961 regs. */
3963 static void
3964 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3966 tree decl, sub;
3968 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
3970 if (TREE_CODE (decl) == VAR_DECL
3971 && DECL_RTL_SET_P (decl)
3972 && REG_P (DECL_RTL (decl))
3973 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3974 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3975 " %<longjmp%> or %<vfork%>", decl);
3978 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3979 setjmp_vars_warning (setjmp_crosses, sub);
3982 /* Do the appropriate part of setjmp_vars_warning
3983 but for arguments instead of local variables. */
3985 static void
3986 setjmp_args_warning (bitmap setjmp_crosses)
3988 tree decl;
3989 for (decl = DECL_ARGUMENTS (current_function_decl);
3990 decl; decl = DECL_CHAIN (decl))
3991 if (DECL_RTL (decl) != 0
3992 && REG_P (DECL_RTL (decl))
3993 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3994 warning (OPT_Wclobbered,
3995 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3996 decl);
3999 /* Generate warning messages for variables live across setjmp. */
4001 void
4002 generate_setjmp_warnings (void)
4004 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4006 if (n_basic_blocks == NUM_FIXED_BLOCKS
4007 || bitmap_empty_p (setjmp_crosses))
4008 return;
4010 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4011 setjmp_args_warning (setjmp_crosses);
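/* Editorial example (illustrative only, not from the original source):
   with -Wclobbered, code along these lines can provoke the warnings
   above when V or ARG ends up in a call-clobbered hard register:

       #include <setjmp.h>
       extern int compute (void);
       extern void might_longjmp (void);   // may call longjmp (env, 1)
       static jmp_buf env;
       int f (int arg)
       {
         int v = 0;
         if (setjmp (env))
           return v + arg;    // V and ARG live across the setjmp
         v = compute ();      // V is set more than once
         might_longjmp ();
         return 0;
       }

   Whether the warning fires depends on register allocation.  */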
4015 /* Reverse the order of elements in the fragment chain T of blocks,
4016 and return the new head of the chain (old last element). */
4018 static tree
4019 block_fragments_nreverse (tree t)
4021 tree prev = 0, block, next;
4022 for (block = t; block; block = next)
4024 next = BLOCK_FRAGMENT_CHAIN (block);
4025 BLOCK_FRAGMENT_CHAIN (block) = prev;
4026 prev = block;
4028 return prev;
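/* Editorial note: this is the standard in-place singly-linked-list
   reversal, applied to the BLOCK_FRAGMENT_CHAIN links: a chain
   A -> B -> C comes back as C -> B -> A, with C returned as the new
   head.  */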
4031 /* Reverse the order of elements in the chain T of blocks,
4032 and return the new head of the chain (old last element).
4033 Also do the same on subblocks and reverse the order of elements
4034 in BLOCK_FRAGMENT_CHAIN as well. */
4036 static tree
4037 blocks_nreverse_all (tree t)
4039 tree prev = 0, block, next;
4040 for (block = t; block; block = next)
4042 next = BLOCK_CHAIN (block);
4043 BLOCK_CHAIN (block) = prev;
4044 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4045 if (BLOCK_FRAGMENT_CHAIN (block)
4046 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4047 BLOCK_FRAGMENT_CHAIN (block)
4048 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4049 prev = block;
4051 return prev;
4055 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4056 and create duplicate blocks. */
4057 /* ??? Need an option to either create block fragments or to create
4058 abstract origin duplicates of a source block. It really depends
4059 on what optimization has been performed. */
4061 void
4062 reorder_blocks (void)
4064 tree block = DECL_INITIAL (current_function_decl);
4065 VEC(tree,heap) *block_stack;
4067 if (block == NULL_TREE)
4068 return;
4070 block_stack = VEC_alloc (tree, heap, 10);
4072 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4073 clear_block_marks (block);
4075 /* Prune the old trees away, so that they don't get in the way. */
4076 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4077 BLOCK_CHAIN (block) = NULL_TREE;
4079 /* Recreate the block tree from the note nesting. */
4080 reorder_blocks_1 (get_insns (), block, &block_stack);
4081 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4083 VEC_free (tree, heap, block_stack);
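/* Editorial note: a block "fragment" arises when optimization (for
   example basic-block reordering or hot/cold partitioning) leaves a
   lexical block's insns in non-contiguous address ranges.  Each extra
   range gets a copied BLOCK whose BLOCK_FRAGMENT_ORIGIN points back at
   the original, linked through BLOCK_FRAGMENT_CHAIN; reorder_blocks_1
   below creates these copies as it rescans the note nesting.  */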
4086 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4088 void
4089 clear_block_marks (tree block)
4091 while (block)
4093 TREE_ASM_WRITTEN (block) = 0;
4094 clear_block_marks (BLOCK_SUBBLOCKS (block));
4095 block = BLOCK_CHAIN (block);
4099 static void
4100 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
4102 rtx insn;
4104 for (insn = insns; insn; insn = NEXT_INSN (insn))
4106 if (NOTE_P (insn))
4108 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4110 tree block = NOTE_BLOCK (insn);
4111 tree origin;
4113 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4114 origin = block;
4116 /* If we have seen this block before, that means it now
4117 spans multiple address regions. Create a new fragment. */
4118 if (TREE_ASM_WRITTEN (block))
4120 tree new_block = copy_node (block);
4122 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4123 BLOCK_FRAGMENT_CHAIN (new_block)
4124 = BLOCK_FRAGMENT_CHAIN (origin);
4125 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4127 NOTE_BLOCK (insn) = new_block;
4128 block = new_block;
4131 BLOCK_SUBBLOCKS (block) = 0;
4132 TREE_ASM_WRITTEN (block) = 1;
4133 /* When there's only one block for the entire function,
4134 current_block == block, and we mustn't do this; it
4135 would cause infinite recursion. */
4136 if (block != current_block)
4138 if (block != origin)
4139 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
4141 BLOCK_SUPERCONTEXT (block) = current_block;
4142 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4143 BLOCK_SUBBLOCKS (current_block) = block;
4144 current_block = origin;
4146 VEC_safe_push (tree, heap, *p_block_stack, block);
4148 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4150 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
4151 current_block = BLOCK_SUPERCONTEXT (current_block);
4157 /* Reverse the order of elements in the chain T of blocks,
4158 and return the new head of the chain (old last element). */
4160 tree
4161 blocks_nreverse (tree t)
4163 tree prev = 0, block, next;
4164 for (block = t; block; block = next)
4166 next = BLOCK_CHAIN (block);
4167 BLOCK_CHAIN (block) = prev;
4168 prev = block;
4170 return prev;
4173 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4174 by modifying the last node in chain 1 to point to chain 2. */
4176 tree
4177 block_chainon (tree op1, tree op2)
4179 tree t1;
4181 if (!op1)
4182 return op2;
4183 if (!op2)
4184 return op1;
4186 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4187 continue;
4188 BLOCK_CHAIN (t1) = op2;
4190 #ifdef ENABLE_TREE_CHECKING
4192 tree t2;
4193 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4194 gcc_assert (t2 != t1);
4196 #endif
4198 return op1;
4201 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4202 non-NULL, list them all into VECTOR, in a depth-first preorder
4203 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4204 blocks. */
4206 static int
4207 all_blocks (tree block, tree *vector)
4209 int n_blocks = 0;
4211 while (block)
4213 TREE_ASM_WRITTEN (block) = 0;
4215 /* Record this block. */
4216 if (vector)
4217 vector[n_blocks] = block;
4219 ++n_blocks;
4221 /* Record the subblocks, and their subblocks... */
4222 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4223 vector ? vector + n_blocks : 0);
4224 block = BLOCK_CHAIN (block);
4227 return n_blocks;
4230 /* Return a vector containing all the blocks rooted at BLOCK. The
4231 number of elements in the vector is stored in N_BLOCKS_P. The
4232 vector is dynamically allocated; it is the caller's responsibility
4233 to call `free' on the pointer returned. */
4235 static tree *
4236 get_block_vector (tree block, int *n_blocks_p)
4238 tree *block_vector;
4240 *n_blocks_p = all_blocks (block, NULL);
4241 block_vector = XNEWVEC (tree, *n_blocks_p);
4242 all_blocks (block, block_vector);
4244 return block_vector;
4247 static GTY(()) int next_block_index = 2;
4249 /* Set BLOCK_NUMBER for all the blocks in FN. */
4251 void
4252 number_blocks (tree fn)
4254 int i;
4255 int n_blocks;
4256 tree *block_vector;
4258 /* For SDB and XCOFF debugging output, we start numbering the blocks
4259 from 1 within each function, rather than keeping a running
4260 count. */
4261 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4262 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4263 next_block_index = 1;
4264 #endif
4266 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4268 /* The top-level BLOCK isn't numbered at all. */
4269 for (i = 1; i < n_blocks; ++i)
4270 /* We number the blocks from two. */
4271 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4273 free (block_vector);
4275 return;
4278 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4280 DEBUG_FUNCTION tree
4281 debug_find_var_in_block_tree (tree var, tree block)
4283 tree t;
4285 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4286 if (t == var)
4287 return block;
4289 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4291 tree ret = debug_find_var_in_block_tree (var, t);
4292 if (ret)
4293 return ret;
4296 return NULL_TREE;
4299 /* Keep track of whether we're in a dummy function context. If we are,
4300 we don't want to invoke the set_current_function hook, because we'll
4301 get into trouble if the hook calls target_reinit () recursively or
4302 when the initial initialization is not yet complete. */
4304 static bool in_dummy_function;
4306 /* Invoke the target hook when setting cfun. Update the optimization options
4307 if the function uses different options than the default. */
4309 static void
4310 invoke_set_current_function_hook (tree fndecl)
4312 if (!in_dummy_function)
4314 tree opts = ((fndecl)
4315 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4316 : optimization_default_node);
4318 if (!opts)
4319 opts = optimization_default_node;
4321 /* Change optimization options if needed. */
4322 if (optimization_current_node != opts)
4324 optimization_current_node = opts;
4325 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4328 targetm.set_current_function (fndecl);
4332 /* cfun should never be set directly; use this function. */
4334 void
4335 set_cfun (struct function *new_cfun)
4337 if (cfun != new_cfun)
4339 cfun = new_cfun;
4340 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4344 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4346 static VEC(function_p,heap) *cfun_stack;
4348 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
4350 void
4351 push_cfun (struct function *new_cfun)
4353 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4354 set_cfun (new_cfun);
4357 /* Pop cfun from the stack. */
4359 void
4360 pop_cfun (void)
4362 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
4363 set_cfun (new_cfun);
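/* Editorial usage sketch (not from the original source): code that must
   temporarily operate on another function brackets the work like so:

       push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
       ... inspect or modify the other function through cfun ...
       pop_cfun ();

   which restores both cfun and the target's notion of the current
   function on the way out.  */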
4366 /* Return the current value of funcdef_no and increment it. */
4367 int
4368 get_next_funcdef_no (void)
4370 return funcdef_no++;
4373 /* Return the current value of funcdef_no. */
4374 int
4375 get_last_funcdef_no (void)
4377 return funcdef_no;
4380 /* Allocate a function structure for FNDECL and set its contents
4381 to the defaults. Set cfun to the newly-allocated object.
4382 Some of the helper functions invoked during initialization assume
4383 that cfun has already been set. Therefore, assign the new object
4384 directly into cfun and invoke the back end hook explicitly at the
4385 very end, rather than initializing a temporary and calling set_cfun
4386 on it.
4388 ABSTRACT_P is true if this is a function that will never be seen by
4389 the middle-end. Such functions are front-end concepts (like C++
4390 function templates) that do not correspond directly to functions
4391 placed in object files. */
4393 void
4394 allocate_struct_function (tree fndecl, bool abstract_p)
4396 tree result;
4397 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4399 cfun = ggc_alloc_cleared_function ();
4401 init_eh_for_function ();
4403 if (init_machine_status)
4404 cfun->machine = (*init_machine_status) ();
4406 #ifdef OVERRIDE_ABI_FORMAT
4407 OVERRIDE_ABI_FORMAT (fndecl);
4408 #endif
4410 invoke_set_current_function_hook (fndecl);
4412 if (fndecl != NULL_TREE)
4414 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4415 cfun->decl = fndecl;
4416 current_function_funcdef_no = get_next_funcdef_no ();
4418 result = DECL_RESULT (fndecl);
4419 if (!abstract_p && aggregate_value_p (result, fndecl))
4421 #ifdef PCC_STATIC_STRUCT_RETURN
4422 cfun->returns_pcc_struct = 1;
4423 #endif
4424 cfun->returns_struct = 1;
4427 cfun->stdarg = stdarg_p (fntype);
4429 /* Assume all registers in stdarg functions need to be saved. */
4430 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4431 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4433 /* ??? This could be set on a per-function basis by the front-end
4434 but is this worth the hassle? */
4435 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4439 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4440 instead of just setting it. */
4442 void
4443 push_struct_function (tree fndecl)
4445 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4446 allocate_struct_function (fndecl, false);
4449 /* Reset crtl and other non-struct-function variables to defaults as
4450 appropriate for emitting rtl at the start of a function. */
4452 static void
4453 prepare_function_start (void)
4455 gcc_assert (!crtl->emit.x_last_insn);
4456 init_temp_slots ();
4457 init_emit ();
4458 init_varasm_status ();
4459 init_expr ();
4460 default_rtl_profile ();
4462 if (flag_stack_usage_info)
4464 cfun->su = ggc_alloc_cleared_stack_usage ();
4465 cfun->su->static_stack_size = -1;
4468 cse_not_expected = ! optimize;
4470 /* Caller save not needed yet. */
4471 caller_save_needed = 0;
4473 /* We haven't done register allocation yet. */
4474 reg_renumber = 0;
4476 /* Indicate that we have not instantiated virtual registers yet. */
4477 virtuals_instantiated = 0;
4479 /* Indicate that we want CONCATs now. */
4480 generating_concat_p = 1;
4482 /* Indicate we have no need of a frame pointer yet. */
4483 frame_pointer_needed = 0;
4486 /* Initialize the rtl expansion mechanism so that we can do simple things
4487 like generate sequences. This is used to provide a context during global
4488 initialization of some passes. You must call expand_dummy_function_end
4489 to exit this context. */
4491 void
4492 init_dummy_function_start (void)
4494 gcc_assert (!in_dummy_function);
4495 in_dummy_function = true;
4496 push_struct_function (NULL_TREE);
4497 prepare_function_start ();
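/* Editorial usage sketch (an assumed, minimal pattern): a pass needing
   an RTL context during global initialization pairs the entry points:

       init_dummy_function_start ();
       start_sequence ();
       ... generate throw-away insns ...
       end_sequence ();
       expand_dummy_function_end ();

   See expand_dummy_function_end below for the teardown half.  */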
4500 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4501 and initialize static variables for generating RTL for the statements
4502 of the function. */
4504 void
4505 init_function_start (tree subr)
4507 if (subr && DECL_STRUCT_FUNCTION (subr))
4508 set_cfun (DECL_STRUCT_FUNCTION (subr));
4509 else
4510 allocate_struct_function (subr, false);
4511 prepare_function_start ();
4512 decide_function_section (subr);
4514 /* Warn if this value is an aggregate type,
4515 regardless of which calling convention we are using for it. */
4516 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4517 warning (OPT_Waggregate_return, "function returns an aggregate");
4520 /* Make sure all values used by the optimization passes have sane defaults. */
4521 unsigned int
4522 init_function_for_compilation (void)
4524 reg_renumber = 0;
4525 return 0;
4528 struct rtl_opt_pass pass_init_function =
4531 RTL_PASS,
4532 "*init_function", /* name */
4533 NULL, /* gate */
4534 init_function_for_compilation, /* execute */
4535 NULL, /* sub */
4536 NULL, /* next */
4537 0, /* static_pass_number */
4538 TV_NONE, /* tv_id */
4539 0, /* properties_required */
4540 0, /* properties_provided */
4541 0, /* properties_destroyed */
4542 0, /* todo_flags_start */
4543 0 /* todo_flags_finish */
4548 void
4549 expand_main_function (void)
4551 #if (defined(INVOKE__main) \
4552 || (!defined(HAS_INIT_SECTION) \
4553 && !defined(INIT_SECTION_ASM_OP) \
4554 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4555 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4556 #endif
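/* Editorial note (an assumed illustration): on targets without
   .init/.init_array support, global constructors still run because main
   effectively begins

       int main (void) { __main (); ... user code ... }

   with __main (the symbol behind NAME__MAIN) walking the constructor
   list in libgcc.  */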
4559 /* Expand code to initialize the stack_protect_guard. This is invoked at
4560 the beginning of a function to be protected. */
4562 #ifndef HAVE_stack_protect_set
4563 # define HAVE_stack_protect_set 0
4564 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4565 #endif
4567 void
4568 stack_protect_prologue (void)
4570 tree guard_decl = targetm.stack_protect_guard ();
4571 rtx x, y;
4573 x = expand_normal (crtl->stack_protect_guard);
4574 y = expand_normal (guard_decl);
4576 /* Allow the target to copy from Y to X without leaking Y into a
4577 register. */
4578 if (HAVE_stack_protect_set)
4580 rtx insn = gen_stack_protect_set (x, y);
4581 if (insn)
4583 emit_insn (insn);
4584 return;
4588 /* Otherwise do a straight move. */
4589 emit_move_insn (x, y);
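/* Editorial sketch (assumed symbol names): together with
   stack_protect_epilogue below, the emitted code behaves roughly like

       canary_slot = __stack_chk_guard;            // prologue
       ...
       if (canary_slot != __stack_chk_guard)       // epilogue
         __stack_chk_fail ();

   where __stack_chk_guard is the usual glibc guard symbol; the actual
   guard decl and failure callee come from the target hooks
   targetm.stack_protect_guard and targetm.stack_protect_fail.  */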
4592 /* Expand code to verify the stack_protect_guard. This is invoked at
4593 the end of a function to be protected. */
4595 #ifndef HAVE_stack_protect_test
4596 # define HAVE_stack_protect_test 0
4597 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4598 #endif
4600 void
4601 stack_protect_epilogue (void)
4603 tree guard_decl = targetm.stack_protect_guard ();
4604 rtx label = gen_label_rtx ();
4605 rtx x, y, tmp;
4607 x = expand_normal (crtl->stack_protect_guard);
4608 y = expand_normal (guard_decl);
4610 /* Allow the target to compare Y with X without leaking either into
4611 a register. */
4612 switch (HAVE_stack_protect_test != 0)
4614 case 1:
4615 tmp = gen_stack_protect_test (x, y, label);
4616 if (tmp)
4618 emit_insn (tmp);
4619 break;
4621 /* FALLTHRU */
4623 default:
4624 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4625 break;
4628 /* The noreturn predictor has been moved to the tree level. The rtl-level
4629 predictors estimate this branch at about 20%, which isn't enough to get
4630 things moved out of line. Since this is the only extant case of adding
4631 a noreturn function at the rtl level, it doesn't seem worth doing aught
4632 except adding the prediction by hand. */
4633 tmp = get_last_insn ();
4634 if (JUMP_P (tmp))
4635 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4637 expand_expr_stmt (targetm.stack_protect_fail ());
4638 emit_label (label);
4641 /* Start the RTL for a new function, and set variables used for
4642 emitting RTL.
4643 SUBR is the FUNCTION_DECL node. */
4647 void
4648 expand_function_start (tree subr)
4650 /* Make sure volatile mem refs aren't considered
4651 valid operands of arithmetic insns. */
4652 init_recog_no_volatile ();
4654 crtl->profile
4655 = (profile_flag
4656 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4658 crtl->limit_stack
4659 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4661 /* Make the label for return statements to jump to. Do not special
4662 case machines with special return instructions -- they will be
4663 handled later during jump, ifcvt, or epilogue creation. */
4664 return_label = gen_label_rtx ();
4666 /* Initialize rtx used to return the value. */
4667 /* Do this before assign_parms so that we copy the struct value address
4668 before any library calls that assign parms might generate. */
4670 /* Decide whether to return the value in memory or in a register. */
4671 if (aggregate_value_p (DECL_RESULT (subr), subr))
4673 /* Returning something that won't go in a register. */
4674 rtx value_address = 0;
4676 #ifdef PCC_STATIC_STRUCT_RETURN
4677 if (cfun->returns_pcc_struct)
4679 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4680 value_address = assemble_static_space (size);
4682 else
4683 #endif
4685 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4686 /* Expect to be passed the address of a place to store the value.
4687 If it is passed as an argument, assign_parms will take care of
4688 it. */
4689 if (sv)
4691 value_address = gen_reg_rtx (Pmode);
4692 emit_move_insn (value_address, sv);
4695 if (value_address)
4697 rtx x = value_address;
4698 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4700 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4701 set_mem_attributes (x, DECL_RESULT (subr), 1);
4703 SET_DECL_RTL (DECL_RESULT (subr), x);
4706 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4707 /* If return mode is void, this decl rtl should not be used. */
4708 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4709 else
4711 /* Compute the return values into a pseudo reg, which we will copy
4712 into the true return register after the cleanups are done. */
4713 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4714 if (TYPE_MODE (return_type) != BLKmode
4715 && targetm.calls.return_in_msb (return_type))
4716 /* expand_function_end will insert the appropriate padding in
4717 this case. Use the return value's natural (unpadded) mode
4718 within the function proper. */
4719 SET_DECL_RTL (DECL_RESULT (subr),
4720 gen_reg_rtx (TYPE_MODE (return_type)));
4721 else
4723 /* In order to figure out what mode to use for the pseudo, we
4724 figure out what the mode of the eventual return register will
4725 actually be, and use that. */
4726 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4728 /* Structures that are returned in registers are not
4729 aggregate_value_p, so we may see a PARALLEL or a REG. */
4730 if (REG_P (hard_reg))
4731 SET_DECL_RTL (DECL_RESULT (subr),
4732 gen_reg_rtx (GET_MODE (hard_reg)));
4733 else
4735 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4736 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4740 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4741 result to the real return register(s). */
4742 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4745 /* Initialize rtx for parameters and local variables.
4746 In some cases this requires emitting insns. */
4747 assign_parms (subr);
4749 /* If function gets a static chain arg, store it. */
4750 if (cfun->static_chain_decl)
4752 tree parm = cfun->static_chain_decl;
4753 rtx local, chain, insn;
4755 local = gen_reg_rtx (Pmode);
4756 chain = targetm.calls.static_chain (current_function_decl, true);
4758 set_decl_incoming_rtl (parm, chain, false);
4759 SET_DECL_RTL (parm, local);
4760 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4762 insn = emit_move_insn (local, chain);
4764 /* Mark the register as eliminable, similar to parameters. */
4765 if (MEM_P (chain)
4766 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
4767 set_unique_reg_note (insn, REG_EQUIV, chain);
4770 /* If the function receives a non-local goto, then store the
4771 bits we need to restore the frame pointer. */
4772 if (cfun->nonlocal_goto_save_area)
4774 tree t_save;
4775 rtx r_save;
4777 /* ??? We need to do this save early. Unfortunately, this point is
4778 before the frame variable gets declared. Help out... */
4779 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4780 if (!DECL_RTL_SET_P (var))
4781 expand_decl (var);
4783 t_save = build4 (ARRAY_REF,
4784 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
4785 cfun->nonlocal_goto_save_area,
4786 integer_zero_node, NULL_TREE, NULL_TREE);
4787 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4788 gcc_assert (GET_MODE (r_save) == Pmode);
4790 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4791 update_nonlocal_goto_save_area ();
4794 /* The following was moved from init_function_start.
4795 The move is supposed to make sdb output more accurate. */
4796 /* Indicate the beginning of the function body,
4797 as opposed to parm setup. */
4798 emit_note (NOTE_INSN_FUNCTION_BEG);
4800 gcc_assert (NOTE_P (get_last_insn ()));
4802 parm_birth_insn = get_last_insn ();
4804 if (crtl->profile)
4806 #ifdef PROFILE_HOOK
4807 PROFILE_HOOK (current_function_funcdef_no);
4808 #endif
4811 /* If we are doing generic stack checking, the probe should go here. */
4812 if (flag_stack_check == GENERIC_STACK_CHECK)
4813 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4815 /* Make sure there is a line number after the function entry setup code. */
4816 force_next_line_note ();
4819 /* Undo the effects of init_dummy_function_start. */
4820 void
4821 expand_dummy_function_end (void)
4823 gcc_assert (in_dummy_function);
4825 /* End any sequences that failed to be closed due to syntax errors. */
4826 while (in_sequence_p ())
4827 end_sequence ();
4829 /* Outside function body, can't compute type's actual size
4830 until next function's body starts. */
4832 free_after_parsing (cfun);
4833 free_after_compilation (cfun);
4834 pop_cfun ();
4835 in_dummy_function = false;
4838 /* Call DOIT for each hard register used as a return value from
4839 the current function. */
4841 void
4842 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4844 rtx outgoing = crtl->return_rtx;
4846 if (! outgoing)
4847 return;
4849 if (REG_P (outgoing))
4850 (*doit) (outgoing, arg);
4851 else if (GET_CODE (outgoing) == PARALLEL)
4853 int i;
4855 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4857 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4859 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4860 (*doit) (x, arg);
4865 static void
4866 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4868 emit_clobber (reg);
4871 void
4872 clobber_return_register (void)
4874 diddle_return_value (do_clobber_return_reg, NULL);
4876 /* In case we use a pseudo to return the value, clobber it too. */
4877 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4879 tree decl_result = DECL_RESULT (current_function_decl);
4880 rtx decl_rtl = DECL_RTL (decl_result);
4881 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4883 do_clobber_return_reg (decl_rtl, NULL);
4888 static void
4889 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4891 emit_use (reg);
4894 static void
4895 use_return_register (void)
4897 diddle_return_value (do_use_return_reg, NULL);
4900 /* Possibly warn about unused parameters. */
4901 void
4902 do_warn_unused_parameter (tree fn)
4904 tree decl;
4906 for (decl = DECL_ARGUMENTS (fn);
4907 decl; decl = DECL_CHAIN (decl))
4908 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4909 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4910 && !TREE_NO_WARNING (decl))
4911 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4914 static GTY(()) rtx initial_trampoline;
4916 /* Generate RTL for the end of the current function. */
4918 void
4919 expand_function_end (void)
4921 rtx clobber_after;
4923 /* If arg_pointer_save_area was referenced only from a nested
4924 function, we will not have initialized it yet. Do that now. */
4925 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4926 get_arg_pointer_save_area ();
4928 /* If we are doing generic stack checking and this function makes calls,
4929 do a stack probe at the start of the function to ensure we have enough
4930 space for another stack frame. */
4931 if (flag_stack_check == GENERIC_STACK_CHECK)
4933 rtx insn, seq;
4935 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4936 if (CALL_P (insn))
4938 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
4939 start_sequence ();
4940 if (STACK_CHECK_MOVING_SP)
4941 anti_adjust_stack_and_probe (max_frame_size, true);
4942 else
4943 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
4944 seq = get_insns ();
4945 end_sequence ();
4946 set_insn_locators (seq, prologue_locator);
4947 emit_insn_before (seq, stack_check_probe_note);
4948 break;
4952 /* End any sequences that failed to be closed due to syntax errors. */
4953 while (in_sequence_p ())
4954 end_sequence ();
4956 clear_pending_stack_adjust ();
4957 do_pending_stack_adjust ();
4959 /* Output a linenumber for the end of the function.
4960 SDB depends on this. */
4961 force_next_line_note ();
4962 set_curr_insn_source_location (input_location);
4964 /* Before the return label (if any), clobber the return
4965 registers so that they are not propagated live to the rest of
4966 the function. This can only happen with functions that drop
4967 through; if there had been a return statement, there would
4968 have either been a return rtx, or a jump to the return label.
4970 We delay actual code generation after the current_function_value_rtx
4971 is computed. */
4972 clobber_after = get_last_insn ();
4974 /* Output the label for the actual return from the function. */
4975 emit_label (return_label);
4977 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
4979 /* Let except.c know where it should emit the call to unregister
4980 the function context for sjlj exceptions. */
4981 if (flag_exceptions)
4982 sjlj_emit_function_exit_after (get_last_insn ());
4984 else
4986 /* We want to ensure that instructions that may trap are not
4987 moved into the epilogue by scheduling, because we don't
4988 always emit unwind information for the epilogue. */
4989 if (cfun->can_throw_non_call_exceptions)
4990 emit_insn (gen_blockage ());
4993 /* If this is an implementation of throw, do what's necessary to
4994 communicate between __builtin_eh_return and the epilogue. */
4995 expand_eh_return ();
4997 /* If scalar return value was computed in a pseudo-reg, or was a named
4998 return value that got dumped to the stack, copy that to the hard
4999 return register. */
5000 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5002 tree decl_result = DECL_RESULT (current_function_decl);
5003 rtx decl_rtl = DECL_RTL (decl_result);
5005 if (REG_P (decl_rtl)
5006 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5007 : DECL_REGISTER (decl_result))
5009 rtx real_decl_rtl = crtl->return_rtx;
5011 /* This should be set in assign_parms. */
5012 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5014 /* If this is a BLKmode structure being returned in registers,
5015 then use the mode computed in expand_return. Note that if
5016 decl_rtl is memory, then its mode may have been changed,
5017 but that crtl->return_rtx has not. */
5018 if (GET_MODE (real_decl_rtl) == BLKmode)
5019 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5021 /* If a non-BLKmode return value should be padded at the least
5022 significant end of the register, shift it left by the appropriate
5023 amount. BLKmode results are handled using the group load/store
5024 machinery. */
5025 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5026 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5028 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5029 REGNO (real_decl_rtl)),
5030 decl_rtl);
5031 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5033 /* If a named return value dumped decl_result to memory, then
5034 we may need to re-do the PROMOTE_MODE signed/unsigned
5035 extension. */
5036 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5038 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5039 promote_function_mode (TREE_TYPE (decl_result),
5040 GET_MODE (decl_rtl), &unsignedp,
5041 TREE_TYPE (current_function_decl), 1);
5043 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5045 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5047 /* If expand_function_start has created a PARALLEL for decl_rtl,
5048 move the result to the real return registers. Otherwise, do
5049 a group load from decl_rtl for a named return. */
5050 if (GET_CODE (decl_rtl) == PARALLEL)
5051 emit_group_move (real_decl_rtl, decl_rtl);
5052 else
5053 emit_group_load (real_decl_rtl, decl_rtl,
5054 TREE_TYPE (decl_result),
5055 int_size_in_bytes (TREE_TYPE (decl_result)));
5057 /* In the case of complex integer modes smaller than a word, we'll
5058 need to generate some non-trivial bitfield insertions. Do that
5059 on a pseudo and not the hard register. */
5060 else if (GET_CODE (decl_rtl) == CONCAT
5061 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5062 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5064 int old_generating_concat_p;
5065 rtx tmp;
5067 old_generating_concat_p = generating_concat_p;
5068 generating_concat_p = 0;
5069 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5070 generating_concat_p = old_generating_concat_p;
5072 emit_move_insn (tmp, decl_rtl);
5073 emit_move_insn (real_decl_rtl, tmp);
5075 else
5076 emit_move_insn (real_decl_rtl, decl_rtl);
5080 /* If returning a structure, arrange to return the address of the value
5081 in a place where debuggers expect to find it.
5083 If returning a structure PCC style,
5084 the caller also depends on this value.
5085 And cfun->returns_pcc_struct is not necessarily set. */
5086 if (cfun->returns_struct
5087 || cfun->returns_pcc_struct)
5089 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5090 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5091 rtx outgoing;
5093 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5094 type = TREE_TYPE (type);
5095 else
5096 value_address = XEXP (value_address, 0);
5098 outgoing = targetm.calls.function_value (build_pointer_type (type),
5099 current_function_decl, true);
5101 /* Mark this as a function return value so integrate will delete the
5102 assignment and USE below when inlining this function. */
5103 REG_FUNCTION_VALUE_P (outgoing) = 1;
5105 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5106 value_address = convert_memory_address (GET_MODE (outgoing),
5107 value_address);
5109 emit_move_insn (outgoing, value_address);
5111 /* Show return register used to hold result (in this case the address
5112 of the result). */
5113 crtl->return_rtx = outgoing;
5116 /* Emit the actual code to clobber return register. */
5118 rtx seq;
5120 start_sequence ();
5121 clobber_return_register ();
5122 seq = get_insns ();
5123 end_sequence ();
5125 emit_insn_after (seq, clobber_after);
5128 /* Output the label for the naked return from the function. */
5129 if (naked_return_label)
5130 emit_label (naked_return_label);
5132 /* @@@ This is a kludge. We want to ensure that instructions that
5133 may trap are not moved into the epilogue by scheduling, because
5134 we don't always emit unwind information for the epilogue. */
5135 if (cfun->can_throw_non_call_exceptions
5136 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5137 emit_insn (gen_blockage ());
5139 /* If stack protection is enabled for this function, check the guard. */
5140 if (crtl->stack_protect_guard)
5141 stack_protect_epilogue ();
5143 /* If we had calls to alloca, and this machine needs
5144 an accurate stack pointer to exit the function,
5145 insert some code to save and restore the stack pointer. */
5146 if (! EXIT_IGNORE_STACK
5147 && cfun->calls_alloca)
5149 rtx tem = 0, seq;
5151 start_sequence ();
5152 emit_stack_save (SAVE_FUNCTION, &tem);
5153 seq = get_insns ();
5154 end_sequence ();
5155 emit_insn_before (seq, parm_birth_insn);
5157 emit_stack_restore (SAVE_FUNCTION, tem);
5160 /* ??? This should no longer be necessary since stupid is no longer with
5161 us, but there are some parts of the compiler (e.g. reload_combine, and
5162 sh mach_dep_reorg) that still try to compute their own lifetime info
5163 instead of using the general framework. */
5164 use_return_register ();
5167 rtx
5168 get_arg_pointer_save_area (void)
5170 rtx ret = arg_pointer_save_area;
5172 if (! ret)
5174 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5175 arg_pointer_save_area = ret;
5178 if (! crtl->arg_pointer_save_area_init)
5180 rtx seq;
5182 /* Save the arg pointer at the beginning of the function. The
5183 generated stack slot may not be a valid memory address, so we
5184 have to check it and fix it if necessary. */
5185 start_sequence ();
5186 emit_move_insn (validize_mem (ret),
5187 crtl->args.internal_arg_pointer);
5188 seq = get_insns ();
5189 end_sequence ();
5191 push_topmost_sequence ();
5192 emit_insn_after (seq, entry_of_function ());
5193 pop_topmost_sequence ();
5195 crtl->arg_pointer_save_area_init = true;
5198 return ret;
5201 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5202 for the first time. */
5204 static void
5205 record_insns (rtx insns, rtx end, htab_t *hashp)
5207 rtx tmp;
5208 htab_t hash = *hashp;
5210 if (hash == NULL)
5211 *hashp = hash
5212 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5214 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5216 void **slot = htab_find_slot (hash, tmp, INSERT);
5217 gcc_assert (*slot == NULL);
5218 *slot = tmp;
5222 /* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5223 basic block, by splitting, or by a peephole. If INSN is a prologue or epilogue
5224 insn, then record COPY as well. */
5226 void
5227 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5229 htab_t hash;
5230 void **slot;
5232 hash = epilogue_insn_hash;
5233 if (!hash || !htab_find (hash, insn))
5235 hash = prologue_insn_hash;
5236 if (!hash || !htab_find (hash, insn))
5237 return;
5240 slot = htab_find_slot (hash, copy, INSERT);
5241 gcc_assert (*slot == NULL);
5242 *slot = copy;
5245 /* Set the locator of the insn chain starting at INSN to LOC. */
5246 static void
5247 set_insn_locators (rtx insn, int loc)
5249 while (insn != NULL_RTX)
5251 if (INSN_P (insn))
5252 INSN_LOCATOR (insn) = loc;
5253 insn = NEXT_INSN (insn);
5257 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5258 we can be running after reorg, SEQUENCE rtl is possible. */
5260 static bool
5261 contains (const_rtx insn, htab_t hash)
5263 if (hash == NULL)
5264 return false;
5266 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5268 int i;
5269 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5270 if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
5271 return true;
5272 return false;
5275 return htab_find (hash, insn) != NULL;
5278 int
5279 prologue_epilogue_contains (const_rtx insn)
5281 if (contains (insn, prologue_insn_hash))
5282 return 1;
5283 if (contains (insn, epilogue_insn_hash))
5284 return 1;
5285 return 0;
5288 #ifdef HAVE_return
5289 /* Insert use of return register before the end of BB. */
5291 static void
5292 emit_use_return_register_into_block (basic_block bb)
5294 rtx seq;
5295 start_sequence ();
5296 use_return_register ();
5297 seq = get_insns ();
5298 end_sequence ();
5299 emit_insn_before (seq, BB_END (bb));
5302 /* Insert gen_return at the end of block BB. This also means updating
5303 block_for_insn appropriately. */
5305 static void
5306 emit_return_into_block (basic_block bb)
5308 rtx jump = emit_jump_insn_after (gen_return (), BB_END (bb));
5309 JUMP_LABEL (jump) = ret_rtx;
5311 #endif /* HAVE_return */
5313 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5314 this into place with notes indicating where the prologue ends and where
5315 the epilogue begins. Update the basic block information when possible. */
5317 static void
5318 thread_prologue_and_epilogue_insns (void)
5320 bool inserted;
5321 rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
5322 edge entry_edge, e;
5323 edge_iterator ei;
5325 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
5327 inserted = false;
5328 seq = NULL_RTX;
5329 epilogue_end = NULL_RTX;
5331 /* Can't deal with multiple successors of the entry block at the
5332 moment. Function should always have at least one entry
5333 point. */
5334 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5335 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
5337 if (flag_split_stack
5338 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5339 == NULL))
5341 #ifndef HAVE_split_stack_prologue
5342 gcc_unreachable ();
5343 #else
5344 gcc_assert (HAVE_split_stack_prologue);
5346 start_sequence ();
5347 emit_insn (gen_split_stack_prologue ());
5348 seq = get_insns ();
5349 end_sequence ();
5351 record_insns (seq, NULL, &prologue_insn_hash);
5352 set_insn_locators (seq, prologue_locator);
5354 insert_insn_on_edge (seq, entry_edge);
5355 inserted = true;
5356 #endif
5359 #ifdef HAVE_prologue
5360 if (HAVE_prologue)
5362 start_sequence ();
5363 seq = gen_prologue ();
5364 emit_insn (seq);
5366 /* Insert an explicit USE for the frame pointer
5367 if the profiling is on and the frame pointer is required. */
5368 if (crtl->profile && frame_pointer_needed)
5369 emit_use (hard_frame_pointer_rtx);
5371 /* Retain a map of the prologue insns. */
5372 record_insns (seq, NULL, &prologue_insn_hash);
5373 emit_note (NOTE_INSN_PROLOGUE_END);
5375 /* Ensure that instructions are not moved into the prologue when
5376 profiling is on. The call to the profiling routine can be
5377 emitted within the live range of a call-clobbered register. */
5378 if (!targetm.profile_before_prologue () && crtl->profile)
5379 emit_insn (gen_blockage ());
5381 seq = get_insns ();
5382 end_sequence ();
5383 set_insn_locators (seq, prologue_locator);
5385 insert_insn_on_edge (seq, entry_edge);
5386 inserted = true;
5388 #endif
5390 /* If the exit block has no non-fake predecessors, we don't need
5391 an epilogue. */
5392 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5393 if ((e->flags & EDGE_FAKE) == 0)
5394 break;
5395 if (e == NULL)
5396 goto epilogue_done;
5398 rtl_profile_for_bb (EXIT_BLOCK_PTR);
5399 #ifdef HAVE_return
5400 if (optimize && HAVE_return)
5402 /* If we're allowed to generate a simple return instruction,
5403 then by definition we don't need a full epilogue. Examine
5404 the block that falls through to EXIT. If it does not
5405 contain any code, examine its predecessors and try to
5406 emit (conditional) return instructions. */
5408 basic_block last;
5409 rtx label;
5411 e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
5412 if (e == NULL)
5413 goto epilogue_done;
5414 last = e->src;
5416 /* Verify that there are no active instructions in the last block. */
5417 label = BB_END (last);
5418 while (label && !LABEL_P (label))
5420 if (active_insn_p (label))
5421 break;
5422 label = PREV_INSN (label);
5425 if (BB_HEAD (last) == label && LABEL_P (label))
5427 edge_iterator ei2;
5429 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5431 basic_block bb = e->src;
5432 rtx jump;
5434 if (bb == ENTRY_BLOCK_PTR)
5436 ei_next (&ei2);
5437 continue;
5440 jump = BB_END (bb);
5441 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5443 ei_next (&ei2);
5444 continue;
5447 /* If we have an unconditional jump, we can replace that
5448 with a simple return instruction. */
5449 if (simplejump_p (jump))
5451 /* The use of the return register might be present in the exit
5452 fallthru block. Either:
5453 - removing the use is safe, and we should remove the use in
5454 the exit fallthru block, or
5455 - removing the use is not safe, and we should add it here.
5456 For now, we conservatively choose the latter. Either of the
5457 two helps in crossjumping. */
5458 emit_use_return_register_into_block (bb);
5460 emit_return_into_block (bb);
5461 delete_insn (jump);
5464 /* If we have a conditional jump, we can try to replace
5465 that with a conditional return instruction. */
5466 else if (condjump_p (jump))
5468 if (! redirect_jump (jump, ret_rtx, 0))
5470 ei_next (&ei2);
5471 continue;
5474 /* See comment in the simplejump_p case above. */
5475 emit_use_return_register_into_block (bb);
5477 /* If this block has only one successor, it both jumps
5478 and falls through to the fallthru block, so we can't
5479 delete the edge. */
5480 if (single_succ_p (bb))
5482 ei_next (&ei2);
5483 continue;
5486 else
5488 ei_next (&ei2);
5489 continue;
5492 /* Fix up the CFG for the successful change we just made. */
5493 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5496 /* Emit a return insn for the exit fallthru block. Whether
5497 this is still reachable will be determined later. */
5499 emit_barrier_after (BB_END (last));
5500 emit_return_into_block (last);
5501 epilogue_end = BB_END (last);
5502 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5503 goto epilogue_done;
5506 #endif
5508 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5509 this marker for the splits of EH_RETURN patterns, and nothing else
5510 uses the flag in the meantime. */
5511 epilogue_completed = 1;
5513 #ifdef HAVE_eh_return
5514 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5515 some targets, these get split to a special version of the epilogue
5516 code. In order to be able to properly annotate these with unwind
5517 info, try to split them now. If we get a valid split, drop an
5518 EPILOGUE_BEG note and mark the insns as epilogue insns. */
5519 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5521 rtx prev, last, trial;
5523 if (e->flags & EDGE_FALLTHRU)
5524 continue;
5525 last = BB_END (e->src);
5526 if (!eh_returnjump_p (last))
5527 continue;
5529 prev = PREV_INSN (last);
5530 trial = try_split (PATTERN (last), last, 1);
5531 if (trial == last)
5532 continue;
5534 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
5535 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
5537 #endif
5539 /* Find the edge that falls through to EXIT. Other edges may exist
5540 due to RETURN instructions, but those don't need epilogues.
5541 There really shouldn't be a mixture -- either all should have
5542 been converted or none, however... */
5544 e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
5545 if (e == NULL)
5546 goto epilogue_done;
5548 #ifdef HAVE_epilogue
5549 if (HAVE_epilogue)
5551 rtx returnjump;
5553 start_sequence ();
5554 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5555 seq = gen_epilogue ();
5556 if (seq)
5557 emit_jump_insn (seq);
5559 /* Retain a map of the epilogue insns. */
5560 record_insns (seq, NULL, &epilogue_insn_hash);
5561 set_insn_locators (seq, epilogue_locator);
5563 returnjump = get_last_insn ();
5564 seq = get_insns ();
5565 end_sequence ();
5567 insert_insn_on_edge (seq, e);
5568 inserted = true;
5570 if (JUMP_P (returnjump))
5572 rtx pat = PATTERN (returnjump);
5573 if (GET_CODE (pat) == PARALLEL)
5574 pat = XVECEXP (pat, 0, 0);
5575 if (ANY_RETURN_P (pat))
5576 JUMP_LABEL (returnjump) = pat;
5577 else
5578 JUMP_LABEL (returnjump) = ret_rtx;
5580 else
5581 returnjump = NULL_RTX;
5583 else
5584 #endif
5586 basic_block cur_bb;
5588 if (! next_active_insn (BB_END (e->src)))
5589 goto epilogue_done;
5590 /* We have a fall-through edge to the exit block, the source is not
5591 at the end of the function, and there will be an assembler epilogue
5592 at the end of the function.
5593 We can't use force_nonfallthru here, because that would try to
5594 use return. Inserting a jump 'by hand' is extremely messy, so
5595 we take advantage of cfg_layout_finalize using
5596 fixup_fallthru_exit_predecessor. */
5597 cfg_layout_initialize (0);
5598 FOR_EACH_BB (cur_bb)
5599 if (cur_bb->index >= NUM_FIXED_BLOCKS
5600 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5601 cur_bb->aux = cur_bb->next_bb;
5602 cfg_layout_finalize ();
5605 epilogue_done:
5606 default_rtl_profile ();
5608 if (inserted)
5610 sbitmap blocks;
5612 commit_edge_insertions ();
5614 /* Look for basic blocks within the prologue insns. */
5615 blocks = sbitmap_alloc (last_basic_block);
5616 sbitmap_zero (blocks);
5617 SET_BIT (blocks, entry_edge->dest->index);
5618 find_many_sub_basic_blocks (blocks);
5619 sbitmap_free (blocks);
5621 /* The epilogue insns we inserted may cause the exit edge to no longer
5622 be fallthru. */
5623 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5625 if (((e->flags & EDGE_FALLTHRU) != 0)
5626 && returnjump_p (BB_END (e->src)))
5627 e->flags &= ~EDGE_FALLTHRU;
5631 #ifdef HAVE_sibcall_epilogue
5632 /* Emit sibling epilogues before any sibling call sites. */
5633 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5634 {
5635 basic_block bb = e->src;
5636 rtx insn = BB_END (bb);
5638 if (!CALL_P (insn)
5639 || ! SIBLING_CALL_P (insn))
5640 {
5641 ei_next (&ei);
5642 continue;
5643 }
5645 start_sequence ();
5646 emit_note (NOTE_INSN_EPILOGUE_BEG);
5647 emit_insn (gen_sibcall_epilogue ());
5648 seq = get_insns ();
5649 end_sequence ();
5651 /* Retain a map of the epilogue insns. Used in life analysis to
5652 avoid getting rid of sibcall epilogue insns. Do this before we
5653 actually emit the sequence. */
5654 record_insns (seq, NULL, &epilogue_insn_hash);
5655 set_insn_locators (seq, epilogue_locator);
5657 emit_insn_before (seq, insn);
5658 ei_next (&ei);
5659 }
5660 #endif
5662 #ifdef HAVE_epilogue
5663 if (epilogue_end)
5664 {
5665 rtx insn, next;
5667 /* Similarly, move any line notes that appear after the epilogue.
5668 There is no need, however, to be quite so strict about the existence
5669 of such a note. Also possibly move
5670 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5671 info generation. */
5672 for (insn = epilogue_end; insn; insn = next)
5673 {
5674 next = NEXT_INSN (insn);
5675 if (NOTE_P (insn)
5676 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
5677 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5678 }
5679 }
5680 #endif
5682 /* Threading the prologue and epilogue changes the artificial refs
5683 in the entry and exit blocks. */
5684 epilogue_completed = 1;
5685 df_update_entry_exit_and_calls ();
5686 }
5688 /* Reposition the prologue-end and epilogue-begin notes after
5689 instruction scheduling. */
5691 void
5692 reposition_prologue_and_epilogue_notes (void)
5693 {
5694 #if defined (HAVE_prologue) || defined (HAVE_epilogue) \
5695 || defined (HAVE_sibcall_epilogue)
5696 /* Since the hash table is created on demand, the fact that it is
5697 non-null is a signal that it is non-empty. */
5698 if (prologue_insn_hash != NULL)
5699 {
5700 size_t len = htab_elements (prologue_insn_hash);
5701 rtx insn, last = NULL, note = NULL;
5703 /* Scan from the beginning until we reach the last prologue insn. */
5704 /* ??? While we do have the CFG intact, there are two problems:
5705 (1) The prologue can contain loops (typically probing the stack),
5706 which means that the end of the prologue isn't in the first bb.
5707 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
5708 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5709 {
5710 if (NOTE_P (insn))
5711 {
5712 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
5713 note = insn;
5714 }
5715 else if (contains (insn, prologue_insn_hash))
5716 {
5717 last = insn;
5718 if (--len == 0)
5719 break;
5720 }
5721 }
5723 if (last)
5724 {
5725 if (note == NULL)
5726 {
5727 /* Scan forward looking for the PROLOGUE_END note. It should
5728 be right at the beginning of the block, possibly with other
5729 insn notes that got moved there. */
5730 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
5731 {
5732 if (NOTE_P (note)
5733 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
5734 break;
5735 }
5736 }
5738 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5739 if (LABEL_P (last))
5740 last = NEXT_INSN (last);
5741 reorder_insns (note, note, last);
5742 }
5743 }
5745 if (epilogue_insn_hash != NULL)
5746 {
5747 edge_iterator ei;
5748 edge e;
5750 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5751 {
5752 rtx insn, first = NULL, note = NULL;
5753 basic_block bb = e->src;
5755 /* Scan from the beginning until we reach the first epilogue insn. */
5756 FOR_BB_INSNS (bb, insn)
5757 {
5758 if (NOTE_P (insn))
5759 {
5760 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
5761 {
5762 note = insn;
5763 if (first != NULL)
5764 break;
5765 }
5766 }
5767 else if (first == NULL && contains (insn, epilogue_insn_hash))
5768 {
5769 first = insn;
5770 if (note != NULL)
5771 break;
5772 }
5773 }
5775 if (note)
5776 {
5777 /* If the function has a single basic block, and no real
5778 epilogue insns (e.g. sibcall with no cleanup), the
5779 epilogue note can get scheduled before the prologue
5780 note. If we have frame related prologue insns, having
5781 them scanned during the epilogue will result in a crash.
5782 In this case re-order the epilogue note to just before
5783 the last insn in the block. */
5784 if (first == NULL)
5785 first = BB_END (bb);
5787 if (PREV_INSN (first) != note)
5788 reorder_insns (note, note, PREV_INSN (first));
5789 }
5790 }
5791 }
5792 #endif /* HAVE_prologue or HAVE_epilogue */
5793 }
5795 /* Returns the name of the current function. */
5796 const char *
5797 current_function_name (void)
5798 {
5799 if (cfun == NULL)
5800 return "<none>";
5801 return lang_hooks.decl_printable_name (cfun->decl, 2);
5802 }
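/* Editorial usage sketch (not in the original source): dump and
   diagnostic code typically prints this name directly, e.g.

     if (dump_file)
       fprintf (dump_file, ";; compiling %s\n", current_function_name ());

   where dump_file is the usual pass dump stream; the "<none>" fallback
   above keeps such callers safe outside of any function context.  */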
5805 static unsigned int
5806 rest_of_handle_check_leaf_regs (void)
5807 {
5808 #ifdef LEAF_REGISTERS
5809 current_function_uses_only_leaf_regs
5810 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5811 #endif
5812 return 0;
5813 }
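/* Editorial note (an assumption, not in the original source): this
   only has an effect on targets defining LEAF_REGISTERS -- SPARC, for
   example, where a leaf function can avoid allocating a register
   window.  Such a target provides definitions along the lines of

     #define LEAF_REGISTERS ...
     #define LEAF_REG_REMAP(REGNO) ...

   in its target headers; see the GCC internals documentation for the
   exact contract.  On all other targets the pass does nothing.  */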
5815 /* Insert a TYPE into the used types hash table of CFUN. */
5817 static void
5818 used_types_insert_helper (tree type, struct function *func)
5819 {
5820 if (type != NULL && func != NULL)
5821 {
5822 void **slot;
5824 if (func->used_types_hash == NULL)
5825 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5826 htab_eq_pointer, NULL);
5827 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5828 if (*slot == NULL)
5829 *slot = type;
5830 }
5831 }
5833 /* Given a type, insert it into the used hash table in cfun. */
5834 void
5835 used_types_insert (tree t)
5836 {
5837 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5838 if (TYPE_NAME (t))
5839 break;
5840 else
5841 t = TREE_TYPE (t);
5842 if (TREE_CODE (t) == ERROR_MARK)
5843 return;
5844 if (TYPE_NAME (t) == NULL_TREE
5845 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
5846 t = TYPE_MAIN_VARIANT (t);
5847 if (debug_info_level > DINFO_LEVEL_NONE)
5848 {
5849 if (cfun)
5850 used_types_insert_helper (t, cfun);
5851 else
5852 /* So this might be a type referenced by a global variable.
5853 Record that type so that we can later decide to emit its debug
5854 information. */
5855 VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
5856 }
5857 }
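/* Editorial illustration (not in the original source): for a
   declaration such as

     struct foo **p;

   the loop above peels the two unnamed pointer layers, so it is the
   main variant of struct foo itself that gets recorded -- in cfun's
   used-types table inside a function, or in types_used_by_cur_var_decl
   for a global variable.  */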
5859 /* Helper to hash a struct types_used_by_vars_entry. */
5861 static hashval_t
5862 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
5863 {
5864 gcc_assert (entry && entry->var_decl && entry->type);
5866 return iterative_hash_object (entry->type,
5867 iterative_hash_object (entry->var_decl, 0));
5868 }
5870 /* Hash function of the types_used_by_vars_entry hash table. */
5872 hashval_t
5873 types_used_by_vars_do_hash (const void *x)
5874 {
5875 const struct types_used_by_vars_entry *entry =
5876 (const struct types_used_by_vars_entry *) x;
5878 return hash_types_used_by_vars_entry (entry);
5879 }
5881 /* Equality function of the types_used_by_vars_entry hash table. */
5883 int
5884 types_used_by_vars_eq (const void *x1, const void *x2)
5885 {
5886 const struct types_used_by_vars_entry *e1 =
5887 (const struct types_used_by_vars_entry *) x1;
5888 const struct types_used_by_vars_entry *e2 =
5889 (const struct types_used_by_vars_entry *)x2;
5891 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
5892 }
5894 /* Inserts an entry into the types_used_by_vars_hash hash table. */
5896 void
5897 types_used_by_var_decl_insert (tree type, tree var_decl)
5898 {
5899 if (type != NULL && var_decl != NULL)
5900 {
5901 void **slot;
5902 struct types_used_by_vars_entry e;
5903 e.var_decl = var_decl;
5904 e.type = type;
5905 if (types_used_by_vars_hash == NULL)
5906 types_used_by_vars_hash =
5907 htab_create_ggc (37, types_used_by_vars_do_hash,
5908 types_used_by_vars_eq, NULL);
5909 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
5910 hash_types_used_by_vars_entry (&e), INSERT);
5911 if (*slot == NULL)
5912 {
5913 struct types_used_by_vars_entry *entry;
5914 entry = ggc_alloc_types_used_by_vars_entry ();
5915 entry->type = type;
5916 entry->var_decl = var_decl;
5917 *slot = entry;
5918 }
5919 }
5920 }
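/* Editorial usage sketch (an assumption, not in the original source):
   a caller recording the type dependency of a global variable would do
   something like

     types_used_by_var_decl_insert (TREE_TYPE (decl), decl);

   so that debug info for the variable's type can be emitted later.  */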
5922 struct rtl_opt_pass pass_leaf_regs =
5923 {
5924 {
5925 RTL_PASS,
5926 "*leaf_regs", /* name */
5927 NULL, /* gate */
5928 rest_of_handle_check_leaf_regs, /* execute */
5929 NULL, /* sub */
5930 NULL, /* next */
5931 0, /* static_pass_number */
5932 TV_NONE, /* tv_id */
5933 0, /* properties_required */
5934 0, /* properties_provided */
5935 0, /* properties_destroyed */
5936 0, /* todo_flags_start */
5937 0 /* todo_flags_finish */
5938 }
5939 };
5941 static unsigned int
5942 rest_of_handle_thread_prologue_and_epilogue (void)
5943 {
5944 if (optimize)
5945 cleanup_cfg (CLEANUP_EXPENSIVE);
5947 /* On some machines, the prologue and epilogue code, or parts thereof,
5948 can be represented as RTL. Doing so lets us schedule insns between
5949 it and the rest of the code and also allows delayed branch
5950 scheduling to operate in the epilogue. */
5951 thread_prologue_and_epilogue_insns ();
5953 /* The stack usage info is finalized during prologue expansion. */
5954 if (flag_stack_usage_info)
5955 output_stack_usage ();
5957 return 0;
5958 }
5960 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
5961 {
5962 {
5963 RTL_PASS,
5964 "pro_and_epilogue", /* name */
5965 NULL, /* gate */
5966 rest_of_handle_thread_prologue_and_epilogue, /* execute */
5967 NULL, /* sub */
5968 NULL, /* next */
5969 0, /* static_pass_number */
5970 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
5971 0, /* properties_required */
5972 0, /* properties_provided */
5973 0, /* properties_destroyed */
5974 TODO_verify_flow, /* todo_flags_start */
5975 TODO_df_verify |
5976 TODO_df_finish | TODO_verify_rtl_sharing |
5977 TODO_ggc_collect /* todo_flags_finish */
5978 }
5979 };
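/* Editorial note (not in the original source): like the other pass
   descriptors in this file, this one is assumed to be chained into the
   pass list in passes.c along the lines of

     NEXT_PASS (pass_thread_prologue_and_epilogue);

   inside init_optimization_passes.  */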
5982 /* This mini-pass fixes fall-out from SSA in asm statements that have
5983 in-out constraints. Say you start with
5985 orig = inout;
5986 asm ("": "+mr" (inout));
5987 use (orig);
5989 which is transformed very early to use explicit output and match operands:
5991 orig = inout;
5992 asm ("": "=mr" (inout) : "0" (inout));
5993 use (orig);
5995 Or, after SSA and copyprop,
5997 asm ("": "=mr" (inout_2) : "0" (inout_1));
5998 use (inout_1);
6000 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6001 they represent two separate values, so they will get different pseudo
6002 registers during expansion. Then, since the two operands need to match
6003 per the constraints, but use different pseudo registers, reload can
6004 only register a reload for these operands. But reloads can only be
6005 satisfied by hardregs, not by memory, so we need a register for this
6006 reload, just because we are presented with non-matching operands.
6007 So, even though we allow memory for this operand, no memory can be
6008 used for it, just because the two operands don't match. This can
6009 cause reload failures on register-starved targets.
6011 So it's a symptom of reload not being able to use memory for reloads
6012 or, alternatively, it's also a symptom of both operands not coming into
6013 reload as matching (in which case the pseudo could go to memory just
6014 fine, as the alternative allows it, and no reload would be necessary).
6015 We fix the latter problem here, by transforming
6017 asm ("": "=mr" (inout_2) : "0" (inout_1));
6019 back to
6021 inout_2 = inout_1;
6022 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6024 static void
6025 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
6026 {
6027 int i;
6028 bool changed = false;
6029 rtx op = SET_SRC (p_sets[0]);
6030 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6031 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6032 bool *output_matched = XALLOCAVEC (bool, noutputs);
6034 memset (output_matched, 0, noutputs * sizeof (bool));
6035 for (i = 0; i < ninputs; i++)
6036 {
6037 rtx input, output, insns;
6038 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6039 char *end;
6040 int match, j;
6042 if (*constraint == '%')
6043 constraint++;
6045 match = strtoul (constraint, &end, 10);
6046 if (end == constraint)
6047 continue;
6049 gcc_assert (match < noutputs);
6050 output = SET_DEST (p_sets[match]);
6051 input = RTVEC_ELT (inputs, i);
6052 /* Only do the transformation for pseudos. */
6053 if (! REG_P (output)
6054 || rtx_equal_p (output, input)
6055 || (GET_MODE (input) != VOIDmode
6056 && GET_MODE (input) != GET_MODE (output)))
6057 continue;
6059 /* We can't do anything if the output is also used as input,
6060 as we're going to overwrite it. */
6061 for (j = 0; j < ninputs; j++)
6062 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6063 break;
6064 if (j != ninputs)
6065 continue;
6067 /* Avoid changing the same input several times. For
6068 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6069 only change in once (to out1), rather than changing it
6070 first to out1 and afterwards to out2. */
6071 if (i > 0)
6072 {
6073 for (j = 0; j < noutputs; j++)
6074 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6075 break;
6076 if (j != noutputs)
6077 continue;
6078 }
6079 output_matched[match] = true;
6081 start_sequence ();
6082 emit_move_insn (output, input);
6083 insns = get_insns ();
6084 end_sequence ();
6085 emit_insn_before (insns, insn);
6087 /* Now replace all mentions of the input with output. We can't
6088 just replace the occurrence in inputs[i], as the register might
6089 also be used in some other input (or even in an address of an
6090 output), which would mean possibly increasing the number of
6091 inputs by one (namely 'output' in addition), which might pose
6092 a too complicated problem for reload to solve. E.g. this situation:
6094 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6096 Here 'input' is used in two occurrences as input (once for the
6097 input operand, once for the address in the second output operand).
6098 If we would replace only the occurrence of the input operand (to
6099 make the matching) we would be left with this:
6101 output = input
6102 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6104 Now we suddenly have two different input values (containing the same
6105 value, but different pseudos) where we formerly had only one.
6106 With more complicated asms this might lead to reload failures
6107 which wouldn't have happened without this pass. So, iterate over
6108 all operands and replace all occurrences of the register used. */
6109 for (j = 0; j < noutputs; j++)
6110 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6111 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6112 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6113 input, output);
6114 for (j = 0; j < ninputs; j++)
6115 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6116 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6117 input, output);
6119 changed = true;
6120 }
6122 if (changed)
6123 df_insn_rescan (insn);
6124 }
6126 static unsigned
6127 rest_of_match_asm_constraints (void)
6128 {
6129 basic_block bb;
6130 rtx insn, pat, *p_sets;
6131 int noutputs;
6133 if (!crtl->has_asm_statement)
6134 return 0;
6136 df_set_flags (DF_DEFER_INSN_RESCAN);
6137 FOR_EACH_BB (bb)
6138 {
6139 FOR_BB_INSNS (bb, insn)
6140 {
6141 if (!INSN_P (insn))
6142 continue;
6144 pat = PATTERN (insn);
6145 if (GET_CODE (pat) == PARALLEL)
6146 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6147 else if (GET_CODE (pat) == SET)
6148 p_sets = &PATTERN (insn), noutputs = 1;
6149 else
6150 continue;
6152 if (GET_CODE (*p_sets) == SET
6153 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6154 match_asm_constraints_1 (insn, p_sets, noutputs);
6155 }
6156 }
6158 return TODO_df_finish;
6159 }
6161 struct rtl_opt_pass pass_match_asm_constraints =
6162 {
6163 {
6164 RTL_PASS,
6165 "asmcons", /* name */
6166 NULL, /* gate */
6167 rest_of_match_asm_constraints, /* execute */
6168 NULL, /* sub */
6169 NULL, /* next */
6170 0, /* static_pass_number */
6171 TV_NONE, /* tv_id */
6172 0, /* properties_required */
6173 0, /* properties_provided */
6174 0, /* properties_destroyed */
6175 0, /* todo_flags_start */
6176 0 /* todo_flags_finish */
6177 }
6178 };
6181 #include "gt-function.h"