[official-gcc.git] / gcc / function.c
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010, 2011 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "rtl-error.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "output.h"
53 #include "basic-block.h"
54 #include "hashtab.h"
55 #include "ggc.h"
56 #include "tm_p.h"
57 #include "integrate.h"
58 #include "langhooks.h"
59 #include "target.h"
60 #include "cfglayout.h"
61 #include "gimple.h"
62 #include "tree-pass.h"
63 #include "predict.h"
64 #include "df.h"
65 #include "timevar.h"
66 #include "vecprim.h"
68 /* So we can assign to cfun in this file. */
69 #undef cfun
71 #ifndef STACK_ALIGNMENT_NEEDED
72 #define STACK_ALIGNMENT_NEEDED 1
73 #endif
75 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
77 /* Some systems use __main in a way incompatible with its use in gcc; in these
78 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
79 give the same symbol without quotes for an alternative entry point. You
80 must define both, or neither. */
81 #ifndef NAME__MAIN
82 #define NAME__MAIN "__main"
83 #endif
85 /* Round a value down to the largest multiple of the required alignment
86 that is not greater than it. Avoid using division in case the value is
87 negative. Assume the alignment is a power of two. */
88 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
90 /* Similar, but round to the next highest integer that meets the
91 alignment. */
92 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
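/* For illustration, with ALIGN == 8 (and two's-complement arithmetic):
   FLOOR_ROUND (13, 8)  ==  8     FLOOR_ROUND (-13, 8) == -16
   CEIL_ROUND (13, 8)   == 16     CEIL_ROUND (-13, 8)  ==  -8
   so both macros behave correctly for negative VALUEs, which
   division-based rounding would not guarantee.  */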
94 /* Nonzero if function being compiled doesn't contain any calls
95 (ignoring the prologue and epilogue). This is set prior to
96 local register allocation and is valid for the remaining
97 compiler passes. */
98 int current_function_is_leaf;
100 /* Nonzero if function being compiled doesn't modify the stack pointer
101 (ignoring the prologue and epilogue). This is only valid after
102 pass_stack_ptr_mod has run. */
103 int current_function_sp_is_unchanging;
105 /* Nonzero if the function being compiled is a leaf function which only
106 uses leaf registers. This is valid after reload (specifically after
107 sched2) and is useful only if the port defines LEAF_REGISTERS. */
108 int current_function_uses_only_leaf_regs;
110 /* Nonzero once virtual register instantiation has been done.
111 assign_stack_local uses frame_pointer_rtx when this is nonzero.
112 calls.c:emit_library_call_value_1 uses it to set up
113 post-instantiation libcalls. */
114 int virtuals_instantiated;
116 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
117 static GTY(()) int funcdef_no;
119 /* These variables hold pointers to functions to create and destroy
120 target specific, per-function data structures. */
121 struct machine_function * (*init_machine_status) (void);
123 /* The currently compiled function. */
124 struct function *cfun = 0;
126 /* These hashes record the prologue and epilogue insns. */
127 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
128 htab_t prologue_insn_hash;
129 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
130 htab_t epilogue_insn_hash;
133 htab_t types_used_by_vars_hash = NULL;
134 VEC(tree,gc) *types_used_by_cur_var_decl;
136 /* Forward declarations. */
138 static struct temp_slot *find_temp_slot_from_address (rtx);
139 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
140 static void pad_below (struct args_size *, enum machine_mode, tree);
141 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
142 static int all_blocks (tree, tree *);
143 static tree *get_block_vector (tree, int *);
144 extern tree debug_find_var_in_block_tree (tree, tree);
145 /* We always define `record_insns' even if it's not used so that we
146 can always export `prologue_epilogue_contains'. */
147 static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
148 static bool contains (const_rtx, htab_t);
149 #ifdef HAVE_return
150 static void emit_return_into_block (basic_block);
151 #endif
152 static void prepare_function_start (void);
153 static void do_clobber_return_reg (rtx, void *);
154 static void do_use_return_reg (rtx, void *);
155 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
157 /* Stack of nested functions. */
158 /* Keep track of the cfun stack. */
160 typedef struct function *function_p;
162 DEF_VEC_P(function_p);
163 DEF_VEC_ALLOC_P(function_p,heap);
164 static VEC(function_p,heap) *function_context_stack;
166 /* Save the current context for compilation of a nested function.
167 This is called from language-specific code. */
169 void
170 push_function_context (void)
172 if (cfun == 0)
173 allocate_struct_function (NULL, false);
175 VEC_safe_push (function_p, heap, function_context_stack, cfun);
176 set_cfun (NULL);
179 /* Restore the last saved context, at the end of a nested function.
180 This function is called from language-specific code. */
182 void
183 pop_function_context (void)
185 struct function *p = VEC_pop (function_p, function_context_stack);
186 set_cfun (p);
187 current_function_decl = p->decl;
189 /* Reset variables that have known state during rtx generation. */
190 virtuals_instantiated = 0;
191 generating_concat_p = 1;
194 /* Clear out all parts of the state in F that can safely be discarded
195 after the function has been parsed, but not compiled, to let
196 garbage collection reclaim the memory. */
198 void
199 free_after_parsing (struct function *f)
201 f->language = 0;
204 /* Clear out all parts of the state in F that can safely be discarded
205 after the function has been compiled, to let garbage collection
206 reclaim the memory. */
208 void
209 free_after_compilation (struct function *f)
211 prologue_insn_hash = NULL;
212 epilogue_insn_hash = NULL;
214 if (crtl->emit.regno_pointer_align)
215 free (crtl->emit.regno_pointer_align);
217 memset (crtl, 0, sizeof (struct rtl_data));
218 f->eh = NULL;
219 f->machine = NULL;
220 f->cfg = NULL;
222 regno_reg_rtx = NULL;
223 insn_locators_free ();
226 /* Return size needed for stack frame based on slots so far allocated.
227 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
228 the caller may have to do that. */
230 HOST_WIDE_INT
231 get_frame_size (void)
233 if (FRAME_GROWS_DOWNWARD)
234 return -frame_offset;
235 else
236 return frame_offset;
239 /* Issue an error message and return TRUE if frame OFFSET overflows in
240 the signed target pointer arithmetic for function FUNC. Otherwise
241 return FALSE. */
243 bool
244 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
246 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
248 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
249 /* Leave room for the fixed part of the frame. */
250 - 64 * UNITS_PER_WORD)
252 error_at (DECL_SOURCE_LOCATION (func),
253 "total size of local objects too large");
254 return TRUE;
257 return FALSE;
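/* For instance, with a 32-bit Pmode and 4-byte words the check above
   rejects any frame larger than 2^31 - 256 bytes, keeping signed
   frame-pointer offsets, including the 64-word fixed part,
   representable.  */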
260 /* Return stack slot alignment in bits for TYPE and MODE. */
262 static unsigned int
263 get_stack_local_alignment (tree type, enum machine_mode mode)
265 unsigned int alignment;
267 if (mode == BLKmode)
268 alignment = BIGGEST_ALIGNMENT;
269 else
270 alignment = GET_MODE_ALIGNMENT (mode);
272 /* Allow the front-end to (possibly) increase the alignment of this
273 stack slot. */
274 if (! type)
275 type = lang_hooks.types.type_for_mode (mode, 0);
277 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
280 /* Determine whether it is possible to fit a stack slot of size SIZE and
281 alignment ALIGNMENT into an area in the stack frame that starts at
282 frame offset START and has a length of LENGTH. If so, store the frame
283 offset to be used for the stack slot in *POFFSET and return true;
284 return false otherwise. This function will extend the frame size when
285 given a start/length pair that lies at the end of the frame. */
287 static bool
288 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
289 HOST_WIDE_INT size, unsigned int alignment,
290 HOST_WIDE_INT *poffset)
292 HOST_WIDE_INT this_frame_offset;
293 int frame_off, frame_alignment, frame_phase;
295 /* Calculate how many bytes the start of local variables is off from
296 stack alignment. */
297 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
298 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
299 frame_phase = frame_off ? frame_alignment - frame_off : 0;
301 /* Round the frame offset to the specified alignment. */
303 /* We must be careful here, since FRAME_OFFSET might be negative and
304 division with a negative dividend isn't as well defined as we might
305 like. So we instead assume that ALIGNMENT is a power of two and
306 use logical operations which are unambiguous. */
307 if (FRAME_GROWS_DOWNWARD)
308 this_frame_offset
309 = (FLOOR_ROUND (start + length - size - frame_phase,
310 (unsigned HOST_WIDE_INT) alignment)
311 + frame_phase);
312 else
313 this_frame_offset
314 = (CEIL_ROUND (start - frame_phase,
315 (unsigned HOST_WIDE_INT) alignment)
316 + frame_phase);
318 /* See if it fits. If this space is at the edge of the frame,
319 consider extending the frame to make it fit. Our caller relies on
320 this when allocating a new slot. */
321 if (frame_offset == start && this_frame_offset < frame_offset)
322 frame_offset = this_frame_offset;
323 else if (this_frame_offset < start)
324 return false;
325 else if (start + length == frame_offset
326 && this_frame_offset + size > start + length)
327 frame_offset = this_frame_offset + size;
328 else if (this_frame_offset + size > start + length)
329 return false;
331 *poffset = this_frame_offset;
332 return true;
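/* A worked example (illustrative): with FRAME_GROWS_DOWNWARD, a zero
   frame phase, ALIGNMENT == 8, and a free area START == -40,
   LENGTH == 24, a request of SIZE == 8 yields
     this_frame_offset = FLOOR_ROUND (-40 + 24 - 8, 8) = -24,
   so the slot occupies [-24, -16) and the caller records [-40, -24)
   as remaining free space.  */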
335 /* Create a new frame_space structure describing free space in the stack
336 frame beginning at START and ending at END, and chain it into the
337 function's frame_space_list. */
339 static void
340 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
342 struct frame_space *space = ggc_alloc_frame_space ();
343 space->next = crtl->frame_space_list;
344 crtl->frame_space_list = space;
345 space->start = start;
346 space->length = end - start;
349 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
350 with machine mode MODE.
352 ALIGN controls the amount of alignment for the address of the slot:
353 0 means according to MODE,
354 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
355 -2 means use BITS_PER_UNIT,
356 positive specifies alignment boundary in bits.
358 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
359 alignment and ASLK_RECORD_PAD bit set if we should remember
360 extra space we allocated for alignment purposes. When we are
361 called from assign_stack_temp_for_type, it is not set so we don't
362 track the same stack slot in two independent lists.
364 We do not round to stack_boundary here. */
366 rtx
367 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
368 int align, int kind)
370 rtx x, addr;
371 int bigend_correction = 0;
372 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
373 unsigned int alignment, alignment_in_bits;
375 if (align == 0)
377 alignment = get_stack_local_alignment (NULL, mode);
378 alignment /= BITS_PER_UNIT;
380 else if (align == -1)
382 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
383 size = CEIL_ROUND (size, alignment);
385 else if (align == -2)
386 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
387 else
388 alignment = align / BITS_PER_UNIT;
390 alignment_in_bits = alignment * BITS_PER_UNIT;
392 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
393 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
395 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
396 alignment = alignment_in_bits / BITS_PER_UNIT;
399 if (SUPPORTS_STACK_ALIGNMENT)
401 if (crtl->stack_alignment_estimated < alignment_in_bits)
403 if (!crtl->stack_realign_processed)
404 crtl->stack_alignment_estimated = alignment_in_bits;
405 else
407 /* If stack is realigned and stack alignment value
408 hasn't been finalized, it is OK not to increase
409 stack_alignment_estimated. The bigger alignment
410 requirement is recorded in stack_alignment_needed
411 below. */
412 gcc_assert (!crtl->stack_realign_finalized);
413 if (!crtl->stack_realign_needed)
415 /* It is OK to reduce the alignment as long as the
416 requested size is 0 or the estimated stack
417 alignment >= mode alignment. */
418 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
419 || size == 0
420 || (crtl->stack_alignment_estimated
421 >= GET_MODE_ALIGNMENT (mode)));
422 alignment_in_bits = crtl->stack_alignment_estimated;
423 alignment = alignment_in_bits / BITS_PER_UNIT;
429 if (crtl->stack_alignment_needed < alignment_in_bits)
430 crtl->stack_alignment_needed = alignment_in_bits;
431 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
432 crtl->max_used_stack_slot_alignment = alignment_in_bits;
434 if (mode != BLKmode || size != 0)
436 if (kind & ASLK_RECORD_PAD)
438 struct frame_space **psp;
440 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
442 struct frame_space *space = *psp;
443 if (!try_fit_stack_local (space->start, space->length, size,
444 alignment, &slot_offset))
445 continue;
446 *psp = space->next;
447 if (slot_offset > space->start)
448 add_frame_space (space->start, slot_offset);
449 if (slot_offset + size < space->start + space->length)
450 add_frame_space (slot_offset + size,
451 space->start + space->length);
452 goto found_space;
456 else if (!STACK_ALIGNMENT_NEEDED)
458 slot_offset = frame_offset;
459 goto found_space;
462 old_frame_offset = frame_offset;
464 if (FRAME_GROWS_DOWNWARD)
466 frame_offset -= size;
467 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
469 if (kind & ASLK_RECORD_PAD)
471 if (slot_offset > frame_offset)
472 add_frame_space (frame_offset, slot_offset);
473 if (slot_offset + size < old_frame_offset)
474 add_frame_space (slot_offset + size, old_frame_offset);
477 else
479 frame_offset += size;
480 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
482 if (kind & ASLK_RECORD_PAD)
484 if (slot_offset > old_frame_offset)
485 add_frame_space (old_frame_offset, slot_offset);
486 if (slot_offset + size < frame_offset)
487 add_frame_space (slot_offset + size, frame_offset);
491 found_space:
492 /* On a big-endian machine, if we are allocating more space than we will use,
493 use the least significant bytes of those that are allocated. */
494 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
495 bigend_correction = size - GET_MODE_SIZE (mode);
497 /* If we have already instantiated virtual registers, return the actual
498 address relative to the frame pointer. */
499 if (virtuals_instantiated)
500 addr = plus_constant (frame_pointer_rtx,
501 trunc_int_for_mode
502 (slot_offset + bigend_correction
503 + STARTING_FRAME_OFFSET, Pmode));
504 else
505 addr = plus_constant (virtual_stack_vars_rtx,
506 trunc_int_for_mode
507 (slot_offset + bigend_correction,
508 Pmode));
510 x = gen_rtx_MEM (mode, addr);
511 set_mem_align (x, alignment_in_bits);
512 MEM_NOTRAP_P (x) = 1;
514 stack_slot_list
515 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
517 if (frame_offset_overflow (frame_offset, current_function_decl))
518 frame_offset = 0;
520 return x;
523 /* Wrap up assign_stack_local_1 with last parameter as false. */
525 rtx
526 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
528 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
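/* Typical uses, as an illustrative sketch (GET_MODE_SIZE (DImode) is
   8 bytes on most targets):

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);
     rtx buf  = assign_stack_local (BLKmode, 16, -1);

   The first call allocates a slot with DImode alignment; the second a
   16-byte BLKmode area aligned, and size-rounded, to
   BIGGEST_ALIGNMENT.  */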
532 /* In order to evaluate some expressions, such as function calls returning
533 structures in memory, we need to temporarily allocate stack locations.
534 We record each allocated temporary in the following structure.
536 Associated with each temporary slot is a nesting level. When we pop up
537 one level, all temporaries associated with the previous level are freed.
538 Normally, all temporaries are freed after the execution of the statement
539 in which they were created. However, if we are inside a ({...}) grouping,
540 the result may be in a temporary and hence must be preserved. If the
541 result could be in a temporary, we preserve it if we can determine which
542 one it is in. If we cannot determine which temporary may contain the
543 result, all temporaries are preserved. A temporary is preserved by
544 pretending it was allocated at the previous nesting level.
546 Automatic variables are also assigned temporary slots, at the nesting
547 level where they are defined. They are marked as "kept" so that
548 free_temp_slots will not free them. */
550 struct GTY(()) temp_slot {
551 /* Points to next temporary slot. */
552 struct temp_slot *next;
553 /* Points to previous temporary slot. */
554 struct temp_slot *prev;
555 /* The rtx used to reference the slot. */
556 rtx slot;
557 /* The size, in units, of the slot. */
558 HOST_WIDE_INT size;
559 /* The type of the object in the slot, or zero if it doesn't correspond
560 to a type. We use this to determine whether a slot can be reused.
561 It can be reused if objects of the type of the new slot will always
562 conflict with objects of the type of the old slot. */
563 tree type;
564 /* The alignment (in bits) of the slot. */
565 unsigned int align;
566 /* Nonzero if this temporary is currently in use. */
567 char in_use;
568 /* Nonzero if this temporary has its address taken. */
569 char addr_taken;
570 /* Nesting level at which this slot is being used. */
571 int level;
572 /* Nonzero if this should survive a call to free_temp_slots. */
573 int keep;
574 /* The offset of the slot from the frame_pointer, including extra space
575 for alignment. This info is for combine_temp_slots. */
576 HOST_WIDE_INT base_offset;
577 /* The size of the slot, including extra space for alignment. This
578 info is for combine_temp_slots. */
579 HOST_WIDE_INT full_size;
582 /* A table of addresses that represent a stack slot. The table is a mapping
583 from address RTXen to a temp slot. */
584 static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
586 /* Entry for the above hash table. */
587 struct GTY(()) temp_slot_address_entry {
588 hashval_t hash;
589 rtx address;
590 struct temp_slot *temp_slot;
593 /* Removes temporary slot TEMP from LIST. */
595 static void
596 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
598 if (temp->next)
599 temp->next->prev = temp->prev;
600 if (temp->prev)
601 temp->prev->next = temp->next;
602 else
603 *list = temp->next;
605 temp->prev = temp->next = NULL;
608 /* Inserts temporary slot TEMP to LIST. */
610 static void
611 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
613 temp->next = *list;
614 if (*list)
615 (*list)->prev = temp;
616 temp->prev = NULL;
617 *list = temp;
620 /* Returns the list of used temp slots at LEVEL. */
622 static struct temp_slot **
623 temp_slots_at_level (int level)
625 if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
626 VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);
628 return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
631 /* Returns the maximal temporary slot level. */
633 static int
634 max_slot_level (void)
636 if (!used_temp_slots)
637 return -1;
639 return VEC_length (temp_slot_p, used_temp_slots) - 1;
642 /* Moves temporary slot TEMP to LEVEL. */
644 static void
645 move_slot_to_level (struct temp_slot *temp, int level)
647 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
648 insert_slot_to_list (temp, temp_slots_at_level (level));
649 temp->level = level;
652 /* Make temporary slot TEMP available. */
654 static void
655 make_slot_available (struct temp_slot *temp)
657 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
658 insert_slot_to_list (temp, &avail_temp_slots);
659 temp->in_use = 0;
660 temp->level = -1;
663 /* Compute the hash value for an address -> temp slot mapping.
664 The value is cached on the mapping entry. */
665 static hashval_t
666 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
668 int do_not_record = 0;
669 return hash_rtx (t->address, GET_MODE (t->address),
670 &do_not_record, NULL, false);
673 /* Return the hash value for an address -> temp slot mapping. */
674 static hashval_t
675 temp_slot_address_hash (const void *p)
677 const struct temp_slot_address_entry *t;
678 t = (const struct temp_slot_address_entry *) p;
679 return t->hash;
682 /* Compare two address -> temp slot mapping entries. */
683 static int
684 temp_slot_address_eq (const void *p1, const void *p2)
686 const struct temp_slot_address_entry *t1, *t2;
687 t1 = (const struct temp_slot_address_entry *) p1;
688 t2 = (const struct temp_slot_address_entry *) p2;
689 return exp_equiv_p (t1->address, t2->address, 0, true);
692 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
693 static void
694 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
696 void **slot;
697 struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
698 t->address = address;
699 t->temp_slot = temp_slot;
700 t->hash = temp_slot_address_compute_hash (t);
701 slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
702 *slot = t;
705 /* Remove an address -> temp slot mapping entry if the temp slot is
706 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
707 static int
708 remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
710 const struct temp_slot_address_entry *t;
711 t = (const struct temp_slot_address_entry *) *slot;
712 if (! t->temp_slot->in_use)
713 *slot = NULL;
714 return 1;
717 /* Remove all mappings of addresses to unused temp slots. */
718 static void
719 remove_unused_temp_slot_addresses (void)
721 htab_traverse (temp_slot_address_table,
722 remove_unused_temp_slot_addresses_1,
723 NULL);
726 /* Find the temp slot corresponding to the object at address X. */
728 static struct temp_slot *
729 find_temp_slot_from_address (rtx x)
731 struct temp_slot *p;
732 struct temp_slot_address_entry tmp, *t;
734 /* First try the easy way:
735 See if X exists in the address -> temp slot mapping. */
736 tmp.address = x;
737 tmp.temp_slot = NULL;
738 tmp.hash = temp_slot_address_compute_hash (&tmp);
739 t = (struct temp_slot_address_entry *)
740 htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
741 if (t)
742 return t->temp_slot;
744 /* If we have a sum involving a register, see if it points to a temp
745 slot. */
746 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
747 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
748 return p;
749 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
750 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
751 return p;
753 /* Last resort: Address is a virtual stack var address. */
754 if (GET_CODE (x) == PLUS
755 && XEXP (x, 0) == virtual_stack_vars_rtx
756 && CONST_INT_P (XEXP (x, 1)))
758 int i;
759 for (i = max_slot_level (); i >= 0; i--)
760 for (p = *temp_slots_at_level (i); p; p = p->next)
762 if (INTVAL (XEXP (x, 1)) >= p->base_offset
763 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
764 return p;
768 return NULL;
771 /* Allocate a temporary stack slot and record it for possible later
772 reuse.
774 MODE is the machine mode to be given to the returned rtx.
776 SIZE is the size in units of the space required. We do no rounding here
777 since assign_stack_local will do any required rounding.
779 KEEP is 1 if this slot is to be retained after a call to
780 free_temp_slots. Automatic variables for a block are allocated
781 with this flag. KEEP values of 2 or 3 were needed respectively
782 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
783 or for SAVE_EXPRs, but they are now unused.
785 TYPE is the type that will be used for the stack slot. */
787 rtx
788 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
789 int keep, tree type)
791 unsigned int align;
792 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
793 rtx slot;
795 /* If SIZE is -1 it means that somebody tried to allocate a temporary
796 of a variable size. */
797 gcc_assert (size != -1);
799 /* These are now unused. */
800 gcc_assert (keep <= 1);
802 align = get_stack_local_alignment (type, mode);
804 /* Try to find an available, already-allocated temporary of the proper
805 mode which meets the size and alignment requirements. Choose the
806 smallest one with the closest alignment.
808 If assign_stack_temp is called outside of the tree->rtl expansion,
809 we cannot reuse the stack slots (that may still refer to
810 VIRTUAL_STACK_VARS_REGNUM). */
811 if (!virtuals_instantiated)
813 for (p = avail_temp_slots; p; p = p->next)
815 if (p->align >= align && p->size >= size
816 && GET_MODE (p->slot) == mode
817 && objects_must_conflict_p (p->type, type)
818 && (best_p == 0 || best_p->size > p->size
819 || (best_p->size == p->size && best_p->align > p->align)))
821 if (p->align == align && p->size == size)
823 selected = p;
824 cut_slot_from_list (selected, &avail_temp_slots);
825 best_p = 0;
826 break;
828 best_p = p;
833 /* Make our best, if any, the one to use. */
834 if (best_p)
836 selected = best_p;
837 cut_slot_from_list (selected, &avail_temp_slots);
839 /* If there are enough aligned bytes left over, make them into a new
840 temp_slot so that the extra bytes don't get wasted. Do this only
841 for BLKmode slots, so that we can be sure of the alignment. */
842 if (GET_MODE (best_p->slot) == BLKmode)
844 int alignment = best_p->align / BITS_PER_UNIT;
845 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
847 if (best_p->size - rounded_size >= alignment)
849 p = ggc_alloc_temp_slot ();
850 p->in_use = p->addr_taken = 0;
851 p->size = best_p->size - rounded_size;
852 p->base_offset = best_p->base_offset + rounded_size;
853 p->full_size = best_p->full_size - rounded_size;
854 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
855 p->align = best_p->align;
856 p->type = best_p->type;
857 insert_slot_to_list (p, &avail_temp_slots);
859 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
860 stack_slot_list);
862 best_p->size = rounded_size;
863 best_p->full_size = rounded_size;
868 /* If we still didn't find one, make a new temporary. */
869 if (selected == 0)
871 HOST_WIDE_INT frame_offset_old = frame_offset;
873 p = ggc_alloc_temp_slot ();
875 /* We are passing an explicit alignment request to assign_stack_local.
876 One side effect of that is assign_stack_local will not round SIZE
877 to ensure the frame offset remains suitably aligned.
879 So for requests which depended on the rounding of SIZE, we go ahead
880 and round it now. We also make sure ALIGNMENT is at least
881 BIGGEST_ALIGNMENT. */
882 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
883 p->slot = assign_stack_local_1 (mode,
884 (mode == BLKmode
885 ? CEIL_ROUND (size,
886 (int) align
887 / BITS_PER_UNIT)
888 : size),
889 align, 0);
891 p->align = align;
893 /* The following slot size computation is necessary because we don't
894 know the actual size of the temporary slot until assign_stack_local
895 has performed all the frame alignment and size rounding for the
896 requested temporary. Note that extra space added for alignment
897 can be either above or below this stack slot depending on which
898 way the frame grows. We include the extra space if and only if it
899 is above this slot. */
900 if (FRAME_GROWS_DOWNWARD)
901 p->size = frame_offset_old - frame_offset;
902 else
903 p->size = size;
905 /* Now define the fields used by combine_temp_slots. */
906 if (FRAME_GROWS_DOWNWARD)
908 p->base_offset = frame_offset;
909 p->full_size = frame_offset_old - frame_offset;
911 else
913 p->base_offset = frame_offset_old;
914 p->full_size = frame_offset - frame_offset_old;
917 selected = p;
920 p = selected;
921 p->in_use = 1;
922 p->addr_taken = 0;
923 p->type = type;
924 p->level = temp_slot_level;
925 p->keep = keep;
927 pp = temp_slots_at_level (p->level);
928 insert_slot_to_list (p, pp);
929 insert_temp_slot_address (XEXP (p->slot, 0), p);
931 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
932 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
933 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
935 /* If we know the alias set for the memory that will be used, use
936 it. If there's no TYPE, then we don't know anything about the
937 alias set for the memory. */
938 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
939 set_mem_align (slot, align);
941 /* If a type is specified, set the relevant flags. */
942 if (type != 0)
944 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
945 gcc_checking_assert (!MEM_SCALAR_P (slot) && !MEM_IN_STRUCT_P (slot));
946 if (AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE)
947 MEM_IN_STRUCT_P (slot) = 1;
948 else
949 MEM_SCALAR_P (slot) = 1;
951 MEM_NOTRAP_P (slot) = 1;
953 return slot;
956 /* Allocate a temporary stack slot and record it for possible later
957 reuse. The first three arguments are the same as in the preceding function. */
959 rtx
960 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
962 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
965 /* Assign a temporary.
966 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
967 and so its name should be used in error messages. In either case, we
968 allocate space of the given type.
969 KEEP is as for assign_stack_temp.
970 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
971 it is 0 if a register is OK.
972 DONT_PROMOTE is 1 if we should not promote values in register
973 to wider modes. */
975 rtx
976 assign_temp (tree type_or_decl, int keep, int memory_required,
977 int dont_promote ATTRIBUTE_UNUSED)
979 tree type, decl;
980 enum machine_mode mode;
981 #ifdef PROMOTE_MODE
982 int unsignedp;
983 #endif
985 if (DECL_P (type_or_decl))
986 decl = type_or_decl, type = TREE_TYPE (decl);
987 else
988 decl = NULL, type = type_or_decl;
990 mode = TYPE_MODE (type);
991 #ifdef PROMOTE_MODE
992 unsignedp = TYPE_UNSIGNED (type);
993 #endif
995 if (mode == BLKmode || memory_required)
997 HOST_WIDE_INT size = int_size_in_bytes (type);
998 rtx tmp;
1000 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
1001 problems with allocating the stack space. */
1002 if (size == 0)
1003 size = 1;
1005 /* Unfortunately, we don't yet know how to allocate variable-sized
1006 temporaries. However, sometimes we can find a fixed upper limit on
1007 the size, so try that instead. */
1008 else if (size == -1)
1009 size = max_int_size_in_bytes (type);
1011 /* The size of the temporary may be too large to fit into an integer. */
1012 /* ??? Not sure this should happen except for user silliness, so limit
1013 this to things that aren't compiler-generated temporaries. The
1014 rest of the time we'll die in assign_stack_temp_for_type. */
1015 if (decl && size == -1
1016 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
1018 error ("size of variable %q+D is too large", decl);
1019 size = 1;
1022 tmp = assign_stack_temp_for_type (mode, size, keep, type);
1023 return tmp;
1026 #ifdef PROMOTE_MODE
1027 if (! dont_promote)
1028 mode = promote_mode (type, mode, &unsignedp);
1029 #endif
1031 return gen_reg_rtx (mode);
1034 /* Combine temporary stack slots which are adjacent on the stack.
1036 This allows for better use of already allocated stack space. This is only
1037 done for BLKmode slots because we can be sure that we won't have alignment
1038 problems in this case. */
1040 static void
1041 combine_temp_slots (void)
1043 struct temp_slot *p, *q, *next, *next_q;
1044 int num_slots;
1046 /* We can't combine slots, because the information about which slot
1047 is in which alias set will be lost. */
1048 if (flag_strict_aliasing)
1049 return;
1051 /* If there are a lot of temp slots, don't do anything unless
1052 high levels of optimization are enabled. */
1053 if (! flag_expensive_optimizations)
1054 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1055 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1056 return;
1058 for (p = avail_temp_slots; p; p = next)
1060 int delete_p = 0;
1062 next = p->next;
1064 if (GET_MODE (p->slot) != BLKmode)
1065 continue;
1067 for (q = p->next; q; q = next_q)
1069 int delete_q = 0;
1071 next_q = q->next;
1073 if (GET_MODE (q->slot) != BLKmode)
1074 continue;
1076 if (p->base_offset + p->full_size == q->base_offset)
1078 /* Q comes after P; combine Q into P. */
1079 p->size += q->size;
1080 p->full_size += q->full_size;
1081 delete_q = 1;
1083 else if (q->base_offset + q->full_size == p->base_offset)
1085 /* P comes after Q; combine P into Q. */
1086 q->size += p->size;
1087 q->full_size += p->full_size;
1088 delete_p = 1;
1089 break;
1091 if (delete_q)
1092 cut_slot_from_list (q, &avail_temp_slots);
1095 /* Either delete P or advance past it. */
1096 if (delete_p)
1097 cut_slot_from_list (p, &avail_temp_slots);
1101 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1102 slot that previously was known by OLD_RTX. */
1104 void
1105 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1107 struct temp_slot *p;
1109 if (rtx_equal_p (old_rtx, new_rtx))
1110 return;
1112 p = find_temp_slot_from_address (old_rtx);
1114 /* If we didn't find one, see if OLD_RTX is a PLUS. If so, and
1115 NEW_RTX is a register, see if one operand of the PLUS is a
1116 temporary location. If so, NEW_RTX points into it. Otherwise,
1117 if both OLD_RTX and NEW_RTX are a PLUS and there is a register
1118 in common between them, try a recursive call on those
1119 values. */
1120 if (p == 0)
1122 if (GET_CODE (old_rtx) != PLUS)
1123 return;
1125 if (REG_P (new_rtx))
1127 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1128 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1129 return;
1131 else if (GET_CODE (new_rtx) != PLUS)
1132 return;
1134 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1135 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1136 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1137 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1138 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1139 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1140 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1141 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1143 return;
1146 /* Otherwise add an alias for the temp's address. */
1147 insert_temp_slot_address (new_rtx, p);
1150 /* If X could be a reference to a temporary slot, mark the fact that its
1151 address was taken. */
1153 void
1154 mark_temp_addr_taken (rtx x)
1156 struct temp_slot *p;
1158 if (x == 0)
1159 return;
1161 /* If X is not in memory or is at a constant address, it cannot be in
1162 a temporary slot. */
1163 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1164 return;
1166 p = find_temp_slot_from_address (XEXP (x, 0));
1167 if (p != 0)
1168 p->addr_taken = 1;
1171 /* If X could be a reference to a temporary slot, mark that slot as
1172 belonging to the level one higher than the current level. If X
1173 matched one of our slots, just mark that one. Otherwise, we can't
1174 easily predict which it is, so upgrade all of them. Kept slots
1175 need not be touched.
1177 This is called when an ({...}) construct occurs and a statement
1178 returns a value in memory. */
1180 void
1181 preserve_temp_slots (rtx x)
1183 struct temp_slot *p = 0, *next;
1185 /* If there is no result, we still might have some objects whose
1186 addresses were taken, so we need to make sure they stay around. */
1187 if (x == 0)
1189 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1191 next = p->next;
1193 if (p->addr_taken)
1194 move_slot_to_level (p, temp_slot_level - 1);
1197 return;
1200 /* If X is a register that is being used as a pointer, see if we have
1201 a temporary slot we know it points to. To be consistent with
1202 the code below, we really should preserve all non-kept slots
1203 if we can't find a match, but that seems to be much too costly. */
1204 if (REG_P (x) && REG_POINTER (x))
1205 p = find_temp_slot_from_address (x);
1207 /* If X is not in memory or is at a constant address, it cannot be in
1208 a temporary slot, but it can contain something whose address was
1209 taken. */
1210 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1212 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1214 next = p->next;
1216 if (p->addr_taken)
1217 move_slot_to_level (p, temp_slot_level - 1);
1220 return;
1223 /* First see if we can find a match. */
1224 if (p == 0)
1225 p = find_temp_slot_from_address (XEXP (x, 0));
1227 if (p != 0)
1229 /* Move everything at our level whose address was taken to our new
1230 level in case we used its address. */
1231 struct temp_slot *q;
1233 if (p->level == temp_slot_level)
1235 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1237 next = q->next;
1239 if (p != q && q->addr_taken)
1240 move_slot_to_level (q, temp_slot_level - 1);
1243 move_slot_to_level (p, temp_slot_level - 1);
1244 p->addr_taken = 0;
1246 return;
1249 /* Otherwise, preserve all non-kept slots at this level. */
1250 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1252 next = p->next;
1254 if (!p->keep)
1255 move_slot_to_level (p, temp_slot_level - 1);
1259 /* Free all temporaries used so far. This is normally called at the
1260 end of generating code for a statement. */
1262 void
1263 free_temp_slots (void)
1265 struct temp_slot *p, *next;
1266 bool some_available = false;
1268 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1270 next = p->next;
1272 if (!p->keep)
1274 make_slot_available (p);
1275 some_available = true;
1279 if (some_available)
1281 remove_unused_temp_slot_addresses ();
1282 combine_temp_slots ();
1286 /* Push deeper into the nesting level for stack temporaries. */
1288 void
1289 push_temp_slots (void)
1291 temp_slot_level++;
1294 /* Pop a temporary nesting level. All slots in use in the current level
1295 are freed. */
1297 void
1298 pop_temp_slots (void)
1300 struct temp_slot *p, *next;
1301 bool some_available = false;
1303 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1305 next = p->next;
1306 make_slot_available (p);
1307 some_available = true;
1310 if (some_available)
1312 remove_unused_temp_slot_addresses ();
1313 combine_temp_slots ();
1316 temp_slot_level--;
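/* An illustrative sketch of how these levels are typically used when
   expanding a statement:

     push_temp_slots ();
     tmp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
     ... emit code that uses TMP ...
     preserve_temp_slots (tmp);
     pop_temp_slots ();

   The preserve_temp_slots call is needed only if TMP must survive the
   pop; otherwise the slot is made available for reuse at the
   enclosing level.  */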
1319 /* Initialize temporary slots. */
1321 void
1322 init_temp_slots (void)
1324 /* We have not allocated any temporaries yet. */
1325 avail_temp_slots = 0;
1326 used_temp_slots = 0;
1327 temp_slot_level = 0;
1329 /* Set up the table to map addresses to temp slots. */
1330 if (! temp_slot_address_table)
1331 temp_slot_address_table = htab_create_ggc (32,
1332 temp_slot_address_hash,
1333 temp_slot_address_eq,
1334 NULL);
1335 else
1336 htab_empty (temp_slot_address_table);
1339 /* These routines are responsible for converting virtual register references
1340 to the actual hard register references once RTL generation is complete.
1342 The following four variables are used for communication between the
1343 routines. They contain the offsets of the virtual registers from their
1344 respective hard registers. */
1346 static int in_arg_offset;
1347 static int var_offset;
1348 static int dynamic_offset;
1349 static int out_arg_offset;
1350 static int cfa_offset;
1352 /* In most machines, the stack pointer register is equivalent to the bottom
1353 of the stack. */
1355 #ifndef STACK_POINTER_OFFSET
1356 #define STACK_POINTER_OFFSET 0
1357 #endif
1359 /* If not defined, pick an appropriate default for the offset of dynamically
1360 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1361 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1363 #ifndef STACK_DYNAMIC_OFFSET
1365 /* The bottom of the stack points to the actual arguments. If
1366 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1367 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1368 stack space for register parameters is not pushed by the caller, but
1369 rather part of the fixed stack areas and hence not included in
1370 `crtl->outgoing_args_size'. Nevertheless, we must allow
1371 for it when allocating stack dynamic objects. */
1373 #if defined(REG_PARM_STACK_SPACE)
1374 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1375 ((ACCUMULATE_OUTGOING_ARGS \
1376 ? (crtl->outgoing_args_size \
1377 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1378 : REG_PARM_STACK_SPACE (FNDECL))) \
1379 : 0) + (STACK_POINTER_OFFSET))
1380 #else
1381 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1382 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1383 + (STACK_POINTER_OFFSET))
1384 #endif
1385 #endif
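/* For example, on a target that accumulates outgoing arguments and
   defines neither REG_PARM_STACK_SPACE nor its own
   STACK_DYNAMIC_OFFSET, dynamic allocations start at
   crtl->outgoing_args_size + STACK_POINTER_OFFSET bytes from the
   stack pointer.  */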
1388 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1389 is a virtual register, return the equivalent hard register and set the
1390 offset indirectly through the pointer. Otherwise, return 0. */
1392 static rtx
1393 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1395 rtx new_rtx;
1396 HOST_WIDE_INT offset;
1398 if (x == virtual_incoming_args_rtx)
1400 if (stack_realign_drap)
1402 /* Replace virtual_incoming_args_rtx with internal arg
1403 pointer if DRAP is used to realign stack. */
1404 new_rtx = crtl->args.internal_arg_pointer;
1405 offset = 0;
1407 else
1408 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1410 else if (x == virtual_stack_vars_rtx)
1411 new_rtx = frame_pointer_rtx, offset = var_offset;
1412 else if (x == virtual_stack_dynamic_rtx)
1413 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1414 else if (x == virtual_outgoing_args_rtx)
1415 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1416 else if (x == virtual_cfa_rtx)
1418 #ifdef FRAME_POINTER_CFA_OFFSET
1419 new_rtx = frame_pointer_rtx;
1420 #else
1421 new_rtx = arg_pointer_rtx;
1422 #endif
1423 offset = cfa_offset;
1425 else if (x == virtual_preferred_stack_boundary_rtx)
1427 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1428 offset = 0;
1430 else
1431 return NULL_RTX;
1433 *poffset = offset;
1434 return new_rtx;
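/* For example (illustrative): a use of
     (plus virtual_stack_vars_rtx (const_int 8))
   is rewritten by the callers below into
     (plus frame_pointer_rtx (const_int (8 + var_offset)))
   where var_offset is STARTING_FRAME_OFFSET, as computed in
   instantiate_virtual_regs.  */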
1437 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1438 Instantiate any virtual registers present inside of *LOC. The expression
1439 is simplified, as much as possible, but is not to be considered "valid"
1440 in any sense implied by the target. If any change is made, set CHANGED
1441 to true. */
1443 static int
1444 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1446 HOST_WIDE_INT offset;
1447 bool *changed = (bool *) data;
1448 rtx x, new_rtx;
1450 x = *loc;
1451 if (x == 0)
1452 return 0;
1454 switch (GET_CODE (x))
1456 case REG:
1457 new_rtx = instantiate_new_reg (x, &offset);
1458 if (new_rtx)
1460 *loc = plus_constant (new_rtx, offset);
1461 if (changed)
1462 *changed = true;
1464 return -1;
1466 case PLUS:
1467 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1468 if (new_rtx)
1470 new_rtx = plus_constant (new_rtx, offset);
1471 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
1472 if (changed)
1473 *changed = true;
1474 return -1;
1477 /* FIXME -- from old code */
1478 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1479 we can commute the PLUS and SUBREG because pointers into the
1480 frame are well-behaved. */
1481 break;
1483 default:
1484 break;
1487 return 0;
1490 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1491 matches the predicate for insn CODE operand OPERAND. */
1493 static int
1494 safe_insn_predicate (int code, int operand, rtx x)
1496 const struct insn_operand_data *op_data;
1498 if (code < 0)
1499 return true;
1501 op_data = &insn_data[code].operand[operand];
1502 if (op_data->predicate == NULL)
1503 return true;
1505 return op_data->predicate (x, op_data->mode);
1508 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1509 registers present inside of insn. The result will be a valid insn. */
1511 static void
1512 instantiate_virtual_regs_in_insn (rtx insn)
1514 HOST_WIDE_INT offset;
1515 int insn_code, i;
1516 bool any_change = false;
1517 rtx set, new_rtx, x, seq;
1519 /* There are some special cases to be handled first. */
1520 set = single_set (insn);
1521 if (set)
1523 /* We're allowed to assign to a virtual register. This is interpreted
1524 to mean that the underlying register gets assigned the inverse
1525 transformation. This is used, for example, in the handling of
1526 non-local gotos. */
1527 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1528 if (new_rtx)
1530 start_sequence ();
1532 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1533 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1534 GEN_INT (-offset));
1535 x = force_operand (x, new_rtx);
1536 if (x != new_rtx)
1537 emit_move_insn (new_rtx, x);
1539 seq = get_insns ();
1540 end_sequence ();
1542 emit_insn_before (seq, insn);
1543 delete_insn (insn);
1544 return;
1547 /* Handle a straight copy from a virtual register by generating a
1548 new add insn. The difference between this and falling through
1549 to the generic case is avoiding a new pseudo and eliminating a
1550 move insn in the initial rtl stream. */
1551 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1552 if (new_rtx && offset != 0
1553 && REG_P (SET_DEST (set))
1554 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1556 start_sequence ();
1558 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1559 new_rtx, GEN_INT (offset), SET_DEST (set),
1560 1, OPTAB_LIB_WIDEN);
1561 if (x != SET_DEST (set))
1562 emit_move_insn (SET_DEST (set), x);
1564 seq = get_insns ();
1565 end_sequence ();
1567 emit_insn_before (seq, insn);
1568 delete_insn (insn);
1569 return;
1572 extract_insn (insn);
1573 insn_code = INSN_CODE (insn);
1575 /* Handle a plus involving a virtual register by determining if the
1576 operands remain valid if they're modified in place. */
1577 if (GET_CODE (SET_SRC (set)) == PLUS
1578 && recog_data.n_operands >= 3
1579 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1580 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1581 && CONST_INT_P (recog_data.operand[2])
1582 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1584 offset += INTVAL (recog_data.operand[2]);
1586 /* If the sum is zero, then replace with a plain move. */
1587 if (offset == 0
1588 && REG_P (SET_DEST (set))
1589 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1591 start_sequence ();
1592 emit_move_insn (SET_DEST (set), new_rtx);
1593 seq = get_insns ();
1594 end_sequence ();
1596 emit_insn_before (seq, insn);
1597 delete_insn (insn);
1598 return;
1601 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1603 /* Using validate_change and apply_change_group here leaves
1604 recog_data in an invalid state. Since we know exactly what
1605 we want to check, do those two by hand. */
1606 if (safe_insn_predicate (insn_code, 1, new_rtx)
1607 && safe_insn_predicate (insn_code, 2, x))
1609 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1610 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1611 any_change = true;
1613 /* Fall through into the regular operand fixup loop in
1614 order to take care of operands other than 1 and 2. */
1618 else
1620 extract_insn (insn);
1621 insn_code = INSN_CODE (insn);
1624 /* In the general case, we expect virtual registers to appear only in
1625 operands, and then only as either bare registers or inside memories. */
1626 for (i = 0; i < recog_data.n_operands; ++i)
1628 x = recog_data.operand[i];
1629 switch (GET_CODE (x))
1631 case MEM:
1633 rtx addr = XEXP (x, 0);
1634 bool changed = false;
1636 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1637 if (!changed)
1638 continue;
1640 start_sequence ();
1641 x = replace_equiv_address (x, addr);
1642 /* It may happen that the address with the virtual reg
1643 was valid (e.g. based on the virtual stack reg, which might
1644 be acceptable to the predicates with all offsets), whereas
1645 the address now isn't valid anymore, for instance when the address
1646 still has an offset, but the base reg isn't virtual-stack-reg
1647 anymore. Below we would do a force_reg on the whole operand,
1648 but this insn might actually only accept memory. Hence,
1649 before doing that last resort, try to reload the address into
1650 a register, so this operand stays a MEM. */
1651 if (!safe_insn_predicate (insn_code, i, x))
1653 addr = force_reg (GET_MODE (addr), addr);
1654 x = replace_equiv_address (x, addr);
1656 seq = get_insns ();
1657 end_sequence ();
1658 if (seq)
1659 emit_insn_before (seq, insn);
1661 break;
1663 case REG:
1664 new_rtx = instantiate_new_reg (x, &offset);
1665 if (new_rtx == NULL)
1666 continue;
1667 if (offset == 0)
1668 x = new_rtx;
1669 else
1671 start_sequence ();
1673 /* Careful, special mode predicates may have stuff in
1674 insn_data[insn_code].operand[i].mode that isn't useful
1675 to us for computing a new value. */
1676 /* ??? Recognize address_operand and/or "p" constraints
1677 to see if (plus new offset) is valid before we put
1678 this through expand_simple_binop. */
1679 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1680 GEN_INT (offset), NULL_RTX,
1681 1, OPTAB_LIB_WIDEN);
1682 seq = get_insns ();
1683 end_sequence ();
1684 emit_insn_before (seq, insn);
1686 break;
1688 case SUBREG:
1689 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1690 if (new_rtx == NULL)
1691 continue;
1692 if (offset != 0)
1694 start_sequence ();
1695 new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
1696 GEN_INT (offset), NULL_RTX,
1697 1, OPTAB_LIB_WIDEN);
1698 seq = get_insns ();
1699 end_sequence ();
1700 emit_insn_before (seq, insn);
1702 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1703 GET_MODE (new_rtx), SUBREG_BYTE (x));
1704 gcc_assert (x);
1705 break;
1707 default:
1708 continue;
1711 /* At this point, X contains the new value for the operand.
1712 Validate the new value vs the insn predicate. Note that
1713 asm insns will have insn_code -1 here. */
1714 if (!safe_insn_predicate (insn_code, i, x))
1716 start_sequence ();
1717 if (REG_P (x))
1719 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1720 x = copy_to_reg (x);
1722 else
1723 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1724 seq = get_insns ();
1725 end_sequence ();
1726 if (seq)
1727 emit_insn_before (seq, insn);
1730 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1731 any_change = true;
1734 if (any_change)
1736 /* Propagate operand changes into the duplicates. */
1737 for (i = 0; i < recog_data.n_dups; ++i)
1738 *recog_data.dup_loc[i]
1739 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1741 /* Force re-recognition of the instruction for validation. */
1742 INSN_CODE (insn) = -1;
1745 if (asm_noperands (PATTERN (insn)) >= 0)
1747 if (!check_asm_operands (PATTERN (insn)))
1749 error_for_asm (insn, "impossible constraint in %<asm%>");
1750 delete_insn (insn);
1753 else
1755 if (recog_memoized (insn) < 0)
1756 fatal_insn_not_found (insn);
1760 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1761 do any instantiation required. */
1763 void
1764 instantiate_decl_rtl (rtx x)
1766 rtx addr;
1768 if (x == 0)
1769 return;
1771 /* If this is a CONCAT, recurse for the pieces. */
1772 if (GET_CODE (x) == CONCAT)
1774 instantiate_decl_rtl (XEXP (x, 0));
1775 instantiate_decl_rtl (XEXP (x, 1));
1776 return;
1779 /* If this is not a MEM, no need to do anything. Similarly if the
1780 address is a constant or a register that is not a virtual register. */
1781 if (!MEM_P (x))
1782 return;
1784 addr = XEXP (x, 0);
1785 if (CONSTANT_P (addr)
1786 || (REG_P (addr)
1787 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1788 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1789 return;
1791 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1794 /* Helper for instantiate_decls called via walk_tree: Process all decls
1795 in the given DECL_VALUE_EXPR. */
1797 static tree
1798 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1800 tree t = *tp;
1801 if (! EXPR_P (t))
1803 *walk_subtrees = 0;
1804 if (DECL_P (t))
1806 if (DECL_RTL_SET_P (t))
1807 instantiate_decl_rtl (DECL_RTL (t));
1808 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1809 && DECL_INCOMING_RTL (t))
1810 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1811 if ((TREE_CODE (t) == VAR_DECL
1812 || TREE_CODE (t) == RESULT_DECL)
1813 && DECL_HAS_VALUE_EXPR_P (t))
1815 tree v = DECL_VALUE_EXPR (t);
1816 walk_tree (&v, instantiate_expr, NULL, NULL);
1820 return NULL;
1823 /* Subroutine of instantiate_decls: Process all decls in the given
1824 BLOCK node and all its subblocks. */
1826 static void
1827 instantiate_decls_1 (tree let)
1829 tree t;
1831 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1833 if (DECL_RTL_SET_P (t))
1834 instantiate_decl_rtl (DECL_RTL (t));
1835 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1837 tree v = DECL_VALUE_EXPR (t);
1838 walk_tree (&v, instantiate_expr, NULL, NULL);
1842 /* Process all subblocks. */
1843 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1844 instantiate_decls_1 (t);
1847 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1848 all virtual registers in their DECL_RTL's. */
1850 static void
1851 instantiate_decls (tree fndecl)
1853 tree decl;
1854 unsigned ix;
1856 /* Process all parameters of the function. */
1857 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1859 instantiate_decl_rtl (DECL_RTL (decl));
1860 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1861 if (DECL_HAS_VALUE_EXPR_P (decl))
1863 tree v = DECL_VALUE_EXPR (decl);
1864 walk_tree (&v, instantiate_expr, NULL, NULL);
1868 if ((decl = DECL_RESULT (fndecl))
1869 && TREE_CODE (decl) == RESULT_DECL)
1871 if (DECL_RTL_SET_P (decl))
1872 instantiate_decl_rtl (DECL_RTL (decl));
1873 if (DECL_HAS_VALUE_EXPR_P (decl))
1875 tree v = DECL_VALUE_EXPR (decl);
1876 walk_tree (&v, instantiate_expr, NULL, NULL);
1880 /* Now process all variables defined in the function or its subblocks. */
1881 instantiate_decls_1 (DECL_INITIAL (fndecl));
1883 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1884 if (DECL_RTL_SET_P (decl))
1885 instantiate_decl_rtl (DECL_RTL (decl));
1886 VEC_free (tree, gc, cfun->local_decls);
1889 /* Pass through the INSNS of function FNDECL and convert virtual register
1890 references to hard register references. */
1892 static unsigned int
1893 instantiate_virtual_regs (void)
1895 rtx insn;
1897 /* Compute the offsets to use for this function. */
1898 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1899 var_offset = STARTING_FRAME_OFFSET;
1900 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1901 out_arg_offset = STACK_POINTER_OFFSET;
1902 #ifdef FRAME_POINTER_CFA_OFFSET
1903 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1904 #else
1905 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1906 #endif
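/* For reference, the replacements these offsets feed (see
   instantiate_new_reg earlier in this file) are:

     virtual-incoming-args -> arg pointer   + in_arg_offset
     virtual-stack-vars    -> frame pointer + var_offset
     virtual-stack-dynamic -> stack pointer + dynamic_offset
     virtual-outgoing-args -> stack pointer + out_arg_offset
     virtual-cfa           -> arg or frame pointer + cfa_offset  */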
1908 /* Initialize recognition, indicating that volatile is OK. */
1909 init_recog ();
1911 /* Scan through all the insns, instantiating every virtual register still
1912 present. */
1913 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1914 if (INSN_P (insn))
1916 /* These patterns in the instruction stream can never be recognized.
1917 Fortunately, they shouldn't contain virtual registers either. */
1918 if (GET_CODE (PATTERN (insn)) == USE
1919 || GET_CODE (PATTERN (insn)) == CLOBBER
1920 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1921 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1922 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1923 continue;
1924 else if (DEBUG_INSN_P (insn))
1925 for_each_rtx (&INSN_VAR_LOCATION (insn),
1926 instantiate_virtual_regs_in_rtx, NULL);
1927 else
1928 instantiate_virtual_regs_in_insn (insn);
1930 if (INSN_DELETED_P (insn))
1931 continue;
1933 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1935 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1936 if (CALL_P (insn))
1937 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1938 instantiate_virtual_regs_in_rtx, NULL);
1941 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1942 instantiate_decls (current_function_decl);
1944 targetm.instantiate_decls ();
1946 /* Indicate that, from now on, assign_stack_local should use
1947 frame_pointer_rtx. */
1948 virtuals_instantiated = 1;
1950 /* See allocate_dynamic_stack_space for the rationale. */
1951 #ifdef SETJMP_VIA_SAVE_AREA
1952 if (flag_stack_usage && cfun->calls_setjmp)
1954 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1955 dynamic_offset = (dynamic_offset + align - 1) / align * align;
1956 current_function_dynamic_stack_size
1957 += current_function_dynamic_alloc_count * dynamic_offset;
1959 #endif
1961 return 0;
1964 struct rtl_opt_pass pass_instantiate_virtual_regs =
1965 {
1966  {
1967 RTL_PASS,
1968 "vregs", /* name */
1969 NULL, /* gate */
1970 instantiate_virtual_regs, /* execute */
1971 NULL, /* sub */
1972 NULL, /* next */
1973 0, /* static_pass_number */
1974 TV_NONE, /* tv_id */
1975 0, /* properties_required */
1976 0, /* properties_provided */
1977 0, /* properties_destroyed */
1978 0, /* todo_flags_start */
1979 TODO_dump_func /* todo_flags_finish */
1980  }
1981 };
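/* A sketch of how this pass is wired in: the RTL pass list built by
   init_optimization_passes in passes.c registers it once, shortly
   after expansion, e.g.

     NEXT_PASS (pass_instantiate_virtual_regs);

   (the placement shown is approximate, not the exact neighborhood).  */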
1984 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1985 This means a type for which function calls must pass an address to the
1986 function or get an address back from the function.
1987 EXP may be a type node or an expression (whose type is tested). */
1989 int
1990 aggregate_value_p (const_tree exp, const_tree fntype)
1992 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1993 int i, regno, nregs;
1994 rtx reg;
1996 if (fntype)
1997 switch (TREE_CODE (fntype))
1999 case CALL_EXPR:
2001 tree fndecl = get_callee_fndecl (fntype);
2002 fntype = (fndecl
2003 ? TREE_TYPE (fndecl)
2004 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
2006 break;
2007 case FUNCTION_DECL:
2008 fntype = TREE_TYPE (fntype);
2009 break;
2010 case FUNCTION_TYPE:
2011 case METHOD_TYPE:
2012 break;
2013 case IDENTIFIER_NODE:
2014 fntype = NULL_TREE;
2015 break;
2016 default:
2017 /* We don't expect other tree types here. */
2018 gcc_unreachable ();
2021 if (VOID_TYPE_P (type))
2022 return 0;
2024 /* If a record should be passed the same as its first (and only) member,
2025 don't pass it as an aggregate. */
2026 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2027 return aggregate_value_p (first_field (type), fntype);
2029 /* If the front end has decided that this needs to be passed by
2030 reference, do so. */
2031 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2032 && DECL_BY_REFERENCE (exp))
2033 return 1;
2035 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2036 if (fntype && TREE_ADDRESSABLE (fntype))
2037 return 1;
2039 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2040 and thus can't be returned in registers. */
2041 if (TREE_ADDRESSABLE (type))
2042 return 1;
2044 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2045 return 1;
2047 if (targetm.calls.return_in_memory (type, fntype))
2048 return 1;
2050 /* Make sure we have suitable call-clobbered regs to return
2051 the value in; if not, we must return it in memory. */
2052 reg = hard_function_value (type, 0, fntype, 0);
2054 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2055 it is OK. */
2056 if (!REG_P (reg))
2057 return 0;
2059 regno = REGNO (reg);
2060 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2061 for (i = 0; i < nregs; i++)
2062 if (! call_used_regs[regno + i])
2063 return 1;
2065 return 0;
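/* A minimal illustration, assuming a target whose return registers
   cannot hold 16 bytes:

     struct wide { int w[4]; };
     struct wide f (void);

   Here aggregate_value_p returns 1 for f's result, so callers must
   pass a hidden address for the return slot, whereas for a plain
   "int g (void)" it returns 0 and the value comes back in a register.  */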
2068 /* Return true if we should assign DECL a pseudo register; false if it
2069 should live on the local stack. */
2071 bool
2072 use_register_for_decl (const_tree decl)
2074 if (!targetm.calls.allocate_stack_slots_for_args())
2075 return true;
2077 /* Honor volatile. */
2078 if (TREE_SIDE_EFFECTS (decl))
2079 return false;
2081 /* Honor addressability. */
2082 if (TREE_ADDRESSABLE (decl))
2083 return false;
2085 /* Only register-like things go in registers. */
2086 if (DECL_MODE (decl) == BLKmode)
2087 return false;
2089 /* If -ffloat-store specified, don't put explicit float variables
2090 into registers. */
2091 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2092 propagates values across these stores, and it probably shouldn't. */
2093 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2094 return false;
2096 /* If we're not interested in tracking debugging information for
2097 this decl, then we can certainly put it in a register. */
2098 if (DECL_IGNORED_P (decl))
2099 return true;
2101 if (optimize)
2102 return true;
2104 if (!DECL_REGISTER (decl))
2105 return false;
2107 switch (TREE_CODE (TREE_TYPE (decl)))
2109 case RECORD_TYPE:
2110 case UNION_TYPE:
2111 case QUAL_UNION_TYPE:
2112 /* When not optimizing, disregard register keyword for variables with
2113 types containing methods, otherwise the methods won't be callable
2114 from the debugger. */
2115 if (TYPE_METHODS (TREE_TYPE (decl)))
2116 return false;
2117 break;
2118 default:
2119 break;
2122 return true;
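/* Concretely: at -O0 a user variable declared "register int n;"
   satisfies DECL_REGISTER and is given a pseudo, while a plain
   "int n;" whose debug information we keep falls through to the
   stack; with optimization enabled both normally get pseudos
   (volatile, addressable and BLKmode decls excepted, as above).  */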
2125 /* Return true if TYPE should be passed by invisible reference. */
2127 bool
2128 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2129 tree type, bool named_arg)
2131 if (type)
2133 /* If this type contains non-trivial constructors, then it is
2134 forbidden for the middle-end to create any new copies. */
2135 if (TREE_ADDRESSABLE (type))
2136 return true;
2138 /* GCC post 3.4 passes *all* variable sized types by reference. */
2139 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2140 return true;
2142 /* If a record type should be passed the same as its first (and only)
2143 member, use the type and mode of that member. */
2144 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2146 type = TREE_TYPE (first_field (type));
2147 mode = TYPE_MODE (type);
2151 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
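/* Typical ways to reach "true" here: a C++ parameter whose class has
   a non-trivial copy constructor or destructor (the front end marks
   such types TREE_ADDRESSABLE, so the middle-end may not introduce
   copies of them), or any variable-sized type, whose TYPE_SIZE is
   not an INTEGER_CST.  */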
2154 /* Return true if TYPE, which is passed by reference, should be callee
2155 copied instead of caller copied. */
2157 bool
2158 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2159 tree type, bool named_arg)
2161 if (type && TREE_ADDRESSABLE (type))
2162 return false;
2163 return targetm.calls.callee_copies (ca, mode, type, named_arg);
2166 /* Structures to communicate between the subroutines of assign_parms.
2167 The first holds data persistent across all parameters, the second
2168 is cleared out for each parameter. */
2170 struct assign_parm_data_all
2172 CUMULATIVE_ARGS args_so_far;
2173 struct args_size stack_args_size;
2174 tree function_result_decl;
2175 tree orig_fnargs;
2176 rtx first_conversion_insn;
2177 rtx last_conversion_insn;
2178 HOST_WIDE_INT pretend_args_size;
2179 HOST_WIDE_INT extra_pretend_bytes;
2180 int reg_parm_stack_space;
2183 struct assign_parm_data_one
2185 tree nominal_type;
2186 tree passed_type;
2187 rtx entry_parm;
2188 rtx stack_parm;
2189 enum machine_mode nominal_mode;
2190 enum machine_mode passed_mode;
2191 enum machine_mode promoted_mode;
2192 struct locate_and_pad_arg_data locate;
2193 int partial;
2194 BOOL_BITFIELD named_arg : 1;
2195 BOOL_BITFIELD passed_pointer : 1;
2196 BOOL_BITFIELD on_stack : 1;
2197 BOOL_BITFIELD loaded_in_reg : 1;
2200 /* A subroutine of assign_parms. Initialize ALL. */
2202 static void
2203 assign_parms_initialize_all (struct assign_parm_data_all *all)
2205 tree fntype ATTRIBUTE_UNUSED;
2207 memset (all, 0, sizeof (*all));
2209 fntype = TREE_TYPE (current_function_decl);
2211 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2212 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
2213 #else
2214 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
2215 current_function_decl, -1);
2216 #endif
2218 #ifdef REG_PARM_STACK_SPACE
2219 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2220 #endif
2223 /* If ARGS contains entries with complex types, split the entry into two
2224 entries of the component type. The vector pointed to by ARGS is
2225 modified in place; no new list is returned. */
2227 static void
2228 split_complex_args (VEC(tree, heap) **args)
2230 unsigned i;
2231 tree p;
2233 FOR_EACH_VEC_ELT (tree, *args, i, p)
2235 tree type = TREE_TYPE (p);
2236 if (TREE_CODE (type) == COMPLEX_TYPE
2237 && targetm.calls.split_complex_arg (type))
2239 tree decl;
2240 tree subtype = TREE_TYPE (type);
2241 bool addressable = TREE_ADDRESSABLE (p);
2243 /* Rewrite the PARM_DECL's type with its component. */
2244 p = copy_node (p);
2245 TREE_TYPE (p) = subtype;
2246 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2247 DECL_MODE (p) = VOIDmode;
2248 DECL_SIZE (p) = NULL;
2249 DECL_SIZE_UNIT (p) = NULL;
2250 /* If this arg must go in memory, put it in a pseudo here.
2251 We can't allow it to go in memory as per normal parms,
2252 because the usual place might not have the imag part
2253 adjacent to the real part. */
2254 DECL_ARTIFICIAL (p) = addressable;
2255 DECL_IGNORED_P (p) = addressable;
2256 TREE_ADDRESSABLE (p) = 0;
2257 layout_decl (p, 0);
2258 VEC_replace (tree, *args, i, p);
2260 /* Build a second synthetic decl. */
2261 decl = build_decl (EXPR_LOCATION (p),
2262 PARM_DECL, NULL_TREE, subtype);
2263 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2264 DECL_ARTIFICIAL (decl) = addressable;
2265 DECL_IGNORED_P (decl) = addressable;
2266 layout_decl (decl, 0);
2267 VEC_safe_insert (tree, heap, *args, ++i, decl);
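/* For example, on a target whose split_complex_arg hook accepts
   complex double, a parameter declared

     void f (_Complex double z);

   is rewritten here into two DFmode PARM_DECLs: one rewritten from z
   for the real part and a second synthetic, nameless decl for the
   imaginary part.  assign_parms_unsplit_complex below glues their
   rtl back together into a CONCAT for DECL_RTL.  */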
2272 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2273 the hidden struct return argument, and (abi willing) complex args.
2274 Return the new parameter list. */
2276 static VEC(tree, heap) *
2277 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2279 tree fndecl = current_function_decl;
2280 tree fntype = TREE_TYPE (fndecl);
2281 VEC(tree, heap) *fnargs = NULL;
2282 tree arg;
2284 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2285 VEC_safe_push (tree, heap, fnargs, arg);
2287 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2289 /* If struct value address is treated as the first argument, make it so. */
2290 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2291 && ! cfun->returns_pcc_struct
2292 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2294 tree type = build_pointer_type (TREE_TYPE (fntype));
2295 tree decl;
2297 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2298 PARM_DECL, get_identifier (".result_ptr"), type);
2299 DECL_ARG_TYPE (decl) = type;
2300 DECL_ARTIFICIAL (decl) = 1;
2301 DECL_NAMELESS (decl) = 1;
2302 TREE_CONSTANT (decl) = 1;
2304 DECL_CHAIN (decl) = all->orig_fnargs;
2305 all->orig_fnargs = decl;
2306 VEC_safe_insert (tree, heap, fnargs, 0, decl);
2308 all->function_result_decl = decl;
2311 /* If the target wants to split complex arguments into scalars, do so. */
2312 if (targetm.calls.split_complex_arg)
2313 split_complex_args (&fnargs);
2315 return fnargs;
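/* So, as a sketch, for a target with no dedicated struct-value
   register, a function

     struct wide h (int x);

   whose result is returned in memory presents the augmented list

     .result_ptr, x

   to assign_parms, with the same artificial PARM_DECL prepended to
   all->orig_fnargs above.  */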
2318 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2319 data for the parameter. Incorporate ABI specifics such as pass-by-
2320 reference and type promotion. */
2322 static void
2323 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2324 struct assign_parm_data_one *data)
2326 tree nominal_type, passed_type;
2327 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2328 int unsignedp;
2330 memset (data, 0, sizeof (*data));
2332 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2333 if (!cfun->stdarg)
2334 data->named_arg = 1; /* No variadic parms. */
2335 else if (DECL_CHAIN (parm))
2336 data->named_arg = 1; /* Not the last non-variadic parm. */
2337 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2338 data->named_arg = 1; /* Only variadic ones are unnamed. */
2339 else
2340 data->named_arg = 0; /* Treat as variadic. */
2342 nominal_type = TREE_TYPE (parm);
2343 passed_type = DECL_ARG_TYPE (parm);
2345 /* Look out for errors propagating this far. Also, if the parameter's
2346 type is void then its value doesn't matter. */
2347 if (TREE_TYPE (parm) == error_mark_node
2348 /* This can happen after weird syntax errors
2349 or if an enum type is defined among the parms. */
2350 || TREE_CODE (parm) != PARM_DECL
2351 || passed_type == NULL
2352 || VOID_TYPE_P (nominal_type))
2354 nominal_type = passed_type = void_type_node;
2355 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2356 goto egress;
2359 /* Find mode of arg as it is passed, and mode of arg as it should be
2360 during execution of this function. */
2361 passed_mode = TYPE_MODE (passed_type);
2362 nominal_mode = TYPE_MODE (nominal_type);
2364 /* If the parm is to be passed as a transparent union or record, use the
2365 type of the first field for the tests below. We have already verified
2366 that the modes are the same. */
2367 if ((TREE_CODE (passed_type) == UNION_TYPE
2368 || TREE_CODE (passed_type) == RECORD_TYPE)
2369 && TYPE_TRANSPARENT_AGGR (passed_type))
2370 passed_type = TREE_TYPE (first_field (passed_type));
2372 /* See if this arg was passed by invisible reference. */
2373 if (pass_by_reference (&all->args_so_far, passed_mode,
2374 passed_type, data->named_arg))
2376 passed_type = nominal_type = build_pointer_type (passed_type);
2377 data->passed_pointer = true;
2378 passed_mode = nominal_mode = Pmode;
2381 /* Find mode as it is passed by the ABI. */
2382 unsignedp = TYPE_UNSIGNED (passed_type);
2383 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2384 TREE_TYPE (current_function_decl), 0);
2386 egress:
2387 data->nominal_type = nominal_type;
2388 data->passed_type = passed_type;
2389 data->nominal_mode = nominal_mode;
2390 data->passed_mode = passed_mode;
2391 data->promoted_mode = promoted_mode;
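/* An illustrative outcome, on a target that promotes subword
   integers to full words: for "void f (short s)" we would record

     nominal_mode  = HImode  (mode of s within the function body)
     passed_mode   = HImode  (TYPE_MODE of the declared argument type)
     promoted_mode = SImode  (mode the ABI actually transfers)

   while a by-reference aggregate collapses nominal and passed mode
   to Pmode (promoted_mode typically likewise) and sets
   passed_pointer.  */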
2394 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2396 static void
2397 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2398 struct assign_parm_data_one *data, bool no_rtl)
2400 int varargs_pretend_bytes = 0;
2402 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2403 data->promoted_mode,
2404 data->passed_type,
2405 &varargs_pretend_bytes, no_rtl);
2407 /* If the back-end has requested extra stack space, record how much is
2408 needed. Do not change pretend_args_size otherwise since it may be
2409 nonzero from an earlier partial argument. */
2410 if (varargs_pretend_bytes > 0)
2411 all->pretend_args_size = varargs_pretend_bytes;
2414 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2415 the incoming location of the current parameter. */
2417 static void
2418 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2419 struct assign_parm_data_one *data)
2421 HOST_WIDE_INT pretend_bytes = 0;
2422 rtx entry_parm;
2423 bool in_regs;
2425 if (data->promoted_mode == VOIDmode)
2427 data->entry_parm = data->stack_parm = const0_rtx;
2428 return;
2431 entry_parm = targetm.calls.function_incoming_arg (&all->args_so_far,
2432 data->promoted_mode,
2433 data->passed_type,
2434 data->named_arg);
2436 if (entry_parm == 0)
2437 data->promoted_mode = data->passed_mode;
2439 /* Determine parm's home in the stack, in case it arrives in the stack
2440 or we should pretend it did. Compute the stack position and rtx where
2441 the argument arrives and its size.
2443 There is one complexity here: If this was a parameter that would
2444 have been passed in registers, but wasn't only because it is
2445 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2446 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2447 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2448 as it was the previous time. */
2449 in_regs = entry_parm != 0;
2450 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2451 in_regs = true;
2452 #endif
2453 if (!in_regs && !data->named_arg)
2455 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2457 rtx tem;
2458 tem = targetm.calls.function_incoming_arg (&all->args_so_far,
2459 data->promoted_mode,
2460 data->passed_type, true);
2461 in_regs = tem != NULL;
2465 /* If this parameter was passed both in registers and in the stack, use
2466 the copy on the stack. */
2467 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2468 data->passed_type))
2469 entry_parm = 0;
2471 if (entry_parm)
2473 int partial;
2475 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2476 data->promoted_mode,
2477 data->passed_type,
2478 data->named_arg);
2479 data->partial = partial;
2481 /* The caller might already have allocated stack space for the
2482 register parameters. */
2483 if (partial != 0 && all->reg_parm_stack_space == 0)
2485 /* Part of this argument is passed in registers and part
2486 is passed on the stack. Ask the prologue code to extend
2487 the stack part so that we can recreate the full value.
2489 PRETEND_BYTES is the size of the registers we need to store.
2490 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2491 stack space that the prologue should allocate.
2493 Internally, gcc assumes that the argument pointer is aligned
2494 to STACK_BOUNDARY bits. This is used both for alignment
2495 optimizations (see init_emit) and to locate arguments that are
2496 aligned to more than PARM_BOUNDARY bits. We must preserve this
2497 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2498 a stack boundary. */
2500 /* We assume at most one partial arg, and it must be the first
2501 argument on the stack. */
2502 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2504 pretend_bytes = partial;
2505 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2507 /* We want to align relative to the actual stack pointer, so
2508 don't include this in the stack size until later. */
2509 all->extra_pretend_bytes = all->pretend_args_size;
2513 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2514 entry_parm ? data->partial : 0, current_function_decl,
2515 &all->stack_args_size, &data->locate);
2517 /* Update parm_stack_boundary if this parameter is passed in the
2518 stack. */
2519 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2520 crtl->parm_stack_boundary = data->locate.boundary;
2522 /* Adjust offsets to include the pretend args. */
2523 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2524 data->locate.slot_offset.constant += pretend_bytes;
2525 data->locate.offset.constant += pretend_bytes;
2527 data->entry_parm = entry_parm;
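/* Worked example of the rounding above: if the registers hold 4 bytes
   of a partially passed argument and STACK_BYTES is 8, pretend_bytes
   is 4 and pretend_args_size becomes CEIL_ROUND (4, 8) == 8, keeping
   the incoming argument pointer aligned to STACK_BOUNDARY.  */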
2530 /* A subroutine of assign_parms. If there is actually space on the stack
2531 for this parm, count it in stack_args_size and return true. */
2533 static bool
2534 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2535 struct assign_parm_data_one *data)
2537 /* Trivially true if we've no incoming register. */
2538 if (data->entry_parm == NULL)
2539 ;
2540 /* Also true if we're partially in registers and partially not,
2541 since we've arranged to drop the entire argument on the stack. */
2542 else if (data->partial != 0)
2543 ;
2544 /* Also true if the target says that it's passed in both registers
2545 and on the stack. */
2546 else if (GET_CODE (data->entry_parm) == PARALLEL
2547 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2548 ;
2549 /* Also true if the target says that there's stack allocated for
2550 all register parameters. */
2551 else if (all->reg_parm_stack_space > 0)
2552 ;
2553 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2554 else
2555 return false;
2557 all->stack_args_size.constant += data->locate.size.constant;
2558 if (data->locate.size.var)
2559 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2561 return true;
2564 /* A subroutine of assign_parms. Given that this parameter is allocated
2565 stack space by the ABI, find it. */
2567 static void
2568 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2570 rtx offset_rtx, stack_parm;
2571 unsigned int align, boundary;
2573 /* If we're passing this arg using a reg, make its stack home the
2574 aligned stack slot. */
2575 if (data->entry_parm)
2576 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2577 else
2578 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2580 stack_parm = crtl->args.internal_arg_pointer;
2581 if (offset_rtx != const0_rtx)
2582 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2583 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2585 if (!data->passed_pointer)
2587 set_mem_attributes (stack_parm, parm, 1);
2588 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2589 while promoted mode's size is needed. */
2590 if (data->promoted_mode != BLKmode
2591 && data->promoted_mode != DECL_MODE (parm))
2593 set_mem_size (stack_parm,
2594 GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
2595 if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
2597 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2598 data->promoted_mode);
2599 if (offset)
2600 set_mem_offset (stack_parm,
2601 plus_constant (MEM_OFFSET (stack_parm),
2602 -offset));
2607 boundary = data->locate.boundary;
2608 align = BITS_PER_UNIT;
2610 /* If we're padding upward, we know that the alignment of the slot
2611 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2612 intentionally forcing upward padding. Otherwise we have to come
2613 up with a guess at the alignment based on OFFSET_RTX. */
2614 if (data->locate.where_pad != downward || data->entry_parm)
2615 align = boundary;
2616 else if (CONST_INT_P (offset_rtx))
2618 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2619 align = align & -align;
2621 set_mem_align (stack_parm, align);
2623 if (data->entry_parm)
2624 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2626 data->stack_parm = stack_parm;
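/* A worked instance of the alignment guess above, taking
   BITS_PER_UNIT as 8: for a downward-padded argument with
   offset_rtx == (const_int 4) and boundary == 64,

     align = (4 * 8) | 64 == 96,  then 96 & -96 == 32

   so the slot is conservatively recorded as 32-bit aligned.  */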
2629 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2630 always valid and contiguous. */
2632 static void
2633 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2635 rtx entry_parm = data->entry_parm;
2636 rtx stack_parm = data->stack_parm;
2638 /* If this parm was passed part in regs and part in memory, pretend it
2639 arrived entirely in memory by pushing the register-part onto the stack.
2640 In the special case of a DImode or DFmode that is split, we could put
2641 it together in a pseudoreg directly, but for now that's not worth
2642 bothering with. */
2643 if (data->partial != 0)
2645 /* Handle calls that pass values in multiple non-contiguous
2646 locations. The Irix 6 ABI has examples of this. */
2647 if (GET_CODE (entry_parm) == PARALLEL)
2648 emit_group_store (validize_mem (stack_parm), entry_parm,
2649 data->passed_type,
2650 int_size_in_bytes (data->passed_type));
2651 else
2653 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2654 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2655 data->partial / UNITS_PER_WORD);
2658 entry_parm = stack_parm;
2661 /* If we didn't decide this parm came in a register, by default it came
2662 on the stack. */
2663 else if (entry_parm == NULL)
2664 entry_parm = stack_parm;
2666 /* When an argument is passed in multiple locations, we can't make use
2667 of this information, but we can save some copying if the whole argument
2668 is passed in a single register. */
2669 else if (GET_CODE (entry_parm) == PARALLEL
2670 && data->nominal_mode != BLKmode
2671 && data->passed_mode != BLKmode)
2673 size_t i, len = XVECLEN (entry_parm, 0);
2675 for (i = 0; i < len; i++)
2676 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2677 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2678 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2679 == data->passed_mode)
2680 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2682 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2683 break;
2687 data->entry_parm = entry_parm;
2690 /* A subroutine of assign_parms. Reconstitute any values which were
2691 passed in multiple registers and would fit in a single register. */
2693 static void
2694 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2696 rtx entry_parm = data->entry_parm;
2698 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2699 This can be done with register operations rather than on the
2700 stack, even if we will store the reconstituted parameter on the
2701 stack later. */
2702 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2704 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2705 emit_group_store (parmreg, entry_parm, data->passed_type,
2706 GET_MODE_SIZE (GET_MODE (entry_parm)));
2707 entry_parm = parmreg;
2710 data->entry_parm = entry_parm;
2713 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2714 always valid and properly aligned. */
2716 static void
2717 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2719 rtx stack_parm = data->stack_parm;
2721 /* If we can't trust the parm stack slot to be aligned enough for its
2722 ultimate type, don't use that slot after entry. We'll make another
2723 stack slot, if we need one. */
2724 if (stack_parm
2725 && ((STRICT_ALIGNMENT
2726 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2727 || (data->nominal_type
2728 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2729 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2730 stack_parm = NULL;
2732 /* If parm was passed in memory, and we need to convert it on entry,
2733 don't store it back in that same slot. */
2734 else if (data->entry_parm == stack_parm
2735 && data->nominal_mode != BLKmode
2736 && data->nominal_mode != data->passed_mode)
2737 stack_parm = NULL;
2739 /* If stack protection is in effect for this function, don't leave any
2740 pointers in their passed stack slots. */
2741 else if (crtl->stack_protect_guard
2742 && (flag_stack_protect == 2
2743 || data->passed_pointer
2744 || POINTER_TYPE_P (data->nominal_type)))
2745 stack_parm = NULL;
2747 data->stack_parm = stack_parm;
2750 /* A subroutine of assign_parms. Return true if the current parameter
2751 should be stored as a BLKmode in the current frame. */
2753 static bool
2754 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2756 if (data->nominal_mode == BLKmode)
2757 return true;
2758 if (GET_MODE (data->entry_parm) == BLKmode)
2759 return true;
2761 #ifdef BLOCK_REG_PADDING
2762 /* Only assign_parm_setup_block knows how to deal with register arguments
2763 that are padded at the least significant end. */
2764 if (REG_P (data->entry_parm)
2765 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2766 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2767 == (BYTES_BIG_ENDIAN ? upward : downward)))
2768 return true;
2769 #endif
2771 return false;
2774 /* A subroutine of assign_parms. Arrange for the parameter to be
2775 present and valid in DATA->STACK_RTL. */
2777 static void
2778 assign_parm_setup_block (struct assign_parm_data_all *all,
2779 tree parm, struct assign_parm_data_one *data)
2781 rtx entry_parm = data->entry_parm;
2782 rtx stack_parm = data->stack_parm;
2783 HOST_WIDE_INT size;
2784 HOST_WIDE_INT size_stored;
2786 if (GET_CODE (entry_parm) == PARALLEL)
2787 entry_parm = emit_group_move_into_temps (entry_parm);
2789 size = int_size_in_bytes (data->passed_type);
2790 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2791 if (stack_parm == 0)
2793 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2794 stack_parm = assign_stack_local (BLKmode, size_stored,
2795 DECL_ALIGN (parm));
2796 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2797 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2798 set_mem_attributes (stack_parm, parm, 1);
2801 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2802 calls that pass values in multiple non-contiguous locations. */
2803 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2805 rtx mem;
2807 /* Note that we will be storing an integral number of words.
2808 So we have to be careful to ensure that we allocate an
2809 integral number of words. We do this above when we call
2810 assign_stack_local if space was not allocated in the argument
2811 list. If it was, this will not work if PARM_BOUNDARY is not
2812 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2813 if it becomes a problem. The exception is when BLKmode arrives
2814 with arguments not conforming to word_mode. */
2816 if (data->stack_parm == 0)
2817 ;
2818 else if (GET_CODE (entry_parm) == PARALLEL)
2819 ;
2820 else
2821 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2823 mem = validize_mem (stack_parm);
2825 /* Handle values in multiple non-contiguous locations. */
2826 if (GET_CODE (entry_parm) == PARALLEL)
2828 push_to_sequence2 (all->first_conversion_insn,
2829 all->last_conversion_insn);
2830 emit_group_store (mem, entry_parm, data->passed_type, size);
2831 all->first_conversion_insn = get_insns ();
2832 all->last_conversion_insn = get_last_insn ();
2833 end_sequence ();
2836 else if (size == 0)
2837 ;
2839 /* If SIZE is that of a mode no bigger than a word, just use
2840 that mode's store operation. */
2841 else if (size <= UNITS_PER_WORD)
2843 enum machine_mode mode
2844 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2846 if (mode != BLKmode
2847 #ifdef BLOCK_REG_PADDING
2848 && (size == UNITS_PER_WORD
2849 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2850 != (BYTES_BIG_ENDIAN ? upward : downward)))
2851 #endif
2854 rtx reg;
2856 /* We are really truncating a word_mode value containing
2857 SIZE bytes into a value of mode MODE. If such an
2858 operation requires no actual instructions, we can refer
2859 to the value directly in mode MODE, otherwise we must
2860 start with the register in word_mode and explicitly
2861 convert it. */
2862 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2863 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2864 else
2866 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2867 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2869 emit_move_insn (change_address (mem, mode, 0), reg);
2872 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2873 machine must be aligned to the left before storing
2874 to memory. Note that the previous test doesn't
2875 handle all cases (e.g. SIZE == 3). */
2876 else if (size != UNITS_PER_WORD
2877 #ifdef BLOCK_REG_PADDING
2878 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2879 == downward)
2880 #else
2881 && BYTES_BIG_ENDIAN
2882 #endif
2885 rtx tem, x;
2886 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2887 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2889 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2890 build_int_cst (NULL_TREE, by),
2891 NULL_RTX, 1);
2892 tem = change_address (mem, word_mode, 0);
2893 emit_move_insn (tem, x);
2895 else
2896 move_block_from_reg (REGNO (entry_parm), mem,
2897 size_stored / UNITS_PER_WORD);
2899 else
2900 move_block_from_reg (REGNO (entry_parm), mem,
2901 size_stored / UNITS_PER_WORD);
2903 else if (data->stack_parm == 0)
2905 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2906 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2907 BLOCK_OP_NORMAL);
2908 all->first_conversion_insn = get_insns ();
2909 all->last_conversion_insn = get_last_insn ();
2910 end_sequence ();
2913 data->stack_parm = stack_parm;
2914 SET_DECL_RTL (parm, stack_parm);
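/* To make the big-endian shift above concrete (assuming
   UNITS_PER_WORD == 4 and BITS_PER_UNIT == 8): a 3-byte BLKmode
   argument arriving in a word register is shifted left by
   (4 - 3) * 8 == 8 bits, so that when the full word is stored its
   three significant bytes land at the start of the slot.  */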
2917 /* A subroutine of assign_parm_setup_reg, called through note_stores.
2918 This collects sets and clobbers of hard registers in a HARD_REG_SET,
2919 which is pointed to by DATA. */
2920 static void
2921 record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
2923 HARD_REG_SET *pset = (HARD_REG_SET *)data;
2924 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
2926 int nregs = hard_regno_nregs[REGNO (x)][GET_MODE (x)];
2927 while (nregs-- > 0)
2928 SET_HARD_REG_BIT (*pset, REGNO (x) + nregs);
2932 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2933 parameter. Get it there. Perform all ABI specified conversions. */
2935 static void
2936 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2937 struct assign_parm_data_one *data)
2939 rtx parmreg, validated_mem;
2940 rtx equiv_stack_parm;
2941 enum machine_mode promoted_nominal_mode;
2942 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2943 bool did_conversion = false;
2944 bool need_conversion, moved;
2946 /* Store the parm in a pseudoregister during the function, but we may
2947 need to do it in a wider mode. Using 2 here makes the result
2948 consistent with promote_decl_mode and thus expand_expr_real_1. */
2949 promoted_nominal_mode
2950 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
2951 TREE_TYPE (current_function_decl), 2);
2953 parmreg = gen_reg_rtx (promoted_nominal_mode);
2955 if (!DECL_ARTIFICIAL (parm))
2956 mark_user_reg (parmreg);
2958 /* If this was an item that we received a pointer to,
2959 set DECL_RTL appropriately. */
2960 if (data->passed_pointer)
2962 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2963 set_mem_attributes (x, parm, 1);
2964 SET_DECL_RTL (parm, x);
2966 else
2967 SET_DECL_RTL (parm, parmreg);
2969 assign_parm_remove_parallels (data);
2971 /* Copy the value into the register, thus bridging between
2972 assign_parm_find_data_types and expand_expr_real_1. */
2974 equiv_stack_parm = data->stack_parm;
2975 validated_mem = validize_mem (data->entry_parm);
2977 need_conversion = (data->nominal_mode != data->passed_mode
2978 || promoted_nominal_mode != data->promoted_mode);
2979 moved = false;
2981 if (need_conversion
2982 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2983 && data->nominal_mode == data->passed_mode
2984 && data->nominal_mode == GET_MODE (data->entry_parm))
2986 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2987 mode, by the caller. We now have to convert it to
2988 NOMINAL_MODE, if different. However, PARMREG may be in
2989 a different mode than NOMINAL_MODE if it is being stored
2990 promoted.
2992 If ENTRY_PARM is a hard register, it might be in a register
2993 not valid for operating in its mode (e.g., an odd-numbered
2994 register for a DFmode). In that case, moves are the only
2995 thing valid, so we can't do a convert from there. This
2996 occurs when the calling sequence allows such misaligned
2997 usages.
2999 In addition, the conversion may involve a call, which could
3000 clobber parameters which haven't been copied to pseudo
3001 registers yet.
3003 First, we try to emit an insn which performs the necessary
3004 conversion. We verify that this insn does not clobber any
3005 hard registers. */
3007 enum insn_code icode;
3008 rtx op0, op1;
3010 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3011 unsignedp);
3013 op0 = parmreg;
3014 op1 = validated_mem;
3015 if (icode != CODE_FOR_nothing
3016 && insn_data[icode].operand[0].predicate (op0, promoted_nominal_mode)
3017 && insn_data[icode].operand[1].predicate (op1, data->passed_mode))
3019 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3020 rtx insn, insns;
3021 HARD_REG_SET hardregs;
3023 start_sequence ();
3024 insn = gen_extend_insn (op0, op1, promoted_nominal_mode,
3025 data->passed_mode, unsignedp);
3026 emit_insn (insn);
3027 insns = get_insns ();
3029 moved = true;
3030 CLEAR_HARD_REG_SET (hardregs);
3031 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3033 if (INSN_P (insn))
3034 note_stores (PATTERN (insn), record_hard_reg_sets,
3035 &hardregs);
3036 if (!hard_reg_set_empty_p (hardregs))
3037 moved = false;
3040 end_sequence ();
3042 if (moved)
3044 emit_insn (insns);
3045 if (equiv_stack_parm != NULL_RTX)
3046 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3047 equiv_stack_parm);
3052 if (moved)
3053 /* Nothing to do. */
3054 ;
3055 else if (need_conversion)
3057 /* We did not have an insn to convert directly, or the sequence
3058 generated appeared unsafe. We must first copy the parm to a
3059 pseudo reg, and save the conversion until after all
3060 parameters have been moved. */
3062 int save_tree_used;
3063 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3065 emit_move_insn (tempreg, validated_mem);
3067 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3068 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3070 if (GET_CODE (tempreg) == SUBREG
3071 && GET_MODE (tempreg) == data->nominal_mode
3072 && REG_P (SUBREG_REG (tempreg))
3073 && data->nominal_mode == data->passed_mode
3074 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3075 && GET_MODE_SIZE (GET_MODE (tempreg))
3076 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3078 /* The argument is already sign/zero extended, so note it
3079 into the subreg. */
3080 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3081 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
3084 /* TREE_USED gets set erroneously during expand_assignment. */
3085 save_tree_used = TREE_USED (parm);
3086 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3087 TREE_USED (parm) = save_tree_used;
3088 all->first_conversion_insn = get_insns ();
3089 all->last_conversion_insn = get_last_insn ();
3090 end_sequence ();
3092 did_conversion = true;
3094 else
3095 emit_move_insn (parmreg, validated_mem);
3097 /* If we were passed a pointer but the actual value can safely live
3098 in a register, put it in one. */
3099 if (data->passed_pointer
3100 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3101 /* If by-reference argument was promoted, demote it. */
3102 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
3103 || use_register_for_decl (parm)))
3105 /* We can't use nominal_mode, because it will have been set to
3106 Pmode above. We must use the actual mode of the parm. */
3107 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3108 mark_user_reg (parmreg);
3110 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3112 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3113 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3115 push_to_sequence2 (all->first_conversion_insn,
3116 all->last_conversion_insn);
3117 emit_move_insn (tempreg, DECL_RTL (parm));
3118 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3119 emit_move_insn (parmreg, tempreg);
3120 all->first_conversion_insn = get_insns ();
3121 all->last_conversion_insn = get_last_insn ();
3122 end_sequence ();
3124 did_conversion = true;
3126 else
3127 emit_move_insn (parmreg, DECL_RTL (parm));
3129 SET_DECL_RTL (parm, parmreg);
3131 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3132 now the parm. */
3133 data->stack_parm = NULL;
3136 /* Mark the register as eliminable if we did no conversion and it was
3137 copied from memory at a fixed offset, and the arg pointer was not
3138 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3139 offset formed an invalid address, such memory-equivalences as we
3140 make here would screw up life analysis for it. */
3141 if (data->nominal_mode == data->passed_mode
3142 && !did_conversion
3143 && data->stack_parm != 0
3144 && MEM_P (data->stack_parm)
3145 && data->locate.offset.var == 0
3146 && reg_mentioned_p (virtual_incoming_args_rtx,
3147 XEXP (data->stack_parm, 0)))
3149 rtx linsn = get_last_insn ();
3150 rtx sinsn, set;
3152 /* Mark complex types separately. */
3153 if (GET_CODE (parmreg) == CONCAT)
3155 enum machine_mode submode
3156 = GET_MODE_INNER (GET_MODE (parmreg));
3157 int regnor = REGNO (XEXP (parmreg, 0));
3158 int regnoi = REGNO (XEXP (parmreg, 1));
3159 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3160 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3161 GET_MODE_SIZE (submode));
3163 /* Scan backwards for the set of the real and
3164 imaginary parts. */
3165 for (sinsn = linsn; sinsn != 0;
3166 sinsn = prev_nonnote_insn (sinsn))
3168 set = single_set (sinsn);
3169 if (set == 0)
3170 continue;
3172 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3173 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3174 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3175 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3178 else if ((set = single_set (linsn)) != 0
3179 && SET_DEST (set) == parmreg)
3180 set_unique_reg_note (linsn, REG_EQUIV, equiv_stack_parm);
3183 /* For pointer data type, suggest pointer register. */
3184 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3185 mark_reg_pointer (parmreg,
3186 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3189 /* A subroutine of assign_parms. Allocate stack space to hold the current
3190 parameter. Get it there. Perform all ABI specified conversions. */
3192 static void
3193 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3194 struct assign_parm_data_one *data)
3196 /* Value must be stored in the stack slot STACK_PARM during function
3197 execution. */
3198 bool to_conversion = false;
3200 assign_parm_remove_parallels (data);
3202 if (data->promoted_mode != data->nominal_mode)
3204 /* Conversion is required. */
3205 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3207 emit_move_insn (tempreg, validize_mem (data->entry_parm));
3209 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3210 to_conversion = true;
3212 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3213 TYPE_UNSIGNED (TREE_TYPE (parm)));
3215 if (data->stack_parm)
3217 int offset = subreg_lowpart_offset (data->nominal_mode,
3218 GET_MODE (data->stack_parm));
3219 /* ??? This may need a big-endian conversion on sparc64. */
3220 data->stack_parm
3221 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3222 if (offset && MEM_OFFSET (data->stack_parm))
3223 set_mem_offset (data->stack_parm,
3224 plus_constant (MEM_OFFSET (data->stack_parm),
3225 offset));
3229 if (data->entry_parm != data->stack_parm)
3231 rtx src, dest;
3233 if (data->stack_parm == 0)
3235 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3236 GET_MODE (data->entry_parm),
3237 TYPE_ALIGN (data->passed_type));
3238 data->stack_parm
3239 = assign_stack_local (GET_MODE (data->entry_parm),
3240 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3241 align);
3242 set_mem_attributes (data->stack_parm, parm, 1);
3245 dest = validize_mem (data->stack_parm);
3246 src = validize_mem (data->entry_parm);
3248 if (MEM_P (src))
3250 /* Use a block move to handle potentially misaligned entry_parm. */
3251 if (!to_conversion)
3252 push_to_sequence2 (all->first_conversion_insn,
3253 all->last_conversion_insn);
3254 to_conversion = true;
3256 emit_block_move (dest, src,
3257 GEN_INT (int_size_in_bytes (data->passed_type)),
3258 BLOCK_OP_NORMAL);
3260 else
3261 emit_move_insn (dest, src);
3264 if (to_conversion)
3266 all->first_conversion_insn = get_insns ();
3267 all->last_conversion_insn = get_last_insn ();
3268 end_sequence ();
3271 SET_DECL_RTL (parm, data->stack_parm);
3274 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3275 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3277 static void
3278 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3279 VEC(tree, heap) *fnargs)
3281 tree parm;
3282 tree orig_fnargs = all->orig_fnargs;
3283 unsigned i = 0;
3285 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3287 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3288 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3290 rtx tmp, real, imag;
3291 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3293 real = DECL_RTL (VEC_index (tree, fnargs, i));
3294 imag = DECL_RTL (VEC_index (tree, fnargs, i + 1));
3295 if (inner != GET_MODE (real))
3297 real = gen_lowpart_SUBREG (inner, real);
3298 imag = gen_lowpart_SUBREG (inner, imag);
3301 if (TREE_ADDRESSABLE (parm))
3303 rtx rmem, imem;
3304 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3305 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3306 DECL_MODE (parm),
3307 TYPE_ALIGN (TREE_TYPE (parm)));
3309 /* split_complex_arg put the real and imag parts in
3310 pseudos. Move them to memory. */
3311 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3312 set_mem_attributes (tmp, parm, 1);
3313 rmem = adjust_address_nv (tmp, inner, 0);
3314 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3315 push_to_sequence2 (all->first_conversion_insn,
3316 all->last_conversion_insn);
3317 emit_move_insn (rmem, real);
3318 emit_move_insn (imem, imag);
3319 all->first_conversion_insn = get_insns ();
3320 all->last_conversion_insn = get_last_insn ();
3321 end_sequence ();
3323 else
3324 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3325 SET_DECL_RTL (parm, tmp);
3327 real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i));
3328 imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1));
3329 if (inner != GET_MODE (real))
3331 real = gen_lowpart_SUBREG (inner, real);
3332 imag = gen_lowpart_SUBREG (inner, imag);
3334 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3335 set_decl_incoming_rtl (parm, tmp, false);
3336 i++;
3341 /* Assign RTL expressions to the function's parameters. This may involve
3342 copying them into registers and using those registers as the DECL_RTL. */
3344 static void
3345 assign_parms (tree fndecl)
3347 struct assign_parm_data_all all;
3348 tree parm;
3349 VEC(tree, heap) *fnargs;
3350 unsigned i;
3352 crtl->args.internal_arg_pointer
3353 = targetm.calls.internal_arg_pointer ();
3355 assign_parms_initialize_all (&all);
3356 fnargs = assign_parms_augmented_arg_list (&all);
3358 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
3360 struct assign_parm_data_one data;
3362 /* Extract the type of PARM; adjust it according to ABI. */
3363 assign_parm_find_data_types (&all, parm, &data);
3365 /* Early out for errors and void parameters. */
3366 if (data.passed_mode == VOIDmode)
3368 SET_DECL_RTL (parm, const0_rtx);
3369 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3370 continue;
3373 /* Estimate stack alignment from parameter alignment. */
3374 if (SUPPORTS_STACK_ALIGNMENT)
3376 unsigned int align
3377 = targetm.calls.function_arg_boundary (data.promoted_mode,
3378 data.passed_type);
3379 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3380 align);
3381 if (TYPE_ALIGN (data.nominal_type) > align)
3382 align = MINIMUM_ALIGNMENT (data.nominal_type,
3383 TYPE_MODE (data.nominal_type),
3384 TYPE_ALIGN (data.nominal_type));
3385 if (crtl->stack_alignment_estimated < align)
3387 gcc_assert (!crtl->stack_realign_processed);
3388 crtl->stack_alignment_estimated = align;
3392 if (cfun->stdarg && !DECL_CHAIN (parm))
3393 assign_parms_setup_varargs (&all, &data, false);
3395 /* Find out where the parameter arrives in this function. */
3396 assign_parm_find_entry_rtl (&all, &data);
3398 /* Find out where stack space for this parameter might be. */
3399 if (assign_parm_is_stack_parm (&all, &data))
3401 assign_parm_find_stack_rtl (parm, &data);
3402 assign_parm_adjust_entry_rtl (&data);
3405 /* Record permanently how this parm was passed. */
3406 set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
3408 /* Update info on where next arg arrives in registers. */
3409 targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
3410 data.passed_type, data.named_arg);
3412 assign_parm_adjust_stack_rtl (&data);
3414 if (assign_parm_setup_block_p (&data))
3415 assign_parm_setup_block (&all, parm, &data);
3416 else if (data.passed_pointer || use_register_for_decl (parm))
3417 assign_parm_setup_reg (&all, parm, &data);
3418 else
3419 assign_parm_setup_stack (&all, parm, &data);
3422 if (targetm.calls.split_complex_arg)
3423 assign_parms_unsplit_complex (&all, fnargs);
3425 VEC_free (tree, heap, fnargs);
3427 /* Output all parameter conversion instructions (possibly including calls)
3428 now that all parameters have been copied out of hard registers. */
3429 emit_insn (all.first_conversion_insn);
3431 /* Estimate reload stack alignment from scalar return mode. */
3432 if (SUPPORTS_STACK_ALIGNMENT)
3434 if (DECL_RESULT (fndecl))
3436 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3437 enum machine_mode mode = TYPE_MODE (type);
3439 if (mode != BLKmode
3440 && mode != VOIDmode
3441 && !AGGREGATE_TYPE_P (type))
3443 unsigned int align = GET_MODE_ALIGNMENT (mode);
3444 if (crtl->stack_alignment_estimated < align)
3446 gcc_assert (!crtl->stack_realign_processed);
3447 crtl->stack_alignment_estimated = align;
3453 /* If we are receiving a struct value address as the first argument, set up
3454 the RTL for the function result. As this might require code to convert
3455 the transmitted address to Pmode, we do this here to ensure that possible
3456 preliminary conversions of the address have been emitted already. */
3457 if (all.function_result_decl)
3459 tree result = DECL_RESULT (current_function_decl);
3460 rtx addr = DECL_RTL (all.function_result_decl);
3461 rtx x;
3463 if (DECL_BY_REFERENCE (result))
3465 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3466 x = addr;
3468 else
3470 SET_DECL_VALUE_EXPR (result,
3471 build1 (INDIRECT_REF, TREE_TYPE (result),
3472 all.function_result_decl));
3473 addr = convert_memory_address (Pmode, addr);
3474 x = gen_rtx_MEM (DECL_MODE (result), addr);
3475 set_mem_attributes (x, result, 1);
3478 DECL_HAS_VALUE_EXPR_P (result) = 1;
3480 SET_DECL_RTL (result, x);
3483 /* We have aligned all the args, so add space for the pretend args. */
3484 crtl->args.pretend_args_size = all.pretend_args_size;
3485 all.stack_args_size.constant += all.extra_pretend_bytes;
3486 crtl->args.size = all.stack_args_size.constant;
3488 /* Adjust function incoming argument size for alignment and
3489 minimum length. */
3491 #ifdef REG_PARM_STACK_SPACE
3492 crtl->args.size = MAX (crtl->args.size,
3493 REG_PARM_STACK_SPACE (fndecl));
3494 #endif
3496 crtl->args.size = CEIL_ROUND (crtl->args.size,
3497 PARM_BOUNDARY / BITS_PER_UNIT);
3499 #ifdef ARGS_GROW_DOWNWARD
3500 crtl->args.arg_offset_rtx
3501 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3502 : expand_expr (size_diffop (all.stack_args_size.var,
3503 size_int (-all.stack_args_size.constant)),
3504 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3505 #else
3506 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3507 #endif
3509 /* See how many bytes, if any, of its args a function should try to pop
3510 on return. */
3512 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3513 TREE_TYPE (fndecl),
3514 crtl->args.size);
3516 /* For a stdarg.h function, save info about
3517 regs and stack space used by the named args. */
3519 crtl->args.info = all.args_so_far;
3521 /* Set the rtx used for the function return value. Put this in its
3522 own variable so any optimizers that need this information don't have
3523 to include tree.h. Do this here so it gets done when an inlined
3524 function gets output. */
3526 crtl->return_rtx
3527 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3528 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3530 /* If scalar return value was computed in a pseudo-reg, or was a named
3531 return value that got dumped to the stack, copy that to the hard
3532 return register. */
3533 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3535 tree decl_result = DECL_RESULT (fndecl);
3536 rtx decl_rtl = DECL_RTL (decl_result);
3538 if (REG_P (decl_rtl)
3539 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3540 : DECL_REGISTER (decl_result))
3542 rtx real_decl_rtl;
3544 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3545 fndecl, true);
3546 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3547 /* The delay slot scheduler assumes that crtl->return_rtx
3548 holds the hard register containing the return value, not a
3549 temporary pseudo. */
3550 crtl->return_rtx = real_decl_rtl;
3555 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3556 For all seen types, gimplify their sizes. */
3558 static tree
3559 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3561 tree t = *tp;
3563 *walk_subtrees = 0;
3564 if (TYPE_P (t))
3566 if (POINTER_TYPE_P (t))
3567 *walk_subtrees = 1;
3568 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3569 && !TYPE_SIZES_GIMPLIFIED (t))
3571 gimplify_type_sizes (t, (gimple_seq *) data);
3572 *walk_subtrees = 1;
3576 return NULL;
3579 /* Gimplify the parameter list for current_function_decl. This involves
3580 evaluating SAVE_EXPRs of variable sized parameters and generating code
3581 to implement callee-copies reference parameters. Returns a sequence of
3582 statements to add to the beginning of the function. */
3584 gimple_seq
3585 gimplify_parameters (void)
3587 struct assign_parm_data_all all;
3588 tree parm;
3589 gimple_seq stmts = NULL;
3590 VEC(tree, heap) *fnargs;
3591 unsigned i;
3593 assign_parms_initialize_all (&all);
3594 fnargs = assign_parms_augmented_arg_list (&all);
3596 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
3598 struct assign_parm_data_one data;
3600 /* Extract the type of PARM; adjust it according to ABI. */
3601 assign_parm_find_data_types (&all, parm, &data);
3603 /* Early out for errors and void parameters. */
3604 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3605 continue;
3607 /* Update info on where next arg arrives in registers. */
3608 targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
3609 data.passed_type, data.named_arg);
3611 /* ??? Once upon a time variable_size stuffed parameter list
3612 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3613 turned out to be less than manageable in the gimple world.
3614 Now we have to hunt them down ourselves. */
3615 walk_tree_without_duplicates (&data.passed_type,
3616 gimplify_parm_type, &stmts);
3618 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3620 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3621 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3624 if (data.passed_pointer)
3626 tree type = TREE_TYPE (data.passed_type);
3627 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3628 type, data.named_arg))
3630 tree local, t;
3632 /* For constant-sized objects, this is trivial; for
3633 variable-sized objects, we have to play games. */
3634 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3635 && !(flag_stack_check == GENERIC_STACK_CHECK
3636 && compare_tree_int (DECL_SIZE_UNIT (parm),
3637 STACK_CHECK_MAX_VAR_SIZE) > 0))
3639 local = create_tmp_reg (type, get_name (parm));
3640 DECL_IGNORED_P (local) = 0;
3641 /* If PARM was addressable, move that flag over
3642 to the local copy, as its address will be taken,
3643 not the PARM's. Keep the parm's address-taken flag set,
3644 as we'll query that flag during gimplification. */
3645 if (TREE_ADDRESSABLE (parm))
3646 TREE_ADDRESSABLE (local) = 1;
3648 else
3650 tree ptr_type, addr;
3652 ptr_type = build_pointer_type (type);
3653 addr = create_tmp_reg (ptr_type, get_name (parm));
3654 DECL_IGNORED_P (addr) = 0;
3655 local = build_fold_indirect_ref (addr);
3657 t = built_in_decls[BUILT_IN_ALLOCA];
3658 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3659 /* The call has been built for a variable-sized object. */
3660 ALLOCA_FOR_VAR_P (t) = 1;
3661 t = fold_convert (ptr_type, t);
3662 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3663 gimplify_and_add (t, &stmts);
3666 gimplify_assign (local, parm, &stmts);
3668 SET_DECL_VALUE_EXPR (parm, local);
3669 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3674 VEC_free (tree, heap, fnargs);
3676 return stmts;
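/* The net effect for a fixed-size callee-copied reference parameter
   is roughly (a GIMPLE-level sketch; names invented):

     f (struct S parm)   [passed by invisible reference]
       {
         struct S local;
         local = parm;        <- the gimplify_assign above
         ... body uses parm, which DECL_VALUE_EXPR redirects
             to local ...
       }

   with the BUILT_IN_ALLOCA path taken instead when S's size is not
   a compile-time constant.  */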
3679 /* Compute the size and offset from the start of the stacked arguments for a
3680 parm passed in mode PASSED_MODE and with type TYPE.
3682 INITIAL_OFFSET_PTR points to the current offset into the stacked
3683 arguments.
3685 The starting offset and size for this parm are returned in
3686 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3687 nonzero, the offset is that of the stack slot, which is returned in
3688 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3689 padding required from the initial offset ptr to the stack slot.
3691 IN_REGS is nonzero if the argument will be passed in registers. It will
3692 never be set if REG_PARM_STACK_SPACE is not defined.
3694 FNDECL is the function in which the argument was defined.
3696 There are two types of rounding that are done. The first, controlled by
3697 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3698 argument list to be aligned to the specified boundary (in bits). This
3699 rounding affects the initial and starting offsets, but not the argument
3700 size.
3702 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3703 optionally rounds the size of the parm to PARM_BOUNDARY. The
3704 initial offset is not affected by this rounding, while the size always
3705 is and the starting offset may be. */
3707 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3708 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3709 callers pass in the total size of args so far as
3710 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
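/* Added worked example (hypothetical numbers): assume PARM_BOUNDARY is
   32 bits, TARGET_FUNCTION_ARG_BOUNDARY reports 64 bits for this
   argument, and *INITIAL_OFFSET_PTR is 4 bytes.  The first rounding
   moves the slot offset up to CEIL_ROUND (4, 8) = 8.  If the argument
   is 6 bytes and FUNCTION_ARG_PADDING says `upward', the second
   rounding grows its size to 8, so the following argument starts at
   byte offset 16.  */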
3712 void
3713 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3714 int partial, tree fndecl ATTRIBUTE_UNUSED,
3715 struct args_size *initial_offset_ptr,
3716 struct locate_and_pad_arg_data *locate)
3717 {
3718 tree sizetree;
3719 enum direction where_pad;
3720 unsigned int boundary;
3721 int reg_parm_stack_space = 0;
3722 int part_size_in_regs;
3724 #ifdef REG_PARM_STACK_SPACE
3725 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3727 /* If we have found a stack parm before we reach the end of the
3728 area reserved for registers, skip that area. */
3729 if (! in_regs)
3730 {
3731 if (reg_parm_stack_space > 0)
3732 {
3733 if (initial_offset_ptr->var)
3734 {
3735 initial_offset_ptr->var
3736 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3737 ssize_int (reg_parm_stack_space));
3738 initial_offset_ptr->constant = 0;
3739 }
3740 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3741 initial_offset_ptr->constant = reg_parm_stack_space;
3742 }
3743 }
3744 #endif /* REG_PARM_STACK_SPACE */
3746 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3748 sizetree
3749 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3750 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3751 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
3752 locate->where_pad = where_pad;
3754 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3755 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3756 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3758 locate->boundary = boundary;
3760 if (SUPPORTS_STACK_ALIGNMENT)
3761 {
3762 /* stack_alignment_estimated can't change after stack has been
3763 realigned. */
3764 if (crtl->stack_alignment_estimated < boundary)
3765 {
3766 if (!crtl->stack_realign_processed)
3767 crtl->stack_alignment_estimated = boundary;
3768 else
3769 {
3770 /* If stack is realigned and stack alignment value
3771 hasn't been finalized, it is OK not to increase
3772 stack_alignment_estimated. The bigger alignment
3773 requirement is recorded in stack_alignment_needed
3774 below. */
3775 gcc_assert (!crtl->stack_realign_finalized
3776 && crtl->stack_realign_needed);
3777 }
3778 }
3779 }
3781 /* Remember if the outgoing parameter requires extra alignment on the
3782 calling function side. */
3783 if (crtl->stack_alignment_needed < boundary)
3784 crtl->stack_alignment_needed = boundary;
3785 if (crtl->preferred_stack_boundary < boundary)
3786 crtl->preferred_stack_boundary = boundary;
3788 #ifdef ARGS_GROW_DOWNWARD
3789 locate->slot_offset.constant = -initial_offset_ptr->constant;
3790 if (initial_offset_ptr->var)
3791 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3792 initial_offset_ptr->var);
3794 {
3795 tree s2 = sizetree;
3796 if (where_pad != none
3797 && (!host_integerp (sizetree, 1)
3798 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3799 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3800 SUB_PARM_SIZE (locate->slot_offset, s2);
3801 }
3803 locate->slot_offset.constant += part_size_in_regs;
3805 if (!in_regs
3806 #ifdef REG_PARM_STACK_SPACE
3807 || REG_PARM_STACK_SPACE (fndecl) > 0
3808 #endif
3809 )
3810 pad_to_arg_alignment (&locate->slot_offset, boundary,
3811 &locate->alignment_pad);
3813 locate->size.constant = (-initial_offset_ptr->constant
3814 - locate->slot_offset.constant);
3815 if (initial_offset_ptr->var)
3816 locate->size.var = size_binop (MINUS_EXPR,
3817 size_binop (MINUS_EXPR,
3818 ssize_int (0),
3819 initial_offset_ptr->var),
3820 locate->slot_offset.var);
3822 /* Pad_below needs the pre-rounded size to know how much to pad
3823 below. */
3824 locate->offset = locate->slot_offset;
3825 if (where_pad == downward)
3826 pad_below (&locate->offset, passed_mode, sizetree);
3828 #else /* !ARGS_GROW_DOWNWARD */
3829 if (!in_regs
3830 #ifdef REG_PARM_STACK_SPACE
3831 || REG_PARM_STACK_SPACE (fndecl) > 0
3832 #endif
3833 )
3834 pad_to_arg_alignment (initial_offset_ptr, boundary,
3835 &locate->alignment_pad);
3836 locate->slot_offset = *initial_offset_ptr;
3836 locate->slot_offset = *initial_offset_ptr;
3838 #ifdef PUSH_ROUNDING
3839 if (passed_mode != BLKmode)
3840 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3841 #endif
3843 /* Pad_below needs the pre-rounded size to know how much to pad below
3844 so this must be done before rounding up. */
3845 locate->offset = locate->slot_offset;
3846 if (where_pad == downward)
3847 pad_below (&locate->offset, passed_mode, sizetree);
3849 if (where_pad != none
3850 && (!host_integerp (sizetree, 1)
3851 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3852 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3854 ADD_PARM_SIZE (locate->size, sizetree);
3856 locate->size.constant -= part_size_in_regs;
3857 #endif /* ARGS_GROW_DOWNWARD */
3859 #ifdef FUNCTION_ARG_OFFSET
3860 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3861 #endif
3864 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3865 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3867 static void
3868 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3869 struct args_size *alignment_pad)
3870 {
3871 tree save_var = NULL_TREE;
3872 HOST_WIDE_INT save_constant = 0;
3873 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3874 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3876 #ifdef SPARC_STACK_BOUNDARY_HACK
3877 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3878 the real alignment of %sp. However, when it does this, the
3879 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3880 if (SPARC_STACK_BOUNDARY_HACK)
3881 sp_offset = 0;
3882 #endif
3884 if (boundary > PARM_BOUNDARY)
3885 {
3886 save_var = offset_ptr->var;
3887 save_constant = offset_ptr->constant;
3888 }
3890 alignment_pad->var = NULL_TREE;
3891 alignment_pad->constant = 0;
3893 if (boundary > BITS_PER_UNIT)
3894 {
3895 if (offset_ptr->var)
3896 {
3897 tree sp_offset_tree = ssize_int (sp_offset);
3898 tree offset = size_binop (PLUS_EXPR,
3899 ARGS_SIZE_TREE (*offset_ptr),
3900 sp_offset_tree);
3901 #ifdef ARGS_GROW_DOWNWARD
3902 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3903 #else
3904 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3905 #endif
3907 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3908 /* ARGS_SIZE_TREE includes constant term. */
3909 offset_ptr->constant = 0;
3910 if (boundary > PARM_BOUNDARY)
3911 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3912 save_var);
3913 }
3914 else
3915 {
3916 offset_ptr->constant = -sp_offset +
3917 #ifdef ARGS_GROW_DOWNWARD
3918 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3919 #else
3920 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3921 #endif
3922 if (boundary > PARM_BOUNDARY)
3923 alignment_pad->constant = offset_ptr->constant - save_constant;
3924 }
3925 }
3926 }
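/* Added numeric sketch (hypothetical values): with STACK_POINTER_OFFSET
   of 4, a 128-bit BOUNDARY and a constant offset of 20, the upward case
   computes -4 + CEIL_ROUND (20 + 4, 16) = 28; the padded offset's
   distance from the true stack pointer, 28 + 4 = 32, is then 16-byte
   aligned as required.  */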
3928 static void
3929 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3930 {
3931 if (passed_mode != BLKmode)
3932 {
3933 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3934 offset_ptr->constant
3935 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3936 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3937 - GET_MODE_SIZE (passed_mode));
3938 }
3939 else
3940 {
3941 if (TREE_CODE (sizetree) != INTEGER_CST
3942 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3943 {
3944 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3945 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3946 /* Add it in. */
3947 ADD_PARM_SIZE (*offset_ptr, s2);
3948 SUB_PARM_SIZE (*offset_ptr, sizetree);
3949 }
3950 }
3951 }
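/* Added example: for a 4-byte SImode argument on a hypothetical target
   with PARM_BOUNDARY of 64, the non-BLKmode case above adds
   64/8 - 4 = 4 bytes below the value, so the value sits in the upper
   half of its 8-byte slot.  */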
3954 /* True if register REGNO was alive at a place where `setjmp' was
3955 called and was set more than once or is an argument. Such regs may
3956 be clobbered by `longjmp'. */
3958 static bool
3959 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3960 {
3961 /* There appear to be cases where some local vars never reach the
3962 backend but have bogus regnos. */
3963 if (regno >= max_reg_num ())
3964 return false;
3966 return ((REG_N_SETS (regno) > 1
3967 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3968 && REGNO_REG_SET_P (setjmp_crosses, regno));
3969 }
3971 /* Walk the tree of blocks describing the binding levels within a
3972 function and warn about variables that might be killed by setjmp or
3973 vfork. This is done after calling flow analysis and before register
3974 allocation, since register allocation will map the pseudo-regs to
3975 hard regs. */
3977 static void
3978 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3980 tree decl, sub;
3982 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
3984 if (TREE_CODE (decl) == VAR_DECL
3985 && DECL_RTL_SET_P (decl)
3986 && REG_P (DECL_RTL (decl))
3987 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3988 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3989 " %<longjmp%> or %<vfork%>", decl);
3992 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3993 setjmp_vars_warning (setjmp_crosses, sub);
3996 /* Do the appropriate part of setjmp_vars_warning
3997 but for arguments instead of local variables. */
3999 static void
4000 setjmp_args_warning (bitmap setjmp_crosses)
4002 tree decl;
4003 for (decl = DECL_ARGUMENTS (current_function_decl);
4004 decl; decl = DECL_CHAIN (decl))
4005 if (DECL_RTL (decl) != 0
4006 && REG_P (DECL_RTL (decl))
4007 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4008 warning (OPT_Wclobbered,
4009 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4010 decl);
4013 /* Generate warning messages for variables live across setjmp. */
4015 void
4016 generate_setjmp_warnings (void)
4017 {
4018 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4020 if (n_basic_blocks == NUM_FIXED_BLOCKS
4021 || bitmap_empty_p (setjmp_crosses))
4022 return;
4024 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4025 setjmp_args_warning (setjmp_crosses);
4026 }
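/* Added user-level illustration: in

       int f (int arg) { jmp_buf env; if (setjmp (env)) return arg; arg++; ... }

   ARG lives in a pseudo that is set again after the setjmp and is live
   across it, so the calls above issue "argument 'arg' might be
   clobbered by 'longjmp' or 'vfork'" under -Wclobbered.  */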
4029 /* Reverse the order of elements in the fragment chain T of blocks,
4030 and return the new head of the chain (old last element). */
4032 static tree
4033 block_fragments_nreverse (tree t)
4034 {
4035 tree prev = 0, block, next;
4036 for (block = t; block; block = next)
4037 {
4038 next = BLOCK_FRAGMENT_CHAIN (block);
4039 BLOCK_FRAGMENT_CHAIN (block) = prev;
4040 prev = block;
4041 }
4042 return prev;
4043 }
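/* Added note: this is the classic in-place list reversal; a chain
   A -> B -> C comes back as C -> B -> A with no allocation, which is
   safe because each fragment chain is singly linked and owned by its
   origin block.  */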
4045 /* Reverse the order of elements in the chain T of blocks,
4046 and return the new head of the chain (old last element).
4047 Also do the same on subblocks and reverse the order of elements
4048 in BLOCK_FRAGMENT_CHAIN as well. */
4050 static tree
4051 blocks_nreverse_all (tree t)
4053 tree prev = 0, block, next;
4054 for (block = t; block; block = next)
4056 next = BLOCK_CHAIN (block);
4057 BLOCK_CHAIN (block) = prev;
4058 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4059 if (BLOCK_FRAGMENT_CHAIN (block)
4060 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4061 BLOCK_FRAGMENT_CHAIN (block)
4062 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4063 prev = block;
4065 return prev;
4069 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4070 and create duplicate blocks. */
4071 /* ??? Need an option to either create block fragments or to create
4072 abstract origin duplicates of a source block. It really depends
4073 on what optimization has been performed. */
4075 void
4076 reorder_blocks (void)
4078 tree block = DECL_INITIAL (current_function_decl);
4079 VEC(tree,heap) *block_stack;
4081 if (block == NULL_TREE)
4082 return;
4084 block_stack = VEC_alloc (tree, heap, 10);
4086 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4087 clear_block_marks (block);
4089 /* Prune the old trees away, so that they don't get in the way. */
4090 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4091 BLOCK_CHAIN (block) = NULL_TREE;
4093 /* Recreate the block tree from the note nesting. */
4094 reorder_blocks_1 (get_insns (), block, &block_stack);
4095 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4097 VEC_free (tree, heap, block_stack);
4100 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4102 void
4103 clear_block_marks (tree block)
4105 while (block)
4107 TREE_ASM_WRITTEN (block) = 0;
4108 clear_block_marks (BLOCK_SUBBLOCKS (block));
4109 block = BLOCK_CHAIN (block);
4113 static void
4114 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
4116 rtx insn;
4118 for (insn = insns; insn; insn = NEXT_INSN (insn))
4120 if (NOTE_P (insn))
4122 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4124 tree block = NOTE_BLOCK (insn);
4125 tree origin;
4127 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4128 origin = block;
4130 /* If we have seen this block before, that means it now
4131 spans multiple address regions. Create a new fragment. */
4132 if (TREE_ASM_WRITTEN (block))
4134 tree new_block = copy_node (block);
4136 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4137 BLOCK_FRAGMENT_CHAIN (new_block)
4138 = BLOCK_FRAGMENT_CHAIN (origin);
4139 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4141 NOTE_BLOCK (insn) = new_block;
4142 block = new_block;
4145 BLOCK_SUBBLOCKS (block) = 0;
4146 TREE_ASM_WRITTEN (block) = 1;
4147 /* When there's only one block for the entire function,
4148 current_block == block, and we mustn't do this; it
4149 would cause infinite recursion. */
4150 if (block != current_block)
4152 if (block != origin)
4153 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
4155 BLOCK_SUPERCONTEXT (block) = current_block;
4156 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4157 BLOCK_SUBBLOCKS (current_block) = block;
4158 current_block = origin;
4160 VEC_safe_push (tree, heap, *p_block_stack, block);
4162 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4164 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
4165 current_block = BLOCK_SUPERCONTEXT (current_block);
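/* Added note: the fragment handling above is what lets a lexical BLOCK
   that ends up covering discontiguous insn ranges (for example after
   code was moved by scheduling or hot/cold partitioning) be described
   in debug info: every extra range becomes a copied BLOCK linked
   through BLOCK_FRAGMENT_CHAIN to its BLOCK_FRAGMENT_ORIGIN.  */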
4171 /* Reverse the order of elements in the chain T of blocks,
4172 and return the new head of the chain (old last element). */
4174 tree
4175 blocks_nreverse (tree t)
4177 tree prev = 0, block, next;
4178 for (block = t; block; block = next)
4180 next = BLOCK_CHAIN (block);
4181 BLOCK_CHAIN (block) = prev;
4182 prev = block;
4184 return prev;
4187 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4188 non-NULL, list them all into VECTOR, in a depth-first preorder
4189 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4190 blocks. */
4192 static int
4193 all_blocks (tree block, tree *vector)
4195 int n_blocks = 0;
4197 while (block)
4199 TREE_ASM_WRITTEN (block) = 0;
4201 /* Record this block. */
4202 if (vector)
4203 vector[n_blocks] = block;
4205 ++n_blocks;
4207 /* Record the subblocks, and their subblocks... */
4208 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4209 vector ? vector + n_blocks : 0);
4210 block = BLOCK_CHAIN (block);
4213 return n_blocks;
4216 /* Return a vector containing all the blocks rooted at BLOCK. The
4217 number of elements in the vector is stored in N_BLOCKS_P. The
4218 vector is dynamically allocated; it is the caller's responsibility
4219 to call `free' on the pointer returned. */
4221 static tree *
4222 get_block_vector (tree block, int *n_blocks_p)
4224 tree *block_vector;
4226 *n_blocks_p = all_blocks (block, NULL);
4227 block_vector = XNEWVEC (tree, *n_blocks_p);
4228 all_blocks (block, block_vector);
4230 return block_vector;
4233 static GTY(()) int next_block_index = 2;
4235 /* Set BLOCK_NUMBER for all the blocks in FN. */
4237 void
4238 number_blocks (tree fn)
4240 int i;
4241 int n_blocks;
4242 tree *block_vector;
4244 /* For SDB and XCOFF debugging output, we start numbering the blocks
4245 from 1 within each function, rather than keeping a running
4246 count. */
4247 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4248 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4249 next_block_index = 1;
4250 #endif
4252 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4254 /* The top-level BLOCK isn't numbered at all. */
4255 for (i = 1; i < n_blocks; ++i)
4256 /* We number the blocks from two. */
4257 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4259 free (block_vector);
4261 return;
4264 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4266 DEBUG_FUNCTION tree
4267 debug_find_var_in_block_tree (tree var, tree block)
4269 tree t;
4271 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4272 if (t == var)
4273 return block;
4275 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4277 tree ret = debug_find_var_in_block_tree (var, t);
4278 if (ret)
4279 return ret;
4282 return NULL_TREE;
4285 /* Keep track of whether we're in a dummy function context. If we are,
4286 we don't want to invoke the set_current_function hook, because we'll
4287 get into trouble if the hook calls target_reinit () recursively or
4288 when the initial initialization is not yet complete. */
4290 static bool in_dummy_function;
4292 /* Invoke the target hook when setting cfun. Update the optimization options
4293 if the function uses different options than the default. */
4295 static void
4296 invoke_set_current_function_hook (tree fndecl)
4298 if (!in_dummy_function)
4300 tree opts = ((fndecl)
4301 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4302 : optimization_default_node);
4304 if (!opts)
4305 opts = optimization_default_node;
4307 /* Change optimization options if needed. */
4308 if (optimization_current_node != opts)
4310 optimization_current_node = opts;
4311 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4314 targetm.set_current_function (fndecl);
4318 /* cfun should never be set directly; use this function. */
4320 void
4321 set_cfun (struct function *new_cfun)
4323 if (cfun != new_cfun)
4325 cfun = new_cfun;
4326 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4330 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4332 static VEC(function_p,heap) *cfun_stack;
4334 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
4336 void
4337 push_cfun (struct function *new_cfun)
4339 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4340 set_cfun (new_cfun);
4343 /* Pop cfun from the stack. */
4345 void
4346 pop_cfun (void)
4347 {
4348 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
4349 set_cfun (new_cfun);
4350 }
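/* Added usage sketch (OTHER_FNDECL is a hypothetical FUNCTION_DECL):

       push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
       ... work that consults or modifies cfun-dependent state ...
       pop_cfun ();

   Balancing every push with a pop keeps cfun correct even when such
   regions nest.  */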
4352 /* Return the current funcdef number and increment it. */
4353 int
4354 get_next_funcdef_no (void)
4355 {
4356 return funcdef_no++;
4357 }
4359 /* Allocate a function structure for FNDECL and set its contents
4360 to the defaults. Set cfun to the newly-allocated object.
4361 Some of the helper functions invoked during initialization assume
4362 that cfun has already been set. Therefore, assign the new object
4363 directly into cfun and invoke the back end hook explicitly at the
4364 very end, rather than initializing a temporary and calling set_cfun
4365 on it.
4367 ABSTRACT_P is true if this is a function that will never be seen by
4368 the middle-end. Such functions are front-end concepts (like C++
4369 function templates) that do not correspond directly to functions
4370 placed in object files. */
4372 void
4373 allocate_struct_function (tree fndecl, bool abstract_p)
4375 tree result;
4376 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4378 cfun = ggc_alloc_cleared_function ();
4380 init_eh_for_function ();
4382 if (init_machine_status)
4383 cfun->machine = (*init_machine_status) ();
4385 #ifdef OVERRIDE_ABI_FORMAT
4386 OVERRIDE_ABI_FORMAT (fndecl);
4387 #endif
4389 invoke_set_current_function_hook (fndecl);
4391 if (fndecl != NULL_TREE)
4393 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4394 cfun->decl = fndecl;
4395 current_function_funcdef_no = get_next_funcdef_no ();
4397 result = DECL_RESULT (fndecl);
4398 if (!abstract_p && aggregate_value_p (result, fndecl))
4400 #ifdef PCC_STATIC_STRUCT_RETURN
4401 cfun->returns_pcc_struct = 1;
4402 #endif
4403 cfun->returns_struct = 1;
4406 cfun->stdarg = stdarg_p (fntype);
4408 /* Assume all registers in stdarg functions need to be saved. */
4409 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4410 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4412 /* ??? This could be set on a per-function basis by the front-end
4413 but is this worth the hassle? */
4414 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4418 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4419 instead of just setting it. */
4421 void
4422 push_struct_function (tree fndecl)
4424 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4425 allocate_struct_function (fndecl, false);
4428 /* Reset crtl and other non-struct-function variables to defaults as
4429 appropriate for emitting rtl at the start of a function. */
4431 static void
4432 prepare_function_start (void)
4434 gcc_assert (!crtl->emit.x_last_insn);
4435 init_temp_slots ();
4436 init_emit ();
4437 init_varasm_status ();
4438 init_expr ();
4439 default_rtl_profile ();
4441 if (flag_stack_usage)
4443 cfun->su = ggc_alloc_cleared_stack_usage ();
4444 cfun->su->static_stack_size = -1;
4447 cse_not_expected = ! optimize;
4449 /* Caller save not needed yet. */
4450 caller_save_needed = 0;
4452 /* We haven't done register allocation yet. */
4453 reg_renumber = 0;
4455 /* Indicate that we have not instantiated virtual registers yet. */
4456 virtuals_instantiated = 0;
4458 /* Indicate that we want CONCATs now. */
4459 generating_concat_p = 1;
4461 /* Indicate we have no need of a frame pointer yet. */
4462 frame_pointer_needed = 0;
4465 /* Initialize the rtl expansion mechanism so that we can do simple things
4466 like generate sequences. This is used to provide a context during global
4467 initialization of some passes. You must call expand_dummy_function_end
4468 to exit this context. */
4470 void
4471 init_dummy_function_start (void)
4473 gcc_assert (!in_dummy_function);
4474 in_dummy_function = true;
4475 push_struct_function (NULL_TREE);
4476 prepare_function_start ();
4479 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4480 and initialize static variables for generating RTL for the statements
4481 of the function. */
4483 void
4484 init_function_start (tree subr)
4486 if (subr && DECL_STRUCT_FUNCTION (subr))
4487 set_cfun (DECL_STRUCT_FUNCTION (subr));
4488 else
4489 allocate_struct_function (subr, false);
4490 prepare_function_start ();
4492 /* Warn if this value is an aggregate type,
4493 regardless of which calling convention we are using for it. */
4494 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4495 warning (OPT_Waggregate_return, "function returns an aggregate");
4498 /* Make sure all values used by the optimization passes have sane defaults. */
4499 unsigned int
4500 init_function_for_compilation (void)
4501 {
4502 reg_renumber = 0;
4503 return 0;
4504 }
4506 struct rtl_opt_pass pass_init_function =
4507 {
4508 {
4509 RTL_PASS,
4510 "*init_function", /* name */
4511 NULL, /* gate */
4512 init_function_for_compilation, /* execute */
4513 NULL, /* sub */
4514 NULL, /* next */
4515 0, /* static_pass_number */
4516 TV_NONE, /* tv_id */
4517 0, /* properties_required */
4518 0, /* properties_provided */
4519 0, /* properties_destroyed */
4520 0, /* todo_flags_start */
4521 0 /* todo_flags_finish */
4522 }
4523 };
4526 void
4527 expand_main_function (void)
4528 {
4529 #if (defined(INVOKE__main) \
4530 || (!defined(HAS_INIT_SECTION) \
4531 && !defined(INIT_SECTION_ASM_OP) \
4532 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4533 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4534 #endif
4536 }
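/* Added note: on targets that lack an init-section mechanism, the
   __main library routine called here is what runs the static
   constructors before the body of main; NAME__MAIN supplies the symbol
   actually referenced.  */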
4537 /* Expand code to initialize the stack_protect_guard. This is invoked at
4538 the beginning of a function to be protected. */
4540 #ifndef HAVE_stack_protect_set
4541 # define HAVE_stack_protect_set 0
4542 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4543 #endif
4545 void
4546 stack_protect_prologue (void)
4547 {
4548 tree guard_decl = targetm.stack_protect_guard ();
4549 rtx x, y;
4551 x = expand_normal (crtl->stack_protect_guard);
4552 y = expand_normal (guard_decl);
4554 /* Allow the target to copy from Y to X without leaking Y into a
4555 register. */
4556 if (HAVE_stack_protect_set)
4557 {
4558 rtx insn = gen_stack_protect_set (x, y);
4559 if (insn)
4560 {
4561 emit_insn (insn);
4562 return;
4563 }
4564 }
4566 /* Otherwise do a straight move. */
4567 emit_move_insn (x, y);
4568 }
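/* Added note: the fallback move above may be split into a load of the
   guard into a scratch register followed by a store, briefly leaving
   the guard value live in a register; the named stack_protect_set
   pattern exists so that targets can instead emit a memory-to-memory
   copy that never exposes the guard.  */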
4570 /* Expand code to verify the stack_protect_guard. This is invoked at
4571 the end of a function to be protected. */
4573 #ifndef HAVE_stack_protect_test
4574 # define HAVE_stack_protect_test 0
4575 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4576 #endif
4578 void
4579 stack_protect_epilogue (void)
4581 tree guard_decl = targetm.stack_protect_guard ();
4582 rtx label = gen_label_rtx ();
4583 rtx x, y, tmp;
4585 x = expand_normal (crtl->stack_protect_guard);
4586 y = expand_normal (guard_decl);
4588 /* Allow the target to compare Y with X without leaking either into
4589 a register. */
4590 switch (HAVE_stack_protect_test != 0)
4591 {
4592 case 1:
4593 tmp = gen_stack_protect_test (x, y, label);
4594 if (tmp)
4595 {
4596 emit_insn (tmp);
4597 break;
4598 }
4599 /* FALLTHRU */
4601 default:
4602 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4603 break;
4604 }
4606 /* The noreturn predictor has been moved to the tree level. The rtl-level
4607 predictors estimate this branch about 20%, which isn't enough to get
4608 things moved out of line. Since this is the only extant case of adding
4609 a noreturn function at the rtl level, it doesn't seem worth doing anything
4610 except adding the prediction by hand. */
4611 tmp = get_last_insn ();
4612 if (JUMP_P (tmp))
4613 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4615 expand_expr_stmt (targetm.stack_protect_fail ());
4616 emit_label (label);
4619 /* Start the RTL for a new function, and set variables used for
4620 emitting RTL.
4621 SUBR is the FUNCTION_DECL node. */
4625 void
4626 expand_function_start (tree subr)
4627 {
4628 /* Make sure volatile mem refs aren't considered
4629 valid operands of arithmetic insns. */
4630 init_recog_no_volatile ();
4632 crtl->profile
4633 = (profile_flag
4634 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4636 crtl->limit_stack
4637 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4639 /* Make the label for return statements to jump to. Do not special
4640 case machines with special return instructions -- they will be
4641 handled later during jump, ifcvt, or epilogue creation. */
4642 return_label = gen_label_rtx ();
4644 /* Initialize rtx used to return the value. */
4645 /* Do this before assign_parms so that we copy the struct value address
4646 before any library calls that assign parms might generate. */
4648 /* Decide whether to return the value in memory or in a register. */
4649 if (aggregate_value_p (DECL_RESULT (subr), subr))
4651 /* Returning something that won't go in a register. */
4652 rtx value_address = 0;
4654 #ifdef PCC_STATIC_STRUCT_RETURN
4655 if (cfun->returns_pcc_struct)
4657 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4658 value_address = assemble_static_space (size);
4660 else
4661 #endif
4663 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4664 /* Expect to be passed the address of a place to store the value.
4665 If it is passed as an argument, assign_parms will take care of
4666 it. */
4667 if (sv)
4669 value_address = gen_reg_rtx (Pmode);
4670 emit_move_insn (value_address, sv);
4673 if (value_address)
4675 rtx x = value_address;
4676 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4678 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4679 set_mem_attributes (x, DECL_RESULT (subr), 1);
4681 SET_DECL_RTL (DECL_RESULT (subr), x);
4684 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4685 /* If return mode is void, this decl rtl should not be used. */
4686 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4687 else
4689 /* Compute the return values into a pseudo reg, which we will copy
4690 into the true return register after the cleanups are done. */
4691 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4692 if (TYPE_MODE (return_type) != BLKmode
4693 && targetm.calls.return_in_msb (return_type))
4694 /* expand_function_end will insert the appropriate padding in
4695 this case. Use the return value's natural (unpadded) mode
4696 within the function proper. */
4697 SET_DECL_RTL (DECL_RESULT (subr),
4698 gen_reg_rtx (TYPE_MODE (return_type)));
4699 else
4701 /* In order to figure out what mode to use for the pseudo, we
4702 figure out what the mode of the eventual return register will
4703 actually be, and use that. */
4704 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4706 /* Structures that are returned in registers are not
4707 aggregate_value_p, so we may see a PARALLEL or a REG. */
4708 if (REG_P (hard_reg))
4709 SET_DECL_RTL (DECL_RESULT (subr),
4710 gen_reg_rtx (GET_MODE (hard_reg)));
4711 else
4713 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4714 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4718 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4719 result to the real return register(s). */
4720 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4723 /* Initialize rtx for parameters and local variables.
4724 In some cases this requires emitting insns. */
4725 assign_parms (subr);
4727 /* If function gets a static chain arg, store it. */
4728 if (cfun->static_chain_decl)
4730 tree parm = cfun->static_chain_decl;
4731 rtx local, chain, insn;
4733 local = gen_reg_rtx (Pmode);
4734 chain = targetm.calls.static_chain (current_function_decl, true);
4736 set_decl_incoming_rtl (parm, chain, false);
4737 SET_DECL_RTL (parm, local);
4738 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4740 insn = emit_move_insn (local, chain);
4742 /* Mark the register as eliminable, similar to parameters. */
4743 if (MEM_P (chain)
4744 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
4745 set_unique_reg_note (insn, REG_EQUIV, chain);
4748 /* If the function receives a non-local goto, then store the
4749 bits we need to restore the frame pointer. */
4750 if (cfun->nonlocal_goto_save_area)
4752 tree t_save;
4753 rtx r_save;
4755 /* ??? We need to do this save early. Unfortunately, this point is
4756 before the frame variable gets declared. Help out... */
4757 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4758 if (!DECL_RTL_SET_P (var))
4759 expand_decl (var);
4761 t_save = build4 (ARRAY_REF, ptr_type_node,
4762 cfun->nonlocal_goto_save_area,
4763 integer_zero_node, NULL_TREE, NULL_TREE);
4764 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4765 r_save = convert_memory_address (Pmode, r_save);
4767 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4768 update_nonlocal_goto_save_area ();
4771 /* The following was moved from init_function_start.
4772 The move is supposed to make sdb output more accurate. */
4773 /* Indicate the beginning of the function body,
4774 as opposed to parm setup. */
4775 emit_note (NOTE_INSN_FUNCTION_BEG);
4777 gcc_assert (NOTE_P (get_last_insn ()));
4779 parm_birth_insn = get_last_insn ();
4781 if (crtl->profile)
4783 #ifdef PROFILE_HOOK
4784 PROFILE_HOOK (current_function_funcdef_no);
4785 #endif
4788 /* The stack checking probe should go right after the display
4789 initializations. */
4790 if (flag_stack_check)
4791 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4793 /* Make sure there is a line number after the function entry setup code. */
4794 force_next_line_note ();
4797 /* Undo the effects of init_dummy_function_start. */
4798 void
4799 expand_dummy_function_end (void)
4801 gcc_assert (in_dummy_function);
4803 /* End any sequences that failed to be closed due to syntax errors. */
4804 while (in_sequence_p ())
4805 end_sequence ();
4807 /* Outside function body, can't compute type's actual size
4808 until next function's body starts. */
4810 free_after_parsing (cfun);
4811 free_after_compilation (cfun);
4812 pop_cfun ();
4813 in_dummy_function = false;
4816 /* Call DOIT for each hard register used as a return value from
4817 the current function. */
4819 void
4820 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4822 rtx outgoing = crtl->return_rtx;
4824 if (! outgoing)
4825 return;
4827 if (REG_P (outgoing))
4828 (*doit) (outgoing, arg);
4829 else if (GET_CODE (outgoing) == PARALLEL)
4831 int i;
4833 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4835 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4837 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4838 (*doit) (x, arg);
4843 static void
4844 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4846 emit_clobber (reg);
4849 void
4850 clobber_return_register (void)
4852 diddle_return_value (do_clobber_return_reg, NULL);
4854 /* In case we do use pseudo to return value, clobber it too. */
4855 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4857 tree decl_result = DECL_RESULT (current_function_decl);
4858 rtx decl_rtl = DECL_RTL (decl_result);
4859 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4861 do_clobber_return_reg (decl_rtl, NULL);
4866 static void
4867 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4869 emit_use (reg);
4872 static void
4873 use_return_register (void)
4875 diddle_return_value (do_use_return_reg, NULL);
4878 /* Possibly warn about unused parameters. */
4879 void
4880 do_warn_unused_parameter (tree fn)
4881 {
4882 tree decl;
4884 for (decl = DECL_ARGUMENTS (fn);
4885 decl; decl = DECL_CHAIN (decl))
4886 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4887 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4888 && !TREE_NO_WARNING (decl))
4889 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4890 }
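/* Added illustration: compiling

       int f (int x) { return 0; }

   with -Wunused-parameter produces "unused parameter 'x'".  Declaring X
   with __attribute__ ((unused)) sets TREE_USED and suppresses the
   warning, and unnamed parameters are skipped by the DECL_NAME check
   above.  */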
4892 static GTY(()) rtx initial_trampoline;
4894 /* Generate RTL for the end of the current function. */
4896 void
4897 expand_function_end (void)
4899 rtx clobber_after;
4901 /* If arg_pointer_save_area was referenced only from a nested
4902 function, we will not have initialized it yet. Do that now. */
4903 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4904 get_arg_pointer_save_area ();
4906 /* If we are doing generic stack checking and this function makes calls,
4907 do a stack probe at the start of the function to ensure we have enough
4908 space for another stack frame. */
4909 if (flag_stack_check == GENERIC_STACK_CHECK)
4911 rtx insn, seq;
4913 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4914 if (CALL_P (insn))
4916 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
4917 start_sequence ();
4918 if (STACK_CHECK_MOVING_SP)
4919 anti_adjust_stack_and_probe (max_frame_size, true);
4920 else
4921 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
4922 seq = get_insns ();
4923 end_sequence ();
4924 set_insn_locators (seq, prologue_locator);
4925 emit_insn_before (seq, stack_check_probe_note);
4926 break;
4930 /* End any sequences that failed to be closed due to syntax errors. */
4931 while (in_sequence_p ())
4932 end_sequence ();
4934 clear_pending_stack_adjust ();
4935 do_pending_stack_adjust ();
4937 /* Output a line number for the end of the function.
4938 SDB depends on this. */
4939 force_next_line_note ();
4940 set_curr_insn_source_location (input_location);
4942 /* Before the return label (if any), clobber the return
4943 registers so that they are not propagated live to the rest of
4944 the function. This can only happen with functions that drop
4945 through; if there had been a return statement, there would
4946 have either been a return rtx, or a jump to the return label.
4948 We delay actual code generation after the current_function_value_rtx
4949 is computed. */
4950 clobber_after = get_last_insn ();
4952 /* Output the label for the actual return from the function. */
4953 emit_label (return_label);
4955 if (targetm.except_unwind_info (&global_options) == UI_SJLJ)
4957 /* Let except.c know where it should emit the call to unregister
4958 the function context for sjlj exceptions. */
4959 if (flag_exceptions)
4960 sjlj_emit_function_exit_after (get_last_insn ());
4962 else
4964 /* We want to ensure that instructions that may trap are not
4965 moved into the epilogue by scheduling, because we don't
4966 always emit unwind information for the epilogue. */
4967 if (cfun->can_throw_non_call_exceptions)
4968 emit_insn (gen_blockage ());
4971 /* If this is an implementation of throw, do what's necessary to
4972 communicate between __builtin_eh_return and the epilogue. */
4973 expand_eh_return ();
4975 /* If scalar return value was computed in a pseudo-reg, or was a named
4976 return value that got dumped to the stack, copy that to the hard
4977 return register. */
4978 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4980 tree decl_result = DECL_RESULT (current_function_decl);
4981 rtx decl_rtl = DECL_RTL (decl_result);
4983 if (REG_P (decl_rtl)
4984 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4985 : DECL_REGISTER (decl_result))
4987 rtx real_decl_rtl = crtl->return_rtx;
4989 /* This should be set in assign_parms. */
4990 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4992 /* If this is a BLKmode structure being returned in registers,
4993 then use the mode computed in expand_return. Note that if
4994 decl_rtl is memory, then its mode may have been changed,
4995 but that crtl->return_rtx has not. */
4996 if (GET_MODE (real_decl_rtl) == BLKmode)
4997 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4999 /* If a non-BLKmode return value should be padded at the least
5000 significant end of the register, shift it left by the appropriate
5001 amount. BLKmode results are handled using the group load/store
5002 machinery. */
5003 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5004 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5006 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5007 REGNO (real_decl_rtl)),
5008 decl_rtl);
5009 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5011 /* If a named return value dumped decl_return to memory, then
5012 we may need to re-do the PROMOTE_MODE signed/unsigned
5013 extension. */
5014 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5016 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5017 promote_function_mode (TREE_TYPE (decl_result),
5018 GET_MODE (decl_rtl), &unsignedp,
5019 TREE_TYPE (current_function_decl), 1);
5021 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5023 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5025 /* If expand_function_start has created a PARALLEL for decl_rtl,
5026 move the result to the real return registers. Otherwise, do
5027 a group load from decl_rtl for a named return. */
5028 if (GET_CODE (decl_rtl) == PARALLEL)
5029 emit_group_move (real_decl_rtl, decl_rtl);
5030 else
5031 emit_group_load (real_decl_rtl, decl_rtl,
5032 TREE_TYPE (decl_result),
5033 int_size_in_bytes (TREE_TYPE (decl_result)));
5035 /* In the case of complex integer modes smaller than a word, we'll
5036 need to generate some non-trivial bitfield insertions. Do that
5037 on a pseudo and not the hard register. */
5038 else if (GET_CODE (decl_rtl) == CONCAT
5039 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5040 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5042 int old_generating_concat_p;
5043 rtx tmp;
5045 old_generating_concat_p = generating_concat_p;
5046 generating_concat_p = 0;
5047 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5048 generating_concat_p = old_generating_concat_p;
5050 emit_move_insn (tmp, decl_rtl);
5051 emit_move_insn (real_decl_rtl, tmp);
5053 else
5054 emit_move_insn (real_decl_rtl, decl_rtl);
5058 /* If returning a structure, arrange to return the address of the value
5059 in a place where debuggers expect to find it.
5061 If returning a structure PCC style,
5062 the caller also depends on this value.
5063 And cfun->returns_pcc_struct is not necessarily set. */
5064 if (cfun->returns_struct
5065 || cfun->returns_pcc_struct)
5067 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5068 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5069 rtx outgoing;
5071 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5072 type = TREE_TYPE (type);
5073 else
5074 value_address = XEXP (value_address, 0);
5076 outgoing = targetm.calls.function_value (build_pointer_type (type),
5077 current_function_decl, true);
5079 /* Mark this as a function return value so integrate will delete the
5080 assignment and USE below when inlining this function. */
5081 REG_FUNCTION_VALUE_P (outgoing) = 1;
5083 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5084 value_address = convert_memory_address (GET_MODE (outgoing),
5085 value_address);
5087 emit_move_insn (outgoing, value_address);
5089 /* Show return register used to hold result (in this case the address
5090 of the result). */
5091 crtl->return_rtx = outgoing;
5094 /* Emit the actual code to clobber return register. */
5096 rtx seq;
5098 start_sequence ();
5099 clobber_return_register ();
5100 seq = get_insns ();
5101 end_sequence ();
5103 emit_insn_after (seq, clobber_after);
5106 /* Output the label for the naked return from the function. */
5107 if (naked_return_label)
5108 emit_label (naked_return_label);
5110 /* @@@ This is a kludge. We want to ensure that instructions that
5111 may trap are not moved into the epilogue by scheduling, because
5112 we don't always emit unwind information for the epilogue. */
5113 if (cfun->can_throw_non_call_exceptions
5114 && targetm.except_unwind_info (&global_options) != UI_SJLJ)
5115 emit_insn (gen_blockage ());
5117 /* If stack protection is enabled for this function, check the guard. */
5118 if (crtl->stack_protect_guard)
5119 stack_protect_epilogue ();
5121 /* If we had calls to alloca, and this machine needs
5122 an accurate stack pointer to exit the function,
5123 insert some code to save and restore the stack pointer. */
5124 if (! EXIT_IGNORE_STACK
5125 && cfun->calls_alloca)
5127 rtx tem = 0, seq;
5129 start_sequence ();
5130 emit_stack_save (SAVE_FUNCTION, &tem);
5131 seq = get_insns ();
5132 end_sequence ();
5133 emit_insn_before (seq, parm_birth_insn);
5135 emit_stack_restore (SAVE_FUNCTION, tem);
5138 /* ??? This should no longer be necessary since the old "stupid" register
5139 allocator is no longer with us, but there are some parts of the compiler
5140 (e.g. reload_combine, and sh mach_dep_reorg) that still try to compute
5141 their own lifetime info instead of using the general framework. */
5142 use_return_register ();
5143 }
5145 rtx
5146 get_arg_pointer_save_area (void)
5147 {
5148 rtx ret = arg_pointer_save_area;
5150 if (! ret)
5152 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5153 arg_pointer_save_area = ret;
5156 if (! crtl->arg_pointer_save_area_init)
5158 rtx seq;
5160 /* Save the arg pointer at the beginning of the function. The
5161 generated stack slot may not be a valid memory address, so we
5162 have to check it and fix it if necessary. */
5163 start_sequence ();
5164 emit_move_insn (validize_mem (ret),
5165 crtl->args.internal_arg_pointer);
5166 seq = get_insns ();
5167 end_sequence ();
5169 push_topmost_sequence ();
5170 emit_insn_after (seq, entry_of_function ());
5171 pop_topmost_sequence ();
5173 crtl->arg_pointer_save_area_init = true;
5176 return ret;
5179 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5180 for the first time. */
5182 static void
5183 record_insns (rtx insns, rtx end, htab_t *hashp)
5185 rtx tmp;
5186 htab_t hash = *hashp;
5188 if (hash == NULL)
5189 *hashp = hash
5190 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5192 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5194 void **slot = htab_find_slot (hash, tmp, INSERT);
5195 gcc_assert (*slot == NULL);
5196 *slot = tmp;
5200 /* INSN has been duplicated or replaced by COPY, perhaps as a result of
5201 duplicating a basic block, splitting, or a peephole. If INSN is a
5202 prologue or epilogue insn, then record COPY as well. */
5204 void
5205 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5206 {
5207 htab_t hash;
5208 void **slot;
5210 hash = epilogue_insn_hash;
5211 if (!hash || !htab_find (hash, insn))
5212 {
5213 hash = prologue_insn_hash;
5214 if (!hash || !htab_find (hash, insn))
5215 return;
5216 }
5218 slot = htab_find_slot (hash, copy, INSERT);
5219 gcc_assert (*slot == NULL);
5220 *slot = copy;
5221 }
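/* Added note: passes that duplicate insns (basic-block duplication,
   splitting, peepholes) are expected to call this hook so the copy is
   still recognized by prologue_epilogue_contains; otherwise a copied
   prologue or epilogue insn could later be treated as ordinary code.  */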
5223 /* Set the locator of the insn chain starting at INSN to LOC. */
5224 static void
5225 set_insn_locators (rtx insn, int loc)
5227 while (insn != NULL_RTX)
5229 if (INSN_P (insn))
5230 INSN_LOCATOR (insn) = loc;
5231 insn = NEXT_INSN (insn);
5235 /* Determine whether INSN, or any insn within a SEQUENCE that INSN holds,
5236 is recorded in HASH. After reorg, SEQUENCE rtl is possible. */
5238 static bool
5239 contains (const_rtx insn, htab_t hash)
5241 if (hash == NULL)
5242 return false;
5244 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5246 int i;
5247 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5248 if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
5249 return true;
5250 return false;
5253 return htab_find (hash, insn) != NULL;
5254 }
5256 int
5257 prologue_epilogue_contains (const_rtx insn)
5258 {
5259 if (contains (insn, prologue_insn_hash))
5260 return 1;
5261 if (contains (insn, epilogue_insn_hash))
5262 return 1;
5263 return 0;
5266 #ifdef HAVE_return
5267 /* Insert gen_return at the end of block BB. This also means updating
5268 block_for_insn appropriately. */
5270 static void
5271 emit_return_into_block (basic_block bb)
5273 emit_jump_insn_after (gen_return (), BB_END (bb));
5275 #endif /* HAVE_return */
5277 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5278 this into place with notes indicating where the prologue ends and where
5279 the epilogue begins. Update the basic block information when possible. */
5281 static void
5282 thread_prologue_and_epilogue_insns (void)
5284 bool inserted;
5285 rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
5286 edge entry_edge ATTRIBUTE_UNUSED;
5287 edge e;
5288 edge_iterator ei;
5290 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
5292 inserted = false;
5293 seq = NULL_RTX;
5294 epilogue_end = NULL_RTX;
5296 /* Can't deal with multiple successors of the entry block at the
5297 moment. Function should always have at least one entry
5298 point. */
5299 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5300 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
5302 if (flag_split_stack
5303 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5304 == NULL))
5306 #ifndef HAVE_split_stack_prologue
5307 gcc_unreachable ();
5308 #else
5309 gcc_assert (HAVE_split_stack_prologue);
5311 start_sequence ();
5312 emit_insn (gen_split_stack_prologue ());
5313 seq = get_insns ();
5314 end_sequence ();
5316 record_insns (seq, NULL, &prologue_insn_hash);
5317 set_insn_locators (seq, prologue_locator);
5319 /* This relies on the fact that committing the edge insertion
5320 will look for basic blocks within the inserted instructions,
5321 which in turn relies on the fact that we are not in CFG
5322 layout mode here. */
5323 insert_insn_on_edge (seq, entry_edge);
5324 inserted = true;
5325 #endif
5328 #ifdef HAVE_prologue
5329 if (HAVE_prologue)
5331 start_sequence ();
5332 seq = gen_prologue ();
5333 emit_insn (seq);
5335 /* Insert an explicit USE for the frame pointer
5336 if the profiling is on and the frame pointer is required. */
5337 if (crtl->profile && frame_pointer_needed)
5338 emit_use (hard_frame_pointer_rtx);
5340 /* Retain a map of the prologue insns. */
5341 record_insns (seq, NULL, &prologue_insn_hash);
5342 emit_note (NOTE_INSN_PROLOGUE_END);
5344 /* Ensure that instructions are not moved into the prologue when
5345 profiling is on. The call to the profiling routine can be
5346 emitted within the live range of a call-clobbered register. */
5347 if (!targetm.profile_before_prologue () && crtl->profile)
5348 emit_insn (gen_blockage ());
5350 seq = get_insns ();
5351 end_sequence ();
5352 set_insn_locators (seq, prologue_locator);
5354 insert_insn_on_edge (seq, entry_edge);
5355 inserted = true;
5357 #endif
5359 /* If the exit block has no non-fake predecessors, we don't need
5360 an epilogue. */
5361 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5362 if ((e->flags & EDGE_FAKE) == 0)
5363 break;
5364 if (e == NULL)
5365 goto epilogue_done;
5367 rtl_profile_for_bb (EXIT_BLOCK_PTR);
5368 #ifdef HAVE_return
5369 if (optimize && HAVE_return)
5371 /* If we're allowed to generate a simple return instruction,
5372 then by definition we don't need a full epilogue. Examine
5373 the block that falls through to EXIT. If it does not
5374 contain any code, examine its predecessors and try to
5375 emit (conditional) return instructions. */
5377 basic_block last;
5378 rtx label;
5380 e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
5381 if (e == NULL)
5382 goto epilogue_done;
5383 last = e->src;
5385 /* Verify that there are no active instructions in the last block. */
5386 label = BB_END (last);
5387 while (label && !LABEL_P (label))
5389 if (active_insn_p (label))
5390 break;
5391 label = PREV_INSN (label);
5394 if (BB_HEAD (last) == label && LABEL_P (label))
5396 edge_iterator ei2;
5398 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5400 basic_block bb = e->src;
5401 rtx jump;
5403 if (bb == ENTRY_BLOCK_PTR)
5405 ei_next (&ei2);
5406 continue;
5409 jump = BB_END (bb);
5410 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5412 ei_next (&ei2);
5413 continue;
5416 /* If we have an unconditional jump, we can replace that
5417 with a simple return instruction. */
5418 if (simplejump_p (jump))
5420 emit_return_into_block (bb);
5421 delete_insn (jump);
5424 /* If we have a conditional jump, we can try to replace
5425 that with a conditional return instruction. */
5426 else if (condjump_p (jump))
5428 if (! redirect_jump (jump, 0, 0))
5430 ei_next (&ei2);
5431 continue;
5434 /* If this block has only one successor, it both jumps
5435 and falls through to the fallthru block, so we can't
5436 delete the edge. */
5437 if (single_succ_p (bb))
5439 ei_next (&ei2);
5440 continue;
5443 else
5445 ei_next (&ei2);
5446 continue;
5449 /* Fix up the CFG for the successful change we just made. */
5450 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5453 /* Emit a return insn for the exit fallthru block. Whether
5454 this is still reachable will be determined later. */
5456 emit_barrier_after (BB_END (last));
5457 emit_return_into_block (last);
5458 epilogue_end = BB_END (last);
5459 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5460 goto epilogue_done;
5463 #endif
5465 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5466 this marker for the splits of EH_RETURN patterns, and nothing else
5467 uses the flag in the meantime. */
5468 epilogue_completed = 1;
5470 #ifdef HAVE_eh_return
5471 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5472 some targets, these get split to a special version of the epilogue
5473 code. In order to be able to properly annotate these with unwind
5474 info, try to split them now. If we get a valid split, drop an
5475 EPILOGUE_BEG note and mark the insns as epilogue insns. */
5476 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5478 rtx prev, last, trial;
5480 if (e->flags & EDGE_FALLTHRU)
5481 continue;
5482 last = BB_END (e->src);
5483 if (!eh_returnjump_p (last))
5484 continue;
5486 prev = PREV_INSN (last);
5487 trial = try_split (PATTERN (last), last, 1);
5488 if (trial == last)
5489 continue;
5491 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
5492 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
5494 #endif
5496 /* Find the edge that falls through to EXIT. Other edges may exist
5497 due to RETURN instructions, but those don't need epilogues.
5498 There really shouldn't be a mixture -- either all should have
5499 been converted or none, however... */
5501 e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
5502 if (e == NULL)
5503 goto epilogue_done;
5505 #ifdef HAVE_epilogue
5506 if (HAVE_epilogue)
5508 start_sequence ();
5509 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5510 seq = gen_epilogue ();
5511 if (seq)
5512 emit_jump_insn (seq);
5514 /* Retain a map of the epilogue insns. */
5515 record_insns (seq, NULL, &epilogue_insn_hash);
5516 set_insn_locators (seq, epilogue_locator);
5518 seq = get_insns ();
5519 end_sequence ();
5521 insert_insn_on_edge (seq, e);
5522 inserted = true;
5524 else
5525 #endif
5527 basic_block cur_bb;
5529 if (! next_active_insn (BB_END (e->src)))
5530 goto epilogue_done;
5531 /* We have a fall-through edge to the exit block, the source is not
5532 at the end of the function, and there will be an assembler epilogue
5533 at the end of the function.
5534 We can't use force_nonfallthru here, because that would try to
5535 use return. Inserting a jump 'by hand' is extremely messy, so
5536 we take advantage of cfg_layout_finalize using
5537 fixup_fallthru_exit_predecessor. */
5538 cfg_layout_initialize (0);
5539 FOR_EACH_BB (cur_bb)
5540 if (cur_bb->index >= NUM_FIXED_BLOCKS
5541 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5542 cur_bb->aux = cur_bb->next_bb;
5543 cfg_layout_finalize ();
5545 epilogue_done:
5546 default_rtl_profile ();
5548 if (inserted)
5550 commit_edge_insertions ();
5552 /* The epilogue insns we inserted may cause the exit edge to no longer
5553 be fallthru. */
5554 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5556 if (((e->flags & EDGE_FALLTHRU) != 0)
5557 && returnjump_p (BB_END (e->src)))
5558 e->flags &= ~EDGE_FALLTHRU;
5562 #ifdef HAVE_sibcall_epilogue
5563 /* Emit sibling epilogues before any sibling call sites. */
5564 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5566 basic_block bb = e->src;
5567 rtx insn = BB_END (bb);
5569 if (!CALL_P (insn)
5570 || ! SIBLING_CALL_P (insn))
5572 ei_next (&ei);
5573 continue;
5576 start_sequence ();
5577 emit_note (NOTE_INSN_EPILOGUE_BEG);
5578 emit_insn (gen_sibcall_epilogue ());
5579 seq = get_insns ();
5580 end_sequence ();
5582 /* Retain a map of the epilogue insns. Used in life analysis to
5583 avoid getting rid of sibcall epilogue insns. Do this before we
5584 actually emit the sequence. */
5585 record_insns (seq, NULL, &epilogue_insn_hash);
5586 set_insn_locators (seq, epilogue_locator);
5588 emit_insn_before (seq, insn);
5589 ei_next (&ei);
5591 #endif
5593 #ifdef HAVE_epilogue
5594 if (epilogue_end)
5596 rtx insn, next;
5598 /* Move any NOTE_INSN_FUNCTION_BEG notes that appear after the start
5599 of the epilogue back before it, as those can be relevant for debug
5600 info generation. There is no need, however, to be picky about
5601 whether such a note exists. */
5603 for (insn = epilogue_end; insn; insn = next)
5605 next = NEXT_INSN (insn);
5606 if (NOTE_P (insn)
5607 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
5608 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5611 #endif
5613 /* Threading the prologue and epilogue changes the artificial refs
5614 in the entry and exit blocks. */
5615 epilogue_completed = 1;
5616 df_update_entry_exit_and_calls ();
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
    || defined (HAVE_sibcall_epilogue)
  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = htab_elements (prologue_insn_hash);
      rtx insn, last = NULL, note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
	 (1) The prologue can contain loops (typically probing the stack),
	     which means that the end of the prologue isn't in the first bb.
	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, prologue_insn_hash))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  if (note == NULL)
	    {
	      /* Scan forward looking for the PROLOGUE_END note.  It should
		 be right at the beginning of the block, possibly with other
		 insn notes that got moved there.  */
	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
		{
		  if (NOTE_P (note)
		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
		    break;
		}
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (LABEL_P (last))
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }

  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  rtx insn, first = NULL, note = NULL;
	  basic_block bb = e->src;

	  /* Scan from the beginning until we reach the first epilogue
	     insn.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      if (NOTE_P (insn))
		{
		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
		    {
		      note = insn;
		      if (first != NULL)
			break;
		    }
		}
	      else if (first == NULL && contains (insn, epilogue_insn_hash))
		{
		  first = insn;
		  if (note != NULL)
		    break;
		}
	    }

	  if (note)
	    {
	      /* If the function has a single basic block, and no real
		 epilogue insns (e.g. sibcall with no cleanup), the
		 epilogue note can get scheduled before the prologue
		 note.  If we have frame related prologue insns, having
		 them scanned during the epilogue will result in a crash.
		 In this case re-order the epilogue note to just before
		 the last insn in the block.  */
	      if (first == NULL)
		first = BB_END (bb);

	      if (PREV_INSN (first) != note)
		reorder_insns (note, note, PREV_INSN (first));
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}

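/* An assumed before/after picture for the prologue case handled above
   (illustration only): after scheduling, the insn stream may look like

     insn-1  (prologue)
     NOTE_INSN_PROLOGUE_END
     insn-2  (prologue, moved past its note by the scheduler)

   and reorder_insns moves the note so that it once again follows the
   last prologue insn:

     insn-1  (prologue)
     insn-2  (prologue)
     NOTE_INSN_PROLOGUE_END  */
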
/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  if (cfun == NULL)
    return "<none>";
  return lang_hooks.decl_printable_name (cfun->decl, 2);
}

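/* A hypothetical caller (illustration only): dump code often labels its
   output with the function being compiled, e.g.

     if (dump_file)
       fprintf (dump_file, ";; compiling %s\n", current_function_name ());

   The "<none>" fallback above keeps such callers safe when no function
   is currently being expanded.  */
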
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  current_function_uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}

/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
	func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
						 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
	*slot = type;
    }
}

/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
	used_types_insert_helper (t, cfun);
      else
	/* So this might be a type referenced by a global variable.
	   Record that type so that we can later decide to emit its debug
	   information.  */
	VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
    }
}

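/* An assumed walk-through of the stripping loop above (not from this
   file): for a declaration such as

     struct foo (*p)[4];

   T starts as the unnamed pointer type, is stripped to the unnamed
   array type, and finally lands on 'struct foo', which is neither a
   pointer nor an array, ending the loop; only that named main variant
   is entered into the used-types table.  */
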
/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
				iterative_hash_object (entry->var_decl, 0));
}

/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
types_used_by_vars_do_hash (const void *x)
{
  const struct types_used_by_vars_entry *entry =
    (const struct types_used_by_vars_entry *) x;

  return hash_types_used_by_vars_entry (entry);
}

/* Equality function of the types_used_by_vars_entry hash table.  */

int
types_used_by_vars_eq (const void *x1, const void *x2)
{
  const struct types_used_by_vars_entry *e1 =
    (const struct types_used_by_vars_entry *) x1;
  const struct types_used_by_vars_entry *e2 =
    (const struct types_used_by_vars_entry *) x2;

  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}

/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      void **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
	types_used_by_vars_hash =
	  htab_create_ggc (37, types_used_by_vars_do_hash,
			   types_used_by_vars_eq, NULL);
      slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
				       hash_types_used_by_vars_entry (&e),
				       INSERT);
      if (*slot == NULL)
	{
	  struct types_used_by_vars_entry *entry;
	  entry = ggc_alloc_types_used_by_vars_entry ();
	  entry->type = type;
	  entry->var_decl = var_decl;
	  *slot = entry;
	}
    }
}

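/* A hypothetical front-end caller (illustration only): when processing
   a global such as

     struct bar b;

   a front end could record the pair with

     types_used_by_var_decl_insert (TREE_TYPE (decl), decl);

   letting debug-info emission later decide whether 'struct bar' must be
   described even if no function body mentions it.  */
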
struct rtl_opt_pass pass_leaf_regs =
{
 {
  RTL_PASS,
  "*leaf_regs",                         /* name */
  NULL,                                 /* gate */
  rest_of_handle_check_leaf_regs,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage)
    output_stack_usage ();

  return 0;
}

struct rtl_opt_pass pass_thread_prologue_and_epilogue =
{
 {
  RTL_PASS,
  "pro_and_epilogue",                   /* name */
  NULL,                                 /* gate */
  rest_of_handle_thread_prologue_and_epilogue, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_THREAD_PROLOGUE_AND_EPILOGUE,      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  TODO_verify_flow,                     /* todo_flags_start */
  TODO_dump_func |
  TODO_df_verify |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */

static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
	constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
	continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
	  || rtx_equal_p (output, input)
	  || (GET_MODE (input) != VOIDmode
	      && GET_MODE (input) != GET_MODE (output)))
	continue;

      /* We can't do anything if the output is also used as input,
	 as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
	  break;
      if (j != ninputs)
	continue;

      /* Avoid changing the same input several times.  For
	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
	 only change in once (to out1), rather than changing it
	 first to out1 and afterwards to out2.  */
      if (i > 0)
	{
	  for (j = 0; j < noutputs; j++)
	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
	      break;
	  if (j != noutputs)
	    continue;
	}
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
	 just replace the occurrence in inputs[i], as the register might
	 also be used in some other input (or even in an address of an
	 output), which would mean possibly increasing the number of
	 inputs by one (namely 'output' in addition), which might pose
	 a too complicated problem for reload to solve.  E.g. this situation:

	   asm ("" : "=r" (output), "=m" (input) : "0" (input))

	 Here 'input' is used in two occurrences as input (once for the
	 input operand, once for the address in the second output operand).
	 If we would replace only the occurrence of the input operand (to
	 make the matching) we would be left with this:

	   output = input
	   asm ("" : "=r" (output), "=m" (input) : "0" (output))

	 Now we suddenly have two different input values (containing the same
	 value, but different pseudos) where we formerly had only one.
	 With more complicated asms this might lead to reload failures
	 which wouldn't have happened without this pass.  So, iterate over
	 all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
					      input, output);
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
					       input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}

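/* An assumed walk-through of the loop above (illustration only): given

     asm ("" : "=r" (x) : "%0" (y), "r" (z));

   the first input's constraint "%0" has the commutativity marker '%'
   skipped, strtoul parses the matching operand number 0, and a move
   'x = y' is emitted before the asm while occurrences of y's pseudo in
   the operands are rewritten to x's.  For the plain "r" input, strtoul
   consumes nothing (end == constraint), so z is left untouched.  */
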
static unsigned
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL)
	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
	  else if (GET_CODE (pat) == SET)
	    p_sets = &PATTERN (insn), noutputs = 1;
	  else
	    continue;

	  if (GET_CODE (*p_sets) == SET
	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
	    match_asm_constraints_1 (insn, p_sets, noutputs);
	}
    }

  return TODO_df_finish;
}

struct rtl_opt_pass pass_match_asm_constraints =
{
 {
  RTL_PASS,
  "asmcons",                            /* name */
  NULL,                                 /* gate */
  rest_of_match_asm_constraints,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

#include "gt-function.h"