[official-gcc.git] / gcc / function.c
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
32 not get a hard register. */
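/* For example (a sketch; DImode chosen arbitrarily), a caller might
   allocate an 8-byte local slot aligned to a 64-bit boundary with

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 64);

   and receive a MEM whose address is based on the virtual frame
   pointer until virtual register instantiation runs.  */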
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "rtl-error.h"
39 #include "tree.h"
40 #include "stor-layout.h"
41 #include "varasm.h"
42 #include "stringpool.h"
43 #include "flags.h"
44 #include "except.h"
45 #include "hashtab.h"
46 #include "hash-set.h"
47 #include "vec.h"
48 #include "machmode.h"
49 #include "hard-reg-set.h"
50 #include "input.h"
51 #include "function.h"
52 #include "expr.h"
53 #include "optabs.h"
54 #include "libfuncs.h"
55 #include "regs.h"
56 #include "insn-config.h"
57 #include "recog.h"
58 #include "output.h"
59 #include "tm_p.h"
60 #include "langhooks.h"
61 #include "target.h"
62 #include "common/common-target.h"
63 #include "gimple-expr.h"
64 #include "gimplify.h"
65 #include "tree-pass.h"
66 #include "predict.h"
67 #include "dominance.h"
68 #include "cfg.h"
69 #include "cfgrtl.h"
70 #include "cfganal.h"
71 #include "cfgbuild.h"
72 #include "cfgcleanup.h"
73 #include "basic-block.h"
74 #include "df.h"
75 #include "params.h"
76 #include "bb-reorder.h"
77 #include "shrink-wrap.h"
78 #include "toplev.h"
79 #include "rtl-iter.h"
81 /* So we can assign to cfun in this file. */
82 #undef cfun
84 #ifndef STACK_ALIGNMENT_NEEDED
85 #define STACK_ALIGNMENT_NEEDED 1
86 #endif
88 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
90 /* Round a value down to the nearest multiple of the required
91 alignment. Avoid using division in case the value is
92 negative. Assume the alignment is a power of two. */
93 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
95 /* Similar, but round up to the next multiple of the
96 alignment. */
97 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
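/* For instance, CEIL_ROUND (5, 4) == 8 and FLOOR_ROUND (5, 4) == 4,
   while FLOOR_ROUND (-5, 4) == -8: the masking rounds toward negative
   infinity, which division would not guarantee for a negative
   dividend.  */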
99 /* Nonzero once virtual register instantiation has been done.
100 assign_stack_local uses frame_pointer_rtx when this is nonzero.
101 calls.c:emit_library_call_value_1 uses it to set up
102 post-instantiation libcalls. */
103 int virtuals_instantiated;
105 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
106 static GTY(()) int funcdef_no;
108 /* These variables hold pointers to functions to create and destroy
109 target specific, per-function data structures. */
110 struct machine_function * (*init_machine_status) (void);
112 /* The currently compiled function. */
113 struct function *cfun = 0;
115 /* These hashes record the prologue and epilogue insns. */
116 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
117 htab_t prologue_insn_hash;
118 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
119 htab_t epilogue_insn_hash;
122 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
123 vec<tree, va_gc> *types_used_by_cur_var_decl;
125 /* Forward declarations. */
127 static struct temp_slot *find_temp_slot_from_address (rtx);
128 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
129 static void pad_below (struct args_size *, machine_mode, tree);
130 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
131 static int all_blocks (tree, tree *);
132 static tree *get_block_vector (tree, int *);
133 extern tree debug_find_var_in_block_tree (tree, tree);
134 /* We always define `record_insns' even if it's not used so that we
135 can always export `prologue_epilogue_contains'. */
136 static void record_insns (rtx_insn *, rtx, htab_t *) ATTRIBUTE_UNUSED;
137 static bool contains (const_rtx, htab_t);
138 static void prepare_function_start (void);
139 static void do_clobber_return_reg (rtx, void *);
140 static void do_use_return_reg (rtx, void *);
142 /* Stack of nested functions; used to keep track of the cfun stack. */
145 typedef struct function *function_p;
147 static vec<function_p> function_context_stack;
149 /* Save the current context for compilation of a nested function.
150 This is called from language-specific code. */
152 void
153 push_function_context (void)
155 if (cfun == 0)
156 allocate_struct_function (NULL, false);
158 function_context_stack.safe_push (cfun);
159 set_cfun (NULL);
162 /* Restore the last saved context, at the end of a nested function.
163 This function is called from language-specific code. */
165 void
166 pop_function_context (void)
168 struct function *p = function_context_stack.pop ();
169 set_cfun (p);
170 current_function_decl = p->decl;
172 /* Reset variables that have known state during rtx generation. */
173 virtuals_instantiated = 0;
174 generating_concat_p = 1;
177 /* Clear out all parts of the state in F that can safely be discarded
178 after the function has been parsed, but not compiled, to let
179 garbage collection reclaim the memory. */
181 void
182 free_after_parsing (struct function *f)
184 f->language = 0;
187 /* Clear out all parts of the state in F that can safely be discarded
188 after the function has been compiled, to let garbage collection
189 reclaim the memory. */
191 void
192 free_after_compilation (struct function *f)
194 prologue_insn_hash = NULL;
195 epilogue_insn_hash = NULL;
197 free (crtl->emit.regno_pointer_align);
199 memset (crtl, 0, sizeof (struct rtl_data));
200 f->eh = NULL;
201 f->machine = NULL;
202 f->cfg = NULL;
204 regno_reg_rtx = NULL;
207 /* Return size needed for stack frame based on slots so far allocated.
208 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
209 the caller may have to do that. */
211 HOST_WIDE_INT
212 get_frame_size (void)
214 if (FRAME_GROWS_DOWNWARD)
215 return -frame_offset;
216 else
217 return frame_offset;
220 /* Issue an error message and return TRUE if frame OFFSET overflows in
221 the signed target pointer arithmetic for function FUNC. Otherwise
222 return FALSE. */
224 bool
225 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
227 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
229 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
230 /* Leave room for the fixed part of the frame. */
231 - 64 * UNITS_PER_WORD)
233 error_at (DECL_SOURCE_LOCATION (func),
234 "total size of local objects too large");
235 return TRUE;
238 return FALSE;
241 /* Return stack slot alignment in bits for TYPE and MODE. */
243 static unsigned int
244 get_stack_local_alignment (tree type, machine_mode mode)
246 unsigned int alignment;
248 if (mode == BLKmode)
249 alignment = BIGGEST_ALIGNMENT;
250 else
251 alignment = GET_MODE_ALIGNMENT (mode);
253 /* Allow the front-end to (possibly) increase the alignment of this
254 stack slot. */
255 if (! type)
256 type = lang_hooks.types.type_for_mode (mode, 0);
258 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
261 /* Determine whether it is possible to fit a stack slot of size SIZE and
262 alignment ALIGNMENT into an area in the stack frame that starts at
263 frame offset START and has a length of LENGTH. If so, store the frame
264 offset to be used for the stack slot in *POFFSET and return true;
265 return false otherwise. This function will extend the frame size when
266 given a start/length pair that lies at the end of the frame. */
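/* A sketch, assuming the frame grows upward and frame_phase is 0:
   a request of SIZE 4 with ALIGNMENT 8 against a free area with
   START 4 and LENGTH 12 yields *POFFSET 8, and the caller can hand
   [4, 8) and [12, 16) back to the free list via add_frame_space.  */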
268 static bool
269 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
270 HOST_WIDE_INT size, unsigned int alignment,
271 HOST_WIDE_INT *poffset)
273 HOST_WIDE_INT this_frame_offset;
274 int frame_off, frame_alignment, frame_phase;
276 /* Calculate how many bytes the start of local variables is off from
277 stack alignment. */
278 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
279 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
280 frame_phase = frame_off ? frame_alignment - frame_off : 0;
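/* For instance, if STARTING_FRAME_OFFSET is 8 and the preferred
   boundary is 16 bytes, frame_off and frame_phase are both 8; slot
   offsets congruent to 8 modulo 16 become 16-byte aligned once
   STARTING_FRAME_OFFSET is added back in.  */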
282 /* Round the frame offset to the specified alignment. */
284 /* We must be careful here, since FRAME_OFFSET might be negative and
285 division with a negative dividend isn't as well defined as we might
286 like. So we instead assume that ALIGNMENT is a power of two and
287 use logical operations which are unambiguous. */
288 if (FRAME_GROWS_DOWNWARD)
289 this_frame_offset
290 = (FLOOR_ROUND (start + length - size - frame_phase,
291 (unsigned HOST_WIDE_INT) alignment)
292 + frame_phase);
293 else
294 this_frame_offset
295 = (CEIL_ROUND (start - frame_phase,
296 (unsigned HOST_WIDE_INT) alignment)
297 + frame_phase);
299 /* See if it fits. If this space is at the edge of the frame,
300 consider extending the frame to make it fit. Our caller relies on
301 this when allocating a new slot. */
302 if (frame_offset == start && this_frame_offset < frame_offset)
303 frame_offset = this_frame_offset;
304 else if (this_frame_offset < start)
305 return false;
306 else if (start + length == frame_offset
307 && this_frame_offset + size > start + length)
308 frame_offset = this_frame_offset + size;
309 else if (this_frame_offset + size > start + length)
310 return false;
312 *poffset = this_frame_offset;
313 return true;
316 /* Create a new frame_space structure describing free space in the stack
317 frame beginning at START and ending at END, and chain it into the
318 function's frame_space_list. */
320 static void
321 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
323 struct frame_space *space = ggc_alloc<frame_space> ();
324 space->next = crtl->frame_space_list;
325 crtl->frame_space_list = space;
326 space->start = start;
327 space->length = end - start;
330 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
331 with machine mode MODE.
333 ALIGN controls the amount of alignment for the address of the slot:
334 0 means according to MODE,
335 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
336 -2 means use BITS_PER_UNIT,
337 positive specifies alignment boundary in bits.
339 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
340 alignment and ASLK_RECORD_PAD bit set if we should remember
341 extra space we allocated for alignment purposes. When we are
342 called from assign_stack_temp_for_type, it is not set so we don't
343 track the same stack slot in two independent lists.
345 We do not round to stack_boundary here. */
347 static rtx
348 assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
349 int align, int kind)
351 rtx x, addr;
352 int bigend_correction = 0;
353 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
354 unsigned int alignment, alignment_in_bits;
356 if (align == 0)
358 alignment = get_stack_local_alignment (NULL, mode);
359 alignment /= BITS_PER_UNIT;
361 else if (align == -1)
363 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
364 size = CEIL_ROUND (size, alignment);
366 else if (align == -2)
367 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
368 else
369 alignment = align / BITS_PER_UNIT;
371 alignment_in_bits = alignment * BITS_PER_UNIT;
373 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
374 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
376 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
377 alignment = alignment_in_bits / BITS_PER_UNIT;
380 if (SUPPORTS_STACK_ALIGNMENT)
382 if (crtl->stack_alignment_estimated < alignment_in_bits)
384 if (!crtl->stack_realign_processed)
385 crtl->stack_alignment_estimated = alignment_in_bits;
386 else
388 /* If stack is realigned and stack alignment value
389 hasn't been finalized, it is OK not to increase
390 stack_alignment_estimated. The bigger alignment
391 requirement is recorded in stack_alignment_needed
392 below. */
393 gcc_assert (!crtl->stack_realign_finalized);
394 if (!crtl->stack_realign_needed)
396 /* It is OK to reduce the alignment as long as the
397 requested size is 0 or the estimated stack
398 alignment >= mode alignment. */
399 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
400 || size == 0
401 || (crtl->stack_alignment_estimated
402 >= GET_MODE_ALIGNMENT (mode)));
403 alignment_in_bits = crtl->stack_alignment_estimated;
404 alignment = alignment_in_bits / BITS_PER_UNIT;
410 if (crtl->stack_alignment_needed < alignment_in_bits)
411 crtl->stack_alignment_needed = alignment_in_bits;
412 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
413 crtl->max_used_stack_slot_alignment = alignment_in_bits;
415 if (mode != BLKmode || size != 0)
417 if (kind & ASLK_RECORD_PAD)
419 struct frame_space **psp;
421 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
423 struct frame_space *space = *psp;
424 if (!try_fit_stack_local (space->start, space->length, size,
425 alignment, &slot_offset))
426 continue;
427 *psp = space->next;
428 if (slot_offset > space->start)
429 add_frame_space (space->start, slot_offset);
430 if (slot_offset + size < space->start + space->length)
431 add_frame_space (slot_offset + size,
432 space->start + space->length);
433 goto found_space;
437 else if (!STACK_ALIGNMENT_NEEDED)
439 slot_offset = frame_offset;
440 goto found_space;
443 old_frame_offset = frame_offset;
445 if (FRAME_GROWS_DOWNWARD)
447 frame_offset -= size;
448 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
450 if (kind & ASLK_RECORD_PAD)
452 if (slot_offset > frame_offset)
453 add_frame_space (frame_offset, slot_offset);
454 if (slot_offset + size < old_frame_offset)
455 add_frame_space (slot_offset + size, old_frame_offset);
458 else
460 frame_offset += size;
461 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
463 if (kind & ASLK_RECORD_PAD)
465 if (slot_offset > old_frame_offset)
466 add_frame_space (old_frame_offset, slot_offset);
467 if (slot_offset + size < frame_offset)
468 add_frame_space (slot_offset + size, frame_offset);
472 found_space:
473 /* On a big-endian machine, if we are allocating more space than we will use,
474 use the least significant bytes of those that are allocated. */
475 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
476 bigend_correction = size - GET_MODE_SIZE (mode);
478 /* If we have already instantiated virtual registers, return the actual
479 address relative to the frame pointer. */
480 if (virtuals_instantiated)
481 addr = plus_constant (Pmode, frame_pointer_rtx,
482 trunc_int_for_mode
483 (slot_offset + bigend_correction
484 + STARTING_FRAME_OFFSET, Pmode));
485 else
486 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
487 trunc_int_for_mode
488 (slot_offset + bigend_correction,
489 Pmode));
491 x = gen_rtx_MEM (mode, addr);
492 set_mem_align (x, alignment_in_bits);
493 MEM_NOTRAP_P (x) = 1;
495 stack_slot_list
496 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
498 if (frame_offset_overflow (frame_offset, current_function_decl))
499 frame_offset = 0;
501 return x;
504 /* Wrapper around assign_stack_local_1; passes ASLK_RECORD_PAD as KIND. */
506 rtx
507 assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
509 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
512 /* In order to evaluate some expressions, such as function calls returning
513 structures in memory, we need to temporarily allocate stack locations.
514 We record each allocated temporary in the following structure.
516 Associated with each temporary slot is a nesting level. When we pop up
517 one level, all temporaries associated with the previous level are freed.
518 Normally, all temporaries are freed after the execution of the statement
519 in which they were created. However, if we are inside a ({...}) grouping,
520 the result may be in a temporary and hence must be preserved. If the
521 result could be in a temporary, we preserve it if we can determine which
522 one it is in. If we cannot determine which temporary may contain the
523 result, all temporaries are preserved. A temporary is preserved by
524 pretending it was allocated at the previous nesting level. */
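/* For instance, in a statement expression such as
     x = ({ struct S s = f (); s.a; });
   the temporary holding the aggregate value returned by f must outlive
   the inner statement, so it is preserved by moving it to the
   enclosing nesting level (see preserve_temp_slots below).  */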
526 struct GTY(()) temp_slot {
527 /* Points to next temporary slot. */
528 struct temp_slot *next;
529 /* Points to previous temporary slot. */
530 struct temp_slot *prev;
531 /* The rtx used to reference the slot. */
532 rtx slot;
533 /* The size, in units, of the slot. */
534 HOST_WIDE_INT size;
535 /* The type of the object in the slot, or zero if it doesn't correspond
536 to a type. We use this to determine whether a slot can be reused.
537 It can be reused if objects of the type of the new slot will always
538 conflict with objects of the type of the old slot. */
539 tree type;
540 /* The alignment (in bits) of the slot. */
541 unsigned int align;
542 /* Nonzero if this temporary is currently in use. */
543 char in_use;
544 /* Nesting level at which this slot is being used. */
545 int level;
546 /* The offset of the slot from the frame_pointer, including extra space
547 for alignment. This info is for combine_temp_slots. */
548 HOST_WIDE_INT base_offset;
549 /* The size of the slot, including extra space for alignment. This
550 info is for combine_temp_slots. */
551 HOST_WIDE_INT full_size;
554 /* Entry for the below hash table. */
555 struct GTY((for_user)) temp_slot_address_entry {
556 hashval_t hash;
557 rtx address;
558 struct temp_slot *temp_slot;
561 struct temp_address_hasher : ggc_hasher<temp_slot_address_entry *>
563 static hashval_t hash (temp_slot_address_entry *);
564 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
567 /* A table of addresses that represent a stack slot. The table is a mapping
568 from address RTXen to a temp slot. */
569 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
570 static size_t n_temp_slots_in_use;
572 /* Removes temporary slot TEMP from LIST. */
574 static void
575 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
577 if (temp->next)
578 temp->next->prev = temp->prev;
579 if (temp->prev)
580 temp->prev->next = temp->next;
581 else
582 *list = temp->next;
584 temp->prev = temp->next = NULL;
587 /* Inserts temporary slot TEMP to LIST. */
589 static void
590 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
592 temp->next = *list;
593 if (*list)
594 (*list)->prev = temp;
595 temp->prev = NULL;
596 *list = temp;
599 /* Returns the list of used temp slots at LEVEL. */
601 static struct temp_slot **
602 temp_slots_at_level (int level)
604 if (level >= (int) vec_safe_length (used_temp_slots))
605 vec_safe_grow_cleared (used_temp_slots, level + 1);
607 return &(*used_temp_slots)[level];
610 /* Returns the maximal temporary slot level. */
612 static int
613 max_slot_level (void)
615 if (!used_temp_slots)
616 return -1;
618 return used_temp_slots->length () - 1;
621 /* Moves temporary slot TEMP to LEVEL. */
623 static void
624 move_slot_to_level (struct temp_slot *temp, int level)
626 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
627 insert_slot_to_list (temp, temp_slots_at_level (level));
628 temp->level = level;
631 /* Make temporary slot TEMP available. */
633 static void
634 make_slot_available (struct temp_slot *temp)
636 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
637 insert_slot_to_list (temp, &avail_temp_slots);
638 temp->in_use = 0;
639 temp->level = -1;
640 n_temp_slots_in_use--;
643 /* Compute the hash value for an address -> temp slot mapping.
644 The value is cached on the mapping entry. */
645 static hashval_t
646 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
648 int do_not_record = 0;
649 return hash_rtx (t->address, GET_MODE (t->address),
650 &do_not_record, NULL, false);
653 /* Return the hash value for an address -> temp slot mapping. */
654 hashval_t
655 temp_address_hasher::hash (temp_slot_address_entry *t)
657 return t->hash;
660 /* Compare two address -> temp slot mapping entries. */
661 bool
662 temp_address_hasher::equal (temp_slot_address_entry *t1,
663 temp_slot_address_entry *t2)
665 return exp_equiv_p (t1->address, t2->address, 0, true);
668 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
669 static void
670 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
672 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
673 t->address = address;
674 t->temp_slot = temp_slot;
675 t->hash = temp_slot_address_compute_hash (t);
676 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
679 /* Remove an address -> temp slot mapping entry if the temp slot is
680 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
681 int
682 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
684 const struct temp_slot_address_entry *t = *slot;
685 if (! t->temp_slot->in_use)
686 temp_slot_address_table->clear_slot (slot);
687 return 1;
690 /* Remove all mappings of addresses to unused temp slots. */
691 static void
692 remove_unused_temp_slot_addresses (void)
694 /* Use quicker clearing if there aren't any active temp slots. */
695 if (n_temp_slots_in_use)
696 temp_slot_address_table->traverse
697 <void *, remove_unused_temp_slot_addresses_1> (NULL);
698 else
699 temp_slot_address_table->empty ();
702 /* Find the temp slot corresponding to the object at address X. */
704 static struct temp_slot *
705 find_temp_slot_from_address (rtx x)
707 struct temp_slot *p;
708 struct temp_slot_address_entry tmp, *t;
710 /* First try the easy way:
711 See if X exists in the address -> temp slot mapping. */
712 tmp.address = x;
713 tmp.temp_slot = NULL;
714 tmp.hash = temp_slot_address_compute_hash (&tmp);
715 t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
716 if (t)
717 return t->temp_slot;
719 /* If we have a sum involving a register, see if it points to a temp
720 slot. */
721 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
722 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
723 return p;
724 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
725 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
726 return p;
728 /* Last resort: Address is a virtual stack var address. */
729 if (GET_CODE (x) == PLUS
730 && XEXP (x, 0) == virtual_stack_vars_rtx
731 && CONST_INT_P (XEXP (x, 1)))
733 int i;
734 for (i = max_slot_level (); i >= 0; i--)
735 for (p = *temp_slots_at_level (i); p; p = p->next)
737 if (INTVAL (XEXP (x, 1)) >= p->base_offset
738 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
739 return p;
743 return NULL;
746 /* Allocate a temporary stack slot and record it for possible later
747 reuse.
749 MODE is the machine mode to be given to the returned rtx.
751 SIZE is the size in units of the space required. We do no rounding here
752 since assign_stack_local will do any required rounding.
754 TYPE is the type that will be used for the stack slot. */
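/* A usage sketch (TYPE stands for any complete aggregate type):
   expanding a call that returns its value in memory might allocate
   the scratch location as

     rtx mem = assign_stack_temp_for_type (BLKmode,
                                           int_size_in_bytes (type),
                                           type);

   and let free_temp_slots release it at the end of the statement.  */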
756 rtx
757 assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
758 tree type)
760 unsigned int align;
761 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
762 rtx slot;
764 /* If SIZE is -1 it means that somebody tried to allocate a temporary
765 of a variable size. */
766 gcc_assert (size != -1);
768 align = get_stack_local_alignment (type, mode);
770 /* Try to find an available, already-allocated temporary of the proper
771 mode which meets the size and alignment requirements. Choose the
772 smallest one with the closest alignment.
774 If assign_stack_temp is called outside of the tree->rtl expansion,
775 we cannot reuse the stack slots (that may still refer to
776 VIRTUAL_STACK_VARS_REGNUM). */
777 if (!virtuals_instantiated)
779 for (p = avail_temp_slots; p; p = p->next)
781 if (p->align >= align && p->size >= size
782 && GET_MODE (p->slot) == mode
783 && objects_must_conflict_p (p->type, type)
784 && (best_p == 0 || best_p->size > p->size
785 || (best_p->size == p->size && best_p->align > p->align)))
787 if (p->align == align && p->size == size)
789 selected = p;
790 cut_slot_from_list (selected, &avail_temp_slots);
791 best_p = 0;
792 break;
794 best_p = p;
799 /* Make our best, if any, the one to use. */
800 if (best_p)
802 selected = best_p;
803 cut_slot_from_list (selected, &avail_temp_slots);
805 /* If there are enough aligned bytes left over, make them into a new
806 temp_slot so that the extra bytes don't get wasted. Do this only
807 for BLKmode slots, so that we can be sure of the alignment. */
808 if (GET_MODE (best_p->slot) == BLKmode)
810 int alignment = best_p->align / BITS_PER_UNIT;
811 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
813 if (best_p->size - rounded_size >= alignment)
815 p = ggc_alloc<temp_slot> ();
816 p->in_use = 0;
817 p->size = best_p->size - rounded_size;
818 p->base_offset = best_p->base_offset + rounded_size;
819 p->full_size = best_p->full_size - rounded_size;
820 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
821 p->align = best_p->align;
822 p->type = best_p->type;
823 insert_slot_to_list (p, &avail_temp_slots);
825 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
826 stack_slot_list);
828 best_p->size = rounded_size;
829 best_p->full_size = rounded_size;
834 /* If we still didn't find one, make a new temporary. */
835 if (selected == 0)
837 HOST_WIDE_INT frame_offset_old = frame_offset;
839 p = ggc_alloc<temp_slot> ();
841 /* We are passing an explicit alignment request to assign_stack_local.
842 One side effect of that is assign_stack_local will not round SIZE
843 to ensure the frame offset remains suitably aligned.
845 So for requests which depended on the rounding of SIZE, we go ahead
846 and round it now. We also make sure ALIGNMENT is at least
847 BIGGEST_ALIGNMENT. */
848 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
849 p->slot = assign_stack_local_1 (mode,
850 (mode == BLKmode
851 ? CEIL_ROUND (size,
852 (int) align
853 / BITS_PER_UNIT)
854 : size),
855 align, 0);
857 p->align = align;
859 /* The following slot size computation is necessary because we don't
860 know the actual size of the temporary slot until assign_stack_local
861 has performed all the frame alignment and size rounding for the
862 requested temporary. Note that extra space added for alignment
863 can be either above or below this stack slot depending on which
864 way the frame grows. We include the extra space if and only if it
865 is above this slot. */
866 if (FRAME_GROWS_DOWNWARD)
867 p->size = frame_offset_old - frame_offset;
868 else
869 p->size = size;
871 /* Now define the fields used by combine_temp_slots. */
872 if (FRAME_GROWS_DOWNWARD)
874 p->base_offset = frame_offset;
875 p->full_size = frame_offset_old - frame_offset;
877 else
879 p->base_offset = frame_offset_old;
880 p->full_size = frame_offset - frame_offset_old;
883 selected = p;
886 p = selected;
887 p->in_use = 1;
888 p->type = type;
889 p->level = temp_slot_level;
890 n_temp_slots_in_use++;
892 pp = temp_slots_at_level (p->level);
893 insert_slot_to_list (p, pp);
894 insert_temp_slot_address (XEXP (p->slot, 0), p);
896 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
897 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
898 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
900 /* If we know the alias set for the memory that will be used, use
901 it. If there's no TYPE, then we don't know anything about the
902 alias set for the memory. */
903 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
904 set_mem_align (slot, align);
906 /* If a type is specified, set the relevant flags. */
907 if (type != 0)
908 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
909 MEM_NOTRAP_P (slot) = 1;
911 return slot;
914 /* Allocate a temporary stack slot and record it for possible later
915 reuse. First two arguments are same as in preceding function. */
917 rtx
918 assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
920 return assign_stack_temp_for_type (mode, size, NULL_TREE);
923 /* Assign a temporary.
924 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
925 and so that should be used in error messages. In either case, we
926 allocate space of the given type.
927 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
928 it is 0 if a register is OK.
929 DONT_PROMOTE is 1 if we should not promote values in register
930 to wider modes. */
932 rtx
933 assign_temp (tree type_or_decl, int memory_required,
934 int dont_promote ATTRIBUTE_UNUSED)
936 tree type, decl;
937 machine_mode mode;
938 #ifdef PROMOTE_MODE
939 int unsignedp;
940 #endif
942 if (DECL_P (type_or_decl))
943 decl = type_or_decl, type = TREE_TYPE (decl);
944 else
945 decl = NULL, type = type_or_decl;
947 mode = TYPE_MODE (type);
948 #ifdef PROMOTE_MODE
949 unsignedp = TYPE_UNSIGNED (type);
950 #endif
952 if (mode == BLKmode || memory_required)
954 HOST_WIDE_INT size = int_size_in_bytes (type);
955 rtx tmp;
957 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
958 problems with allocating the stack space. */
959 if (size == 0)
960 size = 1;
962 /* Unfortunately, we don't yet know how to allocate variable-sized
963 temporaries. However, sometimes we can find a fixed upper limit on
964 the size, so try that instead. */
965 else if (size == -1)
966 size = max_int_size_in_bytes (type);
968 /* The size of the temporary may be too large to fit into an integer. */
969 /* ??? Not sure this should happen except for user silliness, so limit
970 this to things that aren't compiler-generated temporaries. The
971 rest of the time we'll die in assign_stack_temp_for_type. */
972 if (decl && size == -1
973 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
975 error ("size of variable %q+D is too large", decl);
976 size = 1;
979 tmp = assign_stack_temp_for_type (mode, size, type);
980 return tmp;
983 #ifdef PROMOTE_MODE
984 if (! dont_promote)
985 mode = promote_mode (type, mode, &unsignedp);
986 #endif
988 return gen_reg_rtx (mode);
991 /* Combine temporary stack slots which are adjacent on the stack.
993 This allows for better use of already allocated stack space. This is only
994 done for BLKmode slots because we can be sure that we won't have alignment
995 problems in this case. */
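/* E.g. two freed BLKmode slots covering [0, 16) and [16, 48) are merged
   into a single slot covering [0, 48) that can satisfy a later, larger
   request.  */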
997 static void
998 combine_temp_slots (void)
1000 struct temp_slot *p, *q, *next, *next_q;
1001 int num_slots;
1003 /* We can't combine slots, because the information about which slot
1004 is in which alias set will be lost. */
1005 if (flag_strict_aliasing)
1006 return;
1008 /* If there are a lot of temp slots, don't do anything unless
1009 high levels of optimization are enabled.
1010 if (! flag_expensive_optimizations)
1011 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1012 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1013 return;
1015 for (p = avail_temp_slots; p; p = next)
1017 int delete_p = 0;
1019 next = p->next;
1021 if (GET_MODE (p->slot) != BLKmode)
1022 continue;
1024 for (q = p->next; q; q = next_q)
1026 int delete_q = 0;
1028 next_q = q->next;
1030 if (GET_MODE (q->slot) != BLKmode)
1031 continue;
1033 if (p->base_offset + p->full_size == q->base_offset)
1035 /* Q comes after P; combine Q into P. */
1036 p->size += q->size;
1037 p->full_size += q->full_size;
1038 delete_q = 1;
1040 else if (q->base_offset + q->full_size == p->base_offset)
1042 /* P comes after Q; combine P into Q. */
1043 q->size += p->size;
1044 q->full_size += p->full_size;
1045 delete_p = 1;
1046 break;
1048 if (delete_q)
1049 cut_slot_from_list (q, &avail_temp_slots);
1052 /* Either delete P or advance past it. */
1053 if (delete_p)
1054 cut_slot_from_list (p, &avail_temp_slots);
1058 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1059 slot that previously was known by OLD_RTX. */
1061 void
1062 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1064 struct temp_slot *p;
1066 if (rtx_equal_p (old_rtx, new_rtx))
1067 return;
1069 p = find_temp_slot_from_address (old_rtx);
1071 /* If we didn't find one, see if OLD_RTX is a PLUS. If so, and
1072 NEW_RTX is a register, see if one operand of the PLUS is a
1073 temporary location; if so, NEW_RTX points into it. Otherwise,
1074 if both OLD_RTX and NEW_RTX are a PLUS and there is a register
1075 in common between them, try a recursive call on those
1076 values. */
1077 if (p == 0)
1079 if (GET_CODE (old_rtx) != PLUS)
1080 return;
1082 if (REG_P (new_rtx))
1084 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1085 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1086 return;
1088 else if (GET_CODE (new_rtx) != PLUS)
1089 return;
1091 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1092 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1093 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1094 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1095 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1096 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1097 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1098 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1100 return;
1103 /* Otherwise add an alias for the temp's address. */
1104 insert_temp_slot_address (new_rtx, p);
1107 /* If X could be a reference to a temporary slot, mark that slot as
1108 belonging to the level one higher than the current level. If X
1109 matched one of our slots, just mark that one. Otherwise, we can't
1110 easily predict which it is, so upgrade all of them.
1112 This is called when an ({...}) construct occurs and a statement
1113 returns a value in memory. */
1115 void
1116 preserve_temp_slots (rtx x)
1118 struct temp_slot *p = 0, *next;
1120 if (x == 0)
1121 return;
1123 /* If X is a register that is being used as a pointer, see if we have
1124 a temporary slot we know it points to. */
1125 if (REG_P (x) && REG_POINTER (x))
1126 p = find_temp_slot_from_address (x);
1128 /* If X is not in memory or is at a constant address, it cannot be in
1129 a temporary slot. */
1130 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1131 return;
1133 /* First see if we can find a match. */
1134 if (p == 0)
1135 p = find_temp_slot_from_address (XEXP (x, 0));
1137 if (p != 0)
1139 if (p->level == temp_slot_level)
1140 move_slot_to_level (p, temp_slot_level - 1);
1141 return;
1144 /* Otherwise, preserve all non-kept slots at this level. */
1145 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1147 next = p->next;
1148 move_slot_to_level (p, temp_slot_level - 1);
1152 /* Free all temporaries used so far. This is normally called at the
1153 end of generating code for a statement. */
1155 void
1156 free_temp_slots (void)
1158 struct temp_slot *p, *next;
1159 bool some_available = false;
1161 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1163 next = p->next;
1164 make_slot_available (p);
1165 some_available = true;
1168 if (some_available)
1170 remove_unused_temp_slot_addresses ();
1171 combine_temp_slots ();
1175 /* Push deeper into the nesting level for stack temporaries. */
1177 void
1178 push_temp_slots (void)
1180 temp_slot_level++;
1183 /* Pop a temporary nesting level. All slots in use in the current level
1184 are freed. */
1186 void
1187 pop_temp_slots (void)
1189 free_temp_slots ();
1190 temp_slot_level--;
1193 /* Initialize temporary slots. */
1195 void
1196 init_temp_slots (void)
1198 /* We have not allocated any temporaries yet. */
1199 avail_temp_slots = 0;
1200 vec_alloc (used_temp_slots, 0);
1201 temp_slot_level = 0;
1202 n_temp_slots_in_use = 0;
1204 /* Set up the table to map addresses to temp slots. */
1205 if (! temp_slot_address_table)
1206 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1207 else
1208 temp_slot_address_table->empty ();
1211 /* Functions and data structures to keep track of the values hard regs
1212 had at the start of the function. */
1214 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1215 and has_hard_reg_initial_val. */
1216 typedef struct GTY(()) initial_value_pair {
1217 rtx hard_reg;
1218 rtx pseudo;
1219 } initial_value_pair;
1220 /* ??? This could be a VEC but there is currently no way to define an
1221 opaque VEC type. This could be worked around by defining struct
1222 initial_value_pair in function.h. */
1223 typedef struct GTY(()) initial_value_struct {
1224 int num_entries;
1225 int max_entries;
1226 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1227 } initial_value_struct;
1229 /* If a pseudo represents an initial hard reg (or expression), return
1230 it, else return NULL_RTX. */
1232 rtx
1233 get_hard_reg_initial_reg (rtx reg)
1235 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1236 int i;
1238 if (ivs == 0)
1239 return NULL_RTX;
1241 for (i = 0; i < ivs->num_entries; i++)
1242 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1243 return ivs->entries[i].hard_reg;
1245 return NULL_RTX;
1248 /* Make sure that there's a pseudo register of mode MODE that stores the
1249 initial value of hard register REGNO. Return an rtx for such a pseudo. */
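/* A sketch of a typical use (REGNO is a hypothetical hard register
   whose value at function entry is needed during expansion):

     rtx entry_val = get_hard_reg_initial_val (Pmode, REGNO);

   The copies from the hard registers into the pseudos are emitted at
   the function entry later, by emit_initial_value_sets below.  */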
1251 rtx
1252 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1254 struct initial_value_struct *ivs;
1255 rtx rv;
1257 rv = has_hard_reg_initial_val (mode, regno);
1258 if (rv)
1259 return rv;
1261 ivs = crtl->hard_reg_initial_vals;
1262 if (ivs == 0)
1264 ivs = ggc_alloc<initial_value_struct> ();
1265 ivs->num_entries = 0;
1266 ivs->max_entries = 5;
1267 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1268 crtl->hard_reg_initial_vals = ivs;
1271 if (ivs->num_entries >= ivs->max_entries)
1273 ivs->max_entries += 5;
1274 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1275 ivs->max_entries);
1278 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1279 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1281 return ivs->entries[ivs->num_entries++].pseudo;
1284 /* See if get_hard_reg_initial_val has been used to create a pseudo
1285 for the initial value of hard register REGNO in mode MODE. Return
1286 the associated pseudo if so, otherwise return NULL. */
1288 rtx
1289 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1291 struct initial_value_struct *ivs;
1292 int i;
1294 ivs = crtl->hard_reg_initial_vals;
1295 if (ivs != 0)
1296 for (i = 0; i < ivs->num_entries; i++)
1297 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1298 && REGNO (ivs->entries[i].hard_reg) == regno)
1299 return ivs->entries[i].pseudo;
1301 return NULL_RTX;
1304 unsigned int
1305 emit_initial_value_sets (void)
1307 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1308 int i;
1309 rtx_insn *seq;
1311 if (ivs == 0)
1312 return 0;
1314 start_sequence ();
1315 for (i = 0; i < ivs->num_entries; i++)
1316 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1317 seq = get_insns ();
1318 end_sequence ();
1320 emit_insn_at_entry (seq);
1321 return 0;
1324 /* Store the hard reg and pseudo reg of initial-values entry I in *HREG
1325 and *PREG and return TRUE, or return FALSE if I is not a valid entry. */
1326 bool
1327 initial_value_entry (int i, rtx *hreg, rtx *preg)
1329 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1330 if (!ivs || i >= ivs->num_entries)
1331 return false;
1333 *hreg = ivs->entries[i].hard_reg;
1334 *preg = ivs->entries[i].pseudo;
1335 return true;
1338 /* These routines are responsible for converting virtual register references
1339 to the actual hard register references once RTL generation is complete.
1341 The following four variables are used for communication between the
1342 routines. They contain the offsets of the virtual registers from their
1343 respective hard registers. */
1345 static int in_arg_offset;
1346 static int var_offset;
1347 static int dynamic_offset;
1348 static int out_arg_offset;
1349 static int cfa_offset;
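/* In the common case (no DRAP-based stack realignment), the mapping
   applied by instantiate_new_reg below is:
     virtual_incoming_args_rtx  -> arg_pointer_rtx   + in_arg_offset
     virtual_stack_vars_rtx     -> frame_pointer_rtx + var_offset
     virtual_stack_dynamic_rtx  -> stack_pointer_rtx + dynamic_offset
     virtual_outgoing_args_rtx  -> stack_pointer_rtx + out_arg_offset  */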
1351 /* On most machines, the stack pointer register is equivalent to the bottom
1352 of the stack. */
1354 #ifndef STACK_POINTER_OFFSET
1355 #define STACK_POINTER_OFFSET 0
1356 #endif
1358 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1359 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1360 #endif
1362 /* If not defined, pick an appropriate default for the offset of dynamically
1363 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1364 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1366 #ifndef STACK_DYNAMIC_OFFSET
1368 /* The bottom of the stack points to the actual arguments. If
1369 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1370 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1371 stack space for register parameters is not pushed by the caller, but
1372 rather part of the fixed stack areas and hence not included in
1373 `crtl->outgoing_args_size'. Nevertheless, we must allow
1374 for it when allocating stack dynamic objects. */
1376 #ifdef INCOMING_REG_PARM_STACK_SPACE
1377 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1378 ((ACCUMULATE_OUTGOING_ARGS \
1379 ? (crtl->outgoing_args_size \
1380 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1381 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1382 : 0) + (STACK_POINTER_OFFSET))
1383 #else
1384 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1385 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1386 + (STACK_POINTER_OFFSET))
1387 #endif
1388 #endif
1391 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1392 is a virtual register, return the equivalent hard register and set the
1393 offset indirectly through the pointer. Otherwise, return 0. */
1395 static rtx
1396 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1398 rtx new_rtx;
1399 HOST_WIDE_INT offset;
1401 if (x == virtual_incoming_args_rtx)
1403 if (stack_realign_drap)
1405 /* Replace virtual_incoming_args_rtx with internal arg
1406 pointer if DRAP is used to realign stack. */
1407 new_rtx = crtl->args.internal_arg_pointer;
1408 offset = 0;
1410 else
1411 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1413 else if (x == virtual_stack_vars_rtx)
1414 new_rtx = frame_pointer_rtx, offset = var_offset;
1415 else if (x == virtual_stack_dynamic_rtx)
1416 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1417 else if (x == virtual_outgoing_args_rtx)
1418 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1419 else if (x == virtual_cfa_rtx)
1421 #ifdef FRAME_POINTER_CFA_OFFSET
1422 new_rtx = frame_pointer_rtx;
1423 #else
1424 new_rtx = arg_pointer_rtx;
1425 #endif
1426 offset = cfa_offset;
1428 else if (x == virtual_preferred_stack_boundary_rtx)
1430 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1431 offset = 0;
1433 else
1434 return NULL_RTX;
1436 *poffset = offset;
1437 return new_rtx;
1440 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1441 registers present inside of *LOC. The expression is simplified,
1442 as much as possible, but is not to be considered "valid" in any sense
1443 implied by the target. Return true if any change is made. */
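/* For example, (mem (plus (reg virtual-stack-vars) (const_int 8)))
   becomes (mem (plus (reg frame-pointer) (const_int VAR_OFFSET+8))):
   the virtual register is replaced and its offset folded into the
   constant.  */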
1445 static bool
1446 instantiate_virtual_regs_in_rtx (rtx *loc)
1448 if (!*loc)
1449 return false;
1450 bool changed = false;
1451 subrtx_ptr_iterator::array_type array;
1452 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1454 rtx *loc = *iter;
1455 if (rtx x = *loc)
1457 rtx new_rtx;
1458 HOST_WIDE_INT offset;
1459 switch (GET_CODE (x))
1461 case REG:
1462 new_rtx = instantiate_new_reg (x, &offset);
1463 if (new_rtx)
1465 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1466 changed = true;
1468 iter.skip_subrtxes ();
1469 break;
1471 case PLUS:
1472 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1473 if (new_rtx)
1475 XEXP (x, 0) = new_rtx;
1476 *loc = plus_constant (GET_MODE (x), x, offset, true);
1477 changed = true;
1478 iter.skip_subrtxes ();
1479 break;
1482 /* FIXME -- from old code */
1483 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1484 we can commute the PLUS and SUBREG because pointers into the
1485 frame are well-behaved. */
1486 break;
1488 default:
1489 break;
1493 return changed;
1496 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1497 matches the predicate for insn CODE operand OPERAND. */
1499 static int
1500 safe_insn_predicate (int code, int operand, rtx x)
1502 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1505 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1506 registers present inside of INSN. The result will be a valid insn. */
1508 static void
1509 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1511 HOST_WIDE_INT offset;
1512 int insn_code, i;
1513 bool any_change = false;
1514 rtx set, new_rtx, x;
1515 rtx_insn *seq;
1517 /* There are some special cases to be handled first. */
1518 set = single_set (insn);
1519 if (set)
1521 /* We're allowed to assign to a virtual register. This is interpreted
1522 to mean that the underlying register gets assigned the inverse
1523 transformation. This is used, for example, in the handling of
1524 non-local gotos. */
1525 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1526 if (new_rtx)
1528 start_sequence ();
1530 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1531 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1532 gen_int_mode (-offset, GET_MODE (new_rtx)));
1533 x = force_operand (x, new_rtx);
1534 if (x != new_rtx)
1535 emit_move_insn (new_rtx, x);
1537 seq = get_insns ();
1538 end_sequence ();
1540 emit_insn_before (seq, insn);
1541 delete_insn (insn);
1542 return;
1545 /* Handle a straight copy from a virtual register by generating a
1546 new add insn. The difference between this and falling through
1547 to the generic case is avoiding a new pseudo and eliminating a
1548 move insn in the initial rtl stream. */
1549 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1550 if (new_rtx && offset != 0
1551 && REG_P (SET_DEST (set))
1552 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1554 start_sequence ();
1556 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1557 gen_int_mode (offset,
1558 GET_MODE (SET_DEST (set))),
1559 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1560 if (x != SET_DEST (set))
1561 emit_move_insn (SET_DEST (set), x);
1563 seq = get_insns ();
1564 end_sequence ();
1566 emit_insn_before (seq, insn);
1567 delete_insn (insn);
1568 return;
1571 extract_insn (insn);
1572 insn_code = INSN_CODE (insn);
1574 /* Handle a plus involving a virtual register by determining if the
1575 operands remain valid if they're modified in place. */
1576 if (GET_CODE (SET_SRC (set)) == PLUS
1577 && recog_data.n_operands >= 3
1578 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1579 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1580 && CONST_INT_P (recog_data.operand[2])
1581 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1583 offset += INTVAL (recog_data.operand[2]);
1585 /* If the sum is zero, then replace with a plain move. */
1586 if (offset == 0
1587 && REG_P (SET_DEST (set))
1588 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1590 start_sequence ();
1591 emit_move_insn (SET_DEST (set), new_rtx);
1592 seq = get_insns ();
1593 end_sequence ();
1595 emit_insn_before (seq, insn);
1596 delete_insn (insn);
1597 return;
1600 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1602 /* Using validate_change and apply_change_group here leaves
1603 recog_data in an invalid state. Since we know exactly what
1604 we want to check, do those two by hand. */
1605 if (safe_insn_predicate (insn_code, 1, new_rtx)
1606 && safe_insn_predicate (insn_code, 2, x))
1608 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1609 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1610 any_change = true;
1612 /* Fall through into the regular operand fixup loop in
1613 order to take care of operands other than 1 and 2. */
1617 else
1619 extract_insn (insn);
1620 insn_code = INSN_CODE (insn);
1623 /* In the general case, we expect virtual registers to appear only in
1624 operands, and then only as either bare registers or inside memories. */
1625 for (i = 0; i < recog_data.n_operands; ++i)
1627 x = recog_data.operand[i];
1628 switch (GET_CODE (x))
1630 case MEM:
1632 rtx addr = XEXP (x, 0);
1634 if (!instantiate_virtual_regs_in_rtx (&addr))
1635 continue;
1637 start_sequence ();
1638 x = replace_equiv_address (x, addr, true);
1639 /* It may happen that the address with the virtual reg
1640 was valid (e.g. based on the virtual stack reg, which might
1641 be acceptable to the predicates with all offsets), whereas
1642 the address now isn't anymore, for instance when the address
1643 is still offsetted, but the base reg isn't virtual-stack-reg
1644 anymore. Below we would do a force_reg on the whole operand,
1645 but this insn might actually only accept memory. Hence,
1646 before doing that last resort, try to reload the address into
1647 a register, so this operand stays a MEM. */
1648 if (!safe_insn_predicate (insn_code, i, x))
1650 addr = force_reg (GET_MODE (addr), addr);
1651 x = replace_equiv_address (x, addr, true);
1653 seq = get_insns ();
1654 end_sequence ();
1655 if (seq)
1656 emit_insn_before (seq, insn);
1658 break;
1660 case REG:
1661 new_rtx = instantiate_new_reg (x, &offset);
1662 if (new_rtx == NULL)
1663 continue;
1664 if (offset == 0)
1665 x = new_rtx;
1666 else
1668 start_sequence ();
1670 /* Careful, special mode predicates may have stuff in
1671 insn_data[insn_code].operand[i].mode that isn't useful
1672 to us for computing a new value. */
1673 /* ??? Recognize address_operand and/or "p" constraints
1674 to see if (plus new offset) is valid before we put
1675 this through expand_simple_binop. */
1676 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1677 gen_int_mode (offset, GET_MODE (x)),
1678 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1679 seq = get_insns ();
1680 end_sequence ();
1681 emit_insn_before (seq, insn);
1683 break;
1685 case SUBREG:
1686 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1687 if (new_rtx == NULL)
1688 continue;
1689 if (offset != 0)
1691 start_sequence ();
1692 new_rtx = expand_simple_binop
1693 (GET_MODE (new_rtx), PLUS, new_rtx,
1694 gen_int_mode (offset, GET_MODE (new_rtx)),
1695 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1696 seq = get_insns ();
1697 end_sequence ();
1698 emit_insn_before (seq, insn);
1700 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1701 GET_MODE (new_rtx), SUBREG_BYTE (x));
1702 gcc_assert (x);
1703 break;
1705 default:
1706 continue;
1709 /* At this point, X contains the new value for the operand.
1710 Validate the new value vs the insn predicate. Note that
1711 asm insns will have insn_code -1 here. */
1712 if (!safe_insn_predicate (insn_code, i, x))
1714 start_sequence ();
1715 if (REG_P (x))
1717 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1718 x = copy_to_reg (x);
1720 else
1721 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1722 seq = get_insns ();
1723 end_sequence ();
1724 if (seq)
1725 emit_insn_before (seq, insn);
1728 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1729 any_change = true;
1732 if (any_change)
1734 /* Propagate operand changes into the duplicates. */
1735 for (i = 0; i < recog_data.n_dups; ++i)
1736 *recog_data.dup_loc[i]
1737 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1739 /* Force re-recognition of the instruction for validation. */
1740 INSN_CODE (insn) = -1;
1743 if (asm_noperands (PATTERN (insn)) >= 0)
1745 if (!check_asm_operands (PATTERN (insn)))
1747 error_for_asm (insn, "impossible constraint in %<asm%>");
1748 /* For asm goto, instead of fixing up all the edges
1749 just clear the template and clear input operands
1750 (asm goto doesn't have any output operands). */
1751 if (JUMP_P (insn))
1753 rtx asm_op = extract_asm_operands (PATTERN (insn));
1754 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1755 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1756 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1758 else
1759 delete_insn (insn);
1762 else
1764 if (recog_memoized (insn) < 0)
1765 fatal_insn_not_found (insn);
1769 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1770 do any instantiation required. */
1772 void
1773 instantiate_decl_rtl (rtx x)
1775 rtx addr;
1777 if (x == 0)
1778 return;
1780 /* If this is a CONCAT, recurse for the pieces. */
1781 if (GET_CODE (x) == CONCAT)
1783 instantiate_decl_rtl (XEXP (x, 0));
1784 instantiate_decl_rtl (XEXP (x, 1));
1785 return;
1788 /* If this is not a MEM, no need to do anything. Similarly if the
1789 address is a constant or a register that is not a virtual register. */
1790 if (!MEM_P (x))
1791 return;
1793 addr = XEXP (x, 0);
1794 if (CONSTANT_P (addr)
1795 || (REG_P (addr)
1796 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1797 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1798 return;
1800 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1803 /* Helper for instantiate_decls called via walk_tree: Process all decls
1804 in the given DECL_VALUE_EXPR. */
1806 static tree
1807 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1809 tree t = *tp;
1810 if (! EXPR_P (t))
1812 *walk_subtrees = 0;
1813 if (DECL_P (t))
1815 if (DECL_RTL_SET_P (t))
1816 instantiate_decl_rtl (DECL_RTL (t));
1817 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1818 && DECL_INCOMING_RTL (t))
1819 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1820 if ((TREE_CODE (t) == VAR_DECL
1821 || TREE_CODE (t) == RESULT_DECL)
1822 && DECL_HAS_VALUE_EXPR_P (t))
1824 tree v = DECL_VALUE_EXPR (t);
1825 walk_tree (&v, instantiate_expr, NULL, NULL);
1829 return NULL;
1832 /* Subroutine of instantiate_decls: Process all decls in the given
1833 BLOCK node and all its subblocks. */
1835 static void
1836 instantiate_decls_1 (tree let)
1838 tree t;
1840 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1842 if (DECL_RTL_SET_P (t))
1843 instantiate_decl_rtl (DECL_RTL (t));
1844 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1846 tree v = DECL_VALUE_EXPR (t);
1847 walk_tree (&v, instantiate_expr, NULL, NULL);
1851 /* Process all subblocks. */
1852 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1853 instantiate_decls_1 (t);
1856 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1857 all virtual registers in their DECL_RTL's. */
1859 static void
1860 instantiate_decls (tree fndecl)
1862 tree decl;
1863 unsigned ix;
1865 /* Process all parameters of the function. */
1866 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1868 instantiate_decl_rtl (DECL_RTL (decl));
1869 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1870 if (DECL_HAS_VALUE_EXPR_P (decl))
1872 tree v = DECL_VALUE_EXPR (decl);
1873 walk_tree (&v, instantiate_expr, NULL, NULL);
1877 if ((decl = DECL_RESULT (fndecl))
1878 && TREE_CODE (decl) == RESULT_DECL)
1880 if (DECL_RTL_SET_P (decl))
1881 instantiate_decl_rtl (DECL_RTL (decl));
1882 if (DECL_HAS_VALUE_EXPR_P (decl))
1884 tree v = DECL_VALUE_EXPR (decl);
1885 walk_tree (&v, instantiate_expr, NULL, NULL);
1889 /* Process the saved static chain if it exists. */
1890 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1891 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1892 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1894 /* Now process all variables defined in the function or its subblocks. */
1895 instantiate_decls_1 (DECL_INITIAL (fndecl));
1897 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1898 if (DECL_RTL_SET_P (decl))
1899 instantiate_decl_rtl (DECL_RTL (decl));
1900 vec_free (cfun->local_decls);
1903 /* Pass through the INSNS of function FNDECL and convert virtual register
1904 references to hard register references. */
1906 static unsigned int
1907 instantiate_virtual_regs (void)
1909 rtx_insn *insn;
1911 /* Compute the offsets to use for this function. */
1912 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1913 var_offset = STARTING_FRAME_OFFSET;
1914 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1915 out_arg_offset = STACK_POINTER_OFFSET;
1916 #ifdef FRAME_POINTER_CFA_OFFSET
1917 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1918 #else
1919 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1920 #endif
1922 /* Initialize recognition, indicating that volatile is OK. */
1923 init_recog ();
1925 /* Scan through all the insns, instantiating every virtual register still
1926 present. */
1927 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1928 if (INSN_P (insn))
1930 /* These patterns in the instruction stream can never be recognized.
1931 Fortunately, they shouldn't contain virtual registers either. */
1932 if (GET_CODE (PATTERN (insn)) == USE
1933 || GET_CODE (PATTERN (insn)) == CLOBBER
1934 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1935 continue;
1936 else if (DEBUG_INSN_P (insn))
1937 instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
1938 else
1939 instantiate_virtual_regs_in_insn (insn);
1941 if (insn->deleted ())
1942 continue;
1944 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1946 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1947 if (CALL_P (insn))
1948 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1951 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1952 instantiate_decls (current_function_decl);
1954 targetm.instantiate_decls ();
1956 /* Indicate that, from now on, assign_stack_local should use
1957 frame_pointer_rtx. */
1958 virtuals_instantiated = 1;
1960 return 0;
1963 namespace {
1965 const pass_data pass_data_instantiate_virtual_regs =
1967 RTL_PASS, /* type */
1968 "vregs", /* name */
1969 OPTGROUP_NONE, /* optinfo_flags */
1970 TV_NONE, /* tv_id */
1971 0, /* properties_required */
1972 0, /* properties_provided */
1973 0, /* properties_destroyed */
1974 0, /* todo_flags_start */
1975 0, /* todo_flags_finish */
1978 class pass_instantiate_virtual_regs : public rtl_opt_pass
1980 public:
1981 pass_instantiate_virtual_regs (gcc::context *ctxt)
1982 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
1985 /* opt_pass methods: */
1986 virtual unsigned int execute (function *)
1988 return instantiate_virtual_regs ();
1991 }; // class pass_instantiate_virtual_regs
1993 } // anon namespace
1995 rtl_opt_pass *
1996 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
1998 return new pass_instantiate_virtual_regs (ctxt);
2002 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2003 This means a type for which function calls must pass an address to the
2004 function or get an address back from the function.
2005 EXP may be a type node or an expression (whose type is tested). */
2007 int
2008 aggregate_value_p (const_tree exp, const_tree fntype)
2010 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2011 int i, regno, nregs;
2012 rtx reg;
2014 if (fntype)
2015 switch (TREE_CODE (fntype))
2017 case CALL_EXPR:
2019 tree fndecl = get_callee_fndecl (fntype);
2020 fntype = (fndecl
2021 ? TREE_TYPE (fndecl)
2022 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
2024 break;
2025 case FUNCTION_DECL:
2026 fntype = TREE_TYPE (fntype);
2027 break;
2028 case FUNCTION_TYPE:
2029 case METHOD_TYPE:
2030 break;
2031 case IDENTIFIER_NODE:
2032 fntype = NULL_TREE;
2033 break;
2034 default:
2035 /* We don't expect other tree types here. */
2036 gcc_unreachable ();
2039 if (VOID_TYPE_P (type))
2040 return 0;
2042 /* If a record should be passed the same as its first (and only) member,
2043 don't pass it as an aggregate. */
2044 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2045 return aggregate_value_p (first_field (type), fntype);
2047 /* If the front end has decided that this needs to be passed by
2048 reference, do so. */
2049 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2050 && DECL_BY_REFERENCE (exp))
2051 return 1;
2053 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2054 if (fntype && TREE_ADDRESSABLE (fntype))
2055 return 1;
2057 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2058 and thus can't be returned in registers. */
2059 if (TREE_ADDRESSABLE (type))
2060 return 1;
2062 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2063 return 1;
2065 if (targetm.calls.return_in_memory (type, fntype))
2066 return 1;
2068 /* Make sure we have suitable call-clobbered regs to return
2069 the value in; if not, we must return it in memory. */
2070 reg = hard_function_value (type, 0, fntype, 0);
2072 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2073 it is OK. */
2074 if (!REG_P (reg))
2075 return 0;
2077 regno = REGNO (reg);
2078 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2079 for (i = 0; i < nregs; i++)
2080 if (! call_used_regs[regno + i])
2081 return 1;
2083 return 0;
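/* Example of the distinction this function draws (illustrative, since
   the cutoffs are target-defined): for "int f (void)" it returns 0 on
   a typical target, because the int fits in a call-clobbered return
   register, while for "struct { char buf[64]; } g (void)" it returns
   1, because targetm.calls.return_in_memory normally forces such a
   large aggregate to be returned via a hidden address argument.  */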
2086 /* Return true if we should assign DECL a pseudo register; false if it
2087 should live on the local stack. */
2089 bool
2090 use_register_for_decl (const_tree decl)
2092 if (!targetm.calls.allocate_stack_slots_for_args ())
2093 return true;
2095 /* Honor volatile. */
2096 if (TREE_SIDE_EFFECTS (decl))
2097 return false;
2099 /* Honor addressability. */
2100 if (TREE_ADDRESSABLE (decl))
2101 return false;
2103 /* Only register-like things go in registers. */
2104 if (DECL_MODE (decl) == BLKmode)
2105 return false;
2107 /* If -ffloat-store specified, don't put explicit float variables
2108 into registers. */
2109 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2110 propagates values across these stores, and it probably shouldn't. */
2111 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2112 return false;
2114 /* If we're not interested in tracking debugging information for
2115 this decl, then we can certainly put it in a register. */
2116 if (DECL_IGNORED_P (decl))
2117 return true;
2119 if (optimize)
2120 return true;
2122 if (!DECL_REGISTER (decl))
2123 return false;
2125 switch (TREE_CODE (TREE_TYPE (decl)))
2127 case RECORD_TYPE:
2128 case UNION_TYPE:
2129 case QUAL_UNION_TYPE:
2130 /* When not optimizing, disregard register keyword for variables with
2131 types containing methods, otherwise the methods won't be callable
2132 from the debugger. */
2133 if (TYPE_METHODS (TREE_TYPE (decl)))
2134 return false;
2135 break;
2136 default:
2137 break;
2140 return true;
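/* Illustration of the -O0 rules above: "register int i;" gets a
   pseudo register because DECL_REGISTER is honored for scalars,
   whereas a C++ "register" variable whose class type has member
   functions stays on the stack so the debugger can still call those
   methods.  This describes the intent of the TYPE_METHODS check, not
   target-specific behavior.  */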
2143 /* Return true if TYPE should be passed by invisible reference. */
2145 bool
2146 pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
2147 tree type, bool named_arg)
2149 if (type)
2151 /* If this type contains non-trivial constructors, then it is
2152 forbidden for the middle-end to create any new copies. */
2153 if (TREE_ADDRESSABLE (type))
2154 return true;
2156 /* GCC post 3.4 passes *all* variable sized types by reference. */
2157 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2158 return true;
2160 /* If a record type should be passed the same as its first (and only)
2161 member, use the type and mode of that member. */
2162 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2164 type = TREE_TYPE (first_field (type));
2165 mode = TYPE_MODE (type);
2169 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2170 type, named_arg);
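/* For instance, any parameter whose TYPE_SIZE is not an INTEGER_CST
   (a variable-sized type) is forced by the check above to be passed
   by invisible reference before targetm.calls.pass_by_reference is
   even consulted; the same holds for TREE_ADDRESSABLE types, whose
   copies the middle-end must not invent.  */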
2173 /* Return true if TYPE, which is passed by reference, should be callee
2174 copied instead of caller copied. */
2176 bool
2177 reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
2178 tree type, bool named_arg)
2180 if (type && TREE_ADDRESSABLE (type))
2181 return false;
2182 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2183 named_arg);
2186 /* Structures to communicate between the subroutines of assign_parms.
2187 The first holds data persistent across all parameters, the second
2188 is cleared out for each parameter. */
2190 struct assign_parm_data_all
2192 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2193 should become a job of the target or otherwise encapsulated. */
2194 CUMULATIVE_ARGS args_so_far_v;
2195 cumulative_args_t args_so_far;
2196 struct args_size stack_args_size;
2197 tree function_result_decl;
2198 tree orig_fnargs;
2199 rtx_insn *first_conversion_insn;
2200 rtx_insn *last_conversion_insn;
2201 HOST_WIDE_INT pretend_args_size;
2202 HOST_WIDE_INT extra_pretend_bytes;
2203 int reg_parm_stack_space;
2206 struct assign_parm_data_one
2208 tree nominal_type;
2209 tree passed_type;
2210 rtx entry_parm;
2211 rtx stack_parm;
2212 machine_mode nominal_mode;
2213 machine_mode passed_mode;
2214 machine_mode promoted_mode;
2215 struct locate_and_pad_arg_data locate;
2216 int partial;
2217 BOOL_BITFIELD named_arg : 1;
2218 BOOL_BITFIELD passed_pointer : 1;
2219 BOOL_BITFIELD on_stack : 1;
2220 BOOL_BITFIELD loaded_in_reg : 1;
2223 /* A subroutine of assign_parms. Initialize ALL. */
2225 static void
2226 assign_parms_initialize_all (struct assign_parm_data_all *all)
2228 tree fntype ATTRIBUTE_UNUSED;
2230 memset (all, 0, sizeof (*all));
2232 fntype = TREE_TYPE (current_function_decl);
2234 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2235 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2236 #else
2237 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2238 current_function_decl, -1);
2239 #endif
2240 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2242 #ifdef INCOMING_REG_PARM_STACK_SPACE
2243 all->reg_parm_stack_space
2244 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2245 #endif
2248 /* If ARGS contains entries with complex types, split each such entry
2249 into two entries of the component type, modifying ARGS in place;
2250 the entry for the imaginary part is synthesized. */
2252 static void
2253 split_complex_args (vec<tree> *args)
2255 unsigned i;
2256 tree p;
2258 FOR_EACH_VEC_ELT (*args, i, p)
2260 tree type = TREE_TYPE (p);
2261 if (TREE_CODE (type) == COMPLEX_TYPE
2262 && targetm.calls.split_complex_arg (type))
2264 tree decl;
2265 tree subtype = TREE_TYPE (type);
2266 bool addressable = TREE_ADDRESSABLE (p);
2268 /* Rewrite the PARM_DECL's type with its component. */
2269 p = copy_node (p);
2270 TREE_TYPE (p) = subtype;
2271 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2272 DECL_MODE (p) = VOIDmode;
2273 DECL_SIZE (p) = NULL;
2274 DECL_SIZE_UNIT (p) = NULL;
2275 /* If this arg must go in memory, put it in a pseudo here.
2276 We can't allow it to go in memory as per normal parms,
2277 because the usual place might not have the imag part
2278 adjacent to the real part. */
2279 DECL_ARTIFICIAL (p) = addressable;
2280 DECL_IGNORED_P (p) = addressable;
2281 TREE_ADDRESSABLE (p) = 0;
2282 layout_decl (p, 0);
2283 (*args)[i] = p;
2285 /* Build a second synthetic decl. */
2286 decl = build_decl (EXPR_LOCATION (p),
2287 PARM_DECL, NULL_TREE, subtype);
2288 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2289 DECL_ARTIFICIAL (decl) = addressable;
2290 DECL_IGNORED_P (decl) = addressable;
2291 layout_decl (decl, 0);
2292 args->safe_insert (++i, decl);
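/* Sketch of the transformation (on a hypothetical target whose
   split_complex_arg hook accepts COMPLEX_TYPE): for

     void f (_Complex double z);

   the single PARM_DECL for z is replaced in the vector by a rewritten
   decl of type double for the real part, followed by a synthesized
   artificial PARM_DECL of type double for the imaginary part.  The
   pieces are recombined later by assign_parms_unsplit_complex.  */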
2297 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2298 the hidden struct return argument, and (abi willing) complex args.
2299 Return the new parameter list. */
2301 static vec<tree>
2302 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2304 tree fndecl = current_function_decl;
2305 tree fntype = TREE_TYPE (fndecl);
2306 vec<tree> fnargs = vNULL;
2307 tree arg;
2309 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2310 fnargs.safe_push (arg);
2312 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2314 /* If struct value address is treated as the first argument, make it so. */
2315 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2316 && ! cfun->returns_pcc_struct
2317 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2319 tree type = build_pointer_type (TREE_TYPE (fntype));
2320 tree decl;
2322 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2323 PARM_DECL, get_identifier (".result_ptr"), type);
2324 DECL_ARG_TYPE (decl) = type;
2325 DECL_ARTIFICIAL (decl) = 1;
2326 DECL_NAMELESS (decl) = 1;
2327 TREE_CONSTANT (decl) = 1;
2329 DECL_CHAIN (decl) = all->orig_fnargs;
2330 all->orig_fnargs = decl;
2331 fnargs.safe_insert (0, decl);
2333 all->function_result_decl = decl;
2336 /* If the target wants to split complex arguments into scalars, do so. */
2337 if (targetm.calls.split_complex_arg)
2338 split_complex_args (&fnargs);
2340 return fnargs;
2343 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2344 data for the parameter. Incorporate ABI specifics such as pass-by-
2345 reference and type promotion. */
2347 static void
2348 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2349 struct assign_parm_data_one *data)
2351 tree nominal_type, passed_type;
2352 machine_mode nominal_mode, passed_mode, promoted_mode;
2353 int unsignedp;
2355 memset (data, 0, sizeof (*data));
2357 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2358 if (!cfun->stdarg)
2359 data->named_arg = 1; /* No variadic parms. */
2360 else if (DECL_CHAIN (parm))
2361 data->named_arg = 1; /* Not the last non-variadic parm. */
2362 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2363 data->named_arg = 1; /* Only variadic ones are unnamed. */
2364 else
2365 data->named_arg = 0; /* Treat as variadic. */
2367 nominal_type = TREE_TYPE (parm);
2368 passed_type = DECL_ARG_TYPE (parm);
2370 /* Look out for errors propagating this far. Also, if the parameter's
2371 type is void then its value doesn't matter. */
2372 if (TREE_TYPE (parm) == error_mark_node
2373 /* This can happen after weird syntax errors
2374 or if an enum type is defined among the parms. */
2375 || TREE_CODE (parm) != PARM_DECL
2376 || passed_type == NULL
2377 || VOID_TYPE_P (nominal_type))
2379 nominal_type = passed_type = void_type_node;
2380 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2381 goto egress;
2384 /* Find mode of arg as it is passed, and mode of arg as it should be
2385 during execution of this function. */
2386 passed_mode = TYPE_MODE (passed_type);
2387 nominal_mode = TYPE_MODE (nominal_type);
2389 /* If the parm is to be passed as a transparent union or record, use the
2390 type of the first field for the tests below. We have already verified
2391 that the modes are the same. */
2392 if ((TREE_CODE (passed_type) == UNION_TYPE
2393 || TREE_CODE (passed_type) == RECORD_TYPE)
2394 && TYPE_TRANSPARENT_AGGR (passed_type))
2395 passed_type = TREE_TYPE (first_field (passed_type));
2397 /* See if this arg was passed by invisible reference. */
2398 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2399 passed_type, data->named_arg))
2401 passed_type = nominal_type = build_pointer_type (passed_type);
2402 data->passed_pointer = true;
2403 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2406 /* Find mode as it is passed by the ABI. */
2407 unsignedp = TYPE_UNSIGNED (passed_type);
2408 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2409 TREE_TYPE (current_function_decl), 0);
2411 egress:
2412 data->nominal_type = nominal_type;
2413 data->passed_type = passed_type;
2414 data->nominal_mode = nominal_mode;
2415 data->passed_mode = passed_mode;
2416 data->promoted_mode = promoted_mode;
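/* Worked example under a hypothetical ABI that promotes sub-word
   integers: for "void f (short s)" we would leave here with
   nominal_mode == passed_mode == HImode and promoted_mode == SImode,
   while for an aggregate that pass_by_reference accepts, passed_type
   and nominal_type become the pointer type, passed_pointer is set,
   and all three modes are typically the pointer mode.  */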
2419 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2421 static void
2422 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2423 struct assign_parm_data_one *data, bool no_rtl)
2425 int varargs_pretend_bytes = 0;
2427 targetm.calls.setup_incoming_varargs (all->args_so_far,
2428 data->promoted_mode,
2429 data->passed_type,
2430 &varargs_pretend_bytes, no_rtl);
2432 /* If the back-end has requested extra stack space, record how much is
2433 needed. Do not change pretend_args_size otherwise since it may be
2434 nonzero from an earlier partial argument. */
2435 if (varargs_pretend_bytes > 0)
2436 all->pretend_args_size = varargs_pretend_bytes;
2439 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2440 the incoming location of the current parameter. */
2442 static void
2443 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2444 struct assign_parm_data_one *data)
2446 HOST_WIDE_INT pretend_bytes = 0;
2447 rtx entry_parm;
2448 bool in_regs;
2450 if (data->promoted_mode == VOIDmode)
2452 data->entry_parm = data->stack_parm = const0_rtx;
2453 return;
2456 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2457 data->promoted_mode,
2458 data->passed_type,
2459 data->named_arg);
2461 if (entry_parm == 0)
2462 data->promoted_mode = data->passed_mode;
2464 /* Determine parm's home in the stack, in case it arrives in the stack
2465 or we should pretend it did. Compute the stack position and rtx where
2466 the argument arrives and its size.
2468 There is one complexity here: If this was a parameter that would
2469 have been passed in registers, but wasn't only because it is
2470 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2471 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2472 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2473 as it was the previous time. */
2474 in_regs = entry_parm != 0;
2475 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2476 in_regs = true;
2477 #endif
2478 if (!in_regs && !data->named_arg)
2480 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2482 rtx tem;
2483 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2484 data->promoted_mode,
2485 data->passed_type, true);
2486 in_regs = tem != NULL;
2490 /* If this parameter was passed both in registers and in the stack, use
2491 the copy on the stack. */
2492 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2493 data->passed_type))
2494 entry_parm = 0;
2496 if (entry_parm)
2498 int partial;
2500 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2501 data->promoted_mode,
2502 data->passed_type,
2503 data->named_arg);
2504 data->partial = partial;
2506 /* The caller might already have allocated stack space for the
2507 register parameters. */
2508 if (partial != 0 && all->reg_parm_stack_space == 0)
2510 /* Part of this argument is passed in registers and part
2511 is passed on the stack. Ask the prologue code to extend
2512 the stack part so that we can recreate the full value.
2514 PRETEND_BYTES is the size of the registers we need to store.
2515 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2516 stack space that the prologue should allocate.
2518 Internally, gcc assumes that the argument pointer is aligned
2519 to STACK_BOUNDARY bits. This is used both for alignment
2520 optimizations (see init_emit) and to locate arguments that are
2521 aligned to more than PARM_BOUNDARY bits. We must preserve this
2522 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2523 a stack boundary. */
2525 /* We assume at most one partial arg, and it must be the first
2526 argument on the stack. */
2527 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2529 pretend_bytes = partial;
2530 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2532 /* We want to align relative to the actual stack pointer, so
2533 don't include this in the stack size until later. */
2534 all->extra_pretend_bytes = all->pretend_args_size;
2538 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2539 all->reg_parm_stack_space,
2540 entry_parm ? data->partial : 0, current_function_decl,
2541 &all->stack_args_size, &data->locate);
2543 /* Update parm_stack_boundary if this parameter is passed in the
2544 stack. */
2545 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2546 crtl->parm_stack_boundary = data->locate.boundary;
2548 /* Adjust offsets to include the pretend args. */
2549 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2550 data->locate.slot_offset.constant += pretend_bytes;
2551 data->locate.offset.constant += pretend_bytes;
2553 data->entry_parm = entry_parm;
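/* Numeric illustration of the rounding above: with STACK_BYTES == 8
   and a partial argument whose register part is 4 bytes,
   pretend_bytes == 4 and pretend_args_size == CEIL_ROUND (4, 8) == 8;
   the difference of 4 bytes is added back into slot_offset and offset
   just above, preserving the STACK_BOUNDARY alignment of the argument
   pointer.  */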
2556 /* A subroutine of assign_parms. If there is actually space on the stack
2557 for this parm, count it in stack_args_size and return true. */
2559 static bool
2560 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2561 struct assign_parm_data_one *data)
2563 /* Trivially true if we've no incoming register. */
2564 if (data->entry_parm == NULL)
2566 /* Also true if we're partially in registers and partially not,
2567 since we've arranged to drop the entire argument on the stack. */
2568 else if (data->partial != 0)
2570 /* Also true if the target says that it's passed in both registers
2571 and on the stack. */
2572 else if (GET_CODE (data->entry_parm) == PARALLEL
2573 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2575 /* Also true if the target says that there's stack allocated for
2576 all register parameters. */
2577 else if (all->reg_parm_stack_space > 0)
2579 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2580 else
2581 return false;
2583 all->stack_args_size.constant += data->locate.size.constant;
2584 if (data->locate.size.var)
2585 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2587 return true;
2590 /* A subroutine of assign_parms. Given that this parameter is allocated
2591 stack space by the ABI, find it. */
2593 static void
2594 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2596 rtx offset_rtx, stack_parm;
2597 unsigned int align, boundary;
2599 /* If we're passing this arg using a reg, make its stack home the
2600 aligned stack slot. */
2601 if (data->entry_parm)
2602 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2603 else
2604 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2606 stack_parm = crtl->args.internal_arg_pointer;
2607 if (offset_rtx != const0_rtx)
2608 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2609 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2611 if (!data->passed_pointer)
2613 set_mem_attributes (stack_parm, parm, 1);
2614 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2615 while promoted mode's size is needed. */
2616 if (data->promoted_mode != BLKmode
2617 && data->promoted_mode != DECL_MODE (parm))
2619 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2620 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2622 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2623 data->promoted_mode);
2624 if (offset)
2625 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2630 boundary = data->locate.boundary;
2631 align = BITS_PER_UNIT;
2633 /* If we're padding upward, we know that the alignment of the slot
2634 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2635 intentionally forcing upward padding. Otherwise we have to come
2636 up with a guess at the alignment based on OFFSET_RTX. */
2637 if (data->locate.where_pad != downward || data->entry_parm)
2638 align = boundary;
2639 else if (CONST_INT_P (offset_rtx))
2641 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2642 align = align & -align;
2644 set_mem_align (stack_parm, align);
2646 if (data->entry_parm)
2647 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2649 data->stack_parm = stack_parm;
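/* Numeric illustration of the alignment guess above: with
   boundary == 64 and offset_rtx == (const_int 4), we compute
   align = 4 * BITS_PER_UNIT | 64 == 96, and align & -align keeps only
   the lowest set bit, yielding 32 -- the strongest alignment that can
   be promised for a slot 4 bytes past a 64-bit-aligned base.  */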
2652 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2653 always valid and contiguous. */
2655 static void
2656 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2658 rtx entry_parm = data->entry_parm;
2659 rtx stack_parm = data->stack_parm;
2661 /* If this parm was passed part in regs and part in memory, pretend it
2662 arrived entirely in memory by pushing the register-part onto the stack.
2663 In the special case of a DImode or DFmode that is split, we could put
2664 it together in a pseudoreg directly, but for now that's not worth
2665 bothering with. */
2666 if (data->partial != 0)
2668 /* Handle calls that pass values in multiple non-contiguous
2669 locations. The Irix 6 ABI has examples of this. */
2670 if (GET_CODE (entry_parm) == PARALLEL)
2671 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2672 data->passed_type,
2673 int_size_in_bytes (data->passed_type));
2674 else
2676 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2677 move_block_from_reg (REGNO (entry_parm),
2678 validize_mem (copy_rtx (stack_parm)),
2679 data->partial / UNITS_PER_WORD);
2682 entry_parm = stack_parm;
2685 /* If we didn't decide this parm came in a register, by default it came
2686 on the stack. */
2687 else if (entry_parm == NULL)
2688 entry_parm = stack_parm;
2690 /* When an argument is passed in multiple locations, we can't make use
2691 of this information, but we can save some copying if the whole argument
2692 is passed in a single register. */
2693 else if (GET_CODE (entry_parm) == PARALLEL
2694 && data->nominal_mode != BLKmode
2695 && data->passed_mode != BLKmode)
2697 size_t i, len = XVECLEN (entry_parm, 0);
2699 for (i = 0; i < len; i++)
2700 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2701 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2702 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2703 == data->passed_mode)
2704 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2706 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2707 break;
2711 data->entry_parm = entry_parm;
2714 /* A subroutine of assign_parms. Reconstitute any values which were
2715 passed in multiple registers and would fit in a single register. */
2717 static void
2718 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2720 rtx entry_parm = data->entry_parm;
2722 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2723 This can be done with register operations rather than on the
2724 stack, even if we will store the reconstituted parameter on the
2725 stack later. */
2726 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2728 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2729 emit_group_store (parmreg, entry_parm, data->passed_type,
2730 GET_MODE_SIZE (GET_MODE (entry_parm)));
2731 entry_parm = parmreg;
2734 data->entry_parm = entry_parm;
2737 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2738 always valid and properly aligned. */
2740 static void
2741 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2743 rtx stack_parm = data->stack_parm;
2745 /* If we can't trust the parm stack slot to be aligned enough for its
2746 ultimate type, don't use that slot after entry. We'll make another
2747 stack slot, if we need one. */
2748 if (stack_parm
2749 && ((STRICT_ALIGNMENT
2750 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2751 || (data->nominal_type
2752 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2753 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2754 stack_parm = NULL;
2756 /* If parm was passed in memory, and we need to convert it on entry,
2757 don't store it back in that same slot. */
2758 else if (data->entry_parm == stack_parm
2759 && data->nominal_mode != BLKmode
2760 && data->nominal_mode != data->passed_mode)
2761 stack_parm = NULL;
2763 /* If stack protection is in effect for this function, don't leave any
2764 pointers in their passed stack slots. */
2765 else if (crtl->stack_protect_guard
2766 && (flag_stack_protect == 2
2767 || data->passed_pointer
2768 || POINTER_TYPE_P (data->nominal_type)))
2769 stack_parm = NULL;
2771 data->stack_parm = stack_parm;
2774 /* A subroutine of assign_parms. Return true if the current parameter
2775 should be stored as a BLKmode in the current frame. */
2777 static bool
2778 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2780 if (data->nominal_mode == BLKmode)
2781 return true;
2782 if (GET_MODE (data->entry_parm) == BLKmode)
2783 return true;
2785 #ifdef BLOCK_REG_PADDING
2786 /* Only assign_parm_setup_block knows how to deal with register arguments
2787 that are padded at the least significant end. */
2788 if (REG_P (data->entry_parm)
2789 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2790 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2791 == (BYTES_BIG_ENDIAN ? upward : downward)))
2792 return true;
2793 #endif
2795 return false;
2798 /* A subroutine of assign_parms. Arrange for the parameter to be
2799 present and valid in DATA->STACK_RTL. */
2801 static void
2802 assign_parm_setup_block (struct assign_parm_data_all *all,
2803 tree parm, struct assign_parm_data_one *data)
2805 rtx entry_parm = data->entry_parm;
2806 rtx stack_parm = data->stack_parm;
2807 HOST_WIDE_INT size;
2808 HOST_WIDE_INT size_stored;
2810 if (GET_CODE (entry_parm) == PARALLEL)
2811 entry_parm = emit_group_move_into_temps (entry_parm);
2813 size = int_size_in_bytes (data->passed_type);
2814 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2815 if (stack_parm == 0)
2817 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2818 stack_parm = assign_stack_local (BLKmode, size_stored,
2819 DECL_ALIGN (parm));
2820 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2821 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2822 set_mem_attributes (stack_parm, parm, 1);
2825 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2826 calls that pass values in multiple non-contiguous locations. */
2827 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2829 rtx mem;
2831 /* Note that we will be storing an integral number of words.
2832 So we have to be careful to ensure that we allocate an
2833 integral number of words. We do this above when we call
2834 assign_stack_local if space was not allocated in the argument
2835 list. If it was, this will not work if PARM_BOUNDARY is not
2836 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2837 if it becomes a problem. The exception is when BLKmode arrives
2838 with arguments not conforming to word_mode. */
2840 if (data->stack_parm == 0)
2841 ;
2842 else if (GET_CODE (entry_parm) == PARALLEL)
2843 ;
2844 else
2845 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2847 mem = validize_mem (copy_rtx (stack_parm));
2849 /* Handle values in multiple non-contiguous locations. */
2850 if (GET_CODE (entry_parm) == PARALLEL)
2852 push_to_sequence2 (all->first_conversion_insn,
2853 all->last_conversion_insn);
2854 emit_group_store (mem, entry_parm, data->passed_type, size);
2855 all->first_conversion_insn = get_insns ();
2856 all->last_conversion_insn = get_last_insn ();
2857 end_sequence ();
2860 else if (size == 0)
2861 ;
2863 /* If SIZE is that of a mode no bigger than a word, just use
2864 that mode's store operation. */
2865 else if (size <= UNITS_PER_WORD)
2867 machine_mode mode
2868 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2870 if (mode != BLKmode
2871 #ifdef BLOCK_REG_PADDING
2872 && (size == UNITS_PER_WORD
2873 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2874 != (BYTES_BIG_ENDIAN ? upward : downward)))
2875 #endif
2878 rtx reg;
2880 /* We are really truncating a word_mode value containing
2881 SIZE bytes into a value of mode MODE. If such an
2882 operation requires no actual instructions, we can refer
2883 to the value directly in mode MODE, otherwise we must
2884 start with the register in word_mode and explicitly
2885 convert it. */
2886 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2887 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2888 else
2890 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2891 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2893 emit_move_insn (change_address (mem, mode, 0), reg);
2896 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2897 machine must be aligned to the left before storing
2898 to memory. Note that the previous test doesn't
2899 handle all cases (e.g. SIZE == 3). */
2900 else if (size != UNITS_PER_WORD
2901 #ifdef BLOCK_REG_PADDING
2902 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2903 == downward)
2904 #else
2905 && BYTES_BIG_ENDIAN
2906 #endif
2909 rtx tem, x;
2910 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2911 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2913 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
2914 tem = change_address (mem, word_mode, 0);
2915 emit_move_insn (tem, x);
2917 else
2918 move_block_from_reg (REGNO (entry_parm), mem,
2919 size_stored / UNITS_PER_WORD);
2921 else
2922 move_block_from_reg (REGNO (entry_parm), mem,
2923 size_stored / UNITS_PER_WORD);
2925 else if (data->stack_parm == 0)
2927 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2928 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2929 BLOCK_OP_NORMAL);
2930 all->first_conversion_insn = get_insns ();
2931 all->last_conversion_insn = get_last_insn ();
2932 end_sequence ();
2935 data->stack_parm = stack_parm;
2936 SET_DECL_RTL (parm, stack_parm);
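/* Numeric illustration of the big-endian case above: for a 3-byte
   value with UNITS_PER_WORD == 4, no MODE_INT mode of 24 bits exists,
   so the register is shifted left by (4 - 3) * BITS_PER_UNIT == 8
   bits before the word_mode store, placing the 3 meaningful bytes at
   the most significant end of the slot as such ABIs require.  */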
2939 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2940 parameter. Get it there. Perform all ABI specified conversions. */
2942 static void
2943 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2944 struct assign_parm_data_one *data)
2946 rtx parmreg, validated_mem;
2947 rtx equiv_stack_parm;
2948 machine_mode promoted_nominal_mode;
2949 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2950 bool did_conversion = false;
2951 bool need_conversion, moved;
2953 /* Store the parm in a pseudoregister during the function, but we may
2954 need to do it in a wider mode. Using 2 here makes the result
2955 consistent with promote_decl_mode and thus expand_expr_real_1. */
2956 promoted_nominal_mode
2957 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
2958 TREE_TYPE (current_function_decl), 2);
2960 parmreg = gen_reg_rtx (promoted_nominal_mode);
2962 if (!DECL_ARTIFICIAL (parm))
2963 mark_user_reg (parmreg);
2965 /* If this was an item that we received a pointer to,
2966 set DECL_RTL appropriately. */
2967 if (data->passed_pointer)
2969 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2970 set_mem_attributes (x, parm, 1);
2971 SET_DECL_RTL (parm, x);
2973 else
2974 SET_DECL_RTL (parm, parmreg);
2976 assign_parm_remove_parallels (data);
2978 /* Copy the value into the register, thus bridging between
2979 assign_parm_find_data_types and expand_expr_real_1. */
2981 equiv_stack_parm = data->stack_parm;
2982 validated_mem = validize_mem (copy_rtx (data->entry_parm));
2984 need_conversion = (data->nominal_mode != data->passed_mode
2985 || promoted_nominal_mode != data->promoted_mode);
2986 moved = false;
2988 if (need_conversion
2989 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2990 && data->nominal_mode == data->passed_mode
2991 && data->nominal_mode == GET_MODE (data->entry_parm))
2993 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2994 mode, by the caller. We now have to convert it to
2995 NOMINAL_MODE, if different. However, PARMREG may be in
2996 a different mode than NOMINAL_MODE if it is being stored
2997 promoted.
2999 If ENTRY_PARM is a hard register, it might be in a register
3000 not valid for operating in its mode (e.g., an odd-numbered
3001 register for a DFmode). In that case, moves are the only
3002 thing valid, so we can't do a convert from there. This
3003 occurs when the calling sequence allow such misaligned
3004 usages.
3006 In addition, the conversion may involve a call, which could
3007 clobber parameters which haven't been copied to pseudo
3008 registers yet.
3010 First, we try to emit an insn which performs the necessary
3011 conversion. We verify that this insn does not clobber any
3012 hard registers. */
3014 enum insn_code icode;
3015 rtx op0, op1;
3017 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3018 unsignedp);
3020 op0 = parmreg;
3021 op1 = validated_mem;
3022 if (icode != CODE_FOR_nothing
3023 && insn_operand_matches (icode, 0, op0)
3024 && insn_operand_matches (icode, 1, op1))
3026 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3027 rtx_insn *insn, *insns;
3028 rtx t = op1;
3029 HARD_REG_SET hardregs;
3031 start_sequence ();
3032 /* If op1 is a hard register that is likely spilled, first
3033 force it into a pseudo, otherwise the combiner might extend
3034 its lifetime too much. */
3035 if (GET_CODE (t) == SUBREG)
3036 t = SUBREG_REG (t);
3037 if (REG_P (t)
3038 && HARD_REGISTER_P (t)
3039 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3040 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3042 t = gen_reg_rtx (GET_MODE (op1));
3043 emit_move_insn (t, op1);
3045 else
3046 t = op1;
3047 rtx pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3048 data->passed_mode, unsignedp);
3049 emit_insn (pat);
3050 insns = get_insns ();
3052 moved = true;
3053 CLEAR_HARD_REG_SET (hardregs);
3054 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3056 if (INSN_P (insn))
3057 note_stores (PATTERN (insn), record_hard_reg_sets,
3058 &hardregs);
3059 if (!hard_reg_set_empty_p (hardregs))
3060 moved = false;
3063 end_sequence ();
3065 if (moved)
3067 emit_insn (insns);
3068 if (equiv_stack_parm != NULL_RTX)
3069 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3070 equiv_stack_parm);
3075 if (moved)
3076 /* Nothing to do. */
3077 ;
3078 else if (need_conversion)
3080 /* We did not have an insn to convert directly, or the sequence
3081 generated appeared unsafe. We must first copy the parm to a
3082 pseudo reg, and save the conversion until after all
3083 parameters have been moved. */
3085 int save_tree_used;
3086 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3088 emit_move_insn (tempreg, validated_mem);
3090 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3091 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3093 if (GET_CODE (tempreg) == SUBREG
3094 && GET_MODE (tempreg) == data->nominal_mode
3095 && REG_P (SUBREG_REG (tempreg))
3096 && data->nominal_mode == data->passed_mode
3097 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3098 && GET_MODE_SIZE (GET_MODE (tempreg))
3099 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3101 /* The argument is already sign/zero extended, so note it
3102 into the subreg. */
3103 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3104 SUBREG_PROMOTED_SET (tempreg, unsignedp);
3107 /* TREE_USED gets set erroneously during expand_assignment. */
3108 save_tree_used = TREE_USED (parm);
3109 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3110 TREE_USED (parm) = save_tree_used;
3111 all->first_conversion_insn = get_insns ();
3112 all->last_conversion_insn = get_last_insn ();
3113 end_sequence ();
3115 did_conversion = true;
3117 else
3118 emit_move_insn (parmreg, validated_mem);
3120 /* If we were passed a pointer but the actual value can safely live
3121 in a register, retrieve it and use it directly. */
3122 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3124 /* We can't use nominal_mode, because it will have been set to
3125 Pmode above. We must use the actual mode of the parm. */
3126 if (use_register_for_decl (parm))
3128 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3129 mark_user_reg (parmreg);
3131 else
3133 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3134 TYPE_MODE (TREE_TYPE (parm)),
3135 TYPE_ALIGN (TREE_TYPE (parm)));
3136 parmreg
3137 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3138 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3139 align);
3140 set_mem_attributes (parmreg, parm, 1);
3143 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3145 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3146 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3148 push_to_sequence2 (all->first_conversion_insn,
3149 all->last_conversion_insn);
3150 emit_move_insn (tempreg, DECL_RTL (parm));
3151 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3152 emit_move_insn (parmreg, tempreg);
3153 all->first_conversion_insn = get_insns ();
3154 all->last_conversion_insn = get_last_insn ();
3155 end_sequence ();
3157 did_conversion = true;
3159 else
3160 emit_move_insn (parmreg, DECL_RTL (parm));
3162 SET_DECL_RTL (parm, parmreg);
3164 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3165 now the parm. */
3166 data->stack_parm = NULL;
3169 /* Mark the register as eliminable if we did no conversion and it was
3170 copied from memory at a fixed offset, and the arg pointer was not
3171 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3172 offset formed an invalid address, such memory-equivalences as we
3173 make here would screw up life analysis for it. */
3174 if (data->nominal_mode == data->passed_mode
3175 && !did_conversion
3176 && data->stack_parm != 0
3177 && MEM_P (data->stack_parm)
3178 && data->locate.offset.var == 0
3179 && reg_mentioned_p (virtual_incoming_args_rtx,
3180 XEXP (data->stack_parm, 0)))
3182 rtx_insn *linsn = get_last_insn ();
3183 rtx_insn *sinsn;
3184 rtx set;
3186 /* Mark complex types separately. */
3187 if (GET_CODE (parmreg) == CONCAT)
3189 machine_mode submode
3190 = GET_MODE_INNER (GET_MODE (parmreg));
3191 int regnor = REGNO (XEXP (parmreg, 0));
3192 int regnoi = REGNO (XEXP (parmreg, 1));
3193 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3194 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3195 GET_MODE_SIZE (submode));
3197 /* Scan backwards for the set of the real and
3198 imaginary parts. */
3199 for (sinsn = linsn; sinsn != 0;
3200 sinsn = prev_nonnote_insn (sinsn))
3202 set = single_set (sinsn);
3203 if (set == 0)
3204 continue;
3206 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3207 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3208 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3209 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3212 else
3213 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3216 /* For pointer data type, suggest pointer register. */
3217 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3218 mark_reg_pointer (parmreg,
3219 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3222 /* A subroutine of assign_parms. Allocate stack space to hold the current
3223 parameter. Get it there. Perform all ABI specified conversions. */
3225 static void
3226 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3227 struct assign_parm_data_one *data)
3229 /* Value must be stored in the stack slot STACK_PARM during function
3230 execution. */
3231 bool to_conversion = false;
3233 assign_parm_remove_parallels (data);
3235 if (data->promoted_mode != data->nominal_mode)
3237 /* Conversion is required. */
3238 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3240 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3242 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3243 to_conversion = true;
3245 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3246 TYPE_UNSIGNED (TREE_TYPE (parm)));
3248 if (data->stack_parm)
3250 int offset = subreg_lowpart_offset (data->nominal_mode,
3251 GET_MODE (data->stack_parm));
3252 /* ??? This may need a big-endian conversion on sparc64. */
3253 data->stack_parm
3254 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3255 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3256 set_mem_offset (data->stack_parm,
3257 MEM_OFFSET (data->stack_parm) + offset);
3261 if (data->entry_parm != data->stack_parm)
3263 rtx src, dest;
3265 if (data->stack_parm == 0)
3267 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3268 GET_MODE (data->entry_parm),
3269 TYPE_ALIGN (data->passed_type));
3270 data->stack_parm
3271 = assign_stack_local (GET_MODE (data->entry_parm),
3272 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3273 align);
3274 set_mem_attributes (data->stack_parm, parm, 1);
3277 dest = validize_mem (copy_rtx (data->stack_parm));
3278 src = validize_mem (copy_rtx (data->entry_parm));
3280 if (MEM_P (src))
3282 /* Use a block move to handle potentially misaligned entry_parm. */
3283 if (!to_conversion)
3284 push_to_sequence2 (all->first_conversion_insn,
3285 all->last_conversion_insn);
3286 to_conversion = true;
3288 emit_block_move (dest, src,
3289 GEN_INT (int_size_in_bytes (data->passed_type)),
3290 BLOCK_OP_NORMAL);
3292 else
3293 emit_move_insn (dest, src);
3296 if (to_conversion)
3298 all->first_conversion_insn = get_insns ();
3299 all->last_conversion_insn = get_last_insn ();
3300 end_sequence ();
3303 SET_DECL_RTL (parm, data->stack_parm);
3306 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3307 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3309 static void
3310 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3311 vec<tree> fnargs)
3313 tree parm;
3314 tree orig_fnargs = all->orig_fnargs;
3315 unsigned i = 0;
3317 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3319 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3320 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3322 rtx tmp, real, imag;
3323 machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3325 real = DECL_RTL (fnargs[i]);
3326 imag = DECL_RTL (fnargs[i + 1]);
3327 if (inner != GET_MODE (real))
3329 real = gen_lowpart_SUBREG (inner, real);
3330 imag = gen_lowpart_SUBREG (inner, imag);
3333 if (TREE_ADDRESSABLE (parm))
3335 rtx rmem, imem;
3336 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3337 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3338 DECL_MODE (parm),
3339 TYPE_ALIGN (TREE_TYPE (parm)));
3341 /* split_complex_arg put the real and imag parts in
3342 pseudos. Move them to memory. */
3343 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3344 set_mem_attributes (tmp, parm, 1);
3345 rmem = adjust_address_nv (tmp, inner, 0);
3346 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3347 push_to_sequence2 (all->first_conversion_insn,
3348 all->last_conversion_insn);
3349 emit_move_insn (rmem, real);
3350 emit_move_insn (imem, imag);
3351 all->first_conversion_insn = get_insns ();
3352 all->last_conversion_insn = get_last_insn ();
3353 end_sequence ();
3355 else
3356 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3357 SET_DECL_RTL (parm, tmp);
3359 real = DECL_INCOMING_RTL (fnargs[i]);
3360 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3361 if (inner != GET_MODE (real))
3363 real = gen_lowpart_SUBREG (inner, real);
3364 imag = gen_lowpart_SUBREG (inner, imag);
3366 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3367 set_decl_incoming_rtl (parm, tmp, false);
3368 i++;
3373 /* Assign RTL expressions to the function's parameters. This may involve
3374 copying them into registers and using those registers as the DECL_RTL. */
3376 static void
3377 assign_parms (tree fndecl)
3379 struct assign_parm_data_all all;
3380 tree parm;
3381 vec<tree> fnargs;
3382 unsigned i;
3384 crtl->args.internal_arg_pointer
3385 = targetm.calls.internal_arg_pointer ();
3387 assign_parms_initialize_all (&all);
3388 fnargs = assign_parms_augmented_arg_list (&all);
3390 FOR_EACH_VEC_ELT (fnargs, i, parm)
3392 struct assign_parm_data_one data;
3394 /* Extract the type of PARM; adjust it according to ABI. */
3395 assign_parm_find_data_types (&all, parm, &data);
3397 /* Early out for errors and void parameters. */
3398 if (data.passed_mode == VOIDmode)
3400 SET_DECL_RTL (parm, const0_rtx);
3401 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3402 continue;
3405 /* Estimate stack alignment from parameter alignment. */
3406 if (SUPPORTS_STACK_ALIGNMENT)
3408 unsigned int align
3409 = targetm.calls.function_arg_boundary (data.promoted_mode,
3410 data.passed_type);
3411 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3412 align);
3413 if (TYPE_ALIGN (data.nominal_type) > align)
3414 align = MINIMUM_ALIGNMENT (data.nominal_type,
3415 TYPE_MODE (data.nominal_type),
3416 TYPE_ALIGN (data.nominal_type));
3417 if (crtl->stack_alignment_estimated < align)
3419 gcc_assert (!crtl->stack_realign_processed);
3420 crtl->stack_alignment_estimated = align;
3424 if (cfun->stdarg && !DECL_CHAIN (parm))
3425 assign_parms_setup_varargs (&all, &data, false);
3427 /* Find out where the parameter arrives in this function. */
3428 assign_parm_find_entry_rtl (&all, &data);
3430 /* Find out where stack space for this parameter might be. */
3431 if (assign_parm_is_stack_parm (&all, &data))
3433 assign_parm_find_stack_rtl (parm, &data);
3434 assign_parm_adjust_entry_rtl (&data);
3437 /* Record permanently how this parm was passed. */
3438 if (data.passed_pointer)
3440 rtx incoming_rtl
3441 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3442 data.entry_parm);
3443 set_decl_incoming_rtl (parm, incoming_rtl, true);
3445 else
3446 set_decl_incoming_rtl (parm, data.entry_parm, false);
3448 /* Update info on where next arg arrives in registers. */
3449 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3450 data.passed_type, data.named_arg);
3452 assign_parm_adjust_stack_rtl (&data);
3454 if (assign_parm_setup_block_p (&data))
3455 assign_parm_setup_block (&all, parm, &data);
3456 else if (data.passed_pointer || use_register_for_decl (parm))
3457 assign_parm_setup_reg (&all, parm, &data);
3458 else
3459 assign_parm_setup_stack (&all, parm, &data);
3462 if (targetm.calls.split_complex_arg)
3463 assign_parms_unsplit_complex (&all, fnargs);
3465 fnargs.release ();
3467 /* Initialize pic_offset_table_rtx with a pseudo register
3468 if required. */
3469 if (targetm.use_pseudo_pic_reg ())
3470 pic_offset_table_rtx = gen_reg_rtx (Pmode);
3472 /* Output all parameter conversion instructions (possibly including calls)
3473 now that all parameters have been copied out of hard registers. */
3474 emit_insn (all.first_conversion_insn);
3476 /* Estimate reload stack alignment from scalar return mode. */
3477 if (SUPPORTS_STACK_ALIGNMENT)
3479 if (DECL_RESULT (fndecl))
3481 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3482 machine_mode mode = TYPE_MODE (type);
3484 if (mode != BLKmode
3485 && mode != VOIDmode
3486 && !AGGREGATE_TYPE_P (type))
3488 unsigned int align = GET_MODE_ALIGNMENT (mode);
3489 if (crtl->stack_alignment_estimated < align)
3491 gcc_assert (!crtl->stack_realign_processed);
3492 crtl->stack_alignment_estimated = align;
3498 /* If we are receiving a struct value address as the first argument, set up
3499 the RTL for the function result. As this might require code to convert
3500 the transmitted address to Pmode, we do this here to ensure that possible
3501 preliminary conversions of the address have been emitted already. */
3502 if (all.function_result_decl)
3504 tree result = DECL_RESULT (current_function_decl);
3505 rtx addr = DECL_RTL (all.function_result_decl);
3506 rtx x;
3508 if (DECL_BY_REFERENCE (result))
3510 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3511 x = addr;
3513 else
3515 SET_DECL_VALUE_EXPR (result,
3516 build1 (INDIRECT_REF, TREE_TYPE (result),
3517 all.function_result_decl));
3518 addr = convert_memory_address (Pmode, addr);
3519 x = gen_rtx_MEM (DECL_MODE (result), addr);
3520 set_mem_attributes (x, result, 1);
3523 DECL_HAS_VALUE_EXPR_P (result) = 1;
3525 SET_DECL_RTL (result, x);
3528 /* We have aligned all the args, so add space for the pretend args. */
3529 crtl->args.pretend_args_size = all.pretend_args_size;
3530 all.stack_args_size.constant += all.extra_pretend_bytes;
3531 crtl->args.size = all.stack_args_size.constant;
3533 /* Adjust function incoming argument size for alignment and
3534 minimum length. */
3536 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
3537 crtl->args.size = CEIL_ROUND (crtl->args.size,
3538 PARM_BOUNDARY / BITS_PER_UNIT);
3540 #ifdef ARGS_GROW_DOWNWARD
3541 crtl->args.arg_offset_rtx
3542 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3543 : expand_expr (size_diffop (all.stack_args_size.var,
3544 size_int (-all.stack_args_size.constant)),
3545 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3546 #else
3547 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3548 #endif
3550 /* See how many bytes, if any, of its args a function should try to pop
3551 on return. */
3553 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3554 TREE_TYPE (fndecl),
3555 crtl->args.size);
3557 /* For a stdarg.h function, save info about
3558 regs and stack space used by the named args. */
3560 crtl->args.info = all.args_so_far_v;
3562 /* Set the rtx used for the function return value. Put this in its
3563 own variable so any optimizers that need this information don't have
3564 to include tree.h. Do this here so it gets done when an inlined
3565 function gets output. */
3567 crtl->return_rtx
3568 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3569 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3571 /* If scalar return value was computed in a pseudo-reg, or was a named
3572 return value that got dumped to the stack, copy that to the hard
3573 return register. */
3574 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3576 tree decl_result = DECL_RESULT (fndecl);
3577 rtx decl_rtl = DECL_RTL (decl_result);
3579 if (REG_P (decl_rtl)
3580 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3581 : DECL_REGISTER (decl_result))
3583 rtx real_decl_rtl;
3585 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3586 fndecl, true);
3587 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3588 /* The delay slot scheduler assumes that crtl->return_rtx
3589 holds the hard register containing the return value, not a
3590 temporary pseudo. */
3591 crtl->return_rtx = real_decl_rtl;
3596 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3597 For all seen types, gimplify their sizes. */
3599 static tree
3600 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3602 tree t = *tp;
3604 *walk_subtrees = 0;
3605 if (TYPE_P (t))
3607 if (POINTER_TYPE_P (t))
3608 *walk_subtrees = 1;
3609 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3610 && !TYPE_SIZES_GIMPLIFIED (t))
3612 gimplify_type_sizes (t, (gimple_seq *) data);
3613 *walk_subtrees = 1;
3617 return NULL;
3620 /* Gimplify the parameter list for current_function_decl. This involves
3621 evaluating SAVE_EXPRs of variable sized parameters and generating code
3622 to implement callee-copies reference parameters. Returns a sequence of
3623 statements to add to the beginning of the function. */
3625 gimple_seq
3626 gimplify_parameters (void)
3628 struct assign_parm_data_all all;
3629 tree parm;
3630 gimple_seq stmts = NULL;
3631 vec<tree> fnargs;
3632 unsigned i;
3634 assign_parms_initialize_all (&all);
3635 fnargs = assign_parms_augmented_arg_list (&all);
3637 FOR_EACH_VEC_ELT (fnargs, i, parm)
3639 struct assign_parm_data_one data;
3641 /* Extract the type of PARM; adjust it according to ABI. */
3642 assign_parm_find_data_types (&all, parm, &data);
3644 /* Early out for errors and void parameters. */
3645 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3646 continue;
3648 /* Update info on where next arg arrives in registers. */
3649 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3650 data.passed_type, data.named_arg);
3652 /* ??? Once upon a time variable_size stuffed parameter list
3653 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3654 turned out to be less than manageable in the gimple world.
3655 Now we have to hunt them down ourselves. */
3656 walk_tree_without_duplicates (&data.passed_type,
3657 gimplify_parm_type, &stmts);
3659 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3661 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3662 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3665 if (data.passed_pointer)
3667 tree type = TREE_TYPE (data.passed_type);
3668 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
3669 type, data.named_arg))
3671 tree local, t;
3673 /* For constant-sized objects, this is trivial; for
3674 variable-sized objects, we have to play games. */
3675 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3676 && !(flag_stack_check == GENERIC_STACK_CHECK
3677 && compare_tree_int (DECL_SIZE_UNIT (parm),
3678 STACK_CHECK_MAX_VAR_SIZE) > 0))
3680 local = create_tmp_var (type, get_name (parm));
3681 DECL_IGNORED_P (local) = 0;
3682 /* If PARM was addressable, move that flag over
3683 to the local copy, as its address will be taken,
3684 not the PARM's. Keep the PARM's address-taken flag set,
3685 as we'll query that flag during gimplification. */
3686 if (TREE_ADDRESSABLE (parm))
3687 TREE_ADDRESSABLE (local) = 1;
3688 else if (TREE_CODE (type) == COMPLEX_TYPE
3689 || TREE_CODE (type) == VECTOR_TYPE)
3690 DECL_GIMPLE_REG_P (local) = 1;
3692 else
3694 tree ptr_type, addr;
3696 ptr_type = build_pointer_type (type);
3697 addr = create_tmp_reg (ptr_type, get_name (parm));
3698 DECL_IGNORED_P (addr) = 0;
3699 local = build_fold_indirect_ref (addr);
3701 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3702 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
3703 size_int (DECL_ALIGN (parm)));
3705 /* The call has been built for a variable-sized object. */
3706 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3707 t = fold_convert (ptr_type, t);
3708 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3709 gimplify_and_add (t, &stmts);
3712 gimplify_assign (local, parm, &stmts);
3714 SET_DECL_VALUE_EXPR (parm, local);
3715 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3720 fnargs.release ();
3722 return stmts;
3725 /* Compute the size and offset from the start of the stacked arguments for a
3726 parm passed in mode PASSED_MODE and with type TYPE.
3728 INITIAL_OFFSET_PTR points to the current offset into the stacked
3729 arguments.
3731 The starting offset and size for this parm are returned in
3732 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3733 nonzero, the offset is that of the stack slot, which is returned in
3734 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3735 padding required from the initial offset ptr to the stack slot.
3737 IN_REGS is nonzero if the argument will be passed in registers. It will
3738 never be set if REG_PARM_STACK_SPACE is not defined.
3740 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3741 for arguments which are passed in registers.
3743 FNDECL is the function in which the argument was defined.
3745 There are two types of rounding that are done. The first, controlled by
3746 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3747 argument list to be aligned to the specific boundary (in bits). This
3748 rounding affects the initial and starting offsets, but not the argument
3749 size.
3751 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3752 optionally rounds the size of the parm to PARM_BOUNDARY. The
3753 initial offset is not affected by this rounding, while the size always
3754 is and the starting offset may be. */
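/* A worked example with made-up numbers: assume PARM_BOUNDARY == 32,
   a function_arg_boundary of 64 bits, and a 6-byte parm arriving when
   *INITIAL_OFFSET_PTR == 4.  The first rounding moves the starting
   offset up from 4 to 8, the next 64-bit boundary; the second rounds
   the 6-byte size up to 8, the next multiple of PARM_BOUNDARY.  The
   following parm therefore starts at offset 16.  */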
3756 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3757 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3758 callers pass in the total size of args so far as
3759 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3761 void
3762 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
3763 int reg_parm_stack_space, int partial,
3764 tree fndecl ATTRIBUTE_UNUSED,
3765 struct args_size *initial_offset_ptr,
3766 struct locate_and_pad_arg_data *locate)
3768 tree sizetree;
3769 enum direction where_pad;
3770 unsigned int boundary, round_boundary;
3771 int part_size_in_regs;
3773 /* If we have found a stack parm before we reach the end of the
3774 area reserved for registers, skip that area. */
3775 if (! in_regs)
3777 if (reg_parm_stack_space > 0)
3779 if (initial_offset_ptr->var)
3781 initial_offset_ptr->var
3782 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3783 ssize_int (reg_parm_stack_space));
3784 initial_offset_ptr->constant = 0;
3786 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3787 initial_offset_ptr->constant = reg_parm_stack_space;
3791 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3793 sizetree
3794 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3795 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3796 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
3797 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3798 type);
3799 locate->where_pad = where_pad;
3801 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3802 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3803 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3805 locate->boundary = boundary;
3807 if (SUPPORTS_STACK_ALIGNMENT)
3809 /* stack_alignment_estimated can't change after stack has been
3810 realigned. */
3811 if (crtl->stack_alignment_estimated < boundary)
3813 if (!crtl->stack_realign_processed)
3814 crtl->stack_alignment_estimated = boundary;
3815 else
3817 /* If stack is realigned and stack alignment value
3818 hasn't been finalized, it is OK not to increase
3819 stack_alignment_estimated. The bigger alignment
3820 requirement is recorded in stack_alignment_needed
3821 below. */
3822 gcc_assert (!crtl->stack_realign_finalized
3823 && crtl->stack_realign_needed);
3828 /* Remember if the outgoing parameter requires extra alignment on the
3829 calling function side. */
3830 if (crtl->stack_alignment_needed < boundary)
3831 crtl->stack_alignment_needed = boundary;
3832 if (crtl->preferred_stack_boundary < boundary)
3833 crtl->preferred_stack_boundary = boundary;
3835 #ifdef ARGS_GROW_DOWNWARD
3836 locate->slot_offset.constant = -initial_offset_ptr->constant;
3837 if (initial_offset_ptr->var)
3838 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3839 initial_offset_ptr->var);
3842 tree s2 = sizetree;
3843 if (where_pad != none
3844 && (!tree_fits_uhwi_p (sizetree)
3845 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
3846 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
3847 SUB_PARM_SIZE (locate->slot_offset, s2);
3850 locate->slot_offset.constant += part_size_in_regs;
3852 if (!in_regs || reg_parm_stack_space > 0)
3853 pad_to_arg_alignment (&locate->slot_offset, boundary,
3854 &locate->alignment_pad);
3856 locate->size.constant = (-initial_offset_ptr->constant
3857 - locate->slot_offset.constant);
3858 if (initial_offset_ptr->var)
3859 locate->size.var = size_binop (MINUS_EXPR,
3860 size_binop (MINUS_EXPR,
3861 ssize_int (0),
3862 initial_offset_ptr->var),
3863 locate->slot_offset.var);
3865 /* Pad_below needs the pre-rounded size to know how much to pad
3866 below. */
3867 locate->offset = locate->slot_offset;
3868 if (where_pad == downward)
3869 pad_below (&locate->offset, passed_mode, sizetree);
3871 #else /* !ARGS_GROW_DOWNWARD */
3872 if (!in_regs || reg_parm_stack_space > 0)
3873 pad_to_arg_alignment (initial_offset_ptr, boundary,
3874 &locate->alignment_pad);
3875 locate->slot_offset = *initial_offset_ptr;
3877 #ifdef PUSH_ROUNDING
3878 if (passed_mode != BLKmode)
3879 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3880 #endif
3882 /* Pad_below needs the pre-rounded size to know how much to pad below
3883 so this must be done before rounding up. */
3884 locate->offset = locate->slot_offset;
3885 if (where_pad == downward)
3886 pad_below (&locate->offset, passed_mode, sizetree);
3888 if (where_pad != none
3889 && (!tree_fits_uhwi_p (sizetree)
3890 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
3891 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
3893 ADD_PARM_SIZE (locate->size, sizetree);
3895 locate->size.constant -= part_size_in_regs;
3896 #endif /* ARGS_GROW_DOWNWARD */
3898 #ifdef FUNCTION_ARG_OFFSET
3899 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3900 #endif
3903 /* Round the stack offset in *OFFSET_PTR up (down, if ARGS_GROW_DOWNWARD)
3904 to a multiple of BOUNDARY. BOUNDARY is in bits, but must be a multiple of a storage unit. */
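/* For instance, with hypothetical values BOUNDARY == 64 (so
   boundary_in_bytes == 8), STACK_POINTER_OFFSET == 4 and a constant
   offset of 13: 13 + 4 == 17 is rounded up to 24, and 24 - 4 == 20 is
   stored back, making the padded offset plus STACK_POINTER_OFFSET a
   multiple of 8 bytes.  */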
3906 static void
3907 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3908 struct args_size *alignment_pad)
3910 tree save_var = NULL_TREE;
3911 HOST_WIDE_INT save_constant = 0;
3912 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3913 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3915 #ifdef SPARC_STACK_BOUNDARY_HACK
3916 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3917 the real alignment of %sp. However, when it does this, the
3918 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3919 if (SPARC_STACK_BOUNDARY_HACK)
3920 sp_offset = 0;
3921 #endif
3923 if (boundary > PARM_BOUNDARY)
3925 save_var = offset_ptr->var;
3926 save_constant = offset_ptr->constant;
3929 alignment_pad->var = NULL_TREE;
3930 alignment_pad->constant = 0;
3932 if (boundary > BITS_PER_UNIT)
3934 if (offset_ptr->var)
3936 tree sp_offset_tree = ssize_int (sp_offset);
3937 tree offset = size_binop (PLUS_EXPR,
3938 ARGS_SIZE_TREE (*offset_ptr),
3939 sp_offset_tree);
3940 #ifdef ARGS_GROW_DOWNWARD
3941 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3942 #else
3943 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3944 #endif
3946 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3947 /* ARGS_SIZE_TREE includes constant term. */
3948 offset_ptr->constant = 0;
3949 if (boundary > PARM_BOUNDARY)
3950 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3951 save_var);
3953 else
3955 offset_ptr->constant = -sp_offset +
3956 #ifdef ARGS_GROW_DOWNWARD
3957 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3958 #else
3959 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3960 #endif
3961 if (boundary > PARM_BOUNDARY)
3962 alignment_pad->constant = offset_ptr->constant - save_constant;
3967 static void
3968 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
3970 if (passed_mode != BLKmode)
3972 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3973 offset_ptr->constant
3974 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3975 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3976 - GET_MODE_SIZE (passed_mode));
3978 else
3980 if (TREE_CODE (sizetree) != INTEGER_CST
3981 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3983 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
3984 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3985 /* Add it in. */
3986 ADD_PARM_SIZE (*offset_ptr, s2);
3987 SUB_PARM_SIZE (*offset_ptr, sizetree);
3993 /* True if register REGNO was alive at a place where `setjmp' was
3994 called and was set more than once or is an argument. Such regs may
3995 be clobbered by `longjmp'. */
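/* The situation being detected looks, at the source level, roughly
   like this:

       int x = 1;                 x lives in a call-saved register
       if (setjmp (buf) == 0)
         {
           x = 2;
           foo ();                foo () may longjmp (buf, 1)
         }
       use (x);                   x may read as 1 or 2 here

   since longjmp may restore call-saved registers to the values they
   held when setjmp was called.  */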
3997 static bool
3998 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4000 /* There appear to be cases where some local vars never reach the
4001 backend but have bogus regnos. */
4002 if (regno >= max_reg_num ())
4003 return false;
4005 return ((REG_N_SETS (regno) > 1
4006 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4007 regno))
4008 && REGNO_REG_SET_P (setjmp_crosses, regno));
4011 /* Walk the tree of blocks describing the binding levels within a
4012 function and warn about variables that might be killed by setjmp or
4013 vfork. This is done after flow analysis and before register
4014 allocation, since register allocation will replace the pseudo-regs
4015 with hard regs. */
4017 static void
4018 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4020 tree decl, sub;
4022 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4024 if (TREE_CODE (decl) == VAR_DECL
4025 && DECL_RTL_SET_P (decl)
4026 && REG_P (DECL_RTL (decl))
4027 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4028 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4029 " %<longjmp%> or %<vfork%>", decl);
4032 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4033 setjmp_vars_warning (setjmp_crosses, sub);
4036 /* Do the appropriate part of setjmp_vars_warning
4037 but for arguments instead of local variables. */
4039 static void
4040 setjmp_args_warning (bitmap setjmp_crosses)
4042 tree decl;
4043 for (decl = DECL_ARGUMENTS (current_function_decl);
4044 decl; decl = DECL_CHAIN (decl))
4045 if (DECL_RTL (decl) != 0
4046 && REG_P (DECL_RTL (decl))
4047 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4048 warning (OPT_Wclobbered,
4049 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4050 decl);
4053 /* Generate warning messages for variables live across setjmp. */
4055 void
4056 generate_setjmp_warnings (void)
4058 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4060 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4061 || bitmap_empty_p (setjmp_crosses))
4062 return;
4064 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4065 setjmp_args_warning (setjmp_crosses);
4069 /* Reverse the order of elements in the fragment chain T of blocks,
4070 and return the new head of the chain (old last element).
4071 In addition to that, clear BLOCK_SAME_RANGE flags when needed
4072 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4073 its super fragment origin. */
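/* Illustration: fragments are prepended as they are created, so a
   block with fragments F1, F2, F3 (F1 created first) arrives here as

       ORIGIN -> F3 -> F2 -> F1

   and leaves as ORIGIN -> F1 -> F2 -> F3, i.e. in the order their
   address ranges appear, with F1 returned as the new chain head.  */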
4075 static tree
4076 block_fragments_nreverse (tree t)
4078 tree prev = 0, block, next, prev_super = 0;
4079 tree super = BLOCK_SUPERCONTEXT (t);
4080 if (BLOCK_FRAGMENT_ORIGIN (super))
4081 super = BLOCK_FRAGMENT_ORIGIN (super);
4082 for (block = t; block; block = next)
4084 next = BLOCK_FRAGMENT_CHAIN (block);
4085 BLOCK_FRAGMENT_CHAIN (block) = prev;
4086 if ((prev && !BLOCK_SAME_RANGE (prev))
4087 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4088 != prev_super))
4089 BLOCK_SAME_RANGE (block) = 0;
4090 prev_super = BLOCK_SUPERCONTEXT (block);
4091 BLOCK_SUPERCONTEXT (block) = super;
4092 prev = block;
4094 t = BLOCK_FRAGMENT_ORIGIN (t);
4095 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4096 != prev_super)
4097 BLOCK_SAME_RANGE (t) = 0;
4098 BLOCK_SUPERCONTEXT (t) = super;
4099 return prev;
4102 /* Reverse the order of elements in the chain T of blocks,
4103 and return the new head of the chain (old last element).
4104 Also do the same on subblocks and reverse the order of elements
4105 in BLOCK_FRAGMENT_CHAIN as well. */
4107 static tree
4108 blocks_nreverse_all (tree t)
4110 tree prev = 0, block, next;
4111 for (block = t; block; block = next)
4113 next = BLOCK_CHAIN (block);
4114 BLOCK_CHAIN (block) = prev;
4115 if (BLOCK_FRAGMENT_CHAIN (block)
4116 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4118 BLOCK_FRAGMENT_CHAIN (block)
4119 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4120 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4121 BLOCK_SAME_RANGE (block) = 0;
4123 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4124 prev = block;
4126 return prev;
4130 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4131 and create duplicate blocks. */
4132 /* ??? Need an option to either create block fragments or to create
4133 abstract origin duplicates of a source block. It really depends
4134 on what optimization has been performed. */
4136 void
4137 reorder_blocks (void)
4139 tree block = DECL_INITIAL (current_function_decl);
4141 if (block == NULL_TREE)
4142 return;
4144 auto_vec<tree, 10> block_stack;
4146 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4147 clear_block_marks (block);
4149 /* Prune the old trees away, so that they don't get in the way. */
4150 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4151 BLOCK_CHAIN (block) = NULL_TREE;
4153 /* Recreate the block tree from the note nesting. */
4154 reorder_blocks_1 (get_insns (), block, &block_stack);
4155 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4158 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4160 void
4161 clear_block_marks (tree block)
4163 while (block)
4165 TREE_ASM_WRITTEN (block) = 0;
4166 clear_block_marks (BLOCK_SUBBLOCKS (block));
4167 block = BLOCK_CHAIN (block);
4171 static void
4172 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4173 vec<tree> *p_block_stack)
4175 rtx_insn *insn;
4176 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4178 for (insn = insns; insn; insn = NEXT_INSN (insn))
4180 if (NOTE_P (insn))
4182 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4184 tree block = NOTE_BLOCK (insn);
4185 tree origin;
4187 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4188 origin = block;
4190 if (prev_end)
4191 BLOCK_SAME_RANGE (prev_end) = 0;
4192 prev_end = NULL_TREE;
4194 /* If we have seen this block before, that means it now
4195 spans multiple address regions. Create a new fragment. */
4196 if (TREE_ASM_WRITTEN (block))
4198 tree new_block = copy_node (block);
4200 BLOCK_SAME_RANGE (new_block) = 0;
4201 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4202 BLOCK_FRAGMENT_CHAIN (new_block)
4203 = BLOCK_FRAGMENT_CHAIN (origin);
4204 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4206 NOTE_BLOCK (insn) = new_block;
4207 block = new_block;
4210 if (prev_beg == current_block && prev_beg)
4211 BLOCK_SAME_RANGE (block) = 1;
4213 prev_beg = origin;
4215 BLOCK_SUBBLOCKS (block) = 0;
4216 TREE_ASM_WRITTEN (block) = 1;
4217 /* When there's only one block for the entire function,
4218 current_block == block and we mustn't do this, it
4219 will cause infinite recursion. */
4220 if (block != current_block)
4222 tree super;
4223 if (block != origin)
4224 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4225 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4226 (origin))
4227 == current_block);
4228 if (p_block_stack->is_empty ())
4229 super = current_block;
4230 else
4232 super = p_block_stack->last ();
4233 gcc_assert (super == current_block
4234 || BLOCK_FRAGMENT_ORIGIN (super)
4235 == current_block);
4237 BLOCK_SUPERCONTEXT (block) = super;
4238 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4239 BLOCK_SUBBLOCKS (current_block) = block;
4240 current_block = origin;
4242 p_block_stack->safe_push (block);
4244 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4246 NOTE_BLOCK (insn) = p_block_stack->pop ();
4247 current_block = BLOCK_SUPERCONTEXT (current_block);
4248 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4249 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4250 prev_beg = NULL_TREE;
4251 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4252 ? NOTE_BLOCK (insn) : NULL_TREE;
4255 else
4257 prev_beg = NULL_TREE;
4258 if (prev_end)
4259 BLOCK_SAME_RANGE (prev_end) = 0;
4260 prev_end = NULL_TREE;
4265 /* Reverse the order of elements in the chain T of blocks,
4266 and return the new head of the chain (old last element). */
4268 tree
4269 blocks_nreverse (tree t)
4271 tree prev = 0, block, next;
4272 for (block = t; block; block = next)
4274 next = BLOCK_CHAIN (block);
4275 BLOCK_CHAIN (block) = prev;
4276 prev = block;
4278 return prev;
4281 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4282 by modifying the last node in chain 1 to point to chain 2. */
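/* E.g. block_chainon (A->B, C->D) walks OP1 to its last node B, sets
   BLOCK_CHAIN (B) = C and returns A, producing A->B->C->D.  OP2 must
   not already be a tail of OP1; the checking loop below verifies that
   no cycle was created.  */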
4284 tree
4285 block_chainon (tree op1, tree op2)
4287 tree t1;
4289 if (!op1)
4290 return op2;
4291 if (!op2)
4292 return op1;
4294 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4295 continue;
4296 BLOCK_CHAIN (t1) = op2;
4298 #ifdef ENABLE_TREE_CHECKING
4300 tree t2;
4301 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4302 gcc_assert (t2 != t1);
4304 #endif
4306 return op1;
4309 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4310 non-NULL, list them all into VECTOR, in a depth-first preorder
4311 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4312 blocks. */
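/* For example, a block tree

       B0
        +- B1
        |   +- B2
        +- B3

   is recorded in preorder as VECTOR = { B0, B1, B2, B3 } and 4 is
   returned, descending through BLOCK_SUBBLOCKS and walking siblings
   through BLOCK_CHAIN.  */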
4314 static int
4315 all_blocks (tree block, tree *vector)
4317 int n_blocks = 0;
4319 while (block)
4321 TREE_ASM_WRITTEN (block) = 0;
4323 /* Record this block. */
4324 if (vector)
4325 vector[n_blocks] = block;
4327 ++n_blocks;
4329 /* Record the subblocks, and their subblocks... */
4330 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4331 vector ? vector + n_blocks : 0);
4332 block = BLOCK_CHAIN (block);
4335 return n_blocks;
4338 /* Return a vector containing all the blocks rooted at BLOCK. The
4339 number of elements in the vector is stored in N_BLOCKS_P. The
4340 vector is dynamically allocated; it is the caller's responsibility
4341 to call `free' on the pointer returned. */
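/* Typical use, as in number_blocks below:

       int n_blocks;
       tree *v = get_block_vector (DECL_INITIAL (fn), &n_blocks);
       ... visit v[0] .. v[n_blocks - 1] ...
       free (v);

   The two-pass scheme (count, then fill) avoids reallocation.  */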
4343 static tree *
4344 get_block_vector (tree block, int *n_blocks_p)
4346 tree *block_vector;
4348 *n_blocks_p = all_blocks (block, NULL);
4349 block_vector = XNEWVEC (tree, *n_blocks_p);
4350 all_blocks (block, block_vector);
4352 return block_vector;
4355 static GTY(()) int next_block_index = 2;
4357 /* Set BLOCK_NUMBER for all the blocks in FN. */
4359 void
4360 number_blocks (tree fn)
4362 int i;
4363 int n_blocks;
4364 tree *block_vector;
4366 /* For SDB and XCOFF debugging output, we start numbering the blocks
4367 from 1 within each function, rather than keeping a running
4368 count. */
4369 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4370 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4371 next_block_index = 1;
4372 #endif
4374 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4376 /* The top-level BLOCK isn't numbered at all. */
4377 for (i = 1; i < n_blocks; ++i)
4378 /* We number the blocks from two. */
4379 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4381 free (block_vector);
4383 return;
4386 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4388 DEBUG_FUNCTION tree
4389 debug_find_var_in_block_tree (tree var, tree block)
4391 tree t;
4393 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4394 if (t == var)
4395 return block;
4397 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4399 tree ret = debug_find_var_in_block_tree (var, t);
4400 if (ret)
4401 return ret;
4404 return NULL_TREE;
4407 /* Keep track of whether we're in a dummy function context. If we are,
4408 we don't want to invoke the set_current_function hook, because we'll
4409 get into trouble if the hook calls target_reinit () recursively or
4410 when initialization is not yet complete. */
4412 static bool in_dummy_function;
4414 /* Invoke the target hook when setting cfun. Update the optimization options
4415 if the function uses different options than the default. */
4417 static void
4418 invoke_set_current_function_hook (tree fndecl)
4420 if (!in_dummy_function)
4422 tree opts = ((fndecl)
4423 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4424 : optimization_default_node);
4426 if (!opts)
4427 opts = optimization_default_node;
4429 /* Change optimization options if needed. */
4430 if (optimization_current_node != opts)
4432 optimization_current_node = opts;
4433 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4436 targetm.set_current_function (fndecl);
4437 this_fn_optabs = this_target_optabs;
4439 if (opts != optimization_default_node)
4441 init_tree_optimization_optabs (opts);
4442 if (TREE_OPTIMIZATION_OPTABS (opts))
4443 this_fn_optabs = (struct target_optabs *)
4444 TREE_OPTIMIZATION_OPTABS (opts);
4449 /* cfun should never be set directly; use this function. */
4451 void
4452 set_cfun (struct function *new_cfun)
4454 if (cfun != new_cfun)
4456 cfun = new_cfun;
4457 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4461 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4463 static vec<function_p> cfun_stack;
4465 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4466 current_function_decl accordingly. */
4468 void
4469 push_cfun (struct function *new_cfun)
4471 gcc_assert ((!cfun && !current_function_decl)
4472 || (cfun && current_function_decl == cfun->decl));
4473 cfun_stack.safe_push (cfun);
4474 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4475 set_cfun (new_cfun);
4478 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4480 void
4481 pop_cfun (void)
4483 struct function *new_cfun = cfun_stack.pop ();
4484 /* When in_dummy_function, we do have a cfun but current_function_decl is
4485 NULL. We also allow pushing NULL cfun and subsequently changing
4486 current_function_decl to something else and have both restored by
4487 pop_cfun. */
4488 gcc_checking_assert (in_dummy_function
4489 || !cfun
4490 || current_function_decl == cfun->decl);
4491 set_cfun (new_cfun);
4492 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4495 /* Return the current value of funcdef_no and increment it. */
4496 int
4497 get_next_funcdef_no (void)
4499 return funcdef_no++;
4502 /* Return the current value of funcdef_no. */
4503 int
4504 get_last_funcdef_no (void)
4506 return funcdef_no;
4509 /* Allocate a function structure for FNDECL and set its contents
4510 to the defaults. Set cfun to the newly-allocated object.
4511 Some of the helper functions invoked during initialization assume
4512 that cfun has already been set. Therefore, assign the new object
4513 directly into cfun and invoke the back end hook explicitly at the
4514 very end, rather than initializing a temporary and calling set_cfun
4515 on it.
4517 ABSTRACT_P is true if this is a function that will never be seen by
4518 the middle-end. Such functions are front-end concepts (like C++
4519 function templates) that do not correspond directly to functions
4520 placed in object files. */
4522 void
4523 allocate_struct_function (tree fndecl, bool abstract_p)
4525 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4527 cfun = ggc_cleared_alloc<function> ();
4529 init_eh_for_function ();
4531 if (init_machine_status)
4532 cfun->machine = (*init_machine_status) ();
4534 #ifdef OVERRIDE_ABI_FORMAT
4535 OVERRIDE_ABI_FORMAT (fndecl);
4536 #endif
4538 if (fndecl != NULL_TREE)
4540 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4541 cfun->decl = fndecl;
4542 current_function_funcdef_no = get_next_funcdef_no ();
4545 invoke_set_current_function_hook (fndecl);
4547 if (fndecl != NULL_TREE)
4549 tree result = DECL_RESULT (fndecl);
4550 if (!abstract_p && aggregate_value_p (result, fndecl))
4552 #ifdef PCC_STATIC_STRUCT_RETURN
4553 cfun->returns_pcc_struct = 1;
4554 #endif
4555 cfun->returns_struct = 1;
4558 cfun->stdarg = stdarg_p (fntype);
4560 /* Assume all registers in stdarg functions need to be saved. */
4561 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4562 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4564 /* ??? This could be set on a per-function basis by the front-end
4565 but is this worth the hassle? */
4566 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4567 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4569 if (!profile_flag && !flag_instrument_function_entry_exit)
4570 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4574 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4575 instead of just setting it. */
4577 void
4578 push_struct_function (tree fndecl)
4580 /* When in_dummy_function we might be in the middle of a pop_cfun and
4581 current_function_decl and cfun may not match. */
4582 gcc_assert (in_dummy_function
4583 || (!cfun && !current_function_decl)
4584 || (cfun && current_function_decl == cfun->decl));
4585 cfun_stack.safe_push (cfun);
4586 current_function_decl = fndecl;
4587 allocate_struct_function (fndecl, false);
4590 /* Reset crtl and other non-struct-function variables to defaults as
4591 appropriate for emitting rtl at the start of a function. */
4593 static void
4594 prepare_function_start (void)
4596 gcc_assert (!crtl->emit.x_last_insn);
4597 init_temp_slots ();
4598 init_emit ();
4599 init_varasm_status ();
4600 init_expr ();
4601 default_rtl_profile ();
4603 if (flag_stack_usage_info)
4605 cfun->su = ggc_cleared_alloc<stack_usage> ();
4606 cfun->su->static_stack_size = -1;
4609 cse_not_expected = ! optimize;
4611 /* Caller save not needed yet. */
4612 caller_save_needed = 0;
4614 /* We haven't done register allocation yet. */
4615 reg_renumber = 0;
4617 /* Indicate that we have not instantiated virtual registers yet. */
4618 virtuals_instantiated = 0;
4620 /* Indicate that we want CONCATs now. */
4621 generating_concat_p = 1;
4623 /* Indicate we have no need of a frame pointer yet. */
4624 frame_pointer_needed = 0;
4627 /* Initialize the rtl expansion mechanism so that we can do simple things
4628 like generate sequences. This is used to provide a context during global
4629 initialization of some passes. You must call expand_dummy_function_end
4630 to exit this context. */
4632 void
4633 init_dummy_function_start (void)
4635 gcc_assert (!in_dummy_function);
4636 in_dummy_function = true;
4637 push_struct_function (NULL_TREE);
4638 prepare_function_start ();
4641 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4642 and initialize static variables for generating RTL for the statements
4643 of the function. */
4645 void
4646 init_function_start (tree subr)
4648 if (subr && DECL_STRUCT_FUNCTION (subr))
4649 set_cfun (DECL_STRUCT_FUNCTION (subr));
4650 else
4651 allocate_struct_function (subr, false);
4653 /* Initialize backend, if needed. */
4654 initialize_rtl ();
4656 prepare_function_start ();
4657 decide_function_section (subr);
4659 /* Warn if this value is an aggregate type,
4660 regardless of which calling convention we are using for it. */
4661 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4662 warning (OPT_Waggregate_return, "function returns an aggregate");
4665 /* Expand code to verify the stack_protect_guard. This is invoked at
4666 the end of a function to be protected. */
4668 #ifndef HAVE_stack_protect_test
4669 # define HAVE_stack_protect_test 0
4670 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4671 #endif
4673 void
4674 stack_protect_epilogue (void)
4676 tree guard_decl = targetm.stack_protect_guard ();
4677 rtx_code_label *label = gen_label_rtx ();
4678 rtx x, y, tmp;
4680 x = expand_normal (crtl->stack_protect_guard);
4681 y = expand_normal (guard_decl);
4683 /* Allow the target to compare Y with X without leaking either into
4684 a register. */
4685 switch ((int) (HAVE_stack_protect_test != 0))
4687 case 1:
4688 tmp = gen_stack_protect_test (x, y, label);
4689 if (tmp)
4691 emit_insn (tmp);
4692 break;
4694 /* FALLTHRU */
4696 default:
4697 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4698 break;
4701 /* The noreturn predictor has been moved to the tree level. The rtl-level
4702 predictors estimate this branch at about 20%, which isn't enough to get
4703 things moved out of line. Since this is the only extant case of adding
4704 a noreturn function at the rtl level, it doesn't seem worth doing anything
4705 except adding the prediction by hand. */
4706 tmp = get_last_insn ();
4707 if (JUMP_P (tmp))
4708 predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);
4710 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4711 free_temp_slots ();
4712 emit_label (label);
4715 /* Start the RTL for a new function, and set variables used for
4716 emitting RTL.
4717 SUBR is the FUNCTION_DECL node. */
4721 void
4722 expand_function_start (tree subr)
4724 /* Make sure volatile mem refs aren't considered
4725 valid operands of arithmetic insns. */
4726 init_recog_no_volatile ();
4728 crtl->profile
4729 = (profile_flag
4730 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4732 crtl->limit_stack
4733 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4735 /* Make the label for return statements to jump to. Do not special
4736 case machines with special return instructions -- they will be
4737 handled later during jump, ifcvt, or epilogue creation. */
4738 return_label = gen_label_rtx ();
4740 /* Initialize rtx used to return the value. */
4741 /* Do this before assign_parms so that we copy the struct value address
4742 before any library calls that assign parms might generate. */
4744 /* Decide whether to return the value in memory or in a register. */
4745 if (aggregate_value_p (DECL_RESULT (subr), subr))
4747 /* Returning something that won't go in a register. */
4748 rtx value_address = 0;
4750 #ifdef PCC_STATIC_STRUCT_RETURN
4751 if (cfun->returns_pcc_struct)
4753 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4754 value_address = assemble_static_space (size);
4756 else
4757 #endif
4759 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4760 /* Expect to be passed the address of a place to store the value.
4761 If it is passed as an argument, assign_parms will take care of
4762 it. */
4763 if (sv)
4765 value_address = gen_reg_rtx (Pmode);
4766 emit_move_insn (value_address, sv);
4769 if (value_address)
4771 rtx x = value_address;
4772 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4774 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4775 set_mem_attributes (x, DECL_RESULT (subr), 1);
4777 SET_DECL_RTL (DECL_RESULT (subr), x);
4780 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4781 /* If return mode is void, this decl rtl should not be used. */
4782 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4783 else
4785 /* Compute the return values into a pseudo reg, which we will copy
4786 into the true return register after the cleanups are done. */
4787 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4788 if (TYPE_MODE (return_type) != BLKmode
4789 && targetm.calls.return_in_msb (return_type))
4790 /* expand_function_end will insert the appropriate padding in
4791 this case. Use the return value's natural (unpadded) mode
4792 within the function proper. */
4793 SET_DECL_RTL (DECL_RESULT (subr),
4794 gen_reg_rtx (TYPE_MODE (return_type)));
4795 else
4797 /* In order to figure out what mode to use for the pseudo, we
4798 figure out what the mode of the eventual return register will
4799 actually be, and use that. */
4800 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4802 /* Structures that are returned in registers are not
4803 aggregate_value_p, so we may see a PARALLEL or a REG. */
4804 if (REG_P (hard_reg))
4805 SET_DECL_RTL (DECL_RESULT (subr),
4806 gen_reg_rtx (GET_MODE (hard_reg)));
4807 else
4809 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4810 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4814 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4815 result to the real return register(s). */
4816 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4819 /* Initialize rtx for parameters and local variables.
4820 In some cases this requires emitting insns. */
4821 assign_parms (subr);
4823 /* If function gets a static chain arg, store it. */
4824 if (cfun->static_chain_decl)
4826 tree parm = cfun->static_chain_decl;
4827 rtx local, chain, insn;
4829 local = gen_reg_rtx (Pmode);
4830 chain = targetm.calls.static_chain (current_function_decl, true);
4832 set_decl_incoming_rtl (parm, chain, false);
4833 SET_DECL_RTL (parm, local);
4834 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4836 insn = emit_move_insn (local, chain);
4838 /* Mark the register as eliminable, similar to parameters. */
4839 if (MEM_P (chain)
4840 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
4841 set_dst_reg_note (insn, REG_EQUIV, chain, local);
4843 /* If we aren't optimizing, save the static chain onto the stack. */
4844 if (!optimize)
4846 tree saved_static_chain_decl
4847 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
4848 DECL_NAME (parm), TREE_TYPE (parm));
4849 rtx saved_static_chain_rtx
4850 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4851 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
4852 emit_move_insn (saved_static_chain_rtx, chain);
4853 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
4854 DECL_HAS_VALUE_EXPR_P (parm) = 1;
4858 /* If the function receives a non-local goto, then store the
4859 bits we need to restore the frame pointer. */
4860 if (cfun->nonlocal_goto_save_area)
4862 tree t_save;
4863 rtx r_save;
4865 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4866 gcc_assert (DECL_RTL_SET_P (var));
4868 t_save = build4 (ARRAY_REF,
4869 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
4870 cfun->nonlocal_goto_save_area,
4871 integer_zero_node, NULL_TREE, NULL_TREE);
4872 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4873 gcc_assert (GET_MODE (r_save) == Pmode);
4875 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4876 update_nonlocal_goto_save_area ();
4879 /* The following was moved from init_function_start.
4880 The move is supposed to make sdb output more accurate. */
4881 /* Indicate the beginning of the function body,
4882 as opposed to parm setup. */
4883 emit_note (NOTE_INSN_FUNCTION_BEG);
4885 gcc_assert (NOTE_P (get_last_insn ()));
4887 parm_birth_insn = get_last_insn ();
4889 if (crtl->profile)
4891 #ifdef PROFILE_HOOK
4892 PROFILE_HOOK (current_function_funcdef_no);
4893 #endif
4896 /* If we are doing generic stack checking, the probe should go here. */
4897 if (flag_stack_check == GENERIC_STACK_CHECK)
4898 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4901 /* Undo the effects of init_dummy_function_start. */
4902 void
4903 expand_dummy_function_end (void)
4905 gcc_assert (in_dummy_function);
4907 /* End any sequences that failed to be closed due to syntax errors. */
4908 while (in_sequence_p ())
4909 end_sequence ();
4911 /* Outside a function body, we can't compute a type's actual size
4912 until the next function's body starts. */
4914 free_after_parsing (cfun);
4915 free_after_compilation (cfun);
4916 pop_cfun ();
4917 in_dummy_function = false;
4920 /* Call DOIT for each hard register used as a return value from
4921 the current function. */
4923 void
4924 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4926 rtx outgoing = crtl->return_rtx;
4928 if (! outgoing)
4929 return;
4931 if (REG_P (outgoing))
4932 (*doit) (outgoing, arg);
4933 else if (GET_CODE (outgoing) == PARALLEL)
4935 int i;
4937 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4939 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4941 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4942 (*doit) (x, arg);
4947 static void
4948 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4950 emit_clobber (reg);
4953 void
4954 clobber_return_register (void)
4956 diddle_return_value (do_clobber_return_reg, NULL);
4958 /* In case we do use pseudo to return value, clobber it too. */
4959 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4961 tree decl_result = DECL_RESULT (current_function_decl);
4962 rtx decl_rtl = DECL_RTL (decl_result);
4963 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4965 do_clobber_return_reg (decl_rtl, NULL);
4970 static void
4971 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4973 emit_use (reg);
4976 static void
4977 use_return_register (void)
4979 diddle_return_value (do_use_return_reg, NULL);
4982 /* Possibly warn about unused parameters. */
4983 void
4984 do_warn_unused_parameter (tree fn)
4986 tree decl;
4988 for (decl = DECL_ARGUMENTS (fn);
4989 decl; decl = DECL_CHAIN (decl))
4990 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4991 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4992 && !TREE_NO_WARNING (decl))
4993 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4996 /* Set the location of the insn chain starting at INSN to LOC. */
4998 static void
4999 set_insn_locations (rtx_insn *insn, int loc)
5001 while (insn != NULL)
5003 if (INSN_P (insn))
5004 INSN_LOCATION (insn) = loc;
5005 insn = NEXT_INSN (insn);
5009 /* Generate RTL for the end of the current function. */
5011 void
5012 expand_function_end (void)
5014 rtx clobber_after;
5016 /* If arg_pointer_save_area was referenced only from a nested
5017 function, we will not have initialized it yet. Do that now. */
5018 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5019 get_arg_pointer_save_area ();
5021 /* If we are doing generic stack checking and this function makes calls,
5022 do a stack probe at the start of the function to ensure we have enough
5023 space for another stack frame. */
5024 if (flag_stack_check == GENERIC_STACK_CHECK)
5026 rtx_insn *insn, *seq;
5028 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5029 if (CALL_P (insn))
5031 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5032 start_sequence ();
5033 if (STACK_CHECK_MOVING_SP)
5034 anti_adjust_stack_and_probe (max_frame_size, true);
5035 else
5036 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5037 seq = get_insns ();
5038 end_sequence ();
5039 set_insn_locations (seq, prologue_location);
5040 emit_insn_before (seq, stack_check_probe_note);
5041 break;
5045 /* End any sequences that failed to be closed due to syntax errors. */
5046 while (in_sequence_p ())
5047 end_sequence ();
5049 clear_pending_stack_adjust ();
5050 do_pending_stack_adjust ();
5052 /* Output a line number for the end of the function.
5053 SDB depends on this. */
5054 set_curr_insn_location (input_location);
5056 /* Before the return label (if any), clobber the return
5057 registers so that they are not propagated live to the rest of
5058 the function. This can only happen with functions that drop
5059 through; if there had been a return statement, there would
5060 have either been a return rtx, or a jump to the return label.
5062 We delay actual code generation until after the
5063 current_function_value_rtx is computed. */
5064 clobber_after = get_last_insn ();
5066 /* Output the label for the actual return from the function. */
5067 emit_label (return_label);
5069 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5071 /* Let except.c know where it should emit the call to unregister
5072 the function context for sjlj exceptions. */
5073 if (flag_exceptions)
5074 sjlj_emit_function_exit_after (get_last_insn ());
5076 else
5078 /* We want to ensure that instructions that may trap are not
5079 moved into the epilogue by scheduling, because we don't
5080 always emit unwind information for the epilogue. */
5081 if (cfun->can_throw_non_call_exceptions)
5082 emit_insn (gen_blockage ());
5085 /* If this is an implementation of throw, do what's necessary to
5086 communicate between __builtin_eh_return and the epilogue. */
5087 expand_eh_return ();
5089 /* If scalar return value was computed in a pseudo-reg, or was a named
5090 return value that got dumped to the stack, copy that to the hard
5091 return register. */
5092 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5094 tree decl_result = DECL_RESULT (current_function_decl);
5095 rtx decl_rtl = DECL_RTL (decl_result);
5097 if (REG_P (decl_rtl)
5098 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5099 : DECL_REGISTER (decl_result))
5101 rtx real_decl_rtl = crtl->return_rtx;
5103 /* This should be set in assign_parms. */
5104 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5106 /* If this is a BLKmode structure being returned in registers,
5107 then use the mode computed in expand_return. Note that if
5108 decl_rtl is memory, then its mode may have been changed,
5109 but that crtl->return_rtx has not. */
5110 if (GET_MODE (real_decl_rtl) == BLKmode)
5111 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5113 /* If a non-BLKmode return value should be padded at the least
5114 significant end of the register, shift it left by the appropriate
5115 amount. BLKmode results are handled using the group load/store
5116 machinery. */
5117 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5118 && REG_P (real_decl_rtl)
5119 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5121 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5122 REGNO (real_decl_rtl)),
5123 decl_rtl);
5124 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5126 /* If a named return value dumped decl_result to memory, then
5127 we may need to re-do the PROMOTE_MODE signed/unsigned
5128 extension. */
5129 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5131 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5132 promote_function_mode (TREE_TYPE (decl_result),
5133 GET_MODE (decl_rtl), &unsignedp,
5134 TREE_TYPE (current_function_decl), 1);
5136 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5138 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5140 /* If expand_function_start has created a PARALLEL for decl_rtl,
5141 move the result to the real return registers. Otherwise, do
5142 a group load from decl_rtl for a named return. */
5143 if (GET_CODE (decl_rtl) == PARALLEL)
5144 emit_group_move (real_decl_rtl, decl_rtl);
5145 else
5146 emit_group_load (real_decl_rtl, decl_rtl,
5147 TREE_TYPE (decl_result),
5148 int_size_in_bytes (TREE_TYPE (decl_result)));
5150 /* In the case of complex integer modes smaller than a word, we'll
5151 need to generate some non-trivial bitfield insertions. Do that
5152 on a pseudo and not the hard register. */
5153 else if (GET_CODE (decl_rtl) == CONCAT
5154 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5155 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5157 int old_generating_concat_p;
5158 rtx tmp;
5160 old_generating_concat_p = generating_concat_p;
5161 generating_concat_p = 0;
5162 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5163 generating_concat_p = old_generating_concat_p;
5165 emit_move_insn (tmp, decl_rtl);
5166 emit_move_insn (real_decl_rtl, tmp);
5168 else
5169 emit_move_insn (real_decl_rtl, decl_rtl);
5173 /* If returning a structure, arrange to return the address of the value
5174 in a place where debuggers expect to find it.
5176 If returning a structure PCC style,
5177 the caller also depends on this value.
5178 And cfun->returns_pcc_struct is not necessarily set. */
5179 if (cfun->returns_struct
5180 || cfun->returns_pcc_struct)
5182 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5183 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5184 rtx outgoing;
5186 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5187 type = TREE_TYPE (type);
5188 else
5189 value_address = XEXP (value_address, 0);
5191 outgoing = targetm.calls.function_value (build_pointer_type (type),
5192 current_function_decl, true);
5194 /* Mark this as a function return value so integrate will delete the
5195 assignment and USE below when inlining this function. */
5196 REG_FUNCTION_VALUE_P (outgoing) = 1;
5198 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5199 value_address = convert_memory_address (GET_MODE (outgoing),
5200 value_address);
5202 emit_move_insn (outgoing, value_address);
5204 /* Show the return register used to hold the result (in this case the
5205 address of the result). */
5206 crtl->return_rtx = outgoing;
5209 /* Emit the actual code to clobber the return register. Don't emit
5210 it if clobber_after is a barrier; in that case the previous basic
5211 block certainly doesn't fall thru into the exit block. */
5212 if (!BARRIER_P (clobber_after))
5214 rtx seq;
5216 start_sequence ();
5217 clobber_return_register ();
5218 seq = get_insns ();
5219 end_sequence ();
5221 emit_insn_after (seq, clobber_after);
5224 /* Output the label for the naked return from the function. */
5225 if (naked_return_label)
5226 emit_label (naked_return_label);
5228 /* @@@ This is a kludge. We want to ensure that instructions that
5229 may trap are not moved into the epilogue by scheduling, because
5230 we don't always emit unwind information for the epilogue. */
5231 if (cfun->can_throw_non_call_exceptions
5232 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5233 emit_insn (gen_blockage ());
5235 /* If stack protection is enabled for this function, check the guard. */
5236 if (crtl->stack_protect_guard)
5237 stack_protect_epilogue ();
5239 /* If we had calls to alloca, and this machine needs
5240 an accurate stack pointer to exit the function,
5241 insert some code to save and restore the stack pointer. */
5242 if (! EXIT_IGNORE_STACK
5243 && cfun->calls_alloca)
5245 rtx tem = 0, seq;
5247 start_sequence ();
5248 emit_stack_save (SAVE_FUNCTION, &tem);
5249 seq = get_insns ();
5250 end_sequence ();
5251 emit_insn_before (seq, parm_birth_insn);
5253 emit_stack_restore (SAVE_FUNCTION, tem);
5256 /* ??? This should no longer be necessary since the old "stupid"
5257 register allocator is no longer with us, but some parts of the compiler
5258 (e.g. reload_combine, and sh mach_dep_reorg) still try to compute
5259 their own lifetime info instead of using the general framework. */
5260 use_return_register ();
5263 rtx
5264 get_arg_pointer_save_area (void)
5266 rtx ret = arg_pointer_save_area;
5268 if (! ret)
5270 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5271 arg_pointer_save_area = ret;
5274 if (! crtl->arg_pointer_save_area_init)
5276 rtx seq;
5278 /* Save the arg pointer at the beginning of the function. The
5279 generated stack slot may not be a valid memory address, so we
5280 have to check it and fix it if necessary. */
5281 start_sequence ();
5282 emit_move_insn (validize_mem (copy_rtx (ret)),
5283 crtl->args.internal_arg_pointer);
5284 seq = get_insns ();
5285 end_sequence ();
5287 push_topmost_sequence ();
5288 emit_insn_after (seq, entry_of_function ());
5289 pop_topmost_sequence ();
5291 crtl->arg_pointer_save_area_init = true;
5294 return ret;
5297 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5298 for the first time. */
5300 static void
5301 record_insns (rtx_insn *insns, rtx end, htab_t *hashp)
5303 rtx_insn *tmp;
5304 htab_t hash = *hashp;
5306 if (hash == NULL)
5307 *hashp = hash
5308 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5310 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5312 void **slot = htab_find_slot (hash, tmp, INSERT);
5313 gcc_assert (*slot == NULL);
5314 *slot = tmp;
5318 /* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5319 basic block, splitting or peepholes. If INSN is a prologue or epilogue
5320 insn, then record COPY as well. */
5322 void
5323 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5325 htab_t hash;
5326 void **slot;
5328 hash = epilogue_insn_hash;
5329 if (!hash || !htab_find (hash, insn))
5331 hash = prologue_insn_hash;
5332 if (!hash || !htab_find (hash, insn))
5333 return;
5336 slot = htab_find_slot (hash, copy, INSERT);
5337 gcc_assert (*slot == NULL);
5338 *slot = copy;
5341 /* Determine whether INSN is, or is part of (as a SEQUENCE element), an
5342 insn in HASH. Because we can be running after reorg, SEQUENCE rtl is possible. */
5344 static bool
5345 contains (const_rtx insn, htab_t hash)
5347 if (hash == NULL)
5348 return false;
5350 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5352 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5353 int i;
5354 for (i = seq->len () - 1; i >= 0; i--)
5355 if (htab_find (hash, seq->element (i)))
5356 return true;
5357 return false;
5360 return htab_find (hash, insn) != NULL;
5363 int
5364 prologue_epilogue_contains (const_rtx insn)
5366 if (contains (insn, prologue_insn_hash))
5367 return 1;
5368 if (contains (insn, epilogue_insn_hash))
5369 return 1;
5370 return 0;
5373 #ifdef HAVE_return
5374 /* Insert use of return register before the end of BB. */
5376 static void
5377 emit_use_return_register_into_block (basic_block bb)
5379 rtx seq, insn;
5380 start_sequence ();
5381 use_return_register ();
5382 seq = get_insns ();
5383 end_sequence ();
5384 insn = BB_END (bb);
5385 #ifdef HAVE_cc0
5386 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
5387 insn = prev_cc0_setter (insn);
5388 #endif
5389 emit_insn_before (seq, insn);
5393 /* Create a return pattern, either simple_return or return, depending on
5394 simple_p. */
5396 static rtx
5397 gen_return_pattern (bool simple_p)
5399 #ifdef HAVE_simple_return
5400 return simple_p ? gen_simple_return () : gen_return ();
5401 #else
5402 gcc_assert (!simple_p);
5403 return gen_return ();
5404 #endif
5407 /* Insert an appropriate return pattern at the end of block BB. This
5408 also means updating block_for_insn appropriately. SIMPLE_P is
5409 the same as in gen_return_pattern and passed to it. */
5411 void
5412 emit_return_into_block (bool simple_p, basic_block bb)
5414 rtx jump, pat;
5415 jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
5416 pat = PATTERN (jump);
5417 if (GET_CODE (pat) == PARALLEL)
5418 pat = XVECEXP (pat, 0, 0);
5419 gcc_assert (ANY_RETURN_P (pat));
5420 JUMP_LABEL (jump) = pat;
5422 #endif
5424 /* Set JUMP_LABEL for a return insn. */
5426 void
5427 set_return_jump_label (rtx returnjump)
5429 rtx pat = PATTERN (returnjump);
5430 if (GET_CODE (pat) == PARALLEL)
5431 pat = XVECEXP (pat, 0, 0);
5432 if (ANY_RETURN_P (pat))
5433 JUMP_LABEL (returnjump) = pat;
5434 else
5435 JUMP_LABEL (returnjump) = ret_rtx;
5438 #if defined (HAVE_return) || defined (HAVE_simple_return)
5439 /* Return true if there are any active insns between HEAD and TAIL. */
5440 bool
5441 active_insn_between (rtx_insn *head, rtx_insn *tail)
5443 while (tail)
5445 if (active_insn_p (tail))
5446 return true;
5447 if (tail == head)
5448 return false;
5449 tail = PREV_INSN (tail);
5451 return false;
5454 /* LAST_BB is a block that exits, and is empty of active instructions.
5455 Examine its predecessors for jumps that can be converted to
5456 (conditional) returns. */
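/* I.e. when LAST_BB contains nothing but its label, a predecessor
   ending in

       (jump_insn ... (label_ref LAST_BB))

   can end in a return or simple_return instead, with its successor
   edge redirected straight to the exit block.  Conditional jumps are
   redirected to ret_rtx / simple_return_rtx when the target allows
   it.  */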
5457 vec<edge>
5458 convert_jumps_to_returns (basic_block last_bb, bool simple_p,
5459 vec<edge> unconverted ATTRIBUTE_UNUSED)
5461 int i;
5462 basic_block bb;
5463 rtx label;
5464 edge_iterator ei;
5465 edge e;
5466 auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));
5468 FOR_EACH_EDGE (e, ei, last_bb->preds)
5469 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5470 src_bbs.quick_push (e->src);
5472 label = BB_HEAD (last_bb);
5474 FOR_EACH_VEC_ELT (src_bbs, i, bb)
5476 rtx_insn *jump = BB_END (bb);
5478 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5479 continue;
5481 e = find_edge (bb, last_bb);
5483 /* If we have an unconditional jump, we can replace that
5484 with a simple return instruction. */
5485 if (simplejump_p (jump))
5487 /* The use of the return register might be present in the exit
5488 fallthru block. Either:
5489 - removing the use is safe, and we should remove the use in
5490 the exit fallthru block, or
5491 - removing the use is not safe, and we should add it here.
5492 For now, we conservatively choose the latter. Either of the
5493 two helps in crossjumping. */
5494 emit_use_return_register_into_block (bb);
5496 emit_return_into_block (simple_p, bb);
5497 delete_insn (jump);
5500 /* If we have a conditional jump branching to the last
5501 block, we can try to replace that with a conditional
5502 return instruction. */
5503 else if (condjump_p (jump))
5505 rtx dest;
5507 if (simple_p)
5508 dest = simple_return_rtx;
5509 else
5510 dest = ret_rtx;
5511 if (!redirect_jump (jump, dest, 0))
5513 #ifdef HAVE_simple_return
5514 if (simple_p)
5516 if (dump_file)
5517 fprintf (dump_file,
5518 "Failed to redirect bb %d branch.\n", bb->index);
5519 unconverted.safe_push (e);
5521 #endif
5522 continue;
5525 /* See comment in simplejump_p case above. */
5526 emit_use_return_register_into_block (bb);
5528 /* If this block has only one successor, it both jumps
5529 and falls through to the fallthru block, so we can't
5530 delete the edge. */
5531 if (single_succ_p (bb))
5532 continue;
5534 else
5536 #ifdef HAVE_simple_return
5537 if (simple_p)
5539 if (dump_file)
5540 fprintf (dump_file,
5541 "Failed to redirect bb %d branch.\n", bb->index);
5542 unconverted.safe_push (e);
5544 #endif
5545 continue;
5548 /* Fix up the CFG for the successful change we just made. */
5549 redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
5550 e->flags &= ~EDGE_CROSSING;
5552 src_bbs.release ();
5553 return unconverted;
5556 /* Emit a return insn for the exit fallthru block. */
5557 basic_block
5558 emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5560 basic_block last_bb = exit_fallthru_edge->src;
5562 if (JUMP_P (BB_END (last_bb)))
5564 last_bb = split_edge (exit_fallthru_edge);
5565 exit_fallthru_edge = single_succ_edge (last_bb);
5567 emit_barrier_after (BB_END (last_bb));
5568 emit_return_into_block (simple_p, last_bb);
5569 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5570 return last_bb;
5572 #endif
5575 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5576 this into place with notes indicating where the prologue ends and where
5577 the epilogue begins. Update the basic block information when possible.
5579 Notes on epilogue placement:
5580 There are several kinds of edges to the exit block:
5581 * a single fallthru edge from LAST_BB
5582 * possibly, edges from blocks containing sibcalls
5583 * possibly, fake edges from infinite loops
5585 The epilogue is always emitted on the fallthru edge from the last basic
5586 block in the function, LAST_BB, into the exit block.
5588 If LAST_BB is empty except for a label, it is the target of every
5589 other basic block in the function that ends in a return. If a
5590 target has a return or simple_return pattern (possibly with
5591 conditional variants), these basic blocks can be changed so that a
5592 return insn is emitted into them, and their target is adjusted to
5593 the real exit block.
5595 Notes on shrink wrapping: We implement a fairly conservative
5596 version of shrink-wrapping rather than the textbook one. We only
5597 generate a single prologue and a single epilogue. This is
5598 sufficient to catch a number of interesting cases involving early
5599 exits.
5601 First, we identify the blocks that require the prologue to occur before
5602 them. These are the ones that modify a call-saved register, or reference
5603 any of the stack or frame pointer registers. To simplify things, we then
5604 mark everything reachable from these blocks as also requiring a prologue.
5605 This takes care of loops automatically, and avoids the need to examine
5606 whether MEMs reference the frame, since it is sufficient to check for
5607 occurrences of the stack or frame pointer.
5609 We then compute the set of blocks for which the need for a prologue
5610 is anticipatable (borrowing terminology from the shrink-wrapping
5611 description in Muchnick's book). These are the blocks which either
5612 require a prologue themselves, or have only successors where the
5613 prologue is anticipatable. The prologue needs to be
5614 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5615 is not. For the moment, we ensure that only one such edge exists.
5617 The epilogue is placed as described above, but we make a
5618 distinction between inserting return and simple_return patterns
5619 when modifying other blocks that end in a return. Blocks that end
5620 in a sibcall omit the sibcall_epilogue if the block is not in
5621 ANTIC. */
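/* As an illustrative sketch (hypothetical code, not from any real
   testcase), consider a function with a cheap early exit:

     extern int g (int);

     int
     f (int x)
     {
       if (x < 0)
         return 0;
       return g (x);
     }

   The "x < 0" block modifies no call-saved register and makes no
   reference to the stack or frame pointers, so only the block
   containing the call to g requires the prologue; the prologue can
   be inserted on the edge leading to that block, and the early exit
   can use a simple_return.  */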
5623 static void
5624 thread_prologue_and_epilogue_insns (void)
5626 bool inserted;
5627 #ifdef HAVE_simple_return
5628 vec<edge> unconverted_simple_returns = vNULL;
5629 bitmap_head bb_flags;
5630 #endif
5631 rtx_insn *returnjump;
5632 rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
5633 rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
5634 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
5635 edge_iterator ei;
5637 df_analyze ();
5639 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5641 inserted = false;
5642 epilogue_end = NULL;
5643 returnjump = NULL;
5645 /* Can't deal with multiple successors of the entry block at the
5646 moment. A function should always have at least one entry
5647 point. */
5648 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5649 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5650 orig_entry_edge = entry_edge;
5652 split_prologue_seq = NULL;
5653 if (flag_split_stack
5654 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5655 == NULL))
5657 #ifndef HAVE_split_stack_prologue
5658 gcc_unreachable ();
5659 #else
5660 gcc_assert (HAVE_split_stack_prologue);
5662 start_sequence ();
5663 emit_insn (gen_split_stack_prologue ());
5664 split_prologue_seq = get_insns ();
5665 end_sequence ();
5667 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5668 set_insn_locations (split_prologue_seq, prologue_location);
5669 #endif
5672 prologue_seq = NULL;
5673 #ifdef HAVE_prologue
5674 if (HAVE_prologue)
5676 start_sequence ();
5677 rtx_insn *seq = safe_as_a <rtx_insn *> (gen_prologue ());
5678 emit_insn (seq);
5680 /* Insert an explicit USE for the frame pointer
5681 if profiling is on and the frame pointer is required. */
5682 if (crtl->profile && frame_pointer_needed)
5683 emit_use (hard_frame_pointer_rtx);
5685 /* Retain a map of the prologue insns. */
5686 record_insns (seq, NULL, &prologue_insn_hash);
5687 emit_note (NOTE_INSN_PROLOGUE_END);
5689 /* Ensure that instructions are not moved into the prologue when
5690 profiling is on. The call to the profiling routine can be
5691 emitted within the live range of a call-clobbered register. */
5692 if (!targetm.profile_before_prologue () && crtl->profile)
5693 emit_insn (gen_blockage ());
5695 prologue_seq = get_insns ();
5696 end_sequence ();
5697 set_insn_locations (prologue_seq, prologue_location);
5699 #endif
5701 #ifdef HAVE_simple_return
5702 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5704 /* Try to perform a kind of shrink-wrapping, making sure the
5705 prologue/epilogue is emitted only around those parts of the
5706 function that require it. */
5708 try_shrink_wrapping (&entry_edge, orig_entry_edge, &bb_flags, prologue_seq);
5709 #endif
5711 if (split_prologue_seq != NULL_RTX)
5713 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
5714 inserted = true;
5716 if (prologue_seq != NULL_RTX)
5718 insert_insn_on_edge (prologue_seq, entry_edge);
5719 inserted = true;
5722 /* If the exit block has no non-fake predecessors, we don't need
5723 an epilogue. */
5724 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5725 if ((e->flags & EDGE_FAKE) == 0)
5726 break;
5727 if (e == NULL)
5728 goto epilogue_done;
5730 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5732 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
5734 #ifdef HAVE_simple_return
5735 if (entry_edge != orig_entry_edge)
5736 exit_fallthru_edge
5737 = get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
5738 &unconverted_simple_returns,
5739 &returnjump);
5740 #endif
5741 #ifdef HAVE_return
5742 if (HAVE_return)
5744 if (exit_fallthru_edge == NULL)
5745 goto epilogue_done;
5747 if (optimize)
5749 basic_block last_bb = exit_fallthru_edge->src;
5751 if (LABEL_P (BB_HEAD (last_bb))
5752 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
5753 convert_jumps_to_returns (last_bb, false, vNULL);
5755 if (EDGE_COUNT (last_bb->preds) != 0
5756 && single_succ_p (last_bb))
5758 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
5759 epilogue_end = returnjump = BB_END (last_bb);
5760 #ifdef HAVE_simple_return
5761 /* Emitting the return may add a basic block.
5762 Fix bb_flags for the added block. */
5763 if (last_bb != exit_fallthru_edge->src)
5764 bitmap_set_bit (&bb_flags, last_bb->index);
5765 #endif
5766 goto epilogue_done;
5770 #endif
5772 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5773 this marker for the splits of EH_RETURN patterns, and nothing else
5774 uses the flag in the meantime. */
5775 epilogue_completed = 1;
5777 #ifdef HAVE_eh_return
5778 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5779 some targets, these get split to a special version of the epilogue
5780 code. In order to be able to properly annotate these with unwind
5781 info, try to split them now. If we get a valid split, drop an
5782 EPILOGUE_BEG note and mark the insns as epilogue insns. */
5783 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5785 rtx_insn *prev, *last, *trial;
5787 if (e->flags & EDGE_FALLTHRU)
5788 continue;
5789 last = BB_END (e->src);
5790 if (!eh_returnjump_p (last))
5791 continue;
5793 prev = PREV_INSN (last);
5794 trial = try_split (PATTERN (last), last, 1);
5795 if (trial == last)
5796 continue;
5798 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
5799 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
5801 #endif
5803 /* If nothing falls through into the exit block, we don't need an
5804 epilogue. */
5806 if (exit_fallthru_edge == NULL)
5807 goto epilogue_done;
5809 #ifdef HAVE_epilogue
5810 if (HAVE_epilogue)
5812 start_sequence ();
5813 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5814 rtx_insn *seq = as_a <rtx_insn *> (gen_epilogue ());
5815 if (seq)
5816 emit_jump_insn (seq);
5818 /* Retain a map of the epilogue insns. */
5819 record_insns (seq, NULL, &epilogue_insn_hash);
5820 set_insn_locations (seq, epilogue_location);
5822 seq = get_insns ();
5823 returnjump = get_last_insn ();
5824 end_sequence ();
5826 insert_insn_on_edge (seq, exit_fallthru_edge);
5827 inserted = true;
5829 if (JUMP_P (returnjump))
5830 set_return_jump_label (returnjump);
5832 else
5833 #endif
5835 basic_block cur_bb;
5837 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
5838 goto epilogue_done;
5839 /* We have a fall-through edge to the exit block, the source is not
5840 at the end of the function, and there will be an assembler epilogue
5841 at the end of the function.
5842 We can't use force_nonfallthru here, because that would try to
5843 use return. Inserting a jump 'by hand' is extremely messy, so
5844 we take advantage of cfg_layout_finalize using
5845 fixup_fallthru_exit_predecessor. */
5846 cfg_layout_initialize (0);
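/* Chain the blocks in their current order through their aux fields,
   so that cfg_layout_finalize preserves this order while its call
   to fixup_fallthru_exit_predecessor repairs the fall-through.  */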
5847 FOR_EACH_BB_FN (cur_bb, cfun)
5848 if (cur_bb->index >= NUM_FIXED_BLOCKS
5849 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5850 cur_bb->aux = cur_bb->next_bb;
5851 cfg_layout_finalize ();
5854 epilogue_done:
5856 default_rtl_profile ();
5858 if (inserted)
5860 sbitmap blocks;
5862 commit_edge_insertions ();
5864 /* Look for basic blocks within the prologue insns. */
5865 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
5866 bitmap_clear (blocks);
5867 bitmap_set_bit (blocks, entry_edge->dest->index);
5868 bitmap_set_bit (blocks, orig_entry_edge->dest->index);
5869 find_many_sub_basic_blocks (blocks);
5870 sbitmap_free (blocks);
5872 /* The epilogue insns we inserted may cause the exit edge to no longer
5873 be fallthru. */
5874 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5876 if (((e->flags & EDGE_FALLTHRU) != 0)
5877 && returnjump_p (BB_END (e->src)))
5878 e->flags &= ~EDGE_FALLTHRU;
5882 #ifdef HAVE_simple_return
5883 convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags, returnjump,
5884 unconverted_simple_returns);
5885 #endif
5887 #ifdef HAVE_sibcall_epilogue
5888 /* Emit sibling epilogues before any sibling call sites. */
5889 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
5890 (e = ei_safe_edge (ei)); )
5893 basic_block bb = e->src;
5894 rtx_insn *insn = BB_END (bb);
5895 rtx ep_seq;
5897 if (!CALL_P (insn)
5898 || ! SIBLING_CALL_P (insn)
5899 #ifdef HAVE_simple_return
5900 || (entry_edge != orig_entry_edge
5901 && !bitmap_bit_p (&bb_flags, bb->index))
5902 #endif
5905 ei_next (&ei);
5906 continue;
5909 ep_seq = gen_sibcall_epilogue ();
5910 if (ep_seq)
5912 start_sequence ();
5913 emit_note (NOTE_INSN_EPILOGUE_BEG);
5914 emit_insn (ep_seq);
5915 rtx_insn *seq = get_insns ();
5916 end_sequence ();
5918 /* Retain a map of the epilogue insns. Used in life analysis to
5919 avoid getting rid of sibcall epilogue insns. Do this before we
5920 actually emit the sequence. */
5921 record_insns (seq, NULL, &epilogue_insn_hash);
5922 set_insn_locations (seq, epilogue_location);
5924 emit_insn_before (seq, insn);
5926 ei_next (&ei);
5928 #endif
5930 #ifdef HAVE_epilogue
5931 if (epilogue_end)
5933 rtx_insn *insn, *next;
5935 /* Move any NOTE_INSN_FUNCTION_BEG notes that appear after the
5936 epilogue back before it, as those can be relevant for debug
5937 info generation. There is no need to be too picky about the
5938 existence of such a note; if none is present, the loop below
5939 simply does nothing. */
5940 for (insn = epilogue_end; insn; insn = next)
5942 next = NEXT_INSN (insn);
5943 if (NOTE_P (insn)
5944 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
5945 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5948 #endif
5950 #ifdef HAVE_simple_return
5951 bitmap_clear (&bb_flags);
5952 #endif
5954 /* Threading the prologue and epilogue changes the artificial refs
5955 in the entry and exit blocks. */
5956 epilogue_completed = 1;
5957 df_update_entry_exit_and_calls ();
5960 /* Reposition the prologue-end and epilogue-begin notes after
5961 instruction scheduling. */
5963 void
5964 reposition_prologue_and_epilogue_notes (void)
5966 #if defined (HAVE_prologue) || defined (HAVE_epilogue) \
5967 || defined (HAVE_sibcall_epilogue)
5968 /* Since the hash table is created on demand, the fact that it is
5969 non-null is a signal that it is non-empty. */
5970 if (prologue_insn_hash != NULL)
5972 size_t len = htab_elements (prologue_insn_hash);
5973 rtx_insn *insn, *last = NULL, *note = NULL;
5975 /* Scan from the beginning until we reach the last prologue insn. */
5976 /* ??? While we do have the CFG intact, there are two problems:
5977 (1) The prologue can contain loops (typically probing the stack),
5978 which means that the end of the prologue isn't in the first bb.
5979 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
5980 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5982 if (NOTE_P (insn))
5984 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
5985 note = insn;
5987 else if (contains (insn, prologue_insn_hash))
5989 last = insn;
5990 if (--len == 0)
5991 break;
5995 if (last)
5997 if (note == NULL)
5999 /* Scan forward looking for the PROLOGUE_END note. It should
6000 be right at the beginning of the block, possibly with other
6001 insn notes that got moved there. */
6002 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6004 if (NOTE_P (note)
6005 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6006 break;
6010 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6011 if (LABEL_P (last))
6012 last = NEXT_INSN (last);
6013 reorder_insns (note, note, last);
6017 if (epilogue_insn_hash != NULL)
6019 edge_iterator ei;
6020 edge e;
6022 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6024 rtx_insn *insn, *first = NULL, *note = NULL;
6025 basic_block bb = e->src;
6027 /* Scan from the beginning until we reach the first epilogue insn. */
6028 FOR_BB_INSNS (bb, insn)
6030 if (NOTE_P (insn))
6032 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6034 note = insn;
6035 if (first != NULL)
6036 break;
6039 else if (first == NULL && contains (insn, epilogue_insn_hash))
6041 first = insn;
6042 if (note != NULL)
6043 break;
6047 if (note)
6049 /* If the function has a single basic block, and no real
6050 epilogue insns (e.g. sibcall with no cleanup), the
6051 epilogue note can get scheduled before the prologue
6052 note. If we have frame-related prologue insns, having
6053 them scanned during the epilogue will result in a crash.
6054 In this case re-order the epilogue note to just before
6055 the last insn in the block. */
6056 if (first == NULL)
6057 first = BB_END (bb);
6059 if (PREV_INSN (first) != note)
6060 reorder_insns (note, note, PREV_INSN (first));
6064 #endif /* HAVE_prologue or HAVE_epilogue */
6067 /* Returns the name of the function declared by FNDECL. */
6068 const char *
6069 fndecl_name (tree fndecl)
6071 if (fndecl == NULL)
6072 return "(nofn)";
6073 return lang_hooks.decl_printable_name (fndecl, 2);
6076 /* Returns the name of function FN. */
6077 const char *
6078 function_name (struct function *fn)
6080 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6081 return fndecl_name (fndecl);
6084 /* Returns the name of the current function. */
6085 const char *
6086 current_function_name (void)
6088 return function_name (cfun);
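/* Record whether the current function uses only registers that are
   available to leaf functions (per the target's LEAF_REGISTERS
   list), so that later passes and the backend may treat it as a
   leaf function.  */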
6092 static unsigned int
6093 rest_of_handle_check_leaf_regs (void)
6095 #ifdef LEAF_REGISTERS
6096 crtl->uses_only_leaf_regs
6097 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6098 #endif
6099 return 0;
6102 /* Insert a TYPE into the used types hash table of CFUN. */
6104 static void
6105 used_types_insert_helper (tree type, struct function *func)
6107 if (type != NULL && func != NULL)
6109 if (func->used_types_hash == NULL)
6110 func->used_types_hash = hash_set<tree>::create_ggc (37);
6112 func->used_types_hash->add (type);
6116 /* Given a type, insert it into the used types hash table of cfun. */
6117 void
6118 used_types_insert (tree t)
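  /* Look through pointer and array types until a named type is
     found, so that, e.g., a use of "int **" with no intermediate
     typedef records the use of "int" itself.  */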
6120 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6121 if (TYPE_NAME (t))
6122 break;
6123 else
6124 t = TREE_TYPE (t);
6125 if (TREE_CODE (t) == ERROR_MARK)
6126 return;
6127 if (TYPE_NAME (t) == NULL_TREE
6128 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6129 t = TYPE_MAIN_VARIANT (t);
6130 if (debug_info_level > DINFO_LEVEL_NONE)
6132 if (cfun)
6133 used_types_insert_helper (t, cfun);
6134 else
6136 /* So this might be a type referenced by a global variable.
6137 Record that type so that we can later decide to emit its
6138 debug information. */
6139 vec_safe_push (types_used_by_cur_var_decl, t);
6144 /* Helper to hash a struct types_used_by_vars_entry. */
6146 static hashval_t
6147 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6149 gcc_assert (entry && entry->var_decl && entry->type);
6151 return iterative_hash_object (entry->type,
6152 iterative_hash_object (entry->var_decl, 0));
6155 /* Hash function of the types_used_by_vars_entry hash table. */
6157 hashval_t
6158 used_type_hasher::hash (types_used_by_vars_entry *entry)
6160 return hash_types_used_by_vars_entry (entry);
6163 /* Equality function of the types_used_by_vars_entry hash table. */
6165 bool
6166 used_type_hasher::equal (types_used_by_vars_entry *e1,
6167 types_used_by_vars_entry *e2)
6169 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6172 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6174 void
6175 types_used_by_var_decl_insert (tree type, tree var_decl)
6177 if (type != NULL && var_decl != NULL)
6179 types_used_by_vars_entry **slot;
6180 struct types_used_by_vars_entry e;
6181 e.var_decl = var_decl;
6182 e.type = type;
6183 if (types_used_by_vars_hash == NULL)
6184 types_used_by_vars_hash
6185 = hash_table<used_type_hasher>::create_ggc (37);
6187 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6188 if (*slot == NULL)
6190 struct types_used_by_vars_entry *entry;
6191 entry = ggc_alloc<types_used_by_vars_entry> ();
6192 entry->type = type;
6193 entry->var_decl = var_decl;
6194 *slot = entry;
6199 namespace {
6201 const pass_data pass_data_leaf_regs =
6203 RTL_PASS, /* type */
6204 "*leaf_regs", /* name */
6205 OPTGROUP_NONE, /* optinfo_flags */
6206 TV_NONE, /* tv_id */
6207 0, /* properties_required */
6208 0, /* properties_provided */
6209 0, /* properties_destroyed */
6210 0, /* todo_flags_start */
6211 0, /* todo_flags_finish */
6214 class pass_leaf_regs : public rtl_opt_pass
6216 public:
6217 pass_leaf_regs (gcc::context *ctxt)
6218 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6221 /* opt_pass methods: */
6222 virtual unsigned int execute (function *)
6224 return rest_of_handle_check_leaf_regs ();
6227 }; // class pass_leaf_regs
6229 } // anon namespace
6231 rtl_opt_pass *
6232 make_pass_leaf_regs (gcc::context *ctxt)
6234 return new pass_leaf_regs (ctxt);
6237 static unsigned int
6238 rest_of_handle_thread_prologue_and_epilogue (void)
6240 if (optimize)
6241 cleanup_cfg (CLEANUP_EXPENSIVE);
6243 /* On some machines, the prologue and epilogue code, or parts thereof,
6244 can be represented as RTL. Doing so lets us schedule insns between
6245 it and the rest of the code and also allows delayed branch
6246 scheduling to operate in the epilogue. */
6247 thread_prologue_and_epilogue_insns ();
6249 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6250 see PR57320. */
6251 cleanup_cfg (0);
6253 /* The stack usage info is finalized during prologue expansion. */
6254 if (flag_stack_usage_info)
6255 output_stack_usage ();
6257 return 0;
6260 namespace {
6262 const pass_data pass_data_thread_prologue_and_epilogue =
6264 RTL_PASS, /* type */
6265 "pro_and_epilogue", /* name */
6266 OPTGROUP_NONE, /* optinfo_flags */
6267 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6268 0, /* properties_required */
6269 0, /* properties_provided */
6270 0, /* properties_destroyed */
6271 0, /* todo_flags_start */
6272 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6275 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6277 public:
6278 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6279 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6282 /* opt_pass methods: */
6283 virtual unsigned int execute (function *)
6285 return rest_of_handle_thread_prologue_and_epilogue ();
6288 }; // class pass_thread_prologue_and_epilogue
6290 } // anon namespace
6292 rtl_opt_pass *
6293 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6295 return new pass_thread_prologue_and_epilogue (ctxt);
6299 /* This mini-pass fixes fall-out from SSA in asm statements that have
6300 in-out constraints. Say you start with
6302 orig = inout;
6303 asm ("": "+mr" (inout));
6304 use (orig);
6306 which is transformed very early to use explicit output and match operands:
6308 orig = inout;
6309 asm ("": "=mr" (inout) : "0" (inout));
6310 use (orig);
6312 Or, after SSA and copyprop,
6314 asm ("": "=mr" (inout_2) : "0" (inout_1));
6315 use (inout_1);
6317 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6318 they represent two separate values, so they will get different pseudo
6319 registers during expansion. Then, since the two operands need to match
6320 per the constraints, but use different pseudo registers, reload can
6321 only register a reload for these operands. But reloads can only be
6322 satisfied by hardregs, not by memory, so we need a register for this
6323 reload, just because we are presented with non-matching operands.
6324 So, even though we allow memory for this operand, no memory can be
6325 used for it, just because the two operands don't match. This can
6326 cause reload failures on register-starved targets.
6328 So it's a symptom of reload not being able to use memory for reloads
6329 or, alternatively, it's also a symptom of both operands not coming into
6330 reload as matching (in which case the pseudo could go to memory just
6331 fine, as the alternative allows it, and no reload would be necessary).
6332 We fix the latter problem here by transforming
6334 asm ("": "=mr" (inout_2) : "0" (inout_1));
6336 back to
6338 inout_2 = inout_1;
6339 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6341 static void
6342 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6344 int i;
6345 bool changed = false;
6346 rtx op = SET_SRC (p_sets[0]);
6347 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6348 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6349 bool *output_matched = XALLOCAVEC (bool, noutputs);
6351 memset (output_matched, 0, noutputs * sizeof (bool));
6352 for (i = 0; i < ninputs; i++)
6354 rtx input, output;
6355 rtx_insn *insns;
6356 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6357 char *end;
6358 int match, j;
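      /* A matching constraint refers back to an output operand by
	 its decimal number, as in

	   asm ("" : "=r" (out) : "0" (in));

	 possibly preceded by the '%' commutativity marker.  Inputs
	 whose constraint does not start with a number are not
	 matching constraints and are skipped.  */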
6360 if (*constraint == '%')
6361 constraint++;
6363 match = strtoul (constraint, &end, 10);
6364 if (end == constraint)
6365 continue;
6367 gcc_assert (match < noutputs);
6368 output = SET_DEST (p_sets[match]);
6369 input = RTVEC_ELT (inputs, i);
6370 /* Only do the transformation for pseudos. */
6371 if (! REG_P (output)
6372 || rtx_equal_p (output, input)
6373 || (GET_MODE (input) != VOIDmode
6374 && GET_MODE (input) != GET_MODE (output)))
6375 continue;
6377 /* We can't do anything if the output is also used as input,
6378 as we're going to overwrite it. */
6379 for (j = 0; j < ninputs; j++)
6380 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6381 break;
6382 if (j != ninputs)
6383 continue;
6385 /* Avoid changing the same input several times. For
6386 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6387 only change in once (to out1), rather than changing it
6388 first to out1 and afterwards to out2. */
6389 if (i > 0)
6391 for (j = 0; j < noutputs; j++)
6392 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6393 break;
6394 if (j != noutputs)
6395 continue;
6397 output_matched[match] = true;
6399 start_sequence ();
6400 emit_move_insn (output, input);
6401 insns = get_insns ();
6402 end_sequence ();
6403 emit_insn_before (insns, insn);
6405 /* Now replace all mentions of the input with output. We can't
6406 just replace the occurrence in inputs[i], as the register might
6407 also be used in some other input (or even in an address of an
6408 output), which would mean possibly increasing the number of
6409 inputs by one (namely 'output' in addition), which might pose
6410 too complicated a problem for reload to solve. E.g. this situation:
6412 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6414 Here 'input' is used in two occurrences as input (once for the
6415 input operand, once for the address in the second output operand).
6416 If we replaced only the occurrence of the input operand (to
6417 make the matching) we would be left with this:
6419 output = input
6420 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6422 Now we suddenly have two different input values (containing the same
6423 value, but different pseudos) where we formerly had only one.
6424 With more complicated asms this might lead to reload failures
6425 which wouldn't have happened without this pass. So, iterate over
6426 all operands and replace all occurrences of the register used. */
6427 for (j = 0; j < noutputs; j++)
6428 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6429 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6430 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6431 input, output);
6432 for (j = 0; j < ninputs; j++)
6433 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6434 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6435 input, output);
6437 changed = true;
6440 if (changed)
6441 df_insn_rescan (insn);
6444 /* Add the decl D to the local_decls list of FUN. */
6446 void
6447 add_local_decl (struct function *fun, tree d)
6449 gcc_assert (TREE_CODE (d) == VAR_DECL);
6450 vec_safe_push (fun->local_decls, d);
6453 namespace {
6455 const pass_data pass_data_match_asm_constraints =
6457 RTL_PASS, /* type */
6458 "asmcons", /* name */
6459 OPTGROUP_NONE, /* optinfo_flags */
6460 TV_NONE, /* tv_id */
6461 0, /* properties_required */
6462 0, /* properties_provided */
6463 0, /* properties_destroyed */
6464 0, /* todo_flags_start */
6465 0, /* todo_flags_finish */
6468 class pass_match_asm_constraints : public rtl_opt_pass
6470 public:
6471 pass_match_asm_constraints (gcc::context *ctxt)
6472 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6475 /* opt_pass methods: */
6476 virtual unsigned int execute (function *);
6478 }; // class pass_match_asm_constraints
6480 unsigned
6481 pass_match_asm_constraints::execute (function *fun)
6483 basic_block bb;
6484 rtx_insn *insn;
6485 rtx pat, *p_sets;
6486 int noutputs;
6488 if (!crtl->has_asm_statement)
6489 return 0;
6491 df_set_flags (DF_DEFER_INSN_RESCAN);
6492 FOR_EACH_BB_FN (bb, fun)
6494 FOR_BB_INSNS (bb, insn)
6496 if (!INSN_P (insn))
6497 continue;
6499 pat = PATTERN (insn);
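	  /* An asm with multiple outputs (or clobbers) is expanded
	     as a PARALLEL whose first elements are one SET per
	     output; a single-output asm without clobbers is a lone
	     SET.  */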
6500 if (GET_CODE (pat) == PARALLEL)
6501 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6502 else if (GET_CODE (pat) == SET)
6503 p_sets = &PATTERN (insn), noutputs = 1;
6504 else
6505 continue;
6507 if (GET_CODE (*p_sets) == SET
6508 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6509 match_asm_constraints_1 (insn, p_sets, noutputs);
6513 return TODO_df_finish;
6516 } // anon namespace
6518 rtl_opt_pass *
6519 make_pass_match_asm_constraints (gcc::context *ctxt)
6521 return new pass_match_asm_constraints (ctxt);
6525 #include "gt-function.h"