/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "gimple.h"
#include "tree-pass.h"
#include "predict.h"
#include "df.h"
#include "params.h"
#include "bb-reorder.h"
/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* Round VALUE down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
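
/* Worked example (editor's note, not part of the original source):
   with ALIGN == 8, FLOOR_ROUND (-13, 8) == -16 and
   CEIL_ROUND (13, 8) == 16.  The bit masks are well defined for
   negative values, unlike integer division, whose rounding direction
   for negative operands makes (VALUE / ALIGN * ALIGN) unreliable.  */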
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;

htab_t types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locations (rtx, int) ATTRIBUTE_UNUSED;

/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

static vec<function_p> function_context_stack;
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
             /* Leave room for the fixed part of the frame.  */
             - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
                "total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
                     HOST_WIDE_INT size, unsigned int alignment,
                     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
                      (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
                     (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
           && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
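
/* Worked example (editor's note, not part of the original source): on a
   downward-growing frame with START == -24, LENGTH == 24, SIZE == 8,
   ALIGNMENT == 8 and frame_phase == 0, the computation yields
   FLOOR_ROUND (-24 + 24 - 8, 8) == -8, which lies inside [-24, 0), so
   *POFFSET is set to -8 and the function returns true.  */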
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc_frame_space ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */
rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
                      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                              || size == 0
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
        {
          struct frame_space **psp;

          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
            {
              struct frame_space *space = *psp;
              if (!try_fit_stack_local (space->start, space->length, size,
                                        alignment, &slot_offset))
                continue;
              *psp = space->next;
              if (slot_offset > space->start)
                add_frame_space (space->start, slot_offset);
              if (slot_offset + size < space->start + space->length)
                add_frame_space (slot_offset + size,
                                 space->start + space->length);
              goto found_space;
            }
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > frame_offset)
            add_frame_space (frame_offset, slot_offset);
          if (slot_offset + size < old_frame_offset)
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > old_frame_offset)
            add_frame_space (old_frame_offset, slot_offset);
          if (slot_offset + size < frame_offset)
            add_frame_space (slot_offset + size, frame_offset);
        }
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with the last parameter as ASLK_RECORD_PAD.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
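
#if 0
/* Usage sketch (editor's illustration, not part of the original source):
   the ALIGN conventions documented above look like this in practice.  */
static void
example_stack_slots (void)
{
  rtx a = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);  /* mode alignment */
  rtx b = assign_stack_local (BLKmode, 32, -1);                    /* BIGGEST_ALIGNMENT */
  rtx c = assign_stack_local (QImode, 1, -2);                      /* byte alignment */
}
#endif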
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */
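
/* Example (editor's note, not part of the original source): in a GNU C
   statement expression such as

     x = ({ struct S tmp = f (); tmp.field; });

   the value of the last statement may live in a temporary slot, so that
   slot must outlive the statement that created it; the nesting-level
   trick described above is what keeps it alive.  */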
struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
static size_t n_temp_slots_in_use;

/* Entry for the above hash table.  */
struct GTY(()) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}
/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    htab_clear_slot (temp_slot_address_table, slot);
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    htab_traverse (temp_slot_address_table,
                   remove_unused_temp_slot_addresses_1,
                   NULL);
  else
    htab_empty (temp_slot_address_table);
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          {
            if (INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
              return p;
          }
    }

  return NULL;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc_temp_slot ();
              p->in_use = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc_temp_slot ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
                                      (mode == BLKmode
                                       ? CEIL_ROUND (size,
                                                     (int) align
                                                     / BITS_PER_UNIT)
                                       : size),
                                      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      else if (size == -1)
        size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
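
#if 0
/* Usage sketch (editor's illustration, not part of the original source):
   expanders ask for a temporary either as a register, when possible, or
   as addressable stack memory when the value's address is needed.  */
static void
example_assign_temp (tree type)
{
  rtx reg_or_mem = assign_temp (type, 0, 0);  /* register OK */
  rtx mem_only   = assign_temp (type, 1, 0);  /* must be stack memory */
}
#endif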
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
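
/* Worked example (editor's note, not part of the original source): two
   free BLKmode slots with (base_offset, full_size) of (0, 16) and
   (16, 32) satisfy p->base_offset + p->full_size == q->base_offset, so
   they are merged into a single slot of (0, 48) that later, larger
   requests can reuse.  */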
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
     in common between them, try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
        move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}
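
#if 0
/* Usage sketch (editor's illustration, not part of the original source):
   a caller brackets expansion of a construct with push/pop so that any
   temporaries it creates die when the construct ends.  */
static void
example_temp_slot_scope (void)
{
  push_temp_slots ();
  /* ... expand something that may call assign_temp ...  */
  free_temp_slots ();   /* end of a statement */
  pop_temp_slots ();    /* end of the construct */
}
#endif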
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
                                               temp_slot_address_hash,
                                               temp_slot_address_eq,
                                               NULL);
  else
    htab_empty (temp_slot_address_table);
}
/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
typedef struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
/* ???  This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
typedef struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;
/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc_initial_value_struct ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_alloc_vec_initial_value_pair (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
                                    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}

/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
          && REGNO (ivs->entries[i].hard_reg) == regno)
        return ivs->entries[i].pseudo;

  return NULL_RTX;
}
unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}

/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */
bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}
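
#if 0
/* Usage sketch (editor's illustration, not part of the original source;
   the register number 14 is an arbitrary stand-in for a link register):
   a backend that must refer to an incoming hard register value later in
   the function asks for a pseudo holding its entry value; the move from
   the hard register is emitted at function entry by
   emit_initial_value_sets.  */
static void
example_initial_value (void)
{
  rtx entry_lr = get_hard_reg_initial_val (Pmode, 14);
  /* ... use entry_lr wherever the entry value is needed ...  */
}
#endif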
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif
/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
        : REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif
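
/* Worked example (editor's note, not part of the original source): on a
   target that accumulates outgoing arguments, with
   crtl->outgoing_args_size == 32, no REG_PARM_STACK_SPACE, and
   STACK_POINTER_OFFSET == 0, dynamically allocated memory starts
   32 bytes above the stack pointer: STACK_DYNAMIC_OFFSET == 32.  */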
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
        {
          /* Replace virtual_incoming_args_rtx with internal arg
             pointer if DRAP is used to realign stack.  */
          new_rtx = crtl->args.internal_arg_pointer;
          offset = 0;
        }
      else
        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
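
/* Example (editor's note, not part of the original source): a frame
   reference such as

     (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int 8)))

   maps through this function to frame_pointer_rtx with var_offset
   added into the displacement, i.e. base reg frame-pointer and offset
   8 + var_offset (var_offset is STARTING_FRAME_OFFSET).  */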
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
        {
          *loc = plus_constant (GET_MODE (x), new_rtx, offset);
          if (changed)
            *changed = true;
        }
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
        {
          new_rtx = plus_constant (GET_MODE (x), new_rtx, offset);
          *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
          if (changed)
            *changed = true;
          return -1;
        }

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
         we can commute the PLUS and SUBREG because pointers into the
         frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}

/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
        {
          start_sequence ();

          for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
          x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
                                   GEN_INT (-offset));
          x = force_operand (x, new_rtx);
          if (x != new_rtx)
            emit_move_insn (new_rtx, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
                                   new_rtx, GEN_INT (offset), SET_DEST (set),
                                   1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && CONST_INT_P (recog_data.operand[2])
          && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += INTVAL (recog_data.operand[2]);

          /* If the sum is zero, then replace with a plain move.  */
          if (offset == 0
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new_rtx);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new_rtx)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);
            bool changed = false;

            for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
            if (!changed)
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr);
            /* It may happen that the address with the virtual reg
               was valid (e.g. based on the virtual stack reg, which might
               be acceptable to the predicates with all offsets), whereas
               the address now isn't anymore, for instance when the address
               is still offsetted, but the base reg isn't virtual-stack-reg
               anymore.  Below we would do a force_reg on the whole operand,
               but this insn might actually only accept memory.  Hence,
               before doing that last resort, try to reload the address into
               a register, so this operand stays a MEM.  */
            if (!safe_insn_predicate (insn_code, i, x))
              {
                addr = force_reg (GET_MODE (addr), addr);
                x = replace_equiv_address (x, addr);
              }
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new_rtx = instantiate_new_reg (x, &offset);
          if (new_rtx == NULL)
            continue;
          if (offset == 0)
            x = new_rtx;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
                                       GEN_INT (offset), NULL_RTX,
                                       1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new_rtx == NULL)
            continue;
          if (offset != 0)
            {
              start_sequence ();
              new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
                                             GEN_INT (offset), NULL_RTX,
                                             1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
                                   GET_MODE (new_rtx), SUBREG_BYTE (x));
          gcc_assert (x);
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        {
          start_sequence ();
          if (REG_P (x))
            {
              gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
              x = copy_to_reg (x);
            }
          else
            x = force_reg (insn_data[insn_code].operand[i].mode, x);
          seq = get_insns ();
          end_sequence ();
          if (seq)
            emit_insn_before (seq, insn);
        }

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          /* For asm goto, instead of fixing up all the edges
             just clear the template and clear input operands
             (asm goto doesn't have any output operands).  */
          if (JUMP_P (insn))
            {
              rtx asm_op = extract_asm_operands (PATTERN (insn));
              ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
              ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
              ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
            }
          else
            delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}

/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
        {
          if (DECL_RTL_SET_P (t))
            instantiate_decl_rtl (DECL_RTL (t));
          if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
              && DECL_INCOMING_RTL (t))
            instantiate_decl_rtl (DECL_INCOMING_RTL (t));
          if ((TREE_CODE (t) == VAR_DECL
               || TREE_CODE (t) == RESULT_DECL)
              && DECL_HAS_VALUE_EXPR_P (t))
            {
              tree v = DECL_VALUE_EXPR (t);
              walk_tree (&v, instantiate_expr, NULL, NULL);
            }
        }
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
        {
          tree v = DECL_VALUE_EXPR (t);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}

/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
        instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  vec_free (cfun->local_decls);
}
1889 /* Pass through the INSNS of function FNDECL and convert virtual register
1890 references to hard register references. */
1892 static unsigned int
1893 instantiate_virtual_regs (void)
1895 rtx insn;
1897 /* Compute the offsets to use for this function. */
1898 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1899 var_offset = STARTING_FRAME_OFFSET;
1900 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1901 out_arg_offset = STACK_POINTER_OFFSET;
1902 #ifdef FRAME_POINTER_CFA_OFFSET
1903 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1904 #else
1905 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1906 #endif
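
/* For reference, the instantiation walk pairs these offsets with the
   standard virtual registers as follows:

       virtual-incoming-args   ->  arg pointer    + in_arg_offset
       virtual-stack-vars      ->  frame pointer  + var_offset
       virtual-stack-dynamic   ->  stack pointer  + dynamic_offset
       virtual-outgoing-args   ->  stack pointer  + out_arg_offset
       virtual-cfa             ->  arg/frame ptr  + cfa_offset  */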
1908 /* Initialize recognition, indicating that volatile is OK. */
1909 init_recog ();
1911 /* Scan through all the insns, instantiating every virtual register still
1912 present. */
1913 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1914 if (INSN_P (insn))
1916 /* These patterns in the instruction stream can never be recognized.
1917 Fortunately, they shouldn't contain virtual registers either. */
1918 if (GET_CODE (PATTERN (insn)) == USE
1919 || GET_CODE (PATTERN (insn)) == CLOBBER
1920 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1921 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1922 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1923 continue;
1924 else if (DEBUG_INSN_P (insn))
1925 for_each_rtx (&INSN_VAR_LOCATION (insn),
1926 instantiate_virtual_regs_in_rtx, NULL);
1927 else
1928 instantiate_virtual_regs_in_insn (insn);
1930 if (INSN_DELETED_P (insn))
1931 continue;
1933 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1935 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1936 if (CALL_P (insn))
1937 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1938 instantiate_virtual_regs_in_rtx, NULL);
1941 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1942 instantiate_decls (current_function_decl);
1944 targetm.instantiate_decls ();
1946 /* Indicate that, from now on, assign_stack_local should use
1947 frame_pointer_rtx. */
1948 virtuals_instantiated = 1;
1950 return 0;
1953 struct rtl_opt_pass pass_instantiate_virtual_regs =
1956 RTL_PASS,
1957 "vregs", /* name */
1958 OPTGROUP_NONE, /* optinfo_flags */
1959 NULL, /* gate */
1960 instantiate_virtual_regs, /* execute */
1961 NULL, /* sub */
1962 NULL, /* next */
1963 0, /* static_pass_number */
1964 TV_NONE, /* tv_id */
1965 0, /* properties_required */
1966 0, /* properties_provided */
1967 0, /* properties_destroyed */
1968 0, /* todo_flags_start */
1969 0 /* todo_flags_finish */
1974 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1975 This means a type for which function calls must pass an address to the
1976 function or get an address back from the function.
1977 EXP may be a type node or an expression (whose type is tested). */
1979 int
1980 aggregate_value_p (const_tree exp, const_tree fntype)
1982 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1983 int i, regno, nregs;
1984 rtx reg;
1986 if (fntype)
1987 switch (TREE_CODE (fntype))
1989 case CALL_EXPR:
1991 tree fndecl = get_callee_fndecl (fntype);
1992 fntype = (fndecl
1993 ? TREE_TYPE (fndecl)
1994 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
1996 break;
1997 case FUNCTION_DECL:
1998 fntype = TREE_TYPE (fntype);
1999 break;
2000 case FUNCTION_TYPE:
2001 case METHOD_TYPE:
2002 break;
2003 case IDENTIFIER_NODE:
2004 fntype = NULL_TREE;
2005 break;
2006 default:
2007 /* We don't expect other tree types here. */
2008 gcc_unreachable ();
2011 if (VOID_TYPE_P (type))
2012 return 0;
2014 /* If a record should be passed the same as its first (and only) member,
2015 don't pass it as an aggregate. */
2016 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2017 return aggregate_value_p (first_field (type), fntype);
2019 /* If the front end has decided that this needs to be passed by
2020 reference, do so. */
2021 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2022 && DECL_BY_REFERENCE (exp))
2023 return 1;
2025 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2026 if (fntype && TREE_ADDRESSABLE (fntype))
2027 return 1;
2029 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2030 and thus can't be returned in registers. */
2031 if (TREE_ADDRESSABLE (type))
2032 return 1;
2034 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2035 return 1;
2037 /* Pointers-to-shared must be considered as aggregates for
2038 the purpose of passing them as return values, but only
2039 when the underlying mode of the representation would
2040 require that its value be passed on the stack.
2041 This occurs when using the 'struct' representation
2042 of a shared pointer. */
2043 if (flag_pcc_struct_return && POINTER_TYPE_P (type)
2044 && upc_shared_type_p (TREE_TYPE (type))
2045 && AGGREGATE_TYPE_P (upc_pts_rep_type_node))
2046 return 1;
2048 if (targetm.calls.return_in_memory (type, fntype))
2049 return 1;
2051 /* Make sure we have suitable call-clobbered regs to return
2052 the value in; if not, we must return it in memory. */
2053 reg = hard_function_value (type, 0, fntype, 0);
2055 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2056 it is OK. */
2057 if (!REG_P (reg))
2058 return 0;
2060 regno = REGNO (reg);
2061 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2062 for (i = 0; i < nregs; i++)
2063 if (! call_used_regs[regno + i])
2064 return 1;
2066 return 0;
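
/* Example (editorial sketch): on a typical 64-bit target, an "int"
   result comes back in a call-clobbered register, so this returns 0,
   whereas a type such as "struct { char buf[64]; }" makes
   targetm.calls.return_in_memory return true and this returns 1, so
   callers hand the function a hidden address for the return slot.  */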
2069 /* Return true if we should assign DECL a pseudo register; false if it
2070 should live on the local stack. */
2072 bool
2073 use_register_for_decl (const_tree decl)
2075 if (!targetm.calls.allocate_stack_slots_for_args())
2076 return true;
2078 /* Honor volatile. */
2079 if (TREE_SIDE_EFFECTS (decl))
2080 return false;
2082 /* Honor addressability. */
2083 if (TREE_ADDRESSABLE (decl))
2084 return false;
2086 /* Only register-like things go in registers. */
2087 if (DECL_MODE (decl) == BLKmode)
2088 return false;
2090 /* If -ffloat-store specified, don't put explicit float variables
2091 into registers. */
2092 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2093 propagates values across these stores, and it probably shouldn't. */
2094 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2095 return false;
2097 /* If we're not interested in tracking debugging information for
2098 this decl, then we can certainly put it in a register. */
2099 if (DECL_IGNORED_P (decl))
2100 return true;
2102 if (optimize)
2103 return true;
2105 if (!DECL_REGISTER (decl))
2106 return false;
2108 switch (TREE_CODE (TREE_TYPE (decl)))
2110 case RECORD_TYPE:
2111 case UNION_TYPE:
2112 case QUAL_UNION_TYPE:
2113 /* When not optimizing, disregard register keyword for variables with
2114 types containing methods, otherwise the methods won't be callable
2115 from the debugger. */
2116 if (TYPE_METHODS (TREE_TYPE (decl)))
2117 return false;
2118 break;
2119 default:
2120 break;
2123 return true;
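
/* Example (editorial sketch): when not optimizing, "register int i;"
   satisfies the DECL_REGISTER test above and is given a pseudo, while
   a volatile or address-taken local fails the TREE_SIDE_EFFECTS or
   TREE_ADDRESSABLE check and keeps its stack slot.  */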
2126 /* Return true if TYPE should be passed by invisible reference. */
2128 bool
2129 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2130 tree type, bool named_arg)
2132 if (type)
2134 /* If this type contains non-trivial constructors, then it is
2135 forbidden for the middle-end to create any new copies. */
2136 if (TREE_ADDRESSABLE (type))
2137 return true;
2139 /* GCC post 3.4 passes *all* variable sized types by reference. */
2140 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2141 return true;
2143 /* If a record type should be passed the same as its first (and only)
2144 member, use the type and mode of that member. */
2145 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2147 type = TREE_TYPE (first_field (type));
2148 mode = TYPE_MODE (type);
2152 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2153 type, named_arg);
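
/* Example (editorial sketch): a parameter whose TYPE_SIZE is not an
   INTEGER_CST, e.g. a dynamically sized Ada record, is forced into
   by-reference passing above regardless of the target hook, as is a
   C++ class made TREE_ADDRESSABLE by a non-trivial copy constructor.  */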
2156 /* Return true if TYPE, which is passed by reference, should be callee
2157 copied instead of caller copied. */
2159 bool
2160 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2161 tree type, bool named_arg)
2163 if (type && TREE_ADDRESSABLE (type))
2164 return false;
2165 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2166 named_arg);
2169 /* Structures to communicate between the subroutines of assign_parms.
2170 The first holds data persistent across all parameters, the second
2171 is cleared out for each parameter. */
2173 struct assign_parm_data_all
2175 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2176 should become a job of the target or otherwise encapsulated. */
2177 CUMULATIVE_ARGS args_so_far_v;
2178 cumulative_args_t args_so_far;
2179 struct args_size stack_args_size;
2180 tree function_result_decl;
2181 tree orig_fnargs;
2182 rtx first_conversion_insn;
2183 rtx last_conversion_insn;
2184 HOST_WIDE_INT pretend_args_size;
2185 HOST_WIDE_INT extra_pretend_bytes;
2186 int reg_parm_stack_space;
2189 struct assign_parm_data_one
2191 tree nominal_type;
2192 tree passed_type;
2193 rtx entry_parm;
2194 rtx stack_parm;
2195 enum machine_mode nominal_mode;
2196 enum machine_mode passed_mode;
2197 enum machine_mode promoted_mode;
2198 struct locate_and_pad_arg_data locate;
2199 int partial;
2200 BOOL_BITFIELD named_arg : 1;
2201 BOOL_BITFIELD passed_pointer : 1;
2202 BOOL_BITFIELD on_stack : 1;
2203 BOOL_BITFIELD loaded_in_reg : 1;
2206 /* A subroutine of assign_parms. Initialize ALL. */
2208 static void
2209 assign_parms_initialize_all (struct assign_parm_data_all *all)
2211 tree fntype ATTRIBUTE_UNUSED;
2213 memset (all, 0, sizeof (*all));
2215 fntype = TREE_TYPE (current_function_decl);
2217 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2218 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2219 #else
2220 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2221 current_function_decl, -1);
2222 #endif
2223 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2225 #ifdef REG_PARM_STACK_SPACE
2226 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2227 #endif
2230 /* If ARGS contains entries with complex types, split each such entry
2231 into two entries of the component type.  The vector ARGS is updated
2232 in place, with the split decls substituted for the originals.  */
2234 static void
2235 split_complex_args (vec<tree> *args)
2237 unsigned i;
2238 tree p;
2240 FOR_EACH_VEC_ELT (*args, i, p)
2242 tree type = TREE_TYPE (p);
2243 if (TREE_CODE (type) == COMPLEX_TYPE
2244 && targetm.calls.split_complex_arg (type))
2246 tree decl;
2247 tree subtype = TREE_TYPE (type);
2248 bool addressable = TREE_ADDRESSABLE (p);
2250 /* Rewrite the PARM_DECL's type with its component. */
2251 p = copy_node (p);
2252 TREE_TYPE (p) = subtype;
2253 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2254 DECL_MODE (p) = VOIDmode;
2255 DECL_SIZE (p) = NULL;
2256 DECL_SIZE_UNIT (p) = NULL;
2257 /* If this arg must go in memory, put it in a pseudo here.
2258 We can't allow it to go in memory as per normal parms,
2259 because the usual place might not have the imag part
2260 adjacent to the real part. */
2261 DECL_ARTIFICIAL (p) = addressable;
2262 DECL_IGNORED_P (p) = addressable;
2263 TREE_ADDRESSABLE (p) = 0;
2264 layout_decl (p, 0);
2265 (*args)[i] = p;
2267 /* Build a second synthetic decl. */
2268 decl = build_decl (EXPR_LOCATION (p),
2269 PARM_DECL, NULL_TREE, subtype);
2270 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2271 DECL_ARTIFICIAL (decl) = addressable;
2272 DECL_IGNORED_P (decl) = addressable;
2273 layout_decl (decl, 0);
2274 args->safe_insert (++i, decl);
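
/* Example (editorial sketch): given

       void f (_Complex double z);

   a target whose split_complex_arg hook returns true for the type sees
   Z replaced here by two DFmode PARM_DECLs: the rewritten original for
   the real part and the synthetic decl built above for the imaginary
   part.  */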
2279 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2280 the hidden struct return argument, and (abi willing) complex args.
2281 Return the new parameter list. */
2283 static vec<tree>
2284 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2286 tree fndecl = current_function_decl;
2287 tree fntype = TREE_TYPE (fndecl);
2288 vec<tree> fnargs = vNULL;
2289 tree arg;
2291 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2292 fnargs.safe_push (arg);
2294 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2296 /* If struct value address is treated as the first argument, make it so. */
2297 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2298 && ! cfun->returns_pcc_struct
2299 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2301 tree type = build_pointer_type (TREE_TYPE (fntype));
2302 tree decl;
2304 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2305 PARM_DECL, get_identifier (".result_ptr"), type);
2306 DECL_ARG_TYPE (decl) = type;
2307 DECL_ARTIFICIAL (decl) = 1;
2308 DECL_NAMELESS (decl) = 1;
2309 TREE_CONSTANT (decl) = 1;
2311 DECL_CHAIN (decl) = all->orig_fnargs;
2312 all->orig_fnargs = decl;
2313 fnargs.safe_insert (0, decl);
2315 all->function_result_decl = decl;
2318 /* If the target wants to split complex arguments into scalars, do so. */
2319 if (targetm.calls.split_complex_arg)
2320 split_complex_args (&fnargs);
2322 return fnargs;
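
/* Example (editorial sketch): for

       struct big f (int x);

   on a target that passes the struct-value address as an ordinary
   first argument, the vector returned above corresponds to

       void f (struct big *.result_ptr, int x);

   where .result_ptr is the artificial PARM_DECL created here.  */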
2325 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2326 data for the parameter. Incorporate ABI specifics such as pass-by-
2327 reference and type promotion. */
2329 static void
2330 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2331 struct assign_parm_data_one *data)
2333 tree nominal_type, passed_type;
2334 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2335 int unsignedp;
2337 memset (data, 0, sizeof (*data));
2339 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2340 if (!cfun->stdarg)
2341 data->named_arg = 1; /* No variadic parms. */
2342 else if (DECL_CHAIN (parm))
2343 data->named_arg = 1; /* Not the last non-variadic parm. */
2344 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2345 data->named_arg = 1; /* Only variadic ones are unnamed. */
2346 else
2347 data->named_arg = 0; /* Treat as variadic. */
2349 nominal_type = TREE_TYPE (parm);
2350 passed_type = DECL_ARG_TYPE (parm);
2352 /* Look out for errors propagating this far. Also, if the parameter's
2353 type is void then its value doesn't matter. */
2354 if (TREE_TYPE (parm) == error_mark_node
2355 /* This can happen after weird syntax errors
2356 or if an enum type is defined among the parms. */
2357 || TREE_CODE (parm) != PARM_DECL
2358 || passed_type == NULL
2359 || VOID_TYPE_P (nominal_type))
2361 nominal_type = passed_type = void_type_node;
2362 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2363 goto egress;
2366 /* Find mode of arg as it is passed, and mode of arg as it should be
2367 during execution of this function. */
2368 passed_mode = TYPE_MODE (passed_type);
2369 nominal_mode = TYPE_MODE (nominal_type);
2371 /* If the parm is to be passed as a transparent union or record, use the
2372 type of the first field for the tests below. We have already verified
2373 that the modes are the same. */
2374 if ((TREE_CODE (passed_type) == UNION_TYPE
2375 || TREE_CODE (passed_type) == RECORD_TYPE)
2376 && TYPE_TRANSPARENT_AGGR (passed_type))
2377 passed_type = TREE_TYPE (first_field (passed_type));
2379 /* See if this arg was passed by invisible reference. */
2380 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2381 passed_type, data->named_arg))
2383 passed_type = nominal_type = build_pointer_type (passed_type);
2384 data->passed_pointer = true;
2385 passed_mode = nominal_mode = Pmode;
2388 /* Find mode as it is passed by the ABI. */
2389 unsignedp = TYPE_UNSIGNED (passed_type);
2390 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2391 TREE_TYPE (current_function_decl), 0);
2393 egress:
2394 data->nominal_type = nominal_type;
2395 data->passed_type = passed_type;
2396 data->nominal_mode = nominal_mode;
2397 data->passed_mode = passed_mode;
2398 data->promoted_mode = promoted_mode;
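
/* Example (editorial sketch): for a "short" parameter on a port whose
   promote_function_mode widens subword integers, nominal_mode and
   passed_mode are HImode while promoted_mode comes back SImode; for a
   by-reference parameter, type and mode are rewritten above to the
   pointer (Pmode) form and passed_pointer is set.  */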
2401 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2403 static void
2404 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2405 struct assign_parm_data_one *data, bool no_rtl)
2407 int varargs_pretend_bytes = 0;
2409 targetm.calls.setup_incoming_varargs (all->args_so_far,
2410 data->promoted_mode,
2411 data->passed_type,
2412 &varargs_pretend_bytes, no_rtl);
2414 /* If the back-end has requested extra stack space, record how much is
2415 needed. Do not change pretend_args_size otherwise since it may be
2416 nonzero from an earlier partial argument. */
2417 if (varargs_pretend_bytes > 0)
2418 all->pretend_args_size = varargs_pretend_bytes;
2421 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2422 the incoming location of the current parameter. */
2424 static void
2425 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2426 struct assign_parm_data_one *data)
2428 HOST_WIDE_INT pretend_bytes = 0;
2429 rtx entry_parm;
2430 bool in_regs;
2432 if (data->promoted_mode == VOIDmode)
2434 data->entry_parm = data->stack_parm = const0_rtx;
2435 return;
2438 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2439 data->promoted_mode,
2440 data->passed_type,
2441 data->named_arg);
2443 if (entry_parm == 0)
2444 data->promoted_mode = data->passed_mode;
2446 /* Determine parm's home in the stack, in case it arrives in the stack
2447 or we should pretend it did. Compute the stack position and rtx where
2448 the argument arrives and its size.
2450 There is one complexity here: If this was a parameter that would
2451 have been passed in registers, but wasn't only because it is
2452 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2453 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2454 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0,
2455 as was done the previous time. */
2456 in_regs = entry_parm != 0;
2457 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2458 in_regs = true;
2459 #endif
2460 if (!in_regs && !data->named_arg)
2462 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2464 rtx tem;
2465 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2466 data->promoted_mode,
2467 data->passed_type, true);
2468 in_regs = tem != NULL;
2472 /* If this parameter was passed both in registers and in the stack, use
2473 the copy on the stack. */
2474 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2475 data->passed_type))
2476 entry_parm = 0;
2478 if (entry_parm)
2480 int partial;
2482 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2483 data->promoted_mode,
2484 data->passed_type,
2485 data->named_arg);
2486 data->partial = partial;
2488 /* The caller might already have allocated stack space for the
2489 register parameters. */
2490 if (partial != 0 && all->reg_parm_stack_space == 0)
2492 /* Part of this argument is passed in registers and part
2493 is passed on the stack. Ask the prologue code to extend
2494 the stack part so that we can recreate the full value.
2496 PRETEND_BYTES is the size of the registers we need to store.
2497 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2498 stack space that the prologue should allocate.
2500 Internally, gcc assumes that the argument pointer is aligned
2501 to STACK_BOUNDARY bits. This is used both for alignment
2502 optimizations (see init_emit) and to locate arguments that are
2503 aligned to more than PARM_BOUNDARY bits. We must preserve this
2504 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2505 a stack boundary. */
2507 /* We assume at most one partial arg, and it must be the first
2508 argument on the stack. */
2509 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2511 pretend_bytes = partial;
2512 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2514 /* We want to align relative to the actual stack pointer, so
2515 don't include this in the stack size until later. */
2516 all->extra_pretend_bytes = all->pretend_args_size;
2520 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2521 entry_parm ? data->partial : 0, current_function_decl,
2522 &all->stack_args_size, &data->locate);
2524 /* Update parm_stack_boundary if this parameter is passed in the
2525 stack. */
2526 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2527 crtl->parm_stack_boundary = data->locate.boundary;
2529 /* Adjust offsets to include the pretend args. */
2530 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2531 data->locate.slot_offset.constant += pretend_bytes;
2532 data->locate.offset.constant += pretend_bytes;
2534 data->entry_parm = entry_parm;
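
/* Example (editorial sketch): on a hypothetical port with four word
   argument registers and no REG_PARM_STACK_SPACE, an 8-byte argument
   whose first half lands in the last register has
   arg_partial_bytes == 4, so pretend_args_size above becomes
   CEIL_ROUND (4, STACK_BYTES) and the prologue stores the register
   half adjacent to the stack half so the full value is contiguous.  */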
2537 /* A subroutine of assign_parms. If there is actually space on the stack
2538 for this parm, count it in stack_args_size and return true. */
2540 static bool
2541 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2542 struct assign_parm_data_one *data)
2544 /* Trivially true if we've no incoming register. */
2545 if (data->entry_parm == NULL)
2547 /* Also true if we're partially in registers and partially not,
2548 since we've arranged to drop the entire argument on the stack. */
2549 else if (data->partial != 0)
2551 /* Also true if the target says that it's passed in both registers
2552 and on the stack. */
2553 else if (GET_CODE (data->entry_parm) == PARALLEL
2554 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2556 /* Also true if the target says that there's stack allocated for
2557 all register parameters. */
2558 else if (all->reg_parm_stack_space > 0)
2560 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2561 else
2562 return false;
2564 all->stack_args_size.constant += data->locate.size.constant;
2565 if (data->locate.size.var)
2566 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2568 return true;
2571 /* A subroutine of assign_parms. Given that this parameter is allocated
2572 stack space by the ABI, find it. */
2574 static void
2575 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2577 rtx offset_rtx, stack_parm;
2578 unsigned int align, boundary;
2580 /* If we're passing this arg using a reg, make its stack home the
2581 aligned stack slot. */
2582 if (data->entry_parm)
2583 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2584 else
2585 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2587 stack_parm = crtl->args.internal_arg_pointer;
2588 if (offset_rtx != const0_rtx)
2589 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2590 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2592 if (!data->passed_pointer)
2594 set_mem_attributes (stack_parm, parm, 1);
2595 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2596 while promoted mode's size is needed. */
2597 if (data->promoted_mode != BLKmode
2598 && data->promoted_mode != DECL_MODE (parm))
2600 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2601 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2603 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2604 data->promoted_mode);
2605 if (offset)
2606 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2611 boundary = data->locate.boundary;
2612 align = BITS_PER_UNIT;
2614 /* If we're padding upward, we know that the alignment of the slot
2615 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2616 intentionally forcing upward padding. Otherwise we have to come
2617 up with a guess at the alignment based on OFFSET_RTX. */
2618 if (data->locate.where_pad != downward || data->entry_parm)
2619 align = boundary;
2620 else if (CONST_INT_P (offset_rtx))
2622 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2623 align = align & -align;
2625 set_mem_align (stack_parm, align);
2627 if (data->entry_parm)
2628 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2630 data->stack_parm = stack_parm;
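
/* Example (editorial sketch): for a stack-passed SImode argument at
   slot offset 16, the RTX built above is

       (mem:SI (plus (reg <internal-arg-pointer>) (const_int 16)))

   where the base is typically still the virtual incoming-args pointer
   at this point; instantiate_virtual_regs resolves it later.  */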
2633 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2634 always valid and contiguous. */
2636 static void
2637 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2639 rtx entry_parm = data->entry_parm;
2640 rtx stack_parm = data->stack_parm;
2642 /* If this parm was passed part in regs and part in memory, pretend it
2643 arrived entirely in memory by pushing the register-part onto the stack.
2644 In the special case of a DImode or DFmode that is split, we could put
2645 it together in a pseudoreg directly, but for now that's not worth
2646 bothering with. */
2647 if (data->partial != 0)
2649 /* Handle calls that pass values in multiple non-contiguous
2650 locations. The Irix 6 ABI has examples of this. */
2651 if (GET_CODE (entry_parm) == PARALLEL)
2652 emit_group_store (validize_mem (stack_parm), entry_parm,
2653 data->passed_type,
2654 int_size_in_bytes (data->passed_type));
2655 else
2657 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2658 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2659 data->partial / UNITS_PER_WORD);
2662 entry_parm = stack_parm;
2665 /* If we didn't decide this parm came in a register, by default it came
2666 on the stack. */
2667 else if (entry_parm == NULL)
2668 entry_parm = stack_parm;
2670 /* When an argument is passed in multiple locations, we can't make use
2671 of this information, but we can save some copying if the whole argument
2672 is passed in a single register. */
2673 else if (GET_CODE (entry_parm) == PARALLEL
2674 && data->nominal_mode != BLKmode
2675 && data->passed_mode != BLKmode)
2677 size_t i, len = XVECLEN (entry_parm, 0);
2679 for (i = 0; i < len; i++)
2680 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2681 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2682 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2683 == data->passed_mode)
2684 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2686 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2687 break;
2691 data->entry_parm = entry_parm;
2694 /* A subroutine of assign_parms. Reconstitute any values which were
2695 passed in multiple registers and would fit in a single register. */
2697 static void
2698 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2700 rtx entry_parm = data->entry_parm;
2702 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2703 This can be done with register operations rather than on the
2704 stack, even if we will store the reconstituted parameter on the
2705 stack later. */
2706 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2708 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2709 emit_group_store (parmreg, entry_parm, data->passed_type,
2710 GET_MODE_SIZE (GET_MODE (entry_parm)));
2711 entry_parm = parmreg;
2714 data->entry_parm = entry_parm;
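
/* Example (editorial sketch): a DImode argument described as

       (parallel:DI [(expr_list (reg:SI 3) (const_int 0))
                     (expr_list (reg:SI 4) (const_int 4))])

   is reassembled above into a single fresh DImode pseudo by
   emit_group_store, so later code can treat it as one register
   value.  */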
2717 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2718 always valid and properly aligned. */
2720 static void
2721 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2723 rtx stack_parm = data->stack_parm;
2725 /* If we can't trust the parm stack slot to be aligned enough for its
2726 ultimate type, don't use that slot after entry. We'll make another
2727 stack slot, if we need one. */
2728 if (stack_parm
2729 && ((STRICT_ALIGNMENT
2730 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2731 || (data->nominal_type
2732 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2733 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2734 stack_parm = NULL;
2736 /* If parm was passed in memory, and we need to convert it on entry,
2737 don't store it back in that same slot. */
2738 else if (data->entry_parm == stack_parm
2739 && data->nominal_mode != BLKmode
2740 && data->nominal_mode != data->passed_mode)
2741 stack_parm = NULL;
2743 /* If stack protection is in effect for this function, don't leave any
2744 pointers in their passed stack slots. */
2745 else if (crtl->stack_protect_guard
2746 && (flag_stack_protect == 2
2747 || data->passed_pointer
2748 || POINTER_TYPE_P (data->nominal_type)))
2749 stack_parm = NULL;
2751 data->stack_parm = stack_parm;
2754 /* A subroutine of assign_parms. Return true if the current parameter
2755 should be stored as a BLKmode in the current frame. */
2757 static bool
2758 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2760 if (data->nominal_mode == BLKmode)
2761 return true;
2762 if (GET_MODE (data->entry_parm) == BLKmode)
2763 return true;
2765 #ifdef BLOCK_REG_PADDING
2766 /* Only assign_parm_setup_block knows how to deal with register arguments
2767 that are padded at the least significant end. */
2768 if (REG_P (data->entry_parm)
2769 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2770 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2771 == (BYTES_BIG_ENDIAN ? upward : downward)))
2772 return true;
2773 #endif
2775 return false;
2778 /* A subroutine of assign_parms. Arrange for the parameter to be
2779 present and valid in DATA->STACK_RTL. */
2781 static void
2782 assign_parm_setup_block (struct assign_parm_data_all *all,
2783 tree parm, struct assign_parm_data_one *data)
2785 rtx entry_parm = data->entry_parm;
2786 rtx stack_parm = data->stack_parm;
2787 HOST_WIDE_INT size;
2788 HOST_WIDE_INT size_stored;
2790 if (GET_CODE (entry_parm) == PARALLEL)
2791 entry_parm = emit_group_move_into_temps (entry_parm);
2793 size = int_size_in_bytes (data->passed_type);
2794 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2795 if (stack_parm == 0)
2797 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2798 stack_parm = assign_stack_local (BLKmode, size_stored,
2799 DECL_ALIGN (parm));
2800 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2801 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2802 set_mem_attributes (stack_parm, parm, 1);
2805 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2806 calls that pass values in multiple non-contiguous locations. */
2807 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2809 rtx mem;
2811 /* Note that we will be storing an integral number of words.
2812 So we have to be careful to ensure that we allocate an
2813 integral number of words. We do this above when we call
2814 assign_stack_local if space was not allocated in the argument
2815 list. If it was, this will not work if PARM_BOUNDARY is not
2816 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2817 if it becomes a problem. The exception is when a BLKmode
2818 argument arrives in a form not conforming to word_mode.
2820 if (data->stack_parm == 0)
2822 else if (GET_CODE (entry_parm) == PARALLEL)
2824 else
2825 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2827 mem = validize_mem (stack_parm);
2829 /* Handle values in multiple non-contiguous locations. */
2830 if (GET_CODE (entry_parm) == PARALLEL)
2832 push_to_sequence2 (all->first_conversion_insn,
2833 all->last_conversion_insn);
2834 emit_group_store (mem, entry_parm, data->passed_type, size);
2835 all->first_conversion_insn = get_insns ();
2836 all->last_conversion_insn = get_last_insn ();
2837 end_sequence ();
2840 else if (size == 0)
2843 /* If SIZE is that of a mode no bigger than a word, just use
2844 that mode's store operation. */
2845 else if (size <= UNITS_PER_WORD)
2847 enum machine_mode mode
2848 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2850 if (mode != BLKmode
2851 #ifdef BLOCK_REG_PADDING
2852 && (size == UNITS_PER_WORD
2853 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2854 != (BYTES_BIG_ENDIAN ? upward : downward)))
2855 #endif
2858 rtx reg;
2860 /* We are really truncating a word_mode value containing
2861 SIZE bytes into a value of mode MODE. If such an
2862 operation requires no actual instructions, we can refer
2863 to the value directly in mode MODE, otherwise we must
2864 start with the register in word_mode and explicitly
2865 convert it. */
2866 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2867 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2868 else
2870 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2871 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2873 emit_move_insn (change_address (mem, mode, 0), reg);
2876 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2877 machine must be aligned to the left before storing
2878 to memory. Note that the previous test doesn't
2879 handle all cases (e.g. SIZE == 3). */
2880 else if (size != UNITS_PER_WORD
2881 #ifdef BLOCK_REG_PADDING
2882 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2883 == downward)
2884 #else
2885 && BYTES_BIG_ENDIAN
2886 #endif
2889 rtx tem, x;
2890 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2891 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2893 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
2894 tem = change_address (mem, word_mode, 0);
2895 emit_move_insn (tem, x);
2897 else
2898 move_block_from_reg (REGNO (entry_parm), mem,
2899 size_stored / UNITS_PER_WORD);
2901 else
2902 move_block_from_reg (REGNO (entry_parm), mem,
2903 size_stored / UNITS_PER_WORD);
2905 else if (data->stack_parm == 0)
2907 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2908 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2909 BLOCK_OP_NORMAL);
2910 all->first_conversion_insn = get_insns ();
2911 all->last_conversion_insn = get_last_insn ();
2912 end_sequence ();
2915 data->stack_parm = stack_parm;
2916 SET_DECL_RTL (parm, stack_parm);
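
/* Editorial note on the sub-word big-endian case above: for a 3-byte
   BLKmode value arriving in a register on a 32-bit big-endian port,
   by = (4 - 3) * BITS_PER_UNIT, so the value is shifted left 8 bits
   before the word_mode store, left-justifying the three meaningful
   bytes in the stack slot.  */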
2919 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2920 parameter. Get it there. Perform all ABI specified conversions. */
2922 static void
2923 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2924 struct assign_parm_data_one *data)
2926 rtx parmreg, validated_mem;
2927 rtx equiv_stack_parm;
2928 enum machine_mode promoted_nominal_mode;
2929 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2930 bool did_conversion = false;
2931 bool need_conversion, moved;
2933 /* Store the parm in a pseudoregister during the function, but we may
2934 need to do it in a wider mode. Using 2 here makes the result
2935 consistent with promote_decl_mode and thus expand_expr_real_1. */
2936 promoted_nominal_mode
2937 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
2938 TREE_TYPE (current_function_decl), 2);
2940 parmreg = gen_reg_rtx (promoted_nominal_mode);
2942 if (!DECL_ARTIFICIAL (parm))
2943 mark_user_reg (parmreg);
2945 /* If this was an item that we received a pointer to,
2946 set DECL_RTL appropriately. */
2947 if (data->passed_pointer)
2949 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2950 set_mem_attributes (x, parm, 1);
2951 SET_DECL_RTL (parm, x);
2953 else
2954 SET_DECL_RTL (parm, parmreg);
2956 assign_parm_remove_parallels (data);
2958 /* Copy the value into the register, thus bridging between
2959 assign_parm_find_data_types and expand_expr_real_1. */
2961 equiv_stack_parm = data->stack_parm;
2962 validated_mem = validize_mem (data->entry_parm);
2964 need_conversion = (data->nominal_mode != data->passed_mode
2965 || promoted_nominal_mode != data->promoted_mode);
2966 moved = false;
2968 if (need_conversion
2969 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2970 && data->nominal_mode == data->passed_mode
2971 && data->nominal_mode == GET_MODE (data->entry_parm))
2973 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2974 mode, by the caller. We now have to convert it to
2975 NOMINAL_MODE, if different. However, PARMREG may be in
2976 a different mode than NOMINAL_MODE if it is being stored
2977 promoted.
2979 If ENTRY_PARM is a hard register, it might be in a register
2980 not valid for operating in its mode (e.g., an odd-numbered
2981 register for a DFmode). In that case, moves are the only
2982 thing valid, so we can't do a convert from there. This
2983 occurs when the calling sequence allows such misaligned
2984 usage.
2986 In addition, the conversion may involve a call, which could
2987 clobber parameters which haven't been copied to pseudo
2988 registers yet.
2990 First, we try to emit an insn which performs the necessary
2991 conversion. We verify that this insn does not clobber any
2992 hard registers. */
2994 enum insn_code icode;
2995 rtx op0, op1;
2997 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
2998 unsignedp);
3000 op0 = parmreg;
3001 op1 = validated_mem;
3002 if (icode != CODE_FOR_nothing
3003 && insn_operand_matches (icode, 0, op0)
3004 && insn_operand_matches (icode, 1, op1))
3006 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3007 rtx insn, insns, t = op1;
3008 HARD_REG_SET hardregs;
3010 start_sequence ();
3011 /* If op1 is a hard register that is likely spilled, first
3012 force it into a pseudo; otherwise the combiner might extend
3013 its lifetime too much. */
3014 if (GET_CODE (t) == SUBREG)
3015 t = SUBREG_REG (t);
3016 if (REG_P (t)
3017 && HARD_REGISTER_P (t)
3018 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3019 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3021 t = gen_reg_rtx (GET_MODE (op1));
3022 emit_move_insn (t, op1);
3024 else
3025 t = op1;
3026 insn = gen_extend_insn (op0, t, promoted_nominal_mode,
3027 data->passed_mode, unsignedp);
3028 emit_insn (insn);
3029 insns = get_insns ();
3031 moved = true;
3032 CLEAR_HARD_REG_SET (hardregs);
3033 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3035 if (INSN_P (insn))
3036 note_stores (PATTERN (insn), record_hard_reg_sets,
3037 &hardregs);
3038 if (!hard_reg_set_empty_p (hardregs))
3039 moved = false;
3042 end_sequence ();
3044 if (moved)
3046 emit_insn (insns);
3047 if (equiv_stack_parm != NULL_RTX)
3048 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3049 equiv_stack_parm);
3054 if (moved)
3055 /* Nothing to do. */
3057 else if (need_conversion)
3059 /* We did not have an insn to convert directly, or the sequence
3060 generated appeared unsafe. We must first copy the parm to a
3061 pseudo reg, and save the conversion until after all
3062 parameters have been moved. */
3064 int save_tree_used;
3065 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3067 emit_move_insn (tempreg, validated_mem);
3069 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3070 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3072 if (GET_CODE (tempreg) == SUBREG
3073 && GET_MODE (tempreg) == data->nominal_mode
3074 && REG_P (SUBREG_REG (tempreg))
3075 && data->nominal_mode == data->passed_mode
3076 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3077 && GET_MODE_SIZE (GET_MODE (tempreg))
3078 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3080 /* The argument is already sign/zero extended, so note it
3081 into the subreg. */
3082 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3083 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
3086 /* TREE_USED gets set erroneously during expand_assignment. */
3087 save_tree_used = TREE_USED (parm);
3088 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3089 TREE_USED (parm) = save_tree_used;
3090 all->first_conversion_insn = get_insns ();
3091 all->last_conversion_insn = get_last_insn ();
3092 end_sequence ();
3094 did_conversion = true;
3096 else
3097 emit_move_insn (parmreg, validated_mem);
3099 /* If we were passed a pointer but the actual value can safely live
3100 in a register, put it in one. */
3101 if (data->passed_pointer
3102 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3103 /* If by-reference argument was promoted, demote it. */
3104 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
3105 || use_register_for_decl (parm)))
3107 /* We can't use nominal_mode, because it will have been set to
3108 Pmode above. We must use the actual mode of the parm. */
3109 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3110 mark_user_reg (parmreg);
3112 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3114 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3115 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3117 push_to_sequence2 (all->first_conversion_insn,
3118 all->last_conversion_insn);
3119 emit_move_insn (tempreg, DECL_RTL (parm));
3120 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3121 emit_move_insn (parmreg, tempreg);
3122 all->first_conversion_insn = get_insns ();
3123 all->last_conversion_insn = get_last_insn ();
3124 end_sequence ();
3126 did_conversion = true;
3128 else
3129 emit_move_insn (parmreg, DECL_RTL (parm));
3131 SET_DECL_RTL (parm, parmreg);
3133 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3134 now the parm. */
3135 data->stack_parm = NULL;
3138 /* Mark the register as eliminable if we did no conversion and it was
3139 copied from memory at a fixed offset, and the arg pointer was not
3140 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3141 offset formed an invalid address, such memory-equivalences as we
3142 make here would screw up life analysis for it. */
3143 if (data->nominal_mode == data->passed_mode
3144 && !did_conversion
3145 && data->stack_parm != 0
3146 && MEM_P (data->stack_parm)
3147 && data->locate.offset.var == 0
3148 && reg_mentioned_p (virtual_incoming_args_rtx,
3149 XEXP (data->stack_parm, 0)))
3151 rtx linsn = get_last_insn ();
3152 rtx sinsn, set;
3154 /* Mark complex types separately. */
3155 if (GET_CODE (parmreg) == CONCAT)
3157 enum machine_mode submode
3158 = GET_MODE_INNER (GET_MODE (parmreg));
3159 int regnor = REGNO (XEXP (parmreg, 0));
3160 int regnoi = REGNO (XEXP (parmreg, 1));
3161 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3162 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3163 GET_MODE_SIZE (submode));
3165 /* Scan backwards for the set of the real and
3166 imaginary parts. */
3167 for (sinsn = linsn; sinsn != 0;
3168 sinsn = prev_nonnote_insn (sinsn))
3170 set = single_set (sinsn);
3171 if (set == 0)
3172 continue;
3174 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3175 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3176 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3177 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3180 else
3181 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3184 /* For pointer data type, suggest pointer register. */
3185 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3186 mark_reg_pointer (parmreg,
3187 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3190 /* A subroutine of assign_parms. Allocate stack space to hold the current
3191 parameter. Get it there. Perform all ABI specified conversions. */
3193 static void
3194 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3195 struct assign_parm_data_one *data)
3197 /* Value must be stored in the stack slot STACK_PARM during function
3198 execution. */
3199 bool to_conversion = false;
3201 assign_parm_remove_parallels (data);
3203 if (data->promoted_mode != data->nominal_mode)
3205 /* Conversion is required. */
3206 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3208 emit_move_insn (tempreg, validize_mem (data->entry_parm));
3210 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3211 to_conversion = true;
3213 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3214 TYPE_UNSIGNED (TREE_TYPE (parm)));
3216 if (data->stack_parm)
3218 int offset = subreg_lowpart_offset (data->nominal_mode,
3219 GET_MODE (data->stack_parm));
3220 /* ??? This may need a big-endian conversion on sparc64. */
3221 data->stack_parm
3222 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3223 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3224 set_mem_offset (data->stack_parm,
3225 MEM_OFFSET (data->stack_parm) + offset);
3229 if (data->entry_parm != data->stack_parm)
3231 rtx src, dest;
3233 if (data->stack_parm == 0)
3235 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3236 GET_MODE (data->entry_parm),
3237 TYPE_ALIGN (data->passed_type));
3238 data->stack_parm
3239 = assign_stack_local (GET_MODE (data->entry_parm),
3240 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3241 align);
3242 set_mem_attributes (data->stack_parm, parm, 1);
3245 dest = validize_mem (data->stack_parm);
3246 src = validize_mem (data->entry_parm);
3248 if (MEM_P (src))
3250 /* Use a block move to handle potentially misaligned entry_parm. */
3251 if (!to_conversion)
3252 push_to_sequence2 (all->first_conversion_insn,
3253 all->last_conversion_insn);
3254 to_conversion = true;
3256 emit_block_move (dest, src,
3257 GEN_INT (int_size_in_bytes (data->passed_type)),
3258 BLOCK_OP_NORMAL);
3260 else
3261 emit_move_insn (dest, src);
3264 if (to_conversion)
3266 all->first_conversion_insn = get_insns ();
3267 all->last_conversion_insn = get_last_insn ();
3268 end_sequence ();
3271 SET_DECL_RTL (parm, data->stack_parm);
3274 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3275 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3277 static void
3278 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3279 vec<tree> fnargs)
3281 tree parm;
3282 tree orig_fnargs = all->orig_fnargs;
3283 unsigned i = 0;
3285 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3287 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3288 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3290 rtx tmp, real, imag;
3291 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3293 real = DECL_RTL (fnargs[i]);
3294 imag = DECL_RTL (fnargs[i + 1]);
3295 if (inner != GET_MODE (real))
3297 real = gen_lowpart_SUBREG (inner, real);
3298 imag = gen_lowpart_SUBREG (inner, imag);
3301 if (TREE_ADDRESSABLE (parm))
3303 rtx rmem, imem;
3304 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3305 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3306 DECL_MODE (parm),
3307 TYPE_ALIGN (TREE_TYPE (parm)));
3309 /* split_complex_arg put the real and imag parts in
3310 pseudos. Move them to memory. */
3311 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3312 set_mem_attributes (tmp, parm, 1);
3313 rmem = adjust_address_nv (tmp, inner, 0);
3314 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3315 push_to_sequence2 (all->first_conversion_insn,
3316 all->last_conversion_insn);
3317 emit_move_insn (rmem, real);
3318 emit_move_insn (imem, imag);
3319 all->first_conversion_insn = get_insns ();
3320 all->last_conversion_insn = get_last_insn ();
3321 end_sequence ();
3323 else
3324 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3325 SET_DECL_RTL (parm, tmp);
3327 real = DECL_INCOMING_RTL (fnargs[i]);
3328 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3329 if (inner != GET_MODE (real))
3331 real = gen_lowpart_SUBREG (inner, real);
3332 imag = gen_lowpart_SUBREG (inner, imag);
3334 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3335 set_decl_incoming_rtl (parm, tmp, false);
3336 i++;
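
/* Example (editorial sketch): after this loop, a split _Complex double
   parameter that needed no memory copy ends up with

       DECL_RTL (parm) == (concat:DC (reg:DF <real>) (reg:DF <imag>))

   built from the DECL_RTLs of the two scalar halves.  */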
3341 /* Assign RTL expressions to the function's parameters. This may involve
3342 copying them into registers and using those registers as the DECL_RTL. */
3344 static void
3345 assign_parms (tree fndecl)
3347 struct assign_parm_data_all all;
3348 tree parm;
3349 vec<tree> fnargs;
3350 unsigned i;
3352 crtl->args.internal_arg_pointer
3353 = targetm.calls.internal_arg_pointer ();
3355 assign_parms_initialize_all (&all);
3356 fnargs = assign_parms_augmented_arg_list (&all);
3358 FOR_EACH_VEC_ELT (fnargs, i, parm)
3360 struct assign_parm_data_one data;
3362 /* Extract the type of PARM; adjust it according to ABI. */
3363 assign_parm_find_data_types (&all, parm, &data);
3365 /* Early out for errors and void parameters. */
3366 if (data.passed_mode == VOIDmode)
3368 SET_DECL_RTL (parm, const0_rtx);
3369 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3370 continue;
3373 /* Estimate stack alignment from parameter alignment. */
3374 if (SUPPORTS_STACK_ALIGNMENT)
3376 unsigned int align
3377 = targetm.calls.function_arg_boundary (data.promoted_mode,
3378 data.passed_type);
3379 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3380 align);
3381 if (TYPE_ALIGN (data.nominal_type) > align)
3382 align = MINIMUM_ALIGNMENT (data.nominal_type,
3383 TYPE_MODE (data.nominal_type),
3384 TYPE_ALIGN (data.nominal_type));
3385 if (crtl->stack_alignment_estimated < align)
3387 gcc_assert (!crtl->stack_realign_processed);
3388 crtl->stack_alignment_estimated = align;
3392 if (cfun->stdarg && !DECL_CHAIN (parm))
3393 assign_parms_setup_varargs (&all, &data, false);
3395 /* Find out where the parameter arrives in this function. */
3396 assign_parm_find_entry_rtl (&all, &data);
3398 /* Find out where stack space for this parameter might be. */
3399 if (assign_parm_is_stack_parm (&all, &data))
3401 assign_parm_find_stack_rtl (parm, &data);
3402 assign_parm_adjust_entry_rtl (&data);
3405 /* Record permanently how this parm was passed. */
3406 if (data.passed_pointer)
3408 rtx incoming_rtl
3409 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3410 data.entry_parm);
3411 set_decl_incoming_rtl (parm, incoming_rtl, true);
3413 else
3414 set_decl_incoming_rtl (parm, data.entry_parm, false);
3416 /* Update info on where next arg arrives in registers. */
3417 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3418 data.passed_type, data.named_arg);
3420 assign_parm_adjust_stack_rtl (&data);
3422 if (assign_parm_setup_block_p (&data))
3423 assign_parm_setup_block (&all, parm, &data);
3424 else if (data.passed_pointer || use_register_for_decl (parm))
3425 assign_parm_setup_reg (&all, parm, &data);
3426 else
3427 assign_parm_setup_stack (&all, parm, &data);
3430 if (targetm.calls.split_complex_arg)
3431 assign_parms_unsplit_complex (&all, fnargs);
3433 fnargs.release ();
3435 /* Output all parameter conversion instructions (possibly including calls)
3436 now that all parameters have been copied out of hard registers. */
3437 emit_insn (all.first_conversion_insn);
3439 /* Estimate reload stack alignment from scalar return mode. */
3440 if (SUPPORTS_STACK_ALIGNMENT)
3442 if (DECL_RESULT (fndecl))
3444 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3445 enum machine_mode mode = TYPE_MODE (type);
3447 if (mode != BLKmode
3448 && mode != VOIDmode
3449 && !AGGREGATE_TYPE_P (type))
3451 unsigned int align = GET_MODE_ALIGNMENT (mode);
3452 if (crtl->stack_alignment_estimated < align)
3454 gcc_assert (!crtl->stack_realign_processed);
3455 crtl->stack_alignment_estimated = align;
3461 /* If we are receiving a struct value address as the first argument, set up
3462 the RTL for the function result. As this might require code to convert
3463 the transmitted address to Pmode, we do this here to ensure that possible
3464 preliminary conversions of the address have been emitted already. */
3465 if (all.function_result_decl)
3467 tree result = DECL_RESULT (current_function_decl);
3468 rtx addr = DECL_RTL (all.function_result_decl);
3469 rtx x;
3471 if (DECL_BY_REFERENCE (result))
3473 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3474 x = addr;
3476 else
3478 SET_DECL_VALUE_EXPR (result,
3479 build1 (INDIRECT_REF, TREE_TYPE (result),
3480 all.function_result_decl));
3481 addr = convert_memory_address (Pmode, addr);
3482 x = gen_rtx_MEM (DECL_MODE (result), addr);
3483 set_mem_attributes (x, result, 1);
3486 DECL_HAS_VALUE_EXPR_P (result) = 1;
3488 SET_DECL_RTL (result, x);
3491 /* We have aligned all the args, so add space for the pretend args. */
3492 crtl->args.pretend_args_size = all.pretend_args_size;
3493 all.stack_args_size.constant += all.extra_pretend_bytes;
3494 crtl->args.size = all.stack_args_size.constant;
3496 /* Adjust function incoming argument size for alignment and
3497 minimum length. */
3499 #ifdef REG_PARM_STACK_SPACE
3500 crtl->args.size = MAX (crtl->args.size,
3501 REG_PARM_STACK_SPACE (fndecl));
3502 #endif
3504 crtl->args.size = CEIL_ROUND (crtl->args.size,
3505 PARM_BOUNDARY / BITS_PER_UNIT);
3507 #ifdef ARGS_GROW_DOWNWARD
3508 crtl->args.arg_offset_rtx
3509 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3510 : expand_expr (size_diffop (all.stack_args_size.var,
3511 size_int (-all.stack_args_size.constant)),
3512 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3513 #else
3514 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3515 #endif
3517 /* See how many bytes, if any, of its args a function should try to pop
3518 on return. */
3520 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3521 TREE_TYPE (fndecl),
3522 crtl->args.size);
3524 /* For a stdarg.h function, save info about
3525 regs and stack space used by the named args. */
3527 crtl->args.info = all.args_so_far_v;
3529 /* Set the rtx used for the function return value. Put this in its
3530 own variable so any optimizers that need this information don't have
3531 to include tree.h. Do this here so it gets done when an inlined
3532 function gets output. */
3534 crtl->return_rtx
3535 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3536 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3538 /* If the scalar return value was computed in a pseudo-reg, or was a named
3539 return value that got dumped to the stack, copy that to the hard
3540 return register. */
3541 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3543 tree decl_result = DECL_RESULT (fndecl);
3544 rtx decl_rtl = DECL_RTL (decl_result);
3546 if (REG_P (decl_rtl)
3547 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3548 : DECL_REGISTER (decl_result))
3550 rtx real_decl_rtl;
3552 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3553 fndecl, true);
3554 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3555 /* The delay slot scheduler assumes that crtl->return_rtx
3556 holds the hard register containing the return value, not a
3557 temporary pseudo. */
3558 crtl->return_rtx = real_decl_rtl;
3563 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3564 For all seen types, gimplify their sizes. */
3566 static tree
3567 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3569 tree t = *tp;
3571 *walk_subtrees = 0;
3572 if (TYPE_P (t))
3574 if (POINTER_TYPE_P (t))
3575 *walk_subtrees = 1;
3576 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3577 && !TYPE_SIZES_GIMPLIFIED (t))
3579 gimplify_type_sizes (t, (gimple_seq *) data);
3580 *walk_subtrees = 1;
3584 return NULL;
3587 /* Gimplify the parameter list for current_function_decl. This involves
3588 evaluating SAVE_EXPRs of variable sized parameters and generating code
3589 to implement callee-copies reference parameters. Returns a sequence of
3590 statements to add to the beginning of the function. */
3592 gimple_seq
3593 gimplify_parameters (void)
3595 struct assign_parm_data_all all;
3596 tree parm;
3597 gimple_seq stmts = NULL;
3598 vec<tree> fnargs;
3599 unsigned i;
3601 assign_parms_initialize_all (&all);
3602 fnargs = assign_parms_augmented_arg_list (&all);
3604 FOR_EACH_VEC_ELT (fnargs, i, parm)
3606 struct assign_parm_data_one data;
3608 /* Extract the type of PARM; adjust it according to ABI. */
3609 assign_parm_find_data_types (&all, parm, &data);
3611 /* Early out for errors and void parameters. */
3612 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3613 continue;
3615 /* Update info on where next arg arrives in registers. */
3616 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3617 data.passed_type, data.named_arg);
3619 /* ??? Once upon a time variable_size stuffed parameter list
3620 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3621 turned out to be less than manageable in the gimple world.
3622 Now we have to hunt them down ourselves. */
3623 walk_tree_without_duplicates (&data.passed_type,
3624 gimplify_parm_type, &stmts);
3626 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3628 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3629 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3632 if (data.passed_pointer)
3634 tree type = TREE_TYPE (data.passed_type);
3635 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
3636 type, data.named_arg))
3638 tree local, t;
3640 /* For constant-sized objects, this is trivial; for
3641 variable-sized objects, we have to play games. */
3642 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3643 && !(flag_stack_check == GENERIC_STACK_CHECK
3644 && compare_tree_int (DECL_SIZE_UNIT (parm),
3645 STACK_CHECK_MAX_VAR_SIZE) > 0))
3647 local = create_tmp_var (type, get_name (parm));
3648 DECL_IGNORED_P (local) = 0;
3649 /* If PARM was addressable, move that flag over
3650 to the local copy, as its address will be taken,
3651 not the PARM's. Keep the PARM's address-taken flag set,
3652 as we'll query that flag during gimplification. */
3653 if (TREE_ADDRESSABLE (parm))
3654 TREE_ADDRESSABLE (local) = 1;
3655 else if (TREE_CODE (type) == COMPLEX_TYPE
3656 || TREE_CODE (type) == VECTOR_TYPE)
3657 DECL_GIMPLE_REG_P (local) = 1;
3659 else
3661 tree ptr_type, addr;
3663 ptr_type = build_pointer_type (type);
3664 addr = create_tmp_reg (ptr_type, get_name (parm));
3665 DECL_IGNORED_P (addr) = 0;
3666 local = build_fold_indirect_ref (addr);
3668 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3669 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
3670 size_int (DECL_ALIGN (parm)));
3672 /* The call has been built for a variable-sized object. */
3673 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3674 t = fold_convert (ptr_type, t);
3675 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3676 gimplify_and_add (t, &stmts);
3679 gimplify_assign (local, parm, &stmts);
3681 SET_DECL_VALUE_EXPR (parm, local);
3682 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3687 fnargs.release ();
3689 return stmts;
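/* A sketch of the kind of source this handles (illustrative only; F and
   its callee-copy behavior are hypothetical, not from this file):

       void f (int n, char buf[n][n]) { ... }

   TYPE_SIZE of BUF's type contains SAVE_EXPRs of N, which the walk
   above gimplifies into STMTS; and if the target passes such an object
   by reference with callee-copy semantics, a local copy is made, using
   BUILT_IN_ALLOCA_WITH_ALIGN when the size is not constant.  */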
3692 /* Compute the size and offset from the start of the stacked arguments for a
3693 parm passed in mode PASSED_MODE and with type TYPE.
3695 INITIAL_OFFSET_PTR points to the current offset into the stacked
3696 arguments.
3698 The starting offset and size for this parm are returned in
3699 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3700 nonzero, the offset is that of the stack slot, which is returned in
3701 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3702 padding required from the initial offset ptr to the stack slot.
3704 IN_REGS is nonzero if the argument will be passed in registers. It will
3705 never be set if REG_PARM_STACK_SPACE is not defined.
3707 FNDECL is the function in which the argument was defined.
3709 There are two types of rounding that are done. The first, controlled by
3710 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3711 argument list to be aligned to the specified boundary (in bits). This
3712 rounding affects the initial and starting offsets, but not the argument
3713 size.
3715 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3716 optionally rounds the size of the parm to PARM_BOUNDARY. The
3717 initial offset is not affected by this rounding, while the size always
3718 is and the starting offset may be. */
3720 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3721 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3722 callers pass in the total size of args so far as
3723 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
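/* Worked example (hypothetical target parameters, not from any real
   port): with a 6-byte argument at initial offset 2, a
   TARGET_FUNCTION_ARG_BOUNDARY of 32 bits first rounds the starting
   offset up to 4; if FUNCTION_ARG_PADDING then rounds the size to a
   32-bit PARM_BOUNDARY, the size becomes 8 while the initial offset
   itself is unaffected by that second rounding.  */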
3725 void
3726 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3727 int partial, tree fndecl ATTRIBUTE_UNUSED,
3728 struct args_size *initial_offset_ptr,
3729 struct locate_and_pad_arg_data *locate)
3731 tree sizetree;
3732 enum direction where_pad;
3733 unsigned int boundary, round_boundary;
3734 int reg_parm_stack_space = 0;
3735 int part_size_in_regs;
3737 #ifdef REG_PARM_STACK_SPACE
3738 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3740 /* If we have found a stack parm before we reach the end of the
3741 area reserved for registers, skip that area. */
3742 if (! in_regs)
3744 if (reg_parm_stack_space > 0)
3746 if (initial_offset_ptr->var)
3748 initial_offset_ptr->var
3749 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3750 ssize_int (reg_parm_stack_space));
3751 initial_offset_ptr->constant = 0;
3753 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3754 initial_offset_ptr->constant = reg_parm_stack_space;
3757 #endif /* REG_PARM_STACK_SPACE */
3759 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3761 sizetree
3762 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3763 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3764 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
3765 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3766 type);
3767 locate->where_pad = where_pad;
3769 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3770 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3771 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3773 locate->boundary = boundary;
3775 if (SUPPORTS_STACK_ALIGNMENT)
3777 /* stack_alignment_estimated can't change after stack has been
3778 realigned. */
3779 if (crtl->stack_alignment_estimated < boundary)
3781 if (!crtl->stack_realign_processed)
3782 crtl->stack_alignment_estimated = boundary;
3783 else
3785 /* If stack is realigned and stack alignment value
3786 hasn't been finalized, it is OK not to increase
3787 stack_alignment_estimated. The bigger alignment
3788 requirement is recorded in stack_alignment_needed
3789 below. */
3790 gcc_assert (!crtl->stack_realign_finalized
3791 && crtl->stack_realign_needed);
3796 /* Remember if the outgoing parameter requires extra alignment on the
3797 calling function side. */
3798 if (crtl->stack_alignment_needed < boundary)
3799 crtl->stack_alignment_needed = boundary;
3800 if (crtl->preferred_stack_boundary < boundary)
3801 crtl->preferred_stack_boundary = boundary;
3803 #ifdef ARGS_GROW_DOWNWARD
3804 locate->slot_offset.constant = -initial_offset_ptr->constant;
3805 if (initial_offset_ptr->var)
3806 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3807 initial_offset_ptr->var);
3810 tree s2 = sizetree;
3811 if (where_pad != none
3812 && (!host_integerp (sizetree, 1)
3813 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3814 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
3815 SUB_PARM_SIZE (locate->slot_offset, s2);
3818 locate->slot_offset.constant += part_size_in_regs;
3820 if (!in_regs
3821 #ifdef REG_PARM_STACK_SPACE
3822 || REG_PARM_STACK_SPACE (fndecl) > 0
3823 #endif
3825 pad_to_arg_alignment (&locate->slot_offset, boundary,
3826 &locate->alignment_pad);
3828 locate->size.constant = (-initial_offset_ptr->constant
3829 - locate->slot_offset.constant);
3830 if (initial_offset_ptr->var)
3831 locate->size.var = size_binop (MINUS_EXPR,
3832 size_binop (MINUS_EXPR,
3833 ssize_int (0),
3834 initial_offset_ptr->var),
3835 locate->slot_offset.var);
3837 /* Pad_below needs the pre-rounded size to know how much to pad
3838 below. */
3839 locate->offset = locate->slot_offset;
3840 if (where_pad == downward)
3841 pad_below (&locate->offset, passed_mode, sizetree);
3843 #else /* !ARGS_GROW_DOWNWARD */
3844 if (!in_regs
3845 #ifdef REG_PARM_STACK_SPACE
3846 || REG_PARM_STACK_SPACE (fndecl) > 0
3847 #endif
3849 pad_to_arg_alignment (initial_offset_ptr, boundary,
3850 &locate->alignment_pad);
3851 locate->slot_offset = *initial_offset_ptr;
3853 #ifdef PUSH_ROUNDING
3854 if (passed_mode != BLKmode)
3855 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3856 #endif
3858 /* Pad_below needs the pre-rounded size to know how much to pad below
3859 so this must be done before rounding up. */
3860 locate->offset = locate->slot_offset;
3861 if (where_pad == downward)
3862 pad_below (&locate->offset, passed_mode, sizetree);
3864 if (where_pad != none
3865 && (!host_integerp (sizetree, 1)
3866 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3867 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
3869 ADD_PARM_SIZE (locate->size, sizetree);
3871 locate->size.constant -= part_size_in_regs;
3872 #endif /* ARGS_GROW_DOWNWARD */
3874 #ifdef FUNCTION_ARG_OFFSET
3875 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3876 #endif
3879 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3880 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3882 static void
3883 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3884 struct args_size *alignment_pad)
3886 tree save_var = NULL_TREE;
3887 HOST_WIDE_INT save_constant = 0;
3888 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3889 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3891 #ifdef SPARC_STACK_BOUNDARY_HACK
3892 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3893 the real alignment of %sp. However, when it does this, the
3894 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3895 if (SPARC_STACK_BOUNDARY_HACK)
3896 sp_offset = 0;
3897 #endif
3899 if (boundary > PARM_BOUNDARY)
3901 save_var = offset_ptr->var;
3902 save_constant = offset_ptr->constant;
3905 alignment_pad->var = NULL_TREE;
3906 alignment_pad->constant = 0;
3908 if (boundary > BITS_PER_UNIT)
3910 if (offset_ptr->var)
3912 tree sp_offset_tree = ssize_int (sp_offset);
3913 tree offset = size_binop (PLUS_EXPR,
3914 ARGS_SIZE_TREE (*offset_ptr),
3915 sp_offset_tree);
3916 #ifdef ARGS_GROW_DOWNWARD
3917 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3918 #else
3919 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3920 #endif
3922 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3923 /* ARGS_SIZE_TREE includes constant term. */
3924 offset_ptr->constant = 0;
3925 if (boundary > PARM_BOUNDARY)
3926 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3927 save_var);
3929 else
3931 offset_ptr->constant = -sp_offset +
3932 #ifdef ARGS_GROW_DOWNWARD
3933 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3934 #else
3935 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3936 #endif
3937 if (boundary > PARM_BOUNDARY)
3938 alignment_pad->constant = offset_ptr->constant - save_constant;
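/* For instance (assuming sp_offset is 0 and arguments grow upward):
   a constant offset of 9 with a 64-bit boundary gives
   CEIL_ROUND (9, 8) == 16, and when boundary > PARM_BOUNDARY the
   7 bytes of padding are recorded in alignment_pad->constant.  */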
3943 static void
3944 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3946 if (passed_mode != BLKmode)
3948 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3949 offset_ptr->constant
3950 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3951 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3952 - GET_MODE_SIZE (passed_mode));
3954 else
3956 if (TREE_CODE (sizetree) != INTEGER_CST
3957 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3959 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3960 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3961 /* Add it in. */
3962 ADD_PARM_SIZE (*offset_ptr, s2);
3963 SUB_PARM_SIZE (*offset_ptr, sizetree);
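/* E.g. with PARM_BOUNDARY == 32, a 16-bit HImode argument padded
   downward advances the offset by 4 - 2 == 2 bytes, so the value
   sits in the upper half of its rounded-up slot.  */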
3969 /* True if register REGNO was alive at a place where `setjmp' was
3970 called and was set more than once or is an argument. Such regs may
3971 be clobbered by `longjmp'. */
3973 static bool
3974 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3976 /* There appear to be cases where some local vars never reach the
3977 backend but have bogus regnos. */
3978 if (regno >= max_reg_num ())
3979 return false;
3981 return ((REG_N_SETS (regno) > 1
3982 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3983 && REGNO_REG_SET_P (setjmp_crosses, regno));
3986 /* Walk the tree of blocks describing the binding levels within a
3987 function and warn about variables that might be killed by setjmp or
3988 vfork. This is done after flow analysis and before register
3989 allocation, since register allocation will map the pseudo-regs to
3990 hard regs. */
3992 static void
3993 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3995 tree decl, sub;
3997 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
3999 if (TREE_CODE (decl) == VAR_DECL
4000 && DECL_RTL_SET_P (decl)
4001 && REG_P (DECL_RTL (decl))
4002 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4003 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4004 " %<longjmp%> or %<vfork%>", decl);
4007 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4008 setjmp_vars_warning (setjmp_crosses, sub);
4011 /* Do the appropriate part of setjmp_vars_warning
4012 but for arguments instead of local variables. */
4014 static void
4015 setjmp_args_warning (bitmap setjmp_crosses)
4017 tree decl;
4018 for (decl = DECL_ARGUMENTS (current_function_decl);
4019 decl; decl = DECL_CHAIN (decl))
4020 if (DECL_RTL (decl) != 0
4021 && REG_P (DECL_RTL (decl))
4022 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4023 warning (OPT_Wclobbered,
4024 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4025 decl);
4028 /* Generate warning messages for variables live across setjmp. */
4030 void
4031 generate_setjmp_warnings (void)
4033 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4035 if (n_basic_blocks == NUM_FIXED_BLOCKS
4036 || bitmap_empty_p (setjmp_crosses))
4037 return;
4039 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4040 setjmp_args_warning (setjmp_crosses);
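/* For example (hypothetical code), compiling

       jmp_buf buf;
       int f (void) { int x = 0; setjmp (buf); x++; return x; }

   with -Wclobbered warns about X here if it lives in a register that
   is live across the setjmp call and is set more than once.  */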
4044 /* Reverse the order of elements in the fragment chain T of blocks,
4045 and return the new head of the chain (old last element).
4046 In addition, clear the BLOCK_SAME_RANGE flags when needed
4047 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4048 its super fragment origin. */
4050 static tree
4051 block_fragments_nreverse (tree t)
4053 tree prev = 0, block, next, prev_super = 0;
4054 tree super = BLOCK_SUPERCONTEXT (t);
4055 if (BLOCK_FRAGMENT_ORIGIN (super))
4056 super = BLOCK_FRAGMENT_ORIGIN (super);
4057 for (block = t; block; block = next)
4059 next = BLOCK_FRAGMENT_CHAIN (block);
4060 BLOCK_FRAGMENT_CHAIN (block) = prev;
4061 if ((prev && !BLOCK_SAME_RANGE (prev))
4062 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4063 != prev_super))
4064 BLOCK_SAME_RANGE (block) = 0;
4065 prev_super = BLOCK_SUPERCONTEXT (block);
4066 BLOCK_SUPERCONTEXT (block) = super;
4067 prev = block;
4069 t = BLOCK_FRAGMENT_ORIGIN (t);
4070 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4071 != prev_super)
4072 BLOCK_SAME_RANGE (t) = 0;
4073 BLOCK_SUPERCONTEXT (t) = super;
4074 return prev;
4077 /* Reverse the order of elements in the chain T of blocks,
4078 and return the new head of the chain (old last element).
4079 Also do the same on subblocks and reverse the order of elements
4080 in BLOCK_FRAGMENT_CHAIN as well. */
4082 static tree
4083 blocks_nreverse_all (tree t)
4085 tree prev = 0, block, next;
4086 for (block = t; block; block = next)
4088 next = BLOCK_CHAIN (block);
4089 BLOCK_CHAIN (block) = prev;
4090 if (BLOCK_FRAGMENT_CHAIN (block)
4091 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4093 BLOCK_FRAGMENT_CHAIN (block)
4094 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4095 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4096 BLOCK_SAME_RANGE (block) = 0;
4098 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4099 prev = block;
4101 return prev;
4105 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4106 and create duplicate blocks. */
4107 /* ??? Need an option to either create block fragments or to create
4108 abstract origin duplicates of a source block. It really depends
4109 on what optimization has been performed. */
4111 void
4112 reorder_blocks (void)
4114 tree block = DECL_INITIAL (current_function_decl);
4115 vec<tree> block_stack;
4117 if (block == NULL_TREE)
4118 return;
4120 block_stack.create (10);
4122 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4123 clear_block_marks (block);
4125 /* Prune the old trees away, so that they don't get in the way. */
4126 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4127 BLOCK_CHAIN (block) = NULL_TREE;
4129 /* Recreate the block tree from the note nesting. */
4130 reorder_blocks_1 (get_insns (), block, &block_stack);
4131 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4133 block_stack.release ();
4136 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4138 void
4139 clear_block_marks (tree block)
4141 while (block)
4143 TREE_ASM_WRITTEN (block) = 0;
4144 clear_block_marks (BLOCK_SUBBLOCKS (block));
4145 block = BLOCK_CHAIN (block);
4149 static void
4150 reorder_blocks_1 (rtx insns, tree current_block, vec<tree> *p_block_stack)
4152 rtx insn;
4153 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4155 for (insn = insns; insn; insn = NEXT_INSN (insn))
4157 if (NOTE_P (insn))
4159 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4161 tree block = NOTE_BLOCK (insn);
4162 tree origin;
4164 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4165 origin = block;
4167 if (prev_end)
4168 BLOCK_SAME_RANGE (prev_end) = 0;
4169 prev_end = NULL_TREE;
4171 /* If we have seen this block before, that means it now
4172 spans multiple address regions. Create a new fragment. */
4173 if (TREE_ASM_WRITTEN (block))
4175 tree new_block = copy_node (block);
4177 BLOCK_SAME_RANGE (new_block) = 0;
4178 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4179 BLOCK_FRAGMENT_CHAIN (new_block)
4180 = BLOCK_FRAGMENT_CHAIN (origin);
4181 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4183 NOTE_BLOCK (insn) = new_block;
4184 block = new_block;
4187 if (prev_beg == current_block && prev_beg)
4188 BLOCK_SAME_RANGE (block) = 1;
4190 prev_beg = origin;
4192 BLOCK_SUBBLOCKS (block) = 0;
4193 TREE_ASM_WRITTEN (block) = 1;
4194 /* When there's only one block for the entire function,
4195 current_block == block and we mustn't do this; it
4196 would cause infinite recursion. */
4197 if (block != current_block)
4199 tree super;
4200 if (block != origin)
4201 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4202 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4203 (origin))
4204 == current_block);
4205 if (p_block_stack->is_empty ())
4206 super = current_block;
4207 else
4209 super = p_block_stack->last ();
4210 gcc_assert (super == current_block
4211 || BLOCK_FRAGMENT_ORIGIN (super)
4212 == current_block);
4214 BLOCK_SUPERCONTEXT (block) = super;
4215 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4216 BLOCK_SUBBLOCKS (current_block) = block;
4217 current_block = origin;
4219 p_block_stack->safe_push (block);
4221 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4223 NOTE_BLOCK (insn) = p_block_stack->pop ();
4224 current_block = BLOCK_SUPERCONTEXT (current_block);
4225 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4226 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4227 prev_beg = NULL_TREE;
4228 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4229 ? NOTE_BLOCK (insn) : NULL_TREE;
4232 else
4234 prev_beg = NULL_TREE;
4235 if (prev_end)
4236 BLOCK_SAME_RANGE (prev_end) = 0;
4237 prev_end = NULL_TREE;
4242 /* Reverse the order of elements in the chain T of blocks,
4243 and return the new head of the chain (old last element). */
4245 tree
4246 blocks_nreverse (tree t)
4248 tree prev = 0, block, next;
4249 for (block = t; block; block = next)
4251 next = BLOCK_CHAIN (block);
4252 BLOCK_CHAIN (block) = prev;
4253 prev = block;
4255 return prev;
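/* E.g. a chain B1 -> B2 -> B3 (linked through BLOCK_CHAIN) is
   returned as B3 -> B2 -> B1, with B1's BLOCK_CHAIN now NULL.  */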
4258 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4259 by modifying the last node in chain 1 to point to chain 2. */
4261 tree
4262 block_chainon (tree op1, tree op2)
4264 tree t1;
4266 if (!op1)
4267 return op2;
4268 if (!op2)
4269 return op1;
4271 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4272 continue;
4273 BLOCK_CHAIN (t1) = op2;
4275 #ifdef ENABLE_TREE_CHECKING
4277 tree t2;
4278 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4279 gcc_assert (t2 != t1);
4281 #endif
4283 return op1;
4286 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4287 non-NULL, list them all into VECTOR, in a depth-first preorder
4288 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4289 blocks. */
4291 static int
4292 all_blocks (tree block, tree *vector)
4294 int n_blocks = 0;
4296 while (block)
4298 TREE_ASM_WRITTEN (block) = 0;
4300 /* Record this block. */
4301 if (vector)
4302 vector[n_blocks] = block;
4304 ++n_blocks;
4306 /* Record the subblocks, and their subblocks... */
4307 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4308 vector ? vector + n_blocks : 0);
4309 block = BLOCK_CHAIN (block);
4312 return n_blocks;
4315 /* Return a vector containing all the blocks rooted at BLOCK. The
4316 number of elements in the vector is stored in N_BLOCKS_P. The
4317 vector is dynamically allocated; it is the caller's responsibility
4318 to call `free' on the pointer returned. */
4320 static tree *
4321 get_block_vector (tree block, int *n_blocks_p)
4323 tree *block_vector;
4325 *n_blocks_p = all_blocks (block, NULL);
4326 block_vector = XNEWVEC (tree, *n_blocks_p);
4327 all_blocks (block, block_vector);
4329 return block_vector;
4332 static GTY(()) int next_block_index = 2;
4334 /* Set BLOCK_NUMBER for all the blocks in FN. */
4336 void
4337 number_blocks (tree fn)
4339 int i;
4340 int n_blocks;
4341 tree *block_vector;
4343 /* For SDB and XCOFF debugging output, we start numbering the blocks
4344 from 1 within each function, rather than keeping a running
4345 count. */
4346 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4347 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4348 next_block_index = 1;
4349 #endif
4351 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4353 /* The top-level BLOCK isn't numbered at all. */
4354 for (i = 1; i < n_blocks; ++i)
4355 /* We number the blocks from two. */
4356 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4358 free (block_vector);
4360 return;
4363 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4365 DEBUG_FUNCTION tree
4366 debug_find_var_in_block_tree (tree var, tree block)
4368 tree t;
4370 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4371 if (t == var)
4372 return block;
4374 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4376 tree ret = debug_find_var_in_block_tree (var, t);
4377 if (ret)
4378 return ret;
4381 return NULL_TREE;
4384 /* Keep track of whether we're in a dummy function context. If we are,
4385 we don't want to invoke the set_current_function hook, because we'll
4386 get into trouble if the hook calls target_reinit () recursively or
4387 when the initial initialization is not yet complete. */
4389 static bool in_dummy_function;
4391 /* Invoke the target hook when setting cfun. Update the optimization options
4392 if the function uses different options than the default. */
4394 static void
4395 invoke_set_current_function_hook (tree fndecl)
4397 if (!in_dummy_function)
4399 tree opts = ((fndecl)
4400 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4401 : optimization_default_node);
4403 if (!opts)
4404 opts = optimization_default_node;
4406 /* Change optimization options if needed. */
4407 if (optimization_current_node != opts)
4409 optimization_current_node = opts;
4410 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4413 targetm.set_current_function (fndecl);
4417 /* cfun should never be set directly; use this function. */
4419 void
4420 set_cfun (struct function *new_cfun)
4422 if (cfun != new_cfun)
4424 cfun = new_cfun;
4425 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4429 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4431 static vec<function_p> cfun_stack;
4433 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4434 current_function_decl accordingly. */
4436 void
4437 push_cfun (struct function *new_cfun)
4439 gcc_assert ((!cfun && !current_function_decl)
4440 || (cfun && current_function_decl == cfun->decl));
4441 cfun_stack.safe_push (cfun);
4442 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4443 set_cfun (new_cfun);
4446 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4448 void
4449 pop_cfun (void)
4451 struct function *new_cfun = cfun_stack.pop ();
4452 /* When in_dummy_function, we do have a cfun but current_function_decl is
4453 NULL. We also allow pushing NULL cfun and subsequently changing
4454 current_function_decl to something else and have both restored by
4455 pop_cfun. */
4456 gcc_checking_assert (in_dummy_function
4457 || !cfun
4458 || current_function_decl == cfun->decl);
4459 set_cfun (new_cfun);
4460 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
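/* A typical (hypothetical) use of the pair above, for code that must
   temporarily operate on another function:

       push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
       ... examine or emit IL for OTHER_FNDECL ...
       pop_cfun ();

   Both cfun and current_function_decl are restored by pop_cfun.  */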
4463 /* Return the current value of funcdef_no and increment it. */
4464 int
4465 get_next_funcdef_no (void)
4467 return funcdef_no++;
4470 /* Return the current value of funcdef_no. */
4471 int
4472 get_last_funcdef_no (void)
4474 return funcdef_no;
4477 /* Allocate a function structure for FNDECL and set its contents
4478 to the defaults. Set cfun to the newly-allocated object.
4479 Some of the helper functions invoked during initialization assume
4480 that cfun has already been set. Therefore, assign the new object
4481 directly into cfun and invoke the back end hook explicitly at the
4482 very end, rather than initializing a temporary and calling set_cfun
4483 on it.
4485 ABSTRACT_P is true if this is a function that will never be seen by
4486 the middle-end. Such functions are front-end concepts (like C++
4487 function templates) that do not correspond directly to functions
4488 placed in object files. */
4490 void
4491 allocate_struct_function (tree fndecl, bool abstract_p)
4493 tree result;
4494 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4496 cfun = ggc_alloc_cleared_function ();
4498 init_eh_for_function ();
4500 if (init_machine_status)
4501 cfun->machine = (*init_machine_status) ();
4503 #ifdef OVERRIDE_ABI_FORMAT
4504 OVERRIDE_ABI_FORMAT (fndecl);
4505 #endif
4507 if (fndecl != NULL_TREE)
4509 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4510 cfun->decl = fndecl;
4511 current_function_funcdef_no = get_next_funcdef_no ();
4513 result = DECL_RESULT (fndecl);
4514 if (!abstract_p && aggregate_value_p (result, fndecl))
4516 #ifdef PCC_STATIC_STRUCT_RETURN
4517 cfun->returns_pcc_struct = 1;
4518 #endif
4519 cfun->returns_struct = 1;
4522 cfun->stdarg = stdarg_p (fntype);
4524 /* Assume all registers in stdarg functions need to be saved. */
4525 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4526 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4528 /* ??? This could be set on a per-function basis by the front-end
4529 but is this worth the hassle? */
4530 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4533 invoke_set_current_function_hook (fndecl);
4536 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4537 instead of just setting it. */
4539 void
4540 push_struct_function (tree fndecl)
4542 /* When in_dummy_function we might be in the middle of a pop_cfun and
4543 current_function_decl and cfun may not match. */
4544 gcc_assert (in_dummy_function
4545 || (!cfun && !current_function_decl)
4546 || (cfun && current_function_decl == cfun->decl));
4547 cfun_stack.safe_push (cfun);
4548 current_function_decl = fndecl;
4549 allocate_struct_function (fndecl, false);
4552 /* Reset crtl and other non-struct-function variables to defaults as
4553 appropriate for emitting rtl at the start of a function. */
4555 static void
4556 prepare_function_start (void)
4558 gcc_assert (!crtl->emit.x_last_insn);
4559 init_temp_slots ();
4560 init_emit ();
4561 init_varasm_status ();
4562 init_expr ();
4563 default_rtl_profile ();
4565 if (flag_stack_usage_info)
4567 cfun->su = ggc_alloc_cleared_stack_usage ();
4568 cfun->su->static_stack_size = -1;
4571 cse_not_expected = ! optimize;
4573 /* Caller save not needed yet. */
4574 caller_save_needed = 0;
4576 /* We haven't done register allocation yet. */
4577 reg_renumber = 0;
4579 /* Indicate that we have not instantiated virtual registers yet. */
4580 virtuals_instantiated = 0;
4582 /* Indicate that we want CONCATs now. */
4583 generating_concat_p = 1;
4585 /* Indicate we have no need of a frame pointer yet. */
4586 frame_pointer_needed = 0;
4589 /* Initialize the rtl expansion mechanism so that we can do simple things
4590 like generate sequences. This is used to provide a context during global
4591 initialization of some passes. You must call expand_dummy_function_end
4592 to exit this context. */
4594 void
4595 init_dummy_function_start (void)
4597 gcc_assert (!in_dummy_function);
4598 in_dummy_function = true;
4599 push_struct_function (NULL_TREE);
4600 prepare_function_start ();
4603 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4604 and initialize static variables for generating RTL for the statements
4605 of the function. */
4607 void
4608 init_function_start (tree subr)
4610 if (subr && DECL_STRUCT_FUNCTION (subr))
4611 set_cfun (DECL_STRUCT_FUNCTION (subr));
4612 else
4613 allocate_struct_function (subr, false);
4614 prepare_function_start ();
4615 decide_function_section (subr);
4617 /* Warn if this value is an aggregate type,
4618 regardless of which calling convention we are using for it. */
4619 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4620 warning (OPT_Waggregate_return, "function returns an aggregate");
4624 void
4625 expand_main_function (void)
4627 #if (defined(INVOKE__main) \
4628 || (!defined(HAS_INIT_SECTION) \
4629 && !defined(INIT_SECTION_ASM_OP) \
4630 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4631 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4632 #endif
4635 /* Expand code to initialize the stack_protect_guard. This is invoked at
4636 the beginning of a function to be protected. */
4638 #ifndef HAVE_stack_protect_set
4639 # define HAVE_stack_protect_set 0
4640 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4641 #endif
4643 void
4644 stack_protect_prologue (void)
4646 tree guard_decl = targetm.stack_protect_guard ();
4647 rtx x, y;
4649 x = expand_normal (crtl->stack_protect_guard);
4650 y = expand_normal (guard_decl);
4652 /* Allow the target to copy from Y to X without leaking Y into a
4653 register. */
4654 if (HAVE_stack_protect_set)
4656 rtx insn = gen_stack_protect_set (x, y);
4657 if (insn)
4659 emit_insn (insn);
4660 return;
4664 /* Otherwise do a straight move. */
4665 emit_move_insn (x, y);
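/* Conceptually, the prologue above amounts to

       frame.guard = __stack_chk_guard;

   with stack_protect_epilogue below re-reading both values, comparing
   them, and calling targetm.stack_protect_fail (typically
   __stack_chk_fail) when they differ.  */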
4668 /* Expand code to verify the stack_protect_guard. This is invoked at
4669 the end of a function to be protected. */
4671 #ifndef HAVE_stack_protect_test
4672 # define HAVE_stack_protect_test 0
4673 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4674 #endif
4676 void
4677 stack_protect_epilogue (void)
4679 tree guard_decl = targetm.stack_protect_guard ();
4680 rtx label = gen_label_rtx ();
4681 rtx x, y, tmp;
4683 x = expand_normal (crtl->stack_protect_guard);
4684 y = expand_normal (guard_decl);
4686 /* Allow the target to compare Y with X without leaking either into
4687 a register. */
4688 switch (HAVE_stack_protect_test != 0)
4690 case 1:
4691 tmp = gen_stack_protect_test (x, y, label);
4692 if (tmp)
4694 emit_insn (tmp);
4695 break;
4697 /* FALLTHRU */
4699 default:
4700 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4701 break;
4704 /* The noreturn predictor has been moved to the tree level. The rtl-level
4705 predictors estimate this branch about 20%, which isn't enough to get
4706 things moved out of line. Since this is the only extant case of adding
4707 a noreturn function at the rtl level, it doesn't seem worth doing anything
4708 except adding the prediction by hand. */
4709 tmp = get_last_insn ();
4710 if (JUMP_P (tmp))
4711 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4713 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4714 free_temp_slots ();
4715 emit_label (label);
4718 /* Start the RTL for a new function, and set variables used for
4719 emitting RTL.
4720 SUBR is the FUNCTION_DECL node. */
4724 void
4725 expand_function_start (tree subr)
4727 /* Make sure volatile mem refs aren't considered
4728 valid operands of arithmetic insns. */
4729 init_recog_no_volatile ();
4731 crtl->profile
4732 = (profile_flag
4733 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4735 crtl->limit_stack
4736 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4738 /* Make the label for return statements to jump to. Do not special
4739 case machines with special return instructions -- they will be
4740 handled later during jump, ifcvt, or epilogue creation. */
4741 return_label = gen_label_rtx ();
4743 /* Initialize rtx used to return the value. */
4744 /* Do this before assign_parms so that we copy the struct value address
4745 before any library calls that assign parms might generate. */
4747 /* Decide whether to return the value in memory or in a register. */
4748 if (aggregate_value_p (DECL_RESULT (subr), subr))
4750 /* Returning something that won't go in a register. */
4751 rtx value_address = 0;
4753 #ifdef PCC_STATIC_STRUCT_RETURN
4754 if (cfun->returns_pcc_struct)
4756 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4757 value_address = assemble_static_space (size);
4759 else
4760 #endif
4762 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4763 /* Expect to be passed the address of a place to store the value.
4764 If it is passed as an argument, assign_parms will take care of
4765 it. */
4766 if (sv)
4768 value_address = gen_reg_rtx (Pmode);
4769 emit_move_insn (value_address, sv);
4772 if (value_address)
4774 rtx x = value_address;
4775 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4777 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4778 set_mem_attributes (x, DECL_RESULT (subr), 1);
4780 SET_DECL_RTL (DECL_RESULT (subr), x);
4783 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4784 /* If return mode is void, this decl rtl should not be used. */
4785 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4786 else
4788 /* Compute the return values into a pseudo reg, which we will copy
4789 into the true return register after the cleanups are done. */
4790 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4791 if (TYPE_MODE (return_type) != BLKmode
4792 && targetm.calls.return_in_msb (return_type))
4793 /* expand_function_end will insert the appropriate padding in
4794 this case. Use the return value's natural (unpadded) mode
4795 within the function proper. */
4796 SET_DECL_RTL (DECL_RESULT (subr),
4797 gen_reg_rtx (TYPE_MODE (return_type)));
4798 else
4800 /* In order to figure out what mode to use for the pseudo, we
4801 figure out what the mode of the eventual return register will
4802 actually be, and use that. */
4803 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4805 /* Structures that are returned in registers are not
4806 aggregate_value_p, so we may see a PARALLEL or a REG. */
4807 if (REG_P (hard_reg))
4808 SET_DECL_RTL (DECL_RESULT (subr),
4809 gen_reg_rtx (GET_MODE (hard_reg)));
4810 else
4812 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4813 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4817 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4818 result to the real return register(s). */
4819 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4822 /* Initialize rtx for parameters and local variables.
4823 In some cases this requires emitting insns. */
4824 assign_parms (subr);
4826 /* If function gets a static chain arg, store it. */
4827 if (cfun->static_chain_decl)
4829 tree parm = cfun->static_chain_decl;
4830 rtx local, chain, insn;
4832 local = gen_reg_rtx (Pmode);
4833 chain = targetm.calls.static_chain (current_function_decl, true);
4835 set_decl_incoming_rtl (parm, chain, false);
4836 SET_DECL_RTL (parm, local);
4837 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4839 insn = emit_move_insn (local, chain);
4841 /* Mark the register as eliminable, similar to parameters. */
4842 if (MEM_P (chain)
4843 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
4844 set_dst_reg_note (insn, REG_EQUIV, chain, local);
4847 /* If the function receives a non-local goto, then store the
4848 bits we need to restore the frame pointer. */
4849 if (cfun->nonlocal_goto_save_area)
4851 tree t_save;
4852 rtx r_save;
4854 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4855 gcc_assert (DECL_RTL_SET_P (var));
4857 t_save = build4 (ARRAY_REF,
4858 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
4859 cfun->nonlocal_goto_save_area,
4860 integer_zero_node, NULL_TREE, NULL_TREE);
4861 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4862 gcc_assert (GET_MODE (r_save) == Pmode);
4864 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4865 update_nonlocal_goto_save_area ();
4868 /* The following was moved from init_function_start.
4869 The move is supposed to make sdb output more accurate. */
4870 /* Indicate the beginning of the function body,
4871 as opposed to parm setup. */
4872 emit_note (NOTE_INSN_FUNCTION_BEG);
4874 gcc_assert (NOTE_P (get_last_insn ()));
4876 parm_birth_insn = get_last_insn ();
4878 if (crtl->profile)
4880 #ifdef PROFILE_HOOK
4881 PROFILE_HOOK (current_function_funcdef_no);
4882 #endif
4885 /* If we are doing generic stack checking, the probe should go here. */
4886 if (flag_stack_check == GENERIC_STACK_CHECK)
4887 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4890 /* Undo the effects of init_dummy_function_start. */
4891 void
4892 expand_dummy_function_end (void)
4894 gcc_assert (in_dummy_function);
4896 /* End any sequences that failed to be closed due to syntax errors. */
4897 while (in_sequence_p ())
4898 end_sequence ();
4900 /* Outside function body, can't compute type's actual size
4901 until next function's body starts. */
4903 free_after_parsing (cfun);
4904 free_after_compilation (cfun);
4905 pop_cfun ();
4906 in_dummy_function = false;
4909 /* Call DOIT for each hard register used as a return value from
4910 the current function. */
4912 void
4913 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4915 rtx outgoing = crtl->return_rtx;
4917 if (! outgoing)
4918 return;
4920 if (REG_P (outgoing))
4921 (*doit) (outgoing, arg);
4922 else if (GET_CODE (outgoing) == PARALLEL)
4924 int i;
4926 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4928 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4930 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4931 (*doit) (x, arg);
4936 static void
4937 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4939 emit_clobber (reg);
4942 void
4943 clobber_return_register (void)
4945 diddle_return_value (do_clobber_return_reg, NULL);
4947 /* In case we do use a pseudo to return the value, clobber it too. */
4948 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4950 tree decl_result = DECL_RESULT (current_function_decl);
4951 rtx decl_rtl = DECL_RTL (decl_result);
4952 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4954 do_clobber_return_reg (decl_rtl, NULL);
4959 static void
4960 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4962 emit_use (reg);
4965 static void
4966 use_return_register (void)
4968 diddle_return_value (do_use_return_reg, NULL);
4971 /* Possibly warn about unused parameters. */
4972 void
4973 do_warn_unused_parameter (tree fn)
4975 tree decl;
4977 for (decl = DECL_ARGUMENTS (fn);
4978 decl; decl = DECL_CHAIN (decl))
4979 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4980 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4981 && !TREE_NO_WARNING (decl))
4982 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
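/* For example, compiling

       int f (int a, int b) { return a; }

   with -Wunused-parameter warns about B, while an unnamed parameter
   as in "int g (int)" has no DECL_NAME and is skipped.  */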
4985 static GTY(()) rtx initial_trampoline;
4987 /* Generate RTL for the end of the current function. */
4989 void
4990 expand_function_end (void)
4992 rtx clobber_after;
4994 /* If arg_pointer_save_area was referenced only from a nested
4995 function, we will not have initialized it yet. Do that now. */
4996 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4997 get_arg_pointer_save_area ();
4999 /* If we are doing generic stack checking and this function makes calls,
5000 do a stack probe at the start of the function to ensure we have enough
5001 space for another stack frame. */
5002 if (flag_stack_check == GENERIC_STACK_CHECK)
5004 rtx insn, seq;
5006 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5007 if (CALL_P (insn))
5009 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5010 start_sequence ();
5011 if (STACK_CHECK_MOVING_SP)
5012 anti_adjust_stack_and_probe (max_frame_size, true);
5013 else
5014 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5015 seq = get_insns ();
5016 end_sequence ();
5017 set_insn_locations (seq, prologue_location);
5018 emit_insn_before (seq, stack_check_probe_note);
5019 break;
5023 /* End any sequences that failed to be closed due to syntax errors. */
5024 while (in_sequence_p ())
5025 end_sequence ();
5027 clear_pending_stack_adjust ();
5028 do_pending_stack_adjust ();
5030 /* Output a line number for the end of the function.
5031 SDB depends on this. */
5032 set_curr_insn_location (input_location);
5034 /* Before the return label (if any), clobber the return
5035 registers so that they are not propagated live to the rest of
5036 the function. This can only happen with functions that drop
5037 through; if there had been a return statement, there would
5038 have either been a return rtx, or a jump to the return label.
5040 We delay actual code generation after the current_function_value_rtx
5041 is computed. */
5042 clobber_after = get_last_insn ();
5044 /* Output the label for the actual return from the function. */
5045 emit_label (return_label);
5047 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5049 /* Let except.c know where it should emit the call to unregister
5050 the function context for sjlj exceptions. */
5051 if (flag_exceptions)
5052 sjlj_emit_function_exit_after (get_last_insn ());
5054 else
5056 /* We want to ensure that instructions that may trap are not
5057 moved into the epilogue by scheduling, because we don't
5058 always emit unwind information for the epilogue. */
5059 if (cfun->can_throw_non_call_exceptions)
5060 emit_insn (gen_blockage ());
5063 /* If this is an implementation of throw, do what's necessary to
5064 communicate between __builtin_eh_return and the epilogue. */
5065 expand_eh_return ();
5067 /* If scalar return value was computed in a pseudo-reg, or was a named
5068 return value that got dumped to the stack, copy that to the hard
5069 return register. */
5070 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5072 tree decl_result = DECL_RESULT (current_function_decl);
5073 rtx decl_rtl = DECL_RTL (decl_result);
5075 if (REG_P (decl_rtl)
5076 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5077 : DECL_REGISTER (decl_result))
5079 rtx real_decl_rtl = crtl->return_rtx;
5081 /* This should be set in assign_parms. */
5082 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5084 /* If this is a BLKmode structure being returned in registers,
5085 then use the mode computed in expand_return. Note that if
5086 decl_rtl is memory, then its mode may have been changed,
5087 but that crtl->return_rtx has not. */
5088 if (GET_MODE (real_decl_rtl) == BLKmode)
5089 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5091 /* If a non-BLKmode return value should be padded at the least
5092 significant end of the register, shift it left by the appropriate
5093 amount. BLKmode results are handled using the group load/store
5094 machinery. */
5095 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5096 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5098 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5099 REGNO (real_decl_rtl)),
5100 decl_rtl);
5101 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5103 /* If a named return value dumped decl_result to memory, then
5104 we may need to re-do the PROMOTE_MODE signed/unsigned
5105 extension. */
5106 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5108 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5109 promote_function_mode (TREE_TYPE (decl_result),
5110 GET_MODE (decl_rtl), &unsignedp,
5111 TREE_TYPE (current_function_decl), 1);
5113 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5115 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5117 /* If expand_function_start has created a PARALLEL for decl_rtl,
5118 move the result to the real return registers. Otherwise, do
5119 a group load from decl_rtl for a named return. */
5120 if (GET_CODE (decl_rtl) == PARALLEL)
5121 emit_group_move (real_decl_rtl, decl_rtl);
5122 else
5123 emit_group_load (real_decl_rtl, decl_rtl,
5124 TREE_TYPE (decl_result),
5125 int_size_in_bytes (TREE_TYPE (decl_result)));
5127 /* In the case of complex integer modes smaller than a word, we'll
5128 need to generate some non-trivial bitfield insertions. Do that
5129 on a pseudo and not the hard register. */
5130 else if (GET_CODE (decl_rtl) == CONCAT
5131 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5132 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5134 int old_generating_concat_p;
5135 rtx tmp;
5137 old_generating_concat_p = generating_concat_p;
5138 generating_concat_p = 0;
5139 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5140 generating_concat_p = old_generating_concat_p;
5142 emit_move_insn (tmp, decl_rtl);
5143 emit_move_insn (real_decl_rtl, tmp);
5145 else
5146 emit_move_insn (real_decl_rtl, decl_rtl);
5150 /* If returning a structure, arrange to return the address of the value
5151 in a place where debuggers expect to find it.
5153 If returning a structure PCC style,
5154 the caller also depends on this value.
5155 And cfun->returns_pcc_struct is not necessarily set. */
5156 if (cfun->returns_struct
5157 || cfun->returns_pcc_struct)
5159 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5160 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5161 rtx outgoing;
5163 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5164 type = TREE_TYPE (type);
5165 else
5166 value_address = XEXP (value_address, 0);
5168 outgoing = targetm.calls.function_value (build_pointer_type (type),
5169 current_function_decl, true);
5171 /* Mark this as a function return value so integrate will delete the
5172 assignment and USE below when inlining this function. */
5173 REG_FUNCTION_VALUE_P (outgoing) = 1;
5175 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5176 value_address = convert_memory_address (GET_MODE (outgoing),
5177 value_address);
5179 emit_move_insn (outgoing, value_address);
5181 /* Show the return register used to hold the result (in this case the
5182 address of the result). */
5183 crtl->return_rtx = outgoing;
5186 /* Emit the actual code to clobber return register. */
5188 rtx seq;
5190 start_sequence ();
5191 clobber_return_register ();
5192 seq = get_insns ();
5193 end_sequence ();
5195 emit_insn_after (seq, clobber_after);
5198 /* Output the label for the naked return from the function. */
5199 if (naked_return_label)
5200 emit_label (naked_return_label);
5202 /* @@@ This is a kludge. We want to ensure that instructions that
5203 may trap are not moved into the epilogue by scheduling, because
5204 we don't always emit unwind information for the epilogue. */
5205 if (cfun->can_throw_non_call_exceptions
5206 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5207 emit_insn (gen_blockage ());
5209 /* If stack protection is enabled for this function, check the guard. */
5210 if (crtl->stack_protect_guard)
5211 stack_protect_epilogue ();
5213 /* If we had calls to alloca, and this machine needs
5214 an accurate stack pointer to exit the function,
5215 insert some code to save and restore the stack pointer. */
5216 if (! EXIT_IGNORE_STACK
5217 && cfun->calls_alloca)
5219 rtx tem = 0, seq;
5221 start_sequence ();
5222 emit_stack_save (SAVE_FUNCTION, &tem);
5223 seq = get_insns ();
5224 end_sequence ();
5225 emit_insn_before (seq, parm_birth_insn);
5227 emit_stack_restore (SAVE_FUNCTION, tem);
5230 /* ??? This should no longer be necessary since the old "stupid" register
5231 allocator is no longer with us, but there are some parts of the compiler
5232 (e.g. reload_combine, and sh mach_dep_reorg) that still try to compute
5233 their own lifetime info instead of using the general framework. */
5234 use_return_register ();
5237 rtx
5238 get_arg_pointer_save_area (void)
5240 rtx ret = arg_pointer_save_area;
5242 if (! ret)
5244 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5245 arg_pointer_save_area = ret;
5248 if (! crtl->arg_pointer_save_area_init)
5250 rtx seq;
5252 /* Save the arg pointer at the beginning of the function. The
5253 generated stack slot may not be a valid memory address, so we
5254 have to check it and fix it if necessary. */
5255 start_sequence ();
5256 emit_move_insn (validize_mem (ret),
5257 crtl->args.internal_arg_pointer);
5258 seq = get_insns ();
5259 end_sequence ();
5261 push_topmost_sequence ();
5262 emit_insn_after (seq, entry_of_function ());
5263 pop_topmost_sequence ();
5265 crtl->arg_pointer_save_area_init = true;
5268 return ret;
5271 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5272 for the first time. */
5274 static void
5275 record_insns (rtx insns, rtx end, htab_t *hashp)
5277 rtx tmp;
5278 htab_t hash = *hashp;
5280 if (hash == NULL)
5281 *hashp = hash
5282 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5284 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5286 void **slot = htab_find_slot (hash, tmp, INSERT);
5287 gcc_assert (*slot == NULL);
5288 *slot = tmp;
5292 /* INSN has been duplicated or replaced as COPY, perhaps by duplicating a
5293 basic block, by splitting, or by peepholes. If INSN is a prologue or
5294 epilogue insn, then record COPY as well. */
5296 void
5297 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5299 htab_t hash;
5300 void **slot;
5302 hash = epilogue_insn_hash;
5303 if (!hash || !htab_find (hash, insn))
5305 hash = prologue_insn_hash;
5306 if (!hash || !htab_find (hash, insn))
5307 return;
5310 slot = htab_find_slot (hash, copy, INSERT);
5311 gcc_assert (*slot == NULL);
5312 *slot = copy;
5315 /* Set the location of the insn chain starting at INSN to LOC. */
5316 static void
5317 set_insn_locations (rtx insn, int loc)
5319 while (insn != NULL_RTX)
5321 if (INSN_P (insn))
5322 INSN_LOCATION (insn) = loc;
5323 insn = NEXT_INSN (insn);
5327 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5328 we can be running after reorg, SEQUENCE rtl is possible. */
5330 static bool
5331 contains (const_rtx insn, htab_t hash)
5333 if (hash == NULL)
5334 return false;
5336 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5338 int i;
5339 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5340 if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
5341 return true;
5342 return false;
5345 return htab_find (hash, insn) != NULL;
5348 int
5349 prologue_epilogue_contains (const_rtx insn)
5351 if (contains (insn, prologue_insn_hash))
5352 return 1;
5353 if (contains (insn, epilogue_insn_hash))
5354 return 1;
5355 return 0;
5358 #ifdef HAVE_simple_return
5360 /* Return true if INSN requires the stack frame to be set up.
5361 PROLOGUE_USED contains the hard registers used in the function
5362 prologue. SET_UP_BY_PROLOGUE is the set of registers we expect the
5363 prologue to set up for the function. */
5364 bool
5365 requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used,
5366 HARD_REG_SET set_up_by_prologue)
5368 df_ref *df_rec;
5369 HARD_REG_SET hardregs;
5370 unsigned regno;
5372 if (CALL_P (insn))
5373 return !SIBLING_CALL_P (insn);
5375 /* We need a frame to get the unique CFA expected by the unwinder. */
5376 if (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
5377 return true;
5379 CLEAR_HARD_REG_SET (hardregs);
5380 for (df_rec = DF_INSN_DEFS (insn); *df_rec; df_rec++)
5382 rtx dreg = DF_REF_REG (*df_rec);
5384 if (!REG_P (dreg))
5385 continue;
5387 add_to_hard_reg_set (&hardregs, GET_MODE (dreg),
5388 REGNO (dreg));
5390 if (hard_reg_set_intersect_p (hardregs, prologue_used))
5391 return true;
5392 AND_COMPL_HARD_REG_SET (hardregs, call_used_reg_set);
5393 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5394 if (TEST_HARD_REG_BIT (hardregs, regno)
5395 && df_regs_ever_live_p (regno))
5396 return true;
5398 for (df_rec = DF_INSN_USES (insn); *df_rec; df_rec++)
5400 rtx reg = DF_REF_REG (*df_rec);
5402 if (!REG_P (reg))
5403 continue;
5405 add_to_hard_reg_set (&hardregs, GET_MODE (reg),
5406 REGNO (reg));
5408 if (hard_reg_set_intersect_p (hardregs, set_up_by_prologue))
5409 return true;
5411 return false;
5414 /* See whether BB has a single successor that uses [REGNO, END_REGNO),
5415 and if BB is its only predecessor. Return that block if so,
5416 otherwise return null. */
5418 static basic_block
5419 next_block_for_reg (basic_block bb, int regno, int end_regno)
5421 edge e, live_edge;
5422 edge_iterator ei;
5423 bitmap live;
5424 int i;
5426 live_edge = NULL;
5427 FOR_EACH_EDGE (e, ei, bb->succs)
5429 live = df_get_live_in (e->dest);
5430 for (i = regno; i < end_regno; i++)
5431 if (REGNO_REG_SET_P (live, i))
5433 if (live_edge && live_edge != e)
5434 return NULL;
5435 live_edge = e;
5439 /* We can sometimes encounter dead code. Don't try to move it
5440 into the exit block. */
5441 if (!live_edge || live_edge->dest == EXIT_BLOCK_PTR)
5442 return NULL;
5444 /* Reject targets of abnormal edges. This is needed for correctness
5445 on ports like Alpha and MIPS, whose pic_offset_table_rtx can die on
5446 exception edges even though it is generally treated as call-saved
5447 for the majority of the compilation. Moving across abnormal edges
5448 isn't going to be interesting for shrink-wrap usage anyway. */
5449 if (live_edge->flags & EDGE_ABNORMAL)
5450 return NULL;
5452 if (EDGE_COUNT (live_edge->dest->preds) > 1)
5453 return NULL;
5455 return live_edge->dest;
5458 /* Try to move INSN from BB to a successor. Return true on success.
5459 USES and DEFS are the set of registers that are used and defined
5460 after INSN in BB. */
5462 static bool
5463 move_insn_for_shrink_wrap (basic_block bb, rtx insn,
5464 const HARD_REG_SET uses,
5465 const HARD_REG_SET defs)
5467 rtx set, src, dest;
5468 bitmap live_out, live_in, bb_uses, bb_defs;
5469 unsigned int i, dregno, end_dregno, sregno, end_sregno;
5470 basic_block next_block;
5472 /* Look for a simple register copy. */
5473 set = single_set (insn);
5474 if (!set)
5475 return false;
5476 src = SET_SRC (set);
5477 dest = SET_DEST (set);
5478 if (!REG_P (dest) || !REG_P (src))
5479 return false;
5481 /* Make sure that the source register isn't defined later in BB. */
5482 sregno = REGNO (src);
5483 end_sregno = END_REGNO (src);
5484 if (overlaps_hard_reg_set_p (defs, GET_MODE (src), sregno))
5485 return false;
5487 /* Make sure that the destination register isn't referenced later in BB. */
5488 dregno = REGNO (dest);
5489 end_dregno = END_REGNO (dest);
5490 if (overlaps_hard_reg_set_p (uses, GET_MODE (dest), dregno)
5491 || overlaps_hard_reg_set_p (defs, GET_MODE (dest), dregno))
5492 return false;
5494 /* See whether there is a successor block to which we could move INSN. */
5495 next_block = next_block_for_reg (bb, dregno, end_dregno);
5496 if (!next_block)
5497 return false;
5499 /* At this point we are committed to moving INSN, but let's try to
5500 move it as far as we can. */
5503 live_out = df_get_live_out (bb);
5504 live_in = df_get_live_in (next_block);
5505 bb = next_block;
5507 /* Check whether BB uses DEST or clobbers DEST. We need to add
5508 INSN to BB if so. Either way, DEST is no longer live on entry,
5509 except for any part that overlaps SRC (next loop). */
5510 bb_uses = &DF_LR_BB_INFO (bb)->use;
5511 bb_defs = &DF_LR_BB_INFO (bb)->def;
5512 for (i = dregno; i < end_dregno; i++)
5514 if (REGNO_REG_SET_P (bb_uses, i) || REGNO_REG_SET_P (bb_defs, i))
5515 next_block = NULL;
5516 CLEAR_REGNO_REG_SET (live_out, i);
5517 CLEAR_REGNO_REG_SET (live_in, i);
5520 /* Check whether BB clobbers SRC. We need to add INSN to BB if so.
5521 Either way, SRC is now live on entry. */
5522 for (i = sregno; i < end_sregno; i++)
5524 if (REGNO_REG_SET_P (bb_defs, i))
5525 next_block = NULL;
5526 SET_REGNO_REG_SET (live_out, i);
5527 SET_REGNO_REG_SET (live_in, i);
5530 /* If we don't need to add the move to BB, look for a single
5531 successor block. */
5532 if (next_block)
5533 next_block = next_block_for_reg (next_block, dregno, end_dregno);
5535 while (next_block);
5537 /* BB now defines DEST. It only uses the parts of DEST that overlap SRC
5538 (next loop). */
5539 for (i = dregno; i < end_dregno; i++)
5541 CLEAR_REGNO_REG_SET (bb_uses, i);
5542 SET_REGNO_REG_SET (bb_defs, i);
5545 /* BB now uses SRC. */
5546 for (i = sregno; i < end_sregno; i++)
5547 SET_REGNO_REG_SET (bb_uses, i);
5549 emit_insn_after (PATTERN (insn), bb_note (bb));
5550 delete_insn (insn);
5551 return true;
5554 /* Look for register copies in the first block of the function, and move
5555 them down into successor blocks if the register is used only on one
5556 path. This exposes more opportunities for shrink-wrapping. These
5557 kinds of sets often occur when incoming argument registers are moved
5558 to call-saved registers because their values are live across one or
5559 more calls during the function. */
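/* A sketch of the pattern this targets (pseudo-RTL, hypothetical hard
   registers): the entry block contains

       (set (reg r12) (reg r4))   ;; r4 = incoming arg, r12 = call-saved

   and only one successor path ever reads r12.  Moving the copy into
   that path lets the other path execute without the prologue that
   would otherwise have to save r12.  */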
5561 static void
5562 prepare_shrink_wrap (basic_block entry_block)
5564 rtx insn, curr, x;
5565 HARD_REG_SET uses, defs;
5566 df_ref *ref;
5568 CLEAR_HARD_REG_SET (uses);
5569 CLEAR_HARD_REG_SET (defs);
5570 FOR_BB_INSNS_REVERSE_SAFE (entry_block, insn, curr)
5571 if (NONDEBUG_INSN_P (insn)
5572 && !move_insn_for_shrink_wrap (entry_block, insn, uses, defs))
5574 /* Add all defined registers to DEFS. */
5575 for (ref = DF_INSN_DEFS (insn); *ref; ref++)
5577 x = DF_REF_REG (*ref);
5578 if (REG_P (x) && HARD_REGISTER_P (x))
5579 SET_HARD_REG_BIT (defs, REGNO (x));
5582 /* Add all used registers to USES. */
5583 for (ref = DF_INSN_USES (insn); *ref; ref++)
5585 x = DF_REF_REG (*ref);
5586 if (REG_P (x) && HARD_REGISTER_P (x))
5587 SET_HARD_REG_BIT (uses, REGNO (x));
5592 #endif
#ifdef HAVE_return
/* Insert use of return register before the end of BB.  */

static void
emit_use_return_register_into_block (basic_block bb)
{
  rtx seq;
  start_sequence ();
  use_return_register ();
  seq = get_insns ();
  end_sequence ();
  emit_insn_before (seq, BB_END (bb));
}
/* Create a return pattern, either simple_return or return, depending on
   simple_p.  */

static rtx
gen_return_pattern (bool simple_p)
{
#ifdef HAVE_simple_return
  return simple_p ? gen_simple_return () : gen_return ();
#else
  gcc_assert (!simple_p);
  return gen_return ();
#endif
}
/* Insert an appropriate return pattern at the end of block BB.  This
   also means updating block_for_insn appropriately.  SIMPLE_P is
   the same as in gen_return_pattern and passed to it.  */

static void
emit_return_into_block (bool simple_p, basic_block bb)
{
  rtx jump, pat;
  jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
  pat = PATTERN (jump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  gcc_assert (ANY_RETURN_P (pat));
  JUMP_LABEL (jump) = pat;
}
#endif
/* Set JUMP_LABEL for a return insn.  */

void
set_return_jump_label (rtx returnjump)
{
  rtx pat = PATTERN (returnjump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (ANY_RETURN_P (pat))
    JUMP_LABEL (returnjump) = pat;
  else
    JUMP_LABEL (returnjump) = ret_rtx;
}
#ifdef HAVE_simple_return
/* Create a copy of BB instructions and insert at BEFORE.  Redirect
   preds of BB to COPY_BB if they don't appear in NEED_PROLOGUE.  */
static void
dup_block_and_redirect (basic_block bb, basic_block copy_bb, rtx before,
			bitmap_head *need_prologue)
{
  edge_iterator ei;
  edge e;
  rtx insn = BB_END (bb);

  /* We know BB has a single successor, so there is no need to copy a
     simple jump at the end of BB.  */
  if (simplejump_p (insn))
    insn = PREV_INSN (insn);

  start_sequence ();
  duplicate_insn_chain (BB_HEAD (bb), insn);
  if (dump_file)
    {
      unsigned count = 0;
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  ++count;
      fprintf (dump_file, "Duplicating bb %d to bb %d, %u active insns.\n",
	       bb->index, copy_bb->index, count);
    }
  insn = get_insns ();
  end_sequence ();
  emit_insn_before (insn, before);

  /* Redirect all the paths that need no prologue into copy_bb.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    if (!bitmap_bit_p (need_prologue, e->src->index))
      {
	int freq = EDGE_FREQUENCY (e);
	copy_bb->count += e->count;
	copy_bb->frequency += EDGE_FREQUENCY (e);
	e->dest->count -= e->count;
	if (e->dest->count < 0)
	  e->dest->count = 0;
	e->dest->frequency -= freq;
	if (e->dest->frequency < 0)
	  e->dest->frequency = 0;
	redirect_edge_and_branch_force (e, copy_bb);
	continue;
      }
    else
      ei_next (&ei);
}
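
/* A worked example of the count bookkeeping above (hypothetical
   numbers): if BB has count 100 and two predecessor edges with counts
   60 and 40, and only the count-40 edge comes from a block that needs
   no prologue, then COPY_BB gains count 40 (plus that edge's
   frequency), BB's count drops to 60, and the redirected edge now
   enters COPY_BB directly.  The clamping to zero guards against
   inconsistent profile data in which an edge count exceeds the block
   count.  */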
#endif
#if defined (HAVE_return) || defined (HAVE_simple_return)
/* Return true if there are any active insns between HEAD and TAIL.  */
static bool
active_insn_between (rtx head, rtx tail)
{
  while (tail)
    {
      if (active_insn_p (tail))
	return true;
      if (tail == head)
	return false;
      tail = PREV_INSN (tail);
    }
  return false;
}
/* LAST_BB is a block that exits, and empty of active instructions.
   Examine its predecessors for jumps that can be converted to
   (conditional) returns.  */
static vec<edge>
convert_jumps_to_returns (basic_block last_bb, bool simple_p,
			  vec<edge> unconverted ATTRIBUTE_UNUSED)
{
  int i;
  basic_block bb;
  rtx label;
  edge_iterator ei;
  edge e;
  vec<basic_block> src_bbs;

  src_bbs.create (EDGE_COUNT (last_bb->preds));
  FOR_EACH_EDGE (e, ei, last_bb->preds)
    if (e->src != ENTRY_BLOCK_PTR)
      src_bbs.quick_push (e->src);

  label = BB_HEAD (last_bb);

  FOR_EACH_VEC_ELT (src_bbs, i, bb)
    {
      rtx jump = BB_END (bb);

      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
	continue;

      e = find_edge (bb, last_bb);

      /* If we have an unconditional jump, we can replace that
	 with a simple return instruction.  */
      if (simplejump_p (jump))
	{
	  /* The use of the return register might be present in the exit
	     fallthru block.  Either:
	     - removing the use is safe, and we should remove the use in
	       the exit fallthru block, or
	     - removing the use is not safe, and we should add it here.
	     For now, we conservatively choose the latter.  Either of the
	     2 helps in crossjumping.  */
	  emit_use_return_register_into_block (bb);
	  emit_return_into_block (simple_p, bb);
	  delete_insn (jump);
	}

      /* If we have a conditional jump branching to the last
	 block, we can try to replace that with a conditional
	 return instruction.  */
      else if (condjump_p (jump))
	{
	  rtx dest;

	  if (simple_p)
	    dest = simple_return_rtx;
	  else
	    dest = ret_rtx;
	  if (!redirect_jump (jump, dest, 0))
	    {
#ifdef HAVE_simple_return
	      if (simple_p)
		{
		  if (dump_file)
		    fprintf (dump_file,
			     "Failed to redirect bb %d branch.\n", bb->index);
		  unconverted.safe_push (e);
		}
#endif
	      continue;
	    }

	  /* See comment in simplejump_p case above.  */
	  emit_use_return_register_into_block (bb);

	  /* If this block has only one successor, it both jumps
	     and falls through to the fallthru block, so we can't
	     delete the edge.  */
	  if (single_succ_p (bb))
	    continue;
	}
      else
	{
#ifdef HAVE_simple_return
	  if (simple_p)
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Failed to redirect bb %d branch.\n", bb->index);
	      unconverted.safe_push (e);
	    }
#endif
	  continue;
	}

      /* Fix up the CFG for the successful change we just made.  */
      redirect_edge_succ (e, EXIT_BLOCK_PTR);
      e->flags &= ~EDGE_CROSSING;
    }
  src_bbs.release ();
  return unconverted;
}
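
/* A sketch of the conversion above (illustrative RTL): a predecessor
   ending in

       (jump_insn (set (pc) (label_ref L1)))

   where L1 heads the empty LAST_BB becomes

       (jump_insn (simple_return))

   (or (return), depending on SIMPLE_P), and its edge is retargeted at
   the exit block, so the jump through LAST_BB disappears on that
   path.  */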
/* Emit a return insn for the exit fallthru block.  */
static basic_block
emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
{
  basic_block last_bb = exit_fallthru_edge->src;

  if (JUMP_P (BB_END (last_bb)))
    {
      last_bb = split_edge (exit_fallthru_edge);
      exit_fallthru_edge = single_succ_edge (last_bb);
    }
  emit_barrier_after (BB_END (last_bb));
  emit_return_into_block (simple_p, last_bb);
  exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
  return last_bb;
}
#endif
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.

   Notes on epilogue placement:
   There are several kinds of edges to the exit block:
   * a single fallthru edge from LAST_BB
   * possibly, edges from blocks containing sibcalls
   * possibly, fake edges from infinite loops

   The epilogue is always emitted on the fallthru edge from the last basic
   block in the function, LAST_BB, into the exit block.

   If LAST_BB is empty except for a label, it is the target of every
   other basic block in the function that ends in a return.  If a
   target has a return or simple_return pattern (possibly with
   conditional variants), these basic blocks can be changed so that a
   return insn is emitted into them, and their target is adjusted to
   the real exit block.

   Notes on shrink wrapping: We implement a fairly conservative
   version of shrink-wrapping rather than the textbook one.  We only
   generate a single prologue and a single epilogue.  This is
   sufficient to catch a number of interesting cases involving early
   exits.

   First, we identify the blocks that require the prologue to occur before
   them.  These are the ones that modify a call-saved register, or reference
   any of the stack or frame pointer registers.  To simplify things, we then
   mark everything reachable from these blocks as also requiring a prologue.
   This takes care of loops automatically, and avoids the need to examine
   whether MEMs reference the frame, since it is sufficient to check for
   occurrences of the stack or frame pointer.

   We then compute the set of blocks for which the need for a prologue
   is anticipatable (borrowing terminology from the shrink-wrapping
   description in Muchnick's book).  These are the blocks which either
   require a prologue themselves, or those that have only successors
   where the prologue is anticipatable.  The prologue needs to be
   inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
   is not.  For the moment, we ensure that only one such edge exists.

   The epilogue is placed as described above, but we make a
   distinction between inserting return and simple_return patterns
   when modifying other blocks that end in a return.  Blocks that end
   in a sibcall omit the sibcall_epilogue if the block is not in
   ANTIC.  */
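
/* As a concrete (illustrative) example of the scheme described above,
   consider

       int
       f (int *p)
       {
	 if (p == 0)
	   return -1;
	 return g (p);
       }

   Only the block containing the call to g requires a stack frame, so
   ANTIC consists of that block alone.  The prologue is then inserted on
   the single edge entering it, and the p == 0 early exit executes no
   frame setup or teardown at all.  */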
static void
thread_prologue_and_epilogue_insns (void)
{
  bool inserted;
#ifdef HAVE_simple_return
  vec<edge> unconverted_simple_returns = vNULL;
  bool nonempty_prologue;
  bitmap_head bb_flags;
  unsigned max_grow_size;
#endif
  rtx returnjump;
  rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
  rtx prologue_seq ATTRIBUTE_UNUSED, split_prologue_seq ATTRIBUTE_UNUSED;
  edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
  edge_iterator ei;

  df_analyze ();

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  inserted = false;
  seq = NULL_RTX;
  epilogue_end = NULL_RTX;
  returnjump = NULL_RTX;

  /* Can't deal with multiple successors of the entry block at the
     moment.  Function should always have at least one entry
     point.  */
  gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
  entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
  orig_entry_edge = entry_edge;
  split_prologue_seq = NULL_RTX;
  if (flag_split_stack
      && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
	  == NULL))
    {
#ifndef HAVE_split_stack_prologue
      gcc_unreachable ();
#else
      gcc_assert (HAVE_split_stack_prologue);

      start_sequence ();
      emit_insn (gen_split_stack_prologue ());
      split_prologue_seq = get_insns ();
      end_sequence ();

      record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
      set_insn_locations (split_prologue_seq, prologue_location);
#endif
    }
  prologue_seq = NULL_RTX;
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Insert an explicit USE for the frame pointer
	 if the profiling is on and the frame pointer is required.  */
      if (crtl->profile && frame_pointer_needed)
	emit_use (hard_frame_pointer_rtx);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, NULL, &prologue_insn_hash);
      emit_note (NOTE_INSN_PROLOGUE_END);

      /* Ensure that instructions are not moved into the prologue when
	 profiling is on.  The call to the profiling routine can be
	 emitted within the live range of a call-clobbered register.  */
      if (!targetm.profile_before_prologue () && crtl->profile)
	emit_insn (gen_blockage ());

      prologue_seq = get_insns ();
      end_sequence ();
      set_insn_locations (prologue_seq, prologue_location);
    }
#endif
#ifdef HAVE_simple_return
  bitmap_initialize (&bb_flags, &bitmap_default_obstack);

  /* Try to perform a kind of shrink-wrapping, making sure the
     prologue/epilogue is emitted only around those parts of the
     function that require it.  */

  nonempty_prologue = false;
  for (seq = prologue_seq; seq; seq = NEXT_INSN (seq))
    if (!NOTE_P (seq) || NOTE_KIND (seq) != NOTE_INSN_PROLOGUE_END)
      {
	nonempty_prologue = true;
	break;
      }

  if (flag_shrink_wrap && HAVE_simple_return
      && (targetm.profile_before_prologue () || !crtl->profile)
      && nonempty_prologue && !crtl->calls_eh_return)
    {
      HARD_REG_SET prologue_clobbered, prologue_used, live_on_edge;
      struct hard_reg_set_container set_up_by_prologue;
      rtx p_insn;
      vec<basic_block> vec;
      basic_block bb;
      bitmap_head bb_antic_flags;
      bitmap_head bb_on_list;
      bitmap_head bb_tail;

      if (dump_file)
	fprintf (dump_file, "Attempting shrink-wrapping optimization.\n");

      /* Compute the registers set and used in the prologue.  */
      CLEAR_HARD_REG_SET (prologue_clobbered);
      CLEAR_HARD_REG_SET (prologue_used);
      for (p_insn = prologue_seq; p_insn; p_insn = NEXT_INSN (p_insn))
	{
	  HARD_REG_SET this_used;
	  if (!NONDEBUG_INSN_P (p_insn))
	    continue;

	  CLEAR_HARD_REG_SET (this_used);
	  note_uses (&PATTERN (p_insn), record_hard_reg_uses,
		     &this_used);
	  AND_COMPL_HARD_REG_SET (this_used, prologue_clobbered);
	  IOR_HARD_REG_SET (prologue_used, this_used);
	  note_stores (PATTERN (p_insn), record_hard_reg_sets,
		       &prologue_clobbered);
	}
      prepare_shrink_wrap (entry_edge->dest);

      bitmap_initialize (&bb_antic_flags, &bitmap_default_obstack);
      bitmap_initialize (&bb_on_list, &bitmap_default_obstack);
      bitmap_initialize (&bb_tail, &bitmap_default_obstack);

      /* Find the set of basic blocks that require a stack frame,
	 and blocks that are too big to be duplicated.  */

      vec.create (n_basic_blocks);

      CLEAR_HARD_REG_SET (set_up_by_prologue.set);
      add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
			   STACK_POINTER_REGNUM);
      add_to_hard_reg_set (&set_up_by_prologue.set, Pmode, ARG_POINTER_REGNUM);
      if (frame_pointer_needed)
	add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
			     HARD_FRAME_POINTER_REGNUM);
      if (pic_offset_table_rtx)
	add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
			     PIC_OFFSET_TABLE_REGNUM);
      if (crtl->drap_reg)
	add_to_hard_reg_set (&set_up_by_prologue.set,
			     GET_MODE (crtl->drap_reg),
			     REGNO (crtl->drap_reg));
      if (targetm.set_up_by_prologue)
	targetm.set_up_by_prologue (&set_up_by_prologue);

      /* We don't use a different max size depending on
	 optimize_bb_for_speed_p because increasing shrink-wrapping
	 opportunities by duplicating tail blocks can actually result
	 in an overall decrease in code size.  */
      max_grow_size = get_uncond_jump_length ();
      max_grow_size *= PARAM_VALUE (PARAM_MAX_GROW_COPY_BB_INSNS);

      FOR_EACH_BB (bb)
	{
	  rtx insn;
	  unsigned size = 0;

	  FOR_BB_INSNS (bb, insn)
	    if (NONDEBUG_INSN_P (insn))
	      {
		if (requires_stack_frame_p (insn, prologue_used,
					    set_up_by_prologue.set))
		  {
		    if (bb == entry_edge->dest)
		      goto fail_shrinkwrap;
		    bitmap_set_bit (&bb_flags, bb->index);
		    vec.quick_push (bb);
		    break;
		  }
		else if (size <= max_grow_size)
		  {
		    size += get_attr_min_length (insn);
		    if (size > max_grow_size)
		      bitmap_set_bit (&bb_on_list, bb->index);
		  }
	      }
	}
      /* Blocks that really need a prologue, or are too big for tails.  */
      bitmap_ior_into (&bb_on_list, &bb_flags);

      /* For every basic block that needs a prologue, mark all blocks
	 reachable from it, so as to ensure they are also seen as
	 requiring a prologue.  */
      while (!vec.is_empty ())
	{
	  basic_block tmp_bb = vec.pop ();

	  FOR_EACH_EDGE (e, ei, tmp_bb->succs)
	    if (e->dest != EXIT_BLOCK_PTR
		&& bitmap_set_bit (&bb_flags, e->dest->index))
	      vec.quick_push (e->dest);
	}

      /* Find the set of basic blocks that need no prologue, have a
	 single successor, can be duplicated, meet a max size
	 requirement, and go to the exit via like blocks.  */
      vec.quick_push (EXIT_BLOCK_PTR);
      while (!vec.is_empty ())
	{
	  basic_block tmp_bb = vec.pop ();

	  FOR_EACH_EDGE (e, ei, tmp_bb->preds)
	    if (single_succ_p (e->src)
		&& !bitmap_bit_p (&bb_on_list, e->src->index)
		&& can_duplicate_block_p (e->src))
	      {
		edge pe;
		edge_iterator pei;

		/* If there is a predecessor of e->src which doesn't
		   need a prologue and the edge is complex, we might
		   not be able to redirect the branch to a copy of
		   e->src.  */
		FOR_EACH_EDGE (pe, pei, e->src->preds)
		  if ((pe->flags & EDGE_COMPLEX) != 0
		      && !bitmap_bit_p (&bb_flags, pe->src->index))
		    break;
		if (pe == NULL && bitmap_set_bit (&bb_tail, e->src->index))
		  vec.quick_push (e->src);
	      }
	}

      /* Now walk backwards from every block that is marked as needing
	 a prologue to compute the bb_antic_flags bitmap.  Exclude
	 tail blocks; they can be duplicated to be used on paths not
	 needing a prologue.  */
      bitmap_clear (&bb_on_list);
      bitmap_and_compl (&bb_antic_flags, &bb_flags, &bb_tail);
      FOR_EACH_BB (bb)
	{
	  if (!bitmap_bit_p (&bb_antic_flags, bb->index))
	    continue;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
		&& bitmap_set_bit (&bb_on_list, e->src->index))
	      vec.quick_push (e->src);
	}
      while (!vec.is_empty ())
	{
	  basic_block tmp_bb = vec.pop ();
	  bool all_set = true;

	  bitmap_clear_bit (&bb_on_list, tmp_bb->index);
	  FOR_EACH_EDGE (e, ei, tmp_bb->succs)
	    if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
	      {
		all_set = false;
		break;
	      }

	  if (all_set)
	    {
	      bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
	      FOR_EACH_EDGE (e, ei, tmp_bb->preds)
		if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
		    && bitmap_set_bit (&bb_on_list, e->src->index))
		  vec.quick_push (e->src);
	    }
	}
      /* Find exactly one edge that leads to a block in ANTIC from
	 a block that isn't.  */
      if (!bitmap_bit_p (&bb_antic_flags, entry_edge->dest->index))
	FOR_EACH_BB (bb)
	  {
	    if (!bitmap_bit_p (&bb_antic_flags, bb->index))
	      continue;
	    FOR_EACH_EDGE (e, ei, bb->preds)
	      if (!bitmap_bit_p (&bb_antic_flags, e->src->index))
		{
		  if (entry_edge != orig_entry_edge)
		    {
		      entry_edge = orig_entry_edge;
		      if (dump_file)
			fprintf (dump_file, "More than one candidate edge.\n");
		      goto fail_shrinkwrap;
		    }
		  if (dump_file)
		    fprintf (dump_file, "Found candidate edge for "
			     "shrink-wrapping, %d->%d.\n", e->src->index,
			     e->dest->index);
		  entry_edge = e;
		}
	  }

      if (entry_edge != orig_entry_edge)
	{
	  /* Test whether the prologue is known to clobber any register
	     (other than FP or SP) which is live on the edge.  */
	  CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    CLEAR_HARD_REG_BIT (prologue_clobbered, HARD_FRAME_POINTER_REGNUM);
	  REG_SET_TO_HARD_REG_SET (live_on_edge,
				   df_get_live_in (entry_edge->dest));
	  if (hard_reg_set_intersect_p (live_on_edge, prologue_clobbered))
	    {
	      entry_edge = orig_entry_edge;
	      if (dump_file)
		fprintf (dump_file,
			 "Shrink-wrapping aborted due to clobber.\n");
	    }
	}
      if (entry_edge != orig_entry_edge)
	{
	  crtl->shrink_wrapped = true;
	  if (dump_file)
	    fprintf (dump_file, "Performing shrink-wrapping.\n");

	  /* Find tail blocks reachable from both blocks needing a
	     prologue and blocks not needing a prologue.  */
	  if (!bitmap_empty_p (&bb_tail))
	    FOR_EACH_BB (bb)
	      {
		bool some_pro, some_no_pro;
		if (!bitmap_bit_p (&bb_tail, bb->index))
		  continue;
		some_pro = some_no_pro = false;
		FOR_EACH_EDGE (e, ei, bb->preds)
		  {
		    if (bitmap_bit_p (&bb_flags, e->src->index))
		      some_pro = true;
		    else
		      some_no_pro = true;
		  }
		if (some_pro && some_no_pro)
		  vec.quick_push (bb);
		else
		  bitmap_clear_bit (&bb_tail, bb->index);
	      }
	  /* Find the head of each tail.  */
	  while (!vec.is_empty ())
	    {
	      basic_block tbb = vec.pop ();

	      if (!bitmap_bit_p (&bb_tail, tbb->index))
		continue;

	      while (single_succ_p (tbb))
		{
		  tbb = single_succ (tbb);
		  bitmap_clear_bit (&bb_tail, tbb->index);
		}
	    }
	  /* Now duplicate the tails.  */
	  if (!bitmap_empty_p (&bb_tail))
	    FOR_EACH_BB_REVERSE (bb)
	      {
		basic_block copy_bb, tbb;
		rtx insert_point;
		int eflags;

		if (!bitmap_clear_bit (&bb_tail, bb->index))
		  continue;

		/* Create a copy of BB, instructions and all, for
		   use on paths that don't need a prologue.
		   Ideal placement of the copy is on a fall-thru edge
		   or after a block that would jump to the copy.  */
		FOR_EACH_EDGE (e, ei, bb->preds)
		  if (!bitmap_bit_p (&bb_flags, e->src->index)
		      && single_succ_p (e->src))
		    break;
		if (e)
		  {
		    copy_bb = create_basic_block (NEXT_INSN (BB_END (e->src)),
						  NULL_RTX, e->src);
		    BB_COPY_PARTITION (copy_bb, e->src);
		  }
		else
		  {
		    /* Otherwise put the copy at the end of the function.  */
		    copy_bb = create_basic_block (NULL_RTX, NULL_RTX,
						  EXIT_BLOCK_PTR->prev_bb);
		    BB_COPY_PARTITION (copy_bb, bb);
		  }

		insert_point = emit_note_after (NOTE_INSN_DELETED,
						BB_END (copy_bb));
		emit_barrier_after (BB_END (copy_bb));

		tbb = bb;
		while (1)
		  {
		    dup_block_and_redirect (tbb, copy_bb, insert_point,
					    &bb_flags);
		    tbb = single_succ (tbb);
		    if (tbb == EXIT_BLOCK_PTR)
		      break;
		    e = split_block (copy_bb, PREV_INSN (insert_point));
		    copy_bb = e->dest;
		  }

		/* Quiet verify_flow_info by (ab)using EDGE_FAKE.
		   We have yet to add a simple_return to the tails,
		   as we'd like to first convert_jumps_to_returns in
		   case the block is no longer used after that.  */
		eflags = EDGE_FAKE;
		if (CALL_P (PREV_INSN (insert_point))
		    && SIBLING_CALL_P (PREV_INSN (insert_point)))
		  eflags = EDGE_SIBCALL | EDGE_ABNORMAL;
		make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR, eflags);

		/* verify_flow_info doesn't like a note after a
		   sibling call.  */
		delete_insn (insert_point);
		if (bitmap_empty_p (&bb_tail))
		  break;
	      }
	}

    fail_shrinkwrap:
      bitmap_clear (&bb_tail);
      bitmap_clear (&bb_antic_flags);
      bitmap_clear (&bb_on_list);
      vec.release ();
    }
#endif
  if (split_prologue_seq != NULL_RTX)
    {
      insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
      inserted = true;
    }
  if (prologue_seq != NULL_RTX)
    {
      insert_insn_on_edge (prologue_seq, entry_edge);
      inserted = true;
    }

  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

  rtl_profile_for_bb (EXIT_BLOCK_PTR);

  exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR->preds);

  /* If we're allowed to generate a simple return instruction, then by
     definition we don't need a full epilogue.  If the last basic
     block before the exit block does not contain active instructions,
     examine its predecessors and try to emit (conditional) return
     instructions.  */
#ifdef HAVE_simple_return
  if (entry_edge != orig_entry_edge)
    {
      if (optimize)
	{
	  unsigned i, last;

	  /* convert_jumps_to_returns may add to EXIT_BLOCK_PTR->preds
	     (but won't remove).  Stop at end of current preds.  */
	  last = EDGE_COUNT (EXIT_BLOCK_PTR->preds);
	  for (i = 0; i < last; i++)
	    {
	      e = EDGE_I (EXIT_BLOCK_PTR->preds, i);
	      if (LABEL_P (BB_HEAD (e->src))
		  && !bitmap_bit_p (&bb_flags, e->src->index)
		  && !active_insn_between (BB_HEAD (e->src), BB_END (e->src)))
		unconverted_simple_returns
		  = convert_jumps_to_returns (e->src, true,
					      unconverted_simple_returns);
	    }
	}

      if (exit_fallthru_edge != NULL
	  && EDGE_COUNT (exit_fallthru_edge->src->preds) != 0
	  && !bitmap_bit_p (&bb_flags, exit_fallthru_edge->src->index))
	{
	  basic_block last_bb;

	  last_bb = emit_return_for_exit (exit_fallthru_edge, true);
	  returnjump = BB_END (last_bb);
	  exit_fallthru_edge = NULL;
	}
    }
#endif
#ifdef HAVE_return
  if (HAVE_return)
    {
      if (exit_fallthru_edge == NULL)
	goto epilogue_done;

      if (optimize)
	{
	  basic_block last_bb = exit_fallthru_edge->src;

	  if (LABEL_P (BB_HEAD (last_bb))
	      && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
	    convert_jumps_to_returns (last_bb, false, vNULL);

	  if (EDGE_COUNT (last_bb->preds) != 0
	      && single_succ_p (last_bb))
	    {
	      last_bb = emit_return_for_exit (exit_fallthru_edge, false);
	      epilogue_end = returnjump = BB_END (last_bb);
#ifdef HAVE_simple_return
	      /* Emitting the return may add a basic block.
		 Fix bb_flags for the added block.  */
	      if (last_bb != exit_fallthru_edge->src)
		bitmap_set_bit (&bb_flags, last_bb->index);
#endif
	      goto epilogue_done;
	    }
	}
    }
#endif
  /* A small fib -- epilogue is not yet completed, but we wish to re-use
     this marker for the splits of EH_RETURN patterns, and nothing else
     uses the flag in the meantime.  */
  epilogue_completed = 1;

#ifdef HAVE_eh_return
  /* Find non-fallthru edges that end with EH_RETURN instructions.  On
     some targets, these get split to a special version of the epilogue
     code.  In order to be able to properly annotate these with unwind
     info, try to split them now.  If we get a valid split, drop an
     EPILOGUE_BEG note and mark the insns as epilogue insns.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      rtx prev, last, trial;

      if (e->flags & EDGE_FALLTHRU)
	continue;
      last = BB_END (e->src);
      if (!eh_returnjump_p (last))
	continue;

      prev = PREV_INSN (last);
      trial = try_split (PATTERN (last), last, 1);
      if (trial == last)
	continue;

      record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
      emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
    }
#endif

  /* If nothing falls through into the exit block, we don't need an
     epilogue.  */

  if (exit_fallthru_edge == NULL)
    goto epilogue_done;
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
      seq = gen_epilogue ();
      if (seq)
	emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, NULL, &epilogue_insn_hash);
      set_insn_locations (seq, epilogue_location);

      seq = get_insns ();
      returnjump = get_last_insn ();
      end_sequence ();

      insert_insn_on_edge (seq, exit_fallthru_edge);
      inserted = true;

      if (JUMP_P (returnjump))
	set_return_jump_label (returnjump);
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
	goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
	 at the end of the function, and there will be an assembler epilogue
	 at the end of the function.
	 We can't use force_nonfallthru here, because that would try to
	 use return.  Inserting a jump 'by hand' is extremely messy, so
	 we take advantage of cfg_layout_finalize using
	 fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
	if (cur_bb->index >= NUM_FIXED_BLOCKS
	    && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
	  cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
    }
epilogue_done:

  default_rtl_profile ();

  if (inserted)
    {
      sbitmap blocks;

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      blocks = sbitmap_alloc (last_basic_block);
      bitmap_clear (blocks);
      bitmap_set_bit (blocks, entry_edge->dest->index);
      bitmap_set_bit (blocks, orig_entry_edge->dest->index);
      find_many_sub_basic_blocks (blocks);
      sbitmap_free (blocks);

      /* The epilogue insns we inserted may cause the exit edge to no longer
	 be fallthru.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  if (((e->flags & EDGE_FALLTHRU) != 0)
	      && returnjump_p (BB_END (e->src)))
	    e->flags &= ~EDGE_FALLTHRU;
	}
    }
#ifdef HAVE_simple_return
  /* If there were branches to an empty LAST_BB which we tried to
     convert to conditional simple_returns, but couldn't for some
     reason, create a block to hold a simple_return insn and redirect
     those remaining edges.  */
  if (!unconverted_simple_returns.is_empty ())
    {
      basic_block simple_return_block_hot = NULL;
      basic_block simple_return_block_cold = NULL;
      edge pending_edge_hot = NULL;
      edge pending_edge_cold = NULL;
      basic_block exit_pred = EXIT_BLOCK_PTR->prev_bb;
      int i;

      gcc_assert (entry_edge != orig_entry_edge);

      /* See if we can reuse the last insn that was emitted for the
	 epilogue.  */
      if (returnjump != NULL_RTX
	  && JUMP_LABEL (returnjump) == simple_return_rtx)
	{
	  e = split_block (BLOCK_FOR_INSN (returnjump), PREV_INSN (returnjump));
	  if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
	    simple_return_block_hot = e->dest;
	  else
	    simple_return_block_cold = e->dest;
	}

      /* Also check returns we might need to add to tail blocks.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	if (EDGE_COUNT (e->src->preds) != 0
	    && (e->flags & EDGE_FAKE) != 0
	    && !bitmap_bit_p (&bb_flags, e->src->index))
	  {
	    if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
	      pending_edge_hot = e;
	    else
	      pending_edge_cold = e;
	  }

      FOR_EACH_VEC_ELT (unconverted_simple_returns, i, e)
	{
	  basic_block *pdest_bb;
	  edge pending;

	  if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
	    {
	      pdest_bb = &simple_return_block_hot;
	      pending = pending_edge_hot;
	    }
	  else
	    {
	      pdest_bb = &simple_return_block_cold;
	      pending = pending_edge_cold;
	    }

	  if (*pdest_bb == NULL && pending != NULL)
	    {
	      emit_return_into_block (true, pending->src);
	      pending->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
	      *pdest_bb = pending->src;
	    }
	  else if (*pdest_bb == NULL)
	    {
	      basic_block bb;
	      rtx start;

	      bb = create_basic_block (NULL, NULL, exit_pred);
	      BB_COPY_PARTITION (bb, e->src);
	      start = emit_jump_insn_after (gen_simple_return (),
					    BB_END (bb));
	      JUMP_LABEL (start) = simple_return_rtx;
	      emit_barrier_after (start);

	      *pdest_bb = bb;
	      make_edge (bb, EXIT_BLOCK_PTR, 0);
	    }
	  redirect_edge_and_branch_force (e, *pdest_bb);
	}
      unconverted_simple_returns.release ();
    }

  if (entry_edge != orig_entry_edge)
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	if (EDGE_COUNT (e->src->preds) != 0
	    && (e->flags & EDGE_FAKE) != 0
	    && !bitmap_bit_p (&bb_flags, e->src->index))
	  {
	    emit_return_into_block (true, e->src);
	    e->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
	  }
    }
#endif
#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);
      rtx ep_seq;

      if (!CALL_P (insn)
	  || ! SIBLING_CALL_P (insn)
#ifdef HAVE_simple_return
	  || (entry_edge != orig_entry_edge
	      && !bitmap_bit_p (&bb_flags, bb->index))
#endif
	  )
	{
	  ei_next (&ei);
	  continue;
	}

      ep_seq = gen_sibcall_epilogue ();
      if (ep_seq)
	{
	  start_sequence ();
	  emit_note (NOTE_INSN_EPILOGUE_BEG);
	  emit_insn (ep_seq);
	  seq = get_insns ();
	  end_sequence ();

	  /* Retain a map of the epilogue insns.  Used in life analysis to
	     avoid getting rid of sibcall epilogue insns.  Do this before we
	     actually emit the sequence.  */
	  record_insns (seq, NULL, &epilogue_insn_hash);
	  set_insn_locations (seq, epilogue_location);

	  emit_insn_before (seq, insn);
	}
      ei_next (&ei);
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
	 of such a note.  Also possibly move
	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
	 info generation.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (NOTE_P (insn)
	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
    }
#endif

#ifdef HAVE_simple_return
  bitmap_clear (&bb_flags);
#endif

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
    || defined (HAVE_sibcall_epilogue)
  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = htab_elements (prologue_insn_hash);
      rtx insn, last = NULL, note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
	 (1) The prologue can contain loops (typically probing the stack),
	     which means that the end of the prologue isn't in the first bb.
	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, prologue_insn_hash))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  if (note == NULL)
	    {
	      /* Scan forward looking for the PROLOGUE_END note.  It should
		 be right at the beginning of the block, possibly with other
		 insn notes that got moved there.  */
	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
		{
		  if (NOTE_P (note)
		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
		    break;
		}
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (LABEL_P (last))
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }

  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  rtx insn, first = NULL, note = NULL;
	  basic_block bb = e->src;

	  /* Scan from the beginning until we reach the first epilogue insn.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      if (NOTE_P (insn))
		{
		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
		    {
		      note = insn;
		      if (first != NULL)
			break;
		    }
		}
	      else if (first == NULL && contains (insn, epilogue_insn_hash))
		{
		  first = insn;
		  if (note != NULL)
		    break;
		}
	    }

	  if (note)
	    {
	      /* If the function has a single basic block, and no real
		 epilogue insns (e.g. sibcall with no cleanup), the
		 epilogue note can get scheduled before the prologue
		 note.  If we have frame related prologue insns, having
		 them scanned during the epilogue will result in a crash.
		 In this case re-order the epilogue note to just before
		 the last insn in the block.  */
	      if (first == NULL)
		first = BB_END (bb);

	      if (PREV_INSN (first) != note)
		reorder_insns (note, note, PREV_INSN (first));
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Returns the name of function declared by FNDECL.  */
const char *
fndecl_name (tree fndecl)
{
  if (fndecl == NULL)
    return "(nofn)";
  return lang_hooks.decl_printable_name (fndecl, 2);
}

/* Returns the name of function FN.  */
const char *
function_name (struct function *fn)
{
  tree fndecl = (fn == NULL) ? NULL : fn->decl;
  return fndecl_name (fndecl);
}

/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return function_name (cfun);
}
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
	func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
						 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
	*slot = type;
    }
}
/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
	used_types_insert_helper (t, cfun);
      else
	{
	  /* So this might be a type referenced by a global variable.
	     Record that type so that we can later decide to emit its
	     debug information.  */
	  vec_safe_push (types_used_by_cur_var_decl, t);
	}
    }
}
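
/* For example (illustrative): for a variable declared as

       struct s **x;

   the loop above strips both pointer levels (pointer types are
   typically unnamed), reaches struct s, and records its main variant,
   so debug info for struct s can be emitted even if only pointers to
   it appear in the code.  */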
/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
				iterative_hash_object (entry->var_decl, 0));
}
/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
types_used_by_vars_do_hash (const void *x)
{
  const struct types_used_by_vars_entry *entry =
    (const struct types_used_by_vars_entry *) x;

  return hash_types_used_by_vars_entry (entry);
}
/* Equality function of the types_used_by_vars_entry hash table.  */

int
types_used_by_vars_eq (const void *x1, const void *x2)
{
  const struct types_used_by_vars_entry *e1 =
    (const struct types_used_by_vars_entry *) x1;
  const struct types_used_by_vars_entry *e2 =
    (const struct types_used_by_vars_entry *) x2;

  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}
/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      void **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
	types_used_by_vars_hash =
	  htab_create_ggc (37, types_used_by_vars_do_hash,
			   types_used_by_vars_eq, NULL);
      slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
				       hash_types_used_by_vars_entry (&e),
				       INSERT);
      if (*slot == NULL)
	{
	  struct types_used_by_vars_entry *entry;
	  entry = ggc_alloc_types_used_by_vars_entry ();
	  entry->type = type;
	  entry->var_decl = var_decl;
	  *slot = entry;
	}
    }
}
struct rtl_opt_pass pass_leaf_regs =
{
 {
  RTL_PASS,
  "*leaf_regs",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  NULL,					/* gate */
  rest_of_handle_check_leaf_regs,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}
struct rtl_opt_pass pass_thread_prologue_and_epilogue =
{
 {
  RTL_PASS,
  "pro_and_epilogue",			/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  NULL,					/* gate */
  rest_of_handle_thread_prologue_and_epilogue, /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_THREAD_PROLOGUE_AND_EPILOGUE,	/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  TODO_verify_flow,			/* todo_flags_start */
  TODO_df_verify |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect			/* todo_flags_finish */
 }
};
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
	constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
	continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
	  || rtx_equal_p (output, input)
	  || (GET_MODE (input) != VOIDmode
	      && GET_MODE (input) != GET_MODE (output)))
	continue;

      /* We can't do anything if the output is also used as input,
	 as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
	  break;
      if (j != ninputs)
	continue;

      /* Avoid changing the same input several times.  For
	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
	 only change in once (to out1), rather than changing it
	 first to out1 and afterwards to out2.  */
      if (i > 0)
	{
	  for (j = 0; j < noutputs; j++)
	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
	      break;
	  if (j != noutputs)
	    continue;
	}
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
	 just replace the occurrence in inputs[i], as the register might
	 also be used in some other input (or even in an address of an
	 output), which would mean possibly increasing the number of
	 inputs by one (namely 'output' in addition), which might pose
	 a too complicated problem for reload to solve.  E.g. this situation:

	   asm ("" : "=r" (output), "=m" (input) : "0" (input))

	 Here 'input' is used in two occurrences as input (once for the
	 input operand, once for the address in the second output operand).
	 If we would replace only the occurrence of the input operand (to
	 make the matching) we would be left with this:

	   output = input
	   asm ("" : "=r" (output), "=m" (input) : "0" (output))

	 Now we suddenly have two different input values (containing the same
	 value, but different pseudos) where we formerly had only one.
	 With more complicated asms this might lead to reload failures
	 which wouldn't have happened without this pass.  So, iterate over
	 all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
					      input, output);
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
					       input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
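
/* A worked example of the transformation implemented above
   (illustrative): given

     asm ("..." : "=mr" (x) : "0" (y));

   the input constraint "0" parses to MATCH == 0, so the pass emits a
   move "x = y" ahead of the asm and rewrites every occurrence of y in
   the operands to x, yielding

     x = y;
     asm ("..." : "=mr" (x) : "0" (x));

   in which the matched operands are identical pseudos again.  */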
static unsigned
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL)
	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
	  else if (GET_CODE (pat) == SET)
	    p_sets = &PATTERN (insn), noutputs = 1;
	  else
	    continue;

	  if (GET_CODE (*p_sets) == SET
	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
	    match_asm_constraints_1 (insn, p_sets, noutputs);
	}
    }

  return TODO_df_finish;
}
struct rtl_opt_pass pass_match_asm_constraints =
{
 {
  RTL_PASS,
  "asmcons",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  NULL,					/* gate */
  rest_of_match_asm_constraints,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
#include "gt-function.h"