Mark ChangeLog
[official-gcc.git] / gcc / function.c
blob76baf30798429a8f8f4d4ceb22575604c8f5dcf5
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
32 not get a hard register. */
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "rtl-error.h"
39 #include "tree.h"
40 #include "flags.h"
41 #include "except.h"
42 #include "function.h"
43 #include "expr.h"
44 #include "optabs.h"
45 #include "libfuncs.h"
46 #include "regs.h"
47 #include "hard-reg-set.h"
48 #include "insn-config.h"
49 #include "recog.h"
50 #include "output.h"
51 #include "basic-block.h"
52 #include "hashtab.h"
53 #include "ggc.h"
54 #include "tm_p.h"
55 #include "langhooks.h"
56 #include "target.h"
57 #include "common/common-target.h"
58 #include "gimple.h"
59 #include "tree-pass.h"
60 #include "predict.h"
61 #include "df.h"
62 #include "params.h"
63 #include "bb-reorder.h"
65 /* So we can assign to cfun in this file. */
66 #undef cfun
68 #ifndef STACK_ALIGNMENT_NEEDED
69 #define STACK_ALIGNMENT_NEEDED 1
70 #endif
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
74 /* Some systems use __main in a way incompatible with its use in gcc, in these
75 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
76 give the same symbol without quotes for an alternative entry point. You
77 must define both, or neither. */
78 #ifndef NAME__MAIN
79 #define NAME__MAIN "__main"
80 #endif
82 /* Round a value to the lowest integer less than it that is a multiple of
83 the required alignment. Avoid using division in case the value is
84 negative. Assume the alignment is a power of two. */
85 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
87 /* Similar, but round to the next highest integer that meets the
88 alignment. */
89 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
91 /* Nonzero once virtual register instantiation has been done.
92 assign_stack_local uses frame_pointer_rtx when this is nonzero.
93 calls.c:emit_library_call_value_1 uses it to set up
94 post-instantiation libcalls. */
95 int virtuals_instantiated;
97 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
98 static GTY(()) int funcdef_no;
100 /* These variables hold pointers to functions to create and destroy
101 target specific, per-function data structures. */
102 struct machine_function * (*init_machine_status) (void);
104 /* The currently compiled function. */
105 struct function *cfun = 0;
107 /* These hashes record the prologue and epilogue insns. */
108 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
109 htab_t prologue_insn_hash;
110 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
111 htab_t epilogue_insn_hash;
114 htab_t types_used_by_vars_hash = NULL;
115 vec<tree, va_gc> *types_used_by_cur_var_decl;
117 /* Forward declarations. */
119 static struct temp_slot *find_temp_slot_from_address (rtx);
120 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
121 static void pad_below (struct args_size *, enum machine_mode, tree);
122 static void reorder_blocks_1 (rtx, tree, vec<tree> *);
123 static int all_blocks (tree, tree *);
124 static tree *get_block_vector (tree, int *);
125 extern tree debug_find_var_in_block_tree (tree, tree);
126 /* We always define `record_insns' even if it's not used so that we
127 can always export `prologue_epilogue_contains'. */
128 static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
129 static bool contains (const_rtx, htab_t);
130 static void prepare_function_start (void);
131 static void do_clobber_return_reg (rtx, void *);
132 static void do_use_return_reg (rtx, void *);
133 static void set_insn_locations (rtx, int) ATTRIBUTE_UNUSED;
135 /* Stack of nested functions. */
136 /* Keep track of the cfun stack. */
138 typedef struct function *function_p;
140 static vec<function_p> function_context_stack;
142 /* Save the current context for compilation of a nested function.
143 This is called from language-specific code. */
145 void
146 push_function_context (void)
148 if (cfun == 0)
149 allocate_struct_function (NULL, false);
151 function_context_stack.safe_push (cfun);
152 set_cfun (NULL);
155 /* Restore the last saved context, at the end of a nested function.
156 This function is called from language-specific code. */
158 void
159 pop_function_context (void)
161 struct function *p = function_context_stack.pop ();
162 set_cfun (p);
163 current_function_decl = p->decl;
165 /* Reset variables that have known state during rtx generation. */
166 virtuals_instantiated = 0;
167 generating_concat_p = 1;
170 /* Clear out all parts of the state in F that can safely be discarded
171 after the function has been parsed, but not compiled, to let
172 garbage collection reclaim the memory. */
174 void
175 free_after_parsing (struct function *f)
177 f->language = 0;
180 /* Clear out all parts of the state in F that can safely be discarded
181 after the function has been compiled, to let garbage collection
182 reclaim the memory. */
184 void
185 free_after_compilation (struct function *f)
187 prologue_insn_hash = NULL;
188 epilogue_insn_hash = NULL;
190 free (crtl->emit.regno_pointer_align);
192 memset (crtl, 0, sizeof (struct rtl_data));
193 f->eh = NULL;
194 f->machine = NULL;
195 f->cfg = NULL;
197 regno_reg_rtx = NULL;
200 /* Return size needed for stack frame based on slots so far allocated.
201 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
202 the caller may have to do that. */
204 HOST_WIDE_INT
205 get_frame_size (void)
207 if (FRAME_GROWS_DOWNWARD)
208 return -frame_offset;
209 else
210 return frame_offset;
213 /* Issue an error message and return TRUE if frame OFFSET overflows in
214 the signed target pointer arithmetics for function FUNC. Otherwise
215 return FALSE. */
217 bool
218 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
220 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
222 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
223 /* Leave room for the fixed part of the frame. */
224 - 64 * UNITS_PER_WORD)
226 error_at (DECL_SOURCE_LOCATION (func),
227 "total size of local objects too large");
228 return TRUE;
231 return FALSE;
234 /* Return stack slot alignment in bits for TYPE and MODE. */
236 static unsigned int
237 get_stack_local_alignment (tree type, enum machine_mode mode)
239 unsigned int alignment;
241 if (mode == BLKmode)
242 alignment = BIGGEST_ALIGNMENT;
243 else
244 alignment = GET_MODE_ALIGNMENT (mode);
246 /* Allow the frond-end to (possibly) increase the alignment of this
247 stack slot. */
248 if (! type)
249 type = lang_hooks.types.type_for_mode (mode, 0);
251 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
254 /* Determine whether it is possible to fit a stack slot of size SIZE and
255 alignment ALIGNMENT into an area in the stack frame that starts at
256 frame offset START and has a length of LENGTH. If so, store the frame
257 offset to be used for the stack slot in *POFFSET and return true;
258 return false otherwise. This function will extend the frame size when
259 given a start/length pair that lies at the end of the frame. */
261 static bool
262 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
263 HOST_WIDE_INT size, unsigned int alignment,
264 HOST_WIDE_INT *poffset)
266 HOST_WIDE_INT this_frame_offset;
267 int frame_off, frame_alignment, frame_phase;
269 /* Calculate how many bytes the start of local variables is off from
270 stack alignment. */
271 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
272 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
273 frame_phase = frame_off ? frame_alignment - frame_off : 0;
275 /* Round the frame offset to the specified alignment. */
277 /* We must be careful here, since FRAME_OFFSET might be negative and
278 division with a negative dividend isn't as well defined as we might
279 like. So we instead assume that ALIGNMENT is a power of two and
280 use logical operations which are unambiguous. */
281 if (FRAME_GROWS_DOWNWARD)
282 this_frame_offset
283 = (FLOOR_ROUND (start + length - size - frame_phase,
284 (unsigned HOST_WIDE_INT) alignment)
285 + frame_phase);
286 else
287 this_frame_offset
288 = (CEIL_ROUND (start - frame_phase,
289 (unsigned HOST_WIDE_INT) alignment)
290 + frame_phase);
292 /* See if it fits. If this space is at the edge of the frame,
293 consider extending the frame to make it fit. Our caller relies on
294 this when allocating a new slot. */
295 if (frame_offset == start && this_frame_offset < frame_offset)
296 frame_offset = this_frame_offset;
297 else if (this_frame_offset < start)
298 return false;
299 else if (start + length == frame_offset
300 && this_frame_offset + size > start + length)
301 frame_offset = this_frame_offset + size;
302 else if (this_frame_offset + size > start + length)
303 return false;
305 *poffset = this_frame_offset;
306 return true;
309 /* Create a new frame_space structure describing free space in the stack
310 frame beginning at START and ending at END, and chain it into the
311 function's frame_space_list. */
313 static void
314 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
316 struct frame_space *space = ggc_alloc_frame_space ();
317 space->next = crtl->frame_space_list;
318 crtl->frame_space_list = space;
319 space->start = start;
320 space->length = end - start;
323 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
324 with machine mode MODE.
326 ALIGN controls the amount of alignment for the address of the slot:
327 0 means according to MODE,
328 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
329 -2 means use BITS_PER_UNIT,
330 positive specifies alignment boundary in bits.
332 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
333 alignment and ASLK_RECORD_PAD bit set if we should remember
334 extra space we allocated for alignment purposes. When we are
335 called from assign_stack_temp_for_type, it is not set so we don't
336 track the same stack slot in two independent lists.
338 We do not round to stack_boundary here. */
341 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
342 int align, int kind)
344 rtx x, addr;
345 int bigend_correction = 0;
346 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
347 unsigned int alignment, alignment_in_bits;
349 if (align == 0)
351 alignment = get_stack_local_alignment (NULL, mode);
352 alignment /= BITS_PER_UNIT;
354 else if (align == -1)
356 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
357 size = CEIL_ROUND (size, alignment);
359 else if (align == -2)
360 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
361 else
362 alignment = align / BITS_PER_UNIT;
364 alignment_in_bits = alignment * BITS_PER_UNIT;
366 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
367 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
369 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
370 alignment = alignment_in_bits / BITS_PER_UNIT;
373 if (SUPPORTS_STACK_ALIGNMENT)
375 if (crtl->stack_alignment_estimated < alignment_in_bits)
377 if (!crtl->stack_realign_processed)
378 crtl->stack_alignment_estimated = alignment_in_bits;
379 else
381 /* If stack is realigned and stack alignment value
382 hasn't been finalized, it is OK not to increase
383 stack_alignment_estimated. The bigger alignment
384 requirement is recorded in stack_alignment_needed
385 below. */
386 gcc_assert (!crtl->stack_realign_finalized);
387 if (!crtl->stack_realign_needed)
389 /* It is OK to reduce the alignment as long as the
390 requested size is 0 or the estimated stack
391 alignment >= mode alignment. */
392 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
393 || size == 0
394 || (crtl->stack_alignment_estimated
395 >= GET_MODE_ALIGNMENT (mode)));
396 alignment_in_bits = crtl->stack_alignment_estimated;
397 alignment = alignment_in_bits / BITS_PER_UNIT;
403 if (crtl->stack_alignment_needed < alignment_in_bits)
404 crtl->stack_alignment_needed = alignment_in_bits;
405 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
406 crtl->max_used_stack_slot_alignment = alignment_in_bits;
408 if (mode != BLKmode || size != 0)
410 if (kind & ASLK_RECORD_PAD)
412 struct frame_space **psp;
414 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
416 struct frame_space *space = *psp;
417 if (!try_fit_stack_local (space->start, space->length, size,
418 alignment, &slot_offset))
419 continue;
420 *psp = space->next;
421 if (slot_offset > space->start)
422 add_frame_space (space->start, slot_offset);
423 if (slot_offset + size < space->start + space->length)
424 add_frame_space (slot_offset + size,
425 space->start + space->length);
426 goto found_space;
430 else if (!STACK_ALIGNMENT_NEEDED)
432 slot_offset = frame_offset;
433 goto found_space;
436 old_frame_offset = frame_offset;
438 if (FRAME_GROWS_DOWNWARD)
440 frame_offset -= size;
441 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
443 if (kind & ASLK_RECORD_PAD)
445 if (slot_offset > frame_offset)
446 add_frame_space (frame_offset, slot_offset);
447 if (slot_offset + size < old_frame_offset)
448 add_frame_space (slot_offset + size, old_frame_offset);
451 else
453 frame_offset += size;
454 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
456 if (kind & ASLK_RECORD_PAD)
458 if (slot_offset > old_frame_offset)
459 add_frame_space (old_frame_offset, slot_offset);
460 if (slot_offset + size < frame_offset)
461 add_frame_space (slot_offset + size, frame_offset);
465 found_space:
466 /* On a big-endian machine, if we are allocating more space than we will use,
467 use the least significant bytes of those that are allocated. */
468 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
469 bigend_correction = size - GET_MODE_SIZE (mode);
471 /* If we have already instantiated virtual registers, return the actual
472 address relative to the frame pointer. */
473 if (virtuals_instantiated)
474 addr = plus_constant (Pmode, frame_pointer_rtx,
475 trunc_int_for_mode
476 (slot_offset + bigend_correction
477 + STARTING_FRAME_OFFSET, Pmode));
478 else
479 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
480 trunc_int_for_mode
481 (slot_offset + bigend_correction,
482 Pmode));
484 x = gen_rtx_MEM (mode, addr);
485 set_mem_align (x, alignment_in_bits);
486 MEM_NOTRAP_P (x) = 1;
488 stack_slot_list
489 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
491 if (frame_offset_overflow (frame_offset, current_function_decl))
492 frame_offset = 0;
494 return x;
497 /* Wrap up assign_stack_local_1 with last parameter as false. */
500 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
502 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
505 /* In order to evaluate some expressions, such as function calls returning
506 structures in memory, we need to temporarily allocate stack locations.
507 We record each allocated temporary in the following structure.
509 Associated with each temporary slot is a nesting level. When we pop up
510 one level, all temporaries associated with the previous level are freed.
511 Normally, all temporaries are freed after the execution of the statement
512 in which they were created. However, if we are inside a ({...}) grouping,
513 the result may be in a temporary and hence must be preserved. If the
514 result could be in a temporary, we preserve it if we can determine which
515 one it is in. If we cannot determine which temporary may contain the
516 result, all temporaries are preserved. A temporary is preserved by
517 pretending it was allocated at the previous nesting level. */
519 struct GTY(()) temp_slot {
520 /* Points to next temporary slot. */
521 struct temp_slot *next;
522 /* Points to previous temporary slot. */
523 struct temp_slot *prev;
524 /* The rtx to used to reference the slot. */
525 rtx slot;
526 /* The size, in units, of the slot. */
527 HOST_WIDE_INT size;
528 /* The type of the object in the slot, or zero if it doesn't correspond
529 to a type. We use this to determine whether a slot can be reused.
530 It can be reused if objects of the type of the new slot will always
531 conflict with objects of the type of the old slot. */
532 tree type;
533 /* The alignment (in bits) of the slot. */
534 unsigned int align;
535 /* Nonzero if this temporary is currently in use. */
536 char in_use;
537 /* Nesting level at which this slot is being used. */
538 int level;
539 /* The offset of the slot from the frame_pointer, including extra space
540 for alignment. This info is for combine_temp_slots. */
541 HOST_WIDE_INT base_offset;
542 /* The size of the slot, including extra space for alignment. This
543 info is for combine_temp_slots. */
544 HOST_WIDE_INT full_size;
547 /* A table of addresses that represent a stack slot. The table is a mapping
548 from address RTXen to a temp slot. */
549 static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
550 static size_t n_temp_slots_in_use;
552 /* Entry for the above hash table. */
553 struct GTY(()) temp_slot_address_entry {
554 hashval_t hash;
555 rtx address;
556 struct temp_slot *temp_slot;
559 /* Removes temporary slot TEMP from LIST. */
561 static void
562 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
564 if (temp->next)
565 temp->next->prev = temp->prev;
566 if (temp->prev)
567 temp->prev->next = temp->next;
568 else
569 *list = temp->next;
571 temp->prev = temp->next = NULL;
574 /* Inserts temporary slot TEMP to LIST. */
576 static void
577 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
579 temp->next = *list;
580 if (*list)
581 (*list)->prev = temp;
582 temp->prev = NULL;
583 *list = temp;
586 /* Returns the list of used temp slots at LEVEL. */
588 static struct temp_slot **
589 temp_slots_at_level (int level)
591 if (level >= (int) vec_safe_length (used_temp_slots))
592 vec_safe_grow_cleared (used_temp_slots, level + 1);
594 return &(*used_temp_slots)[level];
597 /* Returns the maximal temporary slot level. */
599 static int
600 max_slot_level (void)
602 if (!used_temp_slots)
603 return -1;
605 return used_temp_slots->length () - 1;
608 /* Moves temporary slot TEMP to LEVEL. */
610 static void
611 move_slot_to_level (struct temp_slot *temp, int level)
613 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
614 insert_slot_to_list (temp, temp_slots_at_level (level));
615 temp->level = level;
618 /* Make temporary slot TEMP available. */
620 static void
621 make_slot_available (struct temp_slot *temp)
623 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
624 insert_slot_to_list (temp, &avail_temp_slots);
625 temp->in_use = 0;
626 temp->level = -1;
627 n_temp_slots_in_use--;
630 /* Compute the hash value for an address -> temp slot mapping.
631 The value is cached on the mapping entry. */
632 static hashval_t
633 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
635 int do_not_record = 0;
636 return hash_rtx (t->address, GET_MODE (t->address),
637 &do_not_record, NULL, false);
640 /* Return the hash value for an address -> temp slot mapping. */
641 static hashval_t
642 temp_slot_address_hash (const void *p)
644 const struct temp_slot_address_entry *t;
645 t = (const struct temp_slot_address_entry *) p;
646 return t->hash;
649 /* Compare two address -> temp slot mapping entries. */
650 static int
651 temp_slot_address_eq (const void *p1, const void *p2)
653 const struct temp_slot_address_entry *t1, *t2;
654 t1 = (const struct temp_slot_address_entry *) p1;
655 t2 = (const struct temp_slot_address_entry *) p2;
656 return exp_equiv_p (t1->address, t2->address, 0, true);
659 /* Add ADDRESS as an alias of TEMP_SLOT to the addess -> temp slot mapping. */
660 static void
661 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
663 void **slot;
664 struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
665 t->address = address;
666 t->temp_slot = temp_slot;
667 t->hash = temp_slot_address_compute_hash (t);
668 slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
669 *slot = t;
672 /* Remove an address -> temp slot mapping entry if the temp slot is
673 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
674 static int
675 remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
677 const struct temp_slot_address_entry *t;
678 t = (const struct temp_slot_address_entry *) *slot;
679 if (! t->temp_slot->in_use)
680 htab_clear_slot (temp_slot_address_table, slot);
681 return 1;
684 /* Remove all mappings of addresses to unused temp slots. */
685 static void
686 remove_unused_temp_slot_addresses (void)
688 /* Use quicker clearing if there aren't any active temp slots. */
689 if (n_temp_slots_in_use)
690 htab_traverse (temp_slot_address_table,
691 remove_unused_temp_slot_addresses_1,
692 NULL);
693 else
694 htab_empty (temp_slot_address_table);
697 /* Find the temp slot corresponding to the object at address X. */
699 static struct temp_slot *
700 find_temp_slot_from_address (rtx x)
702 struct temp_slot *p;
703 struct temp_slot_address_entry tmp, *t;
705 /* First try the easy way:
706 See if X exists in the address -> temp slot mapping. */
707 tmp.address = x;
708 tmp.temp_slot = NULL;
709 tmp.hash = temp_slot_address_compute_hash (&tmp);
710 t = (struct temp_slot_address_entry *)
711 htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
712 if (t)
713 return t->temp_slot;
715 /* If we have a sum involving a register, see if it points to a temp
716 slot. */
717 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
718 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
719 return p;
720 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
721 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
722 return p;
724 /* Last resort: Address is a virtual stack var address. */
725 if (GET_CODE (x) == PLUS
726 && XEXP (x, 0) == virtual_stack_vars_rtx
727 && CONST_INT_P (XEXP (x, 1)))
729 int i;
730 for (i = max_slot_level (); i >= 0; i--)
731 for (p = *temp_slots_at_level (i); p; p = p->next)
733 if (INTVAL (XEXP (x, 1)) >= p->base_offset
734 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
735 return p;
739 return NULL;
742 /* Allocate a temporary stack slot and record it for possible later
743 reuse.
745 MODE is the machine mode to be given to the returned rtx.
747 SIZE is the size in units of the space required. We do no rounding here
748 since assign_stack_local will do any required rounding.
750 TYPE is the type that will be used for the stack slot. */
753 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
754 tree type)
756 unsigned int align;
757 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
758 rtx slot;
760 /* If SIZE is -1 it means that somebody tried to allocate a temporary
761 of a variable size. */
762 gcc_assert (size != -1);
764 align = get_stack_local_alignment (type, mode);
766 /* Try to find an available, already-allocated temporary of the proper
767 mode which meets the size and alignment requirements. Choose the
768 smallest one with the closest alignment.
770 If assign_stack_temp is called outside of the tree->rtl expansion,
771 we cannot reuse the stack slots (that may still refer to
772 VIRTUAL_STACK_VARS_REGNUM). */
773 if (!virtuals_instantiated)
775 for (p = avail_temp_slots; p; p = p->next)
777 if (p->align >= align && p->size >= size
778 && GET_MODE (p->slot) == mode
779 && objects_must_conflict_p (p->type, type)
780 && (best_p == 0 || best_p->size > p->size
781 || (best_p->size == p->size && best_p->align > p->align)))
783 if (p->align == align && p->size == size)
785 selected = p;
786 cut_slot_from_list (selected, &avail_temp_slots);
787 best_p = 0;
788 break;
790 best_p = p;
795 /* Make our best, if any, the one to use. */
796 if (best_p)
798 selected = best_p;
799 cut_slot_from_list (selected, &avail_temp_slots);
801 /* If there are enough aligned bytes left over, make them into a new
802 temp_slot so that the extra bytes don't get wasted. Do this only
803 for BLKmode slots, so that we can be sure of the alignment. */
804 if (GET_MODE (best_p->slot) == BLKmode)
806 int alignment = best_p->align / BITS_PER_UNIT;
807 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
809 if (best_p->size - rounded_size >= alignment)
811 p = ggc_alloc_temp_slot ();
812 p->in_use = 0;
813 p->size = best_p->size - rounded_size;
814 p->base_offset = best_p->base_offset + rounded_size;
815 p->full_size = best_p->full_size - rounded_size;
816 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
817 p->align = best_p->align;
818 p->type = best_p->type;
819 insert_slot_to_list (p, &avail_temp_slots);
821 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
822 stack_slot_list);
824 best_p->size = rounded_size;
825 best_p->full_size = rounded_size;
830 /* If we still didn't find one, make a new temporary. */
831 if (selected == 0)
833 HOST_WIDE_INT frame_offset_old = frame_offset;
835 p = ggc_alloc_temp_slot ();
837 /* We are passing an explicit alignment request to assign_stack_local.
838 One side effect of that is assign_stack_local will not round SIZE
839 to ensure the frame offset remains suitably aligned.
841 So for requests which depended on the rounding of SIZE, we go ahead
842 and round it now. We also make sure ALIGNMENT is at least
843 BIGGEST_ALIGNMENT. */
844 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
845 p->slot = assign_stack_local_1 (mode,
846 (mode == BLKmode
847 ? CEIL_ROUND (size,
848 (int) align
849 / BITS_PER_UNIT)
850 : size),
851 align, 0);
853 p->align = align;
855 /* The following slot size computation is necessary because we don't
856 know the actual size of the temporary slot until assign_stack_local
857 has performed all the frame alignment and size rounding for the
858 requested temporary. Note that extra space added for alignment
859 can be either above or below this stack slot depending on which
860 way the frame grows. We include the extra space if and only if it
861 is above this slot. */
862 if (FRAME_GROWS_DOWNWARD)
863 p->size = frame_offset_old - frame_offset;
864 else
865 p->size = size;
867 /* Now define the fields used by combine_temp_slots. */
868 if (FRAME_GROWS_DOWNWARD)
870 p->base_offset = frame_offset;
871 p->full_size = frame_offset_old - frame_offset;
873 else
875 p->base_offset = frame_offset_old;
876 p->full_size = frame_offset - frame_offset_old;
879 selected = p;
882 p = selected;
883 p->in_use = 1;
884 p->type = type;
885 p->level = temp_slot_level;
886 n_temp_slots_in_use++;
888 pp = temp_slots_at_level (p->level);
889 insert_slot_to_list (p, pp);
890 insert_temp_slot_address (XEXP (p->slot, 0), p);
892 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
893 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
894 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
896 /* If we know the alias set for the memory that will be used, use
897 it. If there's no TYPE, then we don't know anything about the
898 alias set for the memory. */
899 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
900 set_mem_align (slot, align);
902 /* If a type is specified, set the relevant flags. */
903 if (type != 0)
904 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
905 MEM_NOTRAP_P (slot) = 1;
907 return slot;
910 /* Allocate a temporary stack slot and record it for possible later
911 reuse. First two arguments are same as in preceding function. */
914 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size)
916 return assign_stack_temp_for_type (mode, size, NULL_TREE);
919 /* Assign a temporary.
920 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
921 and so that should be used in error messages. In either case, we
922 allocate of the given type.
923 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
924 it is 0 if a register is OK.
925 DONT_PROMOTE is 1 if we should not promote values in register
926 to wider modes. */
929 assign_temp (tree type_or_decl, int memory_required,
930 int dont_promote ATTRIBUTE_UNUSED)
932 tree type, decl;
933 enum machine_mode mode;
934 #ifdef PROMOTE_MODE
935 int unsignedp;
936 #endif
938 if (DECL_P (type_or_decl))
939 decl = type_or_decl, type = TREE_TYPE (decl);
940 else
941 decl = NULL, type = type_or_decl;
943 mode = TYPE_MODE (type);
944 #ifdef PROMOTE_MODE
945 unsignedp = TYPE_UNSIGNED (type);
946 #endif
948 if (mode == BLKmode || memory_required)
950 HOST_WIDE_INT size = int_size_in_bytes (type);
951 rtx tmp;
953 /* Zero sized arrays are GNU C extension. Set size to 1 to avoid
954 problems with allocating the stack space. */
955 if (size == 0)
956 size = 1;
958 /* Unfortunately, we don't yet know how to allocate variable-sized
959 temporaries. However, sometimes we can find a fixed upper limit on
960 the size, so try that instead. */
961 else if (size == -1)
962 size = max_int_size_in_bytes (type);
964 /* The size of the temporary may be too large to fit into an integer. */
965 /* ??? Not sure this should happen except for user silliness, so limit
966 this to things that aren't compiler-generated temporaries. The
967 rest of the time we'll die in assign_stack_temp_for_type. */
968 if (decl && size == -1
969 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
971 error ("size of variable %q+D is too large", decl);
972 size = 1;
975 tmp = assign_stack_temp_for_type (mode, size, type);
976 return tmp;
979 #ifdef PROMOTE_MODE
980 if (! dont_promote)
981 mode = promote_mode (type, mode, &unsignedp);
982 #endif
984 return gen_reg_rtx (mode);
987 /* Combine temporary stack slots which are adjacent on the stack.
989 This allows for better use of already allocated stack space. This is only
990 done for BLKmode slots because we can be sure that we won't have alignment
991 problems in this case. */
993 static void
994 combine_temp_slots (void)
996 struct temp_slot *p, *q, *next, *next_q;
997 int num_slots;
999 /* We can't combine slots, because the information about which slot
1000 is in which alias set will be lost. */
1001 if (flag_strict_aliasing)
1002 return;
1004 /* If there are a lot of temp slots, don't do anything unless
1005 high levels of optimization. */
1006 if (! flag_expensive_optimizations)
1007 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1008 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1009 return;
1011 for (p = avail_temp_slots; p; p = next)
1013 int delete_p = 0;
1015 next = p->next;
1017 if (GET_MODE (p->slot) != BLKmode)
1018 continue;
1020 for (q = p->next; q; q = next_q)
1022 int delete_q = 0;
1024 next_q = q->next;
1026 if (GET_MODE (q->slot) != BLKmode)
1027 continue;
1029 if (p->base_offset + p->full_size == q->base_offset)
1031 /* Q comes after P; combine Q into P. */
1032 p->size += q->size;
1033 p->full_size += q->full_size;
1034 delete_q = 1;
1036 else if (q->base_offset + q->full_size == p->base_offset)
1038 /* P comes after Q; combine P into Q. */
1039 q->size += p->size;
1040 q->full_size += p->full_size;
1041 delete_p = 1;
1042 break;
1044 if (delete_q)
1045 cut_slot_from_list (q, &avail_temp_slots);
1048 /* Either delete P or advance past it. */
1049 if (delete_p)
1050 cut_slot_from_list (p, &avail_temp_slots);
1054 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1055 slot that previously was known by OLD_RTX. */
1057 void
1058 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1060 struct temp_slot *p;
1062 if (rtx_equal_p (old_rtx, new_rtx))
1063 return;
1065 p = find_temp_slot_from_address (old_rtx);
1067 /* If we didn't find one, see if both OLD_RTX is a PLUS. If so, and
1068 NEW_RTX is a register, see if one operand of the PLUS is a
1069 temporary location. If so, NEW_RTX points into it. Otherwise,
1070 if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
1071 in common between them. If so, try a recursive call on those
1072 values. */
1073 if (p == 0)
1075 if (GET_CODE (old_rtx) != PLUS)
1076 return;
1078 if (REG_P (new_rtx))
1080 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1081 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1082 return;
1084 else if (GET_CODE (new_rtx) != PLUS)
1085 return;
1087 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1088 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1089 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1090 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1091 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1092 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1093 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1094 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1096 return;
1099 /* Otherwise add an alias for the temp's address. */
1100 insert_temp_slot_address (new_rtx, p);
1103 /* If X could be a reference to a temporary slot, mark that slot as
1104 belonging to the to one level higher than the current level. If X
1105 matched one of our slots, just mark that one. Otherwise, we can't
1106 easily predict which it is, so upgrade all of them.
1108 This is called when an ({...}) construct occurs and a statement
1109 returns a value in memory. */
1111 void
1112 preserve_temp_slots (rtx x)
1114 struct temp_slot *p = 0, *next;
1116 if (x == 0)
1117 return;
1119 /* If X is a register that is being used as a pointer, see if we have
1120 a temporary slot we know it points to. */
1121 if (REG_P (x) && REG_POINTER (x))
1122 p = find_temp_slot_from_address (x);
1124 /* If X is not in memory or is at a constant address, it cannot be in
1125 a temporary slot. */
1126 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1127 return;
1129 /* First see if we can find a match. */
1130 if (p == 0)
1131 p = find_temp_slot_from_address (XEXP (x, 0));
1133 if (p != 0)
1135 if (p->level == temp_slot_level)
1136 move_slot_to_level (p, temp_slot_level - 1);
1137 return;
1140 /* Otherwise, preserve all non-kept slots at this level. */
1141 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1143 next = p->next;
1144 move_slot_to_level (p, temp_slot_level - 1);
1148 /* Free all temporaries used so far. This is normally called at the
1149 end of generating code for a statement. */
1151 void
1152 free_temp_slots (void)
1154 struct temp_slot *p, *next;
1155 bool some_available = false;
1157 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1159 next = p->next;
1160 make_slot_available (p);
1161 some_available = true;
1164 if (some_available)
1166 remove_unused_temp_slot_addresses ();
1167 combine_temp_slots ();
1171 /* Push deeper into the nesting level for stack temporaries. */
1173 void
1174 push_temp_slots (void)
1176 temp_slot_level++;
1179 /* Pop a temporary nesting level. All slots in use in the current level
1180 are freed. */
1182 void
1183 pop_temp_slots (void)
1185 free_temp_slots ();
1186 temp_slot_level--;
1189 /* Initialize temporary slots. */
1191 void
1192 init_temp_slots (void)
1194 /* We have not allocated any temporaries yet. */
1195 avail_temp_slots = 0;
1196 vec_alloc (used_temp_slots, 0);
1197 temp_slot_level = 0;
1198 n_temp_slots_in_use = 0;
1200 /* Set up the table to map addresses to temp slots. */
1201 if (! temp_slot_address_table)
1202 temp_slot_address_table = htab_create_ggc (32,
1203 temp_slot_address_hash,
1204 temp_slot_address_eq,
1205 NULL);
1206 else
1207 htab_empty (temp_slot_address_table);
1210 /* Functions and data structures to keep track of the values hard regs
1211 had at the start of the function. */
1213 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1214 and has_hard_reg_initial_val.. */
1215 typedef struct GTY(()) initial_value_pair {
1216 rtx hard_reg;
1217 rtx pseudo;
1218 } initial_value_pair;
1219 /* ??? This could be a VEC but there is currently no way to define an
1220 opaque VEC type. This could be worked around by defining struct
1221 initial_value_pair in function.h. */
1222 typedef struct GTY(()) initial_value_struct {
1223 int num_entries;
1224 int max_entries;
1225 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1226 } initial_value_struct;
1228 /* If a pseudo represents an initial hard reg (or expression), return
1229 it, else return NULL_RTX. */
1232 get_hard_reg_initial_reg (rtx reg)
1234 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1235 int i;
1237 if (ivs == 0)
1238 return NULL_RTX;
1240 for (i = 0; i < ivs->num_entries; i++)
1241 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1242 return ivs->entries[i].hard_reg;
1244 return NULL_RTX;
1247 /* Make sure that there's a pseudo register of mode MODE that stores the
1248 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1251 get_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
1253 struct initial_value_struct *ivs;
1254 rtx rv;
1256 rv = has_hard_reg_initial_val (mode, regno);
1257 if (rv)
1258 return rv;
1260 ivs = crtl->hard_reg_initial_vals;
1261 if (ivs == 0)
1263 ivs = ggc_alloc_initial_value_struct ();
1264 ivs->num_entries = 0;
1265 ivs->max_entries = 5;
1266 ivs->entries = ggc_alloc_vec_initial_value_pair (5);
1267 crtl->hard_reg_initial_vals = ivs;
1270 if (ivs->num_entries >= ivs->max_entries)
1272 ivs->max_entries += 5;
1273 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1274 ivs->max_entries);
1277 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1278 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1280 return ivs->entries[ivs->num_entries++].pseudo;
1283 /* See if get_hard_reg_initial_val has been used to create a pseudo
1284 for the initial value of hard register REGNO in mode MODE. Return
1285 the associated pseudo if so, otherwise return NULL. */
1288 has_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
1290 struct initial_value_struct *ivs;
1291 int i;
1293 ivs = crtl->hard_reg_initial_vals;
1294 if (ivs != 0)
1295 for (i = 0; i < ivs->num_entries; i++)
1296 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1297 && REGNO (ivs->entries[i].hard_reg) == regno)
1298 return ivs->entries[i].pseudo;
1300 return NULL_RTX;
1303 unsigned int
1304 emit_initial_value_sets (void)
1306 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1307 int i;
1308 rtx seq;
1310 if (ivs == 0)
1311 return 0;
1313 start_sequence ();
1314 for (i = 0; i < ivs->num_entries; i++)
1315 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1316 seq = get_insns ();
1317 end_sequence ();
1319 emit_insn_at_entry (seq);
1320 return 0;
1323 /* Return the hardreg-pseudoreg initial values pair entry I and
1324 TRUE if I is a valid entry, or FALSE if I is not a valid entry. */
1325 bool
1326 initial_value_entry (int i, rtx *hreg, rtx *preg)
1328 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1329 if (!ivs || i >= ivs->num_entries)
1330 return false;
1332 *hreg = ivs->entries[i].hard_reg;
1333 *preg = ivs->entries[i].pseudo;
1334 return true;
1337 /* These routines are responsible for converting virtual register references
1338 to the actual hard register references once RTL generation is complete.
1340 The following four variables are used for communication between the
1341 routines. They contain the offsets of the virtual registers from their
1342 respective hard registers. */
1344 static int in_arg_offset;
1345 static int var_offset;
1346 static int dynamic_offset;
1347 static int out_arg_offset;
1348 static int cfa_offset;
1350 /* In most machines, the stack pointer register is equivalent to the bottom
1351 of the stack. */
1353 #ifndef STACK_POINTER_OFFSET
1354 #define STACK_POINTER_OFFSET 0
1355 #endif
1357 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1358 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1359 #endif
1361 /* If not defined, pick an appropriate default for the offset of dynamically
1362 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1363 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1365 #ifndef STACK_DYNAMIC_OFFSET
1367 /* The bottom of the stack points to the actual arguments. If
1368 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1369 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
1370 stack space for register parameters is not pushed by the caller, but
1371 rather part of the fixed stack areas and hence not included in
1372 `crtl->outgoing_args_size'. Nevertheless, we must allow
1373 for it when allocating stack dynamic objects. */
1375 #ifdef INCOMING_REG_PARM_STACK_SPACE
1376 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1377 ((ACCUMULATE_OUTGOING_ARGS \
1378 ? (crtl->outgoing_args_size \
1379 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1380 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1381 : 0) + (STACK_POINTER_OFFSET))
1382 #else
1383 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1384 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1385 + (STACK_POINTER_OFFSET))
1386 #endif
1387 #endif
1390 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1391 is a virtual register, return the equivalent hard register and set the
1392 offset indirectly through the pointer. Otherwise, return 0. */
1394 static rtx
1395 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1397 rtx new_rtx;
1398 HOST_WIDE_INT offset;
1400 if (x == virtual_incoming_args_rtx)
1402 if (stack_realign_drap)
1404 /* Replace virtual_incoming_args_rtx with internal arg
1405 pointer if DRAP is used to realign stack. */
1406 new_rtx = crtl->args.internal_arg_pointer;
1407 offset = 0;
1409 else
1410 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1412 else if (x == virtual_stack_vars_rtx)
1413 new_rtx = frame_pointer_rtx, offset = var_offset;
1414 else if (x == virtual_stack_dynamic_rtx)
1415 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1416 else if (x == virtual_outgoing_args_rtx)
1417 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1418 else if (x == virtual_cfa_rtx)
1420 #ifdef FRAME_POINTER_CFA_OFFSET
1421 new_rtx = frame_pointer_rtx;
1422 #else
1423 new_rtx = arg_pointer_rtx;
1424 #endif
1425 offset = cfa_offset;
1427 else if (x == virtual_preferred_stack_boundary_rtx)
1429 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1430 offset = 0;
1432 else
1433 return NULL_RTX;
1435 *poffset = offset;
1436 return new_rtx;
1439 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1440 Instantiate any virtual registers present inside of *LOC. The expression
1441 is simplified, as much as possible, but is not to be considered "valid"
1442 in any sense implied by the target. If any change is made, set CHANGED
1443 to true. */
1445 static int
1446 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1448 HOST_WIDE_INT offset;
1449 bool *changed = (bool *) data;
1450 rtx x, new_rtx;
1452 x = *loc;
1453 if (x == 0)
1454 return 0;
1456 switch (GET_CODE (x))
1458 case REG:
1459 new_rtx = instantiate_new_reg (x, &offset);
1460 if (new_rtx)
1462 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1463 if (changed)
1464 *changed = true;
1466 return -1;
1468 case PLUS:
1469 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1470 if (new_rtx)
1472 new_rtx = plus_constant (GET_MODE (x), new_rtx, offset);
1473 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
1474 if (changed)
1475 *changed = true;
1476 return -1;
1479 /* FIXME -- from old code */
1480 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1481 we can commute the PLUS and SUBREG because pointers into the
1482 frame are well-behaved. */
1483 break;
1485 default:
1486 break;
1489 return 0;
1492 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1493 matches the predicate for insn CODE operand OPERAND. */
1495 static int
1496 safe_insn_predicate (int code, int operand, rtx x)
1498 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1501 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1502 registers present inside of insn. The result will be a valid insn. */
1504 static void
1505 instantiate_virtual_regs_in_insn (rtx insn)
1507 HOST_WIDE_INT offset;
1508 int insn_code, i;
1509 bool any_change = false;
1510 rtx set, new_rtx, x, seq;
1512 /* There are some special cases to be handled first. */
1513 set = single_set (insn);
1514 if (set)
1516 /* We're allowed to assign to a virtual register. This is interpreted
1517 to mean that the underlying register gets assigned the inverse
1518 transformation. This is used, for example, in the handling of
1519 non-local gotos. */
1520 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1521 if (new_rtx)
1523 start_sequence ();
1525 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1526 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1527 GEN_INT (-offset));
1528 x = force_operand (x, new_rtx);
1529 if (x != new_rtx)
1530 emit_move_insn (new_rtx, x);
1532 seq = get_insns ();
1533 end_sequence ();
1535 emit_insn_before (seq, insn);
1536 delete_insn (insn);
1537 return;
1540 /* Handle a straight copy from a virtual register by generating a
1541 new add insn. The difference between this and falling through
1542 to the generic case is avoiding a new pseudo and eliminating a
1543 move insn in the initial rtl stream. */
1544 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1545 if (new_rtx && offset != 0
1546 && REG_P (SET_DEST (set))
1547 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1549 start_sequence ();
1551 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1552 new_rtx, GEN_INT (offset), SET_DEST (set),
1553 1, OPTAB_LIB_WIDEN);
1554 if (x != SET_DEST (set))
1555 emit_move_insn (SET_DEST (set), x);
1557 seq = get_insns ();
1558 end_sequence ();
1560 emit_insn_before (seq, insn);
1561 delete_insn (insn);
1562 return;
1565 extract_insn (insn);
1566 insn_code = INSN_CODE (insn);
1568 /* Handle a plus involving a virtual register by determining if the
1569 operands remain valid if they're modified in place. */
1570 if (GET_CODE (SET_SRC (set)) == PLUS
1571 && recog_data.n_operands >= 3
1572 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1573 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1574 && CONST_INT_P (recog_data.operand[2])
1575 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1577 offset += INTVAL (recog_data.operand[2]);
1579 /* If the sum is zero, then replace with a plain move. */
1580 if (offset == 0
1581 && REG_P (SET_DEST (set))
1582 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1584 start_sequence ();
1585 emit_move_insn (SET_DEST (set), new_rtx);
1586 seq = get_insns ();
1587 end_sequence ();
1589 emit_insn_before (seq, insn);
1590 delete_insn (insn);
1591 return;
1594 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1596 /* Using validate_change and apply_change_group here leaves
1597 recog_data in an invalid state. Since we know exactly what
1598 we want to check, do those two by hand. */
1599 if (safe_insn_predicate (insn_code, 1, new_rtx)
1600 && safe_insn_predicate (insn_code, 2, x))
1602 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1603 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1604 any_change = true;
1606 /* Fall through into the regular operand fixup loop in
1607 order to take care of operands other than 1 and 2. */
1611 else
1613 extract_insn (insn);
1614 insn_code = INSN_CODE (insn);
1617 /* In the general case, we expect virtual registers to appear only in
1618 operands, and then only as either bare registers or inside memories. */
1619 for (i = 0; i < recog_data.n_operands; ++i)
1621 x = recog_data.operand[i];
1622 switch (GET_CODE (x))
1624 case MEM:
1626 rtx addr = XEXP (x, 0);
1627 bool changed = false;
1629 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1630 if (!changed)
1631 continue;
1633 start_sequence ();
1634 x = replace_equiv_address (x, addr);
1635 /* It may happen that the address with the virtual reg
1636 was valid (e.g. based on the virtual stack reg, which might
1637 be acceptable to the predicates with all offsets), whereas
1638 the address now isn't anymore, for instance when the address
1639 is still offsetted, but the base reg isn't virtual-stack-reg
1640 anymore. Below we would do a force_reg on the whole operand,
1641 but this insn might actually only accept memory. Hence,
1642 before doing that last resort, try to reload the address into
1643 a register, so this operand stays a MEM. */
1644 if (!safe_insn_predicate (insn_code, i, x))
1646 addr = force_reg (GET_MODE (addr), addr);
1647 x = replace_equiv_address (x, addr);
1649 seq = get_insns ();
1650 end_sequence ();
1651 if (seq)
1652 emit_insn_before (seq, insn);
1654 break;
1656 case REG:
1657 new_rtx = instantiate_new_reg (x, &offset);
1658 if (new_rtx == NULL)
1659 continue;
1660 if (offset == 0)
1661 x = new_rtx;
1662 else
1664 start_sequence ();
1666 /* Careful, special mode predicates may have stuff in
1667 insn_data[insn_code].operand[i].mode that isn't useful
1668 to us for computing a new value. */
1669 /* ??? Recognize address_operand and/or "p" constraints
1670 to see if (plus new offset) is a valid before we put
1671 this through expand_simple_binop. */
1672 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1673 GEN_INT (offset), NULL_RTX,
1674 1, OPTAB_LIB_WIDEN);
1675 seq = get_insns ();
1676 end_sequence ();
1677 emit_insn_before (seq, insn);
1679 break;
1681 case SUBREG:
1682 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1683 if (new_rtx == NULL)
1684 continue;
1685 if (offset != 0)
1687 start_sequence ();
1688 new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
1689 GEN_INT (offset), NULL_RTX,
1690 1, OPTAB_LIB_WIDEN);
1691 seq = get_insns ();
1692 end_sequence ();
1693 emit_insn_before (seq, insn);
1695 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1696 GET_MODE (new_rtx), SUBREG_BYTE (x));
1697 gcc_assert (x);
1698 break;
1700 default:
1701 continue;
1704 /* At this point, X contains the new value for the operand.
1705 Validate the new value vs the insn predicate. Note that
1706 asm insns will have insn_code -1 here. */
1707 if (!safe_insn_predicate (insn_code, i, x))
1709 start_sequence ();
1710 if (REG_P (x))
1712 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1713 x = copy_to_reg (x);
1715 else
1716 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1717 seq = get_insns ();
1718 end_sequence ();
1719 if (seq)
1720 emit_insn_before (seq, insn);
1723 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1724 any_change = true;
1727 if (any_change)
1729 /* Propagate operand changes into the duplicates. */
1730 for (i = 0; i < recog_data.n_dups; ++i)
1731 *recog_data.dup_loc[i]
1732 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1734 /* Force re-recognition of the instruction for validation. */
1735 INSN_CODE (insn) = -1;
1738 if (asm_noperands (PATTERN (insn)) >= 0)
1740 if (!check_asm_operands (PATTERN (insn)))
1742 error_for_asm (insn, "impossible constraint in %<asm%>");
1743 /* For asm goto, instead of fixing up all the edges
1744 just clear the template and clear input operands
1745 (asm goto doesn't have any output operands). */
1746 if (JUMP_P (insn))
1748 rtx asm_op = extract_asm_operands (PATTERN (insn));
1749 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1750 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1751 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1753 else
1754 delete_insn (insn);
1757 else
1759 if (recog_memoized (insn) < 0)
1760 fatal_insn_not_found (insn);
1764 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1765 do any instantiation required. */
1767 void
1768 instantiate_decl_rtl (rtx x)
1770 rtx addr;
1772 if (x == 0)
1773 return;
1775 /* If this is a CONCAT, recurse for the pieces. */
1776 if (GET_CODE (x) == CONCAT)
1778 instantiate_decl_rtl (XEXP (x, 0));
1779 instantiate_decl_rtl (XEXP (x, 1));
1780 return;
1783 /* If this is not a MEM, no need to do anything. Similarly if the
1784 address is a constant or a register that is not a virtual register. */
1785 if (!MEM_P (x))
1786 return;
1788 addr = XEXP (x, 0);
1789 if (CONSTANT_P (addr)
1790 || (REG_P (addr)
1791 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1792 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1793 return;
1795 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1798 /* Helper for instantiate_decls called via walk_tree: Process all decls
1799 in the given DECL_VALUE_EXPR. */
1801 static tree
1802 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1804 tree t = *tp;
1805 if (! EXPR_P (t))
1807 *walk_subtrees = 0;
1808 if (DECL_P (t))
1810 if (DECL_RTL_SET_P (t))
1811 instantiate_decl_rtl (DECL_RTL (t));
1812 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1813 && DECL_INCOMING_RTL (t))
1814 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1815 if ((TREE_CODE (t) == VAR_DECL
1816 || TREE_CODE (t) == RESULT_DECL)
1817 && DECL_HAS_VALUE_EXPR_P (t))
1819 tree v = DECL_VALUE_EXPR (t);
1820 walk_tree (&v, instantiate_expr, NULL, NULL);
1824 return NULL;
1827 /* Subroutine of instantiate_decls: Process all decls in the given
1828 BLOCK node and all its subblocks. */
1830 static void
1831 instantiate_decls_1 (tree let)
1833 tree t;
1835 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1837 if (DECL_RTL_SET_P (t))
1838 instantiate_decl_rtl (DECL_RTL (t));
1839 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1841 tree v = DECL_VALUE_EXPR (t);
1842 walk_tree (&v, instantiate_expr, NULL, NULL);
1846 /* Process all subblocks. */
1847 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1848 instantiate_decls_1 (t);
1851 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1852 all virtual registers in their DECL_RTL's. */
1854 static void
1855 instantiate_decls (tree fndecl)
1857 tree decl;
1858 unsigned ix;
1860 /* Process all parameters of the function. */
1861 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1863 instantiate_decl_rtl (DECL_RTL (decl));
1864 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1865 if (DECL_HAS_VALUE_EXPR_P (decl))
1867 tree v = DECL_VALUE_EXPR (decl);
1868 walk_tree (&v, instantiate_expr, NULL, NULL);
1872 if ((decl = DECL_RESULT (fndecl))
1873 && TREE_CODE (decl) == RESULT_DECL)
1875 if (DECL_RTL_SET_P (decl))
1876 instantiate_decl_rtl (DECL_RTL (decl));
1877 if (DECL_HAS_VALUE_EXPR_P (decl))
1879 tree v = DECL_VALUE_EXPR (decl);
1880 walk_tree (&v, instantiate_expr, NULL, NULL);
1884 /* Now process all variables defined in the function or its subblocks. */
1885 instantiate_decls_1 (DECL_INITIAL (fndecl));
1887 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1888 if (DECL_RTL_SET_P (decl))
1889 instantiate_decl_rtl (DECL_RTL (decl));
1890 vec_free (cfun->local_decls);
1893 /* Pass through the INSNS of function FNDECL and convert virtual register
1894 references to hard register references. */
1896 static unsigned int
1897 instantiate_virtual_regs (void)
1899 rtx insn;
1901 /* Compute the offsets to use for this function. */
1902 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1903 var_offset = STARTING_FRAME_OFFSET;
1904 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1905 out_arg_offset = STACK_POINTER_OFFSET;
1906 #ifdef FRAME_POINTER_CFA_OFFSET
1907 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1908 #else
1909 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1910 #endif
1912 /* Initialize recognition, indicating that volatile is OK. */
1913 init_recog ();
1915 /* Scan through all the insns, instantiating every virtual register still
1916 present. */
1917 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1918 if (INSN_P (insn))
1920 /* These patterns in the instruction stream can never be recognized.
1921 Fortunately, they shouldn't contain virtual registers either. */
1922 if (GET_CODE (PATTERN (insn)) == USE
1923 || GET_CODE (PATTERN (insn)) == CLOBBER
1924 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1925 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1926 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1927 continue;
1928 else if (DEBUG_INSN_P (insn))
1929 for_each_rtx (&INSN_VAR_LOCATION (insn),
1930 instantiate_virtual_regs_in_rtx, NULL);
1931 else
1932 instantiate_virtual_regs_in_insn (insn);
1934 if (INSN_DELETED_P (insn))
1935 continue;
1937 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1939 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1940 if (CALL_P (insn))
1941 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1942 instantiate_virtual_regs_in_rtx, NULL);
1945 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1946 instantiate_decls (current_function_decl);
1948 targetm.instantiate_decls ();
1950 /* Indicate that, from now on, assign_stack_local should use
1951 frame_pointer_rtx. */
1952 virtuals_instantiated = 1;
1954 return 0;
1957 struct rtl_opt_pass pass_instantiate_virtual_regs =
1960 RTL_PASS,
1961 "vregs", /* name */
1962 OPTGROUP_NONE, /* optinfo_flags */
1963 NULL, /* gate */
1964 instantiate_virtual_regs, /* execute */
1965 NULL, /* sub */
1966 NULL, /* next */
1967 0, /* static_pass_number */
1968 TV_NONE, /* tv_id */
1969 0, /* properties_required */
1970 0, /* properties_provided */
1971 0, /* properties_destroyed */
1972 0, /* todo_flags_start */
1973 0 /* todo_flags_finish */
1978 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1979 This means a type for which function calls must pass an address to the
1980 function or get an address back from the function.
1981 EXP may be a type node or an expression (whose type is tested). */
1984 aggregate_value_p (const_tree exp, const_tree fntype)
1986 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1987 int i, regno, nregs;
1988 rtx reg;
1990 if (fntype)
1991 switch (TREE_CODE (fntype))
1993 case CALL_EXPR:
1995 tree fndecl = get_callee_fndecl (fntype);
1996 fntype = (fndecl
1997 ? TREE_TYPE (fndecl)
1998 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
2000 break;
2001 case FUNCTION_DECL:
2002 fntype = TREE_TYPE (fntype);
2003 break;
2004 case FUNCTION_TYPE:
2005 case METHOD_TYPE:
2006 break;
2007 case IDENTIFIER_NODE:
2008 fntype = NULL_TREE;
2009 break;
2010 default:
2011 /* We don't expect other tree types here. */
2012 gcc_unreachable ();
2015 if (VOID_TYPE_P (type))
2016 return 0;
2018 /* If a record should be passed the same as its first (and only) member
2019 don't pass it as an aggregate. */
2020 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2021 return aggregate_value_p (first_field (type), fntype);
2023 /* If the front end has decided that this needs to be passed by
2024 reference, do so. */
2025 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2026 && DECL_BY_REFERENCE (exp))
2027 return 1;
2029 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2030 if (fntype && TREE_ADDRESSABLE (fntype))
2031 return 1;
2033 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2034 and thus can't be returned in registers. */
2035 if (TREE_ADDRESSABLE (type))
2036 return 1;
2038 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2039 return 1;
2041 if (targetm.calls.return_in_memory (type, fntype))
2042 return 1;
2044 /* Make sure we have suitable call-clobbered regs to return
2045 the value in; if not, we must return it in memory. */
2046 reg = hard_function_value (type, 0, fntype, 0);
2048 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2049 it is OK. */
2050 if (!REG_P (reg))
2051 return 0;
2053 regno = REGNO (reg);
2054 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2055 for (i = 0; i < nregs; i++)
2056 if (! call_used_regs[regno + i])
2057 return 1;
2059 return 0;
2062 /* Return true if we should assign DECL a pseudo register; false if it
2063 should live on the local stack. */
2065 bool
2066 use_register_for_decl (const_tree decl)
2068 if (!targetm.calls.allocate_stack_slots_for_args())
2069 return true;
2071 /* Honor volatile. */
2072 if (TREE_SIDE_EFFECTS (decl))
2073 return false;
2075 /* Honor addressability. */
2076 if (TREE_ADDRESSABLE (decl))
2077 return false;
2079 /* Only register-like things go in registers. */
2080 if (DECL_MODE (decl) == BLKmode)
2081 return false;
2083 /* If -ffloat-store specified, don't put explicit float variables
2084 into registers. */
2085 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2086 propagates values across these stores, and it probably shouldn't. */
2087 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2088 return false;
2090 /* If we're not interested in tracking debugging information for
2091 this decl, then we can certainly put it in a register. */
2092 if (DECL_IGNORED_P (decl))
2093 return true;
2095 if (optimize)
2096 return true;
2098 if (!DECL_REGISTER (decl))
2099 return false;
2101 switch (TREE_CODE (TREE_TYPE (decl)))
2103 case RECORD_TYPE:
2104 case UNION_TYPE:
2105 case QUAL_UNION_TYPE:
2106 /* When not optimizing, disregard register keyword for variables with
2107 types containing methods, otherwise the methods won't be callable
2108 from the debugger. */
2109 if (TYPE_METHODS (TREE_TYPE (decl)))
2110 return false;
2111 break;
2112 default:
2113 break;
2116 return true;
2119 /* Return true if TYPE should be passed by invisible reference. */
2121 bool
2122 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2123 tree type, bool named_arg)
2125 if (type)
2127 /* If this type contains non-trivial constructors, then it is
2128 forbidden for the middle-end to create any new copies. */
2129 if (TREE_ADDRESSABLE (type))
2130 return true;
2132 /* GCC post 3.4 passes *all* variable sized types by reference. */
2133 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2134 return true;
2136 /* If a record type should be passed the same as its first (and only)
2137 member, use the type and mode of that member. */
2138 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2140 type = TREE_TYPE (first_field (type));
2141 mode = TYPE_MODE (type);
2145 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2146 type, named_arg);
2149 /* Return true if TYPE, which is passed by reference, should be callee
2150 copied instead of caller copied. */
2152 bool
2153 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2154 tree type, bool named_arg)
2156 if (type && TREE_ADDRESSABLE (type))
2157 return false;
2158 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2159 named_arg);
2162 /* Structures to communicate between the subroutines of assign_parms.
2163 The first holds data persistent across all parameters, the second
2164 is cleared out for each parameter. */
2166 struct assign_parm_data_all
2168 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2169 should become a job of the target or otherwise encapsulated. */
2170 CUMULATIVE_ARGS args_so_far_v;
2171 cumulative_args_t args_so_far;
2172 struct args_size stack_args_size;
2173 tree function_result_decl;
2174 tree orig_fnargs;
2175 rtx first_conversion_insn;
2176 rtx last_conversion_insn;
2177 HOST_WIDE_INT pretend_args_size;
2178 HOST_WIDE_INT extra_pretend_bytes;
2179 int reg_parm_stack_space;
2182 struct assign_parm_data_one
2184 tree nominal_type;
2185 tree passed_type;
2186 rtx entry_parm;
2187 rtx stack_parm;
2188 enum machine_mode nominal_mode;
2189 enum machine_mode passed_mode;
2190 enum machine_mode promoted_mode;
2191 struct locate_and_pad_arg_data locate;
2192 int partial;
2193 BOOL_BITFIELD named_arg : 1;
2194 BOOL_BITFIELD passed_pointer : 1;
2195 BOOL_BITFIELD on_stack : 1;
2196 BOOL_BITFIELD loaded_in_reg : 1;
2199 /* A subroutine of assign_parms. Initialize ALL. */
2201 static void
2202 assign_parms_initialize_all (struct assign_parm_data_all *all)
2204 tree fntype ATTRIBUTE_UNUSED;
2206 memset (all, 0, sizeof (*all));
2208 fntype = TREE_TYPE (current_function_decl);
2210 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2211 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2212 #else
2213 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2214 current_function_decl, -1);
2215 #endif
2216 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2218 #ifdef INCOMING_REG_PARM_STACK_SPACE
2219 all->reg_parm_stack_space
2220 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2221 #endif
2224 /* If ARGS contains entries with complex types, split the entry into two
2225 entries of the component type. Return a new list of substitutions are
2226 needed, else the old list. */
2228 static void
2229 split_complex_args (vec<tree> *args)
2231 unsigned i;
2232 tree p;
2234 FOR_EACH_VEC_ELT (*args, i, p)
2236 tree type = TREE_TYPE (p);
2237 if (TREE_CODE (type) == COMPLEX_TYPE
2238 && targetm.calls.split_complex_arg (type))
2240 tree decl;
2241 tree subtype = TREE_TYPE (type);
2242 bool addressable = TREE_ADDRESSABLE (p);
2244 /* Rewrite the PARM_DECL's type with its component. */
2245 p = copy_node (p);
2246 TREE_TYPE (p) = subtype;
2247 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2248 DECL_MODE (p) = VOIDmode;
2249 DECL_SIZE (p) = NULL;
2250 DECL_SIZE_UNIT (p) = NULL;
2251 /* If this arg must go in memory, put it in a pseudo here.
2252 We can't allow it to go in memory as per normal parms,
2253 because the usual place might not have the imag part
2254 adjacent to the real part. */
2255 DECL_ARTIFICIAL (p) = addressable;
2256 DECL_IGNORED_P (p) = addressable;
2257 TREE_ADDRESSABLE (p) = 0;
2258 layout_decl (p, 0);
2259 (*args)[i] = p;
2261 /* Build a second synthetic decl. */
2262 decl = build_decl (EXPR_LOCATION (p),
2263 PARM_DECL, NULL_TREE, subtype);
2264 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2265 DECL_ARTIFICIAL (decl) = addressable;
2266 DECL_IGNORED_P (decl) = addressable;
2267 layout_decl (decl, 0);
2268 args->safe_insert (++i, decl);
2273 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2274 the hidden struct return argument, and (abi willing) complex args.
2275 Return the new parameter list. */
2277 static vec<tree>
2278 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2280 tree fndecl = current_function_decl;
2281 tree fntype = TREE_TYPE (fndecl);
2282 vec<tree> fnargs = vNULL;
2283 tree arg;
2285 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2286 fnargs.safe_push (arg);
2288 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2290 /* If struct value address is treated as the first argument, make it so. */
2291 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2292 && ! cfun->returns_pcc_struct
2293 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2295 tree type = build_pointer_type (TREE_TYPE (fntype));
2296 tree decl;
2298 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2299 PARM_DECL, get_identifier (".result_ptr"), type);
2300 DECL_ARG_TYPE (decl) = type;
2301 DECL_ARTIFICIAL (decl) = 1;
2302 DECL_NAMELESS (decl) = 1;
2303 TREE_CONSTANT (decl) = 1;
2305 DECL_CHAIN (decl) = all->orig_fnargs;
2306 all->orig_fnargs = decl;
2307 fnargs.safe_insert (0, decl);
2309 all->function_result_decl = decl;
2312 /* If the target wants to split complex arguments into scalars, do so. */
2313 if (targetm.calls.split_complex_arg)
2314 split_complex_args (&fnargs);
2316 return fnargs;
2319 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2320 data for the parameter. Incorporate ABI specifics such as pass-by-
2321 reference and type promotion. */
2323 static void
2324 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2325 struct assign_parm_data_one *data)
2327 tree nominal_type, passed_type;
2328 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2329 int unsignedp;
2331 memset (data, 0, sizeof (*data));
2333 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2334 if (!cfun->stdarg)
2335 data->named_arg = 1; /* No variadic parms. */
2336 else if (DECL_CHAIN (parm))
2337 data->named_arg = 1; /* Not the last non-variadic parm. */
2338 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2339 data->named_arg = 1; /* Only variadic ones are unnamed. */
2340 else
2341 data->named_arg = 0; /* Treat as variadic. */
2343 nominal_type = TREE_TYPE (parm);
2344 passed_type = DECL_ARG_TYPE (parm);
2346 /* Look out for errors propagating this far. Also, if the parameter's
2347 type is void then its value doesn't matter. */
2348 if (TREE_TYPE (parm) == error_mark_node
2349 /* This can happen after weird syntax errors
2350 or if an enum type is defined among the parms. */
2351 || TREE_CODE (parm) != PARM_DECL
2352 || passed_type == NULL
2353 || VOID_TYPE_P (nominal_type))
2355 nominal_type = passed_type = void_type_node;
2356 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2357 goto egress;
2360 /* Find mode of arg as it is passed, and mode of arg as it should be
2361 during execution of this function. */
2362 passed_mode = TYPE_MODE (passed_type);
2363 nominal_mode = TYPE_MODE (nominal_type);
2365 /* If the parm is to be passed as a transparent union or record, use the
2366 type of the first field for the tests below. We have already verified
2367 that the modes are the same. */
2368 if ((TREE_CODE (passed_type) == UNION_TYPE
2369 || TREE_CODE (passed_type) == RECORD_TYPE)
2370 && TYPE_TRANSPARENT_AGGR (passed_type))
2371 passed_type = TREE_TYPE (first_field (passed_type));
2373 /* See if this arg was passed by invisible reference. */
2374 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2375 passed_type, data->named_arg))
2377 passed_type = nominal_type = build_pointer_type (passed_type);
2378 data->passed_pointer = true;
2379 passed_mode = nominal_mode = Pmode;
2382 /* Find mode as it is passed by the ABI. */
2383 unsignedp = TYPE_UNSIGNED (passed_type);
2384 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2385 TREE_TYPE (current_function_decl), 0);
2387 egress:
2388 data->nominal_type = nominal_type;
2389 data->passed_type = passed_type;
2390 data->nominal_mode = nominal_mode;
2391 data->passed_mode = passed_mode;
2392 data->promoted_mode = promoted_mode;
2395 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2397 static void
2398 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2399 struct assign_parm_data_one *data, bool no_rtl)
2401 int varargs_pretend_bytes = 0;
2403 targetm.calls.setup_incoming_varargs (all->args_so_far,
2404 data->promoted_mode,
2405 data->passed_type,
2406 &varargs_pretend_bytes, no_rtl);
2408 /* If the back-end has requested extra stack space, record how much is
2409 needed. Do not change pretend_args_size otherwise since it may be
2410 nonzero from an earlier partial argument. */
2411 if (varargs_pretend_bytes > 0)
2412 all->pretend_args_size = varargs_pretend_bytes;
2415 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2416 the incoming location of the current parameter. */
2418 static void
2419 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2420 struct assign_parm_data_one *data)
2422 HOST_WIDE_INT pretend_bytes = 0;
2423 rtx entry_parm;
2424 bool in_regs;
2426 if (data->promoted_mode == VOIDmode)
2428 data->entry_parm = data->stack_parm = const0_rtx;
2429 return;
2432 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2433 data->promoted_mode,
2434 data->passed_type,
2435 data->named_arg);
2437 if (entry_parm == 0)
2438 data->promoted_mode = data->passed_mode;
2440 /* Determine parm's home in the stack, in case it arrives in the stack
2441 or we should pretend it did. Compute the stack position and rtx where
2442 the argument arrives and its size.
2444 There is one complexity here: If this was a parameter that would
2445 have been passed in registers, but wasn't only because it is
2446 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2447 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2448 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2449 as it was the previous time. */
2450 in_regs = entry_parm != 0;
2451 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2452 in_regs = true;
2453 #endif
2454 if (!in_regs && !data->named_arg)
2456 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2458 rtx tem;
2459 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2460 data->promoted_mode,
2461 data->passed_type, true);
2462 in_regs = tem != NULL;
2466 /* If this parameter was passed both in registers and in the stack, use
2467 the copy on the stack. */
2468 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2469 data->passed_type))
2470 entry_parm = 0;
2472 if (entry_parm)
2474 int partial;
2476 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2477 data->promoted_mode,
2478 data->passed_type,
2479 data->named_arg);
2480 data->partial = partial;
2482 /* The caller might already have allocated stack space for the
2483 register parameters. */
2484 if (partial != 0 && all->reg_parm_stack_space == 0)
2486 /* Part of this argument is passed in registers and part
2487 is passed on the stack. Ask the prologue code to extend
2488 the stack part so that we can recreate the full value.
2490 PRETEND_BYTES is the size of the registers we need to store.
2491 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2492 stack space that the prologue should allocate.
2494 Internally, gcc assumes that the argument pointer is aligned
2495 to STACK_BOUNDARY bits. This is used both for alignment
2496 optimizations (see init_emit) and to locate arguments that are
2497 aligned to more than PARM_BOUNDARY bits. We must preserve this
2498 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2499 a stack boundary. */
2501 /* We assume at most one partial arg, and it must be the first
2502 argument on the stack. */
2503 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2505 pretend_bytes = partial;
2506 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2508 /* We want to align relative to the actual stack pointer, so
2509 don't include this in the stack size until later. */
2510 all->extra_pretend_bytes = all->pretend_args_size;
2514 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2515 all->reg_parm_stack_space,
2516 entry_parm ? data->partial : 0, current_function_decl,
2517 &all->stack_args_size, &data->locate);
2519 /* Update parm_stack_boundary if this parameter is passed in the
2520 stack. */
2521 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2522 crtl->parm_stack_boundary = data->locate.boundary;
2524 /* Adjust offsets to include the pretend args. */
2525 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2526 data->locate.slot_offset.constant += pretend_bytes;
2527 data->locate.offset.constant += pretend_bytes;
2529 data->entry_parm = entry_parm;
2532 /* A subroutine of assign_parms. If there is actually space on the stack
2533 for this parm, count it in stack_args_size and return true. */
2535 static bool
2536 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2537 struct assign_parm_data_one *data)
2539 /* Trivially true if we've no incoming register. */
2540 if (data->entry_parm == NULL)
2542 /* Also true if we're partially in registers and partially not,
2543 since we've arranged to drop the entire argument on the stack. */
2544 else if (data->partial != 0)
2546 /* Also true if the target says that it's passed in both registers
2547 and on the stack. */
2548 else if (GET_CODE (data->entry_parm) == PARALLEL
2549 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2551 /* Also true if the target says that there's stack allocated for
2552 all register parameters. */
2553 else if (all->reg_parm_stack_space > 0)
2555 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2556 else
2557 return false;
2559 all->stack_args_size.constant += data->locate.size.constant;
2560 if (data->locate.size.var)
2561 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2563 return true;
2566 /* A subroutine of assign_parms. Given that this parameter is allocated
2567 stack space by the ABI, find it. */
2569 static void
2570 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2572 rtx offset_rtx, stack_parm;
2573 unsigned int align, boundary;
2575 /* If we're passing this arg using a reg, make its stack home the
2576 aligned stack slot. */
2577 if (data->entry_parm)
2578 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2579 else
2580 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2582 stack_parm = crtl->args.internal_arg_pointer;
2583 if (offset_rtx != const0_rtx)
2584 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2585 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2587 if (!data->passed_pointer)
2589 set_mem_attributes (stack_parm, parm, 1);
2590 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2591 while promoted mode's size is needed. */
2592 if (data->promoted_mode != BLKmode
2593 && data->promoted_mode != DECL_MODE (parm))
2595 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2596 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2598 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2599 data->promoted_mode);
2600 if (offset)
2601 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2606 boundary = data->locate.boundary;
2607 align = BITS_PER_UNIT;
2609 /* If we're padding upward, we know that the alignment of the slot
2610 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2611 intentionally forcing upward padding. Otherwise we have to come
2612 up with a guess at the alignment based on OFFSET_RTX. */
2613 if (data->locate.where_pad != downward || data->entry_parm)
2614 align = boundary;
2615 else if (CONST_INT_P (offset_rtx))
2617 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2618 align = align & -align;
2620 set_mem_align (stack_parm, align);
2622 if (data->entry_parm)
2623 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2625 data->stack_parm = stack_parm;
2628 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2629 always valid and contiguous. */
2631 static void
2632 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2634 rtx entry_parm = data->entry_parm;
2635 rtx stack_parm = data->stack_parm;
2637 /* If this parm was passed part in regs and part in memory, pretend it
2638 arrived entirely in memory by pushing the register-part onto the stack.
2639 In the special case of a DImode or DFmode that is split, we could put
2640 it together in a pseudoreg directly, but for now that's not worth
2641 bothering with. */
2642 if (data->partial != 0)
2644 /* Handle calls that pass values in multiple non-contiguous
2645 locations. The Irix 6 ABI has examples of this. */
2646 if (GET_CODE (entry_parm) == PARALLEL)
2647 emit_group_store (validize_mem (stack_parm), entry_parm,
2648 data->passed_type,
2649 int_size_in_bytes (data->passed_type));
2650 else
2652 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2653 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2654 data->partial / UNITS_PER_WORD);
2657 entry_parm = stack_parm;
2660 /* If we didn't decide this parm came in a register, by default it came
2661 on the stack. */
2662 else if (entry_parm == NULL)
2663 entry_parm = stack_parm;
2665 /* When an argument is passed in multiple locations, we can't make use
2666 of this information, but we can save some copying if the whole argument
2667 is passed in a single register. */
2668 else if (GET_CODE (entry_parm) == PARALLEL
2669 && data->nominal_mode != BLKmode
2670 && data->passed_mode != BLKmode)
2672 size_t i, len = XVECLEN (entry_parm, 0);
2674 for (i = 0; i < len; i++)
2675 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2676 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2677 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2678 == data->passed_mode)
2679 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2681 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2682 break;
2686 data->entry_parm = entry_parm;
2689 /* A subroutine of assign_parms. Reconstitute any values which were
2690 passed in multiple registers and would fit in a single register. */
2692 static void
2693 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2695 rtx entry_parm = data->entry_parm;
2697 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2698 This can be done with register operations rather than on the
2699 stack, even if we will store the reconstituted parameter on the
2700 stack later. */
2701 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2703 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2704 emit_group_store (parmreg, entry_parm, data->passed_type,
2705 GET_MODE_SIZE (GET_MODE (entry_parm)));
2706 entry_parm = parmreg;
2709 data->entry_parm = entry_parm;
2712 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2713 always valid and properly aligned. */
2715 static void
2716 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2718 rtx stack_parm = data->stack_parm;
2720 /* If we can't trust the parm stack slot to be aligned enough for its
2721 ultimate type, don't use that slot after entry. We'll make another
2722 stack slot, if we need one. */
2723 if (stack_parm
2724 && ((STRICT_ALIGNMENT
2725 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2726 || (data->nominal_type
2727 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2728 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2729 stack_parm = NULL;
2731 /* If parm was passed in memory, and we need to convert it on entry,
2732 don't store it back in that same slot. */
2733 else if (data->entry_parm == stack_parm
2734 && data->nominal_mode != BLKmode
2735 && data->nominal_mode != data->passed_mode)
2736 stack_parm = NULL;
2738 /* If stack protection is in effect for this function, don't leave any
2739 pointers in their passed stack slots. */
2740 else if (crtl->stack_protect_guard
2741 && (flag_stack_protect == 2
2742 || data->passed_pointer
2743 || POINTER_TYPE_P (data->nominal_type)))
2744 stack_parm = NULL;
2746 data->stack_parm = stack_parm;
2749 /* A subroutine of assign_parms. Return true if the current parameter
2750 should be stored as a BLKmode in the current frame. */
2752 static bool
2753 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2755 if (data->nominal_mode == BLKmode)
2756 return true;
2757 if (GET_MODE (data->entry_parm) == BLKmode)
2758 return true;
2760 #ifdef BLOCK_REG_PADDING
2761 /* Only assign_parm_setup_block knows how to deal with register arguments
2762 that are padded at the least significant end. */
2763 if (REG_P (data->entry_parm)
2764 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2765 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2766 == (BYTES_BIG_ENDIAN ? upward : downward)))
2767 return true;
2768 #endif
2770 return false;
2773 /* A subroutine of assign_parms. Arrange for the parameter to be
2774 present and valid in DATA->STACK_RTL. */
2776 static void
2777 assign_parm_setup_block (struct assign_parm_data_all *all,
2778 tree parm, struct assign_parm_data_one *data)
2780 rtx entry_parm = data->entry_parm;
2781 rtx stack_parm = data->stack_parm;
2782 HOST_WIDE_INT size;
2783 HOST_WIDE_INT size_stored;
2785 if (GET_CODE (entry_parm) == PARALLEL)
2786 entry_parm = emit_group_move_into_temps (entry_parm);
2788 size = int_size_in_bytes (data->passed_type);
2789 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2790 if (stack_parm == 0)
2792 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2793 stack_parm = assign_stack_local (BLKmode, size_stored,
2794 DECL_ALIGN (parm));
2795 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2796 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2797 set_mem_attributes (stack_parm, parm, 1);
2800 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2801 calls that pass values in multiple non-contiguous locations. */
2802 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2804 rtx mem;
2806 /* Note that we will be storing an integral number of words.
2807 So we have to be careful to ensure that we allocate an
2808 integral number of words. We do this above when we call
2809 assign_stack_local if space was not allocated in the argument
2810 list. If it was, this will not work if PARM_BOUNDARY is not
2811 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2812 if it becomes a problem. Exception is when BLKmode arrives
2813 with arguments not conforming to word_mode. */
2815 if (data->stack_parm == 0)
2817 else if (GET_CODE (entry_parm) == PARALLEL)
2819 else
2820 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2822 mem = validize_mem (stack_parm);
2824 /* Handle values in multiple non-contiguous locations. */
2825 if (GET_CODE (entry_parm) == PARALLEL)
2827 push_to_sequence2 (all->first_conversion_insn,
2828 all->last_conversion_insn);
2829 emit_group_store (mem, entry_parm, data->passed_type, size);
2830 all->first_conversion_insn = get_insns ();
2831 all->last_conversion_insn = get_last_insn ();
2832 end_sequence ();
2835 else if (size == 0)
2838 /* If SIZE is that of a mode no bigger than a word, just use
2839 that mode's store operation. */
2840 else if (size <= UNITS_PER_WORD)
2842 enum machine_mode mode
2843 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2845 if (mode != BLKmode
2846 #ifdef BLOCK_REG_PADDING
2847 && (size == UNITS_PER_WORD
2848 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2849 != (BYTES_BIG_ENDIAN ? upward : downward)))
2850 #endif
2853 rtx reg;
2855 /* We are really truncating a word_mode value containing
2856 SIZE bytes into a value of mode MODE. If such an
2857 operation requires no actual instructions, we can refer
2858 to the value directly in mode MODE, otherwise we must
2859 start with the register in word_mode and explicitly
2860 convert it. */
2861 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2862 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2863 else
2865 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2866 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2868 emit_move_insn (change_address (mem, mode, 0), reg);
2871 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2872 machine must be aligned to the left before storing
2873 to memory. Note that the previous test doesn't
2874 handle all cases (e.g. SIZE == 3). */
2875 else if (size != UNITS_PER_WORD
2876 #ifdef BLOCK_REG_PADDING
2877 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2878 == downward)
2879 #else
2880 && BYTES_BIG_ENDIAN
2881 #endif
2884 rtx tem, x;
2885 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2886 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2888 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
2889 tem = change_address (mem, word_mode, 0);
2890 emit_move_insn (tem, x);
2892 else
2893 move_block_from_reg (REGNO (entry_parm), mem,
2894 size_stored / UNITS_PER_WORD);
2896 else
2897 move_block_from_reg (REGNO (entry_parm), mem,
2898 size_stored / UNITS_PER_WORD);
2900 else if (data->stack_parm == 0)
2902 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2903 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2904 BLOCK_OP_NORMAL);
2905 all->first_conversion_insn = get_insns ();
2906 all->last_conversion_insn = get_last_insn ();
2907 end_sequence ();
2910 data->stack_parm = stack_parm;
2911 SET_DECL_RTL (parm, stack_parm);
2914 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2915 parameter. Get it there. Perform all ABI specified conversions. */
2917 static void
2918 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2919 struct assign_parm_data_one *data)
2921 rtx parmreg, validated_mem;
2922 rtx equiv_stack_parm;
2923 enum machine_mode promoted_nominal_mode;
2924 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2925 bool did_conversion = false;
2926 bool need_conversion, moved;
2928 /* Store the parm in a pseudoregister during the function, but we may
2929 need to do it in a wider mode. Using 2 here makes the result
2930 consistent with promote_decl_mode and thus expand_expr_real_1. */
2931 promoted_nominal_mode
2932 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
2933 TREE_TYPE (current_function_decl), 2);
2935 parmreg = gen_reg_rtx (promoted_nominal_mode);
2937 if (!DECL_ARTIFICIAL (parm))
2938 mark_user_reg (parmreg);
2940 /* If this was an item that we received a pointer to,
2941 set DECL_RTL appropriately. */
2942 if (data->passed_pointer)
2944 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2945 set_mem_attributes (x, parm, 1);
2946 SET_DECL_RTL (parm, x);
2948 else
2949 SET_DECL_RTL (parm, parmreg);
2951 assign_parm_remove_parallels (data);
2953 /* Copy the value into the register, thus bridging between
2954 assign_parm_find_data_types and expand_expr_real_1. */
2956 equiv_stack_parm = data->stack_parm;
2957 validated_mem = validize_mem (data->entry_parm);
2959 need_conversion = (data->nominal_mode != data->passed_mode
2960 || promoted_nominal_mode != data->promoted_mode);
2961 moved = false;
2963 if (need_conversion
2964 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2965 && data->nominal_mode == data->passed_mode
2966 && data->nominal_mode == GET_MODE (data->entry_parm))
2968 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2969 mode, by the caller. We now have to convert it to
2970 NOMINAL_MODE, if different. However, PARMREG may be in
2971 a different mode than NOMINAL_MODE if it is being stored
2972 promoted.
2974 If ENTRY_PARM is a hard register, it might be in a register
2975 not valid for operating in its mode (e.g., an odd-numbered
2976 register for a DFmode). In that case, moves are the only
2977 thing valid, so we can't do a convert from there. This
2978 occurs when the calling sequence allow such misaligned
2979 usages.
2981 In addition, the conversion may involve a call, which could
2982 clobber parameters which haven't been copied to pseudo
2983 registers yet.
2985 First, we try to emit an insn which performs the necessary
2986 conversion. We verify that this insn does not clobber any
2987 hard registers. */
2989 enum insn_code icode;
2990 rtx op0, op1;
2992 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
2993 unsignedp);
2995 op0 = parmreg;
2996 op1 = validated_mem;
2997 if (icode != CODE_FOR_nothing
2998 && insn_operand_matches (icode, 0, op0)
2999 && insn_operand_matches (icode, 1, op1))
3001 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3002 rtx insn, insns, t = op1;
3003 HARD_REG_SET hardregs;
3005 start_sequence ();
3006 /* If op1 is a hard register that is likely spilled, first
3007 force it into a pseudo, otherwise combiner might extend
3008 its lifetime too much. */
3009 if (GET_CODE (t) == SUBREG)
3010 t = SUBREG_REG (t);
3011 if (REG_P (t)
3012 && HARD_REGISTER_P (t)
3013 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3014 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3016 t = gen_reg_rtx (GET_MODE (op1));
3017 emit_move_insn (t, op1);
3019 else
3020 t = op1;
3021 insn = gen_extend_insn (op0, t, promoted_nominal_mode,
3022 data->passed_mode, unsignedp);
3023 emit_insn (insn);
3024 insns = get_insns ();
3026 moved = true;
3027 CLEAR_HARD_REG_SET (hardregs);
3028 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3030 if (INSN_P (insn))
3031 note_stores (PATTERN (insn), record_hard_reg_sets,
3032 &hardregs);
3033 if (!hard_reg_set_empty_p (hardregs))
3034 moved = false;
3037 end_sequence ();
3039 if (moved)
3041 emit_insn (insns);
3042 if (equiv_stack_parm != NULL_RTX)
3043 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3044 equiv_stack_parm);
3049 if (moved)
3050 /* Nothing to do. */
3052 else if (need_conversion)
3054 /* We did not have an insn to convert directly, or the sequence
3055 generated appeared unsafe. We must first copy the parm to a
3056 pseudo reg, and save the conversion until after all
3057 parameters have been moved. */
3059 int save_tree_used;
3060 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3062 emit_move_insn (tempreg, validated_mem);
3064 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3065 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3067 if (GET_CODE (tempreg) == SUBREG
3068 && GET_MODE (tempreg) == data->nominal_mode
3069 && REG_P (SUBREG_REG (tempreg))
3070 && data->nominal_mode == data->passed_mode
3071 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3072 && GET_MODE_SIZE (GET_MODE (tempreg))
3073 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3075 /* The argument is already sign/zero extended, so note it
3076 into the subreg. */
3077 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3078 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
3081 /* TREE_USED gets set erroneously during expand_assignment. */
3082 save_tree_used = TREE_USED (parm);
3083 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3084 TREE_USED (parm) = save_tree_used;
3085 all->first_conversion_insn = get_insns ();
3086 all->last_conversion_insn = get_last_insn ();
3087 end_sequence ();
3089 did_conversion = true;
3091 else
3092 emit_move_insn (parmreg, validated_mem);
3094 /* If we were passed a pointer but the actual value can safely live
3095 in a register, put it in one. */
3096 if (data->passed_pointer
3097 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3098 /* If by-reference argument was promoted, demote it. */
3099 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
3100 || use_register_for_decl (parm)))
3102 /* We can't use nominal_mode, because it will have been set to
3103 Pmode above. We must use the actual mode of the parm. */
3104 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3105 mark_user_reg (parmreg);
3107 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3109 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3110 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3112 push_to_sequence2 (all->first_conversion_insn,
3113 all->last_conversion_insn);
3114 emit_move_insn (tempreg, DECL_RTL (parm));
3115 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3116 emit_move_insn (parmreg, tempreg);
3117 all->first_conversion_insn = get_insns ();
3118 all->last_conversion_insn = get_last_insn ();
3119 end_sequence ();
3121 did_conversion = true;
3123 else
3124 emit_move_insn (parmreg, DECL_RTL (parm));
3126 SET_DECL_RTL (parm, parmreg);
3128 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3129 now the parm. */
3130 data->stack_parm = NULL;
3133 /* Mark the register as eliminable if we did no conversion and it was
3134 copied from memory at a fixed offset, and the arg pointer was not
3135 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3136 offset formed an invalid address, such memory-equivalences as we
3137 make here would screw up life analysis for it. */
3138 if (data->nominal_mode == data->passed_mode
3139 && !did_conversion
3140 && data->stack_parm != 0
3141 && MEM_P (data->stack_parm)
3142 && data->locate.offset.var == 0
3143 && reg_mentioned_p (virtual_incoming_args_rtx,
3144 XEXP (data->stack_parm, 0)))
3146 rtx linsn = get_last_insn ();
3147 rtx sinsn, set;
3149 /* Mark complex types separately. */
3150 if (GET_CODE (parmreg) == CONCAT)
3152 enum machine_mode submode
3153 = GET_MODE_INNER (GET_MODE (parmreg));
3154 int regnor = REGNO (XEXP (parmreg, 0));
3155 int regnoi = REGNO (XEXP (parmreg, 1));
3156 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3157 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3158 GET_MODE_SIZE (submode));
3160 /* Scan backwards for the set of the real and
3161 imaginary parts. */
3162 for (sinsn = linsn; sinsn != 0;
3163 sinsn = prev_nonnote_insn (sinsn))
3165 set = single_set (sinsn);
3166 if (set == 0)
3167 continue;
3169 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3170 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3171 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3172 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3175 else
3176 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3179 /* For pointer data type, suggest pointer register. */
3180 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3181 mark_reg_pointer (parmreg,
3182 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3185 /* A subroutine of assign_parms. Allocate stack space to hold the current
3186 parameter. Get it there. Perform all ABI specified conversions. */
3188 static void
3189 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3190 struct assign_parm_data_one *data)
3192 /* Value must be stored in the stack slot STACK_PARM during function
3193 execution. */
3194 bool to_conversion = false;
3196 assign_parm_remove_parallels (data);
3198 if (data->promoted_mode != data->nominal_mode)
3200 /* Conversion is required. */
3201 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3203 emit_move_insn (tempreg, validize_mem (data->entry_parm));
3205 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3206 to_conversion = true;
3208 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3209 TYPE_UNSIGNED (TREE_TYPE (parm)));
3211 if (data->stack_parm)
3213 int offset = subreg_lowpart_offset (data->nominal_mode,
3214 GET_MODE (data->stack_parm));
3215 /* ??? This may need a big-endian conversion on sparc64. */
3216 data->stack_parm
3217 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3218 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3219 set_mem_offset (data->stack_parm,
3220 MEM_OFFSET (data->stack_parm) + offset);
3224 if (data->entry_parm != data->stack_parm)
3226 rtx src, dest;
3228 if (data->stack_parm == 0)
3230 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3231 GET_MODE (data->entry_parm),
3232 TYPE_ALIGN (data->passed_type));
3233 data->stack_parm
3234 = assign_stack_local (GET_MODE (data->entry_parm),
3235 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3236 align);
3237 set_mem_attributes (data->stack_parm, parm, 1);
3240 dest = validize_mem (data->stack_parm);
3241 src = validize_mem (data->entry_parm);
3243 if (MEM_P (src))
3245 /* Use a block move to handle potentially misaligned entry_parm. */
3246 if (!to_conversion)
3247 push_to_sequence2 (all->first_conversion_insn,
3248 all->last_conversion_insn);
3249 to_conversion = true;
3251 emit_block_move (dest, src,
3252 GEN_INT (int_size_in_bytes (data->passed_type)),
3253 BLOCK_OP_NORMAL);
3255 else
3256 emit_move_insn (dest, src);
3259 if (to_conversion)
3261 all->first_conversion_insn = get_insns ();
3262 all->last_conversion_insn = get_last_insn ();
3263 end_sequence ();
3266 SET_DECL_RTL (parm, data->stack_parm);
3269 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3270 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3272 static void
3273 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3274 vec<tree> fnargs)
3276 tree parm;
3277 tree orig_fnargs = all->orig_fnargs;
3278 unsigned i = 0;
3280 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3282 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3283 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3285 rtx tmp, real, imag;
3286 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3288 real = DECL_RTL (fnargs[i]);
3289 imag = DECL_RTL (fnargs[i + 1]);
3290 if (inner != GET_MODE (real))
3292 real = gen_lowpart_SUBREG (inner, real);
3293 imag = gen_lowpart_SUBREG (inner, imag);
3296 if (TREE_ADDRESSABLE (parm))
3298 rtx rmem, imem;
3299 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3300 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3301 DECL_MODE (parm),
3302 TYPE_ALIGN (TREE_TYPE (parm)));
3304 /* split_complex_arg put the real and imag parts in
3305 pseudos. Move them to memory. */
3306 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3307 set_mem_attributes (tmp, parm, 1);
3308 rmem = adjust_address_nv (tmp, inner, 0);
3309 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3310 push_to_sequence2 (all->first_conversion_insn,
3311 all->last_conversion_insn);
3312 emit_move_insn (rmem, real);
3313 emit_move_insn (imem, imag);
3314 all->first_conversion_insn = get_insns ();
3315 all->last_conversion_insn = get_last_insn ();
3316 end_sequence ();
3318 else
3319 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3320 SET_DECL_RTL (parm, tmp);
3322 real = DECL_INCOMING_RTL (fnargs[i]);
3323 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3324 if (inner != GET_MODE (real))
3326 real = gen_lowpart_SUBREG (inner, real);
3327 imag = gen_lowpart_SUBREG (inner, imag);
3329 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3330 set_decl_incoming_rtl (parm, tmp, false);
3331 i++;
3336 /* Assign RTL expressions to the function's parameters. This may involve
3337 copying them into registers and using those registers as the DECL_RTL. */
3339 static void
3340 assign_parms (tree fndecl)
3342 struct assign_parm_data_all all;
3343 tree parm;
3344 vec<tree> fnargs;
3345 unsigned i;
3347 crtl->args.internal_arg_pointer
3348 = targetm.calls.internal_arg_pointer ();
3350 assign_parms_initialize_all (&all);
3351 fnargs = assign_parms_augmented_arg_list (&all);
3353 FOR_EACH_VEC_ELT (fnargs, i, parm)
3355 struct assign_parm_data_one data;
3357 /* Extract the type of PARM; adjust it according to ABI. */
3358 assign_parm_find_data_types (&all, parm, &data);
3360 /* Early out for errors and void parameters. */
3361 if (data.passed_mode == VOIDmode)
3363 SET_DECL_RTL (parm, const0_rtx);
3364 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3365 continue;
3368 /* Estimate stack alignment from parameter alignment. */
3369 if (SUPPORTS_STACK_ALIGNMENT)
3371 unsigned int align
3372 = targetm.calls.function_arg_boundary (data.promoted_mode,
3373 data.passed_type);
3374 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3375 align);
3376 if (TYPE_ALIGN (data.nominal_type) > align)
3377 align = MINIMUM_ALIGNMENT (data.nominal_type,
3378 TYPE_MODE (data.nominal_type),
3379 TYPE_ALIGN (data.nominal_type));
3380 if (crtl->stack_alignment_estimated < align)
3382 gcc_assert (!crtl->stack_realign_processed);
3383 crtl->stack_alignment_estimated = align;
3387 if (cfun->stdarg && !DECL_CHAIN (parm))
3388 assign_parms_setup_varargs (&all, &data, false);
3390 /* Find out where the parameter arrives in this function. */
3391 assign_parm_find_entry_rtl (&all, &data);
3393 /* Find out where stack space for this parameter might be. */
3394 if (assign_parm_is_stack_parm (&all, &data))
3396 assign_parm_find_stack_rtl (parm, &data);
3397 assign_parm_adjust_entry_rtl (&data);
3400 /* Record permanently how this parm was passed. */
3401 if (data.passed_pointer)
3403 rtx incoming_rtl
3404 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3405 data.entry_parm);
3406 set_decl_incoming_rtl (parm, incoming_rtl, true);
3408 else
3409 set_decl_incoming_rtl (parm, data.entry_parm, false);
3411 /* Update info on where next arg arrives in registers. */
3412 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3413 data.passed_type, data.named_arg);
3415 assign_parm_adjust_stack_rtl (&data);
3417 if (assign_parm_setup_block_p (&data))
3418 assign_parm_setup_block (&all, parm, &data);
3419 else if (data.passed_pointer || use_register_for_decl (parm))
3420 assign_parm_setup_reg (&all, parm, &data);
3421 else
3422 assign_parm_setup_stack (&all, parm, &data);
3425 if (targetm.calls.split_complex_arg)
3426 assign_parms_unsplit_complex (&all, fnargs);
3428 fnargs.release ();
3430 /* Output all parameter conversion instructions (possibly including calls)
3431 now that all parameters have been copied out of hard registers. */
3432 emit_insn (all.first_conversion_insn);
3434 /* Estimate reload stack alignment from scalar return mode. */
3435 if (SUPPORTS_STACK_ALIGNMENT)
3437 if (DECL_RESULT (fndecl))
3439 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3440 enum machine_mode mode = TYPE_MODE (type);
3442 if (mode != BLKmode
3443 && mode != VOIDmode
3444 && !AGGREGATE_TYPE_P (type))
3446 unsigned int align = GET_MODE_ALIGNMENT (mode);
3447 if (crtl->stack_alignment_estimated < align)
3449 gcc_assert (!crtl->stack_realign_processed);
3450 crtl->stack_alignment_estimated = align;
3456 /* If we are receiving a struct value address as the first argument, set up
3457 the RTL for the function result. As this might require code to convert
3458 the transmitted address to Pmode, we do this here to ensure that possible
3459 preliminary conversions of the address have been emitted already. */
3460 if (all.function_result_decl)
3462 tree result = DECL_RESULT (current_function_decl);
3463 rtx addr = DECL_RTL (all.function_result_decl);
3464 rtx x;
3466 if (DECL_BY_REFERENCE (result))
3468 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3469 x = addr;
3471 else
3473 SET_DECL_VALUE_EXPR (result,
3474 build1 (INDIRECT_REF, TREE_TYPE (result),
3475 all.function_result_decl));
3476 addr = convert_memory_address (Pmode, addr);
3477 x = gen_rtx_MEM (DECL_MODE (result), addr);
3478 set_mem_attributes (x, result, 1);
3481 DECL_HAS_VALUE_EXPR_P (result) = 1;
3483 SET_DECL_RTL (result, x);
3486 /* We have aligned all the args, so add space for the pretend args. */
3487 crtl->args.pretend_args_size = all.pretend_args_size;
3488 all.stack_args_size.constant += all.extra_pretend_bytes;
3489 crtl->args.size = all.stack_args_size.constant;
3491 /* Adjust function incoming argument size for alignment and
3492 minimum length. */
3494 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
3495 crtl->args.size = CEIL_ROUND (crtl->args.size,
3496 PARM_BOUNDARY / BITS_PER_UNIT);
3498 #ifdef ARGS_GROW_DOWNWARD
3499 crtl->args.arg_offset_rtx
3500 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3501 : expand_expr (size_diffop (all.stack_args_size.var,
3502 size_int (-all.stack_args_size.constant)),
3503 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3504 #else
3505 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3506 #endif
3508 /* See how many bytes, if any, of its args a function should try to pop
3509 on return. */
3511 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3512 TREE_TYPE (fndecl),
3513 crtl->args.size);
3515 /* For stdarg.h function, save info about
3516 regs and stack space used by the named args. */
3518 crtl->args.info = all.args_so_far_v;
3520 /* Set the rtx used for the function return value. Put this in its
3521 own variable so any optimizers that need this information don't have
3522 to include tree.h. Do this here so it gets done when an inlined
3523 function gets output. */
3525 crtl->return_rtx
3526 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3527 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3529 /* If scalar return value was computed in a pseudo-reg, or was a named
3530 return value that got dumped to the stack, copy that to the hard
3531 return register. */
3532 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3534 tree decl_result = DECL_RESULT (fndecl);
3535 rtx decl_rtl = DECL_RTL (decl_result);
3537 if (REG_P (decl_rtl)
3538 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3539 : DECL_REGISTER (decl_result))
3541 rtx real_decl_rtl;
3543 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3544 fndecl, true);
3545 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3546 /* The delay slot scheduler assumes that crtl->return_rtx
3547 holds the hard register containing the return value, not a
3548 temporary pseudo. */
3549 crtl->return_rtx = real_decl_rtl;
3554 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3555 For all seen types, gimplify their sizes. */
3557 static tree
3558 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3560 tree t = *tp;
3562 *walk_subtrees = 0;
3563 if (TYPE_P (t))
3565 if (POINTER_TYPE_P (t))
3566 *walk_subtrees = 1;
3567 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3568 && !TYPE_SIZES_GIMPLIFIED (t))
3570 gimplify_type_sizes (t, (gimple_seq *) data);
3571 *walk_subtrees = 1;
3575 return NULL;
3578 /* Gimplify the parameter list for current_function_decl. This involves
3579 evaluating SAVE_EXPRs of variable sized parameters and generating code
3580 to implement callee-copies reference parameters. Returns a sequence of
3581 statements to add to the beginning of the function. */
3583 gimple_seq
3584 gimplify_parameters (void)
3586 struct assign_parm_data_all all;
3587 tree parm;
3588 gimple_seq stmts = NULL;
3589 vec<tree> fnargs;
3590 unsigned i;
3592 assign_parms_initialize_all (&all);
3593 fnargs = assign_parms_augmented_arg_list (&all);
3595 FOR_EACH_VEC_ELT (fnargs, i, parm)
3597 struct assign_parm_data_one data;
3599 /* Extract the type of PARM; adjust it according to ABI. */
3600 assign_parm_find_data_types (&all, parm, &data);
3602 /* Early out for errors and void parameters. */
3603 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3604 continue;
3606 /* Update info on where next arg arrives in registers. */
3607 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3608 data.passed_type, data.named_arg);
3610 /* ??? Once upon a time variable_size stuffed parameter list
3611 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3612 turned out to be less than manageable in the gimple world.
3613 Now we have to hunt them down ourselves. */
3614 walk_tree_without_duplicates (&data.passed_type,
3615 gimplify_parm_type, &stmts);
3617 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3619 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3620 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3623 if (data.passed_pointer)
3625 tree type = TREE_TYPE (data.passed_type);
3626 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
3627 type, data.named_arg))
3629 tree local, t;
3631 /* For constant-sized objects, this is trivial; for
3632 variable-sized objects, we have to play games. */
3633 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3634 && !(flag_stack_check == GENERIC_STACK_CHECK
3635 && compare_tree_int (DECL_SIZE_UNIT (parm),
3636 STACK_CHECK_MAX_VAR_SIZE) > 0))
3638 local = create_tmp_var (type, get_name (parm));
3639 DECL_IGNORED_P (local) = 0;
3640 /* If PARM was addressable, move that flag over
3641 to the local copy, as its address will be taken,
3642 not the PARMs. Keep the parms address taken
3643 as we'll query that flag during gimplification. */
3644 if (TREE_ADDRESSABLE (parm))
3645 TREE_ADDRESSABLE (local) = 1;
3646 else if (TREE_CODE (type) == COMPLEX_TYPE
3647 || TREE_CODE (type) == VECTOR_TYPE)
3648 DECL_GIMPLE_REG_P (local) = 1;
3650 else
3652 tree ptr_type, addr;
3654 ptr_type = build_pointer_type (type);
3655 addr = create_tmp_reg (ptr_type, get_name (parm));
3656 DECL_IGNORED_P (addr) = 0;
3657 local = build_fold_indirect_ref (addr);
3659 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3660 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
3661 size_int (DECL_ALIGN (parm)));
3663 /* The call has been built for a variable-sized object. */
3664 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3665 t = fold_convert (ptr_type, t);
3666 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3667 gimplify_and_add (t, &stmts);
3670 gimplify_assign (local, parm, &stmts);
3672 SET_DECL_VALUE_EXPR (parm, local);
3673 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3678 fnargs.release ();
3680 return stmts;
3683 /* Compute the size and offset from the start of the stacked arguments for a
3684 parm passed in mode PASSED_MODE and with type TYPE.
3686 INITIAL_OFFSET_PTR points to the current offset into the stacked
3687 arguments.
3689 The starting offset and size for this parm are returned in
3690 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3691 nonzero, the offset is that of stack slot, which is returned in
3692 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3693 padding required from the initial offset ptr to the stack slot.
3695 IN_REGS is nonzero if the argument will be passed in registers. It will
3696 never be set if REG_PARM_STACK_SPACE is not defined.
3698 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3699 for arguments which are passed in registers.
3701 FNDECL is the function in which the argument was defined.
3703 There are two types of rounding that are done. The first, controlled by
3704 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3705 argument list to be aligned to the specific boundary (in bits). This
3706 rounding affects the initial and starting offsets, but not the argument
3707 size.
3709 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3710 optionally rounds the size of the parm to PARM_BOUNDARY. The
3711 initial offset is not affected by this rounding, while the size always
3712 is and the starting offset may be. */
3714 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3715 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3716 callers pass in the total size of args so far as
3717 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3719 void
3720 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3721 int reg_parm_stack_space, int partial,
3722 tree fndecl ATTRIBUTE_UNUSED,
3723 struct args_size *initial_offset_ptr,
3724 struct locate_and_pad_arg_data *locate)
3726 tree sizetree;
3727 enum direction where_pad;
3728 unsigned int boundary, round_boundary;
3729 int part_size_in_regs;
3731 /* If we have found a stack parm before we reach the end of the
3732 area reserved for registers, skip that area. */
3733 if (! in_regs)
3735 if (reg_parm_stack_space > 0)
3737 if (initial_offset_ptr->var)
3739 initial_offset_ptr->var
3740 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3741 ssize_int (reg_parm_stack_space));
3742 initial_offset_ptr->constant = 0;
3744 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3745 initial_offset_ptr->constant = reg_parm_stack_space;
3749 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3751 sizetree
3752 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3753 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3754 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
3755 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3756 type);
3757 locate->where_pad = where_pad;
3759 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3760 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3761 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3763 locate->boundary = boundary;
3765 if (SUPPORTS_STACK_ALIGNMENT)
3767 /* stack_alignment_estimated can't change after stack has been
3768 realigned. */
3769 if (crtl->stack_alignment_estimated < boundary)
3771 if (!crtl->stack_realign_processed)
3772 crtl->stack_alignment_estimated = boundary;
3773 else
3775 /* If stack is realigned and stack alignment value
3776 hasn't been finalized, it is OK not to increase
3777 stack_alignment_estimated. The bigger alignment
3778 requirement is recorded in stack_alignment_needed
3779 below. */
3780 gcc_assert (!crtl->stack_realign_finalized
3781 && crtl->stack_realign_needed);
3786 /* Remember if the outgoing parameter requires extra alignment on the
3787 calling function side. */
3788 if (crtl->stack_alignment_needed < boundary)
3789 crtl->stack_alignment_needed = boundary;
3790 if (crtl->preferred_stack_boundary < boundary)
3791 crtl->preferred_stack_boundary = boundary;
3793 #ifdef ARGS_GROW_DOWNWARD
3794 locate->slot_offset.constant = -initial_offset_ptr->constant;
3795 if (initial_offset_ptr->var)
3796 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3797 initial_offset_ptr->var);
3800 tree s2 = sizetree;
3801 if (where_pad != none
3802 && (!host_integerp (sizetree, 1)
3803 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3804 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
3805 SUB_PARM_SIZE (locate->slot_offset, s2);
3808 locate->slot_offset.constant += part_size_in_regs;
3810 if (!in_regs || reg_parm_stack_space > 0)
3811 pad_to_arg_alignment (&locate->slot_offset, boundary,
3812 &locate->alignment_pad);
3814 locate->size.constant = (-initial_offset_ptr->constant
3815 - locate->slot_offset.constant);
3816 if (initial_offset_ptr->var)
3817 locate->size.var = size_binop (MINUS_EXPR,
3818 size_binop (MINUS_EXPR,
3819 ssize_int (0),
3820 initial_offset_ptr->var),
3821 locate->slot_offset.var);
3823 /* Pad_below needs the pre-rounded size to know how much to pad
3824 below. */
3825 locate->offset = locate->slot_offset;
3826 if (where_pad == downward)
3827 pad_below (&locate->offset, passed_mode, sizetree);
3829 #else /* !ARGS_GROW_DOWNWARD */
3830 if (!in_regs || reg_parm_stack_space > 0)
3831 pad_to_arg_alignment (initial_offset_ptr, boundary,
3832 &locate->alignment_pad);
3833 locate->slot_offset = *initial_offset_ptr;
3835 #ifdef PUSH_ROUNDING
3836 if (passed_mode != BLKmode)
3837 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3838 #endif
3840 /* Pad_below needs the pre-rounded size to know how much to pad below
3841 so this must be done before rounding up. */
3842 locate->offset = locate->slot_offset;
3843 if (where_pad == downward)
3844 pad_below (&locate->offset, passed_mode, sizetree);
3846 if (where_pad != none
3847 && (!host_integerp (sizetree, 1)
3848 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3849 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
3851 ADD_PARM_SIZE (locate->size, sizetree);
3853 locate->size.constant -= part_size_in_regs;
3854 #endif /* ARGS_GROW_DOWNWARD */
3856 #ifdef FUNCTION_ARG_OFFSET
3857 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3858 #endif
3861 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3862 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3864 static void
3865 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3866 struct args_size *alignment_pad)
3868 tree save_var = NULL_TREE;
3869 HOST_WIDE_INT save_constant = 0;
3870 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3871 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3873 #ifdef SPARC_STACK_BOUNDARY_HACK
3874 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3875 the real alignment of %sp. However, when it does this, the
3876 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3877 if (SPARC_STACK_BOUNDARY_HACK)
3878 sp_offset = 0;
3879 #endif
3881 if (boundary > PARM_BOUNDARY)
3883 save_var = offset_ptr->var;
3884 save_constant = offset_ptr->constant;
3887 alignment_pad->var = NULL_TREE;
3888 alignment_pad->constant = 0;
3890 if (boundary > BITS_PER_UNIT)
3892 if (offset_ptr->var)
3894 tree sp_offset_tree = ssize_int (sp_offset);
3895 tree offset = size_binop (PLUS_EXPR,
3896 ARGS_SIZE_TREE (*offset_ptr),
3897 sp_offset_tree);
3898 #ifdef ARGS_GROW_DOWNWARD
3899 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3900 #else
3901 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3902 #endif
3904 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3905 /* ARGS_SIZE_TREE includes constant term. */
3906 offset_ptr->constant = 0;
3907 if (boundary > PARM_BOUNDARY)
3908 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3909 save_var);
3911 else
3913 offset_ptr->constant = -sp_offset +
3914 #ifdef ARGS_GROW_DOWNWARD
3915 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3916 #else
3917 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3918 #endif
3919 if (boundary > PARM_BOUNDARY)
3920 alignment_pad->constant = offset_ptr->constant - save_constant;
3925 static void
3926 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3928 if (passed_mode != BLKmode)
3930 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3931 offset_ptr->constant
3932 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3933 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3934 - GET_MODE_SIZE (passed_mode));
3936 else
3938 if (TREE_CODE (sizetree) != INTEGER_CST
3939 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3941 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3942 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3943 /* Add it in. */
3944 ADD_PARM_SIZE (*offset_ptr, s2);
3945 SUB_PARM_SIZE (*offset_ptr, sizetree);
3951 /* True if register REGNO was alive at a place where `setjmp' was
3952 called and was set more than once or is an argument. Such regs may
3953 be clobbered by `longjmp'. */
3955 static bool
3956 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3958 /* There appear to be cases where some local vars never reach the
3959 backend but have bogus regnos. */
3960 if (regno >= max_reg_num ())
3961 return false;
3963 return ((REG_N_SETS (regno) > 1
3964 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3965 && REGNO_REG_SET_P (setjmp_crosses, regno));
3968 /* Walk the tree of blocks describing the binding levels within a
3969 function and warn about variables the might be killed by setjmp or
3970 vfork. This is done after calling flow_analysis before register
3971 allocation since that will clobber the pseudo-regs to hard
3972 regs. */
3974 static void
3975 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3977 tree decl, sub;
3979 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
3981 if (TREE_CODE (decl) == VAR_DECL
3982 && DECL_RTL_SET_P (decl)
3983 && REG_P (DECL_RTL (decl))
3984 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3985 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3986 " %<longjmp%> or %<vfork%>", decl);
3989 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3990 setjmp_vars_warning (setjmp_crosses, sub);
3993 /* Do the appropriate part of setjmp_vars_warning
3994 but for arguments instead of local variables. */
3996 static void
3997 setjmp_args_warning (bitmap setjmp_crosses)
3999 tree decl;
4000 for (decl = DECL_ARGUMENTS (current_function_decl);
4001 decl; decl = DECL_CHAIN (decl))
4002 if (DECL_RTL (decl) != 0
4003 && REG_P (DECL_RTL (decl))
4004 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4005 warning (OPT_Wclobbered,
4006 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4007 decl);
4010 /* Generate warning messages for variables live across setjmp. */
4012 void
4013 generate_setjmp_warnings (void)
4015 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4017 if (n_basic_blocks == NUM_FIXED_BLOCKS
4018 || bitmap_empty_p (setjmp_crosses))
4019 return;
4021 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4022 setjmp_args_warning (setjmp_crosses);
4026 /* Reverse the order of elements in the fragment chain T of blocks,
4027 and return the new head of the chain (old last element).
4028 In addition to that clear BLOCK_SAME_RANGE flags when needed
4029 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4030 its super fragment origin. */
4032 static tree
4033 block_fragments_nreverse (tree t)
4035 tree prev = 0, block, next, prev_super = 0;
4036 tree super = BLOCK_SUPERCONTEXT (t);
4037 if (BLOCK_FRAGMENT_ORIGIN (super))
4038 super = BLOCK_FRAGMENT_ORIGIN (super);
4039 for (block = t; block; block = next)
4041 next = BLOCK_FRAGMENT_CHAIN (block);
4042 BLOCK_FRAGMENT_CHAIN (block) = prev;
4043 if ((prev && !BLOCK_SAME_RANGE (prev))
4044 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4045 != prev_super))
4046 BLOCK_SAME_RANGE (block) = 0;
4047 prev_super = BLOCK_SUPERCONTEXT (block);
4048 BLOCK_SUPERCONTEXT (block) = super;
4049 prev = block;
4051 t = BLOCK_FRAGMENT_ORIGIN (t);
4052 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4053 != prev_super)
4054 BLOCK_SAME_RANGE (t) = 0;
4055 BLOCK_SUPERCONTEXT (t) = super;
4056 return prev;
4059 /* Reverse the order of elements in the chain T of blocks,
4060 and return the new head of the chain (old last element).
4061 Also do the same on subblocks and reverse the order of elements
4062 in BLOCK_FRAGMENT_CHAIN as well. */
4064 static tree
4065 blocks_nreverse_all (tree t)
4067 tree prev = 0, block, next;
4068 for (block = t; block; block = next)
4070 next = BLOCK_CHAIN (block);
4071 BLOCK_CHAIN (block) = prev;
4072 if (BLOCK_FRAGMENT_CHAIN (block)
4073 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4075 BLOCK_FRAGMENT_CHAIN (block)
4076 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4077 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4078 BLOCK_SAME_RANGE (block) = 0;
4080 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4081 prev = block;
4083 return prev;
4087 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4088 and create duplicate blocks. */
4089 /* ??? Need an option to either create block fragments or to create
4090 abstract origin duplicates of a source block. It really depends
4091 on what optimization has been performed. */
4093 void
4094 reorder_blocks (void)
4096 tree block = DECL_INITIAL (current_function_decl);
4097 vec<tree> block_stack;
4099 if (block == NULL_TREE)
4100 return;
4102 block_stack.create (10);
4104 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4105 clear_block_marks (block);
4107 /* Prune the old trees away, so that they don't get in the way. */
4108 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4109 BLOCK_CHAIN (block) = NULL_TREE;
4111 /* Recreate the block tree from the note nesting. */
4112 reorder_blocks_1 (get_insns (), block, &block_stack);
4113 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4115 block_stack.release ();
4118 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4120 void
4121 clear_block_marks (tree block)
4123 while (block)
4125 TREE_ASM_WRITTEN (block) = 0;
4126 clear_block_marks (BLOCK_SUBBLOCKS (block));
4127 block = BLOCK_CHAIN (block);
4131 static void
4132 reorder_blocks_1 (rtx insns, tree current_block, vec<tree> *p_block_stack)
4134 rtx insn;
4135 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4137 for (insn = insns; insn; insn = NEXT_INSN (insn))
4139 if (NOTE_P (insn))
4141 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4143 tree block = NOTE_BLOCK (insn);
4144 tree origin;
4146 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4147 origin = block;
4149 if (prev_end)
4150 BLOCK_SAME_RANGE (prev_end) = 0;
4151 prev_end = NULL_TREE;
4153 /* If we have seen this block before, that means it now
4154 spans multiple address regions. Create a new fragment. */
4155 if (TREE_ASM_WRITTEN (block))
4157 tree new_block = copy_node (block);
4159 BLOCK_SAME_RANGE (new_block) = 0;
4160 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4161 BLOCK_FRAGMENT_CHAIN (new_block)
4162 = BLOCK_FRAGMENT_CHAIN (origin);
4163 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4165 NOTE_BLOCK (insn) = new_block;
4166 block = new_block;
4169 if (prev_beg == current_block && prev_beg)
4170 BLOCK_SAME_RANGE (block) = 1;
4172 prev_beg = origin;
4174 BLOCK_SUBBLOCKS (block) = 0;
4175 TREE_ASM_WRITTEN (block) = 1;
4176 /* When there's only one block for the entire function,
4177 current_block == block and we mustn't do this, it
4178 will cause infinite recursion. */
4179 if (block != current_block)
4181 tree super;
4182 if (block != origin)
4183 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4184 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4185 (origin))
4186 == current_block);
4187 if (p_block_stack->is_empty ())
4188 super = current_block;
4189 else
4191 super = p_block_stack->last ();
4192 gcc_assert (super == current_block
4193 || BLOCK_FRAGMENT_ORIGIN (super)
4194 == current_block);
4196 BLOCK_SUPERCONTEXT (block) = super;
4197 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4198 BLOCK_SUBBLOCKS (current_block) = block;
4199 current_block = origin;
4201 p_block_stack->safe_push (block);
4203 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4205 NOTE_BLOCK (insn) = p_block_stack->pop ();
4206 current_block = BLOCK_SUPERCONTEXT (current_block);
4207 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4208 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4209 prev_beg = NULL_TREE;
4210 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4211 ? NOTE_BLOCK (insn) : NULL_TREE;
4214 else
4216 prev_beg = NULL_TREE;
4217 if (prev_end)
4218 BLOCK_SAME_RANGE (prev_end) = 0;
4219 prev_end = NULL_TREE;
4224 /* Reverse the order of elements in the chain T of blocks,
4225 and return the new head of the chain (old last element). */
4227 tree
4228 blocks_nreverse (tree t)
4230 tree prev = 0, block, next;
4231 for (block = t; block; block = next)
4233 next = BLOCK_CHAIN (block);
4234 BLOCK_CHAIN (block) = prev;
4235 prev = block;
4237 return prev;
4240 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4241 by modifying the last node in chain 1 to point to chain 2. */
4243 tree
4244 block_chainon (tree op1, tree op2)
4246 tree t1;
4248 if (!op1)
4249 return op2;
4250 if (!op2)
4251 return op1;
4253 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4254 continue;
4255 BLOCK_CHAIN (t1) = op2;
4257 #ifdef ENABLE_TREE_CHECKING
4259 tree t2;
4260 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4261 gcc_assert (t2 != t1);
4263 #endif
4265 return op1;
4268 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4269 non-NULL, list them all into VECTOR, in a depth-first preorder
4270 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4271 blocks. */
4273 static int
4274 all_blocks (tree block, tree *vector)
4276 int n_blocks = 0;
4278 while (block)
4280 TREE_ASM_WRITTEN (block) = 0;
4282 /* Record this block. */
4283 if (vector)
4284 vector[n_blocks] = block;
4286 ++n_blocks;
4288 /* Record the subblocks, and their subblocks... */
4289 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4290 vector ? vector + n_blocks : 0);
4291 block = BLOCK_CHAIN (block);
4294 return n_blocks;
4297 /* Return a vector containing all the blocks rooted at BLOCK. The
4298 number of elements in the vector is stored in N_BLOCKS_P. The
4299 vector is dynamically allocated; it is the caller's responsibility
4300 to call `free' on the pointer returned. */
4302 static tree *
4303 get_block_vector (tree block, int *n_blocks_p)
4305 tree *block_vector;
4307 *n_blocks_p = all_blocks (block, NULL);
4308 block_vector = XNEWVEC (tree, *n_blocks_p);
4309 all_blocks (block, block_vector);
4311 return block_vector;
4314 static GTY(()) int next_block_index = 2;
4316 /* Set BLOCK_NUMBER for all the blocks in FN. */
4318 void
4319 number_blocks (tree fn)
4321 int i;
4322 int n_blocks;
4323 tree *block_vector;
4325 /* For SDB and XCOFF debugging output, we start numbering the blocks
4326 from 1 within each function, rather than keeping a running
4327 count. */
4328 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4329 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4330 next_block_index = 1;
4331 #endif
4333 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4335 /* The top-level BLOCK isn't numbered at all. */
4336 for (i = 1; i < n_blocks; ++i)
4337 /* We number the blocks from two. */
4338 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4340 free (block_vector);
4342 return;
4345 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4347 DEBUG_FUNCTION tree
4348 debug_find_var_in_block_tree (tree var, tree block)
4350 tree t;
4352 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4353 if (t == var)
4354 return block;
4356 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4358 tree ret = debug_find_var_in_block_tree (var, t);
4359 if (ret)
4360 return ret;
4363 return NULL_TREE;
4366 /* Keep track of whether we're in a dummy function context. If we are,
4367 we don't want to invoke the set_current_function hook, because we'll
4368 get into trouble if the hook calls target_reinit () recursively or
4369 when the initial initialization is not yet complete. */
4371 static bool in_dummy_function;
4373 /* Invoke the target hook when setting cfun. Update the optimization options
4374 if the function uses different options than the default. */
4376 static void
4377 invoke_set_current_function_hook (tree fndecl)
4379 if (!in_dummy_function)
4381 tree opts = ((fndecl)
4382 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4383 : optimization_default_node);
4385 if (!opts)
4386 opts = optimization_default_node;
4388 /* Change optimization options if needed. */
4389 if (optimization_current_node != opts)
4391 optimization_current_node = opts;
4392 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4395 targetm.set_current_function (fndecl);
4396 this_fn_optabs = this_target_optabs;
4398 if (opts != optimization_default_node)
4400 init_tree_optimization_optabs (opts);
4401 if (TREE_OPTIMIZATION_OPTABS (opts))
4402 this_fn_optabs = (struct target_optabs *)
4403 TREE_OPTIMIZATION_OPTABS (opts);
4408 /* cfun should never be set directly; use this function. */
4410 void
4411 set_cfun (struct function *new_cfun)
4413 if (cfun != new_cfun)
4415 cfun = new_cfun;
4416 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4420 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4422 static vec<function_p> cfun_stack;
4424 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4425 current_function_decl accordingly. */
4427 void
4428 push_cfun (struct function *new_cfun)
4430 gcc_assert ((!cfun && !current_function_decl)
4431 || (cfun && current_function_decl == cfun->decl));
4432 cfun_stack.safe_push (cfun);
4433 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4434 set_cfun (new_cfun);
4437 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4439 void
4440 pop_cfun (void)
4442 struct function *new_cfun = cfun_stack.pop ();
4443 /* When in_dummy_function, we do have a cfun but current_function_decl is
4444 NULL. We also allow pushing NULL cfun and subsequently changing
4445 current_function_decl to something else and have both restored by
4446 pop_cfun. */
4447 gcc_checking_assert (in_dummy_function
4448 || !cfun
4449 || current_function_decl == cfun->decl);
4450 set_cfun (new_cfun);
4451 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4454 /* Return value of funcdef and increase it. */
4456 get_next_funcdef_no (void)
4458 return funcdef_no++;
4461 /* Return value of funcdef. */
4463 get_last_funcdef_no (void)
4465 return funcdef_no;
4468 /* Allocate a function structure for FNDECL and set its contents
4469 to the defaults. Set cfun to the newly-allocated object.
4470 Some of the helper functions invoked during initialization assume
4471 that cfun has already been set. Therefore, assign the new object
4472 directly into cfun and invoke the back end hook explicitly at the
4473 very end, rather than initializing a temporary and calling set_cfun
4474 on it.
4476 ABSTRACT_P is true if this is a function that will never be seen by
4477 the middle-end. Such functions are front-end concepts (like C++
4478 function templates) that do not correspond directly to functions
4479 placed in object files. */
4481 void
4482 allocate_struct_function (tree fndecl, bool abstract_p)
4484 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4486 cfun = ggc_alloc_cleared_function ();
4488 init_eh_for_function ();
4490 if (init_machine_status)
4491 cfun->machine = (*init_machine_status) ();
4493 #ifdef OVERRIDE_ABI_FORMAT
4494 OVERRIDE_ABI_FORMAT (fndecl);
4495 #endif
4497 if (fndecl != NULL_TREE)
4499 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4500 cfun->decl = fndecl;
4501 current_function_funcdef_no = get_next_funcdef_no ();
4504 invoke_set_current_function_hook (fndecl);
4506 if (fndecl != NULL_TREE)
4508 tree result = DECL_RESULT (fndecl);
4509 if (!abstract_p && aggregate_value_p (result, fndecl))
4511 #ifdef PCC_STATIC_STRUCT_RETURN
4512 cfun->returns_pcc_struct = 1;
4513 #endif
4514 cfun->returns_struct = 1;
4517 cfun->stdarg = stdarg_p (fntype);
4519 /* Assume all registers in stdarg functions need to be saved. */
4520 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4521 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4523 /* ??? This could be set on a per-function basis by the front-end
4524 but is this worth the hassle? */
4525 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4526 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4530 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4531 instead of just setting it. */
4533 void
4534 push_struct_function (tree fndecl)
4536 /* When in_dummy_function we might be in the middle of a pop_cfun and
4537 current_function_decl and cfun may not match. */
4538 gcc_assert (in_dummy_function
4539 || (!cfun && !current_function_decl)
4540 || (cfun && current_function_decl == cfun->decl));
4541 cfun_stack.safe_push (cfun);
4542 current_function_decl = fndecl;
4543 allocate_struct_function (fndecl, false);
4546 /* Reset crtl and other non-struct-function variables to defaults as
4547 appropriate for emitting rtl at the start of a function. */
4549 static void
4550 prepare_function_start (void)
4552 gcc_assert (!crtl->emit.x_last_insn);
4553 init_temp_slots ();
4554 init_emit ();
4555 init_varasm_status ();
4556 init_expr ();
4557 default_rtl_profile ();
4559 if (flag_stack_usage_info)
4561 cfun->su = ggc_alloc_cleared_stack_usage ();
4562 cfun->su->static_stack_size = -1;
4565 cse_not_expected = ! optimize;
4567 /* Caller save not needed yet. */
4568 caller_save_needed = 0;
4570 /* We haven't done register allocation yet. */
4571 reg_renumber = 0;
4573 /* Indicate that we have not instantiated virtual registers yet. */
4574 virtuals_instantiated = 0;
4576 /* Indicate that we want CONCATs now. */
4577 generating_concat_p = 1;
4579 /* Indicate we have no need of a frame pointer yet. */
4580 frame_pointer_needed = 0;
4583 /* Initialize the rtl expansion mechanism so that we can do simple things
4584 like generate sequences. This is used to provide a context during global
4585 initialization of some passes. You must call expand_dummy_function_end
4586 to exit this context. */
4588 void
4589 init_dummy_function_start (void)
4591 gcc_assert (!in_dummy_function);
4592 in_dummy_function = true;
4593 push_struct_function (NULL_TREE);
4594 prepare_function_start ();
4597 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4598 and initialize static variables for generating RTL for the statements
4599 of the function. */
4601 void
4602 init_function_start (tree subr)
4604 if (subr && DECL_STRUCT_FUNCTION (subr))
4605 set_cfun (DECL_STRUCT_FUNCTION (subr));
4606 else
4607 allocate_struct_function (subr, false);
4608 prepare_function_start ();
4609 decide_function_section (subr);
4611 /* Warn if this value is an aggregate type,
4612 regardless of which calling convention we are using for it. */
4613 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4614 warning (OPT_Waggregate_return, "function returns an aggregate");
4618 void
4619 expand_main_function (void)
4621 #if (defined(INVOKE__main) \
4622 || (!defined(HAS_INIT_SECTION) \
4623 && !defined(INIT_SECTION_ASM_OP) \
4624 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4625 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4626 #endif
4629 /* Expand code to initialize the stack_protect_guard. This is invoked at
4630 the beginning of a function to be protected. */
4632 #ifndef HAVE_stack_protect_set
4633 # define HAVE_stack_protect_set 0
4634 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4635 #endif
4637 void
4638 stack_protect_prologue (void)
4640 tree guard_decl = targetm.stack_protect_guard ();
4641 rtx x, y;
4643 x = expand_normal (crtl->stack_protect_guard);
4644 y = expand_normal (guard_decl);
4646 /* Allow the target to copy from Y to X without leaking Y into a
4647 register. */
4648 if (HAVE_stack_protect_set)
4650 rtx insn = gen_stack_protect_set (x, y);
4651 if (insn)
4653 emit_insn (insn);
4654 return;
4658 /* Otherwise do a straight move. */
4659 emit_move_insn (x, y);
4662 /* Expand code to verify the stack_protect_guard. This is invoked at
4663 the end of a function to be protected. */
4665 #ifndef HAVE_stack_protect_test
4666 # define HAVE_stack_protect_test 0
4667 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4668 #endif
4670 void
4671 stack_protect_epilogue (void)
4673 tree guard_decl = targetm.stack_protect_guard ();
4674 rtx label = gen_label_rtx ();
4675 rtx x, y, tmp;
4677 x = expand_normal (crtl->stack_protect_guard);
4678 y = expand_normal (guard_decl);
4680 /* Allow the target to compare Y with X without leaking either into
4681 a register. */
4682 switch (HAVE_stack_protect_test != 0)
4684 case 1:
4685 tmp = gen_stack_protect_test (x, y, label);
4686 if (tmp)
4688 emit_insn (tmp);
4689 break;
4691 /* FALLTHRU */
4693 default:
4694 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4695 break;
4698 /* The noreturn predictor has been moved to the tree level. The rtl-level
4699 predictors estimate this branch about 20%, which isn't enough to get
4700 things moved out of line. Since this is the only extant case of adding
4701 a noreturn function at the rtl level, it doesn't seem worth doing ought
4702 except adding the prediction by hand. */
4703 tmp = get_last_insn ();
4704 if (JUMP_P (tmp))
4705 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4707 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4708 free_temp_slots ();
4709 emit_label (label);
4712 /* Start the RTL for a new function, and set variables used for
4713 emitting RTL.
4714 SUBR is the FUNCTION_DECL node.
4715 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4716 the function's parameters, which must be run at any return statement. */
4718 void
4719 expand_function_start (tree subr)
4721 /* Make sure volatile mem refs aren't considered
4722 valid operands of arithmetic insns. */
4723 init_recog_no_volatile ();
4725 crtl->profile
4726 = (profile_flag
4727 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4729 crtl->limit_stack
4730 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4732 /* Make the label for return statements to jump to. Do not special
4733 case machines with special return instructions -- they will be
4734 handled later during jump, ifcvt, or epilogue creation. */
4735 return_label = gen_label_rtx ();
4737 /* Initialize rtx used to return the value. */
4738 /* Do this before assign_parms so that we copy the struct value address
4739 before any library calls that assign parms might generate. */
4741 /* Decide whether to return the value in memory or in a register. */
4742 if (aggregate_value_p (DECL_RESULT (subr), subr))
4744 /* Returning something that won't go in a register. */
4745 rtx value_address = 0;
4747 #ifdef PCC_STATIC_STRUCT_RETURN
4748 if (cfun->returns_pcc_struct)
4750 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4751 value_address = assemble_static_space (size);
4753 else
4754 #endif
4756 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4757 /* Expect to be passed the address of a place to store the value.
4758 If it is passed as an argument, assign_parms will take care of
4759 it. */
4760 if (sv)
4762 value_address = gen_reg_rtx (Pmode);
4763 emit_move_insn (value_address, sv);
4766 if (value_address)
4768 rtx x = value_address;
4769 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4771 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4772 set_mem_attributes (x, DECL_RESULT (subr), 1);
4774 SET_DECL_RTL (DECL_RESULT (subr), x);
4777 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4778 /* If return mode is void, this decl rtl should not be used. */
4779 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4780 else
4782 /* Compute the return values into a pseudo reg, which we will copy
4783 into the true return register after the cleanups are done. */
4784 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4785 if (TYPE_MODE (return_type) != BLKmode
4786 && targetm.calls.return_in_msb (return_type))
4787 /* expand_function_end will insert the appropriate padding in
4788 this case. Use the return value's natural (unpadded) mode
4789 within the function proper. */
4790 SET_DECL_RTL (DECL_RESULT (subr),
4791 gen_reg_rtx (TYPE_MODE (return_type)));
4792 else
4794 /* In order to figure out what mode to use for the pseudo, we
4795 figure out what the mode of the eventual return register will
4796 actually be, and use that. */
4797 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4799 /* Structures that are returned in registers are not
4800 aggregate_value_p, so we may see a PARALLEL or a REG. */
4801 if (REG_P (hard_reg))
4802 SET_DECL_RTL (DECL_RESULT (subr),
4803 gen_reg_rtx (GET_MODE (hard_reg)));
4804 else
4806 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4807 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4811 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4812 result to the real return register(s). */
4813 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4816 /* Initialize rtx for parameters and local variables.
4817 In some cases this requires emitting insns. */
4818 assign_parms (subr);
4820 /* If function gets a static chain arg, store it. */
4821 if (cfun->static_chain_decl)
4823 tree parm = cfun->static_chain_decl;
4824 rtx local, chain, insn;
4826 local = gen_reg_rtx (Pmode);
4827 chain = targetm.calls.static_chain (current_function_decl, true);
4829 set_decl_incoming_rtl (parm, chain, false);
4830 SET_DECL_RTL (parm, local);
4831 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4833 insn = emit_move_insn (local, chain);
4835 /* Mark the register as eliminable, similar to parameters. */
4836 if (MEM_P (chain)
4837 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
4838 set_dst_reg_note (insn, REG_EQUIV, chain, local);
4841 /* If the function receives a non-local goto, then store the
4842 bits we need to restore the frame pointer. */
4843 if (cfun->nonlocal_goto_save_area)
4845 tree t_save;
4846 rtx r_save;
4848 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4849 gcc_assert (DECL_RTL_SET_P (var));
4851 t_save = build4 (ARRAY_REF,
4852 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
4853 cfun->nonlocal_goto_save_area,
4854 integer_zero_node, NULL_TREE, NULL_TREE);
4855 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4856 gcc_assert (GET_MODE (r_save) == Pmode);
4858 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4859 update_nonlocal_goto_save_area ();
4862 /* The following was moved from init_function_start.
4863 The move is supposed to make sdb output more accurate. */
4864 /* Indicate the beginning of the function body,
4865 as opposed to parm setup. */
4866 emit_note (NOTE_INSN_FUNCTION_BEG);
4868 gcc_assert (NOTE_P (get_last_insn ()));
4870 parm_birth_insn = get_last_insn ();
4872 if (crtl->profile)
4874 #ifdef PROFILE_HOOK
4875 PROFILE_HOOK (current_function_funcdef_no);
4876 #endif
4879 /* If we are doing generic stack checking, the probe should go here. */
4880 if (flag_stack_check == GENERIC_STACK_CHECK)
4881 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4884 /* Undo the effects of init_dummy_function_start. */
4885 void
4886 expand_dummy_function_end (void)
4888 gcc_assert (in_dummy_function);
4890 /* End any sequences that failed to be closed due to syntax errors. */
4891 while (in_sequence_p ())
4892 end_sequence ();
4894 /* Outside function body, can't compute type's actual size
4895 until next function's body starts. */
4897 free_after_parsing (cfun);
4898 free_after_compilation (cfun);
4899 pop_cfun ();
4900 in_dummy_function = false;
4903 /* Call DOIT for each hard register used as a return value from
4904 the current function. */
4906 void
4907 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4909 rtx outgoing = crtl->return_rtx;
4911 if (! outgoing)
4912 return;
4914 if (REG_P (outgoing))
4915 (*doit) (outgoing, arg);
4916 else if (GET_CODE (outgoing) == PARALLEL)
4918 int i;
4920 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4922 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4924 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4925 (*doit) (x, arg);
4930 static void
4931 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4933 emit_clobber (reg);
4936 void
4937 clobber_return_register (void)
4939 diddle_return_value (do_clobber_return_reg, NULL);
4941 /* In case we do use pseudo to return value, clobber it too. */
4942 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4944 tree decl_result = DECL_RESULT (current_function_decl);
4945 rtx decl_rtl = DECL_RTL (decl_result);
4946 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4948 do_clobber_return_reg (decl_rtl, NULL);
4953 static void
4954 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4956 emit_use (reg);
4959 static void
4960 use_return_register (void)
4962 diddle_return_value (do_use_return_reg, NULL);
4965 /* Possibly warn about unused parameters. */
4966 void
4967 do_warn_unused_parameter (tree fn)
4969 tree decl;
4971 for (decl = DECL_ARGUMENTS (fn);
4972 decl; decl = DECL_CHAIN (decl))
4973 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4974 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4975 && !TREE_NO_WARNING (decl))
4976 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4979 static GTY(()) rtx initial_trampoline;
4981 /* Generate RTL for the end of the current function. */
4983 void
4984 expand_function_end (void)
4986 rtx clobber_after;
4988 /* If arg_pointer_save_area was referenced only from a nested
4989 function, we will not have initialized it yet. Do that now. */
4990 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4991 get_arg_pointer_save_area ();
4993 /* If we are doing generic stack checking and this function makes calls,
4994 do a stack probe at the start of the function to ensure we have enough
4995 space for another stack frame. */
4996 if (flag_stack_check == GENERIC_STACK_CHECK)
4998 rtx insn, seq;
5000 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5001 if (CALL_P (insn))
5003 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5004 start_sequence ();
5005 if (STACK_CHECK_MOVING_SP)
5006 anti_adjust_stack_and_probe (max_frame_size, true);
5007 else
5008 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5009 seq = get_insns ();
5010 end_sequence ();
5011 set_insn_locations (seq, prologue_location);
5012 emit_insn_before (seq, stack_check_probe_note);
5013 break;
5017 /* End any sequences that failed to be closed due to syntax errors. */
5018 while (in_sequence_p ())
5019 end_sequence ();
5021 clear_pending_stack_adjust ();
5022 do_pending_stack_adjust ();
5024 /* Output a linenumber for the end of the function.
5025 SDB depends on this. */
5026 set_curr_insn_location (input_location);
5028 /* Before the return label (if any), clobber the return
5029 registers so that they are not propagated live to the rest of
5030 the function. This can only happen with functions that drop
5031 through; if there had been a return statement, there would
5032 have either been a return rtx, or a jump to the return label.
5034 We delay actual code generation after the current_function_value_rtx
5035 is computed. */
5036 clobber_after = get_last_insn ();
5038 /* Output the label for the actual return from the function. */
5039 emit_label (return_label);
5041 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5043 /* Let except.c know where it should emit the call to unregister
5044 the function context for sjlj exceptions. */
5045 if (flag_exceptions)
5046 sjlj_emit_function_exit_after (get_last_insn ());
5048 else
5050 /* We want to ensure that instructions that may trap are not
5051 moved into the epilogue by scheduling, because we don't
5052 always emit unwind information for the epilogue. */
5053 if (cfun->can_throw_non_call_exceptions)
5054 emit_insn (gen_blockage ());
5057 /* If this is an implementation of throw, do what's necessary to
5058 communicate between __builtin_eh_return and the epilogue. */
5059 expand_eh_return ();
5061 /* If scalar return value was computed in a pseudo-reg, or was a named
5062 return value that got dumped to the stack, copy that to the hard
5063 return register. */
5064 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5066 tree decl_result = DECL_RESULT (current_function_decl);
5067 rtx decl_rtl = DECL_RTL (decl_result);
5069 if (REG_P (decl_rtl)
5070 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5071 : DECL_REGISTER (decl_result))
5073 rtx real_decl_rtl = crtl->return_rtx;
5075 /* This should be set in assign_parms. */
5076 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5078 /* If this is a BLKmode structure being returned in registers,
5079 then use the mode computed in expand_return. Note that if
5080 decl_rtl is memory, then its mode may have been changed,
5081 but that crtl->return_rtx has not. */
5082 if (GET_MODE (real_decl_rtl) == BLKmode)
5083 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5085 /* If a non-BLKmode return value should be padded at the least
5086 significant end of the register, shift it left by the appropriate
5087 amount. BLKmode results are handled using the group load/store
5088 machinery. */
5089 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5090 && REG_P (real_decl_rtl)
5091 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5093 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5094 REGNO (real_decl_rtl)),
5095 decl_rtl);
5096 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5098 /* If a named return value dumped decl_return to memory, then
5099 we may need to re-do the PROMOTE_MODE signed/unsigned
5100 extension. */
5101 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5103 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5104 promote_function_mode (TREE_TYPE (decl_result),
5105 GET_MODE (decl_rtl), &unsignedp,
5106 TREE_TYPE (current_function_decl), 1);
5108 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5110 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5112 /* If expand_function_start has created a PARALLEL for decl_rtl,
5113 move the result to the real return registers. Otherwise, do
5114 a group load from decl_rtl for a named return. */
5115 if (GET_CODE (decl_rtl) == PARALLEL)
5116 emit_group_move (real_decl_rtl, decl_rtl);
5117 else
5118 emit_group_load (real_decl_rtl, decl_rtl,
5119 TREE_TYPE (decl_result),
5120 int_size_in_bytes (TREE_TYPE (decl_result)));
5122 /* In the case of complex integer modes smaller than a word, we'll
5123 need to generate some non-trivial bitfield insertions. Do that
5124 on a pseudo and not the hard register. */
5125 else if (GET_CODE (decl_rtl) == CONCAT
5126 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5127 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5129 int old_generating_concat_p;
5130 rtx tmp;
5132 old_generating_concat_p = generating_concat_p;
5133 generating_concat_p = 0;
5134 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5135 generating_concat_p = old_generating_concat_p;
5137 emit_move_insn (tmp, decl_rtl);
5138 emit_move_insn (real_decl_rtl, tmp);
5140 else
5141 emit_move_insn (real_decl_rtl, decl_rtl);
5145 /* If returning a structure, arrange to return the address of the value
5146 in a place where debuggers expect to find it.
5148 If returning a structure PCC style,
5149 the caller also depends on this value.
5150 And cfun->returns_pcc_struct is not necessarily set. */
5151 if (cfun->returns_struct
5152 || cfun->returns_pcc_struct)
5154 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5155 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5156 rtx outgoing;
5158 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5159 type = TREE_TYPE (type);
5160 else
5161 value_address = XEXP (value_address, 0);
5163 outgoing = targetm.calls.function_value (build_pointer_type (type),
5164 current_function_decl, true);
5166 /* Mark this as a function return value so integrate will delete the
5167 assignment and USE below when inlining this function. */
5168 REG_FUNCTION_VALUE_P (outgoing) = 1;
5170 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5171 value_address = convert_memory_address (GET_MODE (outgoing),
5172 value_address);
5174 emit_move_insn (outgoing, value_address);
5176 /* Show return register used to hold result (in this case the address
5177 of the result. */
5178 crtl->return_rtx = outgoing;
5181 /* Emit the actual code to clobber return register. */
5183 rtx seq;
5185 start_sequence ();
5186 clobber_return_register ();
5187 seq = get_insns ();
5188 end_sequence ();
5190 emit_insn_after (seq, clobber_after);
5193 /* Output the label for the naked return from the function. */
5194 if (naked_return_label)
5195 emit_label (naked_return_label);
5197 /* @@@ This is a kludge. We want to ensure that instructions that
5198 may trap are not moved into the epilogue by scheduling, because
5199 we don't always emit unwind information for the epilogue. */
5200 if (cfun->can_throw_non_call_exceptions
5201 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5202 emit_insn (gen_blockage ());
5204 /* If stack protection is enabled for this function, check the guard. */
5205 if (crtl->stack_protect_guard)
5206 stack_protect_epilogue ();
5208 /* If we had calls to alloca, and this machine needs
5209 an accurate stack pointer to exit the function,
5210 insert some code to save and restore the stack pointer. */
5211 if (! EXIT_IGNORE_STACK
5212 && cfun->calls_alloca)
5214 rtx tem = 0, seq;
5216 start_sequence ();
5217 emit_stack_save (SAVE_FUNCTION, &tem);
5218 seq = get_insns ();
5219 end_sequence ();
5220 emit_insn_before (seq, parm_birth_insn);
5222 emit_stack_restore (SAVE_FUNCTION, tem);
5225 /* ??? This should no longer be necessary since stupid is no longer with
5226 us, but there are some parts of the compiler (eg reload_combine, and
5227 sh mach_dep_reorg) that still try and compute their own lifetime info
5228 instead of using the general framework. */
5229 use_return_register ();
5233 get_arg_pointer_save_area (void)
5235 rtx ret = arg_pointer_save_area;
5237 if (! ret)
5239 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5240 arg_pointer_save_area = ret;
5243 if (! crtl->arg_pointer_save_area_init)
5245 rtx seq;
5247 /* Save the arg pointer at the beginning of the function. The
5248 generated stack slot may not be a valid memory address, so we
5249 have to check it and fix it if necessary. */
5250 start_sequence ();
5251 emit_move_insn (validize_mem (ret),
5252 crtl->args.internal_arg_pointer);
5253 seq = get_insns ();
5254 end_sequence ();
5256 push_topmost_sequence ();
5257 emit_insn_after (seq, entry_of_function ());
5258 pop_topmost_sequence ();
5260 crtl->arg_pointer_save_area_init = true;
5263 return ret;
5266 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5267 for the first time. */
5269 static void
5270 record_insns (rtx insns, rtx end, htab_t *hashp)
5272 rtx tmp;
5273 htab_t hash = *hashp;
5275 if (hash == NULL)
5276 *hashp = hash
5277 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5279 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5281 void **slot = htab_find_slot (hash, tmp, INSERT);
5282 gcc_assert (*slot == NULL);
5283 *slot = tmp;
5287 /* INSN has been duplicated or replaced by as COPY, perhaps by duplicating a
5288 basic block, splitting or peepholes. If INSN is a prologue or epilogue
5289 insn, then record COPY as well. */
5291 void
5292 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5294 htab_t hash;
5295 void **slot;
5297 hash = epilogue_insn_hash;
5298 if (!hash || !htab_find (hash, insn))
5300 hash = prologue_insn_hash;
5301 if (!hash || !htab_find (hash, insn))
5302 return;
5305 slot = htab_find_slot (hash, copy, INSERT);
5306 gcc_assert (*slot == NULL);
5307 *slot = copy;
5310 /* Set the location of the insn chain starting at INSN to LOC. */
5311 static void
5312 set_insn_locations (rtx insn, int loc)
5314 while (insn != NULL_RTX)
5316 if (INSN_P (insn))
5317 INSN_LOCATION (insn) = loc;
5318 insn = NEXT_INSN (insn);
5322 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5323 we can be running after reorg, SEQUENCE rtl is possible. */
5325 static bool
5326 contains (const_rtx insn, htab_t hash)
5328 if (hash == NULL)
5329 return false;
5331 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5333 int i;
5334 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5335 if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
5336 return true;
5337 return false;
5340 return htab_find (hash, insn) != NULL;
5344 prologue_epilogue_contains (const_rtx insn)
5346 if (contains (insn, prologue_insn_hash))
5347 return 1;
5348 if (contains (insn, epilogue_insn_hash))
5349 return 1;
5350 return 0;
5353 #ifdef HAVE_simple_return
5355 /* Return true if INSN requires the stack frame to be set up.
5356 PROLOGUE_USED contains the hard registers used in the function
5357 prologue. SET_UP_BY_PROLOGUE is the set of registers we expect the
5358 prologue to set up for the function. */
5359 bool
5360 requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used,
5361 HARD_REG_SET set_up_by_prologue)
5363 df_ref *df_rec;
5364 HARD_REG_SET hardregs;
5365 unsigned regno;
5367 if (CALL_P (insn))
5368 return !SIBLING_CALL_P (insn);
5370 /* We need a frame to get the unique CFA expected by the unwinder. */
5371 if (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
5372 return true;
5374 CLEAR_HARD_REG_SET (hardregs);
5375 for (df_rec = DF_INSN_DEFS (insn); *df_rec; df_rec++)
5377 rtx dreg = DF_REF_REG (*df_rec);
5379 if (!REG_P (dreg))
5380 continue;
5382 add_to_hard_reg_set (&hardregs, GET_MODE (dreg),
5383 REGNO (dreg));
5385 if (hard_reg_set_intersect_p (hardregs, prologue_used))
5386 return true;
5387 AND_COMPL_HARD_REG_SET (hardregs, call_used_reg_set);
5388 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5389 if (TEST_HARD_REG_BIT (hardregs, regno)
5390 && df_regs_ever_live_p (regno))
5391 return true;
5393 for (df_rec = DF_INSN_USES (insn); *df_rec; df_rec++)
5395 rtx reg = DF_REF_REG (*df_rec);
5397 if (!REG_P (reg))
5398 continue;
5400 add_to_hard_reg_set (&hardregs, GET_MODE (reg),
5401 REGNO (reg));
5403 if (hard_reg_set_intersect_p (hardregs, set_up_by_prologue))
5404 return true;
5406 return false;
5409 /* See whether BB has a single successor that uses [REGNO, END_REGNO),
5410 and if BB is its only predecessor. Return that block if so,
5411 otherwise return null. */
5413 static basic_block
5414 next_block_for_reg (basic_block bb, int regno, int end_regno)
5416 edge e, live_edge;
5417 edge_iterator ei;
5418 bitmap live;
5419 int i;
5421 live_edge = NULL;
5422 FOR_EACH_EDGE (e, ei, bb->succs)
5424 live = df_get_live_in (e->dest);
5425 for (i = regno; i < end_regno; i++)
5426 if (REGNO_REG_SET_P (live, i))
5428 if (live_edge && live_edge != e)
5429 return NULL;
5430 live_edge = e;
5434 /* We can sometimes encounter dead code. Don't try to move it
5435 into the exit block. */
5436 if (!live_edge || live_edge->dest == EXIT_BLOCK_PTR)
5437 return NULL;
5439 /* Reject targets of abnormal edges. This is needed for correctness
5440 on ports like Alpha and MIPS, whose pic_offset_table_rtx can die on
5441 exception edges even though it is generally treated as call-saved
5442 for the majority of the compilation. Moving across abnormal edges
5443 isn't going to be interesting for shrink-wrap usage anyway. */
5444 if (live_edge->flags & EDGE_ABNORMAL)
5445 return NULL;
5447 if (EDGE_COUNT (live_edge->dest->preds) > 1)
5448 return NULL;
5450 return live_edge->dest;
5453 /* Try to move INSN from BB to a successor. Return true on success.
5454 USES and DEFS are the set of registers that are used and defined
5455 after INSN in BB. */
5457 static bool
5458 move_insn_for_shrink_wrap (basic_block bb, rtx insn,
5459 const HARD_REG_SET uses,
5460 const HARD_REG_SET defs)
5462 rtx set, src, dest;
5463 bitmap live_out, live_in, bb_uses, bb_defs;
5464 unsigned int i, dregno, end_dregno, sregno, end_sregno;
5465 basic_block next_block;
5467 /* Look for a simple register copy. */
5468 set = single_set (insn);
5469 if (!set)
5470 return false;
5471 src = SET_SRC (set);
5472 dest = SET_DEST (set);
5473 if (!REG_P (dest) || !REG_P (src))
5474 return false;
5476 /* Make sure that the source register isn't defined later in BB. */
5477 sregno = REGNO (src);
5478 end_sregno = END_REGNO (src);
5479 if (overlaps_hard_reg_set_p (defs, GET_MODE (src), sregno))
5480 return false;
5482 /* Make sure that the destination register isn't referenced later in BB. */
5483 dregno = REGNO (dest);
5484 end_dregno = END_REGNO (dest);
5485 if (overlaps_hard_reg_set_p (uses, GET_MODE (dest), dregno)
5486 || overlaps_hard_reg_set_p (defs, GET_MODE (dest), dregno))
5487 return false;
5489 /* See whether there is a successor block to which we could move INSN. */
5490 next_block = next_block_for_reg (bb, dregno, end_dregno);
5491 if (!next_block)
5492 return false;
5494 /* At this point we are committed to moving INSN, but let's try to
5495 move it as far as we can. */
5498 live_out = df_get_live_out (bb);
5499 live_in = df_get_live_in (next_block);
5500 bb = next_block;
5502 /* Check whether BB uses DEST or clobbers DEST. We need to add
5503 INSN to BB if so. Either way, DEST is no longer live on entry,
5504 except for any part that overlaps SRC (next loop). */
5505 bb_uses = &DF_LR_BB_INFO (bb)->use;
5506 bb_defs = &DF_LR_BB_INFO (bb)->def;
5507 if (df_live)
5509 for (i = dregno; i < end_dregno; i++)
5511 if (REGNO_REG_SET_P (bb_uses, i) || REGNO_REG_SET_P (bb_defs, i)
5512 || REGNO_REG_SET_P (&DF_LIVE_BB_INFO (bb)->gen, i))
5513 next_block = NULL;
5514 CLEAR_REGNO_REG_SET (live_out, i);
5515 CLEAR_REGNO_REG_SET (live_in, i);
5518 /* Check whether BB clobbers SRC. We need to add INSN to BB if so.
5519 Either way, SRC is now live on entry. */
5520 for (i = sregno; i < end_sregno; i++)
5522 if (REGNO_REG_SET_P (bb_defs, i)
5523 || REGNO_REG_SET_P (&DF_LIVE_BB_INFO (bb)->gen, i))
5524 next_block = NULL;
5525 SET_REGNO_REG_SET (live_out, i);
5526 SET_REGNO_REG_SET (live_in, i);
5529 else
5531 /* DF_LR_BB_INFO (bb)->def does not comprise the DF_REF_PARTIAL and
5532 DF_REF_CONDITIONAL defs. So if DF_LIVE doesn't exist, i.e.
5533 at -O1, just give up searching NEXT_BLOCK. */
5534 next_block = NULL;
5535 for (i = dregno; i < end_dregno; i++)
5537 CLEAR_REGNO_REG_SET (live_out, i);
5538 CLEAR_REGNO_REG_SET (live_in, i);
5541 for (i = sregno; i < end_sregno; i++)
5543 SET_REGNO_REG_SET (live_out, i);
5544 SET_REGNO_REG_SET (live_in, i);
5548 /* If we don't need to add the move to BB, look for a single
5549 successor block. */
5550 if (next_block)
5551 next_block = next_block_for_reg (next_block, dregno, end_dregno);
5553 while (next_block);
5555 /* BB now defines DEST. It only uses the parts of DEST that overlap SRC
5556 (next loop). */
5557 for (i = dregno; i < end_dregno; i++)
5559 CLEAR_REGNO_REG_SET (bb_uses, i);
5560 SET_REGNO_REG_SET (bb_defs, i);
5563 /* BB now uses SRC. */
5564 for (i = sregno; i < end_sregno; i++)
5565 SET_REGNO_REG_SET (bb_uses, i);
5567 emit_insn_after (PATTERN (insn), bb_note (bb));
5568 delete_insn (insn);
5569 return true;
5572 /* Look for register copies in the first block of the function, and move
5573 them down into successor blocks if the register is used only on one
5574 path. This exposes more opportunities for shrink-wrapping. These
5575 kinds of sets often occur when incoming argument registers are moved
5576 to call-saved registers because their values are live across one or
5577 more calls during the function. */
5579 static void
5580 prepare_shrink_wrap (basic_block entry_block)
5582 rtx insn, curr, x;
5583 HARD_REG_SET uses, defs;
5584 df_ref *ref;
5586 CLEAR_HARD_REG_SET (uses);
5587 CLEAR_HARD_REG_SET (defs);
5588 FOR_BB_INSNS_REVERSE_SAFE (entry_block, insn, curr)
5589 if (NONDEBUG_INSN_P (insn)
5590 && !move_insn_for_shrink_wrap (entry_block, insn, uses, defs))
5592 /* Add all defined registers to DEFs. */
5593 for (ref = DF_INSN_DEFS (insn); *ref; ref++)
5595 x = DF_REF_REG (*ref);
5596 if (REG_P (x) && HARD_REGISTER_P (x))
5597 SET_HARD_REG_BIT (defs, REGNO (x));
5600 /* Add all used registers to USESs. */
5601 for (ref = DF_INSN_USES (insn); *ref; ref++)
5603 x = DF_REF_REG (*ref);
5604 if (REG_P (x) && HARD_REGISTER_P (x))
5605 SET_HARD_REG_BIT (uses, REGNO (x));
5610 #endif
5612 #ifdef HAVE_return
5613 /* Insert use of return register before the end of BB. */
5615 static void
5616 emit_use_return_register_into_block (basic_block bb)
5618 rtx seq;
5619 start_sequence ();
5620 use_return_register ();
5621 seq = get_insns ();
5622 end_sequence ();
5623 emit_insn_before (seq, BB_END (bb));
5627 /* Create a return pattern, either simple_return or return, depending on
5628 simple_p. */
5630 static rtx
5631 gen_return_pattern (bool simple_p)
5633 #ifdef HAVE_simple_return
5634 return simple_p ? gen_simple_return () : gen_return ();
5635 #else
5636 gcc_assert (!simple_p);
5637 return gen_return ();
5638 #endif
5641 /* Insert an appropriate return pattern at the end of block BB. This
5642 also means updating block_for_insn appropriately. SIMPLE_P is
5643 the same as in gen_return_pattern and passed to it. */
5645 static void
5646 emit_return_into_block (bool simple_p, basic_block bb)
5648 rtx jump, pat;
5649 jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
5650 pat = PATTERN (jump);
5651 if (GET_CODE (pat) == PARALLEL)
5652 pat = XVECEXP (pat, 0, 0);
5653 gcc_assert (ANY_RETURN_P (pat));
5654 JUMP_LABEL (jump) = pat;
5656 #endif
5658 /* Set JUMP_LABEL for a return insn. */
5660 void
5661 set_return_jump_label (rtx returnjump)
5663 rtx pat = PATTERN (returnjump);
5664 if (GET_CODE (pat) == PARALLEL)
5665 pat = XVECEXP (pat, 0, 0);
5666 if (ANY_RETURN_P (pat))
5667 JUMP_LABEL (returnjump) = pat;
5668 else
5669 JUMP_LABEL (returnjump) = ret_rtx;
5672 #ifdef HAVE_simple_return
5673 /* Create a copy of BB instructions and insert at BEFORE. Redirect
5674 preds of BB to COPY_BB if they don't appear in NEED_PROLOGUE. */
5675 static void
5676 dup_block_and_redirect (basic_block bb, basic_block copy_bb, rtx before,
5677 bitmap_head *need_prologue)
5679 edge_iterator ei;
5680 edge e;
5681 rtx insn = BB_END (bb);
5683 /* We know BB has a single successor, so there is no need to copy a
5684 simple jump at the end of BB. */
5685 if (simplejump_p (insn))
5686 insn = PREV_INSN (insn);
5688 start_sequence ();
5689 duplicate_insn_chain (BB_HEAD (bb), insn);
5690 if (dump_file)
5692 unsigned count = 0;
5693 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5694 if (active_insn_p (insn))
5695 ++count;
5696 fprintf (dump_file, "Duplicating bb %d to bb %d, %u active insns.\n",
5697 bb->index, copy_bb->index, count);
5699 insn = get_insns ();
5700 end_sequence ();
5701 emit_insn_before (insn, before);
5703 /* Redirect all the paths that need no prologue into copy_bb. */
5704 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
5705 if (!bitmap_bit_p (need_prologue, e->src->index))
5707 int freq = EDGE_FREQUENCY (e);
5708 copy_bb->count += e->count;
5709 copy_bb->frequency += EDGE_FREQUENCY (e);
5710 e->dest->count -= e->count;
5711 if (e->dest->count < 0)
5712 e->dest->count = 0;
5713 e->dest->frequency -= freq;
5714 if (e->dest->frequency < 0)
5715 e->dest->frequency = 0;
5716 redirect_edge_and_branch_force (e, copy_bb);
5717 continue;
5719 else
5720 ei_next (&ei);
5722 #endif
5724 #if defined (HAVE_return) || defined (HAVE_simple_return)
5725 /* Return true if there are any active insns between HEAD and TAIL. */
5726 static bool
5727 active_insn_between (rtx head, rtx tail)
5729 while (tail)
5731 if (active_insn_p (tail))
5732 return true;
5733 if (tail == head)
5734 return false;
5735 tail = PREV_INSN (tail);
5737 return false;
5740 /* LAST_BB is a block that exits, and empty of active instructions.
5741 Examine its predecessors for jumps that can be converted to
5742 (conditional) returns. */
5743 static vec<edge>
5744 convert_jumps_to_returns (basic_block last_bb, bool simple_p,
5745 vec<edge> unconverted ATTRIBUTE_UNUSED)
5747 int i;
5748 basic_block bb;
5749 rtx label;
5750 edge_iterator ei;
5751 edge e;
5752 vec<basic_block> src_bbs;
5754 src_bbs.create (EDGE_COUNT (last_bb->preds));
5755 FOR_EACH_EDGE (e, ei, last_bb->preds)
5756 if (e->src != ENTRY_BLOCK_PTR)
5757 src_bbs.quick_push (e->src);
5759 label = BB_HEAD (last_bb);
5761 FOR_EACH_VEC_ELT (src_bbs, i, bb)
5763 rtx jump = BB_END (bb);
5765 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5766 continue;
5768 e = find_edge (bb, last_bb);
5770 /* If we have an unconditional jump, we can replace that
5771 with a simple return instruction. */
5772 if (simplejump_p (jump))
5774 /* The use of the return register might be present in the exit
5775 fallthru block. Either:
5776 - removing the use is safe, and we should remove the use in
5777 the exit fallthru block, or
5778 - removing the use is not safe, and we should add it here.
5779 For now, we conservatively choose the latter. Either of the
5780 2 helps in crossjumping. */
5781 emit_use_return_register_into_block (bb);
5783 emit_return_into_block (simple_p, bb);
5784 delete_insn (jump);
5787 /* If we have a conditional jump branching to the last
5788 block, we can try to replace that with a conditional
5789 return instruction. */
5790 else if (condjump_p (jump))
5792 rtx dest;
5794 if (simple_p)
5795 dest = simple_return_rtx;
5796 else
5797 dest = ret_rtx;
5798 if (!redirect_jump (jump, dest, 0))
5800 #ifdef HAVE_simple_return
5801 if (simple_p)
5803 if (dump_file)
5804 fprintf (dump_file,
5805 "Failed to redirect bb %d branch.\n", bb->index);
5806 unconverted.safe_push (e);
5808 #endif
5809 continue;
5812 /* See comment in simplejump_p case above. */
5813 emit_use_return_register_into_block (bb);
5815 /* If this block has only one successor, it both jumps
5816 and falls through to the fallthru block, so we can't
5817 delete the edge. */
5818 if (single_succ_p (bb))
5819 continue;
5821 else
5823 #ifdef HAVE_simple_return
5824 if (simple_p)
5826 if (dump_file)
5827 fprintf (dump_file,
5828 "Failed to redirect bb %d branch.\n", bb->index);
5829 unconverted.safe_push (e);
5831 #endif
5832 continue;
5835 /* Fix up the CFG for the successful change we just made. */
5836 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5837 e->flags &= ~EDGE_CROSSING;
5839 src_bbs.release ();
5840 return unconverted;
5843 /* Emit a return insn for the exit fallthru block. */
5844 static basic_block
5845 emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5847 basic_block last_bb = exit_fallthru_edge->src;
5849 if (JUMP_P (BB_END (last_bb)))
5851 last_bb = split_edge (exit_fallthru_edge);
5852 exit_fallthru_edge = single_succ_edge (last_bb);
5854 emit_barrier_after (BB_END (last_bb));
5855 emit_return_into_block (simple_p, last_bb);
5856 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5857 return last_bb;
5859 #endif
5862 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5863 this into place with notes indicating where the prologue ends and where
5864 the epilogue begins. Update the basic block information when possible.
5866 Notes on epilogue placement:
5867 There are several kinds of edges to the exit block:
5868 * a single fallthru edge from LAST_BB
5869 * possibly, edges from blocks containing sibcalls
5870 * possibly, fake edges from infinite loops
5872 The epilogue is always emitted on the fallthru edge from the last basic
5873 block in the function, LAST_BB, into the exit block.
5875 If LAST_BB is empty except for a label, it is the target of every
5876 other basic block in the function that ends in a return. If a
5877 target has a return or simple_return pattern (possibly with
5878 conditional variants), these basic blocks can be changed so that a
5879 return insn is emitted into them, and their target is adjusted to
5880 the real exit block.
5882 Notes on shrink wrapping: We implement a fairly conservative
5883 version of shrink-wrapping rather than the textbook one. We only
5884 generate a single prologue and a single epilogue. This is
5885 sufficient to catch a number of interesting cases involving early
5886 exits.
5888 First, we identify the blocks that require the prologue to occur before
5889 them. These are the ones that modify a call-saved register, or reference
5890 any of the stack or frame pointer registers. To simplify things, we then
5891 mark everything reachable from these blocks as also requiring a prologue.
5892 This takes care of loops automatically, and avoids the need to examine
5893 whether MEMs reference the frame, since it is sufficient to check for
5894 occurrences of the stack or frame pointer.
5896 We then compute the set of blocks for which the need for a prologue
5897 is anticipatable (borrowing terminology from the shrink-wrapping
5898 description in Muchnick's book). These are the blocks which either
5899 require a prologue themselves, or those that have only successors
5900 where the prologue is anticipatable. The prologue needs to be
5901 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5902 is not. For the moment, we ensure that only one such edge exists.
5904 The epilogue is placed as described above, but we make a
5905 distinction between inserting return and simple_return patterns
5906 when modifying other blocks that end in a return. Blocks that end
5907 in a sibcall omit the sibcall_epilogue if the block is not in
5908 ANTIC. */
5910 static void
5911 thread_prologue_and_epilogue_insns (void)
5913 bool inserted;
5914 #ifdef HAVE_simple_return
5915 vec<edge> unconverted_simple_returns = vNULL;
5916 bool nonempty_prologue;
5917 bitmap_head bb_flags;
5918 unsigned max_grow_size;
5919 #endif
5920 rtx returnjump;
5921 rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
5922 rtx prologue_seq ATTRIBUTE_UNUSED, split_prologue_seq ATTRIBUTE_UNUSED;
5923 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
5924 edge_iterator ei;
5926 df_analyze ();
5928 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
5930 inserted = false;
5931 seq = NULL_RTX;
5932 epilogue_end = NULL_RTX;
5933 returnjump = NULL_RTX;
5935 /* Can't deal with multiple successors of the entry block at the
5936 moment. Function should always have at least one entry
5937 point. */
5938 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5939 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
5940 orig_entry_edge = entry_edge;
5942 split_prologue_seq = NULL_RTX;
5943 if (flag_split_stack
5944 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5945 == NULL))
5947 #ifndef HAVE_split_stack_prologue
5948 gcc_unreachable ();
5949 #else
5950 gcc_assert (HAVE_split_stack_prologue);
5952 start_sequence ();
5953 emit_insn (gen_split_stack_prologue ());
5954 split_prologue_seq = get_insns ();
5955 end_sequence ();
5957 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5958 set_insn_locations (split_prologue_seq, prologue_location);
5959 #endif
5962 prologue_seq = NULL_RTX;
5963 #ifdef HAVE_prologue
5964 if (HAVE_prologue)
5966 start_sequence ();
5967 seq = gen_prologue ();
5968 emit_insn (seq);
5970 /* Insert an explicit USE for the frame pointer
5971 if the profiling is on and the frame pointer is required. */
5972 if (crtl->profile && frame_pointer_needed)
5973 emit_use (hard_frame_pointer_rtx);
5975 /* Retain a map of the prologue insns. */
5976 record_insns (seq, NULL, &prologue_insn_hash);
5977 emit_note (NOTE_INSN_PROLOGUE_END);
5979 /* Ensure that instructions are not moved into the prologue when
5980 profiling is on. The call to the profiling routine can be
5981 emitted within the live range of a call-clobbered register. */
5982 if (!targetm.profile_before_prologue () && crtl->profile)
5983 emit_insn (gen_blockage ());
5985 prologue_seq = get_insns ();
5986 end_sequence ();
5987 set_insn_locations (prologue_seq, prologue_location);
5989 #endif
5991 #ifdef HAVE_simple_return
5992 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5994 /* Try to perform a kind of shrink-wrapping, making sure the
5995 prologue/epilogue is emitted only around those parts of the
5996 function that require it. */
5998 nonempty_prologue = false;
5999 for (seq = prologue_seq; seq; seq = NEXT_INSN (seq))
6000 if (!NOTE_P (seq) || NOTE_KIND (seq) != NOTE_INSN_PROLOGUE_END)
6002 nonempty_prologue = true;
6003 break;
6006 if (flag_shrink_wrap && HAVE_simple_return
6007 && (targetm.profile_before_prologue () || !crtl->profile)
6008 && nonempty_prologue && !crtl->calls_eh_return)
6010 HARD_REG_SET prologue_clobbered, prologue_used, live_on_edge;
6011 struct hard_reg_set_container set_up_by_prologue;
6012 rtx p_insn;
6013 vec<basic_block> vec;
6014 basic_block bb;
6015 bitmap_head bb_antic_flags;
6016 bitmap_head bb_on_list;
6017 bitmap_head bb_tail;
6019 if (dump_file)
6020 fprintf (dump_file, "Attempting shrink-wrapping optimization.\n");
6022 /* Compute the registers set and used in the prologue. */
6023 CLEAR_HARD_REG_SET (prologue_clobbered);
6024 CLEAR_HARD_REG_SET (prologue_used);
6025 for (p_insn = prologue_seq; p_insn; p_insn = NEXT_INSN (p_insn))
6027 HARD_REG_SET this_used;
6028 if (!NONDEBUG_INSN_P (p_insn))
6029 continue;
6031 CLEAR_HARD_REG_SET (this_used);
6032 note_uses (&PATTERN (p_insn), record_hard_reg_uses,
6033 &this_used);
6034 AND_COMPL_HARD_REG_SET (this_used, prologue_clobbered);
6035 IOR_HARD_REG_SET (prologue_used, this_used);
6036 note_stores (PATTERN (p_insn), record_hard_reg_sets,
6037 &prologue_clobbered);
6040 prepare_shrink_wrap (entry_edge->dest);
6042 bitmap_initialize (&bb_antic_flags, &bitmap_default_obstack);
6043 bitmap_initialize (&bb_on_list, &bitmap_default_obstack);
6044 bitmap_initialize (&bb_tail, &bitmap_default_obstack);
6046 /* Find the set of basic blocks that require a stack frame,
6047 and blocks that are too big to be duplicated. */
6049 vec.create (n_basic_blocks);
6051 CLEAR_HARD_REG_SET (set_up_by_prologue.set);
6052 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
6053 STACK_POINTER_REGNUM);
6054 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode, ARG_POINTER_REGNUM);
6055 if (frame_pointer_needed)
6056 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
6057 HARD_FRAME_POINTER_REGNUM);
6058 if (pic_offset_table_rtx)
6059 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
6060 PIC_OFFSET_TABLE_REGNUM);
6061 if (crtl->drap_reg)
6062 add_to_hard_reg_set (&set_up_by_prologue.set,
6063 GET_MODE (crtl->drap_reg),
6064 REGNO (crtl->drap_reg));
6065 if (targetm.set_up_by_prologue)
6066 targetm.set_up_by_prologue (&set_up_by_prologue);
6068 /* We don't use a different max size depending on
6069 optimize_bb_for_speed_p because increasing shrink-wrapping
6070 opportunities by duplicating tail blocks can actually result
6071 in an overall decrease in code size. */
6072 max_grow_size = get_uncond_jump_length ();
6073 max_grow_size *= PARAM_VALUE (PARAM_MAX_GROW_COPY_BB_INSNS);
6075 FOR_EACH_BB (bb)
6077 rtx insn;
6078 unsigned size = 0;
6080 FOR_BB_INSNS (bb, insn)
6081 if (NONDEBUG_INSN_P (insn))
6083 if (requires_stack_frame_p (insn, prologue_used,
6084 set_up_by_prologue.set))
6086 if (bb == entry_edge->dest)
6087 goto fail_shrinkwrap;
6088 bitmap_set_bit (&bb_flags, bb->index);
6089 vec.quick_push (bb);
6090 break;
6092 else if (size <= max_grow_size)
6094 size += get_attr_min_length (insn);
6095 if (size > max_grow_size)
6096 bitmap_set_bit (&bb_on_list, bb->index);
6101 /* Blocks that really need a prologue, or are too big for tails. */
6102 bitmap_ior_into (&bb_on_list, &bb_flags);
6104 /* For every basic block that needs a prologue, mark all blocks
6105 reachable from it, so as to ensure they are also seen as
6106 requiring a prologue. */
6107 while (!vec.is_empty ())
6109 basic_block tmp_bb = vec.pop ();
6111 FOR_EACH_EDGE (e, ei, tmp_bb->succs)
6112 if (e->dest != EXIT_BLOCK_PTR
6113 && bitmap_set_bit (&bb_flags, e->dest->index))
6114 vec.quick_push (e->dest);
6117 /* Find the set of basic blocks that need no prologue, have a
6118 single successor, can be duplicated, meet a max size
6119 requirement, and go to the exit via like blocks. */
6120 vec.quick_push (EXIT_BLOCK_PTR);
6121 while (!vec.is_empty ())
6123 basic_block tmp_bb = vec.pop ();
6125 FOR_EACH_EDGE (e, ei, tmp_bb->preds)
6126 if (single_succ_p (e->src)
6127 && !bitmap_bit_p (&bb_on_list, e->src->index)
6128 && can_duplicate_block_p (e->src))
6130 edge pe;
6131 edge_iterator pei;
6133 /* If there is predecessor of e->src which doesn't
6134 need prologue and the edge is complex,
6135 we might not be able to redirect the branch
6136 to a copy of e->src. */
6137 FOR_EACH_EDGE (pe, pei, e->src->preds)
6138 if ((pe->flags & EDGE_COMPLEX) != 0
6139 && !bitmap_bit_p (&bb_flags, pe->src->index))
6140 break;
6141 if (pe == NULL && bitmap_set_bit (&bb_tail, e->src->index))
6142 vec.quick_push (e->src);
6146 /* Now walk backwards from every block that is marked as needing
6147 a prologue to compute the bb_antic_flags bitmap. Exclude
6148 tail blocks; They can be duplicated to be used on paths not
6149 needing a prologue. */
6150 bitmap_clear (&bb_on_list);
6151 bitmap_and_compl (&bb_antic_flags, &bb_flags, &bb_tail);
6152 FOR_EACH_BB (bb)
6154 if (!bitmap_bit_p (&bb_antic_flags, bb->index))
6155 continue;
6156 FOR_EACH_EDGE (e, ei, bb->preds)
6157 if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
6158 && bitmap_set_bit (&bb_on_list, e->src->index))
6159 vec.quick_push (e->src);
6161 while (!vec.is_empty ())
6163 basic_block tmp_bb = vec.pop ();
6164 bool all_set = true;
6166 bitmap_clear_bit (&bb_on_list, tmp_bb->index);
6167 FOR_EACH_EDGE (e, ei, tmp_bb->succs)
6168 if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
6170 all_set = false;
6171 break;
6174 if (all_set)
6176 bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
6177 FOR_EACH_EDGE (e, ei, tmp_bb->preds)
6178 if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
6179 && bitmap_set_bit (&bb_on_list, e->src->index))
6180 vec.quick_push (e->src);
6183 /* Find exactly one edge that leads to a block in ANTIC from
6184 a block that isn't. */
6185 if (!bitmap_bit_p (&bb_antic_flags, entry_edge->dest->index))
6186 FOR_EACH_BB (bb)
6188 if (!bitmap_bit_p (&bb_antic_flags, bb->index))
6189 continue;
6190 FOR_EACH_EDGE (e, ei, bb->preds)
6191 if (!bitmap_bit_p (&bb_antic_flags, e->src->index))
6193 if (entry_edge != orig_entry_edge)
6195 entry_edge = orig_entry_edge;
6196 if (dump_file)
6197 fprintf (dump_file, "More than one candidate edge.\n");
6198 goto fail_shrinkwrap;
6200 if (dump_file)
6201 fprintf (dump_file, "Found candidate edge for "
6202 "shrink-wrapping, %d->%d.\n", e->src->index,
6203 e->dest->index);
6204 entry_edge = e;
6208 if (entry_edge != orig_entry_edge)
6210 /* Test whether the prologue is known to clobber any register
6211 (other than FP or SP) which are live on the edge. */
6212 CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM);
6213 if (frame_pointer_needed)
6214 CLEAR_HARD_REG_BIT (prologue_clobbered, HARD_FRAME_POINTER_REGNUM);
6215 REG_SET_TO_HARD_REG_SET (live_on_edge,
6216 df_get_live_in (entry_edge->dest));
6217 if (hard_reg_set_intersect_p (live_on_edge, prologue_clobbered))
6219 entry_edge = orig_entry_edge;
6220 if (dump_file)
6221 fprintf (dump_file,
6222 "Shrink-wrapping aborted due to clobber.\n");
6225 if (entry_edge != orig_entry_edge)
6227 crtl->shrink_wrapped = true;
6228 if (dump_file)
6229 fprintf (dump_file, "Performing shrink-wrapping.\n");
6231 /* Find tail blocks reachable from both blocks needing a
6232 prologue and blocks not needing a prologue. */
6233 if (!bitmap_empty_p (&bb_tail))
6234 FOR_EACH_BB (bb)
6236 bool some_pro, some_no_pro;
6237 if (!bitmap_bit_p (&bb_tail, bb->index))
6238 continue;
6239 some_pro = some_no_pro = false;
6240 FOR_EACH_EDGE (e, ei, bb->preds)
6242 if (bitmap_bit_p (&bb_flags, e->src->index))
6243 some_pro = true;
6244 else
6245 some_no_pro = true;
6247 if (some_pro && some_no_pro)
6248 vec.quick_push (bb);
6249 else
6250 bitmap_clear_bit (&bb_tail, bb->index);
6252 /* Find the head of each tail. */
6253 while (!vec.is_empty ())
6255 basic_block tbb = vec.pop ();
6257 if (!bitmap_bit_p (&bb_tail, tbb->index))
6258 continue;
6260 while (single_succ_p (tbb))
6262 tbb = single_succ (tbb);
6263 bitmap_clear_bit (&bb_tail, tbb->index);
6266 /* Now duplicate the tails. */
6267 if (!bitmap_empty_p (&bb_tail))
6268 FOR_EACH_BB_REVERSE (bb)
6270 basic_block copy_bb, tbb;
6271 rtx insert_point;
6272 int eflags;
6274 if (!bitmap_clear_bit (&bb_tail, bb->index))
6275 continue;
6277 /* Create a copy of BB, instructions and all, for
6278 use on paths that don't need a prologue.
6279 Ideal placement of the copy is on a fall-thru edge
6280 or after a block that would jump to the copy. */
6281 FOR_EACH_EDGE (e, ei, bb->preds)
6282 if (!bitmap_bit_p (&bb_flags, e->src->index)
6283 && single_succ_p (e->src))
6284 break;
6285 if (e)
6287 copy_bb = create_basic_block (NEXT_INSN (BB_END (e->src)),
6288 NULL_RTX, e->src);
6289 BB_COPY_PARTITION (copy_bb, e->src);
6291 else
6293 /* Otherwise put the copy at the end of the function. */
6294 copy_bb = create_basic_block (NULL_RTX, NULL_RTX,
6295 EXIT_BLOCK_PTR->prev_bb);
6296 BB_COPY_PARTITION (copy_bb, bb);
6299 insert_point = emit_note_after (NOTE_INSN_DELETED,
6300 BB_END (copy_bb));
6301 emit_barrier_after (BB_END (copy_bb));
6303 tbb = bb;
6304 while (1)
6306 dup_block_and_redirect (tbb, copy_bb, insert_point,
6307 &bb_flags);
6308 tbb = single_succ (tbb);
6309 if (tbb == EXIT_BLOCK_PTR)
6310 break;
6311 e = split_block (copy_bb, PREV_INSN (insert_point));
6312 copy_bb = e->dest;
6315 /* Quiet verify_flow_info by (ab)using EDGE_FAKE.
6316 We have yet to add a simple_return to the tails,
6317 as we'd like to first convert_jumps_to_returns in
6318 case the block is no longer used after that. */
6319 eflags = EDGE_FAKE;
6320 if (CALL_P (PREV_INSN (insert_point))
6321 && SIBLING_CALL_P (PREV_INSN (insert_point)))
6322 eflags = EDGE_SIBCALL | EDGE_ABNORMAL;
6323 make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR, eflags);
6325 /* verify_flow_info doesn't like a note after a
6326 sibling call. */
6327 delete_insn (insert_point);
6328 if (bitmap_empty_p (&bb_tail))
6329 break;
6333 fail_shrinkwrap:
6334 bitmap_clear (&bb_tail);
6335 bitmap_clear (&bb_antic_flags);
6336 bitmap_clear (&bb_on_list);
6337 vec.release ();
6339 #endif
6341 if (split_prologue_seq != NULL_RTX)
6343 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6344 inserted = true;
6346 if (prologue_seq != NULL_RTX)
6348 insert_insn_on_edge (prologue_seq, entry_edge);
6349 inserted = true;
6352 /* If the exit block has no non-fake predecessors, we don't need
6353 an epilogue. */
6354 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6355 if ((e->flags & EDGE_FAKE) == 0)
6356 break;
6357 if (e == NULL)
6358 goto epilogue_done;
6360 rtl_profile_for_bb (EXIT_BLOCK_PTR);
6362 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
6364 /* If we're allowed to generate a simple return instruction, then by
6365 definition we don't need a full epilogue. If the last basic
6366 block before the exit block does not contain active instructions,
6367 examine its predecessors and try to emit (conditional) return
6368 instructions. */
6369 #ifdef HAVE_simple_return
6370 if (entry_edge != orig_entry_edge)
6372 if (optimize)
6374 unsigned i, last;
6376 /* convert_jumps_to_returns may add to EXIT_BLOCK_PTR->preds
6377 (but won't remove). Stop at end of current preds. */
6378 last = EDGE_COUNT (EXIT_BLOCK_PTR->preds);
6379 for (i = 0; i < last; i++)
6381 e = EDGE_I (EXIT_BLOCK_PTR->preds, i);
6382 if (LABEL_P (BB_HEAD (e->src))
6383 && !bitmap_bit_p (&bb_flags, e->src->index)
6384 && !active_insn_between (BB_HEAD (e->src), BB_END (e->src)))
6385 unconverted_simple_returns
6386 = convert_jumps_to_returns (e->src, true,
6387 unconverted_simple_returns);
6391 if (exit_fallthru_edge != NULL
6392 && EDGE_COUNT (exit_fallthru_edge->src->preds) != 0
6393 && !bitmap_bit_p (&bb_flags, exit_fallthru_edge->src->index))
6395 basic_block last_bb;
6397 last_bb = emit_return_for_exit (exit_fallthru_edge, true);
6398 returnjump = BB_END (last_bb);
6399 exit_fallthru_edge = NULL;
6402 #endif
6403 #ifdef HAVE_return
6404 if (HAVE_return)
6406 if (exit_fallthru_edge == NULL)
6407 goto epilogue_done;
6409 if (optimize)
6411 basic_block last_bb = exit_fallthru_edge->src;
6413 if (LABEL_P (BB_HEAD (last_bb))
6414 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6415 convert_jumps_to_returns (last_bb, false, vNULL);
6417 if (EDGE_COUNT (last_bb->preds) != 0
6418 && single_succ_p (last_bb))
6420 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
6421 epilogue_end = returnjump = BB_END (last_bb);
6422 #ifdef HAVE_simple_return
6423 /* Emitting the return may add a basic block.
6424 Fix bb_flags for the added block. */
6425 if (last_bb != exit_fallthru_edge->src)
6426 bitmap_set_bit (&bb_flags, last_bb->index);
6427 #endif
6428 goto epilogue_done;
6432 #endif
6434 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6435 this marker for the splits of EH_RETURN patterns, and nothing else
6436 uses the flag in the meantime. */
6437 epilogue_completed = 1;
6439 #ifdef HAVE_eh_return
6440 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6441 some targets, these get split to a special version of the epilogue
6442 code. In order to be able to properly annotate these with unwind
6443 info, try to split them now. If we get a valid split, drop an
6444 EPILOGUE_BEG note and mark the insns as epilogue insns. */
6445 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6447 rtx prev, last, trial;
6449 if (e->flags & EDGE_FALLTHRU)
6450 continue;
6451 last = BB_END (e->src);
6452 if (!eh_returnjump_p (last))
6453 continue;
6455 prev = PREV_INSN (last);
6456 trial = try_split (PATTERN (last), last, 1);
6457 if (trial == last)
6458 continue;
6460 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6461 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6463 #endif
6465 /* If nothing falls through into the exit block, we don't need an
6466 epilogue. */
6468 if (exit_fallthru_edge == NULL)
6469 goto epilogue_done;
6471 #ifdef HAVE_epilogue
6472 if (HAVE_epilogue)
6474 start_sequence ();
6475 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
6476 seq = gen_epilogue ();
6477 if (seq)
6478 emit_jump_insn (seq);
6480 /* Retain a map of the epilogue insns. */
6481 record_insns (seq, NULL, &epilogue_insn_hash);
6482 set_insn_locations (seq, epilogue_location);
6484 seq = get_insns ();
6485 returnjump = get_last_insn ();
6486 end_sequence ();
6488 insert_insn_on_edge (seq, exit_fallthru_edge);
6489 inserted = true;
6491 if (JUMP_P (returnjump))
6492 set_return_jump_label (returnjump);
6494 else
6495 #endif
6497 basic_block cur_bb;
6499 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
6500 goto epilogue_done;
6501 /* We have a fall-through edge to the exit block, the source is not
6502 at the end of the function, and there will be an assembler epilogue
6503 at the end of the function.
6504 We can't use force_nonfallthru here, because that would try to
6505 use return. Inserting a jump 'by hand' is extremely messy, so
6506 we take advantage of cfg_layout_finalize using
6507 fixup_fallthru_exit_predecessor. */
6508 cfg_layout_initialize (0);
6509 FOR_EACH_BB (cur_bb)
6510 if (cur_bb->index >= NUM_FIXED_BLOCKS
6511 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6512 cur_bb->aux = cur_bb->next_bb;
6513 cfg_layout_finalize ();
6516 epilogue_done:
6518 default_rtl_profile ();
6520 if (inserted)
6522 sbitmap blocks;
6524 commit_edge_insertions ();
6526 /* Look for basic blocks within the prologue insns. */
6527 blocks = sbitmap_alloc (last_basic_block);
6528 bitmap_clear (blocks);
6529 bitmap_set_bit (blocks, entry_edge->dest->index);
6530 bitmap_set_bit (blocks, orig_entry_edge->dest->index);
6531 find_many_sub_basic_blocks (blocks);
6532 sbitmap_free (blocks);
6534 /* The epilogue insns we inserted may cause the exit edge to no longer
6535 be fallthru. */
6536 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6538 if (((e->flags & EDGE_FALLTHRU) != 0)
6539 && returnjump_p (BB_END (e->src)))
6540 e->flags &= ~EDGE_FALLTHRU;
6544 #ifdef HAVE_simple_return
6545 /* If there were branches to an empty LAST_BB which we tried to
6546 convert to conditional simple_returns, but couldn't for some
6547 reason, create a block to hold a simple_return insn and redirect
6548 those remaining edges. */
6549 if (!unconverted_simple_returns.is_empty ())
6551 basic_block simple_return_block_hot = NULL;
6552 basic_block simple_return_block_cold = NULL;
6553 edge pending_edge_hot = NULL;
6554 edge pending_edge_cold = NULL;
6555 basic_block exit_pred = EXIT_BLOCK_PTR->prev_bb;
6556 int i;
6558 gcc_assert (entry_edge != orig_entry_edge);
6560 /* See if we can reuse the last insn that was emitted for the
6561 epilogue. */
6562 if (returnjump != NULL_RTX
6563 && JUMP_LABEL (returnjump) == simple_return_rtx)
6565 e = split_block (BLOCK_FOR_INSN (returnjump), PREV_INSN (returnjump));
6566 if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
6567 simple_return_block_hot = e->dest;
6568 else
6569 simple_return_block_cold = e->dest;
6572 /* Also check returns we might need to add to tail blocks. */
6573 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6574 if (EDGE_COUNT (e->src->preds) != 0
6575 && (e->flags & EDGE_FAKE) != 0
6576 && !bitmap_bit_p (&bb_flags, e->src->index))
6578 if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
6579 pending_edge_hot = e;
6580 else
6581 pending_edge_cold = e;
6584 FOR_EACH_VEC_ELT (unconverted_simple_returns, i, e)
6586 basic_block *pdest_bb;
6587 edge pending;
6589 if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
6591 pdest_bb = &simple_return_block_hot;
6592 pending = pending_edge_hot;
6594 else
6596 pdest_bb = &simple_return_block_cold;
6597 pending = pending_edge_cold;
6600 if (*pdest_bb == NULL && pending != NULL)
6602 emit_return_into_block (true, pending->src);
6603 pending->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
6604 *pdest_bb = pending->src;
6606 else if (*pdest_bb == NULL)
6608 basic_block bb;
6609 rtx start;
6611 bb = create_basic_block (NULL, NULL, exit_pred);
6612 BB_COPY_PARTITION (bb, e->src);
6613 start = emit_jump_insn_after (gen_simple_return (),
6614 BB_END (bb));
6615 JUMP_LABEL (start) = simple_return_rtx;
6616 emit_barrier_after (start);
6618 *pdest_bb = bb;
6619 make_edge (bb, EXIT_BLOCK_PTR, 0);
6621 redirect_edge_and_branch_force (e, *pdest_bb);
6623 unconverted_simple_returns.release ();
6626 if (entry_edge != orig_entry_edge)
6628 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6629 if (EDGE_COUNT (e->src->preds) != 0
6630 && (e->flags & EDGE_FAKE) != 0
6631 && !bitmap_bit_p (&bb_flags, e->src->index))
6633 emit_return_into_block (true, e->src);
6634 e->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
6637 #endif
6639 #ifdef HAVE_sibcall_epilogue
6640 /* Emit sibling epilogues before any sibling call sites. */
6641 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
6643 basic_block bb = e->src;
6644 rtx insn = BB_END (bb);
6645 rtx ep_seq;
6647 if (!CALL_P (insn)
6648 || ! SIBLING_CALL_P (insn)
6649 #ifdef HAVE_simple_return
6650 || (entry_edge != orig_entry_edge
6651 && !bitmap_bit_p (&bb_flags, bb->index))
6652 #endif
6655 ei_next (&ei);
6656 continue;
6659 ep_seq = gen_sibcall_epilogue ();
6660 if (ep_seq)
6662 start_sequence ();
6663 emit_note (NOTE_INSN_EPILOGUE_BEG);
6664 emit_insn (ep_seq);
6665 seq = get_insns ();
6666 end_sequence ();
6668 /* Retain a map of the epilogue insns. Used in life analysis to
6669 avoid getting rid of sibcall epilogue insns. Do this before we
6670 actually emit the sequence. */
6671 record_insns (seq, NULL, &epilogue_insn_hash);
6672 set_insn_locations (seq, epilogue_location);
6674 emit_insn_before (seq, insn);
6676 ei_next (&ei);
6678 #endif
6680 #ifdef HAVE_epilogue
6681 if (epilogue_end)
6683 rtx insn, next;
6685 /* Similarly, move any line notes that appear after the epilogue.
6686 There is no need, however, to be quite so anal about the existence
6687 of such a note. Also possibly move
6688 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6689 info generation. */
6690 for (insn = epilogue_end; insn; insn = next)
6692 next = NEXT_INSN (insn);
6693 if (NOTE_P (insn)
6694 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6695 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6698 #endif
6700 #ifdef HAVE_simple_return
6701 bitmap_clear (&bb_flags);
6702 #endif
6704 /* Threading the prologue and epilogue changes the artificial refs
6705 in the entry and exit blocks. */
6706 epilogue_completed = 1;
6707 df_update_entry_exit_and_calls ();
6710 /* Reposition the prologue-end and epilogue-begin notes after
6711 instruction scheduling. */
6713 void
6714 reposition_prologue_and_epilogue_notes (void)
6716 #if defined (HAVE_prologue) || defined (HAVE_epilogue) \
6717 || defined (HAVE_sibcall_epilogue)
6718 /* Since the hash table is created on demand, the fact that it is
6719 non-null is a signal that it is non-empty. */
6720 if (prologue_insn_hash != NULL)
6722 size_t len = htab_elements (prologue_insn_hash);
6723 rtx insn, last = NULL, note = NULL;
6725 /* Scan from the beginning until we reach the last prologue insn. */
6726 /* ??? While we do have the CFG intact, there are two problems:
6727 (1) The prologue can contain loops (typically probing the stack),
6728 which means that the end of the prologue isn't in the first bb.
6729 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6730 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6732 if (NOTE_P (insn))
6734 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6735 note = insn;
6737 else if (contains (insn, prologue_insn_hash))
6739 last = insn;
6740 if (--len == 0)
6741 break;
6745 if (last)
6747 if (note == NULL)
6749 /* Scan forward looking for the PROLOGUE_END note. It should
6750 be right at the beginning of the block, possibly with other
6751 insn notes that got moved there. */
6752 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6754 if (NOTE_P (note)
6755 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6756 break;
6760 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6761 if (LABEL_P (last))
6762 last = NEXT_INSN (last);
6763 reorder_insns (note, note, last);
6767 if (epilogue_insn_hash != NULL)
6769 edge_iterator ei;
6770 edge e;
6772 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6774 rtx insn, first = NULL, note = NULL;
6775 basic_block bb = e->src;
6777 /* Scan from the beginning until we reach the first epilogue insn. */
6778 FOR_BB_INSNS (bb, insn)
6780 if (NOTE_P (insn))
6782 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6784 note = insn;
6785 if (first != NULL)
6786 break;
6789 else if (first == NULL && contains (insn, epilogue_insn_hash))
6791 first = insn;
6792 if (note != NULL)
6793 break;
6797 if (note)
6799 /* If the function has a single basic block, and no real
6800 epilogue insns (e.g. sibcall with no cleanup), the
6801 epilogue note can get scheduled before the prologue
6802 note. If we have frame related prologue insns, having
6803 them scanned during the epilogue will result in a crash.
6804 In this case re-order the epilogue note to just before
6805 the last insn in the block. */
6806 if (first == NULL)
6807 first = BB_END (bb);
6809 if (PREV_INSN (first) != note)
6810 reorder_insns (note, note, PREV_INSN (first));
6814 #endif /* HAVE_prologue or HAVE_epilogue */
6817 /* Returns the name of function declared by FNDECL. */
6818 const char *
6819 fndecl_name (tree fndecl)
6821 if (fndecl == NULL)
6822 return "(nofn)";
6823 return lang_hooks.decl_printable_name (fndecl, 2);
6826 /* Returns the name of function FN. */
6827 const char *
6828 function_name (struct function *fn)
6830 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6831 return fndecl_name (fndecl);
6834 /* Returns the name of the current function. */
6835 const char *
6836 current_function_name (void)
6838 return function_name (cfun);
6842 static unsigned int
6843 rest_of_handle_check_leaf_regs (void)
6845 #ifdef LEAF_REGISTERS
6846 crtl->uses_only_leaf_regs
6847 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6848 #endif
6849 return 0;
6852 /* Insert a TYPE into the used types hash table of CFUN. */
6854 static void
6855 used_types_insert_helper (tree type, struct function *func)
6857 if (type != NULL && func != NULL)
6859 void **slot;
6861 if (func->used_types_hash == NULL)
6862 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
6863 htab_eq_pointer, NULL);
6864 slot = htab_find_slot (func->used_types_hash, type, INSERT);
6865 if (*slot == NULL)
6866 *slot = type;
6870 /* Given a type, insert it into the used hash table in cfun. */
6871 void
6872 used_types_insert (tree t)
6874 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6875 if (TYPE_NAME (t))
6876 break;
6877 else
6878 t = TREE_TYPE (t);
6879 if (TREE_CODE (t) == ERROR_MARK)
6880 return;
6881 if (TYPE_NAME (t) == NULL_TREE
6882 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6883 t = TYPE_MAIN_VARIANT (t);
6884 if (debug_info_level > DINFO_LEVEL_NONE)
6886 if (cfun)
6887 used_types_insert_helper (t, cfun);
6888 else
6890 /* So this might be a type referenced by a global variable.
6891 Record that type so that we can later decide to emit its
6892 debug information. */
6893 vec_safe_push (types_used_by_cur_var_decl, t);
6898 /* Helper to Hash a struct types_used_by_vars_entry. */
6900 static hashval_t
6901 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6903 gcc_assert (entry && entry->var_decl && entry->type);
6905 return iterative_hash_object (entry->type,
6906 iterative_hash_object (entry->var_decl, 0));
6909 /* Hash function of the types_used_by_vars_entry hash table. */
6911 hashval_t
6912 types_used_by_vars_do_hash (const void *x)
6914 const struct types_used_by_vars_entry *entry =
6915 (const struct types_used_by_vars_entry *) x;
6917 return hash_types_used_by_vars_entry (entry);
6920 /*Equality function of the types_used_by_vars_entry hash table. */
6923 types_used_by_vars_eq (const void *x1, const void *x2)
6925 const struct types_used_by_vars_entry *e1 =
6926 (const struct types_used_by_vars_entry *) x1;
6927 const struct types_used_by_vars_entry *e2 =
6928 (const struct types_used_by_vars_entry *)x2;
6930 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6933 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6935 void
6936 types_used_by_var_decl_insert (tree type, tree var_decl)
6938 if (type != NULL && var_decl != NULL)
6940 void **slot;
6941 struct types_used_by_vars_entry e;
6942 e.var_decl = var_decl;
6943 e.type = type;
6944 if (types_used_by_vars_hash == NULL)
6945 types_used_by_vars_hash =
6946 htab_create_ggc (37, types_used_by_vars_do_hash,
6947 types_used_by_vars_eq, NULL);
6948 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
6949 hash_types_used_by_vars_entry (&e), INSERT);
6950 if (*slot == NULL)
6952 struct types_used_by_vars_entry *entry;
6953 entry = ggc_alloc_types_used_by_vars_entry ();
6954 entry->type = type;
6955 entry->var_decl = var_decl;
6956 *slot = entry;
6961 struct rtl_opt_pass pass_leaf_regs =
6964 RTL_PASS,
6965 "*leaf_regs", /* name */
6966 OPTGROUP_NONE, /* optinfo_flags */
6967 NULL, /* gate */
6968 rest_of_handle_check_leaf_regs, /* execute */
6969 NULL, /* sub */
6970 NULL, /* next */
6971 0, /* static_pass_number */
6972 TV_NONE, /* tv_id */
6973 0, /* properties_required */
6974 0, /* properties_provided */
6975 0, /* properties_destroyed */
6976 0, /* todo_flags_start */
6977 0 /* todo_flags_finish */
6981 static unsigned int
6982 rest_of_handle_thread_prologue_and_epilogue (void)
6984 if (optimize)
6985 cleanup_cfg (CLEANUP_EXPENSIVE);
6987 /* On some machines, the prologue and epilogue code, or parts thereof,
6988 can be represented as RTL. Doing so lets us schedule insns between
6989 it and the rest of the code and also allows delayed branch
6990 scheduling to operate in the epilogue. */
6991 thread_prologue_and_epilogue_insns ();
6993 /* The stack usage info is finalized during prologue expansion. */
6994 if (flag_stack_usage_info)
6995 output_stack_usage ();
6997 return 0;
7000 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
7003 RTL_PASS,
7004 "pro_and_epilogue", /* name */
7005 OPTGROUP_NONE, /* optinfo_flags */
7006 NULL, /* gate */
7007 rest_of_handle_thread_prologue_and_epilogue, /* execute */
7008 NULL, /* sub */
7009 NULL, /* next */
7010 0, /* static_pass_number */
7011 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
7012 0, /* properties_required */
7013 0, /* properties_provided */
7014 0, /* properties_destroyed */
7015 TODO_verify_flow, /* todo_flags_start */
7016 TODO_df_verify |
7017 TODO_df_finish | TODO_verify_rtl_sharing |
7018 TODO_ggc_collect /* todo_flags_finish */
7023 /* This mini-pass fixes fall-out from SSA in asm statements that have
7024 in-out constraints. Say you start with
7026 orig = inout;
7027 asm ("": "+mr" (inout));
7028 use (orig);
7030 which is transformed very early to use explicit output and match operands:
7032 orig = inout;
7033 asm ("": "=mr" (inout) : "0" (inout));
7034 use (orig);
7036 Or, after SSA and copyprop,
7038 asm ("": "=mr" (inout_2) : "0" (inout_1));
7039 use (inout_1);
7041 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
7042 they represent two separate values, so they will get different pseudo
7043 registers during expansion. Then, since the two operands need to match
7044 per the constraints, but use different pseudo registers, reload can
7045 only register a reload for these operands. But reloads can only be
7046 satisfied by hardregs, not by memory, so we need a register for this
7047 reload, just because we are presented with non-matching operands.
7048 So, even though we allow memory for this operand, no memory can be
7049 used for it, just because the two operands don't match. This can
7050 cause reload failures on register-starved targets.
7052 So it's a symptom of reload not being able to use memory for reloads
7053 or, alternatively it's also a symptom of both operands not coming into
7054 reload as matching (in which case the pseudo could go to memory just
7055 fine, as the alternative allows it, and no reload would be necessary).
7056 We fix the latter problem here, by transforming
7058 asm ("": "=mr" (inout_2) : "0" (inout_1));
7060 back to
7062 inout_2 = inout_1;
7063 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
7065 static void
7066 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
7068 int i;
7069 bool changed = false;
7070 rtx op = SET_SRC (p_sets[0]);
7071 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
7072 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
7073 bool *output_matched = XALLOCAVEC (bool, noutputs);
7075 memset (output_matched, 0, noutputs * sizeof (bool));
7076 for (i = 0; i < ninputs; i++)
7078 rtx input, output, insns;
7079 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
7080 char *end;
7081 int match, j;
7083 if (*constraint == '%')
7084 constraint++;
7086 match = strtoul (constraint, &end, 10);
7087 if (end == constraint)
7088 continue;
7090 gcc_assert (match < noutputs);
7091 output = SET_DEST (p_sets[match]);
7092 input = RTVEC_ELT (inputs, i);
7093 /* Only do the transformation for pseudos. */
7094 if (! REG_P (output)
7095 || rtx_equal_p (output, input)
7096 || (GET_MODE (input) != VOIDmode
7097 && GET_MODE (input) != GET_MODE (output)))
7098 continue;
7100 /* We can't do anything if the output is also used as input,
7101 as we're going to overwrite it. */
7102 for (j = 0; j < ninputs; j++)
7103 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
7104 break;
7105 if (j != ninputs)
7106 continue;
7108 /* Avoid changing the same input several times. For
7109 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
7110 only change in once (to out1), rather than changing it
7111 first to out1 and afterwards to out2. */
7112 if (i > 0)
7114 for (j = 0; j < noutputs; j++)
7115 if (output_matched[j] && input == SET_DEST (p_sets[j]))
7116 break;
7117 if (j != noutputs)
7118 continue;
7120 output_matched[match] = true;
7122 start_sequence ();
7123 emit_move_insn (output, input);
7124 insns = get_insns ();
7125 end_sequence ();
7126 emit_insn_before (insns, insn);
7128 /* Now replace all mentions of the input with output. We can't
7129 just replace the occurrence in inputs[i], as the register might
7130 also be used in some other input (or even in an address of an
7131 output), which would mean possibly increasing the number of
7132 inputs by one (namely 'output' in addition), which might pose
7133 a too complicated problem for reload to solve. E.g. this situation:
7135 asm ("" : "=r" (output), "=m" (input) : "0" (input))
7137 Here 'input' is used in two occurrences as input (once for the
7138 input operand, once for the address in the second output operand).
7139 If we would replace only the occurrence of the input operand (to
7140 make the matching) we would be left with this:
7142 output = input
7143 asm ("" : "=r" (output), "=m" (input) : "0" (output))
7145 Now we suddenly have two different input values (containing the same
7146 value, but different pseudos) where we formerly had only one.
7147 With more complicated asms this might lead to reload failures
7148 which wouldn't have happen without this pass. So, iterate over
7149 all operands and replace all occurrences of the register used. */
7150 for (j = 0; j < noutputs; j++)
7151 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
7152 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
7153 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
7154 input, output);
7155 for (j = 0; j < ninputs; j++)
7156 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
7157 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
7158 input, output);
7160 changed = true;
7163 if (changed)
7164 df_insn_rescan (insn);
7167 static unsigned
7168 rest_of_match_asm_constraints (void)
7170 basic_block bb;
7171 rtx insn, pat, *p_sets;
7172 int noutputs;
7174 if (!crtl->has_asm_statement)
7175 return 0;
7177 df_set_flags (DF_DEFER_INSN_RESCAN);
7178 FOR_EACH_BB (bb)
7180 FOR_BB_INSNS (bb, insn)
7182 if (!INSN_P (insn))
7183 continue;
7185 pat = PATTERN (insn);
7186 if (GET_CODE (pat) == PARALLEL)
7187 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
7188 else if (GET_CODE (pat) == SET)
7189 p_sets = &PATTERN (insn), noutputs = 1;
7190 else
7191 continue;
7193 if (GET_CODE (*p_sets) == SET
7194 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
7195 match_asm_constraints_1 (insn, p_sets, noutputs);
7199 return TODO_df_finish;
7202 struct rtl_opt_pass pass_match_asm_constraints =
7205 RTL_PASS,
7206 "asmcons", /* name */
7207 OPTGROUP_NONE, /* optinfo_flags */
7208 NULL, /* gate */
7209 rest_of_match_asm_constraints, /* execute */
7210 NULL, /* sub */
7211 NULL, /* next */
7212 0, /* static_pass_number */
7213 TV_NONE, /* tv_id */
7214 0, /* properties_required */
7215 0, /* properties_provided */
7216 0, /* properties_destroyed */
7217 0, /* todo_flags_start */
7218 0 /* todo_flags_finish */
7223 #include "gt-function.h"