/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "gimple.h"
#include "tree-pass.h"
#include "predict.h"
#include "df.h"
#include "params.h"
#include "bb-reorder.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

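/* For illustration (not part of the original source): with a power-of-two
   alignment, the two macros bracket a value from below and above, e.g.

     FLOOR_ROUND (13, 8) == 13 & ~7        ==  8
     CEIL_ROUND  (13, 8) == (13 + 7) & ~7  == 16

   and both leave an already-aligned value such as 16 unchanged.  */
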
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;

htab_t types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locations (rtx, int) ATTRIBUTE_UNUSED;

/* Stack of nested functions; keeps track of the cfun stack.  */

typedef struct function *function_p;

static vec<function_p> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	     /* Leave room for the fixed part of the frame.  */
	     - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
		"total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}

/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}

/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
		     HOST_WIDE_INT size, unsigned int alignment,
		     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
		      (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
		     (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
	   && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}

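/* A worked example of the rounding above (illustrative values only):
   with FRAME_GROWS_DOWNWARD, START = -32, LENGTH = 32, SIZE = 8,
   ALIGNMENT = 8 and FRAME_PHASE = 0, we get

     this_frame_offset = FLOOR_ROUND (-32 + 32 - 8 - 0, 8) + 0 = -8,

   which lies inside [START, START + LENGTH), so assuming the slot falls
   within the existing frame, *POFFSET becomes -8 and the function
   returns true.  */
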
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc_frame_space ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

static rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
		      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = alignment_in_bits;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized);
	      if (!crtl->stack_realign_needed)
		{
		  /* It is OK to reduce the alignment as long as the
		     requested size is 0 or the estimated stack
		     alignment >= mode alignment.  */
		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
			      || size == 0
			      || (crtl->stack_alignment_estimated
				  >= GET_MODE_ALIGNMENT (mode)));
		  alignment_in_bits = crtl->stack_alignment_estimated;
		  alignment = alignment_in_bits / BITS_PER_UNIT;
		}
	    }
	}
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
	{
	  struct frame_space **psp;

	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
	    {
	      struct frame_space *space = *psp;
	      if (!try_fit_stack_local (space->start, space->length, size,
					alignment, &slot_offset))
		continue;
	      *psp = space->next;
	      if (slot_offset > space->start)
		add_frame_space (space->start, slot_offset);
	      if (slot_offset + size < space->start + space->length)
		add_frame_space (slot_offset + size,
				 space->start + space->length);
	      goto found_space;
	    }
	}
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > frame_offset)
	    add_frame_space (frame_offset, slot_offset);
	  if (slot_offset + size < old_frame_offset)
	    add_frame_space (slot_offset + size, old_frame_offset);
	}
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > old_frame_offset)
	    add_frame_space (old_frame_offset, slot_offset);
	  if (slot_offset + size < frame_offset)
	    add_frame_space (slot_offset + size, frame_offset);
	}
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction,
			   Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}

/* Wrap up assign_stack_local_1 with KIND as ASLK_RECORD_PAD.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}

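/* Example usage (illustrative, not a caller in this file): a back end
   needing a word-sized spill slot could write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   where ALIGN == 0 requests the natural alignment of SImode.  */
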
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */

struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
static size_t n_temp_slots_in_use;

/* Entry for the above hash table.  */
struct GTY(()) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}

/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
		   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}

/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    htab_clear_slot (temp_slot_address_table, slot);
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    htab_traverse (temp_slot_address_table,
		   remove_unused_temp_slot_addresses_1,
		   NULL);
  else
    htab_empty (temp_slot_address_table);
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
	for (p = *temp_slots_at_level (i); p; p = p->next)
	  {
	    if (INTVAL (XEXP (x, 1)) >= p->base_offset
		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
	      return p;
	  }
    }

  return NULL;
}

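/* For example (hypothetical RTL): given a slot whose base_offset is -16
   and whose full_size is 8, the last-resort scan above matches any
   address of the form (plus virtual-stack-vars (const_int N)) with
   -16 <= N < -8, even if that exact address was never entered into the
   hash table.  */
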
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
			    tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = ggc_alloc_temp_slot ();
	      p->in_use = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc_temp_slot ();

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
				      (mode == BLKmode
				       ? CEIL_ROUND (size,
						     (int) align
						     / BITS_PER_UNIT)
				       : size),
				      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}

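/* Example usage (a sketch, not a caller in this file): expansion code
   that needs a scratch buffer for a BLKmode value of some TYPE might
   write

     rtx mem = assign_stack_temp (BLKmode, int_size_in_bytes (type));

   and rely on free_temp_slots to release the slot when the enclosing
   statement has been expanded.  */
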
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in registers
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* If strict aliasing is in effect, we can't combine slots, because
     the information about which alias set a slot is in would be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     optimizing at high levels.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}

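/* A small example of the combination above (hypothetical offsets): two
   free BLKmode slots covering [base_offset, base_offset + full_size) of
   [0, 16) and [16, 32) satisfy p->base_offset + p->full_size ==
   q->base_offset, so they are merged into a single free slot covering
   [0, 32).  */
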
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location; if so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
     in common between them; if so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
	return;

      if (REG_P (new_rtx))
	{
	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
	  return;
	}
      else if (GET_CODE (new_rtx) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X matched
   one of our slots, just mark that one.  Otherwise, we can't easily
   predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
	move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}

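/* Typical usage from expansion code (a sketch, not a caller in this file):

     push_temp_slots ();
     ... expand a statement, allocating temporaries ...
     pop_temp_slots ();

   pop_temp_slots frees every slot in use at the current level; nested
   push/pop pairs, together with preserve_temp_slots, are what keep a
   ({...}) result alive while the surrounding statement's temporaries
   are released.  */
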
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
					       temp_slot_address_hash,
					       temp_slot_address_eq,
					       NULL);
  else
    htab_empty (temp_slot_address_table);
}

/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
typedef struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
/* ???  This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
typedef struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;

/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc_initial_value_struct ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_alloc_vec_initial_value_pair (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
				    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}

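/* Example usage (illustrative; the register choice is hypothetical and
   target-dependent): a back end that must refer to the entry value of
   some hard register, say the static chain, could write

     rtx chain = get_hard_reg_initial_val (Pmode, STATIC_CHAIN_REGNUM);

   and emit_initial_value_sets below then emits the copy from the hard
   register into that pseudo at function entry.  */
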
/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
	  && REGNO (ivs->entries[i].hard_reg) == regno)
	return ivs->entries[i].pseudo;

  return NULL_RTX;
}

unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}

/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */
bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}

/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
	: REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif

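/* For example (hypothetical numbers): with ACCUMULATE_OUTGOING_ARGS in
   effect, no REG_PARM_STACK_SPACE, crtl->outgoing_args_size == 32 and
   STACK_POINTER_OFFSET == 0, dynamic allocations start 32 bytes above
   the stack pointer, i.e. STACK_DYNAMIC_OFFSET (fndecl) == 32.  */
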
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
	{
	  /* Replace virtual_incoming_args_rtx with internal arg
	     pointer if DRAP is used to realign stack.  */
	  new_rtx = crtl->args.internal_arg_pointer;
	  offset = 0;
	}
      else
	new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}

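/* For example: once var_offset is known, an address such as
   (plus virtual-stack-vars (const_int 8)) is rewritten by the callers
   below into (plus frame-pointer (const_int var_offset + 8)).  */
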
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
	{
	  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
	  if (changed)
	    *changed = true;
	}
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
	{
	  new_rtx = plus_constant (GET_MODE (x), new_rtx, offset);
	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
	  if (changed)
	    *changed = true;
	  return -1;
	}

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
	 we can commute the PLUS and SUBREG because pointers into the
	 frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}

/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}

/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of INSN.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
	{
	  start_sequence ();

	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
				   gen_int_mode (-offset, GET_MODE (new_rtx)));
	  x = force_operand (x, new_rtx);
	  if (x != new_rtx)
	    emit_move_insn (new_rtx, x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
				   gen_int_mode (offset,
						 GET_MODE (SET_DEST (set))),
				   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && CONST_INT_P (recog_data.operand[2])
	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  offset += INTVAL (recog_data.operand[2]);

	  /* If the sum is zero, then replace with a plain move.  */
	  if (offset == 0
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new_rtx);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new_rtx)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);
	    bool changed = false;

	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
	    if (!changed)
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't anymore, for instance when the address
	       is still offsetted, but the base reg isn't virtual-stack-reg
	       anymore.  Below we would do a force_reg on the whole operand,
	       but this insn might actually only accept memory.  Hence,
	       before doing that last resort, try to reload the address into
	       a register, so this operand stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;

	case REG:
	  new_rtx = instantiate_new_reg (x, &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset == 0)
	    x = new_rtx;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is valid before we put
		 this through expand_simple_binop.  */
	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
				       gen_int_mode (offset, GET_MODE (x)),
				       NULL_RTX, 1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  break;

	case SUBREG:
	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset != 0)
	    {
	      start_sequence ();
	      new_rtx = expand_simple_binop
		(GET_MODE (new_rtx), PLUS, new_rtx,
		 gen_int_mode (offset, GET_MODE (new_rtx)),
		 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
				   GET_MODE (new_rtx), SUBREG_BYTE (x));
	  gcc_assert (x);
	  break;

	default:
	  continue;
	}

      /* At this point, X contains the new value for the operand.
	 Validate the new value vs the insn predicate.  Note that
	 asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
	{
	  start_sequence ();
	  if (REG_P (x))
	    {
	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
	      x = copy_to_reg (x);
	    }
	  else
	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    emit_insn_before (seq, insn);
	}

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
	{
	  error_for_asm (insn, "impossible constraint in %<asm%>");
	  /* For asm goto, instead of fixing up all the edges
	     just clear the template and clear input operands
	     (asm goto doesn't have any output operands).  */
	  if (JUMP_P (insn))
	    {
	      rtx asm_op = extract_asm_operands (PATTERN (insn));
	      ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
	      ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
	      ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
	    }
	  else
	    delete_insn (insn);
	}
    }
  else
    {
      if (recog_memoized (insn) < 0)
	fatal_insn_not_found (insn);
    }
}

/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}

/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
	{
	  if (DECL_RTL_SET_P (t))
	    instantiate_decl_rtl (DECL_RTL (t));
	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
	      && DECL_INCOMING_RTL (t))
	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
	  if ((TREE_CODE (t) == VAR_DECL
	       || TREE_CODE (t) == RESULT_DECL)
	      && DECL_HAS_VALUE_EXPR_P (t))
	    {
	      tree v = DECL_VALUE_EXPR (t);
	      walk_tree (&v, instantiate_expr, NULL, NULL);
	    }
	}
    }
  return NULL;
}

/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
	{
	  tree v = DECL_VALUE_EXPR (t);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}

/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
	instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  vec_free (cfun->local_decls);
}

1891 /* Pass through the INSNS of function FNDECL and convert virtual register
1892 references to hard register references. */
1894 static unsigned int
1895 instantiate_virtual_regs (void)
1897 rtx insn;
1899 /* Compute the offsets to use for this function. */
1900 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1901 var_offset = STARTING_FRAME_OFFSET;
1902 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1903 out_arg_offset = STACK_POINTER_OFFSET;
1904 #ifdef FRAME_POINTER_CFA_OFFSET
1905 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1906 #else
1907 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1908 #endif
1910 /* Initialize recognition, indicating that volatile is OK. */
1911 init_recog ();
1913 /* Scan through all the insns, instantiating every virtual register still
1914 present. */
1915 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1916 if (INSN_P (insn))
1918 /* These patterns in the instruction stream can never be recognized.
1919 Fortunately, they shouldn't contain virtual registers either. */
1920 if (GET_CODE (PATTERN (insn)) == USE
1921 || GET_CODE (PATTERN (insn)) == CLOBBER
1922 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1923 continue;
1924 else if (DEBUG_INSN_P (insn))
1925 for_each_rtx (&INSN_VAR_LOCATION (insn),
1926 instantiate_virtual_regs_in_rtx, NULL);
1927 else
1928 instantiate_virtual_regs_in_insn (insn);
1930 if (INSN_DELETED_P (insn))
1931 continue;
1933 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1935 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1936 if (CALL_P (insn))
1937 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1938 instantiate_virtual_regs_in_rtx, NULL);
1941 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1942 instantiate_decls (current_function_decl);
1944 targetm.instantiate_decls ();
1946 /* Indicate that, from now on, assign_stack_local should use
1947 frame_pointer_rtx. */
1948 virtuals_instantiated = 1;
1950 return 0;
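/* As a worked example (schematic; offsets are illustrative, not
   target-accurate): before this pass a frame access looks like

     (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int 4)))

   and afterwards the virtual register has been replaced and the two
   constants folded into one:

     (mem:SI (plus:SI (reg:SI frame-pointer) (const_int N)))

   where N is 4 plus the var_offset (STARTING_FRAME_OFFSET) computed
   above.  */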
1953 namespace {
1955 const pass_data pass_data_instantiate_virtual_regs =
1957 RTL_PASS, /* type */
1958 "vregs", /* name */
1959 OPTGROUP_NONE, /* optinfo_flags */
1960 false, /* has_gate */
1961 true, /* has_execute */
1962 TV_NONE, /* tv_id */
1963 0, /* properties_required */
1964 0, /* properties_provided */
1965 0, /* properties_destroyed */
1966 0, /* todo_flags_start */
1967 0, /* todo_flags_finish */
1970 class pass_instantiate_virtual_regs : public rtl_opt_pass
1972 public:
1973 pass_instantiate_virtual_regs (gcc::context *ctxt)
1974 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
1977 /* opt_pass methods: */
1978 unsigned int execute () { return instantiate_virtual_regs (); }
1980 }; // class pass_instantiate_virtual_regs
1982 } // anon namespace
1984 rtl_opt_pass *
1985 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
1987 return new pass_instantiate_virtual_regs (ctxt);
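/* The factory above is not called directly; the pass manager places
   the pass in the pipeline from passes.def, schematically as

     NEXT_PASS (pass_instantiate_virtual_regs);

   which the manager expands into a call to
   make_pass_instantiate_virtual_regs.  */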
1991 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1992 This means a type for which function calls must pass an address to the
1993 function or get an address back from the function.
1994 EXP may be a type node or an expression (whose type is tested). */
1996 int
1997 aggregate_value_p (const_tree exp, const_tree fntype)
1999 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2000 int i, regno, nregs;
2001 rtx reg;
2003 if (fntype)
2004 switch (TREE_CODE (fntype))
2006 case CALL_EXPR:
2008 tree fndecl = get_callee_fndecl (fntype);
2009 fntype = (fndecl
2010 ? TREE_TYPE (fndecl)
2011 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
2013 break;
2014 case FUNCTION_DECL:
2015 fntype = TREE_TYPE (fntype);
2016 break;
2017 case FUNCTION_TYPE:
2018 case METHOD_TYPE:
2019 break;
2020 case IDENTIFIER_NODE:
2021 fntype = NULL_TREE;
2022 break;
2023 default:
2024 /* We don't expect other tree types here. */
2025 gcc_unreachable ();
2028 if (VOID_TYPE_P (type))
2029 return 0;
2031 /* If a record should be passed the same as its first (and only) member,
2032 don't pass it as an aggregate. */
2033 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2034 return aggregate_value_p (first_field (type), fntype);
2036 /* If the front end has decided that this needs to be passed by
2037 reference, do so. */
2038 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2039 && DECL_BY_REFERENCE (exp))
2040 return 1;
2042 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2043 if (fntype && TREE_ADDRESSABLE (fntype))
2044 return 1;
2046 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2047 and thus can't be returned in registers. */
2048 if (TREE_ADDRESSABLE (type))
2049 return 1;
2051 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2052 return 1;
2054 if (targetm.calls.return_in_memory (type, fntype))
2055 return 1;
2057 /* Make sure we have suitable call-clobbered regs to return
2058 the value in; if not, we must return it in memory. */
2059 reg = hard_function_value (type, 0, fntype, 0);
2061 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2062 it is OK. */
2063 if (!REG_P (reg))
2064 return 0;
2066 regno = REGNO (reg);
2067 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2068 for (i = 0; i < nregs; i++)
2069 if (! call_used_regs[regno + i])
2070 return 1;
2072 return 0;
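/* Two examples of the tests above: with -fpcc-struct-return every
   aggregate return is forced into memory regardless of size, while
   without it a small struct whose hard_function_value registers are
   all call-clobbered is returned in those registers (result 0),
   assuming the target's return_in_memory hook agrees.  */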
2075 /* Return true if we should assign DECL a pseudo register; false if it
2076 should live on the local stack. */
2078 bool
2079 use_register_for_decl (const_tree decl)
2081 if (!targetm.calls.allocate_stack_slots_for_args ())
2082 return true;
2084 /* Honor volatile. */
2085 if (TREE_SIDE_EFFECTS (decl))
2086 return false;
2088 /* Honor addressability. */
2089 if (TREE_ADDRESSABLE (decl))
2090 return false;
2092 /* Only register-like things go in registers. */
2093 if (DECL_MODE (decl) == BLKmode)
2094 return false;
2096 /* If -ffloat-store specified, don't put explicit float variables
2097 into registers. */
2098 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2099 propagates values across these stores, and it probably shouldn't. */
2100 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2101 return false;
2103 /* If we're not interested in tracking debugging information for
2104 this decl, then we can certainly put it in a register. */
2105 if (DECL_IGNORED_P (decl))
2106 return true;
2108 if (optimize)
2109 return true;
2111 if (!DECL_REGISTER (decl))
2112 return false;
2114 switch (TREE_CODE (TREE_TYPE (decl)))
2116 case RECORD_TYPE:
2117 case UNION_TYPE:
2118 case QUAL_UNION_TYPE:
2119 /* When not optimizing, disregard register keyword for variables with
2120 types containing methods, otherwise the methods won't be callable
2121 from the debugger. */
2122 if (TYPE_METHODS (TREE_TYPE (decl)))
2123 return false;
2124 break;
2125 default:
2126 break;
2129 return true;
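/* Source-level illustration of the unoptimized case: at -O0 a plain
   "register int i;" is honored and i gets a pseudo, but a "register"
   variable of a C++ class type with methods stays on the stack so
   that those methods remain callable from the debugger.  */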
2132 /* Return true if TYPE should be passed by invisible reference. */
2134 bool
2135 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2136 tree type, bool named_arg)
2138 if (type)
2140 /* If this type contains non-trivial constructors, then it is
2141 forbidden for the middle-end to create any new copies. */
2142 if (TREE_ADDRESSABLE (type))
2143 return true;
2145 /* GCC post 3.4 passes *all* variable sized types by reference. */
2146 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2147 return true;
2149 /* If a record type should be passed the same as its first (and only)
2150 member, use the type and mode of that member. */
2151 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2153 type = TREE_TYPE (first_field (type));
2154 mode = TYPE_MODE (type);
2158 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2159 type, named_arg);
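/* For example, a type whose TYPE_SIZE is not an INTEGER_CST -- such
   as a variable-sized record some front ends create -- is passed by
   reference here unconditionally, without consulting the target
   hook.  */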
2162 /* Return true if TYPE, which is passed by reference, should be callee
2163 copied instead of caller copied. */
2165 bool
2166 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2167 tree type, bool named_arg)
2169 if (type && TREE_ADDRESSABLE (type))
2170 return false;
2171 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2172 named_arg);
2175 /* Structures to communicate between the subroutines of assign_parms.
2176 The first holds data persistent across all parameters, the second
2177 is cleared out for each parameter. */
2179 struct assign_parm_data_all
2181 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2182 should become a job of the target or otherwise encapsulated. */
2183 CUMULATIVE_ARGS args_so_far_v;
2184 cumulative_args_t args_so_far;
2185 struct args_size stack_args_size;
2186 tree function_result_decl;
2187 tree orig_fnargs;
2188 rtx first_conversion_insn;
2189 rtx last_conversion_insn;
2190 HOST_WIDE_INT pretend_args_size;
2191 HOST_WIDE_INT extra_pretend_bytes;
2192 int reg_parm_stack_space;
2195 struct assign_parm_data_one
2197 tree nominal_type;
2198 tree passed_type;
2199 rtx entry_parm;
2200 rtx stack_parm;
2201 enum machine_mode nominal_mode;
2202 enum machine_mode passed_mode;
2203 enum machine_mode promoted_mode;
2204 struct locate_and_pad_arg_data locate;
2205 int partial;
2206 BOOL_BITFIELD named_arg : 1;
2207 BOOL_BITFIELD passed_pointer : 1;
2208 BOOL_BITFIELD on_stack : 1;
2209 BOOL_BITFIELD loaded_in_reg : 1;
2212 /* A subroutine of assign_parms. Initialize ALL. */
2214 static void
2215 assign_parms_initialize_all (struct assign_parm_data_all *all)
2217 tree fntype ATTRIBUTE_UNUSED;
2219 memset (all, 0, sizeof (*all));
2221 fntype = TREE_TYPE (current_function_decl);
2223 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2224 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2225 #else
2226 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2227 current_function_decl, -1);
2228 #endif
2229 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2231 #ifdef REG_PARM_STACK_SPACE
2232 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2233 #endif
2236 /* If ARGS contains entries with complex types, split each entry into two
2237 entries of the component type. Modify ARGS in place; no new list is
2238 returned. */
2240 static void
2241 split_complex_args (vec<tree> *args)
2243 unsigned i;
2244 tree p;
2246 FOR_EACH_VEC_ELT (*args, i, p)
2248 tree type = TREE_TYPE (p);
2249 if (TREE_CODE (type) == COMPLEX_TYPE
2250 && targetm.calls.split_complex_arg (type))
2252 tree decl;
2253 tree subtype = TREE_TYPE (type);
2254 bool addressable = TREE_ADDRESSABLE (p);
2256 /* Rewrite the PARM_DECL's type with its component. */
2257 p = copy_node (p);
2258 TREE_TYPE (p) = subtype;
2259 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2260 DECL_MODE (p) = VOIDmode;
2261 DECL_SIZE (p) = NULL;
2262 DECL_SIZE_UNIT (p) = NULL;
2263 /* If this arg must go in memory, put it in a pseudo here.
2264 We can't allow it to go in memory as per normal parms,
2265 because the usual place might not have the imag part
2266 adjacent to the real part. */
2267 DECL_ARTIFICIAL (p) = addressable;
2268 DECL_IGNORED_P (p) = addressable;
2269 TREE_ADDRESSABLE (p) = 0;
2270 layout_decl (p, 0);
2271 (*args)[i] = p;
2273 /* Build a second synthetic decl. */
2274 decl = build_decl (EXPR_LOCATION (p),
2275 PARM_DECL, NULL_TREE, subtype);
2276 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2277 DECL_ARTIFICIAL (decl) = addressable;
2278 DECL_IGNORED_P (decl) = addressable;
2279 layout_decl (decl, 0);
2280 args->safe_insert (++i, decl);
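/* For example, if the target's split_complex_arg hook accepts the
   type, a parameter declared as

     _Complex double z;

   is rewritten here into two consecutive PARM_DECLs of type double,
   carrying the real and the imaginary part respectively.  */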
2285 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2286 the hidden struct return argument, and (abi willing) complex args.
2287 Return the new parameter list. */
2289 static vec<tree>
2290 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2292 tree fndecl = current_function_decl;
2293 tree fntype = TREE_TYPE (fndecl);
2294 vec<tree> fnargs = vNULL;
2295 tree arg;
2297 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2298 fnargs.safe_push (arg);
2300 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2302 /* If struct value address is treated as the first argument, make it so. */
2303 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2304 && ! cfun->returns_pcc_struct
2305 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2307 tree type = build_pointer_type (TREE_TYPE (fntype));
2308 tree decl;
2310 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2311 PARM_DECL, get_identifier (".result_ptr"), type);
2312 DECL_ARG_TYPE (decl) = type;
2313 DECL_ARTIFICIAL (decl) = 1;
2314 DECL_NAMELESS (decl) = 1;
2315 TREE_CONSTANT (decl) = 1;
2317 DECL_CHAIN (decl) = all->orig_fnargs;
2318 all->orig_fnargs = decl;
2319 fnargs.safe_insert (0, decl);
2321 all->function_result_decl = decl;
2324 /* If the target wants to split complex arguments into scalars, do so. */
2325 if (targetm.calls.split_complex_arg)
2326 split_complex_args (&fnargs);
2328 return fnargs;
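/* Schematically, a function returning an aggregate in memory, such as

     struct big f (void);

   is treated from here on as if it had been declared

     struct big *f (struct big *.result_ptr);

   with the hidden pointer prepended to the argument list (this only
   happens when the target does not supply a struct_value_rtx of its
   own).  */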
2331 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2332 data for the parameter. Incorporate ABI specifics such as pass-by-
2333 reference and type promotion. */
2335 static void
2336 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2337 struct assign_parm_data_one *data)
2339 tree nominal_type, passed_type;
2340 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2341 int unsignedp;
2343 memset (data, 0, sizeof (*data));
2345 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2346 if (!cfun->stdarg)
2347 data->named_arg = 1; /* No variadic parms. */
2348 else if (DECL_CHAIN (parm))
2349 data->named_arg = 1; /* Not the last non-variadic parm. */
2350 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2351 data->named_arg = 1; /* Only variadic ones are unnamed. */
2352 else
2353 data->named_arg = 0; /* Treat as variadic. */
2355 nominal_type = TREE_TYPE (parm);
2356 passed_type = DECL_ARG_TYPE (parm);
2358 /* Look out for errors propagating this far. Also, if the parameter's
2359 type is void then its value doesn't matter. */
2360 if (TREE_TYPE (parm) == error_mark_node
2361 /* This can happen after weird syntax errors
2362 or if an enum type is defined among the parms. */
2363 || TREE_CODE (parm) != PARM_DECL
2364 || passed_type == NULL
2365 || VOID_TYPE_P (nominal_type))
2367 nominal_type = passed_type = void_type_node;
2368 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2369 goto egress;
2372 /* Find mode of arg as it is passed, and mode of arg as it should be
2373 during execution of this function. */
2374 passed_mode = TYPE_MODE (passed_type);
2375 nominal_mode = TYPE_MODE (nominal_type);
2377 /* If the parm is to be passed as a transparent union or record, use the
2378 type of the first field for the tests below. We have already verified
2379 that the modes are the same. */
2380 if ((TREE_CODE (passed_type) == UNION_TYPE
2381 || TREE_CODE (passed_type) == RECORD_TYPE)
2382 && TYPE_TRANSPARENT_AGGR (passed_type))
2383 passed_type = TREE_TYPE (first_field (passed_type));
2385 /* See if this arg was passed by invisible reference. */
2386 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2387 passed_type, data->named_arg))
2389 passed_type = nominal_type = build_pointer_type (passed_type);
2390 data->passed_pointer = true;
2391 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2394 /* Find mode as it is passed by the ABI. */
2395 unsignedp = TYPE_UNSIGNED (passed_type);
2396 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2397 TREE_TYPE (current_function_decl), 0);
2399 egress:
2400 data->nominal_type = nominal_type;
2401 data->passed_type = passed_type;
2402 data->nominal_mode = nominal_mode;
2403 data->passed_mode = passed_mode;
2404 data->promoted_mode = promoted_mode;
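/* For instance, on a target whose promote_function_mode hook widens
   sub-word integers (as many RISC ABIs do), a parameter declared
   "short s" leaves here with nominal_mode == passed_mode == HImode
   but promoted_mode == SImode; if instead the parm is passed by
   invisible reference, all three modes describe the pointer, not the
   value.  */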
2407 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2409 static void
2410 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2411 struct assign_parm_data_one *data, bool no_rtl)
2413 int varargs_pretend_bytes = 0;
2415 targetm.calls.setup_incoming_varargs (all->args_so_far,
2416 data->promoted_mode,
2417 data->passed_type,
2418 &varargs_pretend_bytes, no_rtl);
2420 /* If the back-end has requested extra stack space, record how much is
2421 needed. Do not change pretend_args_size otherwise since it may be
2422 nonzero from an earlier partial argument. */
2423 if (varargs_pretend_bytes > 0)
2424 all->pretend_args_size = varargs_pretend_bytes;
2427 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2428 the incoming location of the current parameter. */
2430 static void
2431 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2432 struct assign_parm_data_one *data)
2434 HOST_WIDE_INT pretend_bytes = 0;
2435 rtx entry_parm;
2436 bool in_regs;
2438 if (data->promoted_mode == VOIDmode)
2440 data->entry_parm = data->stack_parm = const0_rtx;
2441 return;
2444 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2445 data->promoted_mode,
2446 data->passed_type,
2447 data->named_arg);
2449 if (entry_parm == 0)
2450 data->promoted_mode = data->passed_mode;
2452 /* Determine parm's home in the stack, in case it arrives in the stack
2453 or we should pretend it did. Compute the stack position and rtx where
2454 the argument arrives and its size.
2456 There is one complexity here: If this was a parameter that would
2457 have been passed in registers, but wasn't only because it is
2458 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2459 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2460 In this case, we call the function_incoming_arg hook with NAMED
2461 set to true instead of false as it was the previous time. */
2462 in_regs = entry_parm != 0;
2463 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2464 in_regs = true;
2465 #endif
2466 if (!in_regs && !data->named_arg)
2468 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2470 rtx tem;
2471 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2472 data->promoted_mode,
2473 data->passed_type, true);
2474 in_regs = tem != NULL;
2478 /* If this parameter was passed both in registers and in the stack, use
2479 the copy on the stack. */
2480 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2481 data->passed_type))
2482 entry_parm = 0;
2484 if (entry_parm)
2486 int partial;
2488 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2489 data->promoted_mode,
2490 data->passed_type,
2491 data->named_arg);
2492 data->partial = partial;
2494 /* The caller might already have allocated stack space for the
2495 register parameters. */
2496 if (partial != 0 && all->reg_parm_stack_space == 0)
2498 /* Part of this argument is passed in registers and part
2499 is passed on the stack. Ask the prologue code to extend
2500 the stack part so that we can recreate the full value.
2502 PRETEND_BYTES is the size of the registers we need to store.
2503 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2504 stack space that the prologue should allocate.
2506 Internally, gcc assumes that the argument pointer is aligned
2507 to STACK_BOUNDARY bits. This is used both for alignment
2508 optimizations (see init_emit) and to locate arguments that are
2509 aligned to more than PARM_BOUNDARY bits. We must preserve this
2510 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2511 a stack boundary. */
2513 /* We assume at most one partial arg, and it must be the first
2514 argument on the stack. */
2515 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2517 pretend_bytes = partial;
2518 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2520 /* We want to align relative to the actual stack pointer, so
2521 don't include this in the stack size until later. */
2522 all->extra_pretend_bytes = all->pretend_args_size;
2526 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2527 entry_parm ? data->partial : 0, current_function_decl,
2528 &all->stack_args_size, &data->locate);
2530 /* Update parm_stack_boundary if this parameter is passed in the
2531 stack. */
2532 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2533 crtl->parm_stack_boundary = data->locate.boundary;
2535 /* Adjust offsets to include the pretend args. */
2536 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2537 data->locate.slot_offset.constant += pretend_bytes;
2538 data->locate.offset.constant += pretend_bytes;
2540 data->entry_parm = entry_parm;
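/* Worked example of the rounding above: with STACK_BYTES == 16 and a
   partial register part of 20 bytes,

     CEIL_ROUND (20, 16) == (20 + 15) & ~15 == 32

   so the prologue is asked to pretend 32 bytes of incoming stack
   space, keeping the argument pointer on a stack boundary.  */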
2543 /* A subroutine of assign_parms. If there is actually space on the stack
2544 for this parm, count it in stack_args_size and return true. */
2546 static bool
2547 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2548 struct assign_parm_data_one *data)
2550 /* Trivially true if we've no incoming register. */
2551 if (data->entry_parm == NULL)
2553 /* Also true if we're partially in registers and partially not,
2554 since we've arranged to drop the entire argument on the stack. */
2555 else if (data->partial != 0)
2557 /* Also true if the target says that it's passed in both registers
2558 and on the stack. */
2559 else if (GET_CODE (data->entry_parm) == PARALLEL
2560 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2562 /* Also true if the target says that there's stack allocated for
2563 all register parameters. */
2564 else if (all->reg_parm_stack_space > 0)
2566 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2567 else
2568 return false;
2570 all->stack_args_size.constant += data->locate.size.constant;
2571 if (data->locate.size.var)
2572 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2574 return true;
2577 /* A subroutine of assign_parms. Given that this parameter is allocated
2578 stack space by the ABI, find it. */
2580 static void
2581 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2583 rtx offset_rtx, stack_parm;
2584 unsigned int align, boundary;
2586 /* If we're passing this arg using a reg, make its stack home the
2587 aligned stack slot. */
2588 if (data->entry_parm)
2589 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2590 else
2591 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2593 stack_parm = crtl->args.internal_arg_pointer;
2594 if (offset_rtx != const0_rtx)
2595 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2596 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2598 if (!data->passed_pointer)
2600 set_mem_attributes (stack_parm, parm, 1);
2601 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2602 while promoted mode's size is needed. */
2603 if (data->promoted_mode != BLKmode
2604 && data->promoted_mode != DECL_MODE (parm))
2606 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2607 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2609 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2610 data->promoted_mode);
2611 if (offset)
2612 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2617 boundary = data->locate.boundary;
2618 align = BITS_PER_UNIT;
2620 /* If we're padding upward, we know that the alignment of the slot
2621 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2622 intentionally forcing upward padding. Otherwise we have to come
2623 up with a guess at the alignment based on OFFSET_RTX. */
2624 if (data->locate.where_pad != downward || data->entry_parm)
2625 align = boundary;
2626 else if (CONST_INT_P (offset_rtx))
2628 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2629 align = align & -align;
2631 set_mem_align (stack_parm, align);
2633 if (data->entry_parm)
2634 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2636 data->stack_parm = stack_parm;
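/* A worked instance of the alignment guess above: for a slot at byte
   offset 12 under a 64-bit boundary,

     align = 12 * BITS_PER_UNIT | 64 == 96, and 96 & -96 == 32

   since "x & -x" isolates the lowest set bit in two's complement, so
   the slot may only be assumed 32-bit aligned.  */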
2639 /* A subroutine of assign_parms. Adjust DATA->ENTRY_PARM such that it's
2640 always valid and contiguous. */
2642 static void
2643 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2645 rtx entry_parm = data->entry_parm;
2646 rtx stack_parm = data->stack_parm;
2648 /* If this parm was passed part in regs and part in memory, pretend it
2649 arrived entirely in memory by pushing the register-part onto the stack.
2650 In the special case of a DImode or DFmode that is split, we could put
2651 it together in a pseudoreg directly, but for now that's not worth
2652 bothering with. */
2653 if (data->partial != 0)
2655 /* Handle calls that pass values in multiple non-contiguous
2656 locations. The Irix 6 ABI has examples of this. */
2657 if (GET_CODE (entry_parm) == PARALLEL)
2658 emit_group_store (validize_mem (stack_parm), entry_parm,
2659 data->passed_type,
2660 int_size_in_bytes (data->passed_type));
2661 else
2663 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2664 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2665 data->partial / UNITS_PER_WORD);
2668 entry_parm = stack_parm;
2671 /* If we didn't decide this parm came in a register, by default it came
2672 on the stack. */
2673 else if (entry_parm == NULL)
2674 entry_parm = stack_parm;
2676 /* When an argument is passed in multiple locations, we can't make use
2677 of this information, but we can save some copying if the whole argument
2678 is passed in a single register. */
2679 else if (GET_CODE (entry_parm) == PARALLEL
2680 && data->nominal_mode != BLKmode
2681 && data->passed_mode != BLKmode)
2683 size_t i, len = XVECLEN (entry_parm, 0);
2685 for (i = 0; i < len; i++)
2686 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2687 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2688 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2689 == data->passed_mode)
2690 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2692 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2693 break;
2697 data->entry_parm = entry_parm;
2700 /* A subroutine of assign_parms. Reconstitute any values which were
2701 passed in multiple registers and would fit in a single register. */
2703 static void
2704 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2706 rtx entry_parm = data->entry_parm;
2708 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2709 This can be done with register operations rather than on the
2710 stack, even if we will store the reconstituted parameter on the
2711 stack later. */
2712 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2714 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2715 emit_group_store (parmreg, entry_parm, data->passed_type,
2716 GET_MODE_SIZE (GET_MODE (entry_parm)));
2717 entry_parm = parmreg;
2720 data->entry_parm = entry_parm;
2723 /* A subroutine of assign_parms. Adjust DATA->STACK_PARM such that it's
2724 always valid and properly aligned. */
2726 static void
2727 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2729 rtx stack_parm = data->stack_parm;
2731 /* If we can't trust the parm stack slot to be aligned enough for its
2732 ultimate type, don't use that slot after entry. We'll make another
2733 stack slot, if we need one. */
2734 if (stack_parm
2735 && ((STRICT_ALIGNMENT
2736 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2737 || (data->nominal_type
2738 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2739 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2740 stack_parm = NULL;
2742 /* If parm was passed in memory, and we need to convert it on entry,
2743 don't store it back in that same slot. */
2744 else if (data->entry_parm == stack_parm
2745 && data->nominal_mode != BLKmode
2746 && data->nominal_mode != data->passed_mode)
2747 stack_parm = NULL;
2749 /* If stack protection is in effect for this function, don't leave any
2750 pointers in their passed stack slots. */
2751 else if (crtl->stack_protect_guard
2752 && (flag_stack_protect == 2
2753 || data->passed_pointer
2754 || POINTER_TYPE_P (data->nominal_type)))
2755 stack_parm = NULL;
2757 data->stack_parm = stack_parm;
2760 /* A subroutine of assign_parms. Return true if the current parameter
2761 should be stored as a BLKmode in the current frame. */
2763 static bool
2764 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2766 if (data->nominal_mode == BLKmode)
2767 return true;
2768 if (GET_MODE (data->entry_parm) == BLKmode)
2769 return true;
2771 #ifdef BLOCK_REG_PADDING
2772 /* Only assign_parm_setup_block knows how to deal with register arguments
2773 that are padded at the least significant end. */
2774 if (REG_P (data->entry_parm)
2775 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2776 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2777 == (BYTES_BIG_ENDIAN ? upward : downward)))
2778 return true;
2779 #endif
2781 return false;
2784 /* A subroutine of assign_parms. Arrange for the parameter to be
2785 present and valid in DATA->STACK_PARM. */
2787 static void
2788 assign_parm_setup_block (struct assign_parm_data_all *all,
2789 tree parm, struct assign_parm_data_one *data)
2791 rtx entry_parm = data->entry_parm;
2792 rtx stack_parm = data->stack_parm;
2793 HOST_WIDE_INT size;
2794 HOST_WIDE_INT size_stored;
2796 if (GET_CODE (entry_parm) == PARALLEL)
2797 entry_parm = emit_group_move_into_temps (entry_parm);
2799 size = int_size_in_bytes (data->passed_type);
2800 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2801 if (stack_parm == 0)
2803 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2804 stack_parm = assign_stack_local (BLKmode, size_stored,
2805 DECL_ALIGN (parm));
2806 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2807 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2808 set_mem_attributes (stack_parm, parm, 1);
2811 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2812 calls that pass values in multiple non-contiguous locations. */
2813 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2815 rtx mem;
2817 /* Note that we will be storing an integral number of words.
2818 So we have to be careful to ensure that we allocate an
2819 integral number of words. We do this above when we call
2820 assign_stack_local if space was not allocated in the argument
2821 list. If it was, this will not work if PARM_BOUNDARY is not
2822 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2823 if it becomes a problem. Exception is when BLKmode arrives
2824 with arguments not conforming to word_mode. */
2826 if (data->stack_parm == 0)
2828 else if (GET_CODE (entry_parm) == PARALLEL)
2830 else
2831 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2833 mem = validize_mem (stack_parm);
2835 /* Handle values in multiple non-contiguous locations. */
2836 if (GET_CODE (entry_parm) == PARALLEL)
2838 push_to_sequence2 (all->first_conversion_insn,
2839 all->last_conversion_insn);
2840 emit_group_store (mem, entry_parm, data->passed_type, size);
2841 all->first_conversion_insn = get_insns ();
2842 all->last_conversion_insn = get_last_insn ();
2843 end_sequence ();
2846 else if (size == 0)
2849 /* If SIZE is that of a mode no bigger than a word, just use
2850 that mode's store operation. */
2851 else if (size <= UNITS_PER_WORD)
2853 enum machine_mode mode
2854 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2856 if (mode != BLKmode
2857 #ifdef BLOCK_REG_PADDING
2858 && (size == UNITS_PER_WORD
2859 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2860 != (BYTES_BIG_ENDIAN ? upward : downward)))
2861 #endif
2864 rtx reg;
2866 /* We are really truncating a word_mode value containing
2867 SIZE bytes into a value of mode MODE. If such an
2868 operation requires no actual instructions, we can refer
2869 to the value directly in mode MODE, otherwise we must
2870 start with the register in word_mode and explicitly
2871 convert it. */
2872 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2873 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2874 else
2876 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2877 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2879 emit_move_insn (change_address (mem, mode, 0), reg);
2882 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2883 machine must be aligned to the left before storing
2884 to memory. Note that the previous test doesn't
2885 handle all cases (e.g. SIZE == 3). */
2886 else if (size != UNITS_PER_WORD
2887 #ifdef BLOCK_REG_PADDING
2888 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2889 == downward)
2890 #else
2891 && BYTES_BIG_ENDIAN
2892 #endif
2895 rtx tem, x;
2896 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2897 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2899 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
2900 tem = change_address (mem, word_mode, 0);
2901 emit_move_insn (tem, x);
2903 else
2904 move_block_from_reg (REGNO (entry_parm), mem,
2905 size_stored / UNITS_PER_WORD);
2907 else
2908 move_block_from_reg (REGNO (entry_parm), mem,
2909 size_stored / UNITS_PER_WORD);
2911 else if (data->stack_parm == 0)
2913 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2914 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2915 BLOCK_OP_NORMAL);
2916 all->first_conversion_insn = get_insns ();
2917 all->last_conversion_insn = get_last_insn ();
2918 end_sequence ();
2921 data->stack_parm = stack_parm;
2922 SET_DECL_RTL (parm, stack_parm);
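/* Example of the big-endian case above, assuming a 4-byte word: a
   3-byte struct arriving in a register sits in its least significant
   bytes when padded downward, so it is shifted left by
   (4 - 3) * BITS_PER_UNIT == 8 bits first; the word_mode store then
   places the three meaningful bytes at the start of the slot.  */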
2925 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2926 parameter. Get it there. Perform all ABI specified conversions. */
2928 static void
2929 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2930 struct assign_parm_data_one *data)
2932 rtx parmreg, validated_mem;
2933 rtx equiv_stack_parm;
2934 enum machine_mode promoted_nominal_mode;
2935 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2936 bool did_conversion = false;
2937 bool need_conversion, moved;
2939 /* Store the parm in a pseudoregister during the function, but we may
2940 need to do it in a wider mode. Using 2 here makes the result
2941 consistent with promote_decl_mode and thus expand_expr_real_1. */
2942 promoted_nominal_mode
2943 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
2944 TREE_TYPE (current_function_decl), 2);
2946 parmreg = gen_reg_rtx (promoted_nominal_mode);
2948 if (!DECL_ARTIFICIAL (parm))
2949 mark_user_reg (parmreg);
2951 /* If this was an item that we received a pointer to,
2952 set DECL_RTL appropriately. */
2953 if (data->passed_pointer)
2955 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2956 set_mem_attributes (x, parm, 1);
2957 SET_DECL_RTL (parm, x);
2959 else
2960 SET_DECL_RTL (parm, parmreg);
2962 assign_parm_remove_parallels (data);
2964 /* Copy the value into the register, thus bridging between
2965 assign_parm_find_data_types and expand_expr_real_1. */
2967 equiv_stack_parm = data->stack_parm;
2968 validated_mem = validize_mem (data->entry_parm);
2970 need_conversion = (data->nominal_mode != data->passed_mode
2971 || promoted_nominal_mode != data->promoted_mode);
2972 moved = false;
2974 if (need_conversion
2975 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2976 && data->nominal_mode == data->passed_mode
2977 && data->nominal_mode == GET_MODE (data->entry_parm))
2979 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2980 mode, by the caller. We now have to convert it to
2981 NOMINAL_MODE, if different. However, PARMREG may be in
2982 a different mode than NOMINAL_MODE if it is being stored
2983 promoted.
2985 If ENTRY_PARM is a hard register, it might be in a register
2986 not valid for operating in its mode (e.g., an odd-numbered
2987 register for a DFmode). In that case, moves are the only
2988 thing valid, so we can't do a convert from there. This
2989 occurs when the calling sequence allows such misaligned
2990 usage.
2992 In addition, the conversion may involve a call, which could
2993 clobber parameters which haven't been copied to pseudo
2994 registers yet.
2996 First, we try to emit an insn which performs the necessary
2997 conversion. We verify that this insn does not clobber any
2998 hard registers. */
3000 enum insn_code icode;
3001 rtx op0, op1;
3003 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3004 unsignedp);
3006 op0 = parmreg;
3007 op1 = validated_mem;
3008 if (icode != CODE_FOR_nothing
3009 && insn_operand_matches (icode, 0, op0)
3010 && insn_operand_matches (icode, 1, op1))
3012 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3013 rtx insn, insns, t = op1;
3014 HARD_REG_SET hardregs;
3016 start_sequence ();
3017 /* If op1 is a hard register that is likely spilled, first
3018 force it into a pseudo, otherwise combiner might extend
3019 its lifetime too much. */
3020 if (GET_CODE (t) == SUBREG)
3021 t = SUBREG_REG (t);
3022 if (REG_P (t)
3023 && HARD_REGISTER_P (t)
3024 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3025 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3027 t = gen_reg_rtx (GET_MODE (op1));
3028 emit_move_insn (t, op1);
3030 else
3031 t = op1;
3032 insn = gen_extend_insn (op0, t, promoted_nominal_mode,
3033 data->passed_mode, unsignedp);
3034 emit_insn (insn);
3035 insns = get_insns ();
3037 moved = true;
3038 CLEAR_HARD_REG_SET (hardregs);
3039 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3041 if (INSN_P (insn))
3042 note_stores (PATTERN (insn), record_hard_reg_sets,
3043 &hardregs);
3044 if (!hard_reg_set_empty_p (hardregs))
3045 moved = false;
3048 end_sequence ();
3050 if (moved)
3052 emit_insn (insns);
3053 if (equiv_stack_parm != NULL_RTX)
3054 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3055 equiv_stack_parm);
3060 if (moved)
3061 /* Nothing to do. */
3063 else if (need_conversion)
3065 /* We did not have an insn to convert directly, or the sequence
3066 generated appeared unsafe. We must first copy the parm to a
3067 pseudo reg, and save the conversion until after all
3068 parameters have been moved. */
3070 int save_tree_used;
3071 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3073 emit_move_insn (tempreg, validated_mem);
3075 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3076 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3078 if (GET_CODE (tempreg) == SUBREG
3079 && GET_MODE (tempreg) == data->nominal_mode
3080 && REG_P (SUBREG_REG (tempreg))
3081 && data->nominal_mode == data->passed_mode
3082 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3083 && GET_MODE_SIZE (GET_MODE (tempreg))
3084 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3086 /* The argument is already sign/zero extended, so note it
3087 into the subreg. */
3088 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3089 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
3092 /* TREE_USED gets set erroneously during expand_assignment. */
3093 save_tree_used = TREE_USED (parm);
3094 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3095 TREE_USED (parm) = save_tree_used;
3096 all->first_conversion_insn = get_insns ();
3097 all->last_conversion_insn = get_last_insn ();
3098 end_sequence ();
3100 did_conversion = true;
3102 else
3103 emit_move_insn (parmreg, validated_mem);
3105 /* If we were passed a pointer but the actual value can safely live
3106 in a register, retrieve it and use it directly. */
3107 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3109 /* We can't use nominal_mode, because it will have been set to
3110 Pmode above. We must use the actual mode of the parm. */
3111 if (use_register_for_decl (parm))
3113 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3114 mark_user_reg (parmreg);
3116 else
3118 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3119 TYPE_MODE (TREE_TYPE (parm)),
3120 TYPE_ALIGN (TREE_TYPE (parm)));
3121 parmreg
3122 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3123 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3124 align);
3125 set_mem_attributes (parmreg, parm, 1);
3128 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3130 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3131 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3133 push_to_sequence2 (all->first_conversion_insn,
3134 all->last_conversion_insn);
3135 emit_move_insn (tempreg, DECL_RTL (parm));
3136 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3137 emit_move_insn (parmreg, tempreg);
3138 all->first_conversion_insn = get_insns ();
3139 all->last_conversion_insn = get_last_insn ();
3140 end_sequence ();
3142 did_conversion = true;
3144 else
3145 emit_move_insn (parmreg, DECL_RTL (parm));
3147 SET_DECL_RTL (parm, parmreg);
3149 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3150 now the parm. */
3151 data->stack_parm = NULL;
3154 /* Mark the register as eliminable if we did no conversion and it was
3155 copied from memory at a fixed offset, and the arg pointer was not
3156 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3157 offset formed an invalid address, such memory-equivalences as we
3158 make here would screw up life analysis for it. */
3159 if (data->nominal_mode == data->passed_mode
3160 && !did_conversion
3161 && data->stack_parm != 0
3162 && MEM_P (data->stack_parm)
3163 && data->locate.offset.var == 0
3164 && reg_mentioned_p (virtual_incoming_args_rtx,
3165 XEXP (data->stack_parm, 0)))
3167 rtx linsn = get_last_insn ();
3168 rtx sinsn, set;
3170 /* Mark complex types separately. */
3171 if (GET_CODE (parmreg) == CONCAT)
3173 enum machine_mode submode
3174 = GET_MODE_INNER (GET_MODE (parmreg));
3175 int regnor = REGNO (XEXP (parmreg, 0));
3176 int regnoi = REGNO (XEXP (parmreg, 1));
3177 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3178 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3179 GET_MODE_SIZE (submode));
3181 /* Scan backwards for the set of the real and
3182 imaginary parts. */
3183 for (sinsn = linsn; sinsn != 0;
3184 sinsn = prev_nonnote_insn (sinsn))
3186 set = single_set (sinsn);
3187 if (set == 0)
3188 continue;
3190 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3191 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3192 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3193 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3196 else
3197 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3200 /* For pointer data type, suggest pointer register. */
3201 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3202 mark_reg_pointer (parmreg,
3203 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3206 /* A subroutine of assign_parms. Allocate stack space to hold the current
3207 parameter. Get it there. Perform all ABI specified conversions. */
3209 static void
3210 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3211 struct assign_parm_data_one *data)
3213 /* Value must be stored in the stack slot STACK_PARM during function
3214 execution. */
3215 bool to_conversion = false;
3217 assign_parm_remove_parallels (data);
3219 if (data->promoted_mode != data->nominal_mode)
3221 /* Conversion is required. */
3222 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3224 emit_move_insn (tempreg, validize_mem (data->entry_parm));
3226 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3227 to_conversion = true;
3229 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3230 TYPE_UNSIGNED (TREE_TYPE (parm)));
3232 if (data->stack_parm)
3234 int offset = subreg_lowpart_offset (data->nominal_mode,
3235 GET_MODE (data->stack_parm));
3236 /* ??? This may need a big-endian conversion on sparc64. */
3237 data->stack_parm
3238 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3239 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3240 set_mem_offset (data->stack_parm,
3241 MEM_OFFSET (data->stack_parm) + offset);
3245 if (data->entry_parm != data->stack_parm)
3247 rtx src, dest;
3249 if (data->stack_parm == 0)
3251 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3252 GET_MODE (data->entry_parm),
3253 TYPE_ALIGN (data->passed_type));
3254 data->stack_parm
3255 = assign_stack_local (GET_MODE (data->entry_parm),
3256 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3257 align);
3258 set_mem_attributes (data->stack_parm, parm, 1);
3261 dest = validize_mem (data->stack_parm);
3262 src = validize_mem (data->entry_parm);
3264 if (MEM_P (src))
3266 /* Use a block move to handle potentially misaligned entry_parm. */
3267 if (!to_conversion)
3268 push_to_sequence2 (all->first_conversion_insn,
3269 all->last_conversion_insn);
3270 to_conversion = true;
3272 emit_block_move (dest, src,
3273 GEN_INT (int_size_in_bytes (data->passed_type)),
3274 BLOCK_OP_NORMAL);
3276 else
3277 emit_move_insn (dest, src);
3280 if (to_conversion)
3282 all->first_conversion_insn = get_insns ();
3283 all->last_conversion_insn = get_last_insn ();
3284 end_sequence ();
3287 SET_DECL_RTL (parm, data->stack_parm);
3290 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3291 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3293 static void
3294 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3295 vec<tree> fnargs)
3297 tree parm;
3298 tree orig_fnargs = all->orig_fnargs;
3299 unsigned i = 0;
3301 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3303 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3304 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3306 rtx tmp, real, imag;
3307 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3309 real = DECL_RTL (fnargs[i]);
3310 imag = DECL_RTL (fnargs[i + 1]);
3311 if (inner != GET_MODE (real))
3313 real = gen_lowpart_SUBREG (inner, real);
3314 imag = gen_lowpart_SUBREG (inner, imag);
3317 if (TREE_ADDRESSABLE (parm))
3319 rtx rmem, imem;
3320 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3321 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3322 DECL_MODE (parm),
3323 TYPE_ALIGN (TREE_TYPE (parm)));
3325 /* split_complex_arg put the real and imag parts in
3326 pseudos. Move them to memory. */
3327 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3328 set_mem_attributes (tmp, parm, 1);
3329 rmem = adjust_address_nv (tmp, inner, 0);
3330 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3331 push_to_sequence2 (all->first_conversion_insn,
3332 all->last_conversion_insn);
3333 emit_move_insn (rmem, real);
3334 emit_move_insn (imem, imag);
3335 all->first_conversion_insn = get_insns ();
3336 all->last_conversion_insn = get_last_insn ();
3337 end_sequence ();
3339 else
3340 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3341 SET_DECL_RTL (parm, tmp);
3343 real = DECL_INCOMING_RTL (fnargs[i]);
3344 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3345 if (inner != GET_MODE (real))
3347 real = gen_lowpart_SUBREG (inner, real);
3348 imag = gen_lowpart_SUBREG (inner, imag);
3350 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3351 set_decl_incoming_rtl (parm, tmp, false);
3352 i++;
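/* E.g. a non-addressable _Complex float parameter whose halves were
   split earlier into two SFmode pseudos r and i gets its DECL_RTL
   rebuilt here as

     (concat:SC r i)

   while an addressable one has both halves spilled into a single
   SCmode stack slot instead.  */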
3357 /* Assign RTL expressions to the function's parameters. This may involve
3358 copying them into registers and using those registers as the DECL_RTL. */
3360 static void
3361 assign_parms (tree fndecl)
3363 struct assign_parm_data_all all;
3364 tree parm;
3365 vec<tree> fnargs;
3366 unsigned i;
3368 crtl->args.internal_arg_pointer
3369 = targetm.calls.internal_arg_pointer ();
3371 assign_parms_initialize_all (&all);
3372 fnargs = assign_parms_augmented_arg_list (&all);
3374 FOR_EACH_VEC_ELT (fnargs, i, parm)
3376 struct assign_parm_data_one data;
3378 /* Extract the type of PARM; adjust it according to ABI. */
3379 assign_parm_find_data_types (&all, parm, &data);
3381 /* Early out for errors and void parameters. */
3382 if (data.passed_mode == VOIDmode)
3384 SET_DECL_RTL (parm, const0_rtx);
3385 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3386 continue;
3389 /* Estimate stack alignment from parameter alignment. */
3390 if (SUPPORTS_STACK_ALIGNMENT)
3392 unsigned int align
3393 = targetm.calls.function_arg_boundary (data.promoted_mode,
3394 data.passed_type);
3395 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3396 align);
3397 if (TYPE_ALIGN (data.nominal_type) > align)
3398 align = MINIMUM_ALIGNMENT (data.nominal_type,
3399 TYPE_MODE (data.nominal_type),
3400 TYPE_ALIGN (data.nominal_type));
3401 if (crtl->stack_alignment_estimated < align)
3403 gcc_assert (!crtl->stack_realign_processed);
3404 crtl->stack_alignment_estimated = align;
3408 if (cfun->stdarg && !DECL_CHAIN (parm))
3409 assign_parms_setup_varargs (&all, &data, false);
3411 /* Find out where the parameter arrives in this function. */
3412 assign_parm_find_entry_rtl (&all, &data);
3414 /* Find out where stack space for this parameter might be. */
3415 if (assign_parm_is_stack_parm (&all, &data))
3417 assign_parm_find_stack_rtl (parm, &data);
3418 assign_parm_adjust_entry_rtl (&data);
3421 /* Record permanently how this parm was passed. */
3422 if (data.passed_pointer)
3424 rtx incoming_rtl
3425 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3426 data.entry_parm);
3427 set_decl_incoming_rtl (parm, incoming_rtl, true);
3429 else
3430 set_decl_incoming_rtl (parm, data.entry_parm, false);
3432 /* Update info on where next arg arrives in registers. */
3433 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3434 data.passed_type, data.named_arg);
3436 assign_parm_adjust_stack_rtl (&data);
3438 if (assign_parm_setup_block_p (&data))
3439 assign_parm_setup_block (&all, parm, &data);
3440 else if (data.passed_pointer || use_register_for_decl (parm))
3441 assign_parm_setup_reg (&all, parm, &data);
3442 else
3443 assign_parm_setup_stack (&all, parm, &data);
3446 if (targetm.calls.split_complex_arg)
3447 assign_parms_unsplit_complex (&all, fnargs);
3449 fnargs.release ();
3451 /* Output all parameter conversion instructions (possibly including calls)
3452 now that all parameters have been copied out of hard registers. */
3453 emit_insn (all.first_conversion_insn);
3455 /* Estimate reload stack alignment from scalar return mode. */
3456 if (SUPPORTS_STACK_ALIGNMENT)
3458 if (DECL_RESULT (fndecl))
3460 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3461 enum machine_mode mode = TYPE_MODE (type);
3463 if (mode != BLKmode
3464 && mode != VOIDmode
3465 && !AGGREGATE_TYPE_P (type))
3467 unsigned int align = GET_MODE_ALIGNMENT (mode);
3468 if (crtl->stack_alignment_estimated < align)
3470 gcc_assert (!crtl->stack_realign_processed);
3471 crtl->stack_alignment_estimated = align;
3477 /* If we are receiving a struct value address as the first argument, set up
3478 the RTL for the function result. As this might require code to convert
3479 the transmitted address to Pmode, we do this here to ensure that possible
3480 preliminary conversions of the address have been emitted already. */
3481 if (all.function_result_decl)
3483 tree result = DECL_RESULT (current_function_decl);
3484 rtx addr = DECL_RTL (all.function_result_decl);
3485 rtx x;
3487 if (DECL_BY_REFERENCE (result))
3489 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3490 x = addr;
3492 else
3494 SET_DECL_VALUE_EXPR (result,
3495 build1 (INDIRECT_REF, TREE_TYPE (result),
3496 all.function_result_decl));
3497 addr = convert_memory_address (Pmode, addr);
3498 x = gen_rtx_MEM (DECL_MODE (result), addr);
3499 set_mem_attributes (x, result, 1);
3502 DECL_HAS_VALUE_EXPR_P (result) = 1;
3504 SET_DECL_RTL (result, x);
3507 /* We have aligned all the args, so add space for the pretend args. */
3508 crtl->args.pretend_args_size = all.pretend_args_size;
3509 all.stack_args_size.constant += all.extra_pretend_bytes;
3510 crtl->args.size = all.stack_args_size.constant;
3512 /* Adjust function incoming argument size for alignment and
3513 minimum length. */
3515 #ifdef REG_PARM_STACK_SPACE
3516 crtl->args.size = MAX (crtl->args.size,
3517 REG_PARM_STACK_SPACE (fndecl));
3518 #endif
3520 crtl->args.size = CEIL_ROUND (crtl->args.size,
3521 PARM_BOUNDARY / BITS_PER_UNIT);
3523 #ifdef ARGS_GROW_DOWNWARD
3524 crtl->args.arg_offset_rtx
3525 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3526 : expand_expr (size_diffop (all.stack_args_size.var,
3527 size_int (-all.stack_args_size.constant)),
3528 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3529 #else
3530 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3531 #endif
3533 /* See how many bytes, if any, of its args a function should try to pop
3534 on return. */
3536 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3537 TREE_TYPE (fndecl),
3538 crtl->args.size);
3540 /* For a stdarg function, save info about the
3541 regs and stack space used by the named args. */
3543 crtl->args.info = all.args_so_far_v;
3545 /* Set the rtx used for the function return value. Put this in its
3546 own variable so any optimizers that need this information don't have
3547 to include tree.h. Do this here so it gets done when an inlined
3548 function gets output. */
3550 crtl->return_rtx
3551 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3552 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3554 /* If scalar return value was computed in a pseudo-reg, or was a named
3555 return value that got dumped to the stack, copy that to the hard
3556 return register. */
3557 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3559 tree decl_result = DECL_RESULT (fndecl);
3560 rtx decl_rtl = DECL_RTL (decl_result);
3562 if (REG_P (decl_rtl)
3563 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3564 : DECL_REGISTER (decl_result))
3566 rtx real_decl_rtl;
3568 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3569 fndecl, true);
3570 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3571 /* The delay slot scheduler assumes that crtl->return_rtx
3572 holds the hard register containing the return value, not a
3573 temporary pseudo. */
3574 crtl->return_rtx = real_decl_rtl;
3579 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3580 For all seen types, gimplify their sizes. */
3582 static tree
3583 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3585 tree t = *tp;
3587 *walk_subtrees = 0;
3588 if (TYPE_P (t))
3590 if (POINTER_TYPE_P (t))
3591 *walk_subtrees = 1;
3592 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3593 && !TYPE_SIZES_GIMPLIFIED (t))
3595 gimplify_type_sizes (t, (gimple_seq *) data);
3596 *walk_subtrees = 1;
3600 return NULL;
3603 /* Gimplify the parameter list for current_function_decl. This involves
3604 evaluating SAVE_EXPRs of variable sized parameters and generating code
3605 to implement callee-copies reference parameters. Returns a sequence of
3606 statements to add to the beginning of the function. */
3608 gimple_seq
3609 gimplify_parameters (void)
3611 struct assign_parm_data_all all;
3612 tree parm;
3613 gimple_seq stmts = NULL;
3614 vec<tree> fnargs;
3615 unsigned i;
3617 assign_parms_initialize_all (&all);
3618 fnargs = assign_parms_augmented_arg_list (&all);
3620 FOR_EACH_VEC_ELT (fnargs, i, parm)
3622 struct assign_parm_data_one data;
3624 /* Extract the type of PARM; adjust it according to ABI. */
3625 assign_parm_find_data_types (&all, parm, &data);
3627 /* Early out for errors and void parameters. */
3628 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3629 continue;
3631 /* Update info on where next arg arrives in registers. */
3632 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3633 data.passed_type, data.named_arg);
3635 /* ??? Once upon a time variable_size stuffed parameter list
3636 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3637 turned out to be less than manageable in the gimple world.
3638 Now we have to hunt them down ourselves. */
3639 walk_tree_without_duplicates (&data.passed_type,
3640 gimplify_parm_type, &stmts);
3642 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3644 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3645 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3648 if (data.passed_pointer)
3650 tree type = TREE_TYPE (data.passed_type);
3651 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
3652 type, data.named_arg))
3654 tree local, t;
3656 /* For constant-sized objects, this is trivial; for
3657 variable-sized objects, we have to play games. */
3658 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3659 && !(flag_stack_check == GENERIC_STACK_CHECK
3660 && compare_tree_int (DECL_SIZE_UNIT (parm),
3661 STACK_CHECK_MAX_VAR_SIZE) > 0))
3663 local = create_tmp_var (type, get_name (parm));
3664 DECL_IGNORED_P (local) = 0;
3665 /* If PARM was addressable, move that flag over
3666 to the local copy, as its address will be taken,
3667 not the PARM's. Keep the parm's address-taken flag set,
3668 as we'll query that flag during gimplification. */
3669 if (TREE_ADDRESSABLE (parm))
3670 TREE_ADDRESSABLE (local) = 1;
3671 else if (TREE_CODE (type) == COMPLEX_TYPE
3672 || TREE_CODE (type) == VECTOR_TYPE)
3673 DECL_GIMPLE_REG_P (local) = 1;
3675 else
3677 tree ptr_type, addr;
3679 ptr_type = build_pointer_type (type);
3680 addr = create_tmp_reg (ptr_type, get_name (parm));
3681 DECL_IGNORED_P (addr) = 0;
3682 local = build_fold_indirect_ref (addr);
3684 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3685 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
3686 size_int (DECL_ALIGN (parm)));
3688 /* The call has been built for a variable-sized object. */
3689 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3690 t = fold_convert (ptr_type, t);
3691 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3692 gimplify_and_add (t, &stmts);
3695 gimplify_assign (local, parm, &stmts);
3697 SET_DECL_VALUE_EXPR (parm, local);
3698 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3703 fnargs.release ();
3705 return stmts;
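/* Illustrative sketch (not compiled code): for a callee-copied,
   variable-sized parameter P, the loop above conceptually emits

       addr = __builtin_alloca_with_align (DECL_SIZE_UNIT (P),
                                           DECL_ALIGN (P));
       *addr = P;	/* perform the callee copy */

   and then makes "*addr" the DECL_VALUE_EXPR of P, so that later
   gimplification reads the local copy rather than the incoming
   reference.  */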
3708 /* Compute the size and offset from the start of the stacked arguments for a
3709 parm passed in mode PASSED_MODE and with type TYPE.
3711 INITIAL_OFFSET_PTR points to the current offset into the stacked
3712 arguments.
3714 The starting offset and size for this parm are returned in
3715 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3716    nonzero, the offset is that of the stack slot, which is returned in
3717 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3718 padding required from the initial offset ptr to the stack slot.
3720 IN_REGS is nonzero if the argument will be passed in registers. It will
3721 never be set if REG_PARM_STACK_SPACE is not defined.
3723 FNDECL is the function in which the argument was defined.
3725 There are two types of rounding that are done. The first, controlled by
3726 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3727 argument list to be aligned to the specific boundary (in bits). This
3728 rounding affects the initial and starting offsets, but not the argument
3729 size.
3731 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3732 optionally rounds the size of the parm to PARM_BOUNDARY. The
3733 initial offset is not affected by this rounding, while the size always
3734 is and the starting offset may be. */
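/* Worked example with illustrative numbers: assuming PARM_BOUNDARY == 32
   and a 32-bit argument boundary, a 6-byte BLKmode argument at offset 5
   has its starting offset rounded up to 8 and its size padded from 6 to
   8 bytes, so the next argument starts at offset 16.  */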
3736 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3737 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3738 callers pass in the total size of args so far as
3739 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3741 void
3742 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3743 int partial, tree fndecl ATTRIBUTE_UNUSED,
3744 struct args_size *initial_offset_ptr,
3745 struct locate_and_pad_arg_data *locate)
3747 tree sizetree;
3748 enum direction where_pad;
3749 unsigned int boundary, round_boundary;
3750 int reg_parm_stack_space = 0;
3751 int part_size_in_regs;
3753 #ifdef REG_PARM_STACK_SPACE
3754 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3756 /* If we have found a stack parm before we reach the end of the
3757 area reserved for registers, skip that area. */
3758 if (! in_regs)
3760 if (reg_parm_stack_space > 0)
3762 if (initial_offset_ptr->var)
3764 initial_offset_ptr->var
3765 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3766 ssize_int (reg_parm_stack_space));
3767 initial_offset_ptr->constant = 0;
3769 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3770 initial_offset_ptr->constant = reg_parm_stack_space;
3773 #endif /* REG_PARM_STACK_SPACE */
3775 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3777 sizetree
3778 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3779 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3780 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
3781 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3782 type);
3783 locate->where_pad = where_pad;
3785 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3786 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3787 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3789 locate->boundary = boundary;
3791 if (SUPPORTS_STACK_ALIGNMENT)
3793 /* stack_alignment_estimated can't change after stack has been
3794 realigned. */
3795 if (crtl->stack_alignment_estimated < boundary)
3797 if (!crtl->stack_realign_processed)
3798 crtl->stack_alignment_estimated = boundary;
3799 else
3801 /* If stack is realigned and stack alignment value
3802 hasn't been finalized, it is OK not to increase
3803 stack_alignment_estimated. The bigger alignment
3804 requirement is recorded in stack_alignment_needed
3805 below. */
3806 gcc_assert (!crtl->stack_realign_finalized
3807 && crtl->stack_realign_needed);
3812 /* Remember if the outgoing parameter requires extra alignment on the
3813 calling function side. */
3814 if (crtl->stack_alignment_needed < boundary)
3815 crtl->stack_alignment_needed = boundary;
3816 if (crtl->preferred_stack_boundary < boundary)
3817 crtl->preferred_stack_boundary = boundary;
3819 #ifdef ARGS_GROW_DOWNWARD
3820 locate->slot_offset.constant = -initial_offset_ptr->constant;
3821 if (initial_offset_ptr->var)
3822 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3823 initial_offset_ptr->var);
3826 tree s2 = sizetree;
3827 if (where_pad != none
3828 && (!host_integerp (sizetree, 1)
3829 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3830 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
3831 SUB_PARM_SIZE (locate->slot_offset, s2);
3834 locate->slot_offset.constant += part_size_in_regs;
3836 if (!in_regs
3837 #ifdef REG_PARM_STACK_SPACE
3838 || REG_PARM_STACK_SPACE (fndecl) > 0
3839 #endif
3841 pad_to_arg_alignment (&locate->slot_offset, boundary,
3842 &locate->alignment_pad);
3844 locate->size.constant = (-initial_offset_ptr->constant
3845 - locate->slot_offset.constant);
3846 if (initial_offset_ptr->var)
3847 locate->size.var = size_binop (MINUS_EXPR,
3848 size_binop (MINUS_EXPR,
3849 ssize_int (0),
3850 initial_offset_ptr->var),
3851 locate->slot_offset.var);
3853 /* Pad_below needs the pre-rounded size to know how much to pad
3854 below. */
3855 locate->offset = locate->slot_offset;
3856 if (where_pad == downward)
3857 pad_below (&locate->offset, passed_mode, sizetree);
3859 #else /* !ARGS_GROW_DOWNWARD */
3860 if (!in_regs
3861 #ifdef REG_PARM_STACK_SPACE
3862 || REG_PARM_STACK_SPACE (fndecl) > 0
3863 #endif
3865 pad_to_arg_alignment (initial_offset_ptr, boundary,
3866 &locate->alignment_pad);
3867 locate->slot_offset = *initial_offset_ptr;
3869 #ifdef PUSH_ROUNDING
3870 if (passed_mode != BLKmode)
3871 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3872 #endif
3874 /* Pad_below needs the pre-rounded size to know how much to pad below
3875 so this must be done before rounding up. */
3876 locate->offset = locate->slot_offset;
3877 if (where_pad == downward)
3878 pad_below (&locate->offset, passed_mode, sizetree);
3880 if (where_pad != none
3881 && (!host_integerp (sizetree, 1)
3882 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3883 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
3885 ADD_PARM_SIZE (locate->size, sizetree);
3887 locate->size.constant -= part_size_in_regs;
3888 #endif /* ARGS_GROW_DOWNWARD */
3890 #ifdef FUNCTION_ARG_OFFSET
3891 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3892 #endif
3895 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3896 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3898 static void
3899 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3900 struct args_size *alignment_pad)
3902 tree save_var = NULL_TREE;
3903 HOST_WIDE_INT save_constant = 0;
3904 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3905 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3907 #ifdef SPARC_STACK_BOUNDARY_HACK
3908 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3909 the real alignment of %sp. However, when it does this, the
3910 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3911 if (SPARC_STACK_BOUNDARY_HACK)
3912 sp_offset = 0;
3913 #endif
3915 if (boundary > PARM_BOUNDARY)
3917 save_var = offset_ptr->var;
3918 save_constant = offset_ptr->constant;
3921 alignment_pad->var = NULL_TREE;
3922 alignment_pad->constant = 0;
3924 if (boundary > BITS_PER_UNIT)
3926 if (offset_ptr->var)
3928 tree sp_offset_tree = ssize_int (sp_offset);
3929 tree offset = size_binop (PLUS_EXPR,
3930 ARGS_SIZE_TREE (*offset_ptr),
3931 sp_offset_tree);
3932 #ifdef ARGS_GROW_DOWNWARD
3933 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3934 #else
3935 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3936 #endif
3938 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3939 /* ARGS_SIZE_TREE includes constant term. */
3940 offset_ptr->constant = 0;
3941 if (boundary > PARM_BOUNDARY)
3942 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3943 save_var);
3945 else
3947 offset_ptr->constant = -sp_offset +
3948 #ifdef ARGS_GROW_DOWNWARD
3949 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3950 #else
3951 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3952 #endif
3953 if (boundary > PARM_BOUNDARY)
3954 alignment_pad->constant = offset_ptr->constant - save_constant;
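/* Numeric example (assuming STACK_POINTER_OFFSET == 4): rounding a
   constant offset of 21 to a 64-bit (8-byte) boundary computes
   CEIL_ROUND (21 + 4, 8) - 4 == 28 and records an alignment pad of
   28 - 21 == 7 bytes.  */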
3959 static void
3960 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3962 if (passed_mode != BLKmode)
3964 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3965 offset_ptr->constant
3966 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3967 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3968 - GET_MODE_SIZE (passed_mode));
3970 else
3972 if (TREE_CODE (sizetree) != INTEGER_CST
3973 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3975 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3976 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3977 /* Add it in. */
3978 ADD_PARM_SIZE (*offset_ptr, s2);
3979 SUB_PARM_SIZE (*offset_ptr, sizetree);
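/* Example: with PARM_BOUNDARY == 32, a 6-byte BLKmode value rounds up
   to 8 bytes, so OFFSET_PTR is advanced by the 8 - 6 == 2 bytes of
   padding that sit below the value.  */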
3985 /* True if register REGNO was alive at a place where `setjmp' was
3986 called and was set more than once or is an argument. Such regs may
3987 be clobbered by `longjmp'. */
3989 static bool
3990 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3992 /* There appear to be cases where some local vars never reach the
3993 backend but have bogus regnos. */
3994 if (regno >= max_reg_num ())
3995 return false;
3997 return ((REG_N_SETS (regno) > 1
3998 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3999 && REGNO_REG_SET_P (setjmp_crosses, regno));
4002 /* Walk the tree of blocks describing the binding levels within a
4003    function and warn about variables that might be killed by setjmp or
4004    vfork.  This is done after flow analysis and before register
4005    allocation, since register allocation will replace the
4006    pseudo-regs with hard regs.  */
4008 static void
4009 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4011 tree decl, sub;
4013 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4015 if (TREE_CODE (decl) == VAR_DECL
4016 && DECL_RTL_SET_P (decl)
4017 && REG_P (DECL_RTL (decl))
4018 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4019 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4020 " %<longjmp%> or %<vfork%>", decl);
4023 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4024 setjmp_vars_warning (setjmp_crosses, sub);
4027 /* Do the appropriate part of setjmp_vars_warning
4028 but for arguments instead of local variables. */
4030 static void
4031 setjmp_args_warning (bitmap setjmp_crosses)
4033 tree decl;
4034 for (decl = DECL_ARGUMENTS (current_function_decl);
4035 decl; decl = DECL_CHAIN (decl))
4036 if (DECL_RTL (decl) != 0
4037 && REG_P (DECL_RTL (decl))
4038 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4039 warning (OPT_Wclobbered,
4040 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4041 decl);
4044 /* Generate warning messages for variables live across setjmp. */
4046 void
4047 generate_setjmp_warnings (void)
4049 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4051 if (n_basic_blocks == NUM_FIXED_BLOCKS
4052 || bitmap_empty_p (setjmp_crosses))
4053 return;
4055 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4056 setjmp_args_warning (setjmp_crosses);
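/* A minimal example of the code these warnings target (illustrative):

       #include <setjmp.h>
       jmp_buf buf;

       int
       f (void)
       {
         int x = 1;		/* may be promoted to a register */
         if (setjmp (buf))
           return x;		/* x may have been clobbered */
         x = 2;
         longjmp (buf, 1);
       }

   If X lives in a call-clobbered register and is modified between the
   setjmp and the longjmp, its value after the longjmp is unspecified,
   which is what -Wclobbered reports.  */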
4060 /* Reverse the order of elements in the fragment chain T of blocks,
4061 and return the new head of the chain (old last element).
4062    In addition to that, clear BLOCK_SAME_RANGE flags when needed
4063 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4064 its super fragment origin. */
4066 static tree
4067 block_fragments_nreverse (tree t)
4069 tree prev = 0, block, next, prev_super = 0;
4070 tree super = BLOCK_SUPERCONTEXT (t);
4071 if (BLOCK_FRAGMENT_ORIGIN (super))
4072 super = BLOCK_FRAGMENT_ORIGIN (super);
4073 for (block = t; block; block = next)
4075 next = BLOCK_FRAGMENT_CHAIN (block);
4076 BLOCK_FRAGMENT_CHAIN (block) = prev;
4077 if ((prev && !BLOCK_SAME_RANGE (prev))
4078 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4079 != prev_super))
4080 BLOCK_SAME_RANGE (block) = 0;
4081 prev_super = BLOCK_SUPERCONTEXT (block);
4082 BLOCK_SUPERCONTEXT (block) = super;
4083 prev = block;
4085 t = BLOCK_FRAGMENT_ORIGIN (t);
4086 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4087 != prev_super)
4088 BLOCK_SAME_RANGE (t) = 0;
4089 BLOCK_SUPERCONTEXT (t) = super;
4090 return prev;
4093 /* Reverse the order of elements in the chain T of blocks,
4094 and return the new head of the chain (old last element).
4095 Also do the same on subblocks and reverse the order of elements
4096 in BLOCK_FRAGMENT_CHAIN as well. */
4098 static tree
4099 blocks_nreverse_all (tree t)
4101 tree prev = 0, block, next;
4102 for (block = t; block; block = next)
4104 next = BLOCK_CHAIN (block);
4105 BLOCK_CHAIN (block) = prev;
4106 if (BLOCK_FRAGMENT_CHAIN (block)
4107 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4109 BLOCK_FRAGMENT_CHAIN (block)
4110 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4111 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4112 BLOCK_SAME_RANGE (block) = 0;
4114 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4115 prev = block;
4117 return prev;
4121 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4122 and create duplicate blocks. */
4123 /* ??? Need an option to either create block fragments or to create
4124 abstract origin duplicates of a source block. It really depends
4125 on what optimization has been performed. */
4127 void
4128 reorder_blocks (void)
4130 tree block = DECL_INITIAL (current_function_decl);
4131 vec<tree> block_stack;
4133 if (block == NULL_TREE)
4134 return;
4136 block_stack.create (10);
4138 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4139 clear_block_marks (block);
4141 /* Prune the old trees away, so that they don't get in the way. */
4142 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4143 BLOCK_CHAIN (block) = NULL_TREE;
4145 /* Recreate the block tree from the note nesting. */
4146 reorder_blocks_1 (get_insns (), block, &block_stack);
4147 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4149 block_stack.release ();
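/* For instance, a note stream of the shape

       BLOCK_BEG A  BLOCK_BEG B  BLOCK_END B  BLOCK_END A

   rebuilds A as a subblock of the outermost block with B as A's only
   subblock; a second BLOCK_BEG A later in the stream would instead
   create a fragment of A (see reorder_blocks_1 below).  */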
4152 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4154 void
4155 clear_block_marks (tree block)
4157 while (block)
4159 TREE_ASM_WRITTEN (block) = 0;
4160 clear_block_marks (BLOCK_SUBBLOCKS (block));
4161 block = BLOCK_CHAIN (block);
4165 static void
4166 reorder_blocks_1 (rtx insns, tree current_block, vec<tree> *p_block_stack)
4168 rtx insn;
4169 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4171 for (insn = insns; insn; insn = NEXT_INSN (insn))
4173 if (NOTE_P (insn))
4175 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4177 tree block = NOTE_BLOCK (insn);
4178 tree origin;
4180 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4181 origin = block;
4183 if (prev_end)
4184 BLOCK_SAME_RANGE (prev_end) = 0;
4185 prev_end = NULL_TREE;
4187 /* If we have seen this block before, that means it now
4188 spans multiple address regions. Create a new fragment. */
4189 if (TREE_ASM_WRITTEN (block))
4191 tree new_block = copy_node (block);
4193 BLOCK_SAME_RANGE (new_block) = 0;
4194 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4195 BLOCK_FRAGMENT_CHAIN (new_block)
4196 = BLOCK_FRAGMENT_CHAIN (origin);
4197 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4199 NOTE_BLOCK (insn) = new_block;
4200 block = new_block;
4203 if (prev_beg == current_block && prev_beg)
4204 BLOCK_SAME_RANGE (block) = 1;
4206 prev_beg = origin;
4208 BLOCK_SUBBLOCKS (block) = 0;
4209 TREE_ASM_WRITTEN (block) = 1;
4210 /* When there's only one block for the entire function,
4211 current_block == block and we mustn't do this, it
4212 will cause infinite recursion. */
4213 if (block != current_block)
4215 tree super;
4216 if (block != origin)
4217 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4218 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4219 (origin))
4220 == current_block);
4221 if (p_block_stack->is_empty ())
4222 super = current_block;
4223 else
4225 super = p_block_stack->last ();
4226 gcc_assert (super == current_block
4227 || BLOCK_FRAGMENT_ORIGIN (super)
4228 == current_block);
4230 BLOCK_SUPERCONTEXT (block) = super;
4231 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4232 BLOCK_SUBBLOCKS (current_block) = block;
4233 current_block = origin;
4235 p_block_stack->safe_push (block);
4237 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4239 NOTE_BLOCK (insn) = p_block_stack->pop ();
4240 current_block = BLOCK_SUPERCONTEXT (current_block);
4241 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4242 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4243 prev_beg = NULL_TREE;
4244 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4245 ? NOTE_BLOCK (insn) : NULL_TREE;
4248 else
4250 prev_beg = NULL_TREE;
4251 if (prev_end)
4252 BLOCK_SAME_RANGE (prev_end) = 0;
4253 prev_end = NULL_TREE;
4258 /* Reverse the order of elements in the chain T of blocks,
4259 and return the new head of the chain (old last element). */
4261 tree
4262 blocks_nreverse (tree t)
4264 tree prev = 0, block, next;
4265 for (block = t; block; block = next)
4267 next = BLOCK_CHAIN (block);
4268 BLOCK_CHAIN (block) = prev;
4269 prev = block;
4271 return prev;
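/* Usage sketch: since subblock chains are built up in reverse order,
   callers typically restore source order in place with

       BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

   as reorder_blocks does above with blocks_nreverse_all.  */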
4274 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4275 by modifying the last node in chain 1 to point to chain 2. */
4277 tree
4278 block_chainon (tree op1, tree op2)
4280 tree t1;
4282 if (!op1)
4283 return op2;
4284 if (!op2)
4285 return op1;
4287 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4288 continue;
4289 BLOCK_CHAIN (t1) = op2;
4291 #ifdef ENABLE_TREE_CHECKING
4293 tree t2;
4294 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4295 gcc_assert (t2 != t1);
4297 #endif
4299 return op1;
4302 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4303 non-NULL, list them all into VECTOR, in a depth-first preorder
4304 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4305 blocks. */
4307 static int
4308 all_blocks (tree block, tree *vector)
4310 int n_blocks = 0;
4312 while (block)
4314 TREE_ASM_WRITTEN (block) = 0;
4316 /* Record this block. */
4317 if (vector)
4318 vector[n_blocks] = block;
4320 ++n_blocks;
4322 /* Record the subblocks, and their subblocks... */
4323 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4324 vector ? vector + n_blocks : 0);
4325 block = BLOCK_CHAIN (block);
4328 return n_blocks;
4331 /* Return a vector containing all the blocks rooted at BLOCK. The
4332 number of elements in the vector is stored in N_BLOCKS_P. The
4333 vector is dynamically allocated; it is the caller's responsibility
4334 to call `free' on the pointer returned. */
4336 static tree *
4337 get_block_vector (tree block, int *n_blocks_p)
4339 tree *block_vector;
4341 *n_blocks_p = all_blocks (block, NULL);
4342 block_vector = XNEWVEC (tree, *n_blocks_p);
4343 all_blocks (block, block_vector);
4345 return block_vector;
4348 static GTY(()) int next_block_index = 2;
4350 /* Set BLOCK_NUMBER for all the blocks in FN. */
4352 void
4353 number_blocks (tree fn)
4355 int i;
4356 int n_blocks;
4357 tree *block_vector;
4359 /* For SDB and XCOFF debugging output, we start numbering the blocks
4360 from 1 within each function, rather than keeping a running
4361 count. */
4362 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4363 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4364 next_block_index = 1;
4365 #endif
4367 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4369 /* The top-level BLOCK isn't numbered at all. */
4370 for (i = 1; i < n_blocks; ++i)
4371 /* We number the blocks from two. */
4372 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4374 free (block_vector);
4376 return;
4379 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4381 DEBUG_FUNCTION tree
4382 debug_find_var_in_block_tree (tree var, tree block)
4384 tree t;
4386 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4387 if (t == var)
4388 return block;
4390 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4392 tree ret = debug_find_var_in_block_tree (var, t);
4393 if (ret)
4394 return ret;
4397 return NULL_TREE;
4400 /* Keep track of whether we're in a dummy function context. If we are,
4401 we don't want to invoke the set_current_function hook, because we'll
4402 get into trouble if the hook calls target_reinit () recursively or
4403 when the initial initialization is not yet complete. */
4405 static bool in_dummy_function;
4407 /* Invoke the target hook when setting cfun. Update the optimization options
4408 if the function uses different options than the default. */
4410 static void
4411 invoke_set_current_function_hook (tree fndecl)
4413 if (!in_dummy_function)
4415 tree opts = ((fndecl)
4416 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4417 : optimization_default_node);
4419 if (!opts)
4420 opts = optimization_default_node;
4422 /* Change optimization options if needed. */
4423 if (optimization_current_node != opts)
4425 optimization_current_node = opts;
4426 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4429 targetm.set_current_function (fndecl);
4430 this_fn_optabs = this_target_optabs;
4432 if (opts != optimization_default_node)
4434 init_tree_optimization_optabs (opts);
4435 if (TREE_OPTIMIZATION_OPTABS (opts))
4436 this_fn_optabs = (struct target_optabs *)
4437 TREE_OPTIMIZATION_OPTABS (opts);
4442 /* cfun should never be set directly; use this function. */
4444 void
4445 set_cfun (struct function *new_cfun)
4447 if (cfun != new_cfun)
4449 cfun = new_cfun;
4450 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4454 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4456 static vec<function_p> cfun_stack;
4458 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4459 current_function_decl accordingly. */
4461 void
4462 push_cfun (struct function *new_cfun)
4464 gcc_assert ((!cfun && !current_function_decl)
4465 || (cfun && current_function_decl == cfun->decl));
4466 cfun_stack.safe_push (cfun);
4467 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4468 set_cfun (new_cfun);
4471 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4473 void
4474 pop_cfun (void)
4476 struct function *new_cfun = cfun_stack.pop ();
4477 /* When in_dummy_function, we do have a cfun but current_function_decl is
4478 NULL. We also allow pushing NULL cfun and subsequently changing
4479 current_function_decl to something else and have both restored by
4480 pop_cfun. */
4481 gcc_checking_assert (in_dummy_function
4482 || !cfun
4483 || current_function_decl == cfun->decl);
4484 set_cfun (new_cfun);
4485 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
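/* Typical usage pattern (illustrative):

       push_cfun (DECL_STRUCT_FUNCTION (fndecl));
       ... work that requires cfun and current_function_decl set ...
       pop_cfun ();

   which saves and restores the previous function context around a
   temporary switch.  */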
4488 /* Return the current value of funcdef_no and increment it.  */
4490 get_next_funcdef_no (void)
4492 return funcdef_no++;
4495 /* Return the current value of funcdef_no.  */
4497 get_last_funcdef_no (void)
4499 return funcdef_no;
4502 /* Allocate a function structure for FNDECL and set its contents
4503 to the defaults. Set cfun to the newly-allocated object.
4504 Some of the helper functions invoked during initialization assume
4505 that cfun has already been set. Therefore, assign the new object
4506 directly into cfun and invoke the back end hook explicitly at the
4507 very end, rather than initializing a temporary and calling set_cfun
4508 on it.
4510 ABSTRACT_P is true if this is a function that will never be seen by
4511 the middle-end. Such functions are front-end concepts (like C++
4512 function templates) that do not correspond directly to functions
4513 placed in object files. */
4515 void
4516 allocate_struct_function (tree fndecl, bool abstract_p)
4518 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4520 cfun = ggc_alloc_cleared_function ();
4522 init_eh_for_function ();
4524 if (init_machine_status)
4525 cfun->machine = (*init_machine_status) ();
4527 #ifdef OVERRIDE_ABI_FORMAT
4528 OVERRIDE_ABI_FORMAT (fndecl);
4529 #endif
4531 if (fndecl != NULL_TREE)
4533 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4534 cfun->decl = fndecl;
4535 current_function_funcdef_no = get_next_funcdef_no ();
4538 invoke_set_current_function_hook (fndecl);
4540 if (fndecl != NULL_TREE)
4542 tree result = DECL_RESULT (fndecl);
4543 if (!abstract_p && aggregate_value_p (result, fndecl))
4545 #ifdef PCC_STATIC_STRUCT_RETURN
4546 cfun->returns_pcc_struct = 1;
4547 #endif
4548 cfun->returns_struct = 1;
4551 cfun->stdarg = stdarg_p (fntype);
4553 /* Assume all registers in stdarg functions need to be saved. */
4554 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4555 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4557 /* ??? This could be set on a per-function basis by the front-end
4558 but is this worth the hassle? */
4559 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4563 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4564 instead of just setting it. */
4566 void
4567 push_struct_function (tree fndecl)
4569 /* When in_dummy_function we might be in the middle of a pop_cfun and
4570 current_function_decl and cfun may not match. */
4571 gcc_assert (in_dummy_function
4572 || (!cfun && !current_function_decl)
4573 || (cfun && current_function_decl == cfun->decl));
4574 cfun_stack.safe_push (cfun);
4575 current_function_decl = fndecl;
4576 allocate_struct_function (fndecl, false);
4579 /* Reset crtl and other non-struct-function variables to defaults as
4580 appropriate for emitting rtl at the start of a function. */
4582 static void
4583 prepare_function_start (void)
4585 gcc_assert (!crtl->emit.x_last_insn);
4586 init_temp_slots ();
4587 init_emit ();
4588 init_varasm_status ();
4589 init_expr ();
4590 default_rtl_profile ();
4592 if (flag_stack_usage_info)
4594 cfun->su = ggc_alloc_cleared_stack_usage ();
4595 cfun->su->static_stack_size = -1;
4598 cse_not_expected = ! optimize;
4600 /* Caller save not needed yet. */
4601 caller_save_needed = 0;
4603 /* We haven't done register allocation yet. */
4604 reg_renumber = 0;
4606 /* Indicate that we have not instantiated virtual registers yet. */
4607 virtuals_instantiated = 0;
4609 /* Indicate that we want CONCATs now. */
4610 generating_concat_p = 1;
4612 /* Indicate we have no need of a frame pointer yet. */
4613 frame_pointer_needed = 0;
4616 /* Initialize the rtl expansion mechanism so that we can do simple things
4617 like generate sequences. This is used to provide a context during global
4618 initialization of some passes. You must call expand_dummy_function_end
4619 to exit this context. */
4621 void
4622 init_dummy_function_start (void)
4624 gcc_assert (!in_dummy_function);
4625 in_dummy_function = true;
4626 push_struct_function (NULL_TREE);
4627 prepare_function_start ();
4630 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4631 and initialize static variables for generating RTL for the statements
4632 of the function. */
4634 void
4635 init_function_start (tree subr)
4637 if (subr && DECL_STRUCT_FUNCTION (subr))
4638 set_cfun (DECL_STRUCT_FUNCTION (subr));
4639 else
4640 allocate_struct_function (subr, false);
4641 prepare_function_start ();
4642 decide_function_section (subr);
4644 /* Warn if this value is an aggregate type,
4645 regardless of which calling convention we are using for it. */
4646 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4647 warning (OPT_Waggregate_return, "function returns an aggregate");
4651 void
4652 expand_main_function (void)
4654 #if (defined(INVOKE__main) \
4655 || (!defined(HAS_INIT_SECTION) \
4656 && !defined(INIT_SECTION_ASM_OP) \
4657 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4658 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4659 #endif
4662 /* Expand code to initialize the stack_protect_guard. This is invoked at
4663 the beginning of a function to be protected. */
4665 #ifndef HAVE_stack_protect_set
4666 # define HAVE_stack_protect_set 0
4667 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4668 #endif
4670 void
4671 stack_protect_prologue (void)
4673 tree guard_decl = targetm.stack_protect_guard ();
4674 rtx x, y;
4676 x = expand_normal (crtl->stack_protect_guard);
4677 y = expand_normal (guard_decl);
4679 /* Allow the target to copy from Y to X without leaking Y into a
4680 register. */
4681 if (HAVE_stack_protect_set)
4683 rtx insn = gen_stack_protect_set (x, y);
4684 if (insn)
4686 emit_insn (insn);
4687 return;
4691 /* Otherwise do a straight move. */
4692 emit_move_insn (x, y);
4695 /* Expand code to verify the stack_protect_guard. This is invoked at
4696 the end of a function to be protected. */
4698 #ifndef HAVE_stack_protect_test
4699 # define HAVE_stack_protect_test 0
4700 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4701 #endif
4703 void
4704 stack_protect_epilogue (void)
4706 tree guard_decl = targetm.stack_protect_guard ();
4707 rtx label = gen_label_rtx ();
4708 rtx x, y, tmp;
4710 x = expand_normal (crtl->stack_protect_guard);
4711 y = expand_normal (guard_decl);
4713 /* Allow the target to compare Y with X without leaking either into
4714 a register. */
4715 switch (HAVE_stack_protect_test != 0)
4717 case 1:
4718 tmp = gen_stack_protect_test (x, y, label);
4719 if (tmp)
4721 emit_insn (tmp);
4722 break;
4724 /* FALLTHRU */
4726 default:
4727 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4728 break;
4731 /* The noreturn predictor has been moved to the tree level. The rtl-level
4732 predictors estimate this branch about 20%, which isn't enough to get
4733 things moved out of line. Since this is the only extant case of adding
4734    a noreturn function at the rtl level, it doesn't seem worth doing anything
4735 except adding the prediction by hand. */
4736 tmp = get_last_insn ();
4737 if (JUMP_P (tmp))
4738 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4740 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4741 free_temp_slots ();
4742 emit_label (label);
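/* Conceptually (illustrative pseudo-C), the prologue/epilogue pair
   expands to

       canary = __stack_chk_guard;	  /* stack_protect_prologue */
       ...
       if (canary != __stack_chk_guard)	  /* stack_protect_epilogue */
         __stack_chk_fail ();

   with the actual copy and comparison coming from the target's
   stack_protect_set/test patterns when they are available.  */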
4745 /* Start the RTL for a new function, and set variables used for
4746 emitting RTL.
4747    SUBR is the FUNCTION_DECL node.  */
4751 void
4752 expand_function_start (tree subr)
4754 /* Make sure volatile mem refs aren't considered
4755 valid operands of arithmetic insns. */
4756 init_recog_no_volatile ();
4758 crtl->profile
4759 = (profile_flag
4760 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4762 crtl->limit_stack
4763 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4765 /* Make the label for return statements to jump to. Do not special
4766 case machines with special return instructions -- they will be
4767 handled later during jump, ifcvt, or epilogue creation. */
4768 return_label = gen_label_rtx ();
4770 /* Initialize rtx used to return the value. */
4771 /* Do this before assign_parms so that we copy the struct value address
4772 before any library calls that assign parms might generate. */
4774 /* Decide whether to return the value in memory or in a register. */
4775 if (aggregate_value_p (DECL_RESULT (subr), subr))
4777 /* Returning something that won't go in a register. */
4778 rtx value_address = 0;
4780 #ifdef PCC_STATIC_STRUCT_RETURN
4781 if (cfun->returns_pcc_struct)
4783 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4784 value_address = assemble_static_space (size);
4786 else
4787 #endif
4789 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4790 /* Expect to be passed the address of a place to store the value.
4791 If it is passed as an argument, assign_parms will take care of
4792 it. */
4793 if (sv)
4795 value_address = gen_reg_rtx (Pmode);
4796 emit_move_insn (value_address, sv);
4799 if (value_address)
4801 rtx x = value_address;
4802 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4804 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4805 set_mem_attributes (x, DECL_RESULT (subr), 1);
4807 SET_DECL_RTL (DECL_RESULT (subr), x);
4810 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4811 /* If return mode is void, this decl rtl should not be used. */
4812 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4813 else
4815 /* Compute the return values into a pseudo reg, which we will copy
4816 into the true return register after the cleanups are done. */
4817 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4818 if (TYPE_MODE (return_type) != BLKmode
4819 && targetm.calls.return_in_msb (return_type))
4820 /* expand_function_end will insert the appropriate padding in
4821 this case. Use the return value's natural (unpadded) mode
4822 within the function proper. */
4823 SET_DECL_RTL (DECL_RESULT (subr),
4824 gen_reg_rtx (TYPE_MODE (return_type)));
4825 else
4827 /* In order to figure out what mode to use for the pseudo, we
4828 figure out what the mode of the eventual return register will
4829 actually be, and use that. */
4830 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4832 /* Structures that are returned in registers are not
4833 aggregate_value_p, so we may see a PARALLEL or a REG. */
4834 if (REG_P (hard_reg))
4835 SET_DECL_RTL (DECL_RESULT (subr),
4836 gen_reg_rtx (GET_MODE (hard_reg)));
4837 else
4839 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4840 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4844 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4845 result to the real return register(s). */
4846 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4849 /* Initialize rtx for parameters and local variables.
4850 In some cases this requires emitting insns. */
4851 assign_parms (subr);
4853 /* If function gets a static chain arg, store it. */
4854 if (cfun->static_chain_decl)
4856 tree parm = cfun->static_chain_decl;
4857 rtx local, chain, insn;
4859 local = gen_reg_rtx (Pmode);
4860 chain = targetm.calls.static_chain (current_function_decl, true);
4862 set_decl_incoming_rtl (parm, chain, false);
4863 SET_DECL_RTL (parm, local);
4864 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4866 insn = emit_move_insn (local, chain);
4868 /* Mark the register as eliminable, similar to parameters. */
4869 if (MEM_P (chain)
4870 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
4871 set_dst_reg_note (insn, REG_EQUIV, chain, local);
4874 /* If the function receives a non-local goto, then store the
4875 bits we need to restore the frame pointer. */
4876 if (cfun->nonlocal_goto_save_area)
4878 tree t_save;
4879 rtx r_save;
4881 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4882 gcc_assert (DECL_RTL_SET_P (var));
4884 t_save = build4 (ARRAY_REF,
4885 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
4886 cfun->nonlocal_goto_save_area,
4887 integer_zero_node, NULL_TREE, NULL_TREE);
4888 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4889 gcc_assert (GET_MODE (r_save) == Pmode);
4891 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4892 update_nonlocal_goto_save_area ();
4895 /* The following was moved from init_function_start.
4896 The move is supposed to make sdb output more accurate. */
4897 /* Indicate the beginning of the function body,
4898 as opposed to parm setup. */
4899 emit_note (NOTE_INSN_FUNCTION_BEG);
4901 gcc_assert (NOTE_P (get_last_insn ()));
4903 parm_birth_insn = get_last_insn ();
4905 if (crtl->profile)
4907 #ifdef PROFILE_HOOK
4908 PROFILE_HOOK (current_function_funcdef_no);
4909 #endif
4912 /* If we are doing generic stack checking, the probe should go here. */
4913 if (flag_stack_check == GENERIC_STACK_CHECK)
4914 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4917 /* Undo the effects of init_dummy_function_start. */
4918 void
4919 expand_dummy_function_end (void)
4921 gcc_assert (in_dummy_function);
4923 /* End any sequences that failed to be closed due to syntax errors. */
4924 while (in_sequence_p ())
4925 end_sequence ();
4927 /* Outside function body, can't compute type's actual size
4928 until next function's body starts. */
4930 free_after_parsing (cfun);
4931 free_after_compilation (cfun);
4932 pop_cfun ();
4933 in_dummy_function = false;
4936 /* Call DOIT for each hard register used as a return value from
4937 the current function. */
4939 void
4940 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4942 rtx outgoing = crtl->return_rtx;
4944 if (! outgoing)
4945 return;
4947 if (REG_P (outgoing))
4948 (*doit) (outgoing, arg);
4949 else if (GET_CODE (outgoing) == PARALLEL)
4951 int i;
4953 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4955 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4957 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4958 (*doit) (x, arg);
4963 static void
4964 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4966 emit_clobber (reg);
4969 void
4970 clobber_return_register (void)
4972 diddle_return_value (do_clobber_return_reg, NULL);
4974 /* In case we do use pseudo to return value, clobber it too. */
4975 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4977 tree decl_result = DECL_RESULT (current_function_decl);
4978 rtx decl_rtl = DECL_RTL (decl_result);
4979 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4981 do_clobber_return_reg (decl_rtl, NULL);
4986 static void
4987 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4989 emit_use (reg);
4992 static void
4993 use_return_register (void)
4995 diddle_return_value (do_use_return_reg, NULL);
4998 /* Possibly warn about unused parameters. */
4999 void
5000 do_warn_unused_parameter (tree fn)
5002 tree decl;
5004 for (decl = DECL_ARGUMENTS (fn);
5005 decl; decl = DECL_CHAIN (decl))
5006 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5007 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
5008 && !TREE_NO_WARNING (decl))
5009 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
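/* Example (illustrative): compiled with -Wunused-parameter,

       int f (int used, int unused) { return used; }

   warns for UNUSED but not for USED.  */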
5012 /* Generate RTL for the end of the current function. */
5014 void
5015 expand_function_end (void)
5017 rtx clobber_after;
5019 /* If arg_pointer_save_area was referenced only from a nested
5020 function, we will not have initialized it yet. Do that now. */
5021 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5022 get_arg_pointer_save_area ();
5024 /* If we are doing generic stack checking and this function makes calls,
5025 do a stack probe at the start of the function to ensure we have enough
5026 space for another stack frame. */
5027 if (flag_stack_check == GENERIC_STACK_CHECK)
5029 rtx insn, seq;
5031 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5032 if (CALL_P (insn))
5034 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5035 start_sequence ();
5036 if (STACK_CHECK_MOVING_SP)
5037 anti_adjust_stack_and_probe (max_frame_size, true);
5038 else
5039 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5040 seq = get_insns ();
5041 end_sequence ();
5042 set_insn_locations (seq, prologue_location);
5043 emit_insn_before (seq, stack_check_probe_note);
5044 break;
5048 /* End any sequences that failed to be closed due to syntax errors. */
5049 while (in_sequence_p ())
5050 end_sequence ();
5052 clear_pending_stack_adjust ();
5053 do_pending_stack_adjust ();
5055   /* Output a line number for the end of the function.
5056 SDB depends on this. */
5057 set_curr_insn_location (input_location);
5059 /* Before the return label (if any), clobber the return
5060 registers so that they are not propagated live to the rest of
5061 the function. This can only happen with functions that drop
5062 through; if there had been a return statement, there would
5063 have either been a return rtx, or a jump to the return label.
5065 We delay actual code generation after the current_function_value_rtx
5066 is computed. */
5067 clobber_after = get_last_insn ();
5069 /* Output the label for the actual return from the function. */
5070 emit_label (return_label);
5072 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5074 /* Let except.c know where it should emit the call to unregister
5075 the function context for sjlj exceptions. */
5076 if (flag_exceptions)
5077 sjlj_emit_function_exit_after (get_last_insn ());
5079 else
5081 /* We want to ensure that instructions that may trap are not
5082 moved into the epilogue by scheduling, because we don't
5083 always emit unwind information for the epilogue. */
5084 if (cfun->can_throw_non_call_exceptions)
5085 emit_insn (gen_blockage ());
5088 /* If this is an implementation of throw, do what's necessary to
5089 communicate between __builtin_eh_return and the epilogue. */
5090 expand_eh_return ();
5092 /* If scalar return value was computed in a pseudo-reg, or was a named
5093 return value that got dumped to the stack, copy that to the hard
5094 return register. */
5095 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5097 tree decl_result = DECL_RESULT (current_function_decl);
5098 rtx decl_rtl = DECL_RTL (decl_result);
5100 if (REG_P (decl_rtl)
5101 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5102 : DECL_REGISTER (decl_result))
5104 rtx real_decl_rtl = crtl->return_rtx;
5106 /* This should be set in assign_parms. */
5107 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5109 /* If this is a BLKmode structure being returned in registers,
5110 then use the mode computed in expand_return. Note that if
5111 decl_rtl is memory, then its mode may have been changed,
5112 but that crtl->return_rtx has not. */
5113 if (GET_MODE (real_decl_rtl) == BLKmode)
5114 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5116 /* If a non-BLKmode return value should be padded at the least
5117 significant end of the register, shift it left by the appropriate
5118 amount. BLKmode results are handled using the group load/store
5119 machinery. */
5120 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5121 && REG_P (real_decl_rtl)
5122 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5124 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5125 REGNO (real_decl_rtl)),
5126 decl_rtl);
5127 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5129       /* If a named return value was dumped to memory, then
5130 we may need to re-do the PROMOTE_MODE signed/unsigned
5131 extension. */
5132 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5134 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5135 promote_function_mode (TREE_TYPE (decl_result),
5136 GET_MODE (decl_rtl), &unsignedp,
5137 TREE_TYPE (current_function_decl), 1);
5139 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5141 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5143 /* If expand_function_start has created a PARALLEL for decl_rtl,
5144 move the result to the real return registers. Otherwise, do
5145 a group load from decl_rtl for a named return. */
5146 if (GET_CODE (decl_rtl) == PARALLEL)
5147 emit_group_move (real_decl_rtl, decl_rtl);
5148 else
5149 emit_group_load (real_decl_rtl, decl_rtl,
5150 TREE_TYPE (decl_result),
5151 int_size_in_bytes (TREE_TYPE (decl_result)));
5153 /* In the case of complex integer modes smaller than a word, we'll
5154 need to generate some non-trivial bitfield insertions. Do that
5155 on a pseudo and not the hard register. */
5156 else if (GET_CODE (decl_rtl) == CONCAT
5157 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5158 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5160 int old_generating_concat_p;
5161 rtx tmp;
5163 old_generating_concat_p = generating_concat_p;
5164 generating_concat_p = 0;
5165 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5166 generating_concat_p = old_generating_concat_p;
5168 emit_move_insn (tmp, decl_rtl);
5169 emit_move_insn (real_decl_rtl, tmp);
5171 else
5172 emit_move_insn (real_decl_rtl, decl_rtl);
5176 /* If returning a structure, arrange to return the address of the value
5177 in a place where debuggers expect to find it.
5179 If returning a structure PCC style,
5180 the caller also depends on this value.
5181 And cfun->returns_pcc_struct is not necessarily set. */
5182 if (cfun->returns_struct
5183 || cfun->returns_pcc_struct)
5185 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5186 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5187 rtx outgoing;
5189 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5190 type = TREE_TYPE (type);
5191 else
5192 value_address = XEXP (value_address, 0);
5194 outgoing = targetm.calls.function_value (build_pointer_type (type),
5195 current_function_decl, true);
5197 /* Mark this as a function return value so integrate will delete the
5198 assignment and USE below when inlining this function. */
5199 REG_FUNCTION_VALUE_P (outgoing) = 1;
5201 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5202 value_address = convert_memory_address (GET_MODE (outgoing),
5203 value_address);
5205 emit_move_insn (outgoing, value_address);
5207       /* Show the return register used to hold the result (in this case
5208          the address of the result).  */
5209 crtl->return_rtx = outgoing;
5212 /* Emit the actual code to clobber return register. */
5214 rtx seq;
5216 start_sequence ();
5217 clobber_return_register ();
5218 seq = get_insns ();
5219 end_sequence ();
5221 emit_insn_after (seq, clobber_after);
5224 /* Output the label for the naked return from the function. */
5225 if (naked_return_label)
5226 emit_label (naked_return_label);
5228 /* @@@ This is a kludge. We want to ensure that instructions that
5229 may trap are not moved into the epilogue by scheduling, because
5230 we don't always emit unwind information for the epilogue. */
5231 if (cfun->can_throw_non_call_exceptions
5232 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5233 emit_insn (gen_blockage ());
5235 /* If stack protection is enabled for this function, check the guard. */
5236 if (crtl->stack_protect_guard)
5237 stack_protect_epilogue ();
5239 /* If we had calls to alloca, and this machine needs
5240 an accurate stack pointer to exit the function,
5241 insert some code to save and restore the stack pointer. */
5242 if (! EXIT_IGNORE_STACK
5243 && cfun->calls_alloca)
5245 rtx tem = 0, seq;
5247 start_sequence ();
5248 emit_stack_save (SAVE_FUNCTION, &tem);
5249 seq = get_insns ();
5250 end_sequence ();
5251 emit_insn_before (seq, parm_birth_insn);
5253 emit_stack_restore (SAVE_FUNCTION, tem);
5256 /* ??? This should no longer be necessary since stupid is no longer with
5257 us, but there are some parts of the compiler (eg reload_combine, and
5258 sh mach_dep_reorg) that still try and compute their own lifetime info
5259 instead of using the general framework. */
5260 use_return_register ();
5264 get_arg_pointer_save_area (void)
5266 rtx ret = arg_pointer_save_area;
5268 if (! ret)
5270 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5271 arg_pointer_save_area = ret;
5274 if (! crtl->arg_pointer_save_area_init)
5276 rtx seq;
5278 /* Save the arg pointer at the beginning of the function. The
5279 generated stack slot may not be a valid memory address, so we
5280 have to check it and fix it if necessary. */
5281 start_sequence ();
5282 emit_move_insn (validize_mem (ret),
5283 crtl->args.internal_arg_pointer);
5284 seq = get_insns ();
5285 end_sequence ();
5287 push_topmost_sequence ();
5288 emit_insn_after (seq, entry_of_function ());
5289 pop_topmost_sequence ();
5291 crtl->arg_pointer_save_area_init = true;
5294 return ret;
5297 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5298 for the first time. */
5300 static void
5301 record_insns (rtx insns, rtx end, htab_t *hashp)
5303 rtx tmp;
5304 htab_t hash = *hashp;
5306 if (hash == NULL)
5307 *hashp = hash
5308 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5310 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5312 void **slot = htab_find_slot (hash, tmp, INSERT);
5313 gcc_assert (*slot == NULL);
5314 *slot = tmp;
5318 /* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5319 basic block, splitting or peepholes. If INSN is a prologue or epilogue
5320 insn, then record COPY as well. */
5322 void
5323 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5325 htab_t hash;
5326 void **slot;
5328 hash = epilogue_insn_hash;
5329 if (!hash || !htab_find (hash, insn))
5331 hash = prologue_insn_hash;
5332 if (!hash || !htab_find (hash, insn))
5333 return;
5336 slot = htab_find_slot (hash, copy, INSERT);
5337 gcc_assert (*slot == NULL);
5338 *slot = copy;
5341 /* Set the location of the insn chain starting at INSN to LOC. */
5342 static void
5343 set_insn_locations (rtx insn, int loc)
5345 while (insn != NULL_RTX)
5347 if (INSN_P (insn))
5348 INSN_LOCATION (insn) = loc;
5349 insn = NEXT_INSN (insn);
5353 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5354 we can be running after reorg, SEQUENCE rtl is possible. */
5356 static bool
5357 contains (const_rtx insn, htab_t hash)
5359 if (hash == NULL)
5360 return false;
5362 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5364 int i;
5365 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5366 if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
5367 return true;
5368 return false;
5371 return htab_find (hash, insn) != NULL;
5375 prologue_epilogue_contains (const_rtx insn)
5377 if (contains (insn, prologue_insn_hash))
5378 return 1;
5379 if (contains (insn, epilogue_insn_hash))
5380 return 1;
5381 return 0;
5384 #ifdef HAVE_simple_return
5386 /* Return true if INSN requires the stack frame to be set up.
5387 PROLOGUE_USED contains the hard registers used in the function
5388 prologue. SET_UP_BY_PROLOGUE is the set of registers we expect the
5389 prologue to set up for the function. */
5390 bool
5391 requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used,
5392 HARD_REG_SET set_up_by_prologue)
5394 df_ref *df_rec;
5395 HARD_REG_SET hardregs;
5396 unsigned regno;
5398 if (CALL_P (insn))
5399 return !SIBLING_CALL_P (insn);
5401 /* We need a frame to get the unique CFA expected by the unwinder. */
5402 if (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
5403 return true;
5405 CLEAR_HARD_REG_SET (hardregs);
5406 for (df_rec = DF_INSN_DEFS (insn); *df_rec; df_rec++)
5408 rtx dreg = DF_REF_REG (*df_rec);
5410 if (!REG_P (dreg))
5411 continue;
5413 add_to_hard_reg_set (&hardregs, GET_MODE (dreg),
5414 REGNO (dreg));
5416 if (hard_reg_set_intersect_p (hardregs, prologue_used))
5417 return true;
5418 AND_COMPL_HARD_REG_SET (hardregs, call_used_reg_set);
5419 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5420 if (TEST_HARD_REG_BIT (hardregs, regno)
5421 && df_regs_ever_live_p (regno))
5422 return true;
5424 for (df_rec = DF_INSN_USES (insn); *df_rec; df_rec++)
5426 rtx reg = DF_REF_REG (*df_rec);
5428 if (!REG_P (reg))
5429 continue;
5431 add_to_hard_reg_set (&hardregs, GET_MODE (reg),
5432 REGNO (reg));
5434 if (hard_reg_set_intersect_p (hardregs, set_up_by_prologue))
5435 return true;
5437 return false;
5440 /* See whether BB has a single successor that uses [REGNO, END_REGNO),
5441 and if BB is its only predecessor. Return that block if so,
5442 otherwise return null. */
5444 static basic_block
5445 next_block_for_reg (basic_block bb, int regno, int end_regno)
5447 edge e, live_edge;
5448 edge_iterator ei;
5449 bitmap live;
5450 int i;
5452 live_edge = NULL;
5453 FOR_EACH_EDGE (e, ei, bb->succs)
5455 live = df_get_live_in (e->dest);
5456 for (i = regno; i < end_regno; i++)
5457 if (REGNO_REG_SET_P (live, i))
5459 if (live_edge && live_edge != e)
5460 return NULL;
5461 live_edge = e;
5465 /* We can sometimes encounter dead code. Don't try to move it
5466 into the exit block. */
5467 if (!live_edge || live_edge->dest == EXIT_BLOCK_PTR)
5468 return NULL;
5470 /* Reject targets of abnormal edges. This is needed for correctness
5471 on ports like Alpha and MIPS, whose pic_offset_table_rtx can die on
5472 exception edges even though it is generally treated as call-saved
5473 for the majority of the compilation. Moving across abnormal edges
5474 isn't going to be interesting for shrink-wrap usage anyway. */
5475 if (live_edge->flags & EDGE_ABNORMAL)
5476 return NULL;
5478 if (EDGE_COUNT (live_edge->dest->preds) > 1)
5479 return NULL;
5481 return live_edge->dest;
5484 /* Try to move INSN from BB to a successor. Return true on success.
5485 USES and DEFS are the set of registers that are used and defined
5486 after INSN in BB. */
5488 static bool
5489 move_insn_for_shrink_wrap (basic_block bb, rtx insn,
5490 const HARD_REG_SET uses,
5491 const HARD_REG_SET defs)
5493 rtx set, src, dest;
5494 bitmap live_out, live_in, bb_uses, bb_defs;
5495 unsigned int i, dregno, end_dregno, sregno, end_sregno;
5496 basic_block next_block;
5498 /* Look for a simple register copy. */
5499 set = single_set (insn);
5500 if (!set)
5501 return false;
5502 src = SET_SRC (set);
5503 dest = SET_DEST (set);
5504 if (!REG_P (dest) || !REG_P (src))
5505 return false;
5507 /* Make sure that the source register isn't defined later in BB. */
5508 sregno = REGNO (src);
5509 end_sregno = END_REGNO (src);
5510 if (overlaps_hard_reg_set_p (defs, GET_MODE (src), sregno))
5511 return false;
5513 /* Make sure that the destination register isn't referenced later in BB. */
5514 dregno = REGNO (dest);
5515 end_dregno = END_REGNO (dest);
5516 if (overlaps_hard_reg_set_p (uses, GET_MODE (dest), dregno)
5517 || overlaps_hard_reg_set_p (defs, GET_MODE (dest), dregno))
5518 return false;
5520 /* See whether there is a successor block to which we could move INSN. */
5521 next_block = next_block_for_reg (bb, dregno, end_dregno);
5522 if (!next_block)
5523 return false;
5525 /* At this point we are committed to moving INSN, but let's try to
5526 move it as far as we can. */
5529 live_out = df_get_live_out (bb);
5530 live_in = df_get_live_in (next_block);
5531 bb = next_block;
5533 /* Check whether BB uses DEST or clobbers DEST. We need to add
5534 INSN to BB if so. Either way, DEST is no longer live on entry,
5535 except for any part that overlaps SRC (next loop). */
5536 bb_uses = &DF_LR_BB_INFO (bb)->use;
5537 bb_defs = &DF_LR_BB_INFO (bb)->def;
5538 if (df_live)
5540 for (i = dregno; i < end_dregno; i++)
5542 if (REGNO_REG_SET_P (bb_uses, i) || REGNO_REG_SET_P (bb_defs, i)
5543 || REGNO_REG_SET_P (&DF_LIVE_BB_INFO (bb)->gen, i))
5544 next_block = NULL;
5545 CLEAR_REGNO_REG_SET (live_out, i);
5546 CLEAR_REGNO_REG_SET (live_in, i);
5549 /* Check whether BB clobbers SRC. We need to add INSN to BB if so.
5550 Either way, SRC is now live on entry. */
5551 for (i = sregno; i < end_sregno; i++)
5553 if (REGNO_REG_SET_P (bb_defs, i)
5554 || REGNO_REG_SET_P (&DF_LIVE_BB_INFO (bb)->gen, i))
5555 next_block = NULL;
5556 SET_REGNO_REG_SET (live_out, i);
5557 SET_REGNO_REG_SET (live_in, i);
5560 else
5562          /* DF_LR_BB_INFO (bb)->def does not include the DF_REF_PARTIAL and
5563 DF_REF_CONDITIONAL defs. So if DF_LIVE doesn't exist, i.e.
5564 at -O1, just give up searching NEXT_BLOCK. */
5565 next_block = NULL;
5566 for (i = dregno; i < end_dregno; i++)
5568 CLEAR_REGNO_REG_SET (live_out, i);
5569 CLEAR_REGNO_REG_SET (live_in, i);
5572 for (i = sregno; i < end_sregno; i++)
5574 SET_REGNO_REG_SET (live_out, i);
5575 SET_REGNO_REG_SET (live_in, i);
5579 /* If we don't need to add the move to BB, look for a single
5580 successor block. */
5581 if (next_block)
5582 next_block = next_block_for_reg (next_block, dregno, end_dregno);
5584 while (next_block);
5586 /* BB now defines DEST. It only uses the parts of DEST that overlap SRC
5587 (next loop). */
5588 for (i = dregno; i < end_dregno; i++)
5590 CLEAR_REGNO_REG_SET (bb_uses, i);
5591 SET_REGNO_REG_SET (bb_defs, i);
5594 /* BB now uses SRC. */
5595 for (i = sregno; i < end_sregno; i++)
5596 SET_REGNO_REG_SET (bb_uses, i);
5598 emit_insn_after (PATTERN (insn), bb_note (bb));
5599 delete_insn (insn);
5600 return true;
5603 /* Look for register copies in the first block of the function, and move
5604 them down into successor blocks if the register is used only on one
5605 path. This exposes more opportunities for shrink-wrapping. These
5606 kinds of sets often occur when incoming argument registers are moved
5607 to call-saved registers because their values are live across one or
5608 more calls during the function. */
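/* A hypothetical example of the pattern described above (an editorial
   addition, not from the original sources):

     int
     f (int x)
     {
       if (!cond ())
	 return -1;
       g ();
       return x;
     }

   X is copied from its incoming argument register into a call-saved
   register so that it survives the call to g ().  Sinking that copy out
   of the entry block and onto the path containing the call leaves the
   early-return path with no copy at all, which in turn lets the prologue
   be shrink-wrapped past the early exit.  */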
static void
prepare_shrink_wrap (basic_block entry_block)
{
  rtx insn, curr, x;
  HARD_REG_SET uses, defs;
  df_ref *ref;

  CLEAR_HARD_REG_SET (uses);
  CLEAR_HARD_REG_SET (defs);
  FOR_BB_INSNS_REVERSE_SAFE (entry_block, insn, curr)
    if (NONDEBUG_INSN_P (insn)
	&& !move_insn_for_shrink_wrap (entry_block, insn, uses, defs))
      {
	/* Add all defined registers to DEFS.  */
	for (ref = DF_INSN_DEFS (insn); *ref; ref++)
	  {
	    x = DF_REF_REG (*ref);
	    if (REG_P (x) && HARD_REGISTER_P (x))
	      SET_HARD_REG_BIT (defs, REGNO (x));
	  }

	/* Add all used registers to USES.  */
	for (ref = DF_INSN_USES (insn); *ref; ref++)
	  {
	    x = DF_REF_REG (*ref);
	    if (REG_P (x) && HARD_REGISTER_P (x))
	      SET_HARD_REG_BIT (uses, REGNO (x));
	  }
      }
}
#endif
#ifdef HAVE_return
/* Insert use of return register before the end of BB.  */

static void
emit_use_return_register_into_block (basic_block bb)
{
  rtx seq, insn;
  start_sequence ();
  use_return_register ();
  seq = get_insns ();
  end_sequence ();
  insn = BB_END (bb);
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    insn = prev_cc0_setter (insn);
#endif
  emit_insn_before (seq, insn);
}
/* Create a return pattern, either simple_return or return, depending on
   simple_p.  */

static rtx
gen_return_pattern (bool simple_p)
{
#ifdef HAVE_simple_return
  return simple_p ? gen_simple_return () : gen_return ();
#else
  gcc_assert (!simple_p);
  return gen_return ();
#endif
}
/* Insert an appropriate return pattern at the end of block BB.  This
   also means updating block_for_insn appropriately.  SIMPLE_P is
   the same as in gen_return_pattern and passed to it.  */

static void
emit_return_into_block (bool simple_p, basic_block bb)
{
  rtx jump, pat;
  jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
  pat = PATTERN (jump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  gcc_assert (ANY_RETURN_P (pat));
  JUMP_LABEL (jump) = pat;
}
#endif
/* Set JUMP_LABEL for a return insn.  */

void
set_return_jump_label (rtx returnjump)
{
  rtx pat = PATTERN (returnjump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (ANY_RETURN_P (pat))
    JUMP_LABEL (returnjump) = pat;
  else
    JUMP_LABEL (returnjump) = ret_rtx;
}
#ifdef HAVE_simple_return
/* Create a copy of BB instructions and insert at BEFORE.  Redirect
   preds of BB to COPY_BB if they don't appear in NEED_PROLOGUE.  */
static void
dup_block_and_redirect (basic_block bb, basic_block copy_bb, rtx before,
			bitmap_head *need_prologue)
{
  edge_iterator ei;
  edge e;
  rtx insn = BB_END (bb);

  /* We know BB has a single successor, so there is no need to copy a
     simple jump at the end of BB.  */
  if (simplejump_p (insn))
    insn = PREV_INSN (insn);

  start_sequence ();
  duplicate_insn_chain (BB_HEAD (bb), insn);
  if (dump_file)
    {
      unsigned count = 0;
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  ++count;
      fprintf (dump_file, "Duplicating bb %d to bb %d, %u active insns.\n",
	       bb->index, copy_bb->index, count);
    }
  insn = get_insns ();
  end_sequence ();
  emit_insn_before (insn, before);

  /* Redirect all the paths that need no prologue into copy_bb.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei));)
    if (!bitmap_bit_p (need_prologue, e->src->index))
      {
	int freq = EDGE_FREQUENCY (e);
	copy_bb->count += e->count;
	copy_bb->frequency += EDGE_FREQUENCY (e);
	e->dest->count -= e->count;
	if (e->dest->count < 0)
	  e->dest->count = 0;
	e->dest->frequency -= freq;
	if (e->dest->frequency < 0)
	  e->dest->frequency = 0;
	redirect_edge_and_branch_force (e, copy_bb);
	continue;
      }
    else
      ei_next (&ei);
}
#endif
#if defined (HAVE_return) || defined (HAVE_simple_return)
/* Return true if there are any active insns between HEAD and TAIL.  */
static bool
active_insn_between (rtx head, rtx tail)
{
  while (tail)
    {
      if (active_insn_p (tail))
	return true;
      if (tail == head)
	return false;
      tail = PREV_INSN (tail);
    }
  return false;
}
/* LAST_BB is a block that exits, and is empty of active instructions.
   Examine its predecessors for jumps that can be converted to
   (conditional) returns.  */
static vec<edge>
convert_jumps_to_returns (basic_block last_bb, bool simple_p,
			  vec<edge> unconverted ATTRIBUTE_UNUSED)
{
  int i;
  basic_block bb;
  rtx label;
  edge_iterator ei;
  edge e;
  vec<basic_block> src_bbs;

  src_bbs.create (EDGE_COUNT (last_bb->preds));
  FOR_EACH_EDGE (e, ei, last_bb->preds)
    if (e->src != ENTRY_BLOCK_PTR)
      src_bbs.quick_push (e->src);

  label = BB_HEAD (last_bb);

  FOR_EACH_VEC_ELT (src_bbs, i, bb)
    {
      rtx jump = BB_END (bb);

      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
	continue;

      e = find_edge (bb, last_bb);

      /* If we have an unconditional jump, we can replace that
	 with a simple return instruction.  */
      if (simplejump_p (jump))
	{
	  /* The use of the return register might be present in the exit
	     fallthru block.  Either:
	     - removing the use is safe, and we should remove the use in
	       the exit fallthru block, or
	     - removing the use is not safe, and we should add it here.
	     For now, we conservatively choose the latter.  Either of the
	     two helps in crossjumping.  */
	  emit_use_return_register_into_block (bb);

	  emit_return_into_block (simple_p, bb);
	  delete_insn (jump);
	}

      /* If we have a conditional jump branching to the last
	 block, we can try to replace that with a conditional
	 return instruction.  */
      else if (condjump_p (jump))
	{
	  rtx dest;

	  if (simple_p)
	    dest = simple_return_rtx;
	  else
	    dest = ret_rtx;
	  if (!redirect_jump (jump, dest, 0))
	    {
#ifdef HAVE_simple_return
	      if (simple_p)
		{
		  if (dump_file)
		    fprintf (dump_file,
			     "Failed to redirect bb %d branch.\n", bb->index);
		  unconverted.safe_push (e);
		}
#endif
	      continue;
	    }

	  /* See comment in simplejump_p case above.  */
	  emit_use_return_register_into_block (bb);

	  /* If this block has only one successor, it both jumps
	     and falls through to the fallthru block, so we can't
	     delete the edge.  */
	  if (single_succ_p (bb))
	    continue;
	}
      else
	{
#ifdef HAVE_simple_return
	  if (simple_p)
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Failed to redirect bb %d branch.\n", bb->index);
	      unconverted.safe_push (e);
	    }
#endif
	  continue;
	}

      /* Fix up the CFG for the successful change we just made.  */
      redirect_edge_succ (e, EXIT_BLOCK_PTR);
      e->flags &= ~EDGE_CROSSING;
    }
  src_bbs.release ();
  return unconverted;
}
/* Emit a return insn for the exit fallthru block.  */
static basic_block
emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
{
  basic_block last_bb = exit_fallthru_edge->src;

  if (JUMP_P (BB_END (last_bb)))
    {
      last_bb = split_edge (exit_fallthru_edge);
      exit_fallthru_edge = single_succ_edge (last_bb);
    }
  emit_barrier_after (BB_END (last_bb));
  emit_return_into_block (simple_p, last_bb);
  exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
  return last_bb;
}
#endif
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.

   Notes on epilogue placement:
   There are several kinds of edges to the exit block:
   * a single fallthru edge from LAST_BB
   * possibly, edges from blocks containing sibcalls
   * possibly, fake edges from infinite loops

   The epilogue is always emitted on the fallthru edge from the last basic
   block in the function, LAST_BB, into the exit block.

   If LAST_BB is empty except for a label, it is the target of every
   other basic block in the function that ends in a return.  If a
   target has a return or simple_return pattern (possibly with
   conditional variants), these basic blocks can be changed so that a
   return insn is emitted into them, and their target is adjusted to
   the real exit block.

   Notes on shrink wrapping: We implement a fairly conservative
   version of shrink-wrapping rather than the textbook one.  We only
   generate a single prologue and a single epilogue.  This is
   sufficient to catch a number of interesting cases involving early
   exits.

   First, we identify the blocks that require the prologue to occur before
   them.  These are the ones that modify a call-saved register, or reference
   any of the stack or frame pointer registers.  To simplify things, we then
   mark everything reachable from these blocks as also requiring a prologue.
   This takes care of loops automatically, and avoids the need to examine
   whether MEMs reference the frame, since it is sufficient to check for
   occurrences of the stack or frame pointer.

   We then compute the set of blocks for which the need for a prologue
   is anticipatable (borrowing terminology from the shrink-wrapping
   description in Muchnick's book).  These are the blocks which either
   require a prologue themselves, or those that have only successors
   where the prologue is anticipatable.  The prologue needs to be
   inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
   is not.  For the moment, we ensure that only one such edge exists.

   The epilogue is placed as described above, but we make a
   distinction between inserting return and simple_return patterns
   when modifying other blocks that end in a return.  Blocks that end
   in a sibcall omit the sibcall_epilogue if the block is not in
   ANTIC.  */
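/* An illustrative sketch of the scheme above (an editorial addition;
   the block numbers and code are hypothetical):

     BB2: if (p == NULL) goto BB4;		<- needs no prologue
     BB3: ... clobbers a call-saved register ...	<- requires a frame
     BB4: return;

   Here only BB3 requires the prologue, ANTIC = { BB3 }, and BB2->BB3 is
   the single edge from a non-ANTIC block into ANTIC, so the prologue is
   inserted on that edge and the early exit through BB4 never executes
   it.  */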
static void
thread_prologue_and_epilogue_insns (void)
{
  bool inserted;
#ifdef HAVE_simple_return
  vec<edge> unconverted_simple_returns = vNULL;
  bool nonempty_prologue;
  bitmap_head bb_flags;
  unsigned max_grow_size;
#endif
  rtx returnjump;
  rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
  rtx prologue_seq ATTRIBUTE_UNUSED, split_prologue_seq ATTRIBUTE_UNUSED;
  edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
  edge_iterator ei;

  df_analyze ();

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  inserted = false;
  seq = NULL_RTX;
  epilogue_end = NULL_RTX;
  returnjump = NULL_RTX;

  /* Can't deal with multiple successors of the entry block at the
     moment.  Function should always have at least one entry
     point.  */
  gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
  entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
  orig_entry_edge = entry_edge;

  split_prologue_seq = NULL_RTX;
  if (flag_split_stack
      && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
	  == NULL))
    {
#ifndef HAVE_split_stack_prologue
      gcc_unreachable ();
#else
      gcc_assert (HAVE_split_stack_prologue);

      start_sequence ();
      emit_insn (gen_split_stack_prologue ());
      split_prologue_seq = get_insns ();
      end_sequence ();

      record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
      set_insn_locations (split_prologue_seq, prologue_location);
#endif
    }
  prologue_seq = NULL_RTX;
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Insert an explicit USE for the frame pointer
	 if profiling is on and the frame pointer is required.  */
      if (crtl->profile && frame_pointer_needed)
	emit_use (hard_frame_pointer_rtx);

      /* Retain a map of the prologue insns.  */
      record_insns (seq, NULL, &prologue_insn_hash);
      emit_note (NOTE_INSN_PROLOGUE_END);

      /* Ensure that instructions are not moved into the prologue when
	 profiling is on.  The call to the profiling routine can be
	 emitted within the live range of a call-clobbered register.  */
      if (!targetm.profile_before_prologue () && crtl->profile)
	emit_insn (gen_blockage ());

      prologue_seq = get_insns ();
      end_sequence ();
      set_insn_locations (prologue_seq, prologue_location);
    }
#endif
#ifdef HAVE_simple_return
  bitmap_initialize (&bb_flags, &bitmap_default_obstack);

  /* Try to perform a kind of shrink-wrapping, making sure the
     prologue/epilogue is emitted only around those parts of the
     function that require it.  */

  nonempty_prologue = false;
  for (seq = prologue_seq; seq; seq = NEXT_INSN (seq))
    if (!NOTE_P (seq) || NOTE_KIND (seq) != NOTE_INSN_PROLOGUE_END)
      {
	nonempty_prologue = true;
	break;
      }

  if (flag_shrink_wrap && HAVE_simple_return
      && (targetm.profile_before_prologue () || !crtl->profile)
      && nonempty_prologue && !crtl->calls_eh_return)
    {
      HARD_REG_SET prologue_clobbered, prologue_used, live_on_edge;
      struct hard_reg_set_container set_up_by_prologue;
      rtx p_insn;
      vec<basic_block> vec;
      basic_block bb;
      bitmap_head bb_antic_flags;
      bitmap_head bb_on_list;
      bitmap_head bb_tail;

      if (dump_file)
	fprintf (dump_file, "Attempting shrink-wrapping optimization.\n");

      /* Compute the registers set and used in the prologue.  */
      CLEAR_HARD_REG_SET (prologue_clobbered);
      CLEAR_HARD_REG_SET (prologue_used);
      for (p_insn = prologue_seq; p_insn; p_insn = NEXT_INSN (p_insn))
	{
	  HARD_REG_SET this_used;
	  if (!NONDEBUG_INSN_P (p_insn))
	    continue;

	  CLEAR_HARD_REG_SET (this_used);
	  note_uses (&PATTERN (p_insn), record_hard_reg_uses,
		     &this_used);
	  AND_COMPL_HARD_REG_SET (this_used, prologue_clobbered);
	  IOR_HARD_REG_SET (prologue_used, this_used);
	  note_stores (PATTERN (p_insn), record_hard_reg_sets,
		       &prologue_clobbered);
	}

      prepare_shrink_wrap (entry_edge->dest);

      bitmap_initialize (&bb_antic_flags, &bitmap_default_obstack);
      bitmap_initialize (&bb_on_list, &bitmap_default_obstack);
      bitmap_initialize (&bb_tail, &bitmap_default_obstack);
      /* Find the set of basic blocks that require a stack frame,
	 and blocks that are too big to be duplicated.  */

      vec.create (n_basic_blocks);

      CLEAR_HARD_REG_SET (set_up_by_prologue.set);
      add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
			   STACK_POINTER_REGNUM);
      add_to_hard_reg_set (&set_up_by_prologue.set, Pmode, ARG_POINTER_REGNUM);
      if (frame_pointer_needed)
	add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
			     HARD_FRAME_POINTER_REGNUM);
      if (pic_offset_table_rtx)
	add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
			     PIC_OFFSET_TABLE_REGNUM);
      if (crtl->drap_reg)
	add_to_hard_reg_set (&set_up_by_prologue.set,
			     GET_MODE (crtl->drap_reg),
			     REGNO (crtl->drap_reg));
      if (targetm.set_up_by_prologue)
	targetm.set_up_by_prologue (&set_up_by_prologue);

      /* We don't use a different max size depending on
	 optimize_bb_for_speed_p because increasing shrink-wrapping
	 opportunities by duplicating tail blocks can actually result
	 in an overall decrease in code size.  */
      max_grow_size = get_uncond_jump_length ();
      max_grow_size *= PARAM_VALUE (PARAM_MAX_GROW_COPY_BB_INSNS);

      FOR_EACH_BB (bb)
	{
	  rtx insn;
	  unsigned size = 0;

	  FOR_BB_INSNS (bb, insn)
	    if (NONDEBUG_INSN_P (insn))
	      {
		if (requires_stack_frame_p (insn, prologue_used,
					    set_up_by_prologue.set))
		  {
		    if (bb == entry_edge->dest)
		      goto fail_shrinkwrap;
		    bitmap_set_bit (&bb_flags, bb->index);
		    vec.quick_push (bb);
		    break;
		  }
		else if (size <= max_grow_size)
		  {
		    size += get_attr_min_length (insn);
		    if (size > max_grow_size)
		      bitmap_set_bit (&bb_on_list, bb->index);
		  }
	      }
	}
      /* Blocks that really need a prologue, or are too big for tails.  */
      bitmap_ior_into (&bb_on_list, &bb_flags);

      /* For every basic block that needs a prologue, mark all blocks
	 reachable from it, so as to ensure they are also seen as
	 requiring a prologue.  */
      while (!vec.is_empty ())
	{
	  basic_block tmp_bb = vec.pop ();

	  FOR_EACH_EDGE (e, ei, tmp_bb->succs)
	    if (e->dest != EXIT_BLOCK_PTR
		&& bitmap_set_bit (&bb_flags, e->dest->index))
	      vec.quick_push (e->dest);
	}

      /* Find the set of basic blocks that need no prologue, have a
	 single successor, can be duplicated, meet a max size
	 requirement, and go to the exit via like blocks.  */
      vec.quick_push (EXIT_BLOCK_PTR);
      while (!vec.is_empty ())
	{
	  basic_block tmp_bb = vec.pop ();

	  FOR_EACH_EDGE (e, ei, tmp_bb->preds)
	    if (single_succ_p (e->src)
		&& !bitmap_bit_p (&bb_on_list, e->src->index)
		&& can_duplicate_block_p (e->src))
	      {
		edge pe;
		edge_iterator pei;

		/* If there is a predecessor of e->src which doesn't
		   need a prologue and the edge is complex,
		   we might not be able to redirect the branch
		   to a copy of e->src.  */
		FOR_EACH_EDGE (pe, pei, e->src->preds)
		  if ((pe->flags & EDGE_COMPLEX) != 0
		      && !bitmap_bit_p (&bb_flags, pe->src->index))
		    break;
		if (pe == NULL && bitmap_set_bit (&bb_tail, e->src->index))
		  vec.quick_push (e->src);
	      }
	}

      /* Now walk backwards from every block that is marked as needing
	 a prologue to compute the bb_antic_flags bitmap.  Exclude
	 tail blocks; they can be duplicated to be used on paths not
	 needing a prologue.  */
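      /* Restated as a dataflow recurrence (an editorial addition): a
	 block BB ends up in ANTIC when

	   ANTIC (BB) = (NEEDS_PROLOGUE (BB) && !TAIL (BB))
			|| (BB has successors
			    && ANTIC (S) holds for every successor S),

	 which the code below computes by seeding ANTIC with the
	 prologue-requiring non-tail blocks and walking backwards.  */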
      bitmap_clear (&bb_on_list);
      bitmap_and_compl (&bb_antic_flags, &bb_flags, &bb_tail);
      FOR_EACH_BB (bb)
	{
	  if (!bitmap_bit_p (&bb_antic_flags, bb->index))
	    continue;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
		&& bitmap_set_bit (&bb_on_list, e->src->index))
	      vec.quick_push (e->src);
	}
      while (!vec.is_empty ())
	{
	  basic_block tmp_bb = vec.pop ();
	  bool all_set = true;

	  bitmap_clear_bit (&bb_on_list, tmp_bb->index);
	  FOR_EACH_EDGE (e, ei, tmp_bb->succs)
	    if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
	      {
		all_set = false;
		break;
	      }

	  if (all_set)
	    {
	      bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
	      FOR_EACH_EDGE (e, ei, tmp_bb->preds)
		if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
		    && bitmap_set_bit (&bb_on_list, e->src->index))
		  vec.quick_push (e->src);
	    }
	}
      /* Find exactly one edge that leads to a block in ANTIC from
	 a block that isn't.  */
      if (!bitmap_bit_p (&bb_antic_flags, entry_edge->dest->index))
	FOR_EACH_BB (bb)
	  {
	    if (!bitmap_bit_p (&bb_antic_flags, bb->index))
	      continue;
	    FOR_EACH_EDGE (e, ei, bb->preds)
	      if (!bitmap_bit_p (&bb_antic_flags, e->src->index))
		{
		  if (entry_edge != orig_entry_edge)
		    {
		      entry_edge = orig_entry_edge;
		      if (dump_file)
			fprintf (dump_file, "More than one candidate edge.\n");
		      goto fail_shrinkwrap;
		    }
		  if (dump_file)
		    fprintf (dump_file, "Found candidate edge for "
			     "shrink-wrapping, %d->%d.\n", e->src->index,
			     e->dest->index);
		  entry_edge = e;
		}
	  }
      if (entry_edge != orig_entry_edge)
	{
	  /* Test whether the prologue is known to clobber any register
	     (other than FP or SP) which is live on the edge.  */
	  CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    CLEAR_HARD_REG_BIT (prologue_clobbered, HARD_FRAME_POINTER_REGNUM);
	  REG_SET_TO_HARD_REG_SET (live_on_edge,
				   df_get_live_in (entry_edge->dest));
	  if (hard_reg_set_intersect_p (live_on_edge, prologue_clobbered))
	    {
	      entry_edge = orig_entry_edge;
	      if (dump_file)
		fprintf (dump_file,
			 "Shrink-wrapping aborted due to clobber.\n");
	    }
	}
      if (entry_edge != orig_entry_edge)
	{
	  crtl->shrink_wrapped = true;
	  if (dump_file)
	    fprintf (dump_file, "Performing shrink-wrapping.\n");
	  /* Find tail blocks reachable from both blocks needing a
	     prologue and blocks not needing a prologue.  */
	  if (!bitmap_empty_p (&bb_tail))
	    FOR_EACH_BB (bb)
	      {
		bool some_pro, some_no_pro;
		if (!bitmap_bit_p (&bb_tail, bb->index))
		  continue;
		some_pro = some_no_pro = false;
		FOR_EACH_EDGE (e, ei, bb->preds)
		  {
		    if (bitmap_bit_p (&bb_flags, e->src->index))
		      some_pro = true;
		    else
		      some_no_pro = true;
		  }
		if (some_pro && some_no_pro)
		  vec.quick_push (bb);
		else
		  bitmap_clear_bit (&bb_tail, bb->index);
	      }
	  /* Find the head of each tail.  */
	  while (!vec.is_empty ())
	    {
	      basic_block tbb = vec.pop ();

	      if (!bitmap_bit_p (&bb_tail, tbb->index))
		continue;

	      while (single_succ_p (tbb))
		{
		  tbb = single_succ (tbb);
		  bitmap_clear_bit (&bb_tail, tbb->index);
		}
	    }
	  /* Now duplicate the tails.  */
	  if (!bitmap_empty_p (&bb_tail))
	    FOR_EACH_BB_REVERSE (bb)
	      {
		basic_block copy_bb, tbb;
		rtx insert_point;
		int eflags;

		if (!bitmap_clear_bit (&bb_tail, bb->index))
		  continue;

		/* Create a copy of BB, instructions and all, for
		   use on paths that don't need a prologue.
		   Ideal placement of the copy is on a fall-thru edge
		   or after a block that would jump to the copy.  */
		FOR_EACH_EDGE (e, ei, bb->preds)
		  if (!bitmap_bit_p (&bb_flags, e->src->index)
		      && single_succ_p (e->src))
		    break;
		if (e)
		  {
		    /* Make sure we insert after any barriers.  */
		    rtx end = get_last_bb_insn (e->src);
		    copy_bb = create_basic_block (NEXT_INSN (end),
						  NULL_RTX, e->src);
		    BB_COPY_PARTITION (copy_bb, e->src);
		  }
		else
		  {
		    /* Otherwise put the copy at the end of the function.  */
		    copy_bb = create_basic_block (NULL_RTX, NULL_RTX,
						  EXIT_BLOCK_PTR->prev_bb);
		    BB_COPY_PARTITION (copy_bb, bb);
		  }

		insert_point = emit_note_after (NOTE_INSN_DELETED,
						BB_END (copy_bb));
		emit_barrier_after (BB_END (copy_bb));

		tbb = bb;
		while (1)
		  {
		    dup_block_and_redirect (tbb, copy_bb, insert_point,
					    &bb_flags);
		    tbb = single_succ (tbb);
		    if (tbb == EXIT_BLOCK_PTR)
		      break;
		    e = split_block (copy_bb, PREV_INSN (insert_point));
		    copy_bb = e->dest;
		  }

		/* Quiet verify_flow_info by (ab)using EDGE_FAKE.
		   We have yet to add a simple_return to the tails,
		   as we'd like to first convert_jumps_to_returns in
		   case the block is no longer used after that.  */
		eflags = EDGE_FAKE;
		if (CALL_P (PREV_INSN (insert_point))
		    && SIBLING_CALL_P (PREV_INSN (insert_point)))
		  eflags = EDGE_SIBCALL | EDGE_ABNORMAL;
		make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR, eflags);

		/* verify_flow_info doesn't like a note after a
		   sibling call.  */
		delete_insn (insert_point);
		if (bitmap_empty_p (&bb_tail))
		  break;
	      }
	}
    fail_shrinkwrap:
      bitmap_clear (&bb_tail);
      bitmap_clear (&bb_antic_flags);
      bitmap_clear (&bb_on_list);
      vec.release ();
    }
#endif
  if (split_prologue_seq != NULL_RTX)
    {
      insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
      inserted = true;
    }
  if (prologue_seq != NULL_RTX)
    {
      insert_insn_on_edge (prologue_seq, entry_edge);
      inserted = true;
    }

  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;
  rtl_profile_for_bb (EXIT_BLOCK_PTR);

  exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR->preds);

  /* If we're allowed to generate a simple return instruction, then by
     definition we don't need a full epilogue.  If the last basic
     block before the exit block does not contain active instructions,
     examine its predecessors and try to emit (conditional) return
     instructions.  */
#ifdef HAVE_simple_return
  if (entry_edge != orig_entry_edge)
    {
      if (optimize)
	{
	  unsigned i, last;

	  /* convert_jumps_to_returns may add to EXIT_BLOCK_PTR->preds
	     (but won't remove).  Stop at end of current preds.  */
	  last = EDGE_COUNT (EXIT_BLOCK_PTR->preds);
	  for (i = 0; i < last; i++)
	    {
	      e = EDGE_I (EXIT_BLOCK_PTR->preds, i);
	      if (LABEL_P (BB_HEAD (e->src))
		  && !bitmap_bit_p (&bb_flags, e->src->index)
		  && !active_insn_between (BB_HEAD (e->src), BB_END (e->src)))
		unconverted_simple_returns
		  = convert_jumps_to_returns (e->src, true,
					      unconverted_simple_returns);
	    }
	}

      if (exit_fallthru_edge != NULL
	  && EDGE_COUNT (exit_fallthru_edge->src->preds) != 0
	  && !bitmap_bit_p (&bb_flags, exit_fallthru_edge->src->index))
	{
	  basic_block last_bb;

	  last_bb = emit_return_for_exit (exit_fallthru_edge, true);
	  returnjump = BB_END (last_bb);
	  exit_fallthru_edge = NULL;
	}
    }
#endif
#ifdef HAVE_return
  if (HAVE_return)
    {
      if (exit_fallthru_edge == NULL)
	goto epilogue_done;

      if (optimize)
	{
	  basic_block last_bb = exit_fallthru_edge->src;

	  if (LABEL_P (BB_HEAD (last_bb))
	      && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
	    convert_jumps_to_returns (last_bb, false, vNULL);

	  if (EDGE_COUNT (last_bb->preds) != 0
	      && single_succ_p (last_bb))
	    {
	      last_bb = emit_return_for_exit (exit_fallthru_edge, false);
	      epilogue_end = returnjump = BB_END (last_bb);
#ifdef HAVE_simple_return
	      /* Emitting the return may add a basic block.
		 Fix bb_flags for the added block.  */
	      if (last_bb != exit_fallthru_edge->src)
		bitmap_set_bit (&bb_flags, last_bb->index);
#endif
	      goto epilogue_done;
	    }
	}
    }
#endif
  /* A small fib -- epilogue is not yet completed, but we wish to re-use
     this marker for the splits of EH_RETURN patterns, and nothing else
     uses the flag in the meantime.  */
  epilogue_completed = 1;

#ifdef HAVE_eh_return
  /* Find non-fallthru edges that end with EH_RETURN instructions.  On
     some targets, these get split to a special version of the epilogue
     code.  In order to be able to properly annotate these with unwind
     info, try to split them now.  If we get a valid split, drop an
     EPILOGUE_BEG note and mark the insns as epilogue insns.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      rtx prev, last, trial;

      if (e->flags & EDGE_FALLTHRU)
	continue;
      last = BB_END (e->src);
      if (!eh_returnjump_p (last))
	continue;

      prev = PREV_INSN (last);
      trial = try_split (PATTERN (last), last, 1);
      if (trial == last)
	continue;

      record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
      emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
    }
#endif
  /* If nothing falls through into the exit block, we don't need an
     epilogue.  */

  if (exit_fallthru_edge == NULL)
    goto epilogue_done;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
      seq = gen_epilogue ();
      if (seq)
	emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, NULL, &epilogue_insn_hash);
      set_insn_locations (seq, epilogue_location);

      seq = get_insns ();
      returnjump = get_last_insn ();
      end_sequence ();

      insert_insn_on_edge (seq, exit_fallthru_edge);
      inserted = true;

      if (JUMP_P (returnjump))
	set_return_jump_label (returnjump);
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
	goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
	 at the end of the function, and there will be an assembler epilogue
	 at the end of the function.
	 We can't use force_nonfallthru here, because that would try to
	 use return.  Inserting a jump 'by hand' is extremely messy, so
	 we take advantage of cfg_layout_finalize using
	 fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
	if (cur_bb->index >= NUM_FIXED_BLOCKS
	    && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
	  cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
    }
 epilogue_done:

  default_rtl_profile ();

  if (inserted)
    {
      sbitmap blocks;

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      blocks = sbitmap_alloc (last_basic_block);
      bitmap_clear (blocks);
      bitmap_set_bit (blocks, entry_edge->dest->index);
      bitmap_set_bit (blocks, orig_entry_edge->dest->index);
      find_many_sub_basic_blocks (blocks);
      sbitmap_free (blocks);

      /* The epilogue insns we inserted may cause the exit edge to no longer
	 be fallthru.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  if (((e->flags & EDGE_FALLTHRU) != 0)
	      && returnjump_p (BB_END (e->src)))
	    e->flags &= ~EDGE_FALLTHRU;
	}
    }
#ifdef HAVE_simple_return
  /* If there were branches to an empty LAST_BB which we tried to
     convert to conditional simple_returns, but couldn't for some
     reason, create a block to hold a simple_return insn and redirect
     those remaining edges.  */
  if (!unconverted_simple_returns.is_empty ())
    {
      basic_block simple_return_block_hot = NULL;
      basic_block simple_return_block_cold = NULL;
      edge pending_edge_hot = NULL;
      edge pending_edge_cold = NULL;
      basic_block exit_pred;
      int i;

      gcc_assert (entry_edge != orig_entry_edge);

      /* See if we can reuse the last insn that was emitted for the
	 epilogue.  */
      if (returnjump != NULL_RTX
	  && JUMP_LABEL (returnjump) == simple_return_rtx)
	{
	  e = split_block (BLOCK_FOR_INSN (returnjump), PREV_INSN (returnjump));
	  if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
	    simple_return_block_hot = e->dest;
	  else
	    simple_return_block_cold = e->dest;
	}

      /* Also check returns we might need to add to tail blocks.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	if (EDGE_COUNT (e->src->preds) != 0
	    && (e->flags & EDGE_FAKE) != 0
	    && !bitmap_bit_p (&bb_flags, e->src->index))
	  {
	    if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
	      pending_edge_hot = e;
	    else
	      pending_edge_cold = e;
	  }

      /* Save a pointer to the exit's predecessor BB for use in
	 inserting new BBs at the end of the function.  Do this
	 after the call to split_block above which may split
	 the original exit pred.  */
      exit_pred = EXIT_BLOCK_PTR->prev_bb;

      FOR_EACH_VEC_ELT (unconverted_simple_returns, i, e)
	{
	  basic_block *pdest_bb;
	  edge pending;

	  if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
	    {
	      pdest_bb = &simple_return_block_hot;
	      pending = pending_edge_hot;
	    }
	  else
	    {
	      pdest_bb = &simple_return_block_cold;
	      pending = pending_edge_cold;
	    }

	  if (*pdest_bb == NULL && pending != NULL)
	    {
	      emit_return_into_block (true, pending->src);
	      pending->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
	      *pdest_bb = pending->src;
	    }
	  else if (*pdest_bb == NULL)
	    {
	      basic_block bb;
	      rtx start;

	      bb = create_basic_block (NULL, NULL, exit_pred);
	      BB_COPY_PARTITION (bb, e->src);
	      start = emit_jump_insn_after (gen_simple_return (),
					    BB_END (bb));
	      JUMP_LABEL (start) = simple_return_rtx;
	      emit_barrier_after (start);

	      *pdest_bb = bb;
	      make_edge (bb, EXIT_BLOCK_PTR, 0);
	    }
	  redirect_edge_and_branch_force (e, *pdest_bb);
	}
      unconverted_simple_returns.release ();
    }

  if (entry_edge != orig_entry_edge)
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	if (EDGE_COUNT (e->src->preds) != 0
	    && (e->flags & EDGE_FAKE) != 0
	    && !bitmap_bit_p (&bb_flags, e->src->index))
	  {
	    emit_return_into_block (true, e->src);
	    e->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
	  }
    }
#endif
#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei));)
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);
      rtx ep_seq;

      if (!CALL_P (insn)
	  || ! SIBLING_CALL_P (insn)
#ifdef HAVE_simple_return
	  || (entry_edge != orig_entry_edge
	      && !bitmap_bit_p (&bb_flags, bb->index))
#endif
	  )
	{
	  ei_next (&ei);
	  continue;
	}

      ep_seq = gen_sibcall_epilogue ();
      if (ep_seq)
	{
	  start_sequence ();
	  emit_note (NOTE_INSN_EPILOGUE_BEG);
	  emit_insn (ep_seq);
	  seq = get_insns ();
	  end_sequence ();

	  /* Retain a map of the epilogue insns.  Used in life analysis to
	     avoid getting rid of sibcall epilogue insns.  Do this before we
	     actually emit the sequence.  */
	  record_insns (seq, NULL, &epilogue_insn_hash);
	  set_insn_locations (seq, epilogue_location);

	  emit_insn_before (seq, insn);
	}
      ei_next (&ei);
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
	 of such a note.  Also possibly move
	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
	 info generation.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (NOTE_P (insn)
	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
    }
#endif

#ifdef HAVE_simple_return
  bitmap_clear (&bb_flags);
#endif

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
    || defined (HAVE_sibcall_epilogue)
  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = htab_elements (prologue_insn_hash);
      rtx insn, last = NULL, note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
	 (1) The prologue can contain loops (typically probing the stack),
	     which means that the end of the prologue isn't in the first bb.
	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, prologue_insn_hash))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  if (note == NULL)
	    {
	      /* Scan forward looking for the PROLOGUE_END note.  It should
		 be right at the beginning of the block, possibly with other
		 insn notes that got moved there.  */
	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
		{
		  if (NOTE_P (note)
		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
		    break;
		}
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (LABEL_P (last))
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }

  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  rtx insn, first = NULL, note = NULL;
	  basic_block bb = e->src;

	  /* Scan from the beginning until we reach the first epilogue insn.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      if (NOTE_P (insn))
		{
		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
		    {
		      note = insn;
		      if (first != NULL)
			break;
		    }
		}
	      else if (first == NULL && contains (insn, epilogue_insn_hash))
		{
		  first = insn;
		  if (note != NULL)
		    break;
		}
	    }

	  if (note)
	    {
	      /* If the function has a single basic block, and no real
		 epilogue insns (e.g. sibcall with no cleanup), the
		 epilogue note can get scheduled before the prologue
		 note.  If we have frame related prologue insns, having
		 them scanned during the epilogue will result in a crash.
		 In this case re-order the epilogue note to just before
		 the last insn in the block.  */
	      if (first == NULL)
		first = BB_END (bb);

	      if (PREV_INSN (first) != note)
		reorder_insns (note, note, PREV_INSN (first));
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Returns the name of the function declared by FNDECL.  */
const char *
fndecl_name (tree fndecl)
{
  if (fndecl == NULL)
    return "(nofn)";
  return lang_hooks.decl_printable_name (fndecl, 2);
}

/* Returns the name of function FN.  */
const char *
function_name (struct function *fn)
{
  tree fndecl = (fn == NULL) ? NULL : fn->decl;
  return fndecl_name (fndecl);
}

/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return function_name (cfun);
}
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
	func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
						 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
	*slot = type;
    }
}
/* Given a type, insert it into the used types hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
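  /* For example (an editorial addition): for a 'struct S **', the two
     unnamed pointer layers are stripped by the loop above, so it is
     'struct S' itself that gets recorded below.  */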
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
	used_types_insert_helper (t, cfun);
      else
	{
	  /* So this might be a type referenced by a global variable.
	     Record that type so that we can later decide to emit its
	     debug information.  */
	  vec_safe_push (types_used_by_cur_var_decl, t);
	}
    }
}
/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
				iterative_hash_object (entry->var_decl, 0));
}
/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
types_used_by_vars_do_hash (const void *x)
{
  const struct types_used_by_vars_entry *entry =
    (const struct types_used_by_vars_entry *) x;

  return hash_types_used_by_vars_entry (entry);
}
/* Equality function of the types_used_by_vars_entry hash table.  */

int
types_used_by_vars_eq (const void *x1, const void *x2)
{
  const struct types_used_by_vars_entry *e1 =
    (const struct types_used_by_vars_entry *) x1;
  const struct types_used_by_vars_entry *e2 =
    (const struct types_used_by_vars_entry *) x2;

  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}
/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      void **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
	types_used_by_vars_hash =
	  htab_create_ggc (37, types_used_by_vars_do_hash,
			   types_used_by_vars_eq, NULL);
      slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
				       hash_types_used_by_vars_entry (&e),
				       INSERT);
      if (*slot == NULL)
	{
	  struct types_used_by_vars_entry *entry;
	  entry = ggc_alloc_types_used_by_vars_entry ();
	  entry->type = type;
	  entry->var_decl = var_decl;
	  *slot = entry;
	}
    }
}
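/* A minimal usage sketch (an editorial addition; VAR is a hypothetical
   VAR_DECL):

     types_used_by_var_decl_insert (TREE_TYPE (var), var);

   This records that VAR uses its type, creating types_used_by_vars_hash
   on first use and ignoring exact duplicates.  */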
namespace {

const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return rest_of_handle_check_leaf_regs (); }

}; // class pass_leaf_regs

} // anon namespace

rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}
namespace {

const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_verify_flow, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish
    | TODO_verify_rtl_sharing ), /* todo_flags_finish */
};

class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () {
    return rest_of_handle_thread_prologue_and_epilogue ();
  }

}; // class pass_thread_prologue_and_epilogue

} // anon namespace

rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads,
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
	constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
	continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
	  || rtx_equal_p (output, input)
	  || (GET_MODE (input) != VOIDmode
	      && GET_MODE (input) != GET_MODE (output)))
	continue;

      /* We can't do anything if the output is also used as input,
	 as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
	  break;
      if (j != ninputs)
	continue;

      /* Avoid changing the same input several times.  For
	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
	 only change in once (to out1), rather than changing it
	 first to out1 and afterwards to out2.  */
      if (i > 0)
	{
	  for (j = 0; j < noutputs; j++)
	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
	      break;
	  if (j != noutputs)
	    continue;
	}
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
	 just replace the occurrence in inputs[i], as the register might
	 also be used in some other input (or even in an address of an
	 output), which would mean possibly increasing the number of
	 inputs by one (namely 'output' in addition), which might pose
	 a too complicated problem for reload to solve.  E.g. this situation:

	   asm ("" : "=r" (output), "=m" (input) : "0" (input))

	 Here 'input' is used in two occurrences as input (once for the
	 input operand, once for the address in the second output operand).
	 If we would replace only the occurrence of the input operand (to
	 make the matching) we would be left with this:

	   output = input
	   asm ("" : "=r" (output), "=m" (input) : "0" (output))

	 Now we suddenly have two different input values (containing the same
	 value, but different pseudos) where we formerly had only one.
	 With more complicated asms this might lead to reload failures
	 which wouldn't have happened without this pass.  So, iterate over
	 all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
					      input, output);
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
					       input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
static unsigned
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL)
	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
	  else if (GET_CODE (pat) == SET)
	    p_sets = &PATTERN (insn), noutputs = 1;
	  else
	    continue;

	  if (GET_CODE (*p_sets) == SET
	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
	    match_asm_constraints_1 (insn, p_sets, noutputs);
	}
    }

  return TODO_df_finish;
}
namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return rest_of_match_asm_constraints (); }

}; // class pass_match_asm_constraints

} // anon namespace

rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}


#include "gt-function.h"