/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "cfglayout.h"
#include "gimple.h"
#include "tree-pass.h"
#include "predict.h"
#include "df.h"
#include "timevar.h"
#include "vecprim.h"
#include "params.h"
#include "bb-reorder.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc; in
   these cases use the macros NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

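/* Illustrative, compiled-out sketch (not from the original source): how
   the two rounding macros behave, assuming ALIGN is a power of two.
   Masking rounds toward negative infinity, which is what a
   downward-growing frame needs; signed division would round toward zero
   for negative offsets.  The function name is hypothetical.  */
#if 0
#include <assert.h>
static void
round_macro_examples (void)
{
  /* FLOOR_ROUND rounds down to a multiple of the alignment.  */
  assert (FLOOR_ROUND (13, 8) == 8);
  assert (FLOOR_ROUND (-13, 8) == -16);  /* not -8, as (-13/8)*8 would give */
  /* CEIL_ROUND rounds up to a multiple of the alignment.  */
  assert (CEIL_ROUND (13, 8) == 16);
  assert (CEIL_ROUND (16, 8) == 16);
}
#endif
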
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;


htab_t types_used_by_vars_hash = NULL;
VEC(tree,gc) *types_used_by_cur_var_decl;

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;

/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);
static VEC(function_p,heap) *function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  VEC_safe_push (function_p, heap, function_context_stack, cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = VEC_pop (function_p, function_context_stack);
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
  insn_locators_free ();
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
             /* Leave room for the fixed part of the frame.  */
             - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
                "total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}

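/* Worked example (illustrative, not from the original source): with a
   hypothetical 32-bit Pmode and 4-byte words, the check above rejects
   frames larger than 2^31 - 64*4 = 2147483392 bytes, keeping 256 bytes
   in reserve for the fixed part of the frame so that signed
   frame-pointer offsets cannot wrap.  */
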
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}

/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
                     HOST_WIDE_INT size, unsigned int alignment,
                     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /*  We must be careful here, since FRAME_OFFSET might be negative and
      division with a negative dividend isn't as well defined as we might
      like.  So we instead assume that ALIGNMENT is a power of two and
      use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
                      (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
                     (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
           && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}

/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc_frame_space ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
                      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                              || size == 0
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
        {
          struct frame_space **psp;

          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
            {
              struct frame_space *space = *psp;
              if (!try_fit_stack_local (space->start, space->length, size,
                                        alignment, &slot_offset))
                continue;
              *psp = space->next;
              if (slot_offset > space->start)
                add_frame_space (space->start, slot_offset);
              if (slot_offset + size < space->start + space->length)
                add_frame_space (slot_offset + size,
                                 space->start + space->length);
              goto found_space;
            }
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > frame_offset)
            add_frame_space (frame_offset, slot_offset);
          if (slot_offset + size < old_frame_offset)
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > old_frame_offset)
            add_frame_space (old_frame_offset, slot_offset);
          if (slot_offset + size < frame_offset)
            add_frame_space (slot_offset + size, frame_offset);
        }
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}

/* Wrap up assign_stack_local_1 with the last parameter as
   ASLK_RECORD_PAD.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}

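/* Illustrative, compiled-out sketch (not from the original source): how
   a caller typically obtains a stack slot.  The function name and the
   SImode request below are hypothetical; align == 0 asks for the natural
   alignment of the mode, as documented above assign_stack_local_1.  */
#if 0
static rtx
example_spill_slot (void)
{
  /* A slot of SImode size, aligned according to SImode.  Before virtual
     register instantiation the returned MEM's address is based on
     virtual_stack_vars_rtx; afterwards on frame_pointer_rtx.  */
  return assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
}
#endif
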
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;

/* Entry for the above hash table.  */
struct GTY(()) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}

/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}

/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    *slot = NULL;
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  htab_traverse (temp_slot_address_table,
                 remove_unused_temp_slot_addresses_1,
                 NULL);
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          {
            if (INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
              return p;
          }
    }

  return NULL;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc_temp_slot ();
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc_temp_slot ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
                                      (mode == BLKmode
                                       ? CEIL_ROUND (size,
                                                     (int) align
                                                     / BITS_PER_UNIT)
                                       : size),
                                      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      gcc_checking_assert (!MEM_SCALAR_P (slot) && !MEM_IN_STRUCT_P (slot));
      if (AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE)
        MEM_IN_STRUCT_P (slot) = 1;
      else
        MEM_SCALAR_P (slot) = 1;
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}

/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate space of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      else if (size == -1)
        size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}

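/* Worked example (illustrative, not from the original source): two
   available BLKmode slots are merged when they abut exactly, i.e. when
   p->base_offset + p->full_size == q->base_offset.  For hypothetical
   slots P at base_offset 0 with full_size 16 and Q at base_offset 16
   with full_size 8, the merge leaves one free slot at base_offset 0
   with full_size 24, reusable for a larger later temporary.  */
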
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
     in common between them; if so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        {
          make_slot_available (p);
          some_available = true;
        }
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }

  temp_slot_level--;
}

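/* Illustrative, compiled-out sketch (not from the original source): the
   usual temp-slot lifecycle seen from a caller.  The function name and
   the SImode request are hypothetical examples.  */
#if 0
static void
example_temp_slot_lifecycle (void)
{
  rtx tmp;

  push_temp_slots ();           /* open a new nesting level */
  tmp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
  /* ... emit code that stores to and reads from TMP ... */
  preserve_temp_slots (tmp);    /* move TMP to the enclosing level */
  pop_temp_slots ();            /* free everything left at this level */
}
#endif
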
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
                                               temp_slot_address_hash,
                                               temp_slot_address_eq,
                                               NULL);
  else
    htab_empty (temp_slot_address_table);
}

/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following four variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
        : REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif


/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
        {
          /* Replace virtual_incoming_args_rtx with internal arg
             pointer if DRAP is used to realign stack.  */
          new_rtx = crtl->args.internal_arg_pointer;
          offset = 0;
        }
      else
        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}

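/* Illustrative example (not from the original source): after the offsets
   are computed in instantiate_virtual_regs below, an address such as
   (plus virtual_stack_vars_rtx (const_int 8)) is rewritten to
   (plus frame_pointer_rtx (const_int var_offset + 8)), where var_offset
   is the target's STARTING_FRAME_OFFSET.  */
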
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
        {
          *loc = plus_constant (new_rtx, offset);
          if (changed)
            *changed = true;
        }
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
        {
          new_rtx = plus_constant (new_rtx, offset);
          *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
          if (changed)
            *changed = true;
          return -1;
        }

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
         we can commute the PLUS and SUBREG because pointers into the
         frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}

/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}

/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
        {
          start_sequence ();

          for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
          x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
                                   GEN_INT (-offset));
          x = force_operand (x, new_rtx);
          if (x != new_rtx)
            emit_move_insn (new_rtx, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
                                   new_rtx, GEN_INT (offset), SET_DEST (set),
                                   1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && CONST_INT_P (recog_data.operand[2])
          && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += INTVAL (recog_data.operand[2]);

          /* If the sum is zero, then replace with a plain move.  */
          if (offset == 0
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new_rtx);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new_rtx)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);
            bool changed = false;

            for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
            if (!changed)
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr);
            /* It may happen that the address with the virtual reg
               was valid (e.g. based on the virtual stack reg, which might
               be acceptable to the predicates with all offsets), whereas
               the address now isn't anymore, for instance when the address
               is still offsetted, but the base reg isn't virtual-stack-reg
               anymore.  Below we would do a force_reg on the whole operand,
               but this insn might actually only accept memory.  Hence,
               before doing that last resort, try to reload the address into
               a register, so this operand stays a MEM.  */
            if (!safe_insn_predicate (insn_code, i, x))
              {
                addr = force_reg (GET_MODE (addr), addr);
                x = replace_equiv_address (x, addr);
              }
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new_rtx = instantiate_new_reg (x, &offset);
          if (new_rtx == NULL)
            continue;
          if (offset == 0)
            x = new_rtx;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
                                       GEN_INT (offset), NULL_RTX,
                                       1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new_rtx == NULL)
            continue;
          if (offset != 0)
            {
              start_sequence ();
              new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
                                             GEN_INT (offset), NULL_RTX,
                                             1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
                                   GET_MODE (new_rtx), SUBREG_BYTE (x));
          gcc_assert (x);
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        {
          start_sequence ();
          if (REG_P (x))
            {
              gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
              x = copy_to_reg (x);
            }
          else
            x = force_reg (insn_data[insn_code].operand[i].mode, x);
          seq = get_insns ();
          end_sequence ();
          if (seq)
            emit_insn_before (seq, insn);
        }

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}

/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}

/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
        {
          if (DECL_RTL_SET_P (t))
            instantiate_decl_rtl (DECL_RTL (t));
          if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
              && DECL_INCOMING_RTL (t))
            instantiate_decl_rtl (DECL_INCOMING_RTL (t));
          if ((TREE_CODE (t) == VAR_DECL
               || TREE_CODE (t) == RESULT_DECL)
              && DECL_HAS_VALUE_EXPR_P (t))
            {
              tree v = DECL_VALUE_EXPR (t);
              walk_tree (&v, instantiate_expr, NULL, NULL);
            }
        }
    }
  return NULL;
}

/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
        {
          tree v = DECL_VALUE_EXPR (t);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}

/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
        instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  VEC_free (tree, gc, cfun->local_decls);
}

/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ADDR_VEC
            || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
            || GET_CODE (PATTERN (insn)) == ASM_INPUT)
          continue;
        else if (DEBUG_INSN_P (insn))
          for_each_rtx (&INSN_VAR_LOCATION (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
        else
          instantiate_virtual_regs_in_insn (insn);

        if (INSN_DELETED_P (insn))
          continue;

        for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (CALL_P (insn))
          for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  return 0;
}

struct rtl_opt_pass pass_instantiate_virtual_regs =
{
 {
  RTL_PASS,
  "vregs",                              /* name */
  NULL,                                 /* gate */
  instantiate_virtual_regs,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

1963 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1964 This means a type for which function calls must pass an address to the
1965 function or get an address back from the function.
1966 EXP may be a type node or an expression (whose type is tested). */
1968 int
1969 aggregate_value_p (const_tree exp, const_tree fntype)
1971 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1972 int i, regno, nregs;
1973 rtx reg;
1975 if (fntype)
1976 switch (TREE_CODE (fntype))
1978 case CALL_EXPR:
1980 tree fndecl = get_callee_fndecl (fntype);
1981 fntype = (fndecl
1982 ? TREE_TYPE (fndecl)
1983 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
1985 break;
1986 case FUNCTION_DECL:
1987 fntype = TREE_TYPE (fntype);
1988 break;
1989 case FUNCTION_TYPE:
1990 case METHOD_TYPE:
1991 break;
1992 case IDENTIFIER_NODE:
1993 fntype = NULL_TREE;
1994 break;
1995 default:
1996 /* We don't expect other tree types here. */
1997 gcc_unreachable ();
2000 if (VOID_TYPE_P (type))
2001 return 0;
2003 /* If a record should be passed the same as its first (and only) member,
2004 don't pass it as an aggregate. */
2005 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2006 return aggregate_value_p (first_field (type), fntype);
2008 /* If the front end has decided that this needs to be passed by
2009 reference, do so. */
2010 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2011 && DECL_BY_REFERENCE (exp))
2012 return 1;
2014 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2015 if (fntype && TREE_ADDRESSABLE (fntype))
2016 return 1;
2018 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2019 and thus can't be returned in registers. */
2020 if (TREE_ADDRESSABLE (type))
2021 return 1;
2023 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2024 return 1;
2026 if (targetm.calls.return_in_memory (type, fntype))
2027 return 1;
2029 /* Make sure we have suitable call-clobbered regs to return
2030 the value in; if not, we must return it in memory. */
2031 reg = hard_function_value (type, 0, fntype, 0);
2033 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2034 it is OK. */
2035 if (!REG_P (reg))
2036 return 0;
2038 regno = REGNO (reg);
2039 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2040 for (i = 0; i < nregs; i++)
2041 if (! call_used_regs[regno + i])
2042 return 1;
2044 return 0;
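/* Worked example, assuming a typical 32-bit target (not original
   text): for "struct S { int a[4]; }" the return_in_memory hook
   normally answers true, so aggregate_value_p returns 1 and the
   caller must pass a hidden address for the return slot; for a
   plain "int", hard_function_value yields a call-clobbered hard
   register and the function returns 0.  */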
2047 /* Return true if we should assign DECL a pseudo register; false if it
2048 should live on the local stack. */
2050 bool
2051 use_register_for_decl (const_tree decl)
2053 if (!targetm.calls.allocate_stack_slots_for_args ())
2054 return true;
2056 /* Honor volatile. */
2057 if (TREE_SIDE_EFFECTS (decl))
2058 return false;
2060 /* Honor addressability. */
2061 if (TREE_ADDRESSABLE (decl))
2062 return false;
2064 /* Only register-like things go in registers. */
2065 if (DECL_MODE (decl) == BLKmode)
2066 return false;
2068 /* If -ffloat-store specified, don't put explicit float variables
2069 into registers. */
2070 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2071 propagates values across these stores, and it probably shouldn't. */
2072 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2073 return false;
2075 /* If we're not interested in tracking debugging information for
2076 this decl, then we can certainly put it in a register. */
2077 if (DECL_IGNORED_P (decl))
2078 return true;
2080 if (optimize)
2081 return true;
2083 if (!DECL_REGISTER (decl))
2084 return false;
2086 switch (TREE_CODE (TREE_TYPE (decl)))
2088 case RECORD_TYPE:
2089 case UNION_TYPE:
2090 case QUAL_UNION_TYPE:
2091 /* When not optimizing, disregard register keyword for variables with
2092 types containing methods, otherwise the methods won't be callable
2093 from the debugger. */
2094 if (TYPE_METHODS (TREE_TYPE (decl)))
2095 return false;
2096 break;
2097 default:
2098 break;
2101 return true;
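/* Illustrative reading of the checks above (not original text):
   at -O0 a plain "int i;" lands on the stack because DECL_REGISTER
   is unset, while "register int i;" is allowed into a pseudo
   unless its type is a class with methods, which stays in memory
   so the methods remain callable from the debugger.  */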
2104 /* Return true if TYPE should be passed by invisible reference. */
2106 bool
2107 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2108 tree type, bool named_arg)
2110 if (type)
2112 /* If this type contains non-trivial constructors, then it is
2113 forbidden for the middle-end to create any new copies. */
2114 if (TREE_ADDRESSABLE (type))
2115 return true;
2117 /* GCC post 3.4 passes *all* variable-sized types by reference. */
2118 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2119 return true;
2121 /* If a record type should be passed the same as its first (and only)
2122 member, use the type and mode of that member. */
2123 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2125 type = TREE_TYPE (first_field (type));
2126 mode = TYPE_MODE (type);
2130 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2131 type, named_arg);
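/* Hedged example of the rules above: a C++ type with a nontrivial
   copy constructor is TREE_ADDRESSABLE and is therefore always
   passed by invisible reference, as is any type whose TYPE_SIZE is
   not an INTEGER_CST (e.g. a variably sized type).  Small scalars
   fall through to the target hook, which usually answers "by
   value".  */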
2134 /* Return true if TYPE, which is passed by reference, should be callee
2135 copied instead of caller copied. */
2137 bool
2138 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2139 tree type, bool named_arg)
2141 if (type && TREE_ADDRESSABLE (type))
2142 return false;
2143 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2144 named_arg);
2147 /* Structures to communicate between the subroutines of assign_parms.
2148 The first holds data persistent across all parameters, the second
2149 is cleared out for each parameter. */
2151 struct assign_parm_data_all
2153 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2154 should become a job of the target or otherwise encapsulated. */
2155 CUMULATIVE_ARGS args_so_far_v;
2156 cumulative_args_t args_so_far;
2157 struct args_size stack_args_size;
2158 tree function_result_decl;
2159 tree orig_fnargs;
2160 rtx first_conversion_insn;
2161 rtx last_conversion_insn;
2162 HOST_WIDE_INT pretend_args_size;
2163 HOST_WIDE_INT extra_pretend_bytes;
2164 int reg_parm_stack_space;
2167 struct assign_parm_data_one
2169 tree nominal_type;
2170 tree passed_type;
2171 rtx entry_parm;
2172 rtx stack_parm;
2173 enum machine_mode nominal_mode;
2174 enum machine_mode passed_mode;
2175 enum machine_mode promoted_mode;
2176 struct locate_and_pad_arg_data locate;
2177 int partial;
2178 BOOL_BITFIELD named_arg : 1;
2179 BOOL_BITFIELD passed_pointer : 1;
2180 BOOL_BITFIELD on_stack : 1;
2181 BOOL_BITFIELD loaded_in_reg : 1;
2184 /* A subroutine of assign_parms. Initialize ALL. */
2186 static void
2187 assign_parms_initialize_all (struct assign_parm_data_all *all)
2189 tree fntype ATTRIBUTE_UNUSED;
2191 memset (all, 0, sizeof (*all));
2193 fntype = TREE_TYPE (current_function_decl);
2195 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2196 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2197 #else
2198 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2199 current_function_decl, -1);
2200 #endif
2201 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2203 #ifdef REG_PARM_STACK_SPACE
2204 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2205 #endif
2208 /* If ARGS contains entries with complex types, split each entry into two
2209 entries of the component type, updating ARGS in place. */
2212 static void
2213 split_complex_args (VEC(tree, heap) **args)
2215 unsigned i;
2216 tree p;
2218 FOR_EACH_VEC_ELT (tree, *args, i, p)
2220 tree type = TREE_TYPE (p);
2221 if (TREE_CODE (type) == COMPLEX_TYPE
2222 && targetm.calls.split_complex_arg (type))
2224 tree decl;
2225 tree subtype = TREE_TYPE (type);
2226 bool addressable = TREE_ADDRESSABLE (p);
2228 /* Rewrite the PARM_DECL's type with its component. */
2229 p = copy_node (p);
2230 TREE_TYPE (p) = subtype;
2231 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2232 DECL_MODE (p) = VOIDmode;
2233 DECL_SIZE (p) = NULL;
2234 DECL_SIZE_UNIT (p) = NULL;
2235 /* If this arg must go in memory, put it in a pseudo here.
2236 We can't allow it to go in memory as per normal parms,
2237 because the usual place might not have the imag part
2238 adjacent to the real part. */
2239 DECL_ARTIFICIAL (p) = addressable;
2240 DECL_IGNORED_P (p) = addressable;
2241 TREE_ADDRESSABLE (p) = 0;
2242 layout_decl (p, 0);
2243 VEC_replace (tree, *args, i, p);
2245 /* Build a second synthetic decl. */
2246 decl = build_decl (EXPR_LOCATION (p),
2247 PARM_DECL, NULL_TREE, subtype);
2248 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2249 DECL_ARTIFICIAL (decl) = addressable;
2250 DECL_IGNORED_P (decl) = addressable;
2251 layout_decl (decl, 0);
2252 VEC_safe_insert (tree, heap, *args, ++i, decl);
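/* Sketch of the transformation above (illustrative): a parameter
   declared "_Complex double z" is replaced by two synthetic
   PARM_DECLs of type double, one for the real part and one for
   the imaginary part, whenever the target's split_complex_arg
   hook approves of the complex type.  */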
2257 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2258 the hidden struct return argument, and (ABI willing) complex args.
2259 Return the new parameter list. */
2261 static VEC(tree, heap) *
2262 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2264 tree fndecl = current_function_decl;
2265 tree fntype = TREE_TYPE (fndecl);
2266 VEC(tree, heap) *fnargs = NULL;
2267 tree arg;
2269 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2270 VEC_safe_push (tree, heap, fnargs, arg);
2272 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2274 /* If the struct value address is treated as the first argument, make it so. */
2275 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2276 && ! cfun->returns_pcc_struct
2277 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2279 tree type = build_pointer_type (TREE_TYPE (fntype));
2280 tree decl;
2282 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2283 PARM_DECL, get_identifier (".result_ptr"), type);
2284 DECL_ARG_TYPE (decl) = type;
2285 DECL_ARTIFICIAL (decl) = 1;
2286 DECL_NAMELESS (decl) = 1;
2287 TREE_CONSTANT (decl) = 1;
2289 DECL_CHAIN (decl) = all->orig_fnargs;
2290 all->orig_fnargs = decl;
2291 VEC_safe_insert (tree, heap, fnargs, 0, decl);
2293 all->function_result_decl = decl;
2296 /* If the target wants to split complex arguments into scalars, do so. */
2297 if (targetm.calls.split_complex_arg)
2298 split_complex_args (&fnargs);
2300 return fnargs;
2303 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2304 data for the parameter. Incorporate ABI specifics such as pass-by-
2305 reference and type promotion. */
2307 static void
2308 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2309 struct assign_parm_data_one *data)
2311 tree nominal_type, passed_type;
2312 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2313 int unsignedp;
2315 memset (data, 0, sizeof (*data));
2317 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2318 if (!cfun->stdarg)
2319 data->named_arg = 1; /* No variadic parms. */
2320 else if (DECL_CHAIN (parm))
2321 data->named_arg = 1; /* Not the last non-variadic parm. */
2322 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2323 data->named_arg = 1; /* Only variadic ones are unnamed. */
2324 else
2325 data->named_arg = 0; /* Treat as variadic. */
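/* Example of this classification (not original text): in
   "int f (int a, int b, ...)", A is always named; B, being the
   last declared parameter of a stdarg function, is named only if
   the target's strict_argument_naming hook says so.  In a
   non-variadic function every parameter is named.  */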
2327 nominal_type = TREE_TYPE (parm);
2328 passed_type = DECL_ARG_TYPE (parm);
2330 /* Look out for errors propagating this far. Also, if the parameter's
2331 type is void then its value doesn't matter. */
2332 if (TREE_TYPE (parm) == error_mark_node
2333 /* This can happen after weird syntax errors
2334 or if an enum type is defined among the parms. */
2335 || TREE_CODE (parm) != PARM_DECL
2336 || passed_type == NULL
2337 || VOID_TYPE_P (nominal_type))
2339 nominal_type = passed_type = void_type_node;
2340 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2341 goto egress;
2344 /* Find mode of arg as it is passed, and mode of arg as it should be
2345 during execution of this function. */
2346 passed_mode = TYPE_MODE (passed_type);
2347 nominal_mode = TYPE_MODE (nominal_type);
2349 /* If the parm is to be passed as a transparent union or record, use the
2350 type of the first field for the tests below. We have already verified
2351 that the modes are the same. */
2352 if ((TREE_CODE (passed_type) == UNION_TYPE
2353 || TREE_CODE (passed_type) == RECORD_TYPE)
2354 && TYPE_TRANSPARENT_AGGR (passed_type))
2355 passed_type = TREE_TYPE (first_field (passed_type));
2357 /* See if this arg was passed by invisible reference. */
2358 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2359 passed_type, data->named_arg))
2361 passed_type = nominal_type = build_pointer_type (passed_type);
2362 data->passed_pointer = true;
2363 passed_mode = nominal_mode = Pmode;
2366 /* Find mode as it is passed by the ABI. */
2367 unsignedp = TYPE_UNSIGNED (passed_type);
2368 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2369 TREE_TYPE (current_function_decl), 0);
2371 egress:
2372 data->nominal_type = nominal_type;
2373 data->passed_type = passed_type;
2374 data->nominal_mode = nominal_mode;
2375 data->passed_mode = passed_mode;
2376 data->promoted_mode = promoted_mode;
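/* Illustrative effect of promote_function_mode above: on targets
   that widen small integer arguments (most RISC ABIs), a "short"
   parameter has passed_mode HImode but promoted_mode SImode, with
   UNSIGNEDP recording whether zero- or sign-extension produced
   the wider value.  */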
2379 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2381 static void
2382 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2383 struct assign_parm_data_one *data, bool no_rtl)
2385 int varargs_pretend_bytes = 0;
2387 targetm.calls.setup_incoming_varargs (all->args_so_far,
2388 data->promoted_mode,
2389 data->passed_type,
2390 &varargs_pretend_bytes, no_rtl);
2392 /* If the back-end has requested extra stack space, record how much is
2393 needed. Do not change pretend_args_size otherwise since it may be
2394 nonzero from an earlier partial argument. */
2395 if (varargs_pretend_bytes > 0)
2396 all->pretend_args_size = varargs_pretend_bytes;
2399 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2400 the incoming location of the current parameter. */
2402 static void
2403 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2404 struct assign_parm_data_one *data)
2406 HOST_WIDE_INT pretend_bytes = 0;
2407 rtx entry_parm;
2408 bool in_regs;
2410 if (data->promoted_mode == VOIDmode)
2412 data->entry_parm = data->stack_parm = const0_rtx;
2413 return;
2416 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2417 data->promoted_mode,
2418 data->passed_type,
2419 data->named_arg);
2421 if (entry_parm == 0)
2422 data->promoted_mode = data->passed_mode;
2424 /* Determine parm's home in the stack, in case it arrives in the stack
2425 or we should pretend it did. Compute the stack position and rtx where
2426 the argument arrives and its size.
2428 There is one complexity here: If this was a parameter that would
2429 have been passed in registers, but wasn't only because it is
2430 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2431 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2432 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2433 as it was the previous time. */
2434 in_regs = entry_parm != 0;
2435 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2436 in_regs = true;
2437 #endif
2438 if (!in_regs && !data->named_arg)
2440 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2442 rtx tem;
2443 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2444 data->promoted_mode,
2445 data->passed_type, true);
2446 in_regs = tem != NULL;
2450 /* If this parameter was passed both in registers and in the stack, use
2451 the copy on the stack. */
2452 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2453 data->passed_type))
2454 entry_parm = 0;
2456 if (entry_parm)
2458 int partial;
2460 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2461 data->promoted_mode,
2462 data->passed_type,
2463 data->named_arg);
2464 data->partial = partial;
2466 /* The caller might already have allocated stack space for the
2467 register parameters. */
2468 if (partial != 0 && all->reg_parm_stack_space == 0)
2470 /* Part of this argument is passed in registers and part
2471 is passed on the stack. Ask the prologue code to extend
2472 the stack part so that we can recreate the full value.
2474 PRETEND_BYTES is the size of the registers we need to store.
2475 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2476 stack space that the prologue should allocate.
2478 Internally, gcc assumes that the argument pointer is aligned
2479 to STACK_BOUNDARY bits. This is used both for alignment
2480 optimizations (see init_emit) and to locate arguments that are
2481 aligned to more than PARM_BOUNDARY bits. We must preserve this
2482 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2483 a stack boundary. */
2485 /* We assume at most one partial arg, and it must be the first
2486 argument on the stack. */
2487 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2489 pretend_bytes = partial;
2490 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2492 /* We want to align relative to the actual stack pointer, so
2493 don't include this in the stack size until later. */
2494 all->extra_pretend_bytes = all->pretend_args_size;
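/* Worked example with hypothetical numbers: if 4 bytes of the
   argument arrived in registers (PARTIAL == 4) and STACK_BYTES is
   8, then pretend_args_size = CEIL_ROUND (4, 8) = 8, so the
   prologue allocates a full 8-byte slot and the argument pointer
   stays aligned to STACK_BOUNDARY.  */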
2498 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2499 entry_parm ? data->partial : 0, current_function_decl,
2500 &all->stack_args_size, &data->locate);
2502 /* Update parm_stack_boundary if this parameter is passed in the
2503 stack. */
2504 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2505 crtl->parm_stack_boundary = data->locate.boundary;
2507 /* Adjust offsets to include the pretend args. */
2508 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2509 data->locate.slot_offset.constant += pretend_bytes;
2510 data->locate.offset.constant += pretend_bytes;
2512 data->entry_parm = entry_parm;
2515 /* A subroutine of assign_parms. If there is actually space on the stack
2516 for this parm, count it in stack_args_size and return true. */
2518 static bool
2519 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2520 struct assign_parm_data_one *data)
2522 /* Trivially true if we've no incoming register. */
2523 if (data->entry_parm == NULL)
2525 /* Also true if we're partially in registers and partially not,
2526 since we've arranged to drop the entire argument on the stack. */
2527 else if (data->partial != 0)
2529 /* Also true if the target says that it's passed in both registers
2530 and on the stack. */
2531 else if (GET_CODE (data->entry_parm) == PARALLEL
2532 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2534 /* Also true if the target says that there's stack allocated for
2535 all register parameters. */
2536 else if (all->reg_parm_stack_space > 0)
2538 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2539 else
2540 return false;
2542 all->stack_args_size.constant += data->locate.size.constant;
2543 if (data->locate.size.var)
2544 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2546 return true;
2549 /* A subroutine of assign_parms. Given that this parameter is allocated
2550 stack space by the ABI, find it. */
2552 static void
2553 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2555 rtx offset_rtx, stack_parm;
2556 unsigned int align, boundary;
2558 /* If we're passing this arg using a reg, make its stack home the
2559 aligned stack slot. */
2560 if (data->entry_parm)
2561 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2562 else
2563 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2565 stack_parm = crtl->args.internal_arg_pointer;
2566 if (offset_rtx != const0_rtx)
2567 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2568 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2570 if (!data->passed_pointer)
2572 set_mem_attributes (stack_parm, parm, 1);
2573 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2574 while promoted mode's size is needed. */
2575 if (data->promoted_mode != BLKmode
2576 && data->promoted_mode != DECL_MODE (parm))
2578 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2579 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2581 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2582 data->promoted_mode);
2583 if (offset)
2584 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2589 boundary = data->locate.boundary;
2590 align = BITS_PER_UNIT;
2592 /* If we're padding upward, we know that the alignment of the slot
2593 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2594 intentionally forcing upward padding. Otherwise we have to come
2595 up with a guess at the alignment based on OFFSET_RTX. */
2596 if (data->locate.where_pad != downward || data->entry_parm)
2597 align = boundary;
2598 else if (CONST_INT_P (offset_rtx))
2600 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2601 align = align & -align;
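/* Worked example with hypothetical numbers: for a 12-byte offset
   and BOUNDARY == 32, align = 12 * 8 | 32 = 96, and 96 & -96 = 32,
   so the slot may only be assumed 32-bit aligned.  The OR folds
   the boundary in as a floor; "align & -align" keeps the lowest
   set bit, i.e. the largest power of two dividing the result.  */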
2603 set_mem_align (stack_parm, align);
2605 if (data->entry_parm)
2606 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2608 data->stack_parm = stack_parm;
2611 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2612 always valid and contiguous. */
2614 static void
2615 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2617 rtx entry_parm = data->entry_parm;
2618 rtx stack_parm = data->stack_parm;
2620 /* If this parm was passed part in regs and part in memory, pretend it
2621 arrived entirely in memory by pushing the register-part onto the stack.
2622 In the special case of a DImode or DFmode that is split, we could put
2623 it together in a pseudoreg directly, but for now that's not worth
2624 bothering with. */
2625 if (data->partial != 0)
2627 /* Handle calls that pass values in multiple non-contiguous
2628 locations. The Irix 6 ABI has examples of this. */
2629 if (GET_CODE (entry_parm) == PARALLEL)
2630 emit_group_store (validize_mem (stack_parm), entry_parm,
2631 data->passed_type,
2632 int_size_in_bytes (data->passed_type));
2633 else
2635 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2636 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2637 data->partial / UNITS_PER_WORD);
2640 entry_parm = stack_parm;
2643 /* If we didn't decide this parm came in a register, by default it came
2644 on the stack. */
2645 else if (entry_parm == NULL)
2646 entry_parm = stack_parm;
2648 /* When an argument is passed in multiple locations, we can't make use
2649 of this information, but we can save some copying if the whole argument
2650 is passed in a single register. */
2651 else if (GET_CODE (entry_parm) == PARALLEL
2652 && data->nominal_mode != BLKmode
2653 && data->passed_mode != BLKmode)
2655 size_t i, len = XVECLEN (entry_parm, 0);
2657 for (i = 0; i < len; i++)
2658 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2659 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2660 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2661 == data->passed_mode)
2662 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2664 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2665 break;
2669 data->entry_parm = entry_parm;
2672 /* A subroutine of assign_parms. Reconstitute any values which were
2673 passed in multiple registers and would fit in a single register. */
2675 static void
2676 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2678 rtx entry_parm = data->entry_parm;
2680 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2681 This can be done with register operations rather than on the
2682 stack, even if we will store the reconstituted parameter on the
2683 stack later. */
2684 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2686 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2687 emit_group_store (parmreg, entry_parm, data->passed_type,
2688 GET_MODE_SIZE (GET_MODE (entry_parm)));
2689 entry_parm = parmreg;
2692 data->entry_parm = entry_parm;
2695 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2696 always valid and properly aligned. */
2698 static void
2699 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2701 rtx stack_parm = data->stack_parm;
2703 /* If we can't trust the parm stack slot to be aligned enough for its
2704 ultimate type, don't use that slot after entry. We'll make another
2705 stack slot, if we need one. */
2706 if (stack_parm
2707 && ((STRICT_ALIGNMENT
2708 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2709 || (data->nominal_type
2710 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2711 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2712 stack_parm = NULL;
2714 /* If parm was passed in memory, and we need to convert it on entry,
2715 don't store it back in that same slot. */
2716 else if (data->entry_parm == stack_parm
2717 && data->nominal_mode != BLKmode
2718 && data->nominal_mode != data->passed_mode)
2719 stack_parm = NULL;
2721 /* If stack protection is in effect for this function, don't leave any
2722 pointers in their passed stack slots. */
2723 else if (crtl->stack_protect_guard
2724 && (flag_stack_protect == 2
2725 || data->passed_pointer
2726 || POINTER_TYPE_P (data->nominal_type)))
2727 stack_parm = NULL;
2729 data->stack_parm = stack_parm;
2732 /* A subroutine of assign_parms. Return true if the current parameter
2733 should be stored as a BLKmode in the current frame. */
2735 static bool
2736 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2738 if (data->nominal_mode == BLKmode)
2739 return true;
2740 if (GET_MODE (data->entry_parm) == BLKmode)
2741 return true;
2743 #ifdef BLOCK_REG_PADDING
2744 /* Only assign_parm_setup_block knows how to deal with register arguments
2745 that are padded at the least significant end. */
2746 if (REG_P (data->entry_parm)
2747 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2748 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2749 == (BYTES_BIG_ENDIAN ? upward : downward)))
2750 return true;
2751 #endif
2753 return false;
2756 /* A subroutine of assign_parms. Arrange for the parameter to be
2757 present and valid in DATA->STACK_RTL. */
2759 static void
2760 assign_parm_setup_block (struct assign_parm_data_all *all,
2761 tree parm, struct assign_parm_data_one *data)
2763 rtx entry_parm = data->entry_parm;
2764 rtx stack_parm = data->stack_parm;
2765 HOST_WIDE_INT size;
2766 HOST_WIDE_INT size_stored;
2768 if (GET_CODE (entry_parm) == PARALLEL)
2769 entry_parm = emit_group_move_into_temps (entry_parm);
2771 size = int_size_in_bytes (data->passed_type);
2772 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2773 if (stack_parm == 0)
2775 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2776 stack_parm = assign_stack_local (BLKmode, size_stored,
2777 DECL_ALIGN (parm));
2778 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2779 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2780 set_mem_attributes (stack_parm, parm, 1);
2783 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2784 calls that pass values in multiple non-contiguous locations. */
2785 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2787 rtx mem;
2789 /* Note that we will be storing an integral number of words.
2790 So we have to be careful to ensure that we allocate an
2791 integral number of words. We do this above when we call
2792 assign_stack_local if space was not allocated in the argument
2793 list. If it was, this will not work if PARM_BOUNDARY is not
2794 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2795 if it becomes a problem. The exception is when BLKmode arrives
2796 with arguments not conforming to word_mode. */
2798 if (data->stack_parm == 0)
2800 else if (GET_CODE (entry_parm) == PARALLEL)
2802 else
2803 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2805 mem = validize_mem (stack_parm);
2807 /* Handle values in multiple non-contiguous locations. */
2808 if (GET_CODE (entry_parm) == PARALLEL)
2810 push_to_sequence2 (all->first_conversion_insn,
2811 all->last_conversion_insn);
2812 emit_group_store (mem, entry_parm, data->passed_type, size);
2813 all->first_conversion_insn = get_insns ();
2814 all->last_conversion_insn = get_last_insn ();
2815 end_sequence ();
2818 else if (size == 0)
2821 /* If SIZE is that of a mode no bigger than a word, just use
2822 that mode's store operation. */
2823 else if (size <= UNITS_PER_WORD)
2825 enum machine_mode mode
2826 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2828 if (mode != BLKmode
2829 #ifdef BLOCK_REG_PADDING
2830 && (size == UNITS_PER_WORD
2831 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2832 != (BYTES_BIG_ENDIAN ? upward : downward)))
2833 #endif
2836 rtx reg;
2838 /* We are really truncating a word_mode value containing
2839 SIZE bytes into a value of mode MODE. If such an
2840 operation requires no actual instructions, we can refer
2841 to the value directly in mode MODE, otherwise we must
2842 start with the register in word_mode and explicitly
2843 convert it. */
2844 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2845 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2846 else
2848 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2849 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2851 emit_move_insn (change_address (mem, mode, 0), reg);
2854 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2855 machine must be aligned to the left before storing
2856 to memory. Note that the previous test doesn't
2857 handle all cases (e.g. SIZE == 3). */
2858 else if (size != UNITS_PER_WORD
2859 #ifdef BLOCK_REG_PADDING
2860 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2861 == downward)
2862 #else
2863 && BYTES_BIG_ENDIAN
2864 #endif
2867 rtx tem, x;
2868 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2869 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2871 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
2872 tem = change_address (mem, word_mode, 0);
2873 emit_move_insn (tem, x);
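/* Worked example with hypothetical numbers: on a big-endian
   machine with UNITS_PER_WORD == 4 and SIZE == 3, the payload
   occupies the three least significant bytes of the register, so
   BY = (4 - 3) * 8 = 8 and the left shift moves it to the most
   significant end before the word_mode store, leaving the value
   in the first three bytes of the slot.  */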
2875 else
2876 move_block_from_reg (REGNO (entry_parm), mem,
2877 size_stored / UNITS_PER_WORD);
2879 else
2880 move_block_from_reg (REGNO (entry_parm), mem,
2881 size_stored / UNITS_PER_WORD);
2883 else if (data->stack_parm == 0)
2885 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2886 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2887 BLOCK_OP_NORMAL);
2888 all->first_conversion_insn = get_insns ();
2889 all->last_conversion_insn = get_last_insn ();
2890 end_sequence ();
2893 data->stack_parm = stack_parm;
2894 SET_DECL_RTL (parm, stack_parm);
2897 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2898 parameter. Get it there. Perform all ABI specified conversions. */
2900 static void
2901 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2902 struct assign_parm_data_one *data)
2904 rtx parmreg, validated_mem;
2905 rtx equiv_stack_parm;
2906 enum machine_mode promoted_nominal_mode;
2907 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2908 bool did_conversion = false;
2909 bool need_conversion, moved;
2911 /* Store the parm in a pseudoregister during the function, but we may
2912 need to do it in a wider mode. Using 2 here makes the result
2913 consistent with promote_decl_mode and thus expand_expr_real_1. */
2914 promoted_nominal_mode
2915 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
2916 TREE_TYPE (current_function_decl), 2);
2918 parmreg = gen_reg_rtx (promoted_nominal_mode);
2920 if (!DECL_ARTIFICIAL (parm))
2921 mark_user_reg (parmreg);
2923 /* If this was an item that we received a pointer to,
2924 set DECL_RTL appropriately. */
2925 if (data->passed_pointer)
2927 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2928 set_mem_attributes (x, parm, 1);
2929 SET_DECL_RTL (parm, x);
2931 else
2932 SET_DECL_RTL (parm, parmreg);
2934 assign_parm_remove_parallels (data);
2936 /* Copy the value into the register, thus bridging between
2937 assign_parm_find_data_types and expand_expr_real_1. */
2939 equiv_stack_parm = data->stack_parm;
2940 validated_mem = validize_mem (data->entry_parm);
2942 need_conversion = (data->nominal_mode != data->passed_mode
2943 || promoted_nominal_mode != data->promoted_mode);
2944 moved = false;
2946 if (need_conversion
2947 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2948 && data->nominal_mode == data->passed_mode
2949 && data->nominal_mode == GET_MODE (data->entry_parm))
2951 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2952 mode, by the caller. We now have to convert it to
2953 NOMINAL_MODE, if different. However, PARMREG may be in
2954 a different mode than NOMINAL_MODE if it is being stored
2955 promoted.
2957 If ENTRY_PARM is a hard register, it might be in a register
2958 not valid for operating in its mode (e.g., an odd-numbered
2959 register for a DFmode). In that case, moves are the only
2960 thing valid, so we can't do a convert from there. This
2961 occurs when the calling sequence allows such misaligned
2962 usage.
2964 In addition, the conversion may involve a call, which could
2965 clobber parameters which haven't been copied to pseudo
2966 registers yet.
2968 First, we try to emit an insn which performs the necessary
2969 conversion. We verify that this insn does not clobber any
2970 hard registers. */
2972 enum insn_code icode;
2973 rtx op0, op1;
2975 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
2976 unsignedp);
2978 op0 = parmreg;
2979 op1 = validated_mem;
2980 if (icode != CODE_FOR_nothing
2981 && insn_operand_matches (icode, 0, op0)
2982 && insn_operand_matches (icode, 1, op1))
2984 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
2985 rtx insn, insns;
2986 HARD_REG_SET hardregs;
2988 start_sequence ();
2989 insn = gen_extend_insn (op0, op1, promoted_nominal_mode,
2990 data->passed_mode, unsignedp);
2991 emit_insn (insn);
2992 insns = get_insns ();
2994 moved = true;
2995 CLEAR_HARD_REG_SET (hardregs);
2996 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
2998 if (INSN_P (insn))
2999 note_stores (PATTERN (insn), record_hard_reg_sets,
3000 &hardregs);
3001 if (!hard_reg_set_empty_p (hardregs))
3002 moved = false;
3005 end_sequence ();
3007 if (moved)
3009 emit_insn (insns);
3010 if (equiv_stack_parm != NULL_RTX)
3011 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3012 equiv_stack_parm);
3017 if (moved)
3018 /* Nothing to do. */
3020 else if (need_conversion)
3022 /* We did not have an insn to convert directly, or the sequence
3023 generated appeared unsafe. We must first copy the parm to a
3024 pseudo reg, and save the conversion until after all
3025 parameters have been moved. */
3027 int save_tree_used;
3028 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3030 emit_move_insn (tempreg, validated_mem);
3032 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3033 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3035 if (GET_CODE (tempreg) == SUBREG
3036 && GET_MODE (tempreg) == data->nominal_mode
3037 && REG_P (SUBREG_REG (tempreg))
3038 && data->nominal_mode == data->passed_mode
3039 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3040 && GET_MODE_SIZE (GET_MODE (tempreg))
3041 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3043 /* The argument is already sign/zero extended, so note it
3044 into the subreg. */
3045 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3046 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
3049 /* TREE_USED gets set erroneously during expand_assignment. */
3050 save_tree_used = TREE_USED (parm);
3051 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3052 TREE_USED (parm) = save_tree_used;
3053 all->first_conversion_insn = get_insns ();
3054 all->last_conversion_insn = get_last_insn ();
3055 end_sequence ();
3057 did_conversion = true;
3059 else
3060 emit_move_insn (parmreg, validated_mem);
3062 /* If we were passed a pointer but the actual value can safely live
3063 in a register, put it in one. */
3064 if (data->passed_pointer
3065 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3066 /* If by-reference argument was promoted, demote it. */
3067 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
3068 || use_register_for_decl (parm)))
3070 /* We can't use nominal_mode, because it will have been set to
3071 Pmode above. We must use the actual mode of the parm. */
3072 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3073 mark_user_reg (parmreg);
3075 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3077 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3078 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3080 push_to_sequence2 (all->first_conversion_insn,
3081 all->last_conversion_insn);
3082 emit_move_insn (tempreg, DECL_RTL (parm));
3083 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3084 emit_move_insn (parmreg, tempreg);
3085 all->first_conversion_insn = get_insns ();
3086 all->last_conversion_insn = get_last_insn ();
3087 end_sequence ();
3089 did_conversion = true;
3091 else
3092 emit_move_insn (parmreg, DECL_RTL (parm));
3094 SET_DECL_RTL (parm, parmreg);
3096 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3097 now the parm. */
3098 data->stack_parm = NULL;
3101 /* Mark the register as eliminable if we did no conversion and it was
3102 copied from memory at a fixed offset, and the arg pointer was not
3103 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3104 offset formed an invalid address, such memory-equivalences as we
3105 make here would screw up life analysis for it. */
3106 if (data->nominal_mode == data->passed_mode
3107 && !did_conversion
3108 && data->stack_parm != 0
3109 && MEM_P (data->stack_parm)
3110 && data->locate.offset.var == 0
3111 && reg_mentioned_p (virtual_incoming_args_rtx,
3112 XEXP (data->stack_parm, 0)))
3114 rtx linsn = get_last_insn ();
3115 rtx sinsn, set;
3117 /* Mark complex types separately. */
3118 if (GET_CODE (parmreg) == CONCAT)
3120 enum machine_mode submode
3121 = GET_MODE_INNER (GET_MODE (parmreg));
3122 int regnor = REGNO (XEXP (parmreg, 0));
3123 int regnoi = REGNO (XEXP (parmreg, 1));
3124 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3125 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3126 GET_MODE_SIZE (submode));
3128 /* Scan backwards for the set of the real and
3129 imaginary parts. */
3130 for (sinsn = linsn; sinsn != 0;
3131 sinsn = prev_nonnote_insn (sinsn))
3133 set = single_set (sinsn);
3134 if (set == 0)
3135 continue;
3137 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3138 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3139 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3140 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3143 else if ((set = single_set (linsn)) != 0
3144 && SET_DEST (set) == parmreg)
3145 set_unique_reg_note (linsn, REG_EQUIV, equiv_stack_parm);
3148 /* For pointer data type, suggest pointer register. */
3149 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3150 mark_reg_pointer (parmreg,
3151 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3154 /* A subroutine of assign_parms. Allocate stack space to hold the current
3155 parameter. Get it there. Perform all ABI specified conversions. */
3157 static void
3158 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3159 struct assign_parm_data_one *data)
3161 /* Value must be stored in the stack slot STACK_PARM during function
3162 execution. */
3163 bool to_conversion = false;
3165 assign_parm_remove_parallels (data);
3167 if (data->promoted_mode != data->nominal_mode)
3169 /* Conversion is required. */
3170 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3172 emit_move_insn (tempreg, validize_mem (data->entry_parm));
3174 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3175 to_conversion = true;
3177 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3178 TYPE_UNSIGNED (TREE_TYPE (parm)));
3180 if (data->stack_parm)
3182 int offset = subreg_lowpart_offset (data->nominal_mode,
3183 GET_MODE (data->stack_parm));
3184 /* ??? This may need a big-endian conversion on sparc64. */
3185 data->stack_parm
3186 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3187 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3188 set_mem_offset (data->stack_parm,
3189 MEM_OFFSET (data->stack_parm) + offset);
3193 if (data->entry_parm != data->stack_parm)
3195 rtx src, dest;
3197 if (data->stack_parm == 0)
3199 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3200 GET_MODE (data->entry_parm),
3201 TYPE_ALIGN (data->passed_type));
3202 data->stack_parm
3203 = assign_stack_local (GET_MODE (data->entry_parm),
3204 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3205 align);
3206 set_mem_attributes (data->stack_parm, parm, 1);
3209 dest = validize_mem (data->stack_parm);
3210 src = validize_mem (data->entry_parm);
3212 if (MEM_P (src))
3214 /* Use a block move to handle potentially misaligned entry_parm. */
3215 if (!to_conversion)
3216 push_to_sequence2 (all->first_conversion_insn,
3217 all->last_conversion_insn);
3218 to_conversion = true;
3220 emit_block_move (dest, src,
3221 GEN_INT (int_size_in_bytes (data->passed_type)),
3222 BLOCK_OP_NORMAL);
3224 else
3225 emit_move_insn (dest, src);
3228 if (to_conversion)
3230 all->first_conversion_insn = get_insns ();
3231 all->last_conversion_insn = get_last_insn ();
3232 end_sequence ();
3235 SET_DECL_RTL (parm, data->stack_parm);
3238 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3239 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3241 static void
3242 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3243 VEC(tree, heap) *fnargs)
3245 tree parm;
3246 tree orig_fnargs = all->orig_fnargs;
3247 unsigned i = 0;
3249 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3251 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3252 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3254 rtx tmp, real, imag;
3255 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3257 real = DECL_RTL (VEC_index (tree, fnargs, i));
3258 imag = DECL_RTL (VEC_index (tree, fnargs, i + 1));
3259 if (inner != GET_MODE (real))
3261 real = gen_lowpart_SUBREG (inner, real);
3262 imag = gen_lowpart_SUBREG (inner, imag);
3265 if (TREE_ADDRESSABLE (parm))
3267 rtx rmem, imem;
3268 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3269 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3270 DECL_MODE (parm),
3271 TYPE_ALIGN (TREE_TYPE (parm)));
3273 /* split_complex_arg put the real and imag parts in
3274 pseudos. Move them to memory. */
3275 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3276 set_mem_attributes (tmp, parm, 1);
3277 rmem = adjust_address_nv (tmp, inner, 0);
3278 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3279 push_to_sequence2 (all->first_conversion_insn,
3280 all->last_conversion_insn);
3281 emit_move_insn (rmem, real);
3282 emit_move_insn (imem, imag);
3283 all->first_conversion_insn = get_insns ();
3284 all->last_conversion_insn = get_last_insn ();
3285 end_sequence ();
3287 else
3288 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3289 SET_DECL_RTL (parm, tmp);
3291 real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i));
3292 imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1));
3293 if (inner != GET_MODE (real))
3295 real = gen_lowpart_SUBREG (inner, real);
3296 imag = gen_lowpart_SUBREG (inner, imag);
3298 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3299 set_decl_incoming_rtl (parm, tmp, false);
3300 i++;
3305 /* Assign RTL expressions to the function's parameters. This may involve
3306 copying them into registers and using those registers as the DECL_RTL. */
3308 static void
3309 assign_parms (tree fndecl)
3311 struct assign_parm_data_all all;
3312 tree parm;
3313 VEC(tree, heap) *fnargs;
3314 unsigned i;
3316 crtl->args.internal_arg_pointer
3317 = targetm.calls.internal_arg_pointer ();
3319 assign_parms_initialize_all (&all);
3320 fnargs = assign_parms_augmented_arg_list (&all);
3322 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
3324 struct assign_parm_data_one data;
3326 /* Extract the type of PARM; adjust it according to ABI. */
3327 assign_parm_find_data_types (&all, parm, &data);
3329 /* Early out for errors and void parameters. */
3330 if (data.passed_mode == VOIDmode)
3332 SET_DECL_RTL (parm, const0_rtx);
3333 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3334 continue;
3337 /* Estimate stack alignment from parameter alignment. */
3338 if (SUPPORTS_STACK_ALIGNMENT)
3340 unsigned int align
3341 = targetm.calls.function_arg_boundary (data.promoted_mode,
3342 data.passed_type);
3343 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3344 align);
3345 if (TYPE_ALIGN (data.nominal_type) > align)
3346 align = MINIMUM_ALIGNMENT (data.nominal_type,
3347 TYPE_MODE (data.nominal_type),
3348 TYPE_ALIGN (data.nominal_type));
3349 if (crtl->stack_alignment_estimated < align)
3351 gcc_assert (!crtl->stack_realign_processed);
3352 crtl->stack_alignment_estimated = align;
3356 if (cfun->stdarg && !DECL_CHAIN (parm))
3357 assign_parms_setup_varargs (&all, &data, false);
3359 /* Find out where the parameter arrives in this function. */
3360 assign_parm_find_entry_rtl (&all, &data);
3362 /* Find out where stack space for this parameter might be. */
3363 if (assign_parm_is_stack_parm (&all, &data))
3365 assign_parm_find_stack_rtl (parm, &data);
3366 assign_parm_adjust_entry_rtl (&data);
3369 /* Record permanently how this parm was passed. */
3370 if (data.passed_pointer)
3372 rtx incoming_rtl
3373 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3374 data.entry_parm);
3375 set_decl_incoming_rtl (parm, incoming_rtl, true);
3377 else
3378 set_decl_incoming_rtl (parm, data.entry_parm, false);
3380 /* Update info on where next arg arrives in registers. */
3381 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3382 data.passed_type, data.named_arg);
3384 assign_parm_adjust_stack_rtl (&data);
3386 if (assign_parm_setup_block_p (&data))
3387 assign_parm_setup_block (&all, parm, &data);
3388 else if (data.passed_pointer || use_register_for_decl (parm))
3389 assign_parm_setup_reg (&all, parm, &data);
3390 else
3391 assign_parm_setup_stack (&all, parm, &data);
3394 if (targetm.calls.split_complex_arg)
3395 assign_parms_unsplit_complex (&all, fnargs);
3397 VEC_free (tree, heap, fnargs);
3399 /* Output all parameter conversion instructions (possibly including calls)
3400 now that all parameters have been copied out of hard registers. */
3401 emit_insn (all.first_conversion_insn);
3403 /* Estimate reload stack alignment from scalar return mode. */
3404 if (SUPPORTS_STACK_ALIGNMENT)
3406 if (DECL_RESULT (fndecl))
3408 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3409 enum machine_mode mode = TYPE_MODE (type);
3411 if (mode != BLKmode
3412 && mode != VOIDmode
3413 && !AGGREGATE_TYPE_P (type))
3415 unsigned int align = GET_MODE_ALIGNMENT (mode);
3416 if (crtl->stack_alignment_estimated < align)
3418 gcc_assert (!crtl->stack_realign_processed);
3419 crtl->stack_alignment_estimated = align;
3425 /* If we are receiving a struct value address as the first argument, set up
3426 the RTL for the function result. As this might require code to convert
3427 the transmitted address to Pmode, we do this here to ensure that possible
3428 preliminary conversions of the address have been emitted already. */
3429 if (all.function_result_decl)
3431 tree result = DECL_RESULT (current_function_decl);
3432 rtx addr = DECL_RTL (all.function_result_decl);
3433 rtx x;
3435 if (DECL_BY_REFERENCE (result))
3437 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3438 x = addr;
3440 else
3442 SET_DECL_VALUE_EXPR (result,
3443 build1 (INDIRECT_REF, TREE_TYPE (result),
3444 all.function_result_decl));
3445 addr = convert_memory_address (Pmode, addr);
3446 x = gen_rtx_MEM (DECL_MODE (result), addr);
3447 set_mem_attributes (x, result, 1);
3450 DECL_HAS_VALUE_EXPR_P (result) = 1;
3452 SET_DECL_RTL (result, x);
3455 /* We have aligned all the args, so add space for the pretend args. */
3456 crtl->args.pretend_args_size = all.pretend_args_size;
3457 all.stack_args_size.constant += all.extra_pretend_bytes;
3458 crtl->args.size = all.stack_args_size.constant;
3460 /* Adjust function incoming argument size for alignment and
3461 minimum length. */
3463 #ifdef REG_PARM_STACK_SPACE
3464 crtl->args.size = MAX (crtl->args.size,
3465 REG_PARM_STACK_SPACE (fndecl));
3466 #endif
3468 crtl->args.size = CEIL_ROUND (crtl->args.size,
3469 PARM_BOUNDARY / BITS_PER_UNIT);
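/* Worked example with hypothetical numbers: with PARM_BOUNDARY ==
   32 and BITS_PER_UNIT == 8, an argument block of 13 bytes rounds
   up as CEIL_ROUND (13, 4) = (13 + 3) & ~3 = 16 bytes.  */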
3471 #ifdef ARGS_GROW_DOWNWARD
3472 crtl->args.arg_offset_rtx
3473 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3474 : expand_expr (size_diffop (all.stack_args_size.var,
3475 size_int (-all.stack_args_size.constant)),
3476 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3477 #else
3478 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3479 #endif
3481 /* See how many bytes, if any, of its args a function should try to pop
3482 on return. */
3484 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3485 TREE_TYPE (fndecl),
3486 crtl->args.size);
3488 /* For a stdarg function, save info about
3489 regs and stack space used by the named args. */
3491 crtl->args.info = all.args_so_far_v;
3493 /* Set the rtx used for the function return value. Put this in its
3494 own variable so any optimizers that need this information don't have
3495 to include tree.h. Do this here so it gets done when an inlined
3496 function gets output. */
3498 crtl->return_rtx
3499 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3500 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3502 /* If the scalar return value was computed in a pseudo-reg, or was a named
3503 return value that got dumped to the stack, copy that to the hard
3504 return register. */
3505 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3507 tree decl_result = DECL_RESULT (fndecl);
3508 rtx decl_rtl = DECL_RTL (decl_result);
3510 if (REG_P (decl_rtl)
3511 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3512 : DECL_REGISTER (decl_result))
3514 rtx real_decl_rtl;
3516 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3517 fndecl, true);
3518 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3519 /* The delay slot scheduler assumes that crtl->return_rtx
3520 holds the hard register containing the return value, not a
3521 temporary pseudo. */
3522 crtl->return_rtx = real_decl_rtl;
3527 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3528 For all seen types, gimplify their sizes. */
3530 static tree
3531 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3533 tree t = *tp;
3535 *walk_subtrees = 0;
3536 if (TYPE_P (t))
3538 if (POINTER_TYPE_P (t))
3539 *walk_subtrees = 1;
3540 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3541 && !TYPE_SIZES_GIMPLIFIED (t))
3543 gimplify_type_sizes (t, (gimple_seq *) data);
3544 *walk_subtrees = 1;
3548 return NULL;
3551 /* Gimplify the parameter list for current_function_decl. This involves
3552 evaluating SAVE_EXPRs of variable sized parameters and generating code
3553 to implement callee-copies reference parameters. Returns a sequence of
3554 statements to add to the beginning of the function. */
3556 gimple_seq
3557 gimplify_parameters (void)
3559 struct assign_parm_data_all all;
3560 tree parm;
3561 gimple_seq stmts = NULL;
3562 VEC(tree, heap) *fnargs;
3563 unsigned i;
3565 assign_parms_initialize_all (&all);
3566 fnargs = assign_parms_augmented_arg_list (&all);
3568 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
3570 struct assign_parm_data_one data;
3572 /* Extract the type of PARM; adjust it according to ABI. */
3573 assign_parm_find_data_types (&all, parm, &data);
3575 /* Early out for errors and void parameters. */
3576 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3577 continue;
3579 /* Update info on where next arg arrives in registers. */
3580 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3581 data.passed_type, data.named_arg);
3583 /* ??? Once upon a time variable_size stuffed parameter list
3584 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3585 turned out to be less than manageable in the gimple world.
3586 Now we have to hunt them down ourselves. */
3587 walk_tree_without_duplicates (&data.passed_type,
3588 gimplify_parm_type, &stmts);
3590 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3592 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3593 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3596 if (data.passed_pointer)
3598 tree type = TREE_TYPE (data.passed_type);
3599 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
3600 type, data.named_arg))
3602 tree local, t;
3604 /* For constant-sized objects, this is trivial; for
3605 variable-sized objects, we have to play games. */
3606 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3607 && !(flag_stack_check == GENERIC_STACK_CHECK
3608 && compare_tree_int (DECL_SIZE_UNIT (parm),
3609 STACK_CHECK_MAX_VAR_SIZE) > 0))
3611 local = create_tmp_var (type, get_name (parm));
3612 DECL_IGNORED_P (local) = 0;
3613 /* If PARM was addressable, move that flag over
3614 to the local copy, as its address will be taken,
3615 not the PARM's. Keep the parm's address-taken flag set,
3616 as we'll query it during gimplification. */
3617 if (TREE_ADDRESSABLE (parm))
3618 TREE_ADDRESSABLE (local) = 1;
3619 else if (TREE_CODE (type) == COMPLEX_TYPE
3620 || TREE_CODE (type) == VECTOR_TYPE)
3621 DECL_GIMPLE_REG_P (local) = 1;
3623 else
3625 tree ptr_type, addr;
3627 ptr_type = build_pointer_type (type);
3628 addr = create_tmp_reg (ptr_type, get_name (parm));
3629 DECL_IGNORED_P (addr) = 0;
3630 local = build_fold_indirect_ref (addr);
3632 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3633 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
3634 size_int (DECL_ALIGN (parm)));
3636 /* The call has been built for a variable-sized object. */
3637 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3638 t = fold_convert (ptr_type, t);
3639 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3640 gimplify_and_add (t, &stmts);
3643 gimplify_assign (local, parm, &stmts);
3645 SET_DECL_VALUE_EXPR (parm, local);
3646 DECL_HAS_VALUE_EXPR_P (parm) = 1;
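/* Illustrative shape of the statements built above for a
   callee-copied, variable-sized parameter P (a sketch with
   invented temporary names, not actual output):

     addr.0 = __builtin_alloca_with_align (D.size, DECL_ALIGN (P));
     *addr.0 = *P;

   after which DECL_VALUE_EXPR (P) refers to *addr.0 rather than
   to the incoming reference.  */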
3651 VEC_free (tree, heap, fnargs);
3653 return stmts;
3656 /* Compute the size and offset from the start of the stacked arguments for a
3657 parm passed in mode PASSED_MODE and with type TYPE.
3659 INITIAL_OFFSET_PTR points to the current offset into the stacked
3660 arguments.
3662 The starting offset and size for this parm are returned in
3663 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3664 nonzero, the offset is that of the stack slot, which is returned in
3665 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3666 padding required from the initial offset ptr to the stack slot.
3668 IN_REGS is nonzero if the argument will be passed in registers. It will
3669 never be set if REG_PARM_STACK_SPACE is not defined.
3671 FNDECL is the function in which the argument was defined.
3673 There are two types of rounding that are done. The first, controlled by
3674 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3675 argument list to be aligned to the specified boundary (in bits). This
3676 rounding affects the initial and starting offsets, but not the argument
3677 size.
3679 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3680 optionally rounds the size of the parm to PARM_BOUNDARY. The
3681 initial offset is not affected by this rounding, while the size always
3682 is and the starting offset may be. */
3684 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3685 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3686 callers pass in the total size of args so far as
3687 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3689 void
3690 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3691 int partial, tree fndecl ATTRIBUTE_UNUSED,
3692 struct args_size *initial_offset_ptr,
3693 struct locate_and_pad_arg_data *locate)
3695 tree sizetree;
3696 enum direction where_pad;
3697 unsigned int boundary, round_boundary;
3698 int reg_parm_stack_space = 0;
3699 int part_size_in_regs;
3701 #ifdef REG_PARM_STACK_SPACE
3702 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3704 /* If we have found a stack parm before we reach the end of the
3705 area reserved for registers, skip that area. */
3706 if (! in_regs)
3708 if (reg_parm_stack_space > 0)
3710 if (initial_offset_ptr->var)
3712 initial_offset_ptr->var
3713 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3714 ssize_int (reg_parm_stack_space));
3715 initial_offset_ptr->constant = 0;
3717 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3718 initial_offset_ptr->constant = reg_parm_stack_space;
3721 #endif /* REG_PARM_STACK_SPACE */
3723 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3725 sizetree
3726 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3727 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3728 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
3729 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3730 type);
3731 locate->where_pad = where_pad;
3733 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3734 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3735 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3737 locate->boundary = boundary;
3739 if (SUPPORTS_STACK_ALIGNMENT)
3741 /* stack_alignment_estimated can't change after stack has been
3742 realigned. */
3743 if (crtl->stack_alignment_estimated < boundary)
3745 if (!crtl->stack_realign_processed)
3746 crtl->stack_alignment_estimated = boundary;
3747 else
3749 /* If stack is realigned and stack alignment value
3750 hasn't been finalized, it is OK not to increase
3751 stack_alignment_estimated. The bigger alignment
3752 requirement is recorded in stack_alignment_needed
3753 below. */
3754 gcc_assert (!crtl->stack_realign_finalized
3755 && crtl->stack_realign_needed);
3760 /* Remember if the outgoing parameter requires extra alignment on the
3761 calling function side. */
3762 if (crtl->stack_alignment_needed < boundary)
3763 crtl->stack_alignment_needed = boundary;
3764 if (crtl->preferred_stack_boundary < boundary)
3765 crtl->preferred_stack_boundary = boundary;
3767 #ifdef ARGS_GROW_DOWNWARD
3768 locate->slot_offset.constant = -initial_offset_ptr->constant;
3769 if (initial_offset_ptr->var)
3770 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3771 initial_offset_ptr->var);
3774 tree s2 = sizetree;
3775 if (where_pad != none
3776 && (!host_integerp (sizetree, 1)
3777 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3778 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
3779 SUB_PARM_SIZE (locate->slot_offset, s2);
3782 locate->slot_offset.constant += part_size_in_regs;
3784 if (!in_regs
3785 #ifdef REG_PARM_STACK_SPACE
3786 || REG_PARM_STACK_SPACE (fndecl) > 0
3787 #endif
3789 pad_to_arg_alignment (&locate->slot_offset, boundary,
3790 &locate->alignment_pad);
3792 locate->size.constant = (-initial_offset_ptr->constant
3793 - locate->slot_offset.constant);
3794 if (initial_offset_ptr->var)
3795 locate->size.var = size_binop (MINUS_EXPR,
3796 size_binop (MINUS_EXPR,
3797 ssize_int (0),
3798 initial_offset_ptr->var),
3799 locate->slot_offset.var);
3801 /* Pad_below needs the pre-rounded size to know how much to pad
3802 below. */
3803 locate->offset = locate->slot_offset;
3804 if (where_pad == downward)
3805 pad_below (&locate->offset, passed_mode, sizetree);
3807 #else /* !ARGS_GROW_DOWNWARD */
3808 if (!in_regs
3809 #ifdef REG_PARM_STACK_SPACE
3810 || REG_PARM_STACK_SPACE (fndecl) > 0
3811 #endif
3813 pad_to_arg_alignment (initial_offset_ptr, boundary,
3814 &locate->alignment_pad);
3815 locate->slot_offset = *initial_offset_ptr;
3817 #ifdef PUSH_ROUNDING
3818 if (passed_mode != BLKmode)
3819 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3820 #endif
3822 /* Pad_below needs the pre-rounded size to know how much to pad below
3823 so this must be done before rounding up. */
3824 locate->offset = locate->slot_offset;
3825 if (where_pad == downward)
3826 pad_below (&locate->offset, passed_mode, sizetree);
3828 if (where_pad != none
3829 && (!host_integerp (sizetree, 1)
3830 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3831 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
3833 ADD_PARM_SIZE (locate->size, sizetree);
3835 locate->size.constant -= part_size_in_regs;
3836 #endif /* ARGS_GROW_DOWNWARD */
3838 #ifdef FUNCTION_ARG_OFFSET
3839 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3840 #endif
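/* A worked example of the args-grow-upward path, with illustrative
   numbers and STACK_POINTER_OFFSET == 0: a 1-byte parm with
   boundary == 64 bits, round_boundary == PARM_BOUNDARY == 32 and
   *INITIAL_OFFSET_PTR == 5 gets LOCATE->SLOT_OFFSET.CONSTANT
   == CEIL_ROUND (5, 8) == 8 and LOCATE->ALIGNMENT_PAD.CONSTANT == 3;
   since where_pad != none, sizetree is rounded up to 32 bits, so
   LOCATE->SIZE.CONSTANT == 4.  */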
3843 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3844 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3846 static void
3847 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3848 struct args_size *alignment_pad)
3850 tree save_var = NULL_TREE;
3851 HOST_WIDE_INT save_constant = 0;
3852 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3853 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3855 #ifdef SPARC_STACK_BOUNDARY_HACK
3856 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3857 the real alignment of %sp. However, when it does this, the
3858 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3859 if (SPARC_STACK_BOUNDARY_HACK)
3860 sp_offset = 0;
3861 #endif
3863 if (boundary > PARM_BOUNDARY)
3865 save_var = offset_ptr->var;
3866 save_constant = offset_ptr->constant;
3869 alignment_pad->var = NULL_TREE;
3870 alignment_pad->constant = 0;
3872 if (boundary > BITS_PER_UNIT)
3874 if (offset_ptr->var)
3876 tree sp_offset_tree = ssize_int (sp_offset);
3877 tree offset = size_binop (PLUS_EXPR,
3878 ARGS_SIZE_TREE (*offset_ptr),
3879 sp_offset_tree);
3880 #ifdef ARGS_GROW_DOWNWARD
3881 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3882 #else
3883 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3884 #endif
3886 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3887 /* ARGS_SIZE_TREE includes constant term. */
3888 offset_ptr->constant = 0;
3889 if (boundary > PARM_BOUNDARY)
3890 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3891 save_var);
3893 else
3895 offset_ptr->constant = -sp_offset +
3896 #ifdef ARGS_GROW_DOWNWARD
3897 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3898 #else
3899 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3900 #endif
3901 if (boundary > PARM_BOUNDARY)
3902 alignment_pad->constant = offset_ptr->constant - save_constant;
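/* Illustrative arithmetic for the constant-offset case above, with
   STACK_POINTER_OFFSET == 0: boundary == 64 bits gives
   boundary_in_bytes == 8, so an offset of 13 becomes
   CEIL_ROUND (13, 8) == 16 when args grow upward, or
   FLOOR_ROUND (13, 8) == 8 when they grow downward; when boundary
   exceeds PARM_BOUNDARY, the difference from the saved offset
   (16 - 13 == 3 bytes in the upward case) is recorded in
   ALIGNMENT_PAD.  */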
3907 static void
3908 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3910 if (passed_mode != BLKmode)
3912 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3913 offset_ptr->constant
3914 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3915 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3916 - GET_MODE_SIZE (passed_mode));
3918 else
3920 if (TREE_CODE (sizetree) != INTEGER_CST
3921 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3923 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3924 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3925 /* Add it in. */
3926 ADD_PARM_SIZE (*offset_ptr, s2);
3927 SUB_PARM_SIZE (*offset_ptr, sizetree);
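/* Example with illustrative numbers, on a byte-addressed target with
   BITS_PER_UNIT == 8: for passed_mode == HImode (16 bits) and
   PARM_BOUNDARY == 32, the slot is rounded up to 4 bytes while the
   value occupies 2, so the offset is advanced by 4 - 2 == 2 bytes,
   past the low-order padding, to where the value itself starts.  */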
3933 /* True if register REGNO was alive at a place where `setjmp' was
3934 called and was set more than once or is an argument. Such regs may
3935 be clobbered by `longjmp'. */
3937 static bool
3938 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3940 /* There appear to be cases where some local vars never reach the
3941 backend but have bogus regnos. */
3942 if (regno >= max_reg_num ())
3943 return false;
3945 return ((REG_N_SETS (regno) > 1
3946 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3947 && REGNO_REG_SET_P (setjmp_crosses, regno));
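/* An illustrative (non-GCC) fragment of the hazard checked above:

     jmp_buf buf;
     int
     f (void)
     {
       int i = 1;               // I is set more than once ...
       if (setjmp (buf))
         return i;              // ... and may observe 1 or 2 here
       i = 2;
       g ();                    // may longjmp (buf, 1)
       return 0;
     }

   If I is allocated to a call-saved hard register, longjmp may
   restore that register to its setjmp-time contents, which is why
   such regs are reported by regno_clobbered_at_setjmp.  */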
3950 /* Walk the tree of blocks describing the binding levels within a
3951 function and warn about variables that might be killed by setjmp or
3952 vfork. This is done after calling flow_analysis and before register
3953 allocation, since register allocation will replace the pseudo-regs
3954 with hard regs. */
3956 static void
3957 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3959 tree decl, sub;
3961 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
3963 if (TREE_CODE (decl) == VAR_DECL
3964 && DECL_RTL_SET_P (decl)
3965 && REG_P (DECL_RTL (decl))
3966 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3967 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3968 " %<longjmp%> or %<vfork%>", decl);
3971 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3972 setjmp_vars_warning (setjmp_crosses, sub);
3975 /* Do the appropriate part of setjmp_vars_warning
3976 but for arguments instead of local variables. */
3978 static void
3979 setjmp_args_warning (bitmap setjmp_crosses)
3981 tree decl;
3982 for (decl = DECL_ARGUMENTS (current_function_decl);
3983 decl; decl = DECL_CHAIN (decl))
3984 if (DECL_RTL (decl) != 0
3985 && REG_P (DECL_RTL (decl))
3986 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3987 warning (OPT_Wclobbered,
3988 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3989 decl);
3992 /* Generate warning messages for variables live across setjmp. */
3994 void
3995 generate_setjmp_warnings (void)
3997 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3999 if (n_basic_blocks == NUM_FIXED_BLOCKS
4000 || bitmap_empty_p (setjmp_crosses))
4001 return;
4003 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4004 setjmp_args_warning (setjmp_crosses);
4008 /* Reverse the order of elements in the fragment chain T of blocks,
4009 and return the new head of the chain (old last element). */
4011 static tree
4012 block_fragments_nreverse (tree t)
4014 tree prev = 0, block, next;
4015 for (block = t; block; block = next)
4017 next = BLOCK_FRAGMENT_CHAIN (block);
4018 BLOCK_FRAGMENT_CHAIN (block) = prev;
4019 prev = block;
4021 return prev;
4024 /* Reverse the order of elements in the chain T of blocks,
4025 and return the new head of the chain (old last element).
4026 Also do the same on subblocks and reverse the order of elements
4027 in BLOCK_FRAGMENT_CHAIN as well. */
4029 static tree
4030 blocks_nreverse_all (tree t)
4032 tree prev = 0, block, next;
4033 for (block = t; block; block = next)
4035 next = BLOCK_CHAIN (block);
4036 BLOCK_CHAIN (block) = prev;
4037 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4038 if (BLOCK_FRAGMENT_CHAIN (block)
4039 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4040 BLOCK_FRAGMENT_CHAIN (block)
4041 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4042 prev = block;
4044 return prev;
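/* The pointer-reversal idiom used above and in blocks_nreverse,
   shown standalone on a plain singly-linked list ("node" is an
   illustrative type, not part of GCC):

     struct node { struct node *next; };

     static struct node *
     nreverse (struct node *t)
     {
       struct node *prev = 0, *cur, *next;
       for (cur = t; cur; cur = next)
         {
           next = cur->next;   // save the rest of the chain
           cur->next = prev;   // point this node backwards
           prev = cur;
         }
       return prev;            // old tail is the new head
     }
*/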
4048 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4049 and create duplicate blocks. */
4050 /* ??? Need an option to either create block fragments or to create
4051 abstract origin duplicates of a source block. It really depends
4052 on what optimization has been performed. */
4054 void
4055 reorder_blocks (void)
4057 tree block = DECL_INITIAL (current_function_decl);
4058 VEC(tree,heap) *block_stack;
4060 if (block == NULL_TREE)
4061 return;
4063 block_stack = VEC_alloc (tree, heap, 10);
4065 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4066 clear_block_marks (block);
4068 /* Prune the old trees away, so that they don't get in the way. */
4069 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4070 BLOCK_CHAIN (block) = NULL_TREE;
4072 /* Recreate the block tree from the note nesting. */
4073 reorder_blocks_1 (get_insns (), block, &block_stack);
4074 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4076 VEC_free (tree, heap, block_stack);
4079 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4081 void
4082 clear_block_marks (tree block)
4084 while (block)
4086 TREE_ASM_WRITTEN (block) = 0;
4087 clear_block_marks (BLOCK_SUBBLOCKS (block));
4088 block = BLOCK_CHAIN (block);
4092 static void
4093 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
4095 rtx insn;
4097 for (insn = insns; insn; insn = NEXT_INSN (insn))
4099 if (NOTE_P (insn))
4101 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4103 tree block = NOTE_BLOCK (insn);
4104 tree origin;
4106 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4107 origin = block;
4109 /* If we have seen this block before, that means it now
4110 spans multiple address regions. Create a new fragment. */
4111 if (TREE_ASM_WRITTEN (block))
4113 tree new_block = copy_node (block);
4115 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4116 BLOCK_FRAGMENT_CHAIN (new_block)
4117 = BLOCK_FRAGMENT_CHAIN (origin);
4118 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4120 NOTE_BLOCK (insn) = new_block;
4121 block = new_block;
4124 BLOCK_SUBBLOCKS (block) = 0;
4125 TREE_ASM_WRITTEN (block) = 1;
4126 /* When there's only one block for the entire function,
4127 current_block == block and we mustn't do this, as it
4128 would cause infinite recursion. */
4129 if (block != current_block)
4131 if (block != origin)
4132 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
4134 BLOCK_SUPERCONTEXT (block) = current_block;
4135 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4136 BLOCK_SUBBLOCKS (current_block) = block;
4137 current_block = origin;
4139 VEC_safe_push (tree, heap, *p_block_stack, block);
4141 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4143 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
4144 current_block = BLOCK_SUPERCONTEXT (current_block);
4150 /* Reverse the order of elements in the chain T of blocks,
4151 and return the new head of the chain (old last element). */
4153 tree
4154 blocks_nreverse (tree t)
4156 tree prev = 0, block, next;
4157 for (block = t; block; block = next)
4159 next = BLOCK_CHAIN (block);
4160 BLOCK_CHAIN (block) = prev;
4161 prev = block;
4163 return prev;
4166 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4167 by modifying the last node in chain 1 to point to chain 2. */
4169 tree
4170 block_chainon (tree op1, tree op2)
4172 tree t1;
4174 if (!op1)
4175 return op2;
4176 if (!op2)
4177 return op1;
4179 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4180 continue;
4181 BLOCK_CHAIN (t1) = op2;
4183 #ifdef ENABLE_TREE_CHECKING
4185 tree t2;
4186 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4187 gcc_assert (t2 != t1);
4189 #endif
4191 return op1;
4194 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4195 non-NULL, list them all into VECTOR, in a depth-first preorder
4196 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4197 blocks. */
4199 static int
4200 all_blocks (tree block, tree *vector)
4202 int n_blocks = 0;
4204 while (block)
4206 TREE_ASM_WRITTEN (block) = 0;
4208 /* Record this block. */
4209 if (vector)
4210 vector[n_blocks] = block;
4212 ++n_blocks;
4214 /* Record the subblocks, and their subblocks... */
4215 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4216 vector ? vector + n_blocks : 0);
4217 block = BLOCK_CHAIN (block);
4220 return n_blocks;
4223 /* Return a vector containing all the blocks rooted at BLOCK. The
4224 number of elements in the vector is stored in N_BLOCKS_P. The
4225 vector is dynamically allocated; it is the caller's responsibility
4226 to call `free' on the pointer returned. */
4228 static tree *
4229 get_block_vector (tree block, int *n_blocks_p)
4231 tree *block_vector;
4233 *n_blocks_p = all_blocks (block, NULL);
4234 block_vector = XNEWVEC (tree, *n_blocks_p);
4235 all_blocks (block, block_vector);
4237 return block_vector;
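/* Illustrative example: for the block tree

       A
      / \
     B   C
         |
         D

   where B is chained before C, all_blocks fills VECTOR in
   depth-first preorder as { A, B, C, D }, and number_blocks below
   hands out BLOCK_NUMBERs in that order, skipping the top-level
   block A.  */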
4240 static GTY(()) int next_block_index = 2;
4242 /* Set BLOCK_NUMBER for all the blocks in FN. */
4244 void
4245 number_blocks (tree fn)
4247 int i;
4248 int n_blocks;
4249 tree *block_vector;
4251 /* For SDB and XCOFF debugging output, we start numbering the blocks
4252 from 1 within each function, rather than keeping a running
4253 count. */
4254 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4255 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4256 next_block_index = 1;
4257 #endif
4259 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4261 /* The top-level BLOCK isn't numbered at all. */
4262 for (i = 1; i < n_blocks; ++i)
4263 /* We number the blocks from two. */
4264 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4266 free (block_vector);
4268 return;
4271 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4273 DEBUG_FUNCTION tree
4274 debug_find_var_in_block_tree (tree var, tree block)
4276 tree t;
4278 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4279 if (t == var)
4280 return block;
4282 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4284 tree ret = debug_find_var_in_block_tree (var, t);
4285 if (ret)
4286 return ret;
4289 return NULL_TREE;
4292 /* Keep track of whether we're in a dummy function context. If we are,
4293 we don't want to invoke the set_current_function hook, because we'll
4294 get into trouble if the hook calls target_reinit () recursively or
4295 when the initial initialization is not yet complete. */
4297 static bool in_dummy_function;
4299 /* Invoke the target hook when setting cfun. Update the optimization options
4300 if the function uses different options than the default. */
4302 static void
4303 invoke_set_current_function_hook (tree fndecl)
4305 if (!in_dummy_function)
4307 tree opts = ((fndecl)
4308 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4309 : optimization_default_node);
4311 if (!opts)
4312 opts = optimization_default_node;
4314 /* Change optimization options if needed. */
4315 if (optimization_current_node != opts)
4317 optimization_current_node = opts;
4318 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4321 targetm.set_current_function (fndecl);
4325 /* cfun should never be set directly; use this function. */
4327 void
4328 set_cfun (struct function *new_cfun)
4330 if (cfun != new_cfun)
4332 cfun = new_cfun;
4333 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4337 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4339 static VEC(function_p,heap) *cfun_stack;
4341 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
4343 void
4344 push_cfun (struct function *new_cfun)
4346 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4347 set_cfun (new_cfun);
4350 /* Pop cfun from the stack. */
4352 void
4353 pop_cfun (void)
4355 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
4356 set_cfun (new_cfun);
4359 /* Return the current value of funcdef_no and increment it. */
4360 int
4361 get_next_funcdef_no (void)
4363 return funcdef_no++;
4366 /* Return the current value of funcdef_no. */
4367 int
4368 get_last_funcdef_no (void)
4370 return funcdef_no;
4373 /* Allocate a function structure for FNDECL and set its contents
4374 to the defaults. Set cfun to the newly-allocated object.
4375 Some of the helper functions invoked during initialization assume
4376 that cfun has already been set. Therefore, assign the new object
4377 directly into cfun and invoke the back end hook explicitly at the
4378 very end, rather than initializing a temporary and calling set_cfun
4379 on it.
4381 ABSTRACT_P is true if this is a function that will never be seen by
4382 the middle-end. Such functions are front-end concepts (like C++
4383 function templates) that do not correspond directly to functions
4384 placed in object files. */
4386 void
4387 allocate_struct_function (tree fndecl, bool abstract_p)
4389 tree result;
4390 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4392 cfun = ggc_alloc_cleared_function ();
4394 init_eh_for_function ();
4396 if (init_machine_status)
4397 cfun->machine = (*init_machine_status) ();
4399 #ifdef OVERRIDE_ABI_FORMAT
4400 OVERRIDE_ABI_FORMAT (fndecl);
4401 #endif
4403 invoke_set_current_function_hook (fndecl);
4405 if (fndecl != NULL_TREE)
4407 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4408 cfun->decl = fndecl;
4409 current_function_funcdef_no = get_next_funcdef_no ();
4411 result = DECL_RESULT (fndecl);
4412 if (!abstract_p && aggregate_value_p (result, fndecl))
4414 #ifdef PCC_STATIC_STRUCT_RETURN
4415 cfun->returns_pcc_struct = 1;
4416 #endif
4417 cfun->returns_struct = 1;
4420 cfun->stdarg = stdarg_p (fntype);
4422 /* Assume all registers in stdarg functions need to be saved. */
4423 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4424 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4426 /* ??? This could be set on a per-function basis by the front-end
4427 but is this worth the hassle? */
4428 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4432 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4433 instead of just setting it. */
4435 void
4436 push_struct_function (tree fndecl)
4438 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4439 allocate_struct_function (fndecl, false);
4442 /* Reset crtl and other non-struct-function variables to defaults as
4443 appropriate for emitting rtl at the start of a function. */
4445 static void
4446 prepare_function_start (void)
4448 gcc_assert (!crtl->emit.x_last_insn);
4449 init_temp_slots ();
4450 init_emit ();
4451 init_varasm_status ();
4452 init_expr ();
4453 default_rtl_profile ();
4455 if (flag_stack_usage_info)
4457 cfun->su = ggc_alloc_cleared_stack_usage ();
4458 cfun->su->static_stack_size = -1;
4461 cse_not_expected = ! optimize;
4463 /* Caller save not needed yet. */
4464 caller_save_needed = 0;
4466 /* We haven't done register allocation yet. */
4467 reg_renumber = 0;
4469 /* Indicate that we have not instantiated virtual registers yet. */
4470 virtuals_instantiated = 0;
4472 /* Indicate that we want CONCATs now. */
4473 generating_concat_p = 1;
4475 /* Indicate we have no need of a frame pointer yet. */
4476 frame_pointer_needed = 0;
4479 /* Initialize the rtl expansion mechanism so that we can do simple things
4480 like generate sequences. This is used to provide a context during global
4481 initialization of some passes. You must call expand_dummy_function_end
4482 to exit this context. */
4484 void
4485 init_dummy_function_start (void)
4487 gcc_assert (!in_dummy_function);
4488 in_dummy_function = true;
4489 push_struct_function (NULL_TREE);
4490 prepare_function_start ();
4493 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4494 and initialize static variables for generating RTL for the statements
4495 of the function. */
4497 void
4498 init_function_start (tree subr)
4500 if (subr && DECL_STRUCT_FUNCTION (subr))
4501 set_cfun (DECL_STRUCT_FUNCTION (subr));
4502 else
4503 allocate_struct_function (subr, false);
4504 prepare_function_start ();
4505 decide_function_section (subr);
4507 /* Warn if this value is an aggregate type,
4508 regardless of which calling convention we are using for it. */
4509 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4510 warning (OPT_Waggregate_return, "function returns an aggregate");
4513 /* Make sure all values used by the optimization passes have sane defaults. */
4514 unsigned int
4515 init_function_for_compilation (void)
4517 reg_renumber = 0;
4518 return 0;
4521 struct rtl_opt_pass pass_init_function =
4524 RTL_PASS,
4525 "*init_function", /* name */
4526 NULL, /* gate */
4527 init_function_for_compilation, /* execute */
4528 NULL, /* sub */
4529 NULL, /* next */
4530 0, /* static_pass_number */
4531 TV_NONE, /* tv_id */
4532 0, /* properties_required */
4533 0, /* properties_provided */
4534 0, /* properties_destroyed */
4535 0, /* todo_flags_start */
4536 0 /* todo_flags_finish */
4541 void
4542 expand_main_function (void)
4544 #if (defined(INVOKE__main) \
4545 || (!defined(HAS_INIT_SECTION) \
4546 && !defined(INIT_SECTION_ASM_OP) \
4547 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4548 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4549 #endif
4552 /* Expand code to initialize the stack_protect_guard. This is invoked at
4553 the beginning of a function to be protected. */
4555 #ifndef HAVE_stack_protect_set
4556 # define HAVE_stack_protect_set 0
4557 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4558 #endif
4560 void
4561 stack_protect_prologue (void)
4563 tree guard_decl = targetm.stack_protect_guard ();
4564 rtx x, y;
4566 x = expand_normal (crtl->stack_protect_guard);
4567 y = expand_normal (guard_decl);
4569 /* Allow the target to copy from Y to X without leaking Y into a
4570 register. */
4571 if (HAVE_stack_protect_set)
4573 rtx insn = gen_stack_protect_set (x, y);
4574 if (insn)
4576 emit_insn (insn);
4577 return;
4581 /* Otherwise do a straight move. */
4582 emit_move_insn (x, y);
4585 /* Expand code to verify the stack_protect_guard. This is invoked at
4586 the end of a function to be protected. */
4588 #ifndef HAVE_stack_protect_test
4589 # define HAVE_stack_protect_test 0
4590 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4591 #endif
4593 void
4594 stack_protect_epilogue (void)
4596 tree guard_decl = targetm.stack_protect_guard ();
4597 rtx label = gen_label_rtx ();
4598 rtx x, y, tmp;
4600 x = expand_normal (crtl->stack_protect_guard);
4601 y = expand_normal (guard_decl);
4603 /* Allow the target to compare Y with X without leaking either into
4604 a register. */
4605 switch (HAVE_stack_protect_test != 0)
4607 case 1:
4608 tmp = gen_stack_protect_test (x, y, label);
4609 if (tmp)
4611 emit_insn (tmp);
4612 break;
4614 /* FALLTHRU */
4616 default:
4617 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4618 break;
4621 /* The noreturn predictor has been moved to the tree level. The rtl-level
4622 predictors estimate this branch about 20%, which isn't enough to get
4623 things moved out of line. Since this is the only extant case of adding
4624 a noreturn function at the rtl level, it doesn't seem worth doing aught
4625 except adding the prediction by hand. */
4626 tmp = get_last_insn ();
4627 if (JUMP_P (tmp))
4628 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4630 expand_expr_stmt (targetm.stack_protect_fail ());
4631 emit_label (label);
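/* In the fallback path the check above expands to the moral
   equivalent of this pseudo-C (for illustration; the failure call
   is whatever targetm.stack_protect_fail returns, typically a call
   to __stack_chk_fail):

     if (guard_on_stack == guard_value)
       goto ok;
     __stack_chk_fail ();
   ok:;
*/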
4634 /* Start the RTL for a new function, and set variables used for
4635 emitting RTL.
4636 SUBR is the FUNCTION_DECL node. */
4640 void
4641 expand_function_start (tree subr)
4643 /* Make sure volatile mem refs aren't considered
4644 valid operands of arithmetic insns. */
4645 init_recog_no_volatile ();
4647 crtl->profile
4648 = (profile_flag
4649 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4651 crtl->limit_stack
4652 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4654 /* Make the label for return statements to jump to. Do not special
4655 case machines with special return instructions -- they will be
4656 handled later during jump, ifcvt, or epilogue creation. */
4657 return_label = gen_label_rtx ();
4659 /* Initialize rtx used to return the value. */
4660 /* Do this before assign_parms so that we copy the struct value address
4661 before any library calls that assign parms might generate. */
4663 /* Decide whether to return the value in memory or in a register. */
4664 if (aggregate_value_p (DECL_RESULT (subr), subr))
4666 /* Returning something that won't go in a register. */
4667 rtx value_address = 0;
4669 #ifdef PCC_STATIC_STRUCT_RETURN
4670 if (cfun->returns_pcc_struct)
4672 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4673 value_address = assemble_static_space (size);
4675 else
4676 #endif
4678 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4679 /* Expect to be passed the address of a place to store the value.
4680 If it is passed as an argument, assign_parms will take care of
4681 it. */
4682 if (sv)
4684 value_address = gen_reg_rtx (Pmode);
4685 emit_move_insn (value_address, sv);
4688 if (value_address)
4690 rtx x = value_address;
4691 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4693 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4694 set_mem_attributes (x, DECL_RESULT (subr), 1);
4696 SET_DECL_RTL (DECL_RESULT (subr), x);
4699 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4700 /* If return mode is void, this decl rtl should not be used. */
4701 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4702 else
4704 /* Compute the return values into a pseudo reg, which we will copy
4705 into the true return register after the cleanups are done. */
4706 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4707 if (TYPE_MODE (return_type) != BLKmode
4708 && targetm.calls.return_in_msb (return_type))
4709 /* expand_function_end will insert the appropriate padding in
4710 this case. Use the return value's natural (unpadded) mode
4711 within the function proper. */
4712 SET_DECL_RTL (DECL_RESULT (subr),
4713 gen_reg_rtx (TYPE_MODE (return_type)));
4714 else
4716 /* In order to figure out what mode to use for the pseudo, we
4717 figure out what the mode of the eventual return register will
4718 actually be, and use that. */
4719 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4721 /* Structures that are returned in registers are not
4722 aggregate_value_p, so we may see a PARALLEL or a REG. */
4723 if (REG_P (hard_reg))
4724 SET_DECL_RTL (DECL_RESULT (subr),
4725 gen_reg_rtx (GET_MODE (hard_reg)));
4726 else
4728 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4729 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4733 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4734 result to the real return register(s). */
4735 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4738 /* Initialize rtx for parameters and local variables.
4739 In some cases this requires emitting insns. */
4740 assign_parms (subr);
4742 /* If function gets a static chain arg, store it. */
4743 if (cfun->static_chain_decl)
4745 tree parm = cfun->static_chain_decl;
4746 rtx local, chain, insn;
4748 local = gen_reg_rtx (Pmode);
4749 chain = targetm.calls.static_chain (current_function_decl, true);
4751 set_decl_incoming_rtl (parm, chain, false);
4752 SET_DECL_RTL (parm, local);
4753 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4755 insn = emit_move_insn (local, chain);
4757 /* Mark the register as eliminable, similar to parameters. */
4758 if (MEM_P (chain)
4759 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
4760 set_unique_reg_note (insn, REG_EQUIV, chain);
4763 /* If the function receives a non-local goto, then store the
4764 bits we need to restore the frame pointer. */
4765 if (cfun->nonlocal_goto_save_area)
4767 tree t_save;
4768 rtx r_save;
4770 /* ??? We need to do this save early. Unfortunately here is
4771 before the frame variable gets declared. Help out... */
4772 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4773 if (!DECL_RTL_SET_P (var))
4774 expand_decl (var);
4776 t_save = build4 (ARRAY_REF,
4777 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
4778 cfun->nonlocal_goto_save_area,
4779 integer_zero_node, NULL_TREE, NULL_TREE);
4780 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4781 gcc_assert (GET_MODE (r_save) == Pmode);
4783 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4784 update_nonlocal_goto_save_area ();
4787 /* The following was moved from init_function_start.
4788 The move is supposed to make sdb output more accurate. */
4789 /* Indicate the beginning of the function body,
4790 as opposed to parm setup. */
4791 emit_note (NOTE_INSN_FUNCTION_BEG);
4793 gcc_assert (NOTE_P (get_last_insn ()));
4795 parm_birth_insn = get_last_insn ();
4797 if (crtl->profile)
4799 #ifdef PROFILE_HOOK
4800 PROFILE_HOOK (current_function_funcdef_no);
4801 #endif
4804 /* If we are doing generic stack checking, the probe should go here. */
4805 if (flag_stack_check == GENERIC_STACK_CHECK)
4806 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4808 /* Make sure there is a line number after the function entry setup code. */
4809 force_next_line_note ();
4812 /* Undo the effects of init_dummy_function_start. */
4813 void
4814 expand_dummy_function_end (void)
4816 gcc_assert (in_dummy_function);
4818 /* End any sequences that failed to be closed due to syntax errors. */
4819 while (in_sequence_p ())
4820 end_sequence ();
4822 /* Outside function body, can't compute type's actual size
4823 until next function's body starts. */
4825 free_after_parsing (cfun);
4826 free_after_compilation (cfun);
4827 pop_cfun ();
4828 in_dummy_function = false;
4831 /* Call DOIT for each hard register used as a return value from
4832 the current function. */
4834 void
4835 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4837 rtx outgoing = crtl->return_rtx;
4839 if (! outgoing)
4840 return;
4842 if (REG_P (outgoing))
4843 (*doit) (outgoing, arg);
4844 else if (GET_CODE (outgoing) == PARALLEL)
4846 int i;
4848 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4850 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4852 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4853 (*doit) (x, arg);
4858 static void
4859 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4861 emit_clobber (reg);
4864 void
4865 clobber_return_register (void)
4867 diddle_return_value (do_clobber_return_reg, NULL);
4869 /* In case we do use a pseudo to return the value, clobber it too. */
4870 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4872 tree decl_result = DECL_RESULT (current_function_decl);
4873 rtx decl_rtl = DECL_RTL (decl_result);
4874 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4876 do_clobber_return_reg (decl_rtl, NULL);
4881 static void
4882 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4884 emit_use (reg);
4887 static void
4888 use_return_register (void)
4890 diddle_return_value (do_use_return_reg, NULL);
4893 /* Possibly warn about unused parameters. */
4894 void
4895 do_warn_unused_parameter (tree fn)
4897 tree decl;
4899 for (decl = DECL_ARGUMENTS (fn);
4900 decl; decl = DECL_CHAIN (decl))
4901 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4902 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4903 && !TREE_NO_WARNING (decl))
4904 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
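/* For example, compiling

     void f (int n) {}

   with -Wunused-parameter reports "unused parameter 'n'" via the
   loop above; parameters without a DECL_NAME, artificial ones, and
   those with TREE_NO_WARNING set are skipped.  */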
4907 static GTY(()) rtx initial_trampoline;
4909 /* Generate RTL for the end of the current function. */
4911 void
4912 expand_function_end (void)
4914 rtx clobber_after;
4916 /* If arg_pointer_save_area was referenced only from a nested
4917 function, we will not have initialized it yet. Do that now. */
4918 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4919 get_arg_pointer_save_area ();
4921 /* If we are doing generic stack checking and this function makes calls,
4922 do a stack probe at the start of the function to ensure we have enough
4923 space for another stack frame. */
4924 if (flag_stack_check == GENERIC_STACK_CHECK)
4926 rtx insn, seq;
4928 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4929 if (CALL_P (insn))
4931 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
4932 start_sequence ();
4933 if (STACK_CHECK_MOVING_SP)
4934 anti_adjust_stack_and_probe (max_frame_size, true);
4935 else
4936 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
4937 seq = get_insns ();
4938 end_sequence ();
4939 set_insn_locators (seq, prologue_locator);
4940 emit_insn_before (seq, stack_check_probe_note);
4941 break;
4945 /* End any sequences that failed to be closed due to syntax errors. */
4946 while (in_sequence_p ())
4947 end_sequence ();
4949 clear_pending_stack_adjust ();
4950 do_pending_stack_adjust ();
4952 /* Output a linenumber for the end of the function.
4953 SDB depends on this. */
4954 force_next_line_note ();
4955 set_curr_insn_source_location (input_location);
4957 /* Before the return label (if any), clobber the return
4958 registers so that they are not propagated live to the rest of
4959 the function. This can only happen with functions that drop
4960 through; if there had been a return statement, there would
4961 have either been a return rtx, or a jump to the return label.
4963 We delay actual code generation after the current_function_value_rtx
4964 is computed. */
4965 clobber_after = get_last_insn ();
4967 /* Output the label for the actual return from the function. */
4968 emit_label (return_label);
4970 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
4972 /* Let except.c know where it should emit the call to unregister
4973 the function context for sjlj exceptions. */
4974 if (flag_exceptions)
4975 sjlj_emit_function_exit_after (get_last_insn ());
4977 else
4979 /* We want to ensure that instructions that may trap are not
4980 moved into the epilogue by scheduling, because we don't
4981 always emit unwind information for the epilogue. */
4982 if (cfun->can_throw_non_call_exceptions)
4983 emit_insn (gen_blockage ());
4986 /* If this is an implementation of throw, do what's necessary to
4987 communicate between __builtin_eh_return and the epilogue. */
4988 expand_eh_return ();
4990 /* If scalar return value was computed in a pseudo-reg, or was a named
4991 return value that got dumped to the stack, copy that to the hard
4992 return register. */
4993 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4995 tree decl_result = DECL_RESULT (current_function_decl);
4996 rtx decl_rtl = DECL_RTL (decl_result);
4998 if (REG_P (decl_rtl)
4999 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5000 : DECL_REGISTER (decl_result))
5002 rtx real_decl_rtl = crtl->return_rtx;
5004 /* This should be set in assign_parms. */
5005 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5007 /* If this is a BLKmode structure being returned in registers,
5008 then use the mode computed in expand_return. Note that if
5009 decl_rtl is memory, then its mode may have been changed,
5010 but that crtl->return_rtx has not. */
5011 if (GET_MODE (real_decl_rtl) == BLKmode)
5012 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5014 /* If a non-BLKmode return value should be padded at the least
5015 significant end of the register, shift it left by the appropriate
5016 amount. BLKmode results are handled using the group load/store
5017 machinery. */
5018 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5019 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5021 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5022 REGNO (real_decl_rtl)),
5023 decl_rtl);
5024 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5026 /* If a named return value dumped decl_return to memory, then
5027 we may need to re-do the PROMOTE_MODE signed/unsigned
5028 extension. */
5029 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5031 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5032 promote_function_mode (TREE_TYPE (decl_result),
5033 GET_MODE (decl_rtl), &unsignedp,
5034 TREE_TYPE (current_function_decl), 1);
5036 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5038 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5040 /* If expand_function_start has created a PARALLEL for decl_rtl,
5041 move the result to the real return registers. Otherwise, do
5042 a group load from decl_rtl for a named return. */
5043 if (GET_CODE (decl_rtl) == PARALLEL)
5044 emit_group_move (real_decl_rtl, decl_rtl);
5045 else
5046 emit_group_load (real_decl_rtl, decl_rtl,
5047 TREE_TYPE (decl_result),
5048 int_size_in_bytes (TREE_TYPE (decl_result)));
5050 /* In the case of complex integer modes smaller than a word, we'll
5051 need to generate some non-trivial bitfield insertions. Do that
5052 on a pseudo and not the hard register. */
5053 else if (GET_CODE (decl_rtl) == CONCAT
5054 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5055 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5057 int old_generating_concat_p;
5058 rtx tmp;
5060 old_generating_concat_p = generating_concat_p;
5061 generating_concat_p = 0;
5062 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5063 generating_concat_p = old_generating_concat_p;
5065 emit_move_insn (tmp, decl_rtl);
5066 emit_move_insn (real_decl_rtl, tmp);
5068 else
5069 emit_move_insn (real_decl_rtl, decl_rtl);
5073 /* If returning a structure, arrange to return the address of the value
5074 in a place where debuggers expect to find it.
5076 If returning a structure PCC style,
5077 the caller also depends on this value.
5078 And cfun->returns_pcc_struct is not necessarily set. */
5079 if (cfun->returns_struct
5080 || cfun->returns_pcc_struct)
5082 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5083 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5084 rtx outgoing;
5086 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5087 type = TREE_TYPE (type);
5088 else
5089 value_address = XEXP (value_address, 0);
5091 outgoing = targetm.calls.function_value (build_pointer_type (type),
5092 current_function_decl, true);
5094 /* Mark this as a function return value so integrate will delete the
5095 assignment and USE below when inlining this function. */
5096 REG_FUNCTION_VALUE_P (outgoing) = 1;
5098 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5099 value_address = convert_memory_address (GET_MODE (outgoing),
5100 value_address);
5102 emit_move_insn (outgoing, value_address);
5104 /* Show return register used to hold result (in this case the address
5105 of the result). */
5106 crtl->return_rtx = outgoing;
5109 /* Emit the actual code to clobber return register. */
5111 rtx seq;
5113 start_sequence ();
5114 clobber_return_register ();
5115 seq = get_insns ();
5116 end_sequence ();
5118 emit_insn_after (seq, clobber_after);
5121 /* Output the label for the naked return from the function. */
5122 if (naked_return_label)
5123 emit_label (naked_return_label);
5125 /* @@@ This is a kludge. We want to ensure that instructions that
5126 may trap are not moved into the epilogue by scheduling, because
5127 we don't always emit unwind information for the epilogue. */
5128 if (cfun->can_throw_non_call_exceptions
5129 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5130 emit_insn (gen_blockage ());
5132 /* If stack protection is enabled for this function, check the guard. */
5133 if (crtl->stack_protect_guard)
5134 stack_protect_epilogue ();
5136 /* If we had calls to alloca, and this machine needs
5137 an accurate stack pointer to exit the function,
5138 insert some code to save and restore the stack pointer. */
5139 if (! EXIT_IGNORE_STACK
5140 && cfun->calls_alloca)
5142 rtx tem = 0, seq;
5144 start_sequence ();
5145 emit_stack_save (SAVE_FUNCTION, &tem);
5146 seq = get_insns ();
5147 end_sequence ();
5148 emit_insn_before (seq, parm_birth_insn);
5150 emit_stack_restore (SAVE_FUNCTION, tem);
5153 /* ??? This should no longer be necessary since stupid is no longer with
5154 us, but there are some parts of the compiler (e.g. reload_combine, and
5155 sh mach_dep_reorg) that still try to compute their own lifetime info
5156 instead of using the general framework. */
5157 use_return_register ();
5160 rtx
5161 get_arg_pointer_save_area (void)
5163 rtx ret = arg_pointer_save_area;
5165 if (! ret)
5167 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5168 arg_pointer_save_area = ret;
5171 if (! crtl->arg_pointer_save_area_init)
5173 rtx seq;
5175 /* Save the arg pointer at the beginning of the function. The
5176 generated stack slot may not be a valid memory address, so we
5177 have to check it and fix it if necessary. */
5178 start_sequence ();
5179 emit_move_insn (validize_mem (ret),
5180 crtl->args.internal_arg_pointer);
5181 seq = get_insns ();
5182 end_sequence ();
5184 push_topmost_sequence ();
5185 emit_insn_after (seq, entry_of_function ());
5186 pop_topmost_sequence ();
5188 crtl->arg_pointer_save_area_init = true;
5191 return ret;
5194 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5195 for the first time. */
5197 static void
5198 record_insns (rtx insns, rtx end, htab_t *hashp)
5200 rtx tmp;
5201 htab_t hash = *hashp;
5203 if (hash == NULL)
5204 *hashp = hash
5205 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5207 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5209 void **slot = htab_find_slot (hash, tmp, INSERT);
5210 gcc_assert (*slot == NULL);
5211 *slot = tmp;
5215 /* INSN has been duplicated, or replaced by COPY, perhaps by duplicating a
5216 basic block, splitting or peepholes. If INSN is a prologue or epilogue
5217 insn, then record COPY as well. */
5219 void
5220 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5222 htab_t hash;
5223 void **slot;
5225 hash = epilogue_insn_hash;
5226 if (!hash || !htab_find (hash, insn))
5228 hash = prologue_insn_hash;
5229 if (!hash || !htab_find (hash, insn))
5230 return;
5233 slot = htab_find_slot (hash, copy, INSERT);
5234 gcc_assert (*slot == NULL);
5235 *slot = copy;
5238 /* Set the locator of the insn chain starting at INSN to LOC. */
5239 static void
5240 set_insn_locators (rtx insn, int loc)
5242 while (insn != NULL_RTX)
5244 if (INSN_P (insn))
5245 INSN_LOCATOR (insn) = loc;
5246 insn = NEXT_INSN (insn);
5250 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5251 we can be running after reorg, SEQUENCE rtl is possible. */
5253 static bool
5254 contains (const_rtx insn, htab_t hash)
5256 if (hash == NULL)
5257 return false;
5259 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5261 int i;
5262 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5263 if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
5264 return true;
5265 return false;
5268 return htab_find (hash, insn) != NULL;
5271 int
5272 prologue_epilogue_contains (const_rtx insn)
5274 if (contains (insn, prologue_insn_hash))
5275 return 1;
5276 if (contains (insn, epilogue_insn_hash))
5277 return 1;
5278 return 0;
5281 #ifdef HAVE_simple_return
5283 /* Return true if INSN requires the stack frame to be set up.
5284 PROLOGUE_USED contains the hard registers used in the function
5285 prologue. SET_UP_BY_PROLOGUE is the set of registers we expect the
5286 prologue to set up for the function. */
5287 bool
5288 requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used,
5289 HARD_REG_SET set_up_by_prologue)
5291 df_ref *df_rec;
5292 HARD_REG_SET hardregs;
5293 unsigned regno;
5295 if (CALL_P (insn))
5296 return !SIBLING_CALL_P (insn);
5298 CLEAR_HARD_REG_SET (hardregs);
5299 for (df_rec = DF_INSN_DEFS (insn); *df_rec; df_rec++)
5301 rtx dreg = DF_REF_REG (*df_rec);
5303 if (!REG_P (dreg))
5304 continue;
5306 add_to_hard_reg_set (&hardregs, GET_MODE (dreg),
5307 REGNO (dreg));
5309 if (hard_reg_set_intersect_p (hardregs, prologue_used))
5310 return true;
5311 AND_COMPL_HARD_REG_SET (hardregs, call_used_reg_set);
5312 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5313 if (TEST_HARD_REG_BIT (hardregs, regno)
5314 && df_regs_ever_live_p (regno))
5315 return true;
5317 for (df_rec = DF_INSN_USES (insn); *df_rec; df_rec++)
5319 rtx reg = DF_REF_REG (*df_rec);
5321 if (!REG_P (reg))
5322 continue;
5324 add_to_hard_reg_set (&hardregs, GET_MODE (reg),
5325 REGNO (reg));
5327 if (hard_reg_set_intersect_p (hardregs, set_up_by_prologue))
5328 return true;
5330 return false;
5333 /* Look for sets of call-saved registers in the first block of the
5334 function, and move them down into successor blocks if the register
5335 is used only on one path. This exposes more opportunities for
5336 shrink-wrapping.
5337 These kinds of sets often occur when incoming argument registers are
5338 moved to call-saved registers because their values are live across
5339 one or more calls during the function. */
5341 static void
5342 prepare_shrink_wrap (basic_block entry_block)
5344 rtx insn, curr;
5345 FOR_BB_INSNS_SAFE (entry_block, insn, curr)
5347 basic_block next_bb;
5348 edge e, live_edge;
5349 edge_iterator ei;
5350 rtx set, scan;
5351 unsigned destreg, srcreg;
5353 if (!NONDEBUG_INSN_P (insn))
5354 continue;
5355 set = single_set (insn);
5356 if (!set)
5357 continue;
5359 if (!REG_P (SET_SRC (set)) || !REG_P (SET_DEST (set)))
5360 continue;
5361 srcreg = REGNO (SET_SRC (set));
5362 destreg = REGNO (SET_DEST (set));
5363 if (hard_regno_nregs[srcreg][GET_MODE (SET_SRC (set))] > 1
5364 || hard_regno_nregs[destreg][GET_MODE (SET_DEST (set))] > 1)
5365 continue;
5367 next_bb = entry_block;
5368 scan = insn;
5370 for (;;)
5372 live_edge = NULL;
5373 /* Try to find a single edge across which the register is live.
5374 If we find one, we'll try to move the set across this edge. */
5375 FOR_EACH_EDGE (e, ei, next_bb->succs)
5377 if (REGNO_REG_SET_P (df_get_live_in (e->dest), destreg))
5379 if (live_edge)
5381 live_edge = NULL;
5382 break;
5384 live_edge = e;
5387 if (!live_edge)
5388 break;
5389 /* We can sometimes encounter dead code. Don't try to move it
5390 into the exit block. */
5391 if (live_edge->dest == EXIT_BLOCK_PTR)
5392 break;
5393 if (EDGE_COUNT (live_edge->dest->preds) > 1)
5394 break;
5395 while (scan != BB_END (next_bb))
5397 scan = NEXT_INSN (scan);
5398 if (NONDEBUG_INSN_P (scan))
5400 rtx link;
5401 HARD_REG_SET set_regs;
5403 CLEAR_HARD_REG_SET (set_regs);
5404 note_stores (PATTERN (scan), record_hard_reg_sets,
5405 &set_regs);
5406 if (CALL_P (scan))
5407 IOR_HARD_REG_SET (set_regs, call_used_reg_set);
5408 for (link = REG_NOTES (scan); link; link = XEXP (link, 1))
5409 if (REG_NOTE_KIND (link) == REG_INC)
5410 record_hard_reg_sets (XEXP (link, 0), NULL, &set_regs);
5412 if (TEST_HARD_REG_BIT (set_regs, srcreg)
5413 || reg_referenced_p (SET_DEST (set),
5414 PATTERN (scan)))
5416 scan = NULL_RTX;
5417 break;
5419 if (CALL_P (scan))
5421 rtx link = CALL_INSN_FUNCTION_USAGE (scan);
5422 while (link)
5424 rtx tmp = XEXP (link, 0);
5425 if (GET_CODE (tmp) == USE
5426 && reg_referenced_p (SET_DEST (set), tmp))
5427 break;
5428 link = XEXP (link, 1);
5430 if (link)
5432 scan = NULL_RTX;
5433 break;
5438 if (!scan)
5439 break;
5440 next_bb = live_edge->dest;
5443 if (next_bb != entry_block)
5445 rtx after = BB_HEAD (next_bb);
5446 while (!NOTE_P (after)
5447 || NOTE_KIND (after) != NOTE_INSN_BASIC_BLOCK)
5448 after = NEXT_INSN (after);
5449 emit_insn_after (PATTERN (insn), after);
5450 delete_insn (insn);
5455 #endif
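/* Illustrative shape of what prepare_shrink_wrap moves (register
   names are made up):

     entry:  r7 = r0          ; copy incoming arg to call-saved reg
             if (r0 == 0) goto early_exit;
             ...uses r7...

   Sinking the copy out of the entry block and into the successor
   that actually uses r7 leaves the early-exit path free of
   call-saved-register writes, so the prologue need not run on it.  */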
5457 #ifdef HAVE_return
5458 /* Insert use of return register before the end of BB. */
5460 static void
5461 emit_use_return_register_into_block (basic_block bb)
5463 rtx seq;
5464 start_sequence ();
5465 use_return_register ();
5466 seq = get_insns ();
5467 end_sequence ();
5468 emit_insn_before (seq, BB_END (bb));
5472 /* Create a return pattern, either simple_return or return, depending on
5473 simple_p. */
5475 static rtx
5476 gen_return_pattern (bool simple_p)
5478 #ifdef HAVE_simple_return
5479 return simple_p ? gen_simple_return () : gen_return ();
5480 #else
5481 gcc_assert (!simple_p);
5482 return gen_return ();
5483 #endif
5486 /* Insert an appropriate return pattern at the end of block BB. This
5487 also means updating block_for_insn appropriately. SIMPLE_P is
5488 the same as in gen_return_pattern and passed to it. */
5490 static void
5491 emit_return_into_block (bool simple_p, basic_block bb)
5493 rtx jump, pat;
5494 jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
5495 pat = PATTERN (jump);
5496 if (GET_CODE (pat) == PARALLEL)
5497 pat = XVECEXP (pat, 0, 0);
5498 gcc_assert (ANY_RETURN_P (pat));
5499 JUMP_LABEL (jump) = pat;
5501 #endif
5503 /* Set JUMP_LABEL for a return insn. */
5505 void
5506 set_return_jump_label (rtx returnjump)
5508 rtx pat = PATTERN (returnjump);
5509 if (GET_CODE (pat) == PARALLEL)
5510 pat = XVECEXP (pat, 0, 0);
5511 if (ANY_RETURN_P (pat))
5512 JUMP_LABEL (returnjump) = pat;
5513 else
5514 JUMP_LABEL (returnjump) = ret_rtx;
5517 #ifdef HAVE_simple_return
5518 /* Create a copy of BB instructions and insert at BEFORE. Redirect
5519 preds of BB to COPY_BB if they don't appear in NEED_PROLOGUE. */
5520 static void
5521 dup_block_and_redirect (basic_block bb, basic_block copy_bb, rtx before,
5522 bitmap_head *need_prologue)
5524 edge_iterator ei;
5525 edge e;
5526 rtx insn = BB_END (bb);
5528 /* We know BB has a single successor, so there is no need to copy a
5529 simple jump at the end of BB. */
5530 if (simplejump_p (insn))
5531 insn = PREV_INSN (insn);
5533 start_sequence ();
5534 duplicate_insn_chain (BB_HEAD (bb), insn);
5535 if (dump_file)
5537 unsigned count = 0;
5538 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5539 if (active_insn_p (insn))
5540 ++count;
5541 fprintf (dump_file, "Duplicating bb %d to bb %d, %u active insns.\n",
5542 bb->index, copy_bb->index, count);
5544 insn = get_insns ();
5545 end_sequence ();
5546 emit_insn_before (insn, before);
5548 /* Redirect all the paths that need no prologue into copy_bb. */
5549 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
5550 if (!bitmap_bit_p (need_prologue, e->src->index))
5552 redirect_edge_and_branch_force (e, copy_bb);
5553 continue;
5555 else
5556 ei_next (&ei);
5558 #endif
5560 #if defined (HAVE_return) || defined (HAVE_simple_return)
5561 /* Return true if there are any active insns between HEAD and TAIL. */
5562 static bool
5563 active_insn_between (rtx head, rtx tail)
5565 while (tail)
5567 if (active_insn_p (tail))
5568 return true;
5569 if (tail == head)
5570 return false;
5571 tail = PREV_INSN (tail);
5573 return false;
5576 /* LAST_BB is a block that exits, and is empty of active instructions.
5577 Examine its predecessors for jumps that can be converted to
5578 (conditional) returns. */
5579 static VEC (edge, heap) *
5580 convert_jumps_to_returns (basic_block last_bb, bool simple_p,
5581 VEC (edge, heap) *unconverted ATTRIBUTE_UNUSED)
5583 int i;
5584 basic_block bb;
5585 rtx label;
5586 edge_iterator ei;
5587 edge e;
5588 VEC(basic_block,heap) *src_bbs;
5590 src_bbs = VEC_alloc (basic_block, heap, EDGE_COUNT (last_bb->preds));
5591 FOR_EACH_EDGE (e, ei, last_bb->preds)
5592 if (e->src != ENTRY_BLOCK_PTR)
5593 VEC_quick_push (basic_block, src_bbs, e->src);
5595 label = BB_HEAD (last_bb);
5597 FOR_EACH_VEC_ELT (basic_block, src_bbs, i, bb)
5599 rtx jump = BB_END (bb);
5601 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5602 continue;
5604 e = find_edge (bb, last_bb);
5606 /* If we have an unconditional jump, we can replace that
5607 with a simple return instruction. */
5608 if (simplejump_p (jump))
5610 /* The use of the return register might be present in the exit
5611 fallthru block. Either:
5612 - removing the use is safe, and we should remove the use in
5613 the exit fallthru block, or
5614 - removing the use is not safe, and we should add it here.
5615 For now, we conservatively choose the latter. Either of the
5616 two approaches helps in crossjumping. */
5617 emit_use_return_register_into_block (bb);
5619 emit_return_into_block (simple_p, bb);
5620 delete_insn (jump);
5623 /* If we have a conditional jump branching to the last
5624 block, we can try to replace that with a conditional
5625 return instruction. */
5626 else if (condjump_p (jump))
5628 rtx dest;
5630 if (simple_p)
5631 dest = simple_return_rtx;
5632 else
5633 dest = ret_rtx;
5634 if (!redirect_jump (jump, dest, 0))
5636 #ifdef HAVE_simple_return
5637 if (simple_p)
5639 if (dump_file)
5640 fprintf (dump_file,
5641 "Failed to redirect bb %d branch.\n", bb->index);
5642 VEC_safe_push (edge, heap, unconverted, e);
5644 #endif
5645 continue;
5648 /* See comment in simplejump_p case above. */
5649 emit_use_return_register_into_block (bb);
5651 /* If this block has only one successor, it both jumps
5652 and falls through to the fallthru block, so we can't
5653 delete the edge. */
5654 if (single_succ_p (bb))
5655 continue;
5657 else
5659 #ifdef HAVE_simple_return
5660 if (simple_p)
5662 if (dump_file)
5663 fprintf (dump_file,
5664 "Failed to redirect bb %d branch.\n", bb->index);
5665 VEC_safe_push (edge, heap, unconverted, e);
5667 #endif
5668 continue;
5671 /* Fix up the CFG for the successful change we just made. */
5672 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5673 e->flags &= ~EDGE_CROSSING;
5675 VEC_free (basic_block, heap, src_bbs);
5676 return unconverted;
5679 /* Emit a return insn for the exit fallthru block. */
5680 static basic_block
5681 emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5683 basic_block last_bb = exit_fallthru_edge->src;
5685 if (JUMP_P (BB_END (last_bb)))
5687 last_bb = split_edge (exit_fallthru_edge);
5688 exit_fallthru_edge = single_succ_edge (last_bb);
5690 emit_barrier_after (BB_END (last_bb));
5691 emit_return_into_block (simple_p, last_bb);
5692 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5693 return last_bb;
5695 #endif
5698 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5699 this into place with notes indicating where the prologue ends and where
5700 the epilogue begins. Update the basic block information when possible.
5702 Notes on epilogue placement:
5703 There are several kinds of edges to the exit block:
5704 * a single fallthru edge from LAST_BB
5705 * possibly, edges from blocks containing sibcalls
5706 * possibly, fake edges from infinite loops
5708 The epilogue is always emitted on the fallthru edge from the last basic
5709 block in the function, LAST_BB, into the exit block.
5711 If LAST_BB is empty except for a label, it is the target of every
5712 other basic block in the function that ends in a return. If a
5713 target has a return or simple_return pattern (possibly with
5714 conditional variants), these basic blocks can be changed so that a
5715 return insn is emitted into them, and their target is adjusted to
5716 the real exit block.
5718 Notes on shrink-wrapping: We implement a fairly conservative
5719 version of shrink-wrapping rather than the textbook one. We only
5720 generate a single prologue and a single epilogue. This is
5721 sufficient to catch a number of interesting cases involving early
5722 exits.
5724 First, we identify the blocks that require the prologue to occur before
5725 them. These are the ones that modify a call-saved register, or reference
5726 any of the stack or frame pointer registers. To simplify things, we then
5727 mark everything reachable from these blocks as also requiring a prologue.
5728 This takes care of loops automatically, and avoids the need to examine
5729 whether MEMs reference the frame, since it is sufficient to check for
5730 occurrences of the stack or frame pointer.
5732 We then compute the set of blocks for which the need for a prologue
5733 is anticipatable (borrowing terminology from the shrink-wrapping
5734 description in Muchnick's book). These are the blocks which either
5735 require a prologue themselves, or those that have only successors
5736 where the prologue is anticipatable. The prologue needs to be
5737 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5738 is not. For the moment, we ensure that only one such edge exists.
5740 The epilogue is placed as described above, but we make a
5741 distinction between inserting return and simple_return patterns
5742 when modifying other blocks that end in a return. Blocks that end
5743 in a sibcall omit the sibcall_epilogue if the block is not in
5744 ANTIC. */
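/* A hedged illustration of the kind of early exit this catches
   (do_work is a made-up name):

       int
       f (int *p)
       {
         if (p == NULL)
           return -1;
         return do_work (p);
       }

   The early-exit path neither makes a call nor touches the frame, so
   it does not need the prologue; with shrink-wrapping the prologue is
   emitted only on the edge leading to the block containing the
   do_work call, and the early exit ends in a simple_return. */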
5746 static void
5747 thread_prologue_and_epilogue_insns (void)
5749 bool inserted;
5750 #ifdef HAVE_simple_return
5751 VEC (edge, heap) *unconverted_simple_returns = NULL;
5752 bool nonempty_prologue;
5753 bitmap_head bb_flags;
5754 unsigned max_grow_size;
5755 #endif
5756 rtx returnjump;
5757 rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
5758 rtx prologue_seq ATTRIBUTE_UNUSED, split_prologue_seq ATTRIBUTE_UNUSED;
5759 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
5760 edge_iterator ei;
5762 df_analyze ();
5764 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
5766 inserted = false;
5767 seq = NULL_RTX;
5768 epilogue_end = NULL_RTX;
5769 returnjump = NULL_RTX;
5771 /* Can't deal with multiple successors of the entry block at the
5772 moment. A function should always have at least one entry
5773 point. */
5774 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5775 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
5776 orig_entry_edge = entry_edge;
5778 split_prologue_seq = NULL_RTX;
5779 if (flag_split_stack
5780 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5781 == NULL))
5783 #ifndef HAVE_split_stack_prologue
5784 gcc_unreachable ();
5785 #else
5786 gcc_assert (HAVE_split_stack_prologue);
5788 start_sequence ();
5789 emit_insn (gen_split_stack_prologue ());
5790 split_prologue_seq = get_insns ();
5791 end_sequence ();
5793 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5794 set_insn_locators (split_prologue_seq, prologue_locator);
5795 #endif
5798 prologue_seq = NULL_RTX;
5799 #ifdef HAVE_prologue
5800 if (HAVE_prologue)
5802 start_sequence ();
5803 seq = gen_prologue ();
5804 emit_insn (seq);
5806 /* Insert an explicit USE for the frame pointer
5807 if the profiling is on and the frame pointer is required. */
5808 if (crtl->profile && frame_pointer_needed)
5809 emit_use (hard_frame_pointer_rtx);
5811 /* Retain a map of the prologue insns. */
5812 record_insns (seq, NULL, &prologue_insn_hash);
5813 emit_note (NOTE_INSN_PROLOGUE_END);
5815 /* Ensure that instructions are not moved into the prologue when
5816 profiling is on. The call to the profiling routine can be
5817 emitted within the live range of a call-clobbered register. */
5818 if (!targetm.profile_before_prologue () && crtl->profile)
5819 emit_insn (gen_blockage ());
5821 prologue_seq = get_insns ();
5822 end_sequence ();
5823 set_insn_locators (prologue_seq, prologue_locator);
5825 #endif
5827 #ifdef HAVE_simple_return
5828 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5830 /* Try to perform a kind of shrink-wrapping, making sure the
5831 prologue/epilogue is emitted only around those parts of the
5832 function that require it. */
5834 nonempty_prologue = false;
5835 for (seq = prologue_seq; seq; seq = NEXT_INSN (seq))
5836 if (!NOTE_P (seq) || NOTE_KIND (seq) != NOTE_INSN_PROLOGUE_END)
5838 nonempty_prologue = true;
5839 break;
5842 if (flag_shrink_wrap && HAVE_simple_return
5843 && (targetm.profile_before_prologue () || !crtl->profile)
5844 && nonempty_prologue && !crtl->calls_eh_return)
5846 HARD_REG_SET prologue_clobbered, prologue_used, live_on_edge;
5847 HARD_REG_SET set_up_by_prologue;
5848 rtx p_insn;
5849 VEC(basic_block, heap) *vec;
5850 basic_block bb;
5851 bitmap_head bb_antic_flags;
5852 bitmap_head bb_on_list;
5853 bitmap_head bb_tail;
5855 if (dump_file)
5856 fprintf (dump_file, "Attempting shrink-wrapping optimization.\n");
5858 /* Compute the registers set and used in the prologue. */
5859 CLEAR_HARD_REG_SET (prologue_clobbered);
5860 CLEAR_HARD_REG_SET (prologue_used);
5861 for (p_insn = prologue_seq; p_insn; p_insn = NEXT_INSN (p_insn))
5863 HARD_REG_SET this_used;
5864 if (!NONDEBUG_INSN_P (p_insn))
5865 continue;
5867 CLEAR_HARD_REG_SET (this_used);
5868 note_uses (&PATTERN (p_insn), record_hard_reg_uses,
5869 &this_used);
5870 AND_COMPL_HARD_REG_SET (this_used, prologue_clobbered);
5871 IOR_HARD_REG_SET (prologue_used, this_used);
5872 note_stores (PATTERN (p_insn), record_hard_reg_sets,
5873 &prologue_clobbered);
5876 prepare_shrink_wrap (entry_edge->dest);
5878 bitmap_initialize (&bb_antic_flags, &bitmap_default_obstack);
5879 bitmap_initialize (&bb_on_list, &bitmap_default_obstack);
5880 bitmap_initialize (&bb_tail, &bitmap_default_obstack);
5882 /* Find the set of basic blocks that require a stack frame,
5883 and blocks that are too big to be duplicated. */
5885 vec = VEC_alloc (basic_block, heap, n_basic_blocks);
5887 CLEAR_HARD_REG_SET (set_up_by_prologue);
5888 add_to_hard_reg_set (&set_up_by_prologue, Pmode, STACK_POINTER_REGNUM);
5889 add_to_hard_reg_set (&set_up_by_prologue, Pmode, ARG_POINTER_REGNUM);
5890 if (frame_pointer_needed)
5891 add_to_hard_reg_set (&set_up_by_prologue, Pmode,
5892 HARD_FRAME_POINTER_REGNUM);
5893 if (pic_offset_table_rtx)
5894 add_to_hard_reg_set (&set_up_by_prologue, Pmode,
5895 PIC_OFFSET_TABLE_REGNUM);
5896 if (stack_realign_drap && crtl->drap_reg)
5897 add_to_hard_reg_set (&set_up_by_prologue, GET_MODE (crtl->drap_reg),
5898 REGNO (crtl->drap_reg));
5900 /* We don't use a different max size depending on
5901 optimize_bb_for_speed_p because increasing shrink-wrapping
5902 opportunities by duplicating tail blocks can actually result
5903 in an overall decrease in code size. */
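/* A worked example under assumed numbers: with a 4-byte unconditional
   jump and a default of 8 for PARAM_MAX_GROW_COPY_BB_INSNS, tails of
   up to 4 * 8 = 32 units of insn length (bytes, on most targets) may
   be duplicated. */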
5904 max_grow_size = get_uncond_jump_length ();
5905 max_grow_size *= PARAM_VALUE (PARAM_MAX_GROW_COPY_BB_INSNS);
5907 FOR_EACH_BB (bb)
5909 rtx insn;
5910 unsigned size = 0;
5912 FOR_BB_INSNS (bb, insn)
5913 if (NONDEBUG_INSN_P (insn))
5915 if (requires_stack_frame_p (insn, prologue_used,
5916 set_up_by_prologue))
5918 if (bb == entry_edge->dest)
5919 goto fail_shrinkwrap;
5920 bitmap_set_bit (&bb_flags, bb->index);
5921 VEC_quick_push (basic_block, vec, bb);
5922 break;
5924 else if (size <= max_grow_size)
5926 size += get_attr_min_length (insn);
5927 if (size > max_grow_size)
5928 bitmap_set_bit (&bb_on_list, bb->index);
5933 /* Blocks that really need a prologue, or are too big for tails. */
5934 bitmap_ior_into (&bb_on_list, &bb_flags);
5936 /* For every basic block that needs a prologue, mark all blocks
5937 reachable from it, so as to ensure they are also seen as
5938 requiring a prologue. */
5939 while (!VEC_empty (basic_block, vec))
5941 basic_block tmp_bb = VEC_pop (basic_block, vec);
5943 FOR_EACH_EDGE (e, ei, tmp_bb->succs)
5944 if (e->dest != EXIT_BLOCK_PTR
5945 && bitmap_set_bit (&bb_flags, e->dest->index))
5946 VEC_quick_push (basic_block, vec, e->dest);
5949 /* Find the set of basic blocks that need no prologue, have a
5950 single successor, can be duplicated, meet a max size
5951 requirement, and go to the exit via like blocks. */
5952 VEC_quick_push (basic_block, vec, EXIT_BLOCK_PTR);
5953 while (!VEC_empty (basic_block, vec))
5955 basic_block tmp_bb = VEC_pop (basic_block, vec);
5957 FOR_EACH_EDGE (e, ei, tmp_bb->preds)
5958 if (single_succ_p (e->src)
5959 && !bitmap_bit_p (&bb_on_list, e->src->index)
5960 && can_duplicate_block_p (e->src)
5961 && bitmap_set_bit (&bb_tail, e->src->index))
5962 VEC_quick_push (basic_block, vec, e->src);
5965 /* Now walk backwards from every block that is marked as needing
5966 a prologue to compute the bb_antic_flags bitmap. Exclude
5967 tail blocks; they can be duplicated to be used on paths not
5968 needing a prologue. */
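/* Informally, the fixed point computed below is

       ANTIC(b) = SEED(b) || (b has successors
                              && ANTIC(s) for every successor s)

   where SEED is bb_flags with the tail blocks removed. */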
5969 bitmap_clear (&bb_on_list);
5970 bitmap_and_compl (&bb_antic_flags, &bb_flags, &bb_tail);
5971 FOR_EACH_BB (bb)
5973 if (!bitmap_bit_p (&bb_antic_flags, bb->index))
5974 continue;
5975 FOR_EACH_EDGE (e, ei, bb->preds)
5976 if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
5977 && bitmap_set_bit (&bb_on_list, e->src->index))
5978 VEC_quick_push (basic_block, vec, e->src);
5980 while (!VEC_empty (basic_block, vec))
5982 basic_block tmp_bb = VEC_pop (basic_block, vec);
5983 bool all_set = true;
5985 bitmap_clear_bit (&bb_on_list, tmp_bb->index);
5986 FOR_EACH_EDGE (e, ei, tmp_bb->succs)
5987 if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
5989 all_set = false;
5990 break;
5993 if (all_set)
5995 bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
5996 FOR_EACH_EDGE (e, ei, tmp_bb->preds)
5997 if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
5998 && bitmap_set_bit (&bb_on_list, e->src->index))
5999 VEC_quick_push (basic_block, vec, e->src);
6002 /* Find exactly one edge that leads to a block in ANTIC from
6003 a block that isn't. */
6004 if (!bitmap_bit_p (&bb_antic_flags, entry_edge->dest->index))
6005 FOR_EACH_BB (bb)
6007 if (!bitmap_bit_p (&bb_antic_flags, bb->index))
6008 continue;
6009 FOR_EACH_EDGE (e, ei, bb->preds)
6010 if (!bitmap_bit_p (&bb_antic_flags, e->src->index))
6012 if (entry_edge != orig_entry_edge)
6014 entry_edge = orig_entry_edge;
6015 if (dump_file)
6016 fprintf (dump_file, "More than one candidate edge.\n");
6017 goto fail_shrinkwrap;
6019 if (dump_file)
6020 fprintf (dump_file, "Found candidate edge for "
6021 "shrink-wrapping, %d->%d.\n", e->src->index,
6022 e->dest->index);
6023 entry_edge = e;
6027 if (entry_edge != orig_entry_edge)
6029 /* Test whether the prologue is known to clobber any register
6030 (other than FP or SP) that is live on the edge. */
6031 CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM);
6032 if (frame_pointer_needed)
6033 CLEAR_HARD_REG_BIT (prologue_clobbered, HARD_FRAME_POINTER_REGNUM);
6034 CLEAR_HARD_REG_SET (live_on_edge);
6035 reg_set_to_hard_reg_set (&live_on_edge,
6036 df_get_live_in (entry_edge->dest));
6037 if (hard_reg_set_intersect_p (live_on_edge, prologue_clobbered))
6039 entry_edge = orig_entry_edge;
6040 if (dump_file)
6041 fprintf (dump_file,
6042 "Shrink-wrapping aborted due to clobber.\n");
6045 if (entry_edge != orig_entry_edge)
6047 crtl->shrink_wrapped = true;
6048 if (dump_file)
6049 fprintf (dump_file, "Performing shrink-wrapping.\n");
6051 /* Find tail blocks reachable from both blocks needing a
6052 prologue and blocks not needing a prologue. */
6053 if (!bitmap_empty_p (&bb_tail))
6054 FOR_EACH_BB (bb)
6056 bool some_pro, some_no_pro;
6057 if (!bitmap_bit_p (&bb_tail, bb->index))
6058 continue;
6059 some_pro = some_no_pro = false;
6060 FOR_EACH_EDGE (e, ei, bb->preds)
6062 if (bitmap_bit_p (&bb_flags, e->src->index))
6063 some_pro = true;
6064 else
6065 some_no_pro = true;
6067 if (some_pro && some_no_pro)
6068 VEC_quick_push (basic_block, vec, bb);
6069 else
6070 bitmap_clear_bit (&bb_tail, bb->index);
6072 /* Find the head of each tail. */
6073 while (!VEC_empty (basic_block, vec))
6075 basic_block tbb = VEC_pop (basic_block, vec);
6077 if (!bitmap_bit_p (&bb_tail, tbb->index))
6078 continue;
6080 while (single_succ_p (tbb))
6082 tbb = single_succ (tbb);
6083 bitmap_clear_bit (&bb_tail, tbb->index);
6086 /* Now duplicate the tails. */
6087 if (!bitmap_empty_p (&bb_tail))
6088 FOR_EACH_BB_REVERSE (bb)
6090 basic_block copy_bb, tbb;
6091 rtx insert_point;
6092 int eflags;
6094 if (!bitmap_clear_bit (&bb_tail, bb->index))
6095 continue;
6097 /* Create a copy of BB, instructions and all, for
6098 use on paths that don't need a prologue.
6099 Ideal placement of the copy is on a fall-thru edge
6100 or after a block that would jump to the copy. */
6101 FOR_EACH_EDGE (e, ei, bb->preds)
6102 if (!bitmap_bit_p (&bb_flags, e->src->index)
6103 && single_succ_p (e->src))
6104 break;
6105 if (e)
6107 copy_bb = create_basic_block (NEXT_INSN (BB_END (e->src)),
6108 NULL_RTX, e->src);
6109 BB_COPY_PARTITION (copy_bb, e->src);
6111 else
6113 /* Otherwise put the copy at the end of the function. */
6114 copy_bb = create_basic_block (NULL_RTX, NULL_RTX,
6115 EXIT_BLOCK_PTR->prev_bb);
6116 BB_COPY_PARTITION (copy_bb, bb);
6119 insert_point = emit_note_after (NOTE_INSN_DELETED,
6120 BB_END (copy_bb));
6121 emit_barrier_after (BB_END (copy_bb));
6123 tbb = bb;
6124 while (1)
6126 dup_block_and_redirect (tbb, copy_bb, insert_point,
6127 &bb_flags);
6128 tbb = single_succ (tbb);
6129 if (tbb == EXIT_BLOCK_PTR)
6130 break;
6131 e = split_block (copy_bb, PREV_INSN (insert_point));
6132 copy_bb = e->dest;
6135 /* Quiet verify_flow_info by (ab)using EDGE_FAKE.
6136 We have yet to add a simple_return to the tails,
6137 as we'd like to run convert_jumps_to_returns first, in
6138 case the block is no longer used after that. */
6139 eflags = EDGE_FAKE;
6140 if (CALL_P (PREV_INSN (insert_point))
6141 && SIBLING_CALL_P (PREV_INSN (insert_point)))
6142 eflags = EDGE_SIBCALL | EDGE_ABNORMAL;
6143 make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR, eflags);
6145 /* verify_flow_info doesn't like a note after a
6146 sibling call. */
6147 delete_insn (insert_point);
6148 if (bitmap_empty_p (&bb_tail))
6149 break;
6153 fail_shrinkwrap:
6154 bitmap_clear (&bb_tail);
6155 bitmap_clear (&bb_antic_flags);
6156 bitmap_clear (&bb_on_list);
6157 VEC_free (basic_block, heap, vec);
6159 #endif
6161 if (split_prologue_seq != NULL_RTX)
6163 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6164 inserted = true;
6166 if (prologue_seq != NULL_RTX)
6168 insert_insn_on_edge (prologue_seq, entry_edge);
6169 inserted = true;
6172 /* If the exit block has no non-fake predecessors, we don't need
6173 an epilogue. */
6174 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6175 if ((e->flags & EDGE_FAKE) == 0)
6176 break;
6177 if (e == NULL)
6178 goto epilogue_done;
6180 rtl_profile_for_bb (EXIT_BLOCK_PTR);
6182 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
6184 /* If we're allowed to generate a simple return instruction, then by
6185 definition we don't need a full epilogue. If the last basic
6186 block before the exit block does not contain active instructions,
6187 examine its predecessors and try to emit (conditional) return
6188 instructions. */
6189 #ifdef HAVE_simple_return
6190 if (entry_edge != orig_entry_edge)
6192 if (optimize)
6194 unsigned i, last;
6196 /* convert_jumps_to_returns may add to EXIT_BLOCK_PTR->preds
6197 (but won't remove). Stop at end of current preds. */
6198 last = EDGE_COUNT (EXIT_BLOCK_PTR->preds);
6199 for (i = 0; i < last; i++)
6201 e = EDGE_I (EXIT_BLOCK_PTR->preds, i);
6202 if (LABEL_P (BB_HEAD (e->src))
6203 && !bitmap_bit_p (&bb_flags, e->src->index)
6204 && !active_insn_between (BB_HEAD (e->src), BB_END (e->src)))
6205 unconverted_simple_returns
6206 = convert_jumps_to_returns (e->src, true,
6207 unconverted_simple_returns);
6211 if (exit_fallthru_edge != NULL
6212 && EDGE_COUNT (exit_fallthru_edge->src->preds) != 0
6213 && !bitmap_bit_p (&bb_flags, exit_fallthru_edge->src->index))
6215 basic_block last_bb;
6217 last_bb = emit_return_for_exit (exit_fallthru_edge, true);
6218 returnjump = BB_END (last_bb);
6219 exit_fallthru_edge = NULL;
6222 #endif
6223 #ifdef HAVE_return
6224 if (HAVE_return)
6226 if (exit_fallthru_edge == NULL)
6227 goto epilogue_done;
6229 if (optimize)
6231 basic_block last_bb = exit_fallthru_edge->src;
6233 if (LABEL_P (BB_HEAD (last_bb))
6234 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6235 convert_jumps_to_returns (last_bb, false, NULL);
6237 if (EDGE_COUNT (last_bb->preds) != 0
6238 && single_succ_p (last_bb))
6240 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
6241 epilogue_end = returnjump = BB_END (last_bb);
6242 #ifdef HAVE_simple_return
6243 /* Emitting the return may add a basic block.
6244 Fix bb_flags for the added block. */
6245 if (last_bb != exit_fallthru_edge->src)
6246 bitmap_set_bit (&bb_flags, last_bb->index);
6247 #endif
6248 goto epilogue_done;
6252 #endif
6254 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6255 this marker for the splits of EH_RETURN patterns, and nothing else
6256 uses the flag in the meantime. */
6257 epilogue_completed = 1;
6259 #ifdef HAVE_eh_return
6260 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6261 some targets, these get split to a special version of the epilogue
6262 code. In order to be able to properly annotate these with unwind
6263 info, try to split them now. If we get a valid split, drop an
6264 EPILOGUE_BEG note and mark the insns as epilogue insns. */
6265 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6267 rtx prev, last, trial;
6269 if (e->flags & EDGE_FALLTHRU)
6270 continue;
6271 last = BB_END (e->src);
6272 if (!eh_returnjump_p (last))
6273 continue;
6275 prev = PREV_INSN (last);
6276 trial = try_split (PATTERN (last), last, 1);
6277 if (trial == last)
6278 continue;
6280 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6281 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6283 #endif
6285 /* If nothing falls through into the exit block, we don't need an
6286 epilogue. */
6288 if (exit_fallthru_edge == NULL)
6289 goto epilogue_done;
6291 #ifdef HAVE_epilogue
6292 if (HAVE_epilogue)
6294 start_sequence ();
6295 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
6296 seq = gen_epilogue ();
6297 if (seq)
6298 emit_jump_insn (seq);
6300 /* Retain a map of the epilogue insns. */
6301 record_insns (seq, NULL, &epilogue_insn_hash);
6302 set_insn_locators (seq, epilogue_locator);
6304 seq = get_insns ();
6305 returnjump = get_last_insn ();
6306 end_sequence ();
6308 insert_insn_on_edge (seq, exit_fallthru_edge);
6309 inserted = true;
6311 if (JUMP_P (returnjump))
6312 set_return_jump_label (returnjump);
6314 else
6315 #endif
6317 basic_block cur_bb;
6319 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
6320 goto epilogue_done;
6321 /* We have a fall-through edge to the exit block, the source is not
6322 at the end of the function, and there will be an assembler epilogue
6323 at the end of the function.
6324 We can't use force_nonfallthru here, because that would try to
6325 use return. Inserting a jump 'by hand' is extremely messy, so
6326 we take advantage of cfg_layout_finalize using
6327 fixup_fallthru_exit_predecessor. */
6328 cfg_layout_initialize (0);
6329 FOR_EACH_BB (cur_bb)
6330 if (cur_bb->index >= NUM_FIXED_BLOCKS
6331 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6332 cur_bb->aux = cur_bb->next_bb;
6333 cfg_layout_finalize ();
6336 epilogue_done:
6338 default_rtl_profile ();
6340 if (inserted)
6342 sbitmap blocks;
6344 commit_edge_insertions ();
6346 /* Look for basic blocks within the prologue insns. */
6347 blocks = sbitmap_alloc (last_basic_block);
6348 sbitmap_zero (blocks);
6349 SET_BIT (blocks, entry_edge->dest->index);
6350 SET_BIT (blocks, orig_entry_edge->dest->index);
6351 find_many_sub_basic_blocks (blocks);
6352 sbitmap_free (blocks);
6354 /* The epilogue insns we inserted may cause the exit edge to no longer
6355 be fallthru. */
6356 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6358 if (((e->flags & EDGE_FALLTHRU) != 0)
6359 && returnjump_p (BB_END (e->src)))
6360 e->flags &= ~EDGE_FALLTHRU;
6364 #ifdef HAVE_simple_return
6365 /* If there were branches to an empty LAST_BB which we tried to
6366 convert to conditional simple_returns, but couldn't for some
6367 reason, create a block to hold a simple_return insn and redirect
6368 those remaining edges. */
6369 if (!VEC_empty (edge, unconverted_simple_returns))
6371 basic_block simple_return_block_hot = NULL;
6372 basic_block simple_return_block_cold = NULL;
6373 edge pending_edge_hot = NULL;
6374 edge pending_edge_cold = NULL;
6375 basic_block exit_pred = EXIT_BLOCK_PTR->prev_bb;
6376 int i;
6378 gcc_assert (entry_edge != orig_entry_edge);
6380 /* See if we can reuse the last insn that was emitted for the
6381 epilogue. */
6382 if (returnjump != NULL_RTX
6383 && JUMP_LABEL (returnjump) == simple_return_rtx)
6385 e = split_block (BLOCK_FOR_INSN (returnjump), PREV_INSN (returnjump));
6386 if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
6387 simple_return_block_hot = e->dest;
6388 else
6389 simple_return_block_cold = e->dest;
6392 /* Also check returns we might need to add to tail blocks. */
6393 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6394 if (EDGE_COUNT (e->src->preds) != 0
6395 && (e->flags & EDGE_FAKE) != 0
6396 && !bitmap_bit_p (&bb_flags, e->src->index))
6398 if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
6399 pending_edge_hot = e;
6400 else
6401 pending_edge_cold = e;
6404 FOR_EACH_VEC_ELT (edge, unconverted_simple_returns, i, e)
6406 basic_block *pdest_bb;
6407 edge pending;
6409 if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
6411 pdest_bb = &simple_return_block_hot;
6412 pending = pending_edge_hot;
6414 else
6416 pdest_bb = &simple_return_block_cold;
6417 pending = pending_edge_cold;
6420 if (*pdest_bb == NULL && pending != NULL)
6422 emit_return_into_block (true, pending->src);
6423 pending->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
6424 *pdest_bb = pending->src;
6426 else if (*pdest_bb == NULL)
6428 basic_block bb;
6429 rtx start;
6431 bb = create_basic_block (NULL, NULL, exit_pred);
6432 BB_COPY_PARTITION (bb, e->src);
6433 start = emit_jump_insn_after (gen_simple_return (),
6434 BB_END (bb));
6435 JUMP_LABEL (start) = simple_return_rtx;
6436 emit_barrier_after (start);
6438 *pdest_bb = bb;
6439 make_edge (bb, EXIT_BLOCK_PTR, 0);
6441 redirect_edge_and_branch_force (e, *pdest_bb);
6443 VEC_free (edge, heap, unconverted_simple_returns);
6446 if (entry_edge != orig_entry_edge)
6448 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6449 if (EDGE_COUNT (e->src->preds) != 0
6450 && (e->flags & EDGE_FAKE) != 0
6451 && !bitmap_bit_p (&bb_flags, e->src->index))
6453 emit_return_into_block (true, e->src);
6454 e->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
6457 #endif
6459 #ifdef HAVE_sibcall_epilogue
6460 /* Emit sibling epilogues before any sibling call sites. */
6461 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
6463 basic_block bb = e->src;
6464 rtx insn = BB_END (bb);
6465 rtx ep_seq;
6467 if (!CALL_P (insn)
6468 || ! SIBLING_CALL_P (insn)
6469 #ifdef HAVE_simple_return
6470 || (entry_edge != orig_entry_edge
6471 && !bitmap_bit_p (&bb_flags, bb->index))
6472 #endif
6475 ei_next (&ei);
6476 continue;
6479 ep_seq = gen_sibcall_epilogue ();
6480 if (ep_seq)
6482 start_sequence ();
6483 emit_note (NOTE_INSN_EPILOGUE_BEG);
6484 emit_insn (ep_seq);
6485 seq = get_insns ();
6486 end_sequence ();
6488 /* Retain a map of the epilogue insns. Used in life analysis to
6489 avoid getting rid of sibcall epilogue insns. Do this before we
6490 actually emit the sequence. */
6491 record_insns (seq, NULL, &epilogue_insn_hash);
6492 set_insn_locators (seq, epilogue_locator);
6494 emit_insn_before (seq, insn);
6496 ei_next (&ei);
6498 #endif
6500 #ifdef HAVE_epilogue
6501 if (epilogue_end)
6503 rtx insn, next;
6505 /* Similarly, move any line notes that appear after the epilogue.
6506 There is no need, however, to be quite so strict about the existence
6507 of such a note. Also possibly move
6508 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6509 info generation. */
6510 for (insn = epilogue_end; insn; insn = next)
6512 next = NEXT_INSN (insn);
6513 if (NOTE_P (insn)
6514 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6515 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6518 #endif
6520 #ifdef HAVE_simple_return
6521 bitmap_clear (&bb_flags);
6522 #endif
6524 /* Threading the prologue and epilogue changes the artificial refs
6525 in the entry and exit blocks. */
6526 epilogue_completed = 1;
6527 df_update_entry_exit_and_calls ();
6530 /* Reposition the prologue-end and epilogue-begin notes after
6531 instruction scheduling. */
6533 void
6534 reposition_prologue_and_epilogue_notes (void)
6536 #if defined (HAVE_prologue) || defined (HAVE_epilogue) \
6537 || defined (HAVE_sibcall_epilogue)
6538 /* Since the hash table is created on demand, the fact that it is
6539 non-null is a signal that it is non-empty. */
6540 if (prologue_insn_hash != NULL)
6542 size_t len = htab_elements (prologue_insn_hash);
6543 rtx insn, last = NULL, note = NULL;
6545 /* Scan from the beginning until we reach the last prologue insn. */
6546 /* ??? While we do have the CFG intact, there are two problems:
6547 (1) The prologue can contain loops (typically probing the stack),
6548 which means that the end of the prologue isn't in the first bb.
6549 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6550 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6552 if (NOTE_P (insn))
6554 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6555 note = insn;
6557 else if (contains (insn, prologue_insn_hash))
6559 last = insn;
6560 if (--len == 0)
6561 break;
6565 if (last)
6567 if (note == NULL)
6569 /* Scan forward looking for the PROLOGUE_END note. It should
6570 be right at the beginning of the block, possibly with other
6571 insn notes that got moved there. */
6572 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6574 if (NOTE_P (note)
6575 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6576 break;
6580 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6581 if (LABEL_P (last))
6582 last = NEXT_INSN (last);
6583 reorder_insns (note, note, last);
6587 if (epilogue_insn_hash != NULL)
6589 edge_iterator ei;
6590 edge e;
6592 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6594 rtx insn, first = NULL, note = NULL;
6595 basic_block bb = e->src;
6597 /* Scan from the beginning until we reach the first epilogue insn. */
6598 FOR_BB_INSNS (bb, insn)
6600 if (NOTE_P (insn))
6602 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6604 note = insn;
6605 if (first != NULL)
6606 break;
6609 else if (first == NULL && contains (insn, epilogue_insn_hash))
6611 first = insn;
6612 if (note != NULL)
6613 break;
6617 if (note)
6619 /* If the function has a single basic block, and no real
6620 epilogue insns (e.g. sibcall with no cleanup), the
6621 epilogue note can get scheduled before the prologue
6622 note. If we have frame related prologue insns, having
6623 them scanned during the epilogue will result in a crash.
6624 In this case re-order the epilogue note to just before
6625 the last insn in the block. */
6626 if (first == NULL)
6627 first = BB_END (bb);
6629 if (PREV_INSN (first) != note)
6630 reorder_insns (note, note, PREV_INSN (first));
6634 #endif /* HAVE_prologue or HAVE_epilogue */
6637 /* Returns the name of the current function. */
6638 const char *
6639 current_function_name (void)
6641 if (cfun == NULL)
6642 return "<none>";
6643 return lang_hooks.decl_printable_name (cfun->decl, 2);
6647 static unsigned int
6648 rest_of_handle_check_leaf_regs (void)
6650 #ifdef LEAF_REGISTERS
6651 current_function_uses_only_leaf_regs
6652 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6653 #endif
6654 return 0;
6657 /* Insert a TYPE into the used types hash table of CFUN. */
6659 static void
6660 used_types_insert_helper (tree type, struct function *func)
6662 if (type != NULL && func != NULL)
6664 void **slot;
6666 if (func->used_types_hash == NULL)
6667 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
6668 htab_eq_pointer, NULL);
6669 slot = htab_find_slot (func->used_types_hash, type, INSERT);
6670 if (*slot == NULL)
6671 *slot = type;
6675 /* Given a type, insert it into the used hash table in cfun. */
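/* Informal example: for a declaration like 'int (*a)[10]', the loop
   below strips the pointer and array layers and records the element
   type 'int'; if an intermediate layer carries a TYPE_NAME (say, via
   a typedef), stripping stops there so that the named type is the one
   recorded. */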
6676 void
6677 used_types_insert (tree t)
6679 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6680 if (TYPE_NAME (t))
6681 break;
6682 else
6683 t = TREE_TYPE (t);
6684 if (TREE_CODE (t) == ERROR_MARK)
6685 return;
6686 if (TYPE_NAME (t) == NULL_TREE
6687 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6688 t = TYPE_MAIN_VARIANT (t);
6689 if (debug_info_level > DINFO_LEVEL_NONE)
6691 if (cfun)
6692 used_types_insert_helper (t, cfun);
6693 else
6694 /* So this might be a type referenced by a global variable.
6695 Record that type so that we can later decide to emit its debug
6696 information. */
6697 VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
6701 /* Helper to hash a struct types_used_by_vars_entry. */
6703 static hashval_t
6704 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6706 gcc_assert (entry && entry->var_decl && entry->type);
6708 return iterative_hash_object (entry->type,
6709 iterative_hash_object (entry->var_decl, 0));
6712 /* Hash function of the types_used_by_vars_entry hash table. */
6714 hashval_t
6715 types_used_by_vars_do_hash (const void *x)
6717 const struct types_used_by_vars_entry *entry =
6718 (const struct types_used_by_vars_entry *) x;
6720 return hash_types_used_by_vars_entry (entry);
6723 /* Equality function of the types_used_by_vars_entry hash table. */
6725 int
6726 types_used_by_vars_eq (const void *x1, const void *x2)
6728 const struct types_used_by_vars_entry *e1 =
6729 (const struct types_used_by_vars_entry *) x1;
6730 const struct types_used_by_vars_entry *e2 =
6731 (const struct types_used_by_vars_entry *)x2;
6733 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6736 /* Inserts an entry into the types_used_by_vars_hash hash table. */
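/* Informal usage sketch (the callers live in the front ends): for a
   file-scope 'struct S s;', the front end would record the pair with
   types_used_by_var_decl_insert (TREE_TYPE (s_decl), s_decl), where
   s_decl is a made-up name for the VAR_DECL of 's', so that debug
   info for 'struct S' can be emitted even if the type is otherwise
   unreferenced. */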
6738 void
6739 types_used_by_var_decl_insert (tree type, tree var_decl)
6741 if (type != NULL && var_decl != NULL)
6743 void **slot;
6744 struct types_used_by_vars_entry e;
6745 e.var_decl = var_decl;
6746 e.type = type;
6747 if (types_used_by_vars_hash == NULL)
6748 types_used_by_vars_hash =
6749 htab_create_ggc (37, types_used_by_vars_do_hash,
6750 types_used_by_vars_eq, NULL);
6751 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
6752 hash_types_used_by_vars_entry (&e), INSERT);
6753 if (*slot == NULL)
6755 struct types_used_by_vars_entry *entry;
6756 entry = ggc_alloc_types_used_by_vars_entry ();
6757 entry->type = type;
6758 entry->var_decl = var_decl;
6759 *slot = entry;
6764 struct rtl_opt_pass pass_leaf_regs =
6767 RTL_PASS,
6768 "*leaf_regs", /* name */
6769 NULL, /* gate */
6770 rest_of_handle_check_leaf_regs, /* execute */
6771 NULL, /* sub */
6772 NULL, /* next */
6773 0, /* static_pass_number */
6774 TV_NONE, /* tv_id */
6775 0, /* properties_required */
6776 0, /* properties_provided */
6777 0, /* properties_destroyed */
6778 0, /* todo_flags_start */
6779 0 /* todo_flags_finish */
6783 static unsigned int
6784 rest_of_handle_thread_prologue_and_epilogue (void)
6786 if (optimize)
6787 cleanup_cfg (CLEANUP_EXPENSIVE);
6789 /* On some machines, the prologue and epilogue code, or parts thereof,
6790 can be represented as RTL. Doing so lets us schedule insns between
6791 it and the rest of the code and also allows delayed branch
6792 scheduling to operate in the epilogue. */
6793 thread_prologue_and_epilogue_insns ();
6795 /* The stack usage info is finalized during prologue expansion. */
6796 if (flag_stack_usage_info)
6797 output_stack_usage ();
6799 return 0;
6802 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
6805 RTL_PASS,
6806 "pro_and_epilogue", /* name */
6807 NULL, /* gate */
6808 rest_of_handle_thread_prologue_and_epilogue, /* execute */
6809 NULL, /* sub */
6810 NULL, /* next */
6811 0, /* static_pass_number */
6812 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6813 0, /* properties_required */
6814 0, /* properties_provided */
6815 0, /* properties_destroyed */
6816 TODO_verify_flow, /* todo_flags_start */
6817 TODO_df_verify |
6818 TODO_df_finish | TODO_verify_rtl_sharing |
6819 TODO_ggc_collect /* todo_flags_finish */
6824 /* This mini-pass fixes fall-out from SSA in asm statements that have
6825 in-out constraints. Say you start with
6827 orig = inout;
6828 asm ("": "+mr" (inout));
6829 use (orig);
6831 which is transformed very early to use explicit output and match operands:
6833 orig = inout;
6834 asm ("": "=mr" (inout) : "0" (inout));
6835 use (orig);
6837 Or, after SSA and copyprop,
6839 asm ("": "=mr" (inout_2) : "0" (inout_1));
6840 use (inout_1);
6842 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6843 they represent two separate values, so they will get different pseudo
6844 registers during expansion. Then, since the two operands need to match
6845 per the constraints, but use different pseudo registers, reload can
6846 only register a reload for these operands. But reloads can only be
6847 satisfied by hardregs, not by memory, so we need a register for this
6848 reload, just because we are presented with non-matching operands.
6849 So, even though we allow memory for this operand, no memory can be
6850 used for it, just because the two operands don't match. This can
6851 cause reload failures on register-starved targets.
6853 So this is a symptom of reload not being able to use memory for reloads;
6854 alternatively, it is a symptom of both operands not coming into
6855 reload as matching (in which case the pseudo could go to memory just
6856 fine, as the alternative allows it, and no reload would be necessary).
6857 We fix the latter problem here, by transforming
6859 asm ("": "=mr" (inout_2) : "0" (inout_1));
6861 back to
6863 inout_2 = inout_1;
6864 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
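/* A hedged, self-contained C test case (hypothetical, not from the
   testsuite) that exercises this pass:

       int
       f (int inout)
       {
         int orig = inout;
         asm ("" : "+mr" (inout));
         return orig + inout;
       }

   After SSA, the "+mr" constraint is rewritten as "=mr" plus a
   matching "0" input as shown above, and on register-starved targets
   the move inserted by this pass is what allows the operand to live
   in memory. */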
6866 static void
6867 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
6869 int i;
6870 bool changed = false;
6871 rtx op = SET_SRC (p_sets[0]);
6872 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6873 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6874 bool *output_matched = XALLOCAVEC (bool, noutputs);
6876 memset (output_matched, 0, noutputs * sizeof (bool));
6877 for (i = 0; i < ninputs; i++)
6879 rtx input, output, insns;
6880 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6881 char *end;
6882 int match, j;
6884 if (*constraint == '%')
6885 constraint++;
6887 match = strtoul (constraint, &end, 10);
6888 if (end == constraint)
6889 continue;
6891 gcc_assert (match < noutputs);
6892 output = SET_DEST (p_sets[match]);
6893 input = RTVEC_ELT (inputs, i);
6894 /* Only do the transformation for pseudos. */
6895 if (! REG_P (output)
6896 || rtx_equal_p (output, input)
6897 || (GET_MODE (input) != VOIDmode
6898 && GET_MODE (input) != GET_MODE (output)))
6899 continue;
6901 /* We can't do anything if the output is also used as input,
6902 as we're going to overwrite it. */
6903 for (j = 0; j < ninputs; j++)
6904 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6905 break;
6906 if (j != ninputs)
6907 continue;
6909 /* Avoid changing the same input several times. For
6910 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6911 only change in once (to out1), rather than changing it
6912 first to out1 and afterwards to out2. */
6913 if (i > 0)
6915 for (j = 0; j < noutputs; j++)
6916 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6917 break;
6918 if (j != noutputs)
6919 continue;
6921 output_matched[match] = true;
6923 start_sequence ();
6924 emit_move_insn (output, input);
6925 insns = get_insns ();
6926 end_sequence ();
6927 emit_insn_before (insns, insn);
6929 /* Now replace all mentions of the input with output. We can't
6930 just replace the occurrence in inputs[i], as the register might
6931 also be used in some other input (or even in an address of an
6932 output), which would mean possibly increasing the number of
6933 inputs by one (namely 'output' in addition), which might pose
6934 too complicated a problem for reload to solve. E.g. this situation:
6936 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6938 Here 'input' is used in two occurrences as input (once for the
6939 input operand, once for the address in the second output operand).
6940 If we would replace only the occurrence of the input operand (to
6941 make the matching) we would be left with this:
6943 output = input
6944 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6946 Now we suddenly have two different input values (containing the same
6947 value, but different pseudos) where we formerly had only one.
6948 With more complicated asms this might lead to reload failures
6949 which wouldn't have happened without this pass. So, iterate over
6950 all operands and replace all occurrences of the register used. */
6951 for (j = 0; j < noutputs; j++)
6952 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6953 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6954 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6955 input, output);
6956 for (j = 0; j < ninputs; j++)
6957 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6958 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6959 input, output);
6961 changed = true;
6964 if (changed)
6965 df_insn_rescan (insn);
6968 static unsigned
6969 rest_of_match_asm_constraints (void)
6971 basic_block bb;
6972 rtx insn, pat, *p_sets;
6973 int noutputs;
6975 if (!crtl->has_asm_statement)
6976 return 0;
6978 df_set_flags (DF_DEFER_INSN_RESCAN);
6979 FOR_EACH_BB (bb)
6981 FOR_BB_INSNS (bb, insn)
6983 if (!INSN_P (insn))
6984 continue;
6986 pat = PATTERN (insn);
6987 if (GET_CODE (pat) == PARALLEL)
6988 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6989 else if (GET_CODE (pat) == SET)
6990 p_sets = &PATTERN (insn), noutputs = 1;
6991 else
6992 continue;
6994 if (GET_CODE (*p_sets) == SET
6995 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6996 match_asm_constraints_1 (insn, p_sets, noutputs);
7000 return TODO_df_finish;
7003 struct rtl_opt_pass pass_match_asm_constraints =
7006 RTL_PASS,
7007 "asmcons", /* name */
7008 NULL, /* gate */
7009 rest_of_match_asm_constraints, /* execute */
7010 NULL, /* sub */
7011 NULL, /* next */
7012 0, /* static_pass_number */
7013 TV_NONE, /* tv_id */
7014 0, /* properties_required */
7015 0, /* properties_provided */
7016 0, /* properties_destroyed */
7017 0, /* todo_flags_start */
7018 0 /* todo_flags_finish */
7023 #include "gt-function.h"